From 5d6a0a83bceefefacdcd633449bb75ceec432547 Mon Sep 17 00:00:00 2001 From: Jamie Thompson Date: Tue, 28 Nov 2023 11:22:11 +0100 Subject: [PATCH 1/7] Remove the findClass method from ClassPath the method was never used, and not well defined, e.g. with branches to search in both tasty files and class files, which could be severely inefficient. --- .../dotc/classpath/AggregateClassPath.scala | 19 ------------- .../dotc/classpath/DirectoryClassPath.scala | 22 ++------------- .../classpath/VirtualDirectoryClassPath.scala | 8 ++---- .../ZipAndJarFileLookupFactory.scala | 8 +----- .../dotc/classpath/ZipArchiveFileLookup.scala | 9 ------ compiler/src/dotty/tools/io/ClassPath.scala | 28 ++----------------- .../dotc/classpath/JrtClassPathTest.scala | 1 - .../ZipAndJarFileLookupFactoryTest.scala | 6 ++-- 8 files changed, 11 insertions(+), 90 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/classpath/AggregateClassPath.scala b/compiler/src/dotty/tools/dotc/classpath/AggregateClassPath.scala index 4c5b632bf6ab..cd44ba27df96 100644 --- a/compiler/src/dotty/tools/dotc/classpath/AggregateClassPath.scala +++ b/compiler/src/dotty/tools/dotc/classpath/AggregateClassPath.scala @@ -33,25 +33,6 @@ case class AggregateClassPath(aggregates: Seq[ClassPath]) extends ClassPath { packageIndex.getOrElseUpdate(pkg.dottedString, aggregates.filter(_.hasPackage(pkg))) } - override def findClass(className: String): Option[ClassRepresentation] = { - val (pkg, _) = PackageNameUtils.separatePkgAndClassNames(className) - - def findEntry(isSource: Boolean): Option[ClassRepresentation] = - aggregatesForPackage(PackageName(pkg)).iterator.map(_.findClass(className)).collectFirst { - case Some(s: SourceFileEntry) if isSource => s - case Some(s: BinaryFileEntry) if !isSource => s - } - - val classEntry = findEntry(isSource = false) - val sourceEntry = findEntry(isSource = true) - - (classEntry, sourceEntry) match { - case (Some(c: BinaryFileEntry), Some(s: SourceFileEntry)) => Some(BinaryAndSourceFilesEntry(c, s)) - case (c @ Some(_), _) => c - case (_, s) => s - } - } - override def asURLs: Seq[URL] = aggregates.flatMap(_.asURLs) override def asClassPathStrings: Seq[String] = aggregates.map(_.asClassPathString).distinct diff --git a/compiler/src/dotty/tools/dotc/classpath/DirectoryClassPath.scala b/compiler/src/dotty/tools/dotc/classpath/DirectoryClassPath.scala index c5b267bc774d..212f7123198a 100644 --- a/compiler/src/dotty/tools/dotc/classpath/DirectoryClassPath.scala +++ b/compiler/src/dotty/tools/dotc/classpath/DirectoryClassPath.scala @@ -274,17 +274,12 @@ final class CtSymClassPath(ctSym: java.nio.file.Path, release: Int) extends Clas } case class DirectoryClassPath(dir: JFile) extends JFileDirectoryLookup[BinaryFileEntry] with NoSourcePaths { - override def findClass(className: String): Option[ClassRepresentation] = - findClassFile(className).map(BinaryFileEntry(_)) def findClassFile(className: String): Option[AbstractFile] = { val relativePath = FileUtils.dirPath(className) - val tastyFile = new JFile(dir, relativePath + ".tasty") - if tastyFile.exists then Some(tastyFile.toPath.toPlainFile) - else - val classFile = new JFile(dir, relativePath + ".class") - if classFile.exists then Some(classFile.toPath.toPlainFile) - else None + val classFile = new JFile(dir, relativePath + ".class") + if classFile.exists then Some(classFile.toPath.toPlainFile) + else None } protected def createFileEntry(file: AbstractFile): BinaryFileEntry = BinaryFileEntry(file) @@ -301,16 +296,5 @@ case class DirectorySourcePath(dir: JFile) 
extends JFileDirectoryLookup[SourceFi protected def createFileEntry(file: AbstractFile): SourceFileEntry = SourceFileEntry(file) protected def isMatchingFile(f: JFile): Boolean = endsScalaOrJava(f.getName) - override def findClass(className: String): Option[ClassRepresentation] = findSourceFile(className).map(SourceFileEntry(_)) - - private def findSourceFile(className: String): Option[AbstractFile] = { - val relativePath = FileUtils.dirPath(className) - val sourceFile = LazyList("scala", "java") - .map(ext => new JFile(dir, relativePath + "." + ext)) - .collectFirst { case file if file.exists() => file } - - sourceFile.map(_.toPath.toPlainFile) - } - private[dotty] def sources(inPackage: PackageName): Seq[SourceFileEntry] = files(inPackage) } diff --git a/compiler/src/dotty/tools/dotc/classpath/VirtualDirectoryClassPath.scala b/compiler/src/dotty/tools/dotc/classpath/VirtualDirectoryClassPath.scala index 93583c85fff7..9a2f49a786f4 100644 --- a/compiler/src/dotty/tools/dotc/classpath/VirtualDirectoryClassPath.scala +++ b/compiler/src/dotty/tools/dotc/classpath/VirtualDirectoryClassPath.scala @@ -38,16 +38,12 @@ case class VirtualDirectoryClassPath(dir: VirtualDirectory) extends ClassPath wi def asURLs: Seq[URL] = Seq(new URI(dir.name).toURL) def asClassPathStrings: Seq[String] = Seq(dir.path) - override def findClass(className: String): Option[ClassRepresentation] = - findClassFile(className).map(BinaryFileEntry(_)) - def findClassFile(className: String): Option[AbstractFile] = { val pathSeq = FileUtils.dirPath(className).split(java.io.File.separator) val parentDir = lookupPath(dir)(pathSeq.init.toSeq, directory = true) - if parentDir == null then return None + if parentDir == null then None else - Option(lookupPath(parentDir)(pathSeq.last + ".tasty" :: Nil, directory = false)) - .orElse(Option(lookupPath(parentDir)(pathSeq.last + ".class" :: Nil, directory = false))) + Option(lookupPath(parentDir)(pathSeq.last + ".class" :: Nil, directory = false)) } private[dotty] def classes(inPackage: PackageName): Seq[BinaryFileEntry] = files(inPackage) diff --git a/compiler/src/dotty/tools/dotc/classpath/ZipAndJarFileLookupFactory.scala b/compiler/src/dotty/tools/dotc/classpath/ZipAndJarFileLookupFactory.scala index dac156c5f647..3a725ad6e052 100644 --- a/compiler/src/dotty/tools/dotc/classpath/ZipAndJarFileLookupFactory.scala +++ b/compiler/src/dotty/tools/dotc/classpath/ZipAndJarFileLookupFactory.scala @@ -45,14 +45,8 @@ object ZipAndJarClassPathFactory extends ZipAndJarFileLookupFactory { with NoSourcePaths { override def findClassFile(className: String): Option[AbstractFile] = - findClass(className).map(_.file) - - // This method is performance sensitive as it is used by SBT's ExtractDependencies phase. 
- override def findClass(className: String): Option[BinaryFileEntry] = { val (pkg, simpleClassName) = PackageNameUtils.separatePkgAndClassNames(className) - val binaries = files(PackageName(pkg), simpleClassName + ".tasty", simpleClassName + ".class") - binaries.find(_.file.isTasty).orElse(binaries.find(_.file.isClass)) - } + file(PackageName(pkg), simpleClassName + ".class").map(_.file) override private[dotty] def classes(inPackage: PackageName): Seq[BinaryFileEntry] = files(inPackage) diff --git a/compiler/src/dotty/tools/dotc/classpath/ZipArchiveFileLookup.scala b/compiler/src/dotty/tools/dotc/classpath/ZipArchiveFileLookup.scala index ca8636e3884f..4595f7978999 100644 --- a/compiler/src/dotty/tools/dotc/classpath/ZipArchiveFileLookup.scala +++ b/compiler/src/dotty/tools/dotc/classpath/ZipArchiveFileLookup.scala @@ -43,15 +43,6 @@ trait ZipArchiveFileLookup[FileEntryType <: ClassRepresentation] extends Efficie } yield createFileEntry(entry) - protected def files(inPackage: PackageName, names: String*): Seq[FileEntryType] = - for { - dirEntry <- findDirEntry(inPackage).toSeq - name <- names - entry <- Option(dirEntry.lookupName(name, directory = false)) - if isRequiredFileType(entry) - } - yield createFileEntry(entry) - protected def file(inPackage: PackageName, name: String): Option[FileEntryType] = for { dirEntry <- findDirEntry(inPackage) diff --git a/compiler/src/dotty/tools/io/ClassPath.scala b/compiler/src/dotty/tools/io/ClassPath.scala index 5344e2cf7e35..f77bc1efca91 100644 --- a/compiler/src/dotty/tools/io/ClassPath.scala +++ b/compiler/src/dotty/tools/io/ClassPath.scala @@ -56,32 +56,8 @@ trait ClassPath { private[dotty] def list(inPackage: PackageName): ClassPathEntries /** - * Returns the class file and / or source file for a given external name, e.g., "java.lang.String". - * If there is both a class file and source file, the compiler can decide whether to read the - * class file or compile the source file. - * - * Internally this seems to be used only by `ScriptRunner`, but only to call `.isDefined`. That - * could probably be implemented differently. - * - * Externally, it is used by sbt's compiler interface: - * https://github.com/sbt/sbt/blob/v0.13.15/compile/interface/src/main/scala/xsbt/CompilerInterface.scala#L249 - * Jason has some improvements for that in the works (https://github.com/scala/bug/issues/10289#issuecomment-310022699) - */ - def findClass(className: String): Option[ClassRepresentation] = { - // A default implementation which should be overridden, if we can create the more efficient - // solution for a given type of ClassPath - val (pkg, simpleClassName) = PackageNameUtils.separatePkgAndClassNames(className) - - val packageName = PackageName(pkg) - val foundClassFromClassFiles = classes(packageName).find(_.name == simpleClassName) - def findClassInSources = sources(packageName).find(_.name == simpleClassName) - - foundClassFromClassFiles orElse findClassInSources - } - - /** - * Returns the classfile for an external name, e.g., "java.lang.String". This method does not - * return source files. + * Returns *only* the classfile for an external name, e.g., "java.lang.String". This method does not + * return source files, tasty files,. * * This method is used by the classfile parser. When parsing a Java class, its own inner classes * are entered with a `ClassfileLoader` that parses the classfile returned by this method. 
diff --git a/compiler/test/dotty/tools/dotc/classpath/JrtClassPathTest.scala b/compiler/test/dotty/tools/dotc/classpath/JrtClassPathTest.scala index b676bb100320..a06698c1d513 100644 --- a/compiler/test/dotty/tools/dotc/classpath/JrtClassPathTest.scala +++ b/compiler/test/dotty/tools/dotc/classpath/JrtClassPathTest.scala @@ -38,7 +38,6 @@ class JrtClassPathTest { assertEquals("java/lang/Object", AsmUtils.readClass(jl_Object.file.toByteArray).name) assertTrue(cp.list("java.lang").packages.exists(_.name == "java.lang.annotation")) assertTrue(cp.list("java.lang").classesAndSources.exists(_.name == "Object")) - assertTrue(cp.findClass("java.lang.Object").isDefined) assertTrue(cp.findClassFile("java.lang.Object").isDefined) } } diff --git a/compiler/test/dotty/tools/dotc/classpath/ZipAndJarFileLookupFactoryTest.scala b/compiler/test/dotty/tools/dotc/classpath/ZipAndJarFileLookupFactoryTest.scala index 84973b8d3d71..db14ff3b1fb4 100644 --- a/compiler/test/dotty/tools/dotc/classpath/ZipAndJarFileLookupFactoryTest.scala +++ b/compiler/test/dotty/tools/dotc/classpath/ZipAndJarFileLookupFactoryTest.scala @@ -27,7 +27,7 @@ class ZipAndJarFileLookupFactoryTest { createZip(f, Array(), "p2/X.class") createZip(f, Array(), "p3/Y.class") val cp1 = createCp - assert(cp1.findClass("p1.C").isDefined) + assert(cp1.findClassFile("p1.C").isDefined) // We expect get a cache hit as the underlying zip hasn't changed val cp2 = createCp @@ -46,8 +46,8 @@ class ZipAndJarFileLookupFactoryTest { val cp3 = createCp assert(cp1 ne cp3, (System.identityHashCode(cp1), System.identityHashCode(cp3))) // And that instance should see D, not C, in package p1. - assert(cp3.findClass("p1.C").isEmpty) - assert(cp3.findClass("p1.D").isDefined) + assert(cp3.findClassFile("p1.C").isEmpty) + assert(cp3.findClassFile("p1.D").isDefined) } finally Files.delete(f) } From 8168d1e3f496f8d1b5c808765aeb257bb376264d Mon Sep 17 00:00:00 2001 From: Jamie Thompson Date: Tue, 28 Nov 2023 12:05:34 +0100 Subject: [PATCH 2/7] use an enum to store file extensions, This caches common file extensions, while still being extensible. Also fixes many operations with unexpected behavior (manipulation of file extensions where toLowerCase behaves differently with certain locales.) 
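[Illustrative note, not part of the patch.] The locale problem the message above refers to is the default-locale `toLowerCase` pitfall: comparing file extensions via lowercased string equality can fail on JVMs whose default locale maps `i`/`I` differently (e.g. Turkish), whereas the cached enum plus `equalsIgnoreCase` comparisons introduced in this patch are locale-independent. A minimal sketch using only the standard library (the demo name is made up):

```scala
import java.util.Locale

@main def extensionLocaleDemo(): Unit =
  val turkish = Locale.forLanguageTag("tr-TR")

  // Under a Turkish locale the capital 'I' in "INC" lowercases to dotless 'ı',
  // so a naive `toLowerCase == ".inc"`-style comparison silently fails.
  println("Foo.INC".toLowerCase(turkish).endsWith(".inc"))      // false
  println("Foo.INC".toLowerCase(Locale.ROOT).endsWith(".inc"))  // true

  // `equalsIgnoreCase` is locale-independent, which is why the per-extension
  // comparisons in the new FileExtension lookup are safe under any default locale.
  println("INC".equalsIgnoreCase("inc"))                        // true
```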
--- .../dotty/tools/dotc/CompilationUnit.scala | 13 +-- compiler/src/dotty/tools/dotc/Driver.scala | 8 +- .../tools/dotc/classpath/ClassPath.scala | 3 +- .../dotc/classpath/DirectoryClassPath.scala | 2 +- .../tools/dotc/classpath/FileUtils.scala | 41 +++++----- .../classpath/VirtualDirectoryClassPath.scala | 2 +- .../ZipAndJarFileLookupFactory.scala | 2 +- .../dotty/tools/dotc/config/Settings.scala | 30 +++---- .../dotty/tools/dotc/core/SymbolLoaders.scala | 4 +- .../dotc/core/classfile/ClassfileParser.scala | 4 +- .../tools/dotc/core/tasty/TastyPrinter.scala | 3 +- .../dotty/tools/dotc/fromtasty/Debug.scala | 2 +- .../dotty/tools/dotc/fromtasty/TASTYRun.scala | 10 +-- .../tools/dotc/fromtasty/TastyFileUtil.scala | 3 +- .../src/dotty/tools/dotc/sbt/ExtractAPI.scala | 4 +- .../tools/dotc/sbt/ExtractDependencies.scala | 12 +-- .../src/dotty/tools/dotc/util/EnumFlags.scala | 14 ++++ .../src/dotty/tools/io/AbstractFile.scala | 19 +++-- compiler/src/dotty/tools/io/File.scala | 2 - .../src/dotty/tools/io/FileExtension.scala | 79 +++++++++++++++++++ compiler/src/dotty/tools/io/Jar.scala | 2 +- compiler/src/dotty/tools/io/JarArchive.scala | 4 +- compiler/src/dotty/tools/io/Path.scala | 47 +++++++---- .../dotc/core/tasty/CommentPicklingTest.scala | 2 +- .../tools/dotc/printing/PrintingTest.scala | 2 +- .../transform/PatmatExhaustivityTest.scala | 6 +- .../languageserver/DottyLanguageServer.scala | 2 +- .../tools/pc/completions/Completions.scala | 2 +- .../tasty/inspector/TastyInspector.scala | 2 +- .../tasty/inspector/TastyInspector.scala | 2 +- .../scala2-library-test.scala | 2 +- .../scala2-library-from-tasty-jar.scala | 2 +- .../scala2-library-from-tasty.scala | 2 +- 33 files changed, 228 insertions(+), 106 deletions(-) create mode 100644 compiler/src/dotty/tools/dotc/util/EnumFlags.scala create mode 100644 compiler/src/dotty/tools/io/FileExtension.scala diff --git a/compiler/src/dotty/tools/dotc/CompilationUnit.scala b/compiler/src/dotty/tools/dotc/CompilationUnit.scala index 78773a518b67..2358739ebd74 100644 --- a/compiler/src/dotty/tools/dotc/CompilationUnit.scala +++ b/compiler/src/dotty/tools/dotc/CompilationUnit.scala @@ -28,13 +28,16 @@ class CompilationUnit protected (val source: SourceFile, val info: CompilationUn var tpdTree: tpd.Tree = tpd.EmptyTree /** Is this the compilation unit of a Java file */ - def isJava: Boolean = source.file.name.endsWith(".java") + def isJava: Boolean = source.file.ext.isJava /** Is this the compilation unit of a Java file, or TASTy derived from a Java file */ - def typedAsJava = isJava || { - val infoNN = info - infoNN != null && infoNN.tastyInfo.exists(_.attributes.isJava) - } + def typedAsJava = + val ext = source.file.ext + ext.isJavaOrTasty && (ext.isJava || tastyInfo.exists(_.attributes.isJava)) + + def tastyInfo: Option[TastyInfo] = + val local = info + if local == null then None else local.tastyInfo /** The source version for this unit, as determined by a language import */ diff --git a/compiler/src/dotty/tools/dotc/Driver.scala b/compiler/src/dotty/tools/dotc/Driver.scala index 196752aceb29..ae2219a4f049 100644 --- a/compiler/src/dotty/tools/dotc/Driver.scala +++ b/compiler/src/dotty/tools/dotc/Driver.scala @@ -6,7 +6,7 @@ import core.Comments.{ContextDoc, ContextDocstrings} import core.Contexts.* import core.{MacroClassLoader, TypeError} import dotty.tools.dotc.ast.Positioned -import dotty.tools.io.AbstractFile +import dotty.tools.io.{AbstractFile, FileExtension} import reporting.* import core.Decorators.* import config.Feature @@ -97,9 +97,9 @@ 
class Driver { if !file.exists then report.error(em"File does not exist: ${file.path}") None - else file.extension match - case "jar" => Some(file.path) - case "tasty" => + else file.ext match + case FileExtension.Jar => Some(file.path) + case FileExtension.Tasty => TastyFileUtil.getClassPath(file) match case Some(classpath) => Some(classpath) case _ => diff --git a/compiler/src/dotty/tools/dotc/classpath/ClassPath.scala b/compiler/src/dotty/tools/dotc/classpath/ClassPath.scala index 3210c6221a78..5f545e1b93a5 100644 --- a/compiler/src/dotty/tools/dotc/classpath/ClassPath.scala +++ b/compiler/src/dotty/tools/dotc/classpath/ClassPath.scala @@ -6,6 +6,7 @@ package dotty.tools.dotc.classpath import dotty.tools.dotc.classpath.FileUtils.isTasty import dotty.tools.io.AbstractFile import dotty.tools.io.ClassRepresentation +import dotty.tools.io.FileExtension case class ClassPathEntries(packages: scala.collection.Seq[PackageEntry], classesAndSources: scala.collection.Seq[ClassRepresentation]) { def toTuple: (scala.collection.Seq[PackageEntry], scala.collection.Seq[ClassRepresentation]) = (packages, classesAndSources) @@ -52,7 +53,7 @@ sealed trait BinaryFileEntry extends ClassRepresentation { object BinaryFileEntry { def apply(file: AbstractFile): BinaryFileEntry = if file.isTasty then - if file.resolveSiblingWithExtension("class") != null then TastyWithClassFileEntry(file) + if file.resolveSiblingWithExtension(FileExtension.Class) != null then TastyWithClassFileEntry(file) else StandaloneTastyFileEntry(file) else ClassFileEntry(file) diff --git a/compiler/src/dotty/tools/dotc/classpath/DirectoryClassPath.scala b/compiler/src/dotty/tools/dotc/classpath/DirectoryClassPath.scala index 212f7123198a..252f046ab548 100644 --- a/compiler/src/dotty/tools/dotc/classpath/DirectoryClassPath.scala +++ b/compiler/src/dotty/tools/dotc/classpath/DirectoryClassPath.scala @@ -285,7 +285,7 @@ case class DirectoryClassPath(dir: JFile) extends JFileDirectoryLookup[BinaryFil protected def createFileEntry(file: AbstractFile): BinaryFileEntry = BinaryFileEntry(file) protected def isMatchingFile(f: JFile): Boolean = - f.isTasty || (f.isClass && f.classToTasty.isEmpty) + f.isTasty || (f.isClass && !f.hasSiblingTasty) private[dotty] def classes(inPackage: PackageName): Seq[BinaryFileEntry] = files(inPackage) } diff --git a/compiler/src/dotty/tools/dotc/classpath/FileUtils.scala b/compiler/src/dotty/tools/dotc/classpath/FileUtils.scala index b8cb9a2155dc..030b0b61044a 100644 --- a/compiler/src/dotty/tools/dotc/classpath/FileUtils.scala +++ b/compiler/src/dotty/tools/dotc/classpath/FileUtils.scala @@ -17,21 +17,20 @@ object FileUtils { extension (file: AbstractFile) { def isPackage: Boolean = file.isDirectory && mayBeValidPackage(file.name) - def isClass: Boolean = !file.isDirectory && hasClassExtension && !file.name.endsWith("$class.class") - // FIXME: drop last condition when we stop being compatible with Scala 2.11 + def isClass: Boolean = !file.isDirectory && hasClassExtension - def hasClassExtension: Boolean = file.hasExtension("class") + def hasClassExtension: Boolean = file.ext.isClass - def hasTastyExtension: Boolean = file.hasExtension("tasty") + def hasTastyExtension: Boolean = file.ext.isTasty def isTasty: Boolean = !file.isDirectory && hasTastyExtension def isScalaBinary: Boolean = file.isClass || file.isTasty - def isScalaOrJavaSource: Boolean = !file.isDirectory && (file.hasExtension("scala") || file.hasExtension("java")) + def isScalaOrJavaSource: Boolean = !file.isDirectory && file.ext.isScalaOrJava // TODO 
do we need to check also other files using ZipMagicNumber like in scala.tools.nsc.io.Jar.isJarOrZip? - def isJarOrZip: Boolean = file.hasExtension("jar") || file.hasExtension("zip") + def isJarOrZip: Boolean = file.ext.isJarOrZip /** * Safe method returning a sequence containing one URL representing this file, when underlying file exists, @@ -39,27 +38,31 @@ object FileUtils { */ def toURLs(default: => Seq[URL] = Seq.empty): Seq[URL] = if (file.file == null) default else Seq(file.toURL) - /** Returns the tasty file associated with this class file */ - def classToTasty: Option[AbstractFile] = - assert(file.isClass, s"non-class: $file") - val tastyName = classNameToTasty(file.name) - Option(file.resolveSibling(tastyName)) + /** + * Returns if there is an existing sibling `.tasty` file. + */ + def hasSiblingTasty: Boolean = + assert(file.hasClassExtension, s"non-class: $file") + file.resolveSibling(classNameToTasty(file.name)) != null } extension (file: JFile) { def isPackage: Boolean = file.isDirectory && mayBeValidPackage(file.getName) - def isClass: Boolean = file.isFile && file.getName.endsWith(SUFFIX_CLASS) && !file.getName.endsWith("$class.class") - // FIXME: drop last condition when we stop being compatible with Scala 2.11 + def isClass: Boolean = file.isFile && hasClassExtension + + def hasClassExtension: Boolean = file.getName.endsWith(SUFFIX_CLASS) def isTasty: Boolean = file.isFile && file.getName.endsWith(SUFFIX_TASTY) - /** Returns the tasty file associated with this class file */ - def classToTasty: Option[JFile] = - assert(file.isClass, s"non-class: $file") - val tastyName = classNameToTasty(file.getName.stripSuffix(".class")) - val tastyPath = file.toPath.resolveSibling(tastyName) - if java.nio.file.Files.exists(tastyPath) then Some(tastyPath.toFile) else None + /** + * Returns if there is an existing sibling `.tasty` file. 
+ */ + def hasSiblingTasty: Boolean = + assert(file.hasClassExtension, s"non-class: $file") + val path = file.toPath + val tastyPath = path.resolveSibling(classNameToTasty(file.getName)) + java.nio.file.Files.exists(tastyPath) } diff --git a/compiler/src/dotty/tools/dotc/classpath/VirtualDirectoryClassPath.scala b/compiler/src/dotty/tools/dotc/classpath/VirtualDirectoryClassPath.scala index 9a2f49a786f4..0616d6c14ba6 100644 --- a/compiler/src/dotty/tools/dotc/classpath/VirtualDirectoryClassPath.scala +++ b/compiler/src/dotty/tools/dotc/classpath/VirtualDirectoryClassPath.scala @@ -51,5 +51,5 @@ case class VirtualDirectoryClassPath(dir: VirtualDirectory) extends ClassPath wi protected def createFileEntry(file: AbstractFile): BinaryFileEntry = BinaryFileEntry(file) protected def isMatchingFile(f: AbstractFile): Boolean = - f.isTasty || (f.isClass && f.classToTasty.isEmpty) + f.isTasty || (f.isClass && !f.hasSiblingTasty) } diff --git a/compiler/src/dotty/tools/dotc/classpath/ZipAndJarFileLookupFactory.scala b/compiler/src/dotty/tools/dotc/classpath/ZipAndJarFileLookupFactory.scala index 3a725ad6e052..d5473e6b26c3 100644 --- a/compiler/src/dotty/tools/dotc/classpath/ZipAndJarFileLookupFactory.scala +++ b/compiler/src/dotty/tools/dotc/classpath/ZipAndJarFileLookupFactory.scala @@ -53,7 +53,7 @@ object ZipAndJarClassPathFactory extends ZipAndJarFileLookupFactory { override protected def createFileEntry(file: FileZipArchive#Entry): BinaryFileEntry = BinaryFileEntry(file) override protected def isRequiredFileType(file: AbstractFile): Boolean = - file.isTasty || (file.isClass && file.classToTasty.isEmpty) + file.isTasty || (file.isClass && !file.hasSiblingTasty) } /** diff --git a/compiler/src/dotty/tools/dotc/config/Settings.scala b/compiler/src/dotty/tools/dotc/config/Settings.scala index a65072427ba7..816d85e6c6fd 100644 --- a/compiler/src/dotty/tools/dotc/config/Settings.scala +++ b/compiler/src/dotty/tools/dotc/config/Settings.scala @@ -64,7 +64,7 @@ object Settings: @unshared val settingCharacters = "[a-zA-Z0-9_\\-]*".r - def validateSettingString(name: String): Unit = + def validateSettingString(name: String): Unit = assert(settingCharacters.matches(name), s"Setting string $name contains invalid characters") @@ -83,7 +83,7 @@ object Settings: deprecationMsg: Option[String] = None, // kept only for -Ykind-projector option compatibility legacyArgs: Boolean = false)(private[Settings] val idx: Int) { - + validateSettingString(prefix.getOrElse(name)) aliases.foreach(validateSettingString) assert(name.startsWith(s"-${category.prefixLetter}"), s"Setting $name does not start with category -$category") @@ -92,7 +92,7 @@ object Settings: // Example: -opt Main.scala would be interpreted as -opt:Main.scala, and the source file would be ignored. 
assert(!(summon[ClassTag[T]] == ListTag && ignoreInvalidArgs), s"Ignoring invalid args is not supported for multivalue settings: $name") - val allFullNames: List[String] = s"$name" :: s"-$name" :: aliases + val allFullNames: List[String] = s"$name" :: s"-$name" :: aliases def valueIn(state: SettingsState): T = state.value(idx).asInstanceOf[T] @@ -105,7 +105,7 @@ object Settings: def isMultivalue: Boolean = summon[ClassTag[T]] == ListTag def acceptsNoArg: Boolean = summon[ClassTag[T]] == BooleanTag || summon[ClassTag[T]] == OptionTag || choices.exists(_.contains("")) - + def legalChoices: String = choices match { case Some(xs) if xs.isEmpty => "" @@ -168,17 +168,17 @@ object Settings: update(x, args) catch case _: NumberFormatException => fail(s"$argValue is not an integer argument for $name", args) - - def setOutput(argValue: String, args: List[String]) = + + def setOutput(argValue: String, args: List[String]) = val path = Directory(argValue) - val isJar = path.extension == "jar" + val isJar = path.ext.isJar if (!isJar && !path.isDirectory) fail(s"'$argValue' does not exist or is not a directory or .jar file", args) else { val output = if (isJar) JarArchive.create(path) else new PlainDirectory(path) update(output, args) } - + def setVersion(argValue: String, args: List[String]) = ScalaVersion.parse(argValue) match { case Success(v) => update(v, args) @@ -193,7 +193,7 @@ object Settings: case _ => update(strings, args) - def doSet(argRest: String) = + def doSet(argRest: String) = ((summon[ClassTag[T]], args): @unchecked) match { case (BooleanTag, _) => setBoolean(argRest, args) @@ -224,16 +224,16 @@ object Settings: case _ => missingArg - def matches(argName: String): Boolean = + def matches(argName: String): Boolean = (allFullNames).exists(_ == argName.takeWhile(_ != ':')) || prefix.exists(arg.startsWith) - def argValRest: String = + def argValRest: String = if(prefix.isEmpty) arg.dropWhile(_ != ':').drop(1) else arg.drop(prefix.get.length) - - if matches(arg) then + + if matches(arg) then if deprecationMsg.isDefined then warn(s"Option $name is deprecated: ${deprecationMsg.get}", args) - else + else doSet(argValRest) else state @@ -375,7 +375,7 @@ object Settings: def OptionSetting[T: ClassTag](category: SettingCategory, name: String, descr: String, aliases: List[String] = Nil): Setting[Option[T]] = publish(Setting(category, prependName(name), descr, None, propertyClass = Some(summon[ClassTag[T]].runtimeClass), aliases = aliases)) - + def DeprecatedSetting(category: SettingCategory, name: String, descr: String, deprecationMsg: String): Setting[Boolean] = publish(Setting(category, prependName(name), descr, false, deprecationMsg = Some(deprecationMsg))) } diff --git a/compiler/src/dotty/tools/dotc/core/SymbolLoaders.scala b/compiler/src/dotty/tools/dotc/core/SymbolLoaders.scala index 75c610b29140..8b5a7ddfa65c 100644 --- a/compiler/src/dotty/tools/dotc/core/SymbolLoaders.scala +++ b/compiler/src/dotty/tools/dotc/core/SymbolLoaders.scala @@ -7,7 +7,7 @@ import java.nio.channels.ClosedByInterruptException import scala.util.control.NonFatal -import dotty.tools.dotc.classpath.FileUtils.isTasty +import dotty.tools.dotc.classpath.FileUtils.hasTastyExtension import dotty.tools.io.{ ClassPath, ClassRepresentation, AbstractFile } import dotty.tools.backend.jvm.DottyBackendInterface.symExtensions @@ -198,7 +198,7 @@ object SymbolLoaders { enterToplevelsFromSource(owner, nameOf(classRep), src) case (Some(bin), _) => val completer = - if bin.isTasty then ctx.platform.newTastyLoader(bin) + if 
bin.hasTastyExtension then ctx.platform.newTastyLoader(bin) else ctx.platform.newClassLoader(bin) enterClassAndModule(owner, nameOf(classRep), completer) } diff --git a/compiler/src/dotty/tools/dotc/core/classfile/ClassfileParser.scala b/compiler/src/dotty/tools/dotc/core/classfile/ClassfileParser.scala index 894d430fe54b..22a43dd524e1 100644 --- a/compiler/src/dotty/tools/dotc/core/classfile/ClassfileParser.scala +++ b/compiler/src/dotty/tools/dotc/core/classfile/ClassfileParser.scala @@ -23,7 +23,7 @@ import scala.annotation.switch import typer.Checking.checkNonCyclic import io.{AbstractFile, ZipArchive} import scala.util.control.NonFatal -import dotty.tools.dotc.classpath.FileUtils.classToTasty +import dotty.tools.dotc.classpath.FileUtils.hasSiblingTasty import scala.compiletime.uninitialized @@ -1143,7 +1143,7 @@ class ClassfileParser( if (scan(tpnme.TASTYATTR)) { val hint = - if classfile.classToTasty.isDefined then "This is likely a bug in the compiler. Please report." + if classfile.hasSiblingTasty then "This is likely a bug in the compiler. Please report." else "This `.tasty` file is missing. Try cleaning the project to fix this issue." report.error(s"Loading Scala 3 binary from $classfile. It should have been loaded from `.tasty` file. $hint", NoSourcePosition) return None diff --git a/compiler/src/dotty/tools/dotc/core/tasty/TastyPrinter.scala b/compiler/src/dotty/tools/dotc/core/tasty/TastyPrinter.scala index af2097f347ba..6850d87d1f4d 100644 --- a/compiler/src/dotty/tools/dotc/core/tasty/TastyPrinter.scala +++ b/compiler/src/dotty/tools/dotc/core/tasty/TastyPrinter.scala @@ -18,6 +18,7 @@ import scala.collection.immutable.BitSet import scala.compiletime.uninitialized import dotty.tools.tasty.TastyBuffer.Addr import dotty.tools.dotc.core.Names.TermName +import dotty.tools.dotc.classpath.FileUtils.hasTastyExtension object TastyPrinter: @@ -56,7 +57,7 @@ object TastyPrinter: else if arg.endsWith(".jar") then val jar = JarArchive.open(Path(arg), create = false) try - for file <- jar.iterator() if file.name.endsWith(".tasty") do + for file <- jar.iterator() if file.hasTastyExtension do printTasty(s"$arg ${file.path}", file.toByteArray) finally jar.close() else diff --git a/compiler/src/dotty/tools/dotc/fromtasty/Debug.scala b/compiler/src/dotty/tools/dotc/fromtasty/Debug.scala index 979fae239e59..2e6b699b4e36 100644 --- a/compiler/src/dotty/tools/dotc/fromtasty/Debug.scala +++ b/compiler/src/dotty/tools/dotc/fromtasty/Debug.scala @@ -40,7 +40,7 @@ object Debug { val tastyFiles = Directory(fromSourcesOut).walk - .filter(x => x.isFile && "tasty".equalsIgnoreCase(x.extension)) + .filter(x => x.isFile && x.ext.isTasty) .map(_.toString) .toList diff --git a/compiler/src/dotty/tools/dotc/fromtasty/TASTYRun.scala b/compiler/src/dotty/tools/dotc/fromtasty/TASTYRun.scala index 98ab8e2b6226..8ad9afb7d512 100644 --- a/compiler/src/dotty/tools/dotc/fromtasty/TASTYRun.scala +++ b/compiler/src/dotty/tools/dotc/fromtasty/TASTYRun.scala @@ -4,7 +4,7 @@ package fromtasty import scala.language.unsafeNulls -import io.{JarArchive, AbstractFile, Path} +import io.{JarArchive, AbstractFile, Path, FileExtension} import core.Contexts.* import core.Decorators.em import java.io.File @@ -19,14 +19,14 @@ class TASTYRun(comp: Compiler, ictx: Context) extends Run(comp, ictx) { val fromTastyIgnoreList = ctx.settings.YfromTastyIgnoreList.value.toSet // Resolve class names of tasty and jar files val classNames = files.flatMap { file => - file.extension match - case "jar" => + file.ext match + case FileExtension.Jar 
=> JarArchive.open(Path(file.path), create = false).allFileNames() .map(_.stripPrefix("/")) // change paths from absolute to relative - .filter(e => Path.extension(e) == "tasty" && !fromTastyIgnoreList(e.replace("/", File.separator))) + .filter(e => Path.fileExtension(e).isTasty && !fromTastyIgnoreList(e.replace("/", File.separator))) .map(e => e.stripSuffix(".tasty").replace("/", ".")) .toList - case "tasty" => TastyFileUtil.getClassName(file) + case FileExtension.Tasty => TastyFileUtil.getClassName(file) case _ => report.error(em"File extension is not `tasty` or `jar`: ${file.path}") Nil diff --git a/compiler/src/dotty/tools/dotc/fromtasty/TastyFileUtil.scala b/compiler/src/dotty/tools/dotc/fromtasty/TastyFileUtil.scala index bc04cc648a65..d3a9550c4491 100644 --- a/compiler/src/dotty/tools/dotc/fromtasty/TastyFileUtil.scala +++ b/compiler/src/dotty/tools/dotc/fromtasty/TastyFileUtil.scala @@ -6,6 +6,7 @@ import scala.language.unsafeNulls import dotty.tools.dotc.core.tasty.TastyClassName import dotty.tools.dotc.core.StdNames.nme.EMPTY_PACKAGE import dotty.tools.io.AbstractFile +import dotty.tools.dotc.classpath.FileUtils.hasTastyExtension object TastyFileUtil { /** Get the class path of a tasty file @@ -34,7 +35,7 @@ object TastyFileUtil { */ def getClassName(file: AbstractFile): Option[String] = { assert(file.exists) - assert(file.extension == "tasty") + assert(file.hasTastyExtension) val bytes = file.toByteArray val names = new TastyClassName(bytes).readName() names.map { case (packageName, className) => diff --git a/compiler/src/dotty/tools/dotc/sbt/ExtractAPI.scala b/compiler/src/dotty/tools/dotc/sbt/ExtractAPI.scala index dafb44d525e4..506ebc81b23d 100644 --- a/compiler/src/dotty/tools/dotc/sbt/ExtractAPI.scala +++ b/compiler/src/dotty/tools/dotc/sbt/ExtractAPI.scala @@ -18,7 +18,7 @@ import Names.* import NameOps.* import inlines.Inlines import transform.ValueClasses -import dotty.tools.io.File +import dotty.tools.io.{File, FileExtension} import java.io.PrintWriter @@ -76,7 +76,7 @@ class ExtractAPI extends Phase { if (ctx.settings.YdumpSbtInc.value) { // Append to existing file that should have been created by ExtractDependencies - val pw = new PrintWriter(File(sourceFile.file.jpath).changeExtension("inc").toFile + val pw = new PrintWriter(File(sourceFile.file.jpath).changeExtension(FileExtension.Inc).toFile .bufferedWriter(append = true), true) try { classes.foreach(source => pw.println(DefaultShowAPI(source))) diff --git a/compiler/src/dotty/tools/dotc/sbt/ExtractDependencies.scala b/compiler/src/dotty/tools/dotc/sbt/ExtractDependencies.scala index a35628dc52e4..352636f681c3 100644 --- a/compiler/src/dotty/tools/dotc/sbt/ExtractDependencies.scala +++ b/compiler/src/dotty/tools/dotc/sbt/ExtractDependencies.scala @@ -8,7 +8,7 @@ import java.nio.file.Path import java.util.{Arrays, EnumSet} import dotty.tools.dotc.ast.tpd -import dotty.tools.dotc.classpath.FileUtils.{isTasty, hasClassExtension, hasTastyExtension} +import dotty.tools.dotc.classpath.FileUtils.{hasClassExtension, hasTastyExtension} import dotty.tools.dotc.core.Contexts.* import dotty.tools.dotc.core.Decorators.* import dotty.tools.dotc.core.Flags.* @@ -21,7 +21,7 @@ import dotty.tools.dotc.core.Types.* import dotty.tools.dotc.util.{SrcPos, NoSourcePosition} import dotty.tools.io -import dotty.tools.io.{AbstractFile, PlainFile, ZipArchive, NoAbstractFile} +import dotty.tools.io.{AbstractFile, PlainFile, ZipArchive, NoAbstractFile, FileExtension} import xsbti.UseScope import xsbti.api.DependencyContext import 
xsbti.api.DependencyContext.* @@ -84,7 +84,7 @@ class ExtractDependencies extends Phase { Arrays.sort(deps) Arrays.sort(names) - val pw = io.File(unit.source.file.jpath).changeExtension("inc").toFile.printWriter() + val pw = io.File(unit.source.file.jpath).changeExtension(FileExtension.Inc).toFile.printWriter() // val pw = Console.out try { pw.println("Used Names:") @@ -495,7 +495,7 @@ class DependencyRecorder { if depFile != null then { // Cannot ignore inheritance relationship coming from the same source (see sbt/zinc#417) def allowLocal = depCtx == DependencyByInheritance || depCtx == LocalDependencyByInheritance - val isTasty = depFile.hasTastyExtension + val isTastyOrSig = depFile.hasTastyExtension def processExternalDependency() = { val binaryClassName = depClass.binaryClassName @@ -506,13 +506,13 @@ class DependencyRecorder { binaryDependency(zip.jpath, binaryClassName) case _ => case pf: PlainFile => // The dependency comes from a class file, Zinc handles JRT filesystem - binaryDependency(if isTasty then cachedSiblingClass(pf) else pf.jpath, binaryClassName) + binaryDependency(if isTastyOrSig then cachedSiblingClass(pf) else pf.jpath, binaryClassName) case _ => internalError(s"Ignoring dependency $depFile of unknown class ${depFile.getClass}}", fromClass.srcPos) } } - if isTasty || depFile.hasClassExtension then + if isTastyOrSig || depFile.hasClassExtension then processExternalDependency() else if allowLocal || depFile != sourceFile.file then // We cannot ignore dependencies coming from the same source file because diff --git a/compiler/src/dotty/tools/dotc/util/EnumFlags.scala b/compiler/src/dotty/tools/dotc/util/EnumFlags.scala new file mode 100644 index 000000000000..a833af7632de --- /dev/null +++ b/compiler/src/dotty/tools/dotc/util/EnumFlags.scala @@ -0,0 +1,14 @@ +package dotty.tools.dotc.util + +object EnumFlags: + + opaque type FlagSet[E <: reflect.Enum] = Int + + object FlagSet: + + extension [E <: reflect.Enum](set: FlagSet[E]) + def is(flag: E): Boolean = (set & (1 << flag.ordinal)) != 0 + def |(flag: E): FlagSet[E] = (set | (1 << flag.ordinal)) + + def empty[E <: reflect.Enum]: FlagSet[E] = + 0 diff --git a/compiler/src/dotty/tools/io/AbstractFile.scala b/compiler/src/dotty/tools/io/AbstractFile.scala index 243dc2953d2e..233b1ca8fb62 100644 --- a/compiler/src/dotty/tools/io/AbstractFile.scala +++ b/compiler/src/dotty/tools/io/AbstractFile.scala @@ -97,11 +97,16 @@ abstract class AbstractFile extends Iterable[AbstractFile] { /** Returns the path of this abstract file in a canonical form. */ def canonicalPath: String = if (jpath == null) path else jpath.normalize.toString - /** Checks extension case insensitively. TODO: change to enum */ - def hasExtension(other: String): Boolean = extension == other.toLowerCase + /** Checks extension case insensitively. */ + @deprecated("prefer queries on ext") + def hasExtension(other: String): Boolean = ext.toLowerCase.equalsIgnoreCase(other) - /** Returns the extension of this abstract file. TODO: store as an enum to avoid costly comparisons */ - val extension: String = Path.extension(name) + /** Returns the extension of this abstract file. */ + val ext: FileExtension = Path.fileExtension(name) + + /** Returns the extension of this abstract file as a String. */ + @deprecated("use ext instead.") + def extension: String = ext.toLowerCase /** The absolute file, if this is a relative file. 
*/ def absolute: AbstractFile @@ -129,7 +134,7 @@ abstract class AbstractFile extends Iterable[AbstractFile] { } /** Does this abstract file represent something which can contain classfiles? */ - def isClassContainer: Boolean = isDirectory || (jpath != null && (extension == "jar" || extension == "zip")) + def isClassContainer: Boolean = isDirectory || (jpath != null && ext.isJarOrZip) /** Create a file on disk, if one does not exist already. */ def create(): Unit @@ -258,8 +263,8 @@ abstract class AbstractFile extends Iterable[AbstractFile] { final def resolveSibling(name: String): AbstractFile | Null = container.lookupName(name, directory = false) - final def resolveSiblingWithExtension(extension: String): AbstractFile | Null = - resolveSibling(name.stripSuffix(this.extension) + extension) + final def resolveSiblingWithExtension(extension: FileExtension): AbstractFile | Null = + resolveSibling(Path.fileName(name) + "." + extension) private def fileOrSubdirectoryNamed(name: String, isDir: Boolean): AbstractFile = lookupName(name, isDir) match { diff --git a/compiler/src/dotty/tools/io/File.scala b/compiler/src/dotty/tools/io/File.scala index 22a0e04b2b48..59e4a2ee451b 100644 --- a/compiler/src/dotty/tools/io/File.scala +++ b/compiler/src/dotty/tools/io/File.scala @@ -39,8 +39,6 @@ object File { */ class File(jpath: JPath)(implicit constructorCodec: Codec) extends Path(jpath) with Streamable.Chars { override val creationCodec: io.Codec = constructorCodec - - override def addExtension(ext: String): File = super.addExtension(ext).toFile override def toAbsolute: File = if (isAbsolute) this else super.toAbsolute.toFile override def toDirectory: Directory = new Directory(jpath) override def toFile: File = this diff --git a/compiler/src/dotty/tools/io/FileExtension.scala b/compiler/src/dotty/tools/io/FileExtension.scala new file mode 100644 index 000000000000..9d239477aed3 --- /dev/null +++ b/compiler/src/dotty/tools/io/FileExtension.scala @@ -0,0 +1,79 @@ +package dotty.tools.io + +import dotty.tools.uncheckedNN +import dotty.tools.dotc.util.EnumFlags.FlagSet + +enum FileExtension(val toLowerCase: String): + case Tasty extends FileExtension("tasty") + case Class extends FileExtension("class") + case Jar extends FileExtension("jar") + case Scala extends FileExtension("scala") + case ScalaScript extends FileExtension("sc") + case Java extends FileExtension("java") + case Zip extends FileExtension("zip") + case Inc extends FileExtension("inc") + case Empty extends FileExtension("") + + /** Fallback extension */ + case External(override val toLowerCase: String) extends FileExtension(toLowerCase) + + /** represents an empty file extension. 
*/ + def isEmpty: Boolean = this == Empty + + override def toString: String = toLowerCase + + /** represents `".tasty"` */ + def isTasty = this == Tasty + /** represents `".class"` */ + def isClass = this == Class + /** represents `".scala"` */ + def isScala = this == Scala + /** represents `".sc"` */ + def isScalaScript = this == ScalaScript + /** represents `".java"` */ + def isJava = this == Java + /** represents `".jar"` */ + def isJar: Boolean = this == Jar + /** represents `".zip"` */ + def isZip: Boolean = this == Zip + /** represents `".jar"` or `".zip"` */ + def isJarOrZip: Boolean = FileExtension.JarOrZip.is(this) + /** represents `".scala"` or `".java"` */ + def isScalaOrJava: Boolean = FileExtension.ScalaOrJava.is(this) + /** represents `".java"` or `.tasty` */ + def isJavaOrTasty: Boolean = FileExtension.JavaOrTasty.is(this) + +object FileExtension: + + private val JarOrZip: FlagSet[FileExtension] = FlagSet.empty | Zip | Jar + private val ScalaOrJava: FlagSet[FileExtension] = FlagSet.empty | Scala | Java + private val JavaOrTasty: FlagSet[FileExtension] = FlagSet.empty | Java | Tasty + + // this will be optimised to a single hashcode + equality check, and then fallback to slowLookup, + // keep in sync with slowLookup. + private def initialLookup(s: String): FileExtension = s match + case "tasty" => Tasty + case "class" => Class + case "jar" => Jar + case "scala" => Scala + case "sc" => ScalaScript + case "java" => Java + case "zip" => Zip + case "inc" => Inc + case _ => slowLookup(s) + + // slower than initialLookup, keep in sync with initialLookup + private def slowLookup(s: String): FileExtension = + if s.equalsIgnoreCase("tasty") then Tasty + else if s.equalsIgnoreCase("class") then Class + else if s.equalsIgnoreCase("jar") then Jar + else if s.equalsIgnoreCase("scala") then Scala + else if s.equalsIgnoreCase("sc") then ScalaScript + else if s.equalsIgnoreCase("java") then Java + else if s.equalsIgnoreCase("zip") then Zip + else if s.equalsIgnoreCase("inc") then Inc + else External(s) + + def from(s: String): FileExtension = + if s.isEmpty then Empty + else initialLookup(s) diff --git a/compiler/src/dotty/tools/io/Jar.scala b/compiler/src/dotty/tools/io/Jar.scala index 3e65d2f7635d..dd33b1229610 100644 --- a/compiler/src/dotty/tools/io/Jar.scala +++ b/compiler/src/dotty/tools/io/Jar.scala @@ -165,7 +165,7 @@ object Jar { def isJarOrZip(f: Path): Boolean = isJarOrZip(f, true) def isJarOrZip(f: Path, examineFile: Boolean): Boolean = - f.hasExtension("zip", "jar") || (examineFile && magicNumberIsZip(f)) + f.ext.isJarOrZip || (examineFile && magicNumberIsZip(f)) def create(file: File, sourceDir: Directory, mainClass: String): Unit = { val writer = new Jar(file).jarWriter(Name.MAIN_CLASS -> mainClass) diff --git a/compiler/src/dotty/tools/io/JarArchive.scala b/compiler/src/dotty/tools/io/JarArchive.scala index f42f68e745ed..e95dbe97bb19 100644 --- a/compiler/src/dotty/tools/io/JarArchive.scala +++ b/compiler/src/dotty/tools/io/JarArchive.scala @@ -20,14 +20,14 @@ class JarArchive private (root: Directory) extends PlainDirectory(root) { object JarArchive { /** Create a new jar file. Overwrite if file already exists */ def create(path: Path): JarArchive = { - require(path.extension == "jar") + require(path.ext.isJar) path.delete() open(path, create = true) } /** Create a jar file. 
*/ def open(path: Path, create: Boolean = false): JarArchive = { - require(path.extension == "jar") + require(path.ext.isJar) // creating a new zip file system by using the JAR URL syntax: // https://docs.oracle.com/javase/7/docs/technotes/guides/io/fsp/zipfilesystemprovider.html diff --git a/compiler/src/dotty/tools/io/Path.scala b/compiler/src/dotty/tools/io/Path.scala index c8420c5e381d..6f97e03ca4d7 100644 --- a/compiler/src/dotty/tools/io/Path.scala +++ b/compiler/src/dotty/tools/io/Path.scala @@ -33,17 +33,20 @@ import scala.util.Random.alphanumeric */ object Path { def isExtensionJarOrZip(jpath: JPath): Boolean = isExtensionJarOrZip(jpath.getFileName.toString) - def isExtensionJarOrZip(name: String): Boolean = { - val ext = extension(name) - ext == "jar" || ext == "zip" + def isExtensionJarOrZip(name: String): Boolean = fileExtension(name).isJarOrZip + def fileExtension(name: String): FileExtension = { + val i = name.lastIndexOf('.') + if (i < 0) FileExtension.Empty + else FileExtension.from(name.substring(i + 1)) } - def extension(name: String): String = { - var i = name.length - 1 - while (i >= 0 && name.charAt(i) != '.') - i -= 1 + @deprecated("use fileExtension instead.") + def extension(name: String): String = fileExtension(name).toLowerCase - if (i < 0) "" - else name.substring(i + 1).toLowerCase + /** strip anything after and including trailing the extension */ + def fileName(name: String): String = { + val i = name.lastIndexOf('.') + if (i < 0) name + else name.substring(0, i).nn } def onlyDirs(xs: Iterator[Path]): Iterator[Directory] = xs.filter(_.isDirectory).map(_.toDirectory) @@ -160,22 +163,36 @@ class Path private[io] (val jpath: JPath) { val p = parent if (p isSame this) Nil else p :: p.parents } + + def ext: FileExtension = Path.fileExtension(name) + // if name ends with an extension (e.g. "foo.jpg") returns the extension ("jpg"), otherwise "" - def extension: String = Path.extension(name) + @deprecated("use ext instead.") + def extension: String = ext.toLowerCase + // compares against extensions in a CASE INSENSITIVE way. + @deprecated("consider using queries on ext instead.") def hasExtension(ext: String, exts: String*): Boolean = { - val lower = extension.toLowerCase - ext.toLowerCase == lower || exts.exists(_.toLowerCase == lower) + val lower = ext.toLowerCase + lower.equalsIgnoreCase(ext) || exts.exists(lower.equalsIgnoreCase) } // returns the filename without the extension. - def stripExtension: String = name stripSuffix ("." + extension) + def stripExtension: String = Path.fileName(name) // returns the Path with the extension. def addExtension(ext: String): Path = new Path(jpath.resolveSibling(name + ext)) + + // changes the existing extension out for a new one, or adds it + // if the current path has none. + def changeExtension(ext: FileExtension): Path = + changeExtension(ext.toLowerCase) + // changes the existing extension out for a new one, or adds it // if the current path has none. def changeExtension(ext: String): Path = - if (extension == "") addExtension(ext) - else new Path(jpath.resolveSibling(stripExtension + "." + ext)) + val name0 = name + val dropExtension = Path.fileName(name0) + if dropExtension eq name0 then addExtension(ext) + else new Path(jpath.resolveSibling(dropExtension + "." 
+ ext)) // conditionally execute def ifFile[T](f: File => T): Option[T] = if (isFile) Some(f(toFile)) else None diff --git a/compiler/test/dotty/tools/dotc/core/tasty/CommentPicklingTest.scala b/compiler/test/dotty/tools/dotc/core/tasty/CommentPicklingTest.scala index 4daaf86f2fb0..db58ff36ac42 100644 --- a/compiler/test/dotty/tools/dotc/core/tasty/CommentPicklingTest.scala +++ b/compiler/test/dotty/tools/dotc/core/tasty/CommentPicklingTest.scala @@ -98,7 +98,7 @@ class CommentPicklingTest { Main.process(options.all, reporter) assertFalse("Compilation failed.", reporter.hasErrors) - val tastyFiles = Path.onlyFiles(out.walkFilter(_.extension == "tasty")).toList + val tastyFiles = Path.onlyFiles(out.walkFilter(_.ext.isTasty)).toList val unpicklingOptions = unpickleOptions .withClasspath(out.toAbsolute.toString) .and("dummy") // Need to pass a dummy source file name diff --git a/compiler/test/dotty/tools/dotc/printing/PrintingTest.scala b/compiler/test/dotty/tools/dotc/printing/PrintingTest.scala index 2c970e93f573..73118216d6fa 100644 --- a/compiler/test/dotty/tools/dotc/printing/PrintingTest.scala +++ b/compiler/test/dotty/tools/dotc/printing/PrintingTest.scala @@ -51,7 +51,7 @@ class PrintingTest { def testIn(testsDir: String, phase: String) = val res = Directory(testsDir).list.toList - .filter(f => f.extension == "scala") + .filter(f => f.ext.isScala) .map { f => compileFile(f.jpath, phase) } val failed = res.filter(!_) diff --git a/compiler/test/dotty/tools/dotc/transform/PatmatExhaustivityTest.scala b/compiler/test/dotty/tools/dotc/transform/PatmatExhaustivityTest.scala index f538d9534cd9..4ed59db5c10e 100644 --- a/compiler/test/dotty/tools/dotc/transform/PatmatExhaustivityTest.scala +++ b/compiler/test/dotty/tools/dotc/transform/PatmatExhaustivityTest.scala @@ -52,7 +52,7 @@ class PatmatExhaustivityTest { /** A single test with multiple files grouped in a folder */ private def compileDir(path: JPath): Boolean = { val files = Directory(path).list.toList - .filter(f => f.extension == "scala" || f.extension == "java" ) + .filter(_.ext.isScalaOrJava) .map(_.jpath) val actualLines = compile(files) @@ -65,7 +65,7 @@ class PatmatExhaustivityTest { def patmatExhaustivity: Unit = { val blacklisted = TestSources.patmatExhaustivityScala2LibraryTastyBlacklisted.toSet val res = Directory(testsDir).list.toList - .filter(f => f.extension == "scala" || f.isDirectory) + .filter(f => f.ext.isScala || f.isDirectory) .filter { f => val path = if f.isDirectory then f.path + "/" else f.path Properties.testsFilter.isEmpty || Properties.testsFilter.exists(path.contains) @@ -74,7 +74,7 @@ class PatmatExhaustivityTest { .map(f => if f.isDirectory then compileDir(f.jpath) else compileFile(f.jpath)) val failed = res.filter(!_) - val ignored = Directory(testsDir).list.toList.filter(_.extension == "ignore") + val ignored = Directory(testsDir).list.toList.filter(_.ext.toLowerCase.equalsIgnoreCase("ignore")) val msg = s"Total: ${res.length + ignored.length}, Failed: ${failed.length}, Ignored: ${ignored.length}" diff --git a/language-server/src/dotty/tools/languageserver/DottyLanguageServer.scala b/language-server/src/dotty/tools/languageserver/DottyLanguageServer.scala index 3604e38375e7..e878866be81e 100644 --- a/language-server/src/dotty/tools/languageserver/DottyLanguageServer.scala +++ b/language-server/src/dotty/tools/languageserver/DottyLanguageServer.scala @@ -753,7 +753,7 @@ object DottyLanguageServer { /** Does this sourcefile represent a worksheet? 
*/ private def isWorksheet(sourcefile: SourceFile): Boolean = - sourcefile.file.extension == "sc" + sourcefile.file.ext.isScalaScript /** Wrap the source of a worksheet inside an `object`. */ private def wrapWorksheet(source: String): String = diff --git a/presentation-compiler/src/main/dotty/tools/pc/completions/Completions.scala b/presentation-compiler/src/main/dotty/tools/pc/completions/Completions.scala index abb15d45f88a..81a543701817 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/completions/Completions.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/completions/Completions.scala @@ -115,7 +115,7 @@ class Completions( val allAdvanced = advanced ++ keywords path match // should not show completions for toplevel - case Nil | (_: PackageDef) :: _ if completionPos.originalCursorPosition.source.file.extension != "sc" => + case Nil | (_: PackageDef) :: _ if !completionPos.originalCursorPosition.source.file.ext.isScalaScript => (allAdvanced, SymbolSearch.Result.COMPLETE) case Select(qual, _) :: _ if qual.typeOpt.isErroneous => (allAdvanced, SymbolSearch.Result.COMPLETE) diff --git a/scaladoc/src/scala/tasty/inspector/TastyInspector.scala b/scaladoc/src/scala/tasty/inspector/TastyInspector.scala index 14e5f019b433..03b3aadedc4d 100644 --- a/scaladoc/src/scala/tasty/inspector/TastyInspector.scala +++ b/scaladoc/src/scala/tasty/inspector/TastyInspector.scala @@ -52,7 +52,7 @@ object TastyInspector: def inspectAllTastyFiles(tastyFiles: List[String], jars: List[String], dependenciesClasspath: List[String])(inspector: Inspector): Boolean = def checkFile(fileName: String, ext: String): Unit = val file = dotty.tools.io.Path(fileName) - if file.extension != ext then + if !file.ext.toLowerCase.equalsIgnoreCase(ext) then throw new IllegalArgumentException(s"File extension is not `.$ext`: $file") else if !file.exists then throw new IllegalArgumentException(s"File not found: ${file.toAbsolute}") diff --git a/tasty-inspector/src/scala/tasty/inspector/TastyInspector.scala b/tasty-inspector/src/scala/tasty/inspector/TastyInspector.scala index e70d2d4f6dc5..ea3f0a95dded 100644 --- a/tasty-inspector/src/scala/tasty/inspector/TastyInspector.scala +++ b/tasty-inspector/src/scala/tasty/inspector/TastyInspector.scala @@ -49,7 +49,7 @@ object TastyInspector: def inspectAllTastyFiles(tastyFiles: List[String], jars: List[String], dependenciesClasspath: List[String])(inspector: Inspector): Boolean = def checkFile(fileName: String, ext: String): Unit = val file = dotty.tools.io.Path(fileName) - if file.extension != ext then + if !file.ext.toLowerCase.equalsIgnoreCase(ext) then throw new IllegalArgumentException(s"File extension is not `.$ext`: $file") else if !file.exists then throw new IllegalArgumentException(s"File not found: ${file.toAbsolute}") diff --git a/tests/run-tasty-inspector/scala2-library-test.scala b/tests/run-tasty-inspector/scala2-library-test.scala index 15a251427d70..37dc55e20d1f 100644 --- a/tests/run-tasty-inspector/scala2-library-test.scala +++ b/tests/run-tasty-inspector/scala2-library-test.scala @@ -32,7 +32,7 @@ def scalaLibClassesPath = lazy val scalaLibTastyPaths = new Directory(scalaLibClassesPath).deepFiles - .filter(_.`extension` == "tasty") + .filter(_.ext.isTasty) .map(_.normalize.path.stripPrefix(scalaLibClassesPath.toString + separator)) .toList diff --git a/tests/run-with-compiler/scala2-library-from-tasty-jar.scala b/tests/run-with-compiler/scala2-library-from-tasty-jar.scala index 913cf7dc24fc..f6d7b7b87d5b 100644 --- 
a/tests/run-with-compiler/scala2-library-from-tasty-jar.scala +++ b/tests/run-with-compiler/scala2-library-from-tasty-jar.scala @@ -32,7 +32,7 @@ def scalaLibClassesPath = lazy val scalaLibTastyPaths = new Directory(scalaLibClassesPath).deepFiles - .filter(_.`extension` == "tasty") + .filter(_.ext.isTasty) .map(_.normalize.path.stripPrefix(scalaLibClassesPath.toString + separator)) .toList diff --git a/tests/run-with-compiler/scala2-library-from-tasty.scala b/tests/run-with-compiler/scala2-library-from-tasty.scala index ee2ec8951701..c3a52ea95ae1 100644 --- a/tests/run-with-compiler/scala2-library-from-tasty.scala +++ b/tests/run-with-compiler/scala2-library-from-tasty.scala @@ -29,7 +29,7 @@ def scalaLibClassesPath = lazy val scalaLibTastyPaths = new Directory(scalaLibClassesPath).deepFiles - .filter(_.`extension` == "tasty") + .filter(_.ext.isTasty) .map(_.normalize.path.stripPrefix(scalaLibClassesPath.toString + separator)) .toList From a975398310ddcb90ff15fca543330c4fb18b3587 Mon Sep 17 00:00:00 2001 From: Jamie Thompson Date: Thu, 17 Aug 2023 15:01:07 +0200 Subject: [PATCH 3/7] Notify Zinc about non-local classes early For pipelining Zinc needs to know about non-local classes early. e.g. it enables Zinc to disable pipelining if a non-local class contains macros. The changes in this commit are based of changes made originally in Zinc: https://github.com/sbt/zinc/commit/856d4162127927cb9a6c37a1649cc42d1871a815 --- .../src/dotty/tools/backend/jvm/CodeGen.scala | 9 ++- .../src/dotty/tools/dotc/sbt/ExtractAPI.scala | 62 ++++++++++++++++++- .../sbt/interfaces/IncrementalCallback.java | 7 +++ .../dotty/tools/xsbt/IncrementalCallback.java | 10 +++ .../tools/xsbt/OldIncrementalCallback.java | 10 +++ .../compactify/src/main/scala/Nested.scala | 54 +++++++++------- 6 files changed, 121 insertions(+), 31 deletions(-) diff --git a/compiler/src/dotty/tools/backend/jvm/CodeGen.scala b/compiler/src/dotty/tools/backend/jvm/CodeGen.scala index b48df60d4c1a..2286ad6c2c25 100644 --- a/compiler/src/dotty/tools/backend/jvm/CodeGen.scala +++ b/compiler/src/dotty/tools/backend/jvm/CodeGen.scala @@ -125,17 +125,16 @@ class CodeGen(val int: DottyBackendInterface, val primitives: DottyPrimitives)( // Creates a callback that will be evaluated in PostProcessor after creating a file private def onFileCreated(cls: ClassNode, claszSymbol: Symbol, sourceFile: util.SourceFile)(using Context): AbstractFile => Unit = { - val (fullClassName, isLocal) = atPhase(sbtExtractDependenciesPhase) { - (ExtractDependencies.classNameAsString(claszSymbol), claszSymbol.isLocal) + val isLocal = atPhase(sbtExtractDependenciesPhase) { + claszSymbol.isLocal } clsFile => { val className = cls.name.replace('/', '.') if (ctx.compilerCallback != null) ctx.compilerCallback.onClassGenerated(sourceFile, convertAbstractFile(clsFile), className) - ctx.withIncCallback: cb => - if (isLocal) cb.generatedLocalClass(sourceFile, clsFile.jpath) - else cb.generatedNonLocalClass(sourceFile, clsFile.jpath, className, fullClassName) + if isLocal then + ctx.withIncCallback(_.generatedLocalClass(sourceFile, clsFile.jpath)) } } diff --git a/compiler/src/dotty/tools/dotc/sbt/ExtractAPI.scala b/compiler/src/dotty/tools/dotc/sbt/ExtractAPI.scala index 506ebc81b23d..079687ac3122 100644 --- a/compiler/src/dotty/tools/dotc/sbt/ExtractAPI.scala +++ b/compiler/src/dotty/tools/dotc/sbt/ExtractAPI.scala @@ -15,12 +15,15 @@ import Trees.* import Types.* import Symbols.* import Names.* +import StdNames.str import NameOps.* import inlines.Inlines import 
transform.ValueClasses -import dotty.tools.io.{File, FileExtension} +import dotty.tools.io.{File, FileExtension, JarArchive} +import util.{Property, SourceFile} import java.io.PrintWriter +import ExtractAPI.NonLocalClassSymbolsInCurrentUnits import scala.collection.mutable import scala.util.hashing.MurmurHash3 @@ -64,13 +67,62 @@ class ExtractAPI extends Phase { // definitions, and `PostTyper` does not change definitions). override def runsAfter: Set[String] = Set(transform.PostTyper.name) + override def runOn(units: List[CompilationUnit])(using Context): List[CompilationUnit] = + val nonLocalClassSymbols = new mutable.HashSet[Symbol] + val ctx0 = ctx.withProperty(NonLocalClassSymbolsInCurrentUnits, Some(nonLocalClassSymbols)) + val units0 = super.runOn(units)(using ctx0) + ctx.withIncCallback(recordNonLocalClasses(nonLocalClassSymbols, _)) + units0 + end runOn + + private def recordNonLocalClasses(nonLocalClassSymbols: mutable.HashSet[Symbol], cb: interfaces.IncrementalCallback)(using Context): Unit = + for cls <- nonLocalClassSymbols do + val sourceFile = cls.source + if sourceFile.exists && cls.isDefinedInCurrentRun then + recordNonLocalClass(cls, sourceFile, cb) + cb.apiPhaseCompleted() + cb.dependencyPhaseCompleted() + + private def recordNonLocalClass(cls: Symbol, sourceFile: SourceFile, cb: interfaces.IncrementalCallback)(using Context): Unit = + def registerProductNames(fullClassName: String, binaryClassName: String) = + val pathToClassFile = s"${binaryClassName.replace('.', java.io.File.separatorChar)}.class" + + val classFile = { + ctx.settings.outputDir.value match { + case jar: JarArchive => + // important detail here, even on Windows, Zinc expects the separator within the jar + // to be the system default, (even if in the actual jar file the entry always uses '/'). + // see https://github.com/sbt/zinc/blob/dcddc1f9cfe542d738582c43f4840e17c053ce81/internal/compiler-bridge/src/main/scala/xsbt/JarUtils.scala#L47 + new java.io.File(s"$jar!$pathToClassFile") + case outputDir => + new java.io.File(outputDir.file, pathToClassFile) + } + } + + cb.generatedNonLocalClass(sourceFile, classFile.toPath(), binaryClassName, fullClassName) + end registerProductNames + + val fullClassName = atPhase(sbtExtractDependenciesPhase) { + ExtractDependencies.classNameAsString(cls) + } + val binaryClassName = cls.binaryClassName + registerProductNames(fullClassName, binaryClassName) + + // Register the names of top-level module symbols that emit two class files + val isTopLevelUniqueModule = + cls.owner.is(PackageClass) && cls.is(ModuleClass) && cls.companionClass == NoSymbol + if isTopLevelUniqueModule then + registerProductNames(fullClassName, binaryClassName.stripSuffix(str.MODULE_SUFFIX)) + end recordNonLocalClass + override def run(using Context): Unit = { val unit = ctx.compilationUnit val sourceFile = unit.source ctx.withIncCallback: cb => cb.startSource(sourceFile) - val apiTraverser = new ExtractAPICollector + val nonLocalClassSymbols = ctx.property(NonLocalClassSymbolsInCurrentUnits).get + val apiTraverser = ExtractAPICollector(nonLocalClassSymbols) val classes = apiTraverser.apiSource(unit.tpdTree) val mainClasses = apiTraverser.mainClasses @@ -94,6 +146,8 @@ object ExtractAPI: val name: String = "sbt-api" val description: String = "sends a representation of the API of classes to sbt" + private val NonLocalClassSymbolsInCurrentUnits: Property.Key[mutable.HashSet[Symbol]] = Property.Key() + /** Extracts full (including private members) API representation out of Symbols and Types. 
* * The exact representation used for each type is not important: the only thing @@ -136,7 +190,7 @@ object ExtractAPI: * without going through an intermediate representation, see * http://www.scala-sbt.org/0.13/docs/Understanding-Recompilation.html#Hashing+an+API+representation */ -private class ExtractAPICollector(using Context) extends ThunkHolder { +private class ExtractAPICollector(nonLocalClassSymbols: mutable.HashSet[Symbol])(using Context) extends ThunkHolder { import tpd.* import xsbti.api @@ -254,6 +308,8 @@ private class ExtractAPICollector(using Context) extends ThunkHolder { childrenOfSealedClass, topLevel, tparams) allNonLocalClassesInSrc += cl + if !sym.isLocal then + nonLocalClassSymbols += sym if (sym.isStatic && !sym.is(Trait) && ctx.platform.hasMainMethod(sym)) { // If sym is an object, all main methods count, otherwise only @static ones count. diff --git a/compiler/src/dotty/tools/dotc/sbt/interfaces/IncrementalCallback.java b/compiler/src/dotty/tools/dotc/sbt/interfaces/IncrementalCallback.java index 4c6afa113f4f..ebdb1b7b24d4 100644 --- a/compiler/src/dotty/tools/dotc/sbt/interfaces/IncrementalCallback.java +++ b/compiler/src/dotty/tools/dotc/sbt/interfaces/IncrementalCallback.java @@ -7,6 +7,7 @@ /* User code should not implement this interface, it is intended to be a wrapper around xsbti.AnalysisCallback. */ public interface IncrementalCallback { + default void api(SourceFile sourceFile, xsbti.api.ClassLike classApi) { } @@ -36,4 +37,10 @@ default void generatedLocalClass(SourceFile source, Path classFile) { default void generatedNonLocalClass(SourceFile source, Path classFile, String binaryClassName, String srcClassName) { } + + default void apiPhaseCompleted() { + } + + default void dependencyPhaseCompleted() { + } } diff --git a/sbt-bridge/src/dotty/tools/xsbt/IncrementalCallback.java b/sbt-bridge/src/dotty/tools/xsbt/IncrementalCallback.java index 3c3d33c1c1fe..6e19c62b10d0 100644 --- a/sbt-bridge/src/dotty/tools/xsbt/IncrementalCallback.java +++ b/sbt-bridge/src/dotty/tools/xsbt/IncrementalCallback.java @@ -57,4 +57,14 @@ public void generatedLocalClass(SourceFile source, java.nio.file.Path classFile) public void generatedNonLocalClass(SourceFile source, java.nio.file.Path classFile, String binaryClassName, String srcClassName) { delegate.generatedNonLocalClass(asVirtualFile.apply(source), classFile, binaryClassName, srcClassName); } + + @Override + public void apiPhaseCompleted() { + delegate.apiPhaseCompleted(); + } + + @Override + public void dependencyPhaseCompleted() { + delegate.dependencyPhaseCompleted(); + } } diff --git a/sbt-bridge/src/dotty/tools/xsbt/OldIncrementalCallback.java b/sbt-bridge/src/dotty/tools/xsbt/OldIncrementalCallback.java index 597a964eb944..30e25194736d 100644 --- a/sbt-bridge/src/dotty/tools/xsbt/OldIncrementalCallback.java +++ b/sbt-bridge/src/dotty/tools/xsbt/OldIncrementalCallback.java @@ -71,4 +71,14 @@ public void generatedLocalClass(SourceFile source, java.nio.file.Path classFile) public void generatedNonLocalClass(SourceFile source, java.nio.file.Path classFile, String binaryClassName, String srcClassName) { delegate.generatedNonLocalClass(asJavaFile(source), classFile.toFile(), binaryClassName, srcClassName); } + + @Override + public void apiPhaseCompleted() { + delegate.apiPhaseCompleted(); + } + + @Override + public void dependencyPhaseCompleted() { + delegate.dependencyPhaseCompleted(); + } } diff --git a/sbt-test/source-dependencies/compactify/src/main/scala/Nested.scala 
b/sbt-test/source-dependencies/compactify/src/main/scala/Nested.scala index 4b1597d287d4..b2d53cedee05 100644 --- a/sbt-test/source-dependencies/compactify/src/main/scala/Nested.scala +++ b/sbt-test/source-dependencies/compactify/src/main/scala/Nested.scala @@ -2,35 +2,35 @@ package test object TopLevelModule1 { - object InnerModule1 - { - object InnerModule2 - { - trait Z { def q = 3 } - def x = 3 - } - } - class InnerClass1 - { - class InnerClass2 - { - val z = new TopLevelModule1.InnerClass2 - } - object InnerModule3 - { - val y = new TopLevel1 with InnerModule1.InnerModule2.Z { val x = 4 } - } - } - class InnerClass2 + object InnerModule1 + { + object InnerModule2 + { + trait Z { def q = 3 } + def x = 3 + } + } + class InnerClass1 + { + class InnerClass2 + { + val z = new TopLevelModule1.InnerClass2 + } + object InnerModule3 + { + val y = new TopLevel1 with InnerModule1.InnerModule2.Z { val x = 4 } + } + } + class InnerClass2 } class TopLevel1 { - object Inner1_1 + object Inner1_1 } object TopLevel1 { - class Inner1_2 - object Inner1_2 + class Inner1_2 + object Inner1_2 } object TopLevel2 @@ -41,3 +41,11 @@ object TopLevel3 class TopLevel4 object TopLevelModuleSuffix$ + +// will generate a package object wrapper +val topLevelVal = 23 + +// explicit package object +package object inner { + val innerVal = 23 +} From c22e31344d45880443c217bf2495fd59e132771a Mon Sep 17 00:00:00 2001 From: Jamie Thompson Date: Thu, 17 Aug 2023 13:44:14 +0200 Subject: [PATCH 4/7] add support for pipeline build of Scala and Java files - rename '-Yjava-tasty-output' to '-Yearly-tasty-output' because now Scala TASTy will also be written to this destination. - add '-Ypickle-java' alias of '-Yjava-tasty', as expected by Zinc - add '-Ypickle-write' alias of '-Yearly-tasty-output', as expected by Zinc - move ExtractAPI phase to after Pickler, this way we can do it in parallel with generating TASTy bytes. At the end of this phase we write the TASTy to the '-Yearly-tasty-output' destination. Also ensure that ExtractAPI phase runs with '-Yjava-tasty', even if no incremental callback is set (don't extract the API in this case). - test the pipelining with sbt scripted tests, including for inline methods and macros with pipelining - describe semantics with respect to suspensions, introduce -Yno-suspended-units flag for greater control by the user. 
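For illustration, a minimal sbt setup wiring the renamed flag by hand could look like the following (a sketch distilled from the scripted tests added in this patch; the project layout and the 'a-early.jar' path are placeholders, not part of the change itself):

    // project `a` writes its TASTy early, so a downstream project can start
    // compiling against it before `a` has produced any classfiles.
    // "-Ypickle-write" is the Zinc-facing alias of "-Yearly-tasty-output";
    // add "-Yjava-tasty" (alias "-Ypickle-java") when Java signatures should also be pickled.
    lazy val a = project.in(file("a"))
      .settings(
        scalacOptions ++= Seq("-Yearly-tasty-output", ((ThisBuild / baseDirectory).value / "a-early.jar").toString),
      )

    // project `b` compiles against the early TASTy output of `a` via its classpath
    lazy val b = project.in(file("b"))
      .settings(
        Compile / unmanagedClasspath += Attributed.blank((ThisBuild / baseDirectory).value / "a-early.jar"),
      )

When pipelining is driven by sbt itself ('ThisBuild / usePipelining := true', as in the pipelining-scala-* tests below), Zinc is expected to pass these options through the '-Ypickle-java' and '-Ypickle-write' aliases, so the flags should not need to be set manually.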
--- .../dotty/tools/dotc/CompilationUnit.scala | 15 ++-- compiler/src/dotty/tools/dotc/Compiler.scala | 2 +- .../tools/dotc/config/ScalaSettings.scala | 9 +-- .../dotty/tools/dotc/config/Settings.scala | 4 +- .../src/dotty/tools/dotc/core/Phases.scala | 20 +++++- .../dotty/tools/dotc/core/SymbolLoaders.scala | 3 +- .../dotty/tools/dotc/inlines/Inliner.scala | 3 + .../src/dotty/tools/dotc/sbt/ExtractAPI.scala | 72 +++++++++++++++++-- .../dotty/tools/dotc/transform/Pickler.scala | 54 +++----------- .../a/src/main/scala/a/A.scala | 10 +++ .../b/src/main/scala/b/B.scala | 10 +++ .../Yearly-tasty-output-inline/build.sbt | 14 ++++ .../project/DottyInjectedPlugin.scala | 12 ++++ .../Yearly-tasty-output-inline/test | 3 + .../a/src/main/scala/a/A.scala | 5 ++ .../Yearly-tasty-output/b-early-out/.keep | 0 .../b/src/main/scala/b/B.scala | 5 ++ .../pipelining/Yearly-tasty-output/build.sbt | 23 ++++++ .../c/src/main/scala/c/C.scala | 9 +++ .../project/DottyInjectedPlugin.scala | 12 ++++ sbt-test/pipelining/Yearly-tasty-output/test | 5 ++ .../Yjava-tasty-annotation/build.sbt | 2 +- .../pipelining/Yjava-tasty-enum/build.sbt | 2 +- .../Yjava-tasty-from-tasty/build.sbt | 4 +- .../Yjava-tasty-fromjavaobject/build.sbt | 4 +- .../a/src/main/scala/a/A.java | 6 ++ .../b/src/main/scala/b/B.scala | 8 ++- .../pipelining/Yjava-tasty-generic/build.sbt | 2 +- .../pipelining/Yjava-tasty-paths/build.sbt | 2 +- .../Yjava-tasty-result-types/build.sbt | 2 +- .../a/src/main/scala/a/A.scala | 8 +++ .../b/src/main/scala/b/B.scala | 10 +++ .../pipelining-scala-inline/build.sbt | 35 +++++++++ .../project/DottyInjectedPlugin.scala | 12 ++++ .../pipelining/pipelining-scala-inline/test | 1 + .../a/src/main/scala/a/A.scala | 5 ++ .../b/src/main/scala/b/B.java | 5 ++ .../pipelining-scala-java-basic/build.sbt | 17 +++++ .../c/src/main/scala/c/C.scala | 15 ++++ .../project/DottyInjectedPlugin.scala | 12 ++++ .../pipelining-scala-java-basic/test | 1 + .../a/src/main/scala/a/A.scala | 18 +++++ .../b/src/main/scala/b/B.scala | 10 +++ .../pipelining-scala-macro-fail/build.sbt | 28 ++++++++ .../project/DottyInjectedPlugin.scala | 12 ++++ .../pipelining-scala-macro-fail/test | 2 + .../a/src/main/scala/a/A.scala | 13 ++++ .../a/src/main/scala/a/AConsume.scala | 5 ++ .../main/scala/a/AConsumeTransparent.scala | 5 ++ .../b/src/main/scala/b/B.scala | 14 ++++ .../pipelining-scala-macro-force/build.sbt | 45 ++++++++++++ .../src/main/scala/macros/MacroImpl.scala | 15 ++++ .../project/DottyInjectedPlugin.scala | 12 ++++ .../pipelining-scala-macro-force/test | 1 + .../a/src/main/scala/a/A.scala | 21 ++++++ .../a/src/main/scala/a/ASuspendInlining.scala | 5 ++ .../a/src/main/scala/a/ASuspendTyper.scala | 5 ++ .../b/src/main/scala/b/B.scala | 14 ++++ .../pipelining-scala-macro/build.sbt | 56 +++++++++++++++ .../project/DottyInjectedPlugin.scala | 12 ++++ .../pipelining/pipelining-scala-macro/test | 1 + .../a/src/main/scala/a/A.scala | 5 ++ .../b/src/main/scala/b/B.scala | 12 ++++ .../pipelining-scala-only/build.sbt | 12 ++++ .../project/DottyInjectedPlugin.scala | 12 ++++ .../pipelining/pipelining-scala-only/test | 1 + 66 files changed, 700 insertions(+), 79 deletions(-) create mode 100644 sbt-test/pipelining/Yearly-tasty-output-inline/a/src/main/scala/a/A.scala create mode 100644 sbt-test/pipelining/Yearly-tasty-output-inline/b/src/main/scala/b/B.scala create mode 100644 sbt-test/pipelining/Yearly-tasty-output-inline/build.sbt create mode 100644 sbt-test/pipelining/Yearly-tasty-output-inline/project/DottyInjectedPlugin.scala create mode 100644 
sbt-test/pipelining/Yearly-tasty-output-inline/test create mode 100644 sbt-test/pipelining/Yearly-tasty-output/a/src/main/scala/a/A.scala create mode 100644 sbt-test/pipelining/Yearly-tasty-output/b-early-out/.keep create mode 100644 sbt-test/pipelining/Yearly-tasty-output/b/src/main/scala/b/B.scala create mode 100644 sbt-test/pipelining/Yearly-tasty-output/build.sbt create mode 100644 sbt-test/pipelining/Yearly-tasty-output/c/src/main/scala/c/C.scala create mode 100644 sbt-test/pipelining/Yearly-tasty-output/project/DottyInjectedPlugin.scala create mode 100644 sbt-test/pipelining/Yearly-tasty-output/test create mode 100644 sbt-test/pipelining/pipelining-scala-inline/a/src/main/scala/a/A.scala create mode 100644 sbt-test/pipelining/pipelining-scala-inline/b/src/main/scala/b/B.scala create mode 100644 sbt-test/pipelining/pipelining-scala-inline/build.sbt create mode 100644 sbt-test/pipelining/pipelining-scala-inline/project/DottyInjectedPlugin.scala create mode 100644 sbt-test/pipelining/pipelining-scala-inline/test create mode 100644 sbt-test/pipelining/pipelining-scala-java-basic/a/src/main/scala/a/A.scala create mode 100644 sbt-test/pipelining/pipelining-scala-java-basic/b/src/main/scala/b/B.java create mode 100644 sbt-test/pipelining/pipelining-scala-java-basic/build.sbt create mode 100644 sbt-test/pipelining/pipelining-scala-java-basic/c/src/main/scala/c/C.scala create mode 100644 sbt-test/pipelining/pipelining-scala-java-basic/project/DottyInjectedPlugin.scala create mode 100644 sbt-test/pipelining/pipelining-scala-java-basic/test create mode 100644 sbt-test/pipelining/pipelining-scala-macro-fail/a/src/main/scala/a/A.scala create mode 100644 sbt-test/pipelining/pipelining-scala-macro-fail/b/src/main/scala/b/B.scala create mode 100644 sbt-test/pipelining/pipelining-scala-macro-fail/build.sbt create mode 100644 sbt-test/pipelining/pipelining-scala-macro-fail/project/DottyInjectedPlugin.scala create mode 100644 sbt-test/pipelining/pipelining-scala-macro-fail/test create mode 100644 sbt-test/pipelining/pipelining-scala-macro-force/a/src/main/scala/a/A.scala create mode 100644 sbt-test/pipelining/pipelining-scala-macro-force/a/src/main/scala/a/AConsume.scala create mode 100644 sbt-test/pipelining/pipelining-scala-macro-force/a/src/main/scala/a/AConsumeTransparent.scala create mode 100644 sbt-test/pipelining/pipelining-scala-macro-force/b/src/main/scala/b/B.scala create mode 100644 sbt-test/pipelining/pipelining-scala-macro-force/build.sbt create mode 100644 sbt-test/pipelining/pipelining-scala-macro-force/macros/src/main/scala/macros/MacroImpl.scala create mode 100644 sbt-test/pipelining/pipelining-scala-macro-force/project/DottyInjectedPlugin.scala create mode 100644 sbt-test/pipelining/pipelining-scala-macro-force/test create mode 100644 sbt-test/pipelining/pipelining-scala-macro/a/src/main/scala/a/A.scala create mode 100644 sbt-test/pipelining/pipelining-scala-macro/a/src/main/scala/a/ASuspendInlining.scala create mode 100644 sbt-test/pipelining/pipelining-scala-macro/a/src/main/scala/a/ASuspendTyper.scala create mode 100644 sbt-test/pipelining/pipelining-scala-macro/b/src/main/scala/b/B.scala create mode 100644 sbt-test/pipelining/pipelining-scala-macro/build.sbt create mode 100644 sbt-test/pipelining/pipelining-scala-macro/project/DottyInjectedPlugin.scala create mode 100644 sbt-test/pipelining/pipelining-scala-macro/test create mode 100644 sbt-test/pipelining/pipelining-scala-only/a/src/main/scala/a/A.scala create mode 100644 
sbt-test/pipelining/pipelining-scala-only/b/src/main/scala/b/B.scala create mode 100644 sbt-test/pipelining/pipelining-scala-only/build.sbt create mode 100644 sbt-test/pipelining/pipelining-scala-only/project/DottyInjectedPlugin.scala create mode 100644 sbt-test/pipelining/pipelining-scala-only/test diff --git a/compiler/src/dotty/tools/dotc/CompilationUnit.scala b/compiler/src/dotty/tools/dotc/CompilationUnit.scala index 2358739ebd74..4ea9b558ea7f 100644 --- a/compiler/src/dotty/tools/dotc/CompilationUnit.scala +++ b/compiler/src/dotty/tools/dotc/CompilationUnit.scala @@ -97,12 +97,15 @@ class CompilationUnit protected (val source: SourceFile, val info: CompilationUn // when this unit is unsuspended. depRecorder.clear() if !suspended then - if (ctx.settings.XprintSuspension.value) - report.echo(i"suspended: $this") - suspended = true - ctx.run.nn.suspendedUnits += this - if ctx.phase == Phases.inliningPhase then - suspendedAtInliningPhase = true + if ctx.settings.YnoSuspendedUnits.value then + report.error(i"Compilation unit suspended $this (-Yno-suspended-units is set)") + else + if (ctx.settings.XprintSuspension.value) + report.echo(i"suspended: $this") + suspended = true + ctx.run.nn.suspendedUnits += this + if ctx.phase == Phases.inliningPhase then + suspendedAtInliningPhase = true throw CompilationUnit.SuspendException() private var myAssignmentSpans: Map[Int, List[Span]] | Null = null diff --git a/compiler/src/dotty/tools/dotc/Compiler.scala b/compiler/src/dotty/tools/dotc/Compiler.scala index 290df761d117..06ef70b4cea5 100644 --- a/compiler/src/dotty/tools/dotc/Compiler.scala +++ b/compiler/src/dotty/tools/dotc/Compiler.scala @@ -41,13 +41,13 @@ class Compiler { List(new semanticdb.ExtractSemanticDB.ExtractSemanticInfo) :: // Extract info into .semanticdb files List(new PostTyper) :: // Additional checks and cleanups after type checking List(new sjs.PrepJSInterop) :: // Additional checks and transformations for Scala.js (Scala.js only) - List(new sbt.ExtractAPI) :: // Sends a representation of the API of classes to sbt via callbacks List(new SetRootTree) :: // Set the `rootTreeOrProvider` on class symbols Nil /** Phases dealing with TASTY tree pickling and unpickling */ protected def picklerPhases: List[List[Phase]] = List(new Pickler) :: // Generate TASTY info + List(new sbt.ExtractAPI) :: // Sends a representation of the API of classes to sbt via callbacks List(new Inlining) :: // Inline and execute macros List(new PostInlining) :: // Add mirror support for inlined code List(new CheckUnused.PostInlining) :: // Check for unused elements diff --git a/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala b/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala index 687adfe05ca7..5ac4cf2e5829 100644 --- a/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala +++ b/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala @@ -31,7 +31,7 @@ object ScalaSettings extends ScalaSettings // Kept as seperate type to avoid breaking backward compatibility abstract class ScalaSettings extends SettingGroup, AllScalaSettings: - val settingsByCategory: Map[SettingCategory, List[Setting[_]]] = + val settingsByCategory: Map[SettingCategory, List[Setting[_]]] = allSettings.groupBy(_.category) .view.mapValues(_.toList).toMap .withDefaultValue(Nil) @@ -43,7 +43,7 @@ abstract class ScalaSettings extends SettingGroup, AllScalaSettings: val verboseSettings: List[Setting[_]] = settingsByCategory(VerboseSetting).sortBy(_.name) val settingsByAliases: Map[String, Setting[_]] = allSettings.flatMap(s => 
s.aliases.map(_ -> s)).toMap - + trait AllScalaSettings extends CommonScalaSettings, PluginSettings, VerboseSettings, WarningSettings, XSettings, YSettings: self: SettingGroup => @@ -380,6 +380,7 @@ private sealed trait YSettings: val YprintPos: Setting[Boolean] = BooleanSetting(ForkSetting, "Yprint-pos", "Show tree positions.") val YprintPosSyms: Setting[Boolean] = BooleanSetting(ForkSetting, "Yprint-pos-syms", "Show symbol definitions positions.") val YnoDeepSubtypes: Setting[Boolean] = BooleanSetting(ForkSetting, "Yno-deep-subtypes", "Throw an exception on deep subtyping call stacks.") + val YnoSuspendedUnits: Setting[Boolean] = BooleanSetting(ForkSetting, "Yno-suspended-units", "Do not suspend units, e.g. when calling a macro defined in the same run. This will error instead of suspending.") val YnoPatmatOpt: Setting[Boolean] = BooleanSetting(ForkSetting, "Yno-patmat-opt", "Disable all pattern matching optimizations.") val YplainPrinter: Setting[Boolean] = BooleanSetting(ForkSetting, "Yplain-printer", "Pretty-print using a plain printer.") val YprintSyms: Setting[Boolean] = BooleanSetting(ForkSetting, "Yprint-syms", "When printing trees print info in symbols instead of corresponding info in trees.") @@ -439,7 +440,7 @@ private sealed trait YSettings: val YdebugMacros: Setting[Boolean] = BooleanSetting(ForkSetting, "Ydebug-macros", "Show debug info when quote pattern match fails") // Pipeline compilation options - val YjavaTasty: Setting[Boolean] = BooleanSetting(ForkSetting, "Yjava-tasty", "Pickler phase should compute pickles for .java defined symbols for use by build tools") - val YjavaTastyOutput: Setting[AbstractFile] = OutputSetting(ForkSetting, "Yjava-tasty-output", "directory|jar", "(Internal use only!) destination for generated .tasty files containing Java type signatures.", NoAbstractFile) + val YjavaTasty: Setting[Boolean] = BooleanSetting(ForkSetting, "Yjava-tasty", "Pickler phase should compute TASTy for .java defined symbols for use by build tools", aliases = List("-Ypickle-java")) + val YearlyTastyOutput: Setting[AbstractFile] = OutputSetting(ForkSetting, "Yearly-tasty-output", "directory|jar", "Destination to write generated .tasty files to for use in pipelined compilation.", NoAbstractFile, aliases = List("-Ypickle-write")) val YallowOutlineFromTasty: Setting[Boolean] = BooleanSetting(ForkSetting, "Yallow-outline-from-tasty", "Allow outline TASTy to be loaded with the -from-tasty option.") end YSettings diff --git a/compiler/src/dotty/tools/dotc/config/Settings.scala b/compiler/src/dotty/tools/dotc/config/Settings.scala index 816d85e6c6fd..87760a2a034e 100644 --- a/compiler/src/dotty/tools/dotc/config/Settings.scala +++ b/compiler/src/dotty/tools/dotc/config/Settings.scala @@ -357,8 +357,8 @@ object Settings: def MultiStringSetting(category: SettingCategory, name: String, helpArg: String, descr: String, default: List[String] = Nil, aliases: List[String] = Nil): Setting[List[String]] = publish(Setting(category, prependName(name), descr, default, helpArg, aliases = aliases)) - def OutputSetting(category: SettingCategory, name: String, helpArg: String, descr: String, default: AbstractFile): Setting[AbstractFile] = - publish(Setting(category, prependName(name), descr, default, helpArg)) + def OutputSetting(category: SettingCategory, name: String, helpArg: String, descr: String, default: AbstractFile, aliases: List[String] = Nil): Setting[AbstractFile] = + publish(Setting(category, prependName(name), descr, default, helpArg, aliases = aliases)) def PathSetting(category: 
SettingCategory, name: String, descr: String, default: String, aliases: List[String] = Nil): Setting[String] = publish(Setting(category, prependName(name), descr, default, aliases = aliases)) diff --git a/compiler/src/dotty/tools/dotc/core/Phases.scala b/compiler/src/dotty/tools/dotc/core/Phases.scala index c704846a82da..59736447af3c 100644 --- a/compiler/src/dotty/tools/dotc/core/Phases.scala +++ b/compiler/src/dotty/tools/dotc/core/Phases.scala @@ -210,6 +210,7 @@ object Phases { private var myTyperPhase: Phase = uninitialized private var myPostTyperPhase: Phase = uninitialized private var mySbtExtractDependenciesPhase: Phase = uninitialized + private var mySbtExtractAPIPhase: Phase = uninitialized private var myPicklerPhase: Phase = uninitialized private var myInliningPhase: Phase = uninitialized private var myStagingPhase: Phase = uninitialized @@ -235,6 +236,7 @@ object Phases { final def typerPhase: Phase = myTyperPhase final def postTyperPhase: Phase = myPostTyperPhase final def sbtExtractDependenciesPhase: Phase = mySbtExtractDependenciesPhase + final def sbtExtractAPIPhase: Phase = mySbtExtractAPIPhase final def picklerPhase: Phase = myPicklerPhase final def inliningPhase: Phase = myInliningPhase final def stagingPhase: Phase = myStagingPhase @@ -263,6 +265,7 @@ object Phases { myTyperPhase = phaseOfClass(classOf[TyperPhase]) myPostTyperPhase = phaseOfClass(classOf[PostTyper]) mySbtExtractDependenciesPhase = phaseOfClass(classOf[sbt.ExtractDependencies]) + mySbtExtractAPIPhase = phaseOfClass(classOf[sbt.ExtractAPI]) myPicklerPhase = phaseOfClass(classOf[Pickler]) myInliningPhase = phaseOfClass(classOf[Inlining]) myStagingPhase = phaseOfClass(classOf[Staging]) @@ -336,19 +339,29 @@ object Phases { /** skip the phase for a Java compilation unit, may depend on -Yjava-tasty */ def skipIfJava(using Context): Boolean = true + final def isAfterLastJavaPhase(using Context): Boolean = + // With `-Yjava-tasty` nominally the final phase is expected be ExtractAPI, + // otherwise drop Java sources at the end of TyperPhase. + // Checks if the last Java phase is before this phase, + // which always fails if the terminal phase is before lastJavaPhase. + val lastJavaPhase = if ctx.settings.YjavaTasty.value then sbtExtractAPIPhase else typerPhase + lastJavaPhase <= this + /** @pre `isRunnable` returns true */ def run(using Context): Unit /** @pre `isRunnable` returns true */ def runOn(units: List[CompilationUnit])(using runCtx: Context): List[CompilationUnit] = val buf = List.newBuilder[CompilationUnit] - // factor out typedAsJava check when not needed - val doSkipJava = ctx.settings.YjavaTasty.value && this <= picklerPhase && skipIfJava + + // Test that we are in a state where we need to check if the phase should be skipped for a java file, + // this prevents checking the expensive `unit.typedAsJava` unnecessarily. 
+ val doCheckJava = skipIfJava && !isAfterLastJavaPhase for unit <- units do given unitCtx: Context = runCtx.fresh.setPhase(this.start).setCompilationUnit(unit).withRootImports if ctx.run.enterUnit(unit) then try - if doSkipJava && unit.typedAsJava then + if doCheckJava && unit.typedAsJava then () else run @@ -503,6 +516,7 @@ object Phases { def typerPhase(using Context): Phase = ctx.base.typerPhase def postTyperPhase(using Context): Phase = ctx.base.postTyperPhase def sbtExtractDependenciesPhase(using Context): Phase = ctx.base.sbtExtractDependenciesPhase + def sbtExtractAPIPhase(using Context): Phase = ctx.base.sbtExtractAPIPhase def picklerPhase(using Context): Phase = ctx.base.picklerPhase def inliningPhase(using Context): Phase = ctx.base.inliningPhase def stagingPhase(using Context): Phase = ctx.base.stagingPhase diff --git a/compiler/src/dotty/tools/dotc/core/SymbolLoaders.scala b/compiler/src/dotty/tools/dotc/core/SymbolLoaders.scala index 8b5a7ddfa65c..cbdcf2d0fe43 100644 --- a/compiler/src/dotty/tools/dotc/core/SymbolLoaders.scala +++ b/compiler/src/dotty/tools/dotc/core/SymbolLoaders.scala @@ -456,7 +456,8 @@ class TastyLoader(val tastyFile: AbstractFile) extends SymbolLoader { val tastyUUID = unpickler.unpickler.header.uuid new ClassfileTastyUUIDParser(classfile)(ctx).checkTastyUUID(tastyUUID) else - // This will be the case in any of our tests that compile with `-Youtput-only-tasty` + // This will be the case in any of our tests that compile with `-Youtput-only-tasty`, or when + // tasty file compiled by `-Yearly-tasty-output-write` comes from an early output jar. report.inform(s"No classfiles found for $tastyFile when checking TASTy UUID") private def mayLoadTreesFromTasty(using Context): Boolean = diff --git a/compiler/src/dotty/tools/dotc/inlines/Inliner.scala b/compiler/src/dotty/tools/dotc/inlines/Inliner.scala index 8bd89a71fa50..1b4d985c7c4c 100644 --- a/compiler/src/dotty/tools/dotc/inlines/Inliner.scala +++ b/compiler/src/dotty/tools/dotc/inlines/Inliner.scala @@ -1042,6 +1042,9 @@ class Inliner(val call: tpd.Tree)(using Context): for sym <- dependencies do if ctx.compilationUnit.source.file == sym.associatedFile then report.error(em"Cannot call macro $sym defined in the same source file", call.srcPos) + else if ctx.settings.YnoSuspendedUnits.value then + val addendum = ", suspension prevented by -Yno-suspended-units" + report.error(em"Cannot call macro $sym defined in the same compilation run$addendum", call.srcPos) if (suspendable && ctx.settings.XprintSuspension.value) report.echo(i"suspension triggered by macro call to ${sym.showLocated} in ${sym.associatedFile}", call.srcPos) if suspendable then diff --git a/compiler/src/dotty/tools/dotc/sbt/ExtractAPI.scala b/compiler/src/dotty/tools/dotc/sbt/ExtractAPI.scala index 079687ac3122..d43a2f22a7fb 100644 --- a/compiler/src/dotty/tools/dotc/sbt/ExtractAPI.scala +++ b/compiler/src/dotty/tools/dotc/sbt/ExtractAPI.scala @@ -19,6 +19,7 @@ import StdNames.str import NameOps.* import inlines.Inlines import transform.ValueClasses +import transform.Pickler import dotty.tools.io.{File, FileExtension, JarArchive} import util.{Property, SourceFile} import java.io.PrintWriter @@ -51,7 +52,7 @@ class ExtractAPI extends Phase { override def description: String = ExtractAPI.description override def isRunnable(using Context): Boolean = { - super.isRunnable && ctx.runZincPhases + super.isRunnable && (ctx.runZincPhases || ctx.settings.YjavaTasty.value) } // Check no needed. 
Does not transform trees @@ -65,16 +66,75 @@ class ExtractAPI extends Phase { // after `PostTyper` (unlike `ExtractDependencies`, the simplication to trees // done by `PostTyper` do not affect this phase because it only cares about // definitions, and `PostTyper` does not change definitions). - override def runsAfter: Set[String] = Set(transform.PostTyper.name) + override def runsAfter: Set[String] = Set(transform.Pickler.name) override def runOn(units: List[CompilationUnit])(using Context): List[CompilationUnit] = + val doZincCallback = ctx.runZincPhases + val sigWriter: Option[Pickler.EarlyFileWriter] = ctx.settings.YearlyTastyOutput.value match + case earlyOut if earlyOut.isDirectory && earlyOut.exists => + Some(Pickler.EarlyFileWriter(earlyOut)) + case _ => + None val nonLocalClassSymbols = new mutable.HashSet[Symbol] - val ctx0 = ctx.withProperty(NonLocalClassSymbolsInCurrentUnits, Some(nonLocalClassSymbols)) - val units0 = super.runOn(units)(using ctx0) - ctx.withIncCallback(recordNonLocalClasses(nonLocalClassSymbols, _)) - units0 + val units0 = + if doZincCallback then + val ctx0 = ctx.withProperty(NonLocalClassSymbolsInCurrentUnits, Some(nonLocalClassSymbols)) + super.runOn(units)(using ctx0) + else + units // still run the phase for the side effects (writing TASTy files to -Yearly-tasty-output) + sigWriter.foreach(writeSigFiles(units0, _)) + if doZincCallback then + ctx.withIncCallback(recordNonLocalClasses(nonLocalClassSymbols, _)) + if ctx.settings.YjavaTasty.value then + units0.filterNot(_.typedAsJava) // remove java sources, this is the terminal phase when `-Yjava-tasty` is set + else + units0 end runOn + // Why we only write to early output in the first run? + // =================================================== + // TL;DR the point of pipeline compilation is to start downstream projects early, + // so we don't want to wait for suspended units to be compiled. + // + // But why is it safe to ignore suspended units? + // If this project contains a transparent macro that is called in the same project, + // the compilation unit of that call will be suspended (if the macro implementation + // is also in this project), causing a second run. + // However before we do that run, we will have already requested sbt to begin + // early downstream compilation. This means that the suspended definitions will not + // be visible in *early* downstream compilation. + // + // However, sbt will by default prevent downstream compilation happening in this scenario, + // due to the existence of macro definitions. So we are protected from failure if user tries + // to use the suspended definitions. + // + // Additionally, it is recommended for the user to move macro implementations to another project + // if they want to force early output. In this scenario the suspensions will no longer occur, so now + // they will become visible in the early-output. + // + // See `sbt-test/pipelining/pipelining-scala-macro` and `sbt-test/pipelining/pipelining-scala-macro-force` + // for examples of this in action. + // + // Therefore we only need to write to early output in the first run. We also provide the option + // to diagnose suspensions with the `-Yno-suspended-units` flag. 
+ private def writeSigFiles(units: List[CompilationUnit], writer: Pickler.EarlyFileWriter)(using Context): Unit = { + try + for + unit <- units + (cls, pickled) <- unit.pickled + if cls.isDefinedInCurrentRun + do + val internalName = + if cls.is(Module) then cls.binaryClassName.stripSuffix(str.MODULE_SUFFIX).nn + else cls.binaryClassName + val _ = writer.writeTasty(internalName, pickled()) + finally + writer.close() + if ctx.settings.verbose.value then + report.echo("[sig files written]") + end try + } + private def recordNonLocalClasses(nonLocalClassSymbols: mutable.HashSet[Symbol], cb: interfaces.IncrementalCallback)(using Context): Unit = for cls <- nonLocalClassSymbols do val sourceFile = cls.source diff --git a/compiler/src/dotty/tools/dotc/transform/Pickler.scala b/compiler/src/dotty/tools/dotc/transform/Pickler.scala index b0aed580e824..3a4212547d16 100644 --- a/compiler/src/dotty/tools/dotc/transform/Pickler.scala +++ b/compiler/src/dotty/tools/dotc/transform/Pickler.scala @@ -56,7 +56,7 @@ class Pickler extends Phase { // No need to repickle trees coming from TASTY override def isRunnable(using Context): Boolean = - super.isRunnable && (!ctx.settings.fromTasty.value || ctx.settings.YjavaTasty.value) + super.isRunnable && !ctx.settings.fromTasty.value // when `-Yjava-tasty` is set we actually want to run this phase on Java sources override def skipIfJava(using Context): Boolean = false @@ -94,9 +94,7 @@ class Pickler extends Phase { private val executor = Executor[Array[Byte]]() - private def useExecutor(using Context) = - Pickler.ParallelPickling && !ctx.settings.YtestPickler.value && - !ctx.settings.YjavaTasty.value // disable parallel pickling when `-Yjava-tasty` is set (internal testing only) + private def useExecutor(using Context) = Pickler.ParallelPickling && !ctx.settings.YtestPickler.value private def printerContext(isOutline: Boolean)(using Context): Context = if isOutline then ctx.fresh.setPrinterFn(OutlinePrinter(_)) @@ -196,22 +194,13 @@ class Pickler extends Phase { } override def runOn(units: List[CompilationUnit])(using Context): List[CompilationUnit] = { - val sigWriter: Option[Pickler.EarlyFileWriter] = ctx.settings.YjavaTastyOutput.value match - case jar: JarArchive if jar.exists => - Some(Pickler.EarlyFileWriter(jar)) - case _ => - None - val units0 = - if ctx.settings.fromTasty.value then - // we still run the phase for the side effect of writing the pipeline tasty files - units + val result = + if useExecutor then + executor.start() + try super.runOn(units) + finally executor.close() else - if useExecutor then - executor.start() - try super.runOn(units) - finally executor.close() - else - super.runOn(units) + super.runOn(units) if ctx.settings.YtestPickler.value then val ctx2 = ctx.fresh .setSetting(ctx.settings.YreadComments, true) @@ -222,36 +211,9 @@ class Pickler extends Phase { .setReporter(new ThrowingReporter(ctx.reporter)) .addMode(Mode.ReadPositions) ) - val result = - if ctx.settings.YjavaTasty.value then - sigWriter.foreach(writeJavaSigFiles(units0, _)) - units0.filterNot(_.typedAsJava) // remove java sources, this is the terminal phase when `-Yjava-tasty` is set - else - units0 result } - private def writeJavaSigFiles(units: List[CompilationUnit], writer: Pickler.EarlyFileWriter)(using Context): Unit = { - var count = 0 - try - for - unit <- units if unit.typedAsJava - (cls, pickled) <- unit.pickled - if cls.isDefinedInCurrentRun - do - val binaryClassName = cls.binaryClassName - val internalName = - if (cls.is(Module)) 
binaryClassName.stripSuffix(str.MODULE_SUFFIX).nn - else binaryClassName - val _ = writer.writeTasty(internalName, pickled()) - count += 1 - finally - writer.close() - if ctx.settings.verbose.value then - report.echo(s"[$count java sig files written]") - end try - } - private def testUnpickler(using Context): Unit = pickling.println(i"testing unpickler at run ${ctx.runId}") ctx.initialize() diff --git a/sbt-test/pipelining/Yearly-tasty-output-inline/a/src/main/scala/a/A.scala b/sbt-test/pipelining/Yearly-tasty-output-inline/a/src/main/scala/a/A.scala new file mode 100644 index 000000000000..930e0ee78eb9 --- /dev/null +++ b/sbt-test/pipelining/Yearly-tasty-output-inline/a/src/main/scala/a/A.scala @@ -0,0 +1,10 @@ +package a + +import scala.quoted.* + +object A { + inline def power(x: Double, inline n: Int): Double = + inline if (n == 0) 1.0 + else inline if (n % 2 == 1) x * power(x, n - 1) + else power(x * x, n / 2) +} diff --git a/sbt-test/pipelining/Yearly-tasty-output-inline/b/src/main/scala/b/B.scala b/sbt-test/pipelining/Yearly-tasty-output-inline/b/src/main/scala/b/B.scala new file mode 100644 index 000000000000..7055d6d2d006 --- /dev/null +++ b/sbt-test/pipelining/Yearly-tasty-output-inline/b/src/main/scala/b/B.scala @@ -0,0 +1,10 @@ +package b + +import a.A + +object B { + @main def run = + assert(A.power(2.0, 2) == 4.0) + assert(A.power(2.0, 3) == 8.0) + assert(A.power(2.0, 4) == 16.0) +} diff --git a/sbt-test/pipelining/Yearly-tasty-output-inline/build.sbt b/sbt-test/pipelining/Yearly-tasty-output-inline/build.sbt new file mode 100644 index 000000000000..c0c726ce6a02 --- /dev/null +++ b/sbt-test/pipelining/Yearly-tasty-output-inline/build.sbt @@ -0,0 +1,14 @@ +// defines a inline method +lazy val a = project.in(file("a")) + .settings( + scalacOptions ++= Seq("-Yearly-tasty-output", ((ThisBuild / baseDirectory).value / "a-early.jar").toString), + scalacOptions += "-Ystop-after:firstTransform", + scalacOptions += "-Ycheck:all", + ) + +// uses the inline method, this is fine as there is no macro classloader involved +lazy val b = project.in(file("b")) + .settings( + Compile / unmanagedClasspath += Attributed.blank((ThisBuild / baseDirectory).value / "a-early.jar"), + scalacOptions += "-Ycheck:all", + ) diff --git a/sbt-test/pipelining/Yearly-tasty-output-inline/project/DottyInjectedPlugin.scala b/sbt-test/pipelining/Yearly-tasty-output-inline/project/DottyInjectedPlugin.scala new file mode 100644 index 000000000000..69f15d168bfc --- /dev/null +++ b/sbt-test/pipelining/Yearly-tasty-output-inline/project/DottyInjectedPlugin.scala @@ -0,0 +1,12 @@ +import sbt._ +import Keys._ + +object DottyInjectedPlugin extends AutoPlugin { + override def requires = plugins.JvmPlugin + override def trigger = allRequirements + + override val projectSettings = Seq( + scalaVersion := sys.props("plugin.scalaVersion"), + scalacOptions += "-source:3.0-migration" + ) +} diff --git a/sbt-test/pipelining/Yearly-tasty-output-inline/test b/sbt-test/pipelining/Yearly-tasty-output-inline/test new file mode 100644 index 000000000000..9779d91ce131 --- /dev/null +++ b/sbt-test/pipelining/Yearly-tasty-output-inline/test @@ -0,0 +1,3 @@ +> a/compile +# uses the early output jar of a +> b/run diff --git a/sbt-test/pipelining/Yearly-tasty-output/a/src/main/scala/a/A.scala b/sbt-test/pipelining/Yearly-tasty-output/a/src/main/scala/a/A.scala new file mode 100644 index 000000000000..4b10db3eb385 --- /dev/null +++ b/sbt-test/pipelining/Yearly-tasty-output/a/src/main/scala/a/A.scala @@ -0,0 +1,5 @@ +package a + +object A { + 
val foo: (1,2,3) = (1,2,3) +} diff --git a/sbt-test/pipelining/Yearly-tasty-output/b-early-out/.keep b/sbt-test/pipelining/Yearly-tasty-output/b-early-out/.keep new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/sbt-test/pipelining/Yearly-tasty-output/b/src/main/scala/b/B.scala b/sbt-test/pipelining/Yearly-tasty-output/b/src/main/scala/b/B.scala new file mode 100644 index 000000000000..5e6fa369e309 --- /dev/null +++ b/sbt-test/pipelining/Yearly-tasty-output/b/src/main/scala/b/B.scala @@ -0,0 +1,5 @@ +package b + +object B { + val bar: (1,2,3) = (1,2,3) +} diff --git a/sbt-test/pipelining/Yearly-tasty-output/build.sbt b/sbt-test/pipelining/Yearly-tasty-output/build.sbt new file mode 100644 index 000000000000..62990c616071 --- /dev/null +++ b/sbt-test/pipelining/Yearly-tasty-output/build.sbt @@ -0,0 +1,23 @@ +// early out is a jar +lazy val a = project.in(file("a")) + .settings( + scalacOptions ++= Seq("-Yearly-tasty-output", ((ThisBuild / baseDirectory).value / "a-early.jar").toString), + scalacOptions += "-Ystop-after:firstTransform", + scalacOptions += "-Ycheck:all", + ) + +// early out is a directory +lazy val b = project.in(file("b")) + .settings( + scalacOptions ++= Seq("-Yearly-tasty-output", ((ThisBuild / baseDirectory).value / "b-early-out").toString), + scalacOptions += "-Ystop-after:firstTransform", + scalacOptions += "-Ycheck:all", + ) + +// reads classpaths from early tasty outputs. No need for extra flags as the full tasty is available. +lazy val c = project.in(file("c")) + .settings( + Compile / unmanagedClasspath += Attributed.blank((ThisBuild / baseDirectory).value / "a-early.jar"), + Compile / unmanagedClasspath += Attributed.blank((ThisBuild / baseDirectory).value / "b-early-out"), + scalacOptions += "-Ycheck:all", + ) diff --git a/sbt-test/pipelining/Yearly-tasty-output/c/src/main/scala/c/C.scala b/sbt-test/pipelining/Yearly-tasty-output/c/src/main/scala/c/C.scala new file mode 100644 index 000000000000..fd1876088778 --- /dev/null +++ b/sbt-test/pipelining/Yearly-tasty-output/c/src/main/scala/c/C.scala @@ -0,0 +1,9 @@ +package c + +import a.A +import b.B + +object C { + val f: 2 = A.foo(1) + val g: 3 = B.bar(2) +} diff --git a/sbt-test/pipelining/Yearly-tasty-output/project/DottyInjectedPlugin.scala b/sbt-test/pipelining/Yearly-tasty-output/project/DottyInjectedPlugin.scala new file mode 100644 index 000000000000..69f15d168bfc --- /dev/null +++ b/sbt-test/pipelining/Yearly-tasty-output/project/DottyInjectedPlugin.scala @@ -0,0 +1,12 @@ +import sbt._ +import Keys._ + +object DottyInjectedPlugin extends AutoPlugin { + override def requires = plugins.JvmPlugin + override def trigger = allRequirements + + override val projectSettings = Seq( + scalaVersion := sys.props("plugin.scalaVersion"), + scalacOptions += "-source:3.0-migration" + ) +} diff --git a/sbt-test/pipelining/Yearly-tasty-output/test b/sbt-test/pipelining/Yearly-tasty-output/test new file mode 100644 index 000000000000..52d60facc75b --- /dev/null +++ b/sbt-test/pipelining/Yearly-tasty-output/test @@ -0,0 +1,5 @@ +> a/compile +# same as a but with a directory output +> b/compile +# c uses the early output jar of a and b +> c/compile diff --git a/sbt-test/pipelining/Yjava-tasty-annotation/build.sbt b/sbt-test/pipelining/Yjava-tasty-annotation/build.sbt index 18f6b8224968..20a13d7d4ba0 100644 --- a/sbt-test/pipelining/Yjava-tasty-annotation/build.sbt +++ b/sbt-test/pipelining/Yjava-tasty-annotation/build.sbt @@ -1,7 +1,7 @@ lazy val a = project.in(file("a")) .settings( scalacOptions += 
"-Yjava-tasty", // enable pickling of java signatures - scalacOptions ++= Seq("-Yjava-tasty-output", ((ThisBuild / baseDirectory).value / "a-annotation-java-tasty.jar").toString), + scalacOptions ++= Seq("-Yearly-tasty-output", ((ThisBuild / baseDirectory).value / "a-annotation-java-tasty.jar").toString), scalacOptions += "-Ycheck:all", Compile / classDirectory := ((ThisBuild / baseDirectory).value / "a-annotation-classes"), // send classfiles to a different directory ) diff --git a/sbt-test/pipelining/Yjava-tasty-enum/build.sbt b/sbt-test/pipelining/Yjava-tasty-enum/build.sbt index aca2391987e9..2083003d9ebe 100644 --- a/sbt-test/pipelining/Yjava-tasty-enum/build.sbt +++ b/sbt-test/pipelining/Yjava-tasty-enum/build.sbt @@ -2,7 +2,7 @@ lazy val a = project.in(file("a")) .settings( compileOrder := CompileOrder.Mixed, // ensure we send java sources to Scala compiler scalacOptions += "-Yjava-tasty", // enable pickling of java signatures - scalacOptions ++= Seq("-Yjava-tasty-output", ((ThisBuild / baseDirectory).value / "a-enum-java-tasty.jar").toString), + scalacOptions ++= Seq("-Yearly-tasty-output", ((ThisBuild / baseDirectory).value / "a-enum-java-tasty.jar").toString), scalacOptions += "-Ycheck:all", Compile / classDirectory := ((ThisBuild / baseDirectory).value / "a-enum-classes"), // send classfiles to a different directory ) diff --git a/sbt-test/pipelining/Yjava-tasty-from-tasty/build.sbt b/sbt-test/pipelining/Yjava-tasty-from-tasty/build.sbt index e4b15d3d9c7e..040c3bf6eac8 100644 --- a/sbt-test/pipelining/Yjava-tasty-from-tasty/build.sbt +++ b/sbt-test/pipelining/Yjava-tasty-from-tasty/build.sbt @@ -3,7 +3,7 @@ lazy val a = project.in(file("a")) .settings( compileOrder := CompileOrder.Mixed, // ensure we send java sources to Scala compiler scalacOptions += "-Yjava-tasty", // enable pickling of java signatures - scalacOptions ++= Seq("-Yjava-tasty-output", ((ThisBuild / baseDirectory).value / "a-pre-java-tasty.jar").toString), + scalacOptions ++= Seq("-Yearly-tasty-output", ((ThisBuild / baseDirectory).value / "a-pre-java-tasty.jar").toString), scalacOptions += "-Ycheck:all", Compile / classDirectory := ((ThisBuild / baseDirectory).value / "a-pre-classes"), // send classfiles to a different directory ) @@ -17,7 +17,7 @@ lazy val a_from_tasty = project.in(file("a_from_tasty")) scalacOptions += "-from-tasty", // read the jar file tasties as the source files scalacOptions += "-Yjava-tasty", scalacOptions += "-Yallow-outline-from-tasty", // allow outline signatures to be read with -from-tasty - scalacOptions ++= Seq("-Yjava-tasty-output", ((ThisBuild / baseDirectory).value / "a_from_tasty-java-tasty.jar").toString), + scalacOptions ++= Seq("-Yearly-tasty-output", ((ThisBuild / baseDirectory).value / "a_from_tasty-java-tasty.jar").toString), scalacOptions += "-Ycheck:all", Compile / classDirectory := ((ThisBuild / baseDirectory).value / "a_from_tasty-classes"), // send classfiles to a different directory ) diff --git a/sbt-test/pipelining/Yjava-tasty-fromjavaobject/build.sbt b/sbt-test/pipelining/Yjava-tasty-fromjavaobject/build.sbt index 6738db3016fa..9013490f1f54 100644 --- a/sbt-test/pipelining/Yjava-tasty-fromjavaobject/build.sbt +++ b/sbt-test/pipelining/Yjava-tasty-fromjavaobject/build.sbt @@ -2,7 +2,7 @@ lazy val a = project.in(file("a")) .settings( compileOrder := CompileOrder.Mixed, // ensure we send java sources to Scala compiler scalacOptions += "-Yjava-tasty", // enable pickling of java signatures - scalacOptions ++= Seq("-Yjava-tasty-output", ((ThisBuild / 
baseDirectory).value / "a-enum-java-tasty.jar").toString), + scalacOptions ++= Seq("-Yearly-tasty-output", ((ThisBuild / baseDirectory).value / "a-enum-java-tasty.jar").toString), scalacOptions += "-Ycheck:all", Compile / classDirectory := ((ThisBuild / baseDirectory).value / "a-enum-classes"), // send classfiles to a different directory ) @@ -14,7 +14,7 @@ lazy val aCheck = project.in(file("a-check")) Compile / sources := (a / Compile / sources).value, // use the same sources as a compileOrder := CompileOrder.Mixed, // ensure we send java sources to Scala compiler scalacOptions += "-Yjava-tasty", // enable pickling of java signatures - scalacOptions ++= Seq("-Yjava-tasty-output", ((ThisBuild / baseDirectory).value / "a-enum-java-tasty-2.jar").toString), + scalacOptions ++= Seq("-Yearly-tasty-output", ((ThisBuild / baseDirectory).value / "a-enum-java-tasty-2.jar").toString), Compile / classDirectory := ((ThisBuild / baseDirectory).value / "a-enum-classes-2"), // send classfiles to a different directory ) diff --git a/sbt-test/pipelining/Yjava-tasty-generic/a/src/main/scala/a/A.java b/sbt-test/pipelining/Yjava-tasty-generic/a/src/main/scala/a/A.java index 1fcb7e78ae3d..c6e7431f0bbe 100644 --- a/sbt-test/pipelining/Yjava-tasty-generic/a/src/main/scala/a/A.java +++ b/sbt-test/pipelining/Yjava-tasty-generic/a/src/main/scala/a/A.java @@ -1,6 +1,8 @@ // this test ensures that it is possible to read a generic java class from TASTy. package a; +import java.lang.Object; + public abstract class A { private final int _value; @@ -11,4 +13,8 @@ protected A(final int value) { public int value() { return _value; } + + public int hash(Object any) { + return any.hashCode(); + } } diff --git a/sbt-test/pipelining/Yjava-tasty-generic/b/src/main/scala/b/B.scala b/sbt-test/pipelining/Yjava-tasty-generic/b/src/main/scala/b/B.scala index f132e012a5fc..62e58aa72f94 100644 --- a/sbt-test/pipelining/Yjava-tasty-generic/b/src/main/scala/b/B.scala +++ b/sbt-test/pipelining/Yjava-tasty-generic/b/src/main/scala/b/B.scala @@ -7,9 +7,15 @@ class B[T] { } object B { + + val someAny: Any = 23 + + val inner = (new B[Int]).inner + @main def test = { - val derived: Int = (new B[Int]).inner.value + val derived: Int = inner.value assert(derived == 23, s"actually was $derived") + assert(inner.hash(someAny) == someAny.hashCode, s"actually was ${inner.hash(someAny)}") } } diff --git a/sbt-test/pipelining/Yjava-tasty-generic/build.sbt b/sbt-test/pipelining/Yjava-tasty-generic/build.sbt index 07e2ea56fbaa..9e2796600333 100644 --- a/sbt-test/pipelining/Yjava-tasty-generic/build.sbt +++ b/sbt-test/pipelining/Yjava-tasty-generic/build.sbt @@ -1,7 +1,7 @@ lazy val a = project.in(file("a")) .settings( scalacOptions += "-Yjava-tasty", // enable pickling of java signatures - scalacOptions ++= Seq("-Yjava-tasty-output", ((ThisBuild / baseDirectory).value / "a-generic-java-tasty.jar").toString), + scalacOptions ++= Seq("-Yearly-tasty-output", ((ThisBuild / baseDirectory).value / "a-generic-java-tasty.jar").toString), scalacOptions += "-Ycheck:all", Compile / classDirectory := ((ThisBuild / baseDirectory).value / "a-generic-classes"), // send classfiles to a different directory ) diff --git a/sbt-test/pipelining/Yjava-tasty-paths/build.sbt b/sbt-test/pipelining/Yjava-tasty-paths/build.sbt index d63d1f9a3f7e..49487fccb57e 100644 --- a/sbt-test/pipelining/Yjava-tasty-paths/build.sbt +++ b/sbt-test/pipelining/Yjava-tasty-paths/build.sbt @@ -1,7 +1,7 @@ lazy val a = project.in(file("a")) .settings( scalacOptions += "-Yjava-tasty", // enable 
pickling of java signatures - scalacOptions ++= Seq("-Yjava-tasty-output", ((ThisBuild / baseDirectory).value / "a-paths-java-tasty.jar").toString), + scalacOptions ++= Seq("-Yearly-tasty-output", ((ThisBuild / baseDirectory).value / "a-paths-java-tasty.jar").toString), scalacOptions += "-Ycheck:all", Compile / classDirectory := ((ThisBuild / baseDirectory).value / "a-paths-classes"), // send classfiles to a different directory ) diff --git a/sbt-test/pipelining/Yjava-tasty-result-types/build.sbt b/sbt-test/pipelining/Yjava-tasty-result-types/build.sbt index 512344f0635b..80bcf71b3365 100644 --- a/sbt-test/pipelining/Yjava-tasty-result-types/build.sbt +++ b/sbt-test/pipelining/Yjava-tasty-result-types/build.sbt @@ -1,7 +1,7 @@ lazy val a = project.in(file("a")) .settings( scalacOptions += "-Yjava-tasty", // enable pickling of java signatures - scalacOptions ++= Seq("-Yjava-tasty-output", ((ThisBuild / baseDirectory).value / "a-result-types-java-tasty.jar").toString), + scalacOptions ++= Seq("-Yearly-tasty-output", ((ThisBuild / baseDirectory).value / "a-result-types-java-tasty.jar").toString), scalacOptions += "-Ycheck:all", Compile / classDirectory := ((ThisBuild / baseDirectory).value / "a-result-types-classes"), // send classfiles to a different directory ) diff --git a/sbt-test/pipelining/pipelining-scala-inline/a/src/main/scala/a/A.scala b/sbt-test/pipelining/pipelining-scala-inline/a/src/main/scala/a/A.scala new file mode 100644 index 000000000000..c2dfb3e2c886 --- /dev/null +++ b/sbt-test/pipelining/pipelining-scala-inline/a/src/main/scala/a/A.scala @@ -0,0 +1,8 @@ +package a + +object A { + inline def power(x: Double, inline n: Int): Double = + inline if (n == 0) 1.0 + else inline if (n % 2 == 1) x * power(x, n - 1) + else power(x * x, n / 2) +} diff --git a/sbt-test/pipelining/pipelining-scala-inline/b/src/main/scala/b/B.scala b/sbt-test/pipelining/pipelining-scala-inline/b/src/main/scala/b/B.scala new file mode 100644 index 000000000000..7055d6d2d006 --- /dev/null +++ b/sbt-test/pipelining/pipelining-scala-inline/b/src/main/scala/b/B.scala @@ -0,0 +1,10 @@ +package b + +import a.A + +object B { + @main def run = + assert(A.power(2.0, 2) == 4.0) + assert(A.power(2.0, 3) == 8.0) + assert(A.power(2.0, 4) == 16.0) +} diff --git a/sbt-test/pipelining/pipelining-scala-inline/build.sbt b/sbt-test/pipelining/pipelining-scala-inline/build.sbt new file mode 100644 index 000000000000..cd2a0c4eef07 --- /dev/null +++ b/sbt-test/pipelining/pipelining-scala-inline/build.sbt @@ -0,0 +1,35 @@ +ThisBuild / usePipelining := true + +// defines a purely inline function, and we always force the early output, this should not be needed in practice +// because pure inline methods do not have a Macro flag. +lazy val a = project.in(file("a")) + .settings( + scalacOptions += "-Ycheck:all", + Compile / incOptions := { + val old = (Compile / incOptions).value + val hooks = old.externalHooks + val newHooks = hooks.withExternalLookup( + new sbt.internal.inc.NoopExternalLookup { + // assert that the analysis contains the class `a.A` and that it does not have a macro. 
+ override def shouldDoEarlyOutput(analysis: xsbti.compile.CompileAnalysis): Boolean = { + val internalClasses = analysis.asInstanceOf[sbt.internal.inc.Analysis].apis.internal + val a_A = internalClasses.get("a.A") + assert(a_A.exists(cls => !cls.hasMacro), "`a.A` wasn't found, or it had a macro.") + + // returning true will force the early output ping and activate downstream pipelining, + // this is fine for inline methods, but see `sbt-test/pipelining/pipelining-scala-macro-fail` for how + // we can force a failure by returning true here. + true + } + } + ) + old.withExternalHooks(newHooks) + }, + ) + +// uses the purely inline function +lazy val b = project.in(file("b")) + .dependsOn(a) + .settings( + scalacOptions += "-Ycheck:all", + ) diff --git a/sbt-test/pipelining/pipelining-scala-inline/project/DottyInjectedPlugin.scala b/sbt-test/pipelining/pipelining-scala-inline/project/DottyInjectedPlugin.scala new file mode 100644 index 000000000000..69f15d168bfc --- /dev/null +++ b/sbt-test/pipelining/pipelining-scala-inline/project/DottyInjectedPlugin.scala @@ -0,0 +1,12 @@ +import sbt._ +import Keys._ + +object DottyInjectedPlugin extends AutoPlugin { + override def requires = plugins.JvmPlugin + override def trigger = allRequirements + + override val projectSettings = Seq( + scalaVersion := sys.props("plugin.scalaVersion"), + scalacOptions += "-source:3.0-migration" + ) +} diff --git a/sbt-test/pipelining/pipelining-scala-inline/test b/sbt-test/pipelining/pipelining-scala-inline/test new file mode 100644 index 000000000000..48a2443830b5 --- /dev/null +++ b/sbt-test/pipelining/pipelining-scala-inline/test @@ -0,0 +1 @@ +> b/run diff --git a/sbt-test/pipelining/pipelining-scala-java-basic/a/src/main/scala/a/A.scala b/sbt-test/pipelining/pipelining-scala-java-basic/a/src/main/scala/a/A.scala new file mode 100644 index 000000000000..4b10db3eb385 --- /dev/null +++ b/sbt-test/pipelining/pipelining-scala-java-basic/a/src/main/scala/a/A.scala @@ -0,0 +1,5 @@ +package a + +object A { + val foo: (1,2,3) = (1,2,3) +} diff --git a/sbt-test/pipelining/pipelining-scala-java-basic/b/src/main/scala/b/B.java b/sbt-test/pipelining/pipelining-scala-java-basic/b/src/main/scala/b/B.java new file mode 100644 index 000000000000..7cac88d3cd46 --- /dev/null +++ b/sbt-test/pipelining/pipelining-scala-java-basic/b/src/main/scala/b/B.java @@ -0,0 +1,5 @@ +package b; + +public class B { + public static final String VALUE = "B"; +} diff --git a/sbt-test/pipelining/pipelining-scala-java-basic/build.sbt b/sbt-test/pipelining/pipelining-scala-java-basic/build.sbt new file mode 100644 index 000000000000..2b49443ae8f0 --- /dev/null +++ b/sbt-test/pipelining/pipelining-scala-java-basic/build.sbt @@ -0,0 +1,17 @@ +ThisBuild / usePipelining := true + +lazy val a = project.in(file("a")) + .settings( + scalacOptions += "-Ycheck:all", + ) + +lazy val b = project.in(file("b")) + .settings( + scalacOptions += "-Ycheck:all", + ) + +lazy val c = project.in(file("c")) + .dependsOn(a, b) + .settings( + scalacOptions += "-Ycheck:all", + ) diff --git a/sbt-test/pipelining/pipelining-scala-java-basic/c/src/main/scala/c/C.scala b/sbt-test/pipelining/pipelining-scala-java-basic/c/src/main/scala/c/C.scala new file mode 100644 index 000000000000..b8e23e0b5920 --- /dev/null +++ b/sbt-test/pipelining/pipelining-scala-java-basic/c/src/main/scala/c/C.scala @@ -0,0 +1,15 @@ +package c + +import a.A +import b.B + +object C { + val c_1: 2 = A.foo(1) + val c_2: "B" = B.VALUE + + @main def run = + assert(A.foo(0) == 1) + assert(A.foo(1) == 2) + 
assert(A.foo(2) == 3) + assert(B.VALUE == "B") +} diff --git a/sbt-test/pipelining/pipelining-scala-java-basic/project/DottyInjectedPlugin.scala b/sbt-test/pipelining/pipelining-scala-java-basic/project/DottyInjectedPlugin.scala new file mode 100644 index 000000000000..69f15d168bfc --- /dev/null +++ b/sbt-test/pipelining/pipelining-scala-java-basic/project/DottyInjectedPlugin.scala @@ -0,0 +1,12 @@ +import sbt._ +import Keys._ + +object DottyInjectedPlugin extends AutoPlugin { + override def requires = plugins.JvmPlugin + override def trigger = allRequirements + + override val projectSettings = Seq( + scalaVersion := sys.props("plugin.scalaVersion"), + scalacOptions += "-source:3.0-migration" + ) +} diff --git a/sbt-test/pipelining/pipelining-scala-java-basic/test b/sbt-test/pipelining/pipelining-scala-java-basic/test new file mode 100644 index 000000000000..77f2017c835f --- /dev/null +++ b/sbt-test/pipelining/pipelining-scala-java-basic/test @@ -0,0 +1 @@ +> c/run diff --git a/sbt-test/pipelining/pipelining-scala-macro-fail/a/src/main/scala/a/A.scala b/sbt-test/pipelining/pipelining-scala-macro-fail/a/src/main/scala/a/A.scala new file mode 100644 index 000000000000..d98a9d2c1159 --- /dev/null +++ b/sbt-test/pipelining/pipelining-scala-macro-fail/a/src/main/scala/a/A.scala @@ -0,0 +1,18 @@ +package a + +import scala.quoted.* + +object A { + + inline def power(x: Double, inline n: Int): Double = + ${ powerCode('x, 'n) } + + def powerCode(x: Expr[Double], n: Expr[Int])(using Quotes): Expr[Double] = { + def impl(x: Double, n: Int): Double = + if (n == 0) 1.0 + else if (n % 2 == 1) x * impl(x, n - 1) + else impl(x * x, n / 2) + + Expr(impl(x.valueOrError, n.valueOrError)) + } +} diff --git a/sbt-test/pipelining/pipelining-scala-macro-fail/b/src/main/scala/b/B.scala b/sbt-test/pipelining/pipelining-scala-macro-fail/b/src/main/scala/b/B.scala new file mode 100644 index 000000000000..7055d6d2d006 --- /dev/null +++ b/sbt-test/pipelining/pipelining-scala-macro-fail/b/src/main/scala/b/B.scala @@ -0,0 +1,10 @@ +package b + +import a.A + +object B { + @main def run = + assert(A.power(2.0, 2) == 4.0) + assert(A.power(2.0, 3) == 8.0) + assert(A.power(2.0, 4) == 16.0) +} diff --git a/sbt-test/pipelining/pipelining-scala-macro-fail/build.sbt b/sbt-test/pipelining/pipelining-scala-macro-fail/build.sbt new file mode 100644 index 000000000000..c98e664af507 --- /dev/null +++ b/sbt-test/pipelining/pipelining-scala-macro-fail/build.sbt @@ -0,0 +1,28 @@ +ThisBuild / usePipelining := true + +// defines a macro; normally this would cause sbt not to write the early output jar, but we force it, +// which will cause b to fail to compile due to the missing macro class; +// see `sbt-test/pipelining/pipelining-scala-macro` for how sbt does the right thing by default +lazy val a = project.in(file("a")) + .settings( + scalacOptions += "-Ycheck:all", + Compile / incOptions := { + val old = (Compile / incOptions).value + val hooks = old.externalHooks + val newHooks = hooks.withExternalLookup( + new sbt.internal.inc.NoopExternalLookup { + // force early output; this is safe in projects where the macro implementation is not in the same project, + // however in this build, b will now fail as it will not find the macro implementation class.
+ override def shouldDoEarlyOutput(analysis: xsbti.compile.CompileAnalysis): Boolean = true + } + ) + old.withExternalHooks(newHooks) + }, + ) + +// uses the macro; this will fail because we forced the early output ping, so the macro implementation class is missing +lazy val b = project.in(file("b")) + .dependsOn(a) + .settings( + scalacOptions += "-Ycheck:all", + ) diff --git a/sbt-test/pipelining/pipelining-scala-macro-fail/project/DottyInjectedPlugin.scala b/sbt-test/pipelining/pipelining-scala-macro-fail/project/DottyInjectedPlugin.scala new file mode 100644 index 000000000000..69f15d168bfc --- /dev/null +++ b/sbt-test/pipelining/pipelining-scala-macro-fail/project/DottyInjectedPlugin.scala @@ -0,0 +1,12 @@ +import sbt._ +import Keys._ + +object DottyInjectedPlugin extends AutoPlugin { + override def requires = plugins.JvmPlugin + override def trigger = allRequirements + + override val projectSettings = Seq( + scalaVersion := sys.props("plugin.scalaVersion"), + scalacOptions += "-source:3.0-migration" + ) +} diff --git a/sbt-test/pipelining/pipelining-scala-macro-fail/test b/sbt-test/pipelining/pipelining-scala-macro-fail/test new file mode 100644 index 000000000000..13daffd6dfa0 --- /dev/null +++ b/sbt-test/pipelining/pipelining-scala-macro-fail/test @@ -0,0 +1,2 @@ +> a/compile +-> b/compile diff --git a/sbt-test/pipelining/pipelining-scala-macro-force/a/src/main/scala/a/A.scala b/sbt-test/pipelining/pipelining-scala-macro-force/a/src/main/scala/a/A.scala new file mode 100644 index 000000000000..520aec03482a --- /dev/null +++ b/sbt-test/pipelining/pipelining-scala-macro-force/a/src/main/scala/a/A.scala @@ -0,0 +1,13 @@ +package a + +import scala.quoted.* + +object A { + + transparent inline def transparentPower(x: Double, inline n: Int): Double = + ${ macros.MacroImpl.powerCode('x, 'n) } + + inline def power(x: Double, inline n: Int): Double = + ${ macros.MacroImpl.powerCode('x, 'n) } + +} diff --git a/sbt-test/pipelining/pipelining-scala-macro-force/a/src/main/scala/a/AConsume.scala b/sbt-test/pipelining/pipelining-scala-macro-force/a/src/main/scala/a/AConsume.scala new file mode 100644 index 000000000000..1a4b0c234910 --- /dev/null +++ b/sbt-test/pipelining/pipelining-scala-macro-force/a/src/main/scala/a/AConsume.scala @@ -0,0 +1,5 @@ +package a + +object AConsume { + def sixtyFour: Double = A.power(2.0, 6) // cause a suspension in inlining +} diff --git a/sbt-test/pipelining/pipelining-scala-macro-force/a/src/main/scala/a/AConsumeTransparent.scala b/sbt-test/pipelining/pipelining-scala-macro-force/a/src/main/scala/a/AConsumeTransparent.scala new file mode 100644 index 000000000000..cbd356047c4d --- /dev/null +++ b/sbt-test/pipelining/pipelining-scala-macro-force/a/src/main/scala/a/AConsumeTransparent.scala @@ -0,0 +1,5 @@ +package a + +object AConsumeTransparent { + def thirtyTwo: Double = A.transparentPower(2.0, 5) // cause a suspension in typer +} diff --git a/sbt-test/pipelining/pipelining-scala-macro-force/b/src/main/scala/b/B.scala b/sbt-test/pipelining/pipelining-scala-macro-force/b/src/main/scala/b/B.scala new file mode 100644 index 000000000000..7955b1d7cfbb --- /dev/null +++ b/sbt-test/pipelining/pipelining-scala-macro-force/b/src/main/scala/b/B.scala @@ -0,0 +1,14 @@ +package b + +import a.A +import a.AConsumeTransparent +import a.AConsume + +object B { + @main def run = + assert(A.power(2.0, 2) == 4.0) + assert(A.power(2.0, 3) == 8.0) + assert(A.power(2.0, 4) == 16.0) + assert(AConsumeTransparent.thirtyTwo == 32.0) // these are not actually suspended in this project +
assert(AConsume.sixtyFour == 64.0) // check that suspended definition is still available +} diff --git a/sbt-test/pipelining/pipelining-scala-macro-force/build.sbt b/sbt-test/pipelining/pipelining-scala-macro-force/build.sbt new file mode 100644 index 000000000000..ee06080d0e76 --- /dev/null +++ b/sbt-test/pipelining/pipelining-scala-macro-force/build.sbt @@ -0,0 +1,45 @@ +ThisBuild / usePipelining := true + +// defines just the macro implementations +lazy val macros = project.in(file("macros")) + .settings( + scalacOptions += "-Ycheck:all", + Compile / exportPipelining := false // downstream waits until classfiles are available + ) + +// defines a macro, we need to force sbt to produce the early output jar +// because it will detect macros in the analysis. +// However the classes for the implementation are provided by `macros` +lazy val a = project.in(file("a")) + .dependsOn(macros) + .settings( + scalacOptions += "-Ycheck:all", + scalacOptions += "-Xprint-suspension", + Compile / incOptions := { + val old = (Compile / incOptions).value + val hooks = old.externalHooks + val newHooks = hooks.withExternalLookup( + new sbt.internal.inc.NoopExternalLookup { + // force early output, this is safe because the macro class from `macros` will be available. + override def shouldDoEarlyOutput(analysis: xsbti.compile.CompileAnalysis): Boolean = { + val internalClasses = analysis.asInstanceOf[sbt.internal.inc.Analysis].apis.internal + val a_A = internalClasses.get("a.A") + val a_AConsume = internalClasses.get("a.AConsume") + val a_AConsumeTransparent = internalClasses.get("a.AConsumeTransparent") + assert(a_A.exists(cls => cls.hasMacro), s"`a.A` wasn't found, or it didn't have a macro.") + assert(a_AConsume.isDefined, s"`a.AConsume` wasn't found.") + assert(a_AConsumeTransparent.isDefined, s"`a.AConsumeTransparent` wasn't found.") + true // because `a.A` has macros, normally this would be false + } + } + ) + old.withExternalHooks(newHooks) + }, + ) + +// uses the macro, will still succeed as the macro implementation class is available +lazy val b = project.in(file("b")) + .dependsOn(a) + .settings( + scalacOptions += "-Ycheck:all", + ) diff --git a/sbt-test/pipelining/pipelining-scala-macro-force/macros/src/main/scala/macros/MacroImpl.scala b/sbt-test/pipelining/pipelining-scala-macro-force/macros/src/main/scala/macros/MacroImpl.scala new file mode 100644 index 000000000000..d7c03aaf0ae0 --- /dev/null +++ b/sbt-test/pipelining/pipelining-scala-macro-force/macros/src/main/scala/macros/MacroImpl.scala @@ -0,0 +1,15 @@ +package macros + +import scala.quoted.* + +object MacroImpl { + + def powerCode(x: Expr[Double], n: Expr[Int])(using Quotes): Expr[Double] = { + def impl(x: Double, n: Int): Double = + if (n == 0) 1.0 + else if (n % 2 == 1) x * impl(x, n - 1) + else impl(x * x, n / 2) + + Expr(impl(x.valueOrError, n.valueOrError)) + } +} diff --git a/sbt-test/pipelining/pipelining-scala-macro-force/project/DottyInjectedPlugin.scala b/sbt-test/pipelining/pipelining-scala-macro-force/project/DottyInjectedPlugin.scala new file mode 100644 index 000000000000..69f15d168bfc --- /dev/null +++ b/sbt-test/pipelining/pipelining-scala-macro-force/project/DottyInjectedPlugin.scala @@ -0,0 +1,12 @@ +import sbt._ +import Keys._ + +object DottyInjectedPlugin extends AutoPlugin { + override def requires = plugins.JvmPlugin + override def trigger = allRequirements + + override val projectSettings = Seq( + scalaVersion := sys.props("plugin.scalaVersion"), + scalacOptions += "-source:3.0-migration" + ) +} diff --git 
a/sbt-test/pipelining/pipelining-scala-macro-force/test b/sbt-test/pipelining/pipelining-scala-macro-force/test new file mode 100644 index 000000000000..48a2443830b5 --- /dev/null +++ b/sbt-test/pipelining/pipelining-scala-macro-force/test @@ -0,0 +1 @@ +> b/run diff --git a/sbt-test/pipelining/pipelining-scala-macro/a/src/main/scala/a/A.scala b/sbt-test/pipelining/pipelining-scala-macro/a/src/main/scala/a/A.scala new file mode 100644 index 000000000000..9077f0a2e849 --- /dev/null +++ b/sbt-test/pipelining/pipelining-scala-macro/a/src/main/scala/a/A.scala @@ -0,0 +1,21 @@ +package a + +import scala.quoted.* + +object A { + + transparent inline def transparentPower(x: Double, inline n: Int): Double = + ${ powerCode('x, 'n) } + + inline def power(x: Double, inline n: Int): Double = + ${ powerCode('x, 'n) } + + def powerCode(x: Expr[Double], n: Expr[Int])(using Quotes): Expr[Double] = { + def impl(x: Double, n: Int): Double = + if (n == 0) 1.0 + else if (n % 2 == 1) x * impl(x, n - 1) + else impl(x * x, n / 2) + + Expr(impl(x.valueOrError, n.valueOrError)) + } +} diff --git a/sbt-test/pipelining/pipelining-scala-macro/a/src/main/scala/a/ASuspendInlining.scala b/sbt-test/pipelining/pipelining-scala-macro/a/src/main/scala/a/ASuspendInlining.scala new file mode 100644 index 000000000000..0fa449601d31 --- /dev/null +++ b/sbt-test/pipelining/pipelining-scala-macro/a/src/main/scala/a/ASuspendInlining.scala @@ -0,0 +1,5 @@ +package a + +object ASuspendInlining { + def sixtyFour: Double = A.power(2.0, 6) // cause a suspension in inlining +} diff --git a/sbt-test/pipelining/pipelining-scala-macro/a/src/main/scala/a/ASuspendTyper.scala b/sbt-test/pipelining/pipelining-scala-macro/a/src/main/scala/a/ASuspendTyper.scala new file mode 100644 index 000000000000..2af5139b30bc --- /dev/null +++ b/sbt-test/pipelining/pipelining-scala-macro/a/src/main/scala/a/ASuspendTyper.scala @@ -0,0 +1,5 @@ +package a + +object ASuspendTyper { + def thirtyTwo: Double = A.transparentPower(2.0, 5) // cause a suspension in typer +} diff --git a/sbt-test/pipelining/pipelining-scala-macro/b/src/main/scala/b/B.scala b/sbt-test/pipelining/pipelining-scala-macro/b/src/main/scala/b/B.scala new file mode 100644 index 000000000000..17f72ddf1644 --- /dev/null +++ b/sbt-test/pipelining/pipelining-scala-macro/b/src/main/scala/b/B.scala @@ -0,0 +1,14 @@ +package b + +import a.A +import a.ASuspendTyper +import a.ASuspendInlining + +object B { + @main def run = + assert(A.power(2.0, 2) == 4.0) + assert(A.power(2.0, 3) == 8.0) + assert(A.power(2.0, 4) == 16.0) + assert(ASuspendTyper.thirtyTwo == 32.0) // check that suspended definition is still available + assert(ASuspendInlining.sixtyFour == 64.0) // check that suspended definition is still available +} diff --git a/sbt-test/pipelining/pipelining-scala-macro/build.sbt b/sbt-test/pipelining/pipelining-scala-macro/build.sbt new file mode 100644 index 000000000000..f8576cdae796 --- /dev/null +++ b/sbt-test/pipelining/pipelining-scala-macro/build.sbt @@ -0,0 +1,56 @@ +ThisBuild / usePipelining := true + +// defines a macro, sbt will not force the early output +// because it will detect macros in the analysis, so b will compile fine, +// see `sbt-test/pipelining/pipelining-scala-macro-fail` for how we can +// force a failure by always forcing early output. 
+lazy val a = project.in(file("a")) + .settings( + scalacOptions += "-Ycheck:all", + scalacOptions += "-Xprint-suspension", + Compile / incOptions := { + val old = (Compile / incOptions).value + val hooks = old.externalHooks + val newHooks = hooks.withExternalLookup( + new sbt.internal.inc.NoopExternalLookup { + @volatile var knownSuspension = false + + def didFindMacros(analysis: xsbti.compile.CompileAnalysis) = { + val foundMacros = analysis.asInstanceOf[sbt.internal.inc.Analysis].apis.internal.values.exists(_.hasMacro) + assert(foundMacros, "expected macros to be found in the analysis.") + foundMacros + } + + // assert on the analysis contents, then mimic sbt's default behaviour: do not force the early output, because macros are found. + override def shouldDoEarlyOutput(analysis: xsbti.compile.CompileAnalysis): Boolean = { + val internalClasses = analysis.asInstanceOf[sbt.internal.inc.Analysis].apis.internal + val a_A = internalClasses.get("a.A") + val a_ASuspendTyper = internalClasses.get("a.ASuspendTyper") + val a_ASuspendInlining = internalClasses.get("a.ASuspendInlining") + assert(a_A.isDefined, s"`a.A` wasn't found.") + + if (!knownSuspension) { + // this callback is called multiple times, so we only want to assert the first time, + // in subsequent runs the suspended definition will be "resumed", so a.ASuspendTyper will be found. + knownSuspension = true + assert(a_ASuspendTyper.isEmpty, s"`a.ASuspendTyper` should have been suspended initially.") + } + + assert(a_ASuspendInlining.isDefined, s"`a.ASuspendInlining` wasn't found.") + + // do what sbt does typically, + // it will not force early output because macros are found + !didFindMacros(analysis) + } + } + ) + old.withExternalHooks(newHooks) + }, + ) + +// uses the macro; sbt is smart enough not to use pipelining flags when the upstream compilation has macros +lazy val b = project.in(file("b")) + .dependsOn(a) + .settings( + scalacOptions += "-Ycheck:all", + ) diff --git a/sbt-test/pipelining/pipelining-scala-macro/project/DottyInjectedPlugin.scala b/sbt-test/pipelining/pipelining-scala-macro/project/DottyInjectedPlugin.scala new file mode 100644 index 000000000000..69f15d168bfc --- /dev/null +++ b/sbt-test/pipelining/pipelining-scala-macro/project/DottyInjectedPlugin.scala @@ -0,0 +1,12 @@ +import sbt._ +import Keys._ + +object DottyInjectedPlugin extends AutoPlugin { + override def requires = plugins.JvmPlugin + override def trigger = allRequirements + + override val projectSettings = Seq( + scalaVersion := sys.props("plugin.scalaVersion"), + scalacOptions += "-source:3.0-migration" + ) +} diff --git a/sbt-test/pipelining/pipelining-scala-macro/test b/sbt-test/pipelining/pipelining-scala-macro/test new file mode 100644 index 000000000000..48a2443830b5 --- /dev/null +++ b/sbt-test/pipelining/pipelining-scala-macro/test @@ -0,0 +1 @@ +> b/run diff --git a/sbt-test/pipelining/pipelining-scala-only/a/src/main/scala/a/A.scala b/sbt-test/pipelining/pipelining-scala-only/a/src/main/scala/a/A.scala new file mode 100644 index 000000000000..4b10db3eb385 --- /dev/null +++ b/sbt-test/pipelining/pipelining-scala-only/a/src/main/scala/a/A.scala @@ -0,0 +1,5 @@ +package a + +object A { + val foo: (1,2,3) = (1,2,3) +} diff --git a/sbt-test/pipelining/pipelining-scala-only/b/src/main/scala/b/B.scala b/sbt-test/pipelining/pipelining-scala-only/b/src/main/scala/b/B.scala new file mode 100644 index 000000000000..971d07d5656d --- /dev/null +++ b/sbt-test/pipelining/pipelining-scala-only/b/src/main/scala/b/B.scala @@ -0,0 +1,12 @@ +package b + +import a.A + +object B { + val b: 2 = A.foo(1)
+ + @main def run = + assert(A.foo(0) == 1) + assert(A.foo(1) == 2) + assert(A.foo(2) == 3) +} diff --git a/sbt-test/pipelining/pipelining-scala-only/build.sbt b/sbt-test/pipelining/pipelining-scala-only/build.sbt new file mode 100644 index 000000000000..16e182e48801 --- /dev/null +++ b/sbt-test/pipelining/pipelining-scala-only/build.sbt @@ -0,0 +1,12 @@ +ThisBuild / usePipelining := true + +lazy val a = project.in(file("a")) + .settings( + scalacOptions += "-Ycheck:all", + ) + +lazy val b = project.in(file("b")) + .dependsOn(a) + .settings( + scalacOptions += "-Ycheck:all", + ) diff --git a/sbt-test/pipelining/pipelining-scala-only/project/DottyInjectedPlugin.scala b/sbt-test/pipelining/pipelining-scala-only/project/DottyInjectedPlugin.scala new file mode 100644 index 000000000000..69f15d168bfc --- /dev/null +++ b/sbt-test/pipelining/pipelining-scala-only/project/DottyInjectedPlugin.scala @@ -0,0 +1,12 @@ +import sbt._ +import Keys._ + +object DottyInjectedPlugin extends AutoPlugin { + override def requires = plugins.JvmPlugin + override def trigger = allRequirements + + override val projectSettings = Seq( + scalaVersion := sys.props("plugin.scalaVersion"), + scalacOptions += "-source:3.0-migration" + ) +} diff --git a/sbt-test/pipelining/pipelining-scala-only/test b/sbt-test/pipelining/pipelining-scala-only/test new file mode 100644 index 000000000000..48a2443830b5 --- /dev/null +++ b/sbt-test/pipelining/pipelining-scala-only/test @@ -0,0 +1 @@ +> b/run From 94162a38e2fa4d90dc848087a85164ef466940c1 Mon Sep 17 00:00:00 2001 From: Jamie Thompson Date: Wed, 8 Nov 2023 17:41:20 +0100 Subject: [PATCH 5/7] fix prediction in ProgressCallbackTest --- .../test/dotty/tools/dotc/sbt/ProgressCallbackTest.scala | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/compiler/test/dotty/tools/dotc/sbt/ProgressCallbackTest.scala b/compiler/test/dotty/tools/dotc/sbt/ProgressCallbackTest.scala index 489dc0f1759c..49fd3ee68d5f 100644 --- a/compiler/test/dotty/tools/dotc/sbt/ProgressCallbackTest.scala +++ b/compiler/test/dotty/tools/dotc/sbt/ProgressCallbackTest.scala @@ -97,7 +97,11 @@ final class ProgressCallbackTest extends DottyTest: locally: // (4) assert that the final progress recorded is at the target phase, // and progress is equal to the number of phases before the target. - val (befores, target +: next +: _) = runnableSubPhases.span(_ != targetPhase): @unchecked + // + // (4.1) extract the real befores by looking at the runnable phases + val (befores, target +: _) = runnableSubPhases.span(_ != targetPhase): @unchecked + // (4.2) extract the predicted next phase by looking at all phases + val (_, `target` +: next +: _) = allSubPhases.span(_ != targetPhase): @unchecked // (4.1) we expect cancellation to occur *as we enter* the target phase, // so no units should be visited in this phase. Therefore progress // should be equal to the number of phases before the target. 
(as we have 1 unit) From 20d635d439c7bb6ff344b7df82d505755ed8fc61 Mon Sep 17 00:00:00 2001 From: Jamie Thompson Date: Fri, 15 Mar 2024 14:22:17 +0100 Subject: [PATCH 6/7] support test scope by ignoring repeated pipelining flags --- .../tools/dotc/config/ScalaSettings.scala | 4 ++-- .../dotty/tools/dotc/config/Settings.scala | 24 +++++++++++++------ .../a/src/main/scala/a/A.scala | 5 ++++ .../a/src/test/scala/a/Hello.scala | 12 ++++++++++ sbt-test/pipelining/pipelining-test/build.sbt | 7 ++++++ .../project/DottyInjectedPlugin.scala | 12 ++++++++++ sbt-test/pipelining/pipelining-test/test | 12 ++++++++++ 7 files changed, 67 insertions(+), 9 deletions(-) create mode 100644 sbt-test/pipelining/pipelining-test/a/src/main/scala/a/A.scala create mode 100644 sbt-test/pipelining/pipelining-test/a/src/test/scala/a/Hello.scala create mode 100644 sbt-test/pipelining/pipelining-test/build.sbt create mode 100644 sbt-test/pipelining/pipelining-test/project/DottyInjectedPlugin.scala create mode 100644 sbt-test/pipelining/pipelining-test/test diff --git a/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala b/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala index 5ac4cf2e5829..2e48ca78258f 100644 --- a/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala +++ b/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala @@ -440,7 +440,7 @@ private sealed trait YSettings: val YdebugMacros: Setting[Boolean] = BooleanSetting(ForkSetting, "Ydebug-macros", "Show debug info when quote pattern match fails") // Pipeline compilation options - val YjavaTasty: Setting[Boolean] = BooleanSetting(ForkSetting, "Yjava-tasty", "Pickler phase should compute TASTy for .java defined symbols for use by build tools", aliases = List("-Ypickle-java")) - val YearlyTastyOutput: Setting[AbstractFile] = OutputSetting(ForkSetting, "Yearly-tasty-output", "directory|jar", "Destination to write generated .tasty files to for use in pipelined compilation.", NoAbstractFile, aliases = List("-Ypickle-write")) + val YjavaTasty: Setting[Boolean] = BooleanSetting(ForkSetting, "Yjava-tasty", "Pickler phase should compute TASTy for .java defined symbols for use by build tools", aliases = List("-Ypickle-java"), preferPrevious = true) + val YearlyTastyOutput: Setting[AbstractFile] = OutputSetting(ForkSetting, "Yearly-tasty-output", "directory|jar", "Destination to write generated .tasty files to for use in pipelined compilation.", NoAbstractFile, aliases = List("-Ypickle-write"), preferPrevious = true) val YallowOutlineFromTasty: Setting[Boolean] = BooleanSetting(ForkSetting, "Yallow-outline-from-tasty", "Allow outline TASTy to be loaded with the -from-tasty option.") end YSettings diff --git a/compiler/src/dotty/tools/dotc/config/Settings.scala b/compiler/src/dotty/tools/dotc/config/Settings.scala index 87760a2a034e..241ab34052a1 100644 --- a/compiler/src/dotty/tools/dotc/config/Settings.scala +++ b/compiler/src/dotty/tools/dotc/config/Settings.scala @@ -79,6 +79,7 @@ object Settings: aliases: List[String] = Nil, depends: List[(Setting[?], Any)] = Nil, ignoreInvalidArgs: Boolean = false, + preferPrevious: Boolean = false, propertyClass: Option[Class[?]] = None, deprecationMsg: Option[String] = None, // kept only for -Ykind-projector option compatibility @@ -125,11 +126,16 @@ object Settings: valueList.filter(current.contains).foreach(s => dangers :+= s"Setting $name set to $s redundantly") current ++ valueList else - if sstate.wasChanged(idx) then dangers :+= s"Flag $name set repeatedly" + if sstate.wasChanged(idx) then + 
assert(!preferPrevious, "should have shortcutted with ignoreValue, side-effect may be present!") + dangers :+= s"Flag $name set repeatedly" value ArgsSummary(updateIn(sstate, valueNew), args, errors, dangers) end update + def ignoreValue(args: List[String]): ArgsSummary = + ArgsSummary(sstate, args, errors, warnings) + def fail(msg: String, args: List[String]) = ArgsSummary(sstate, args, errors :+ msg, warnings) @@ -196,7 +202,8 @@ object Settings: def doSet(argRest: String) = ((summon[ClassTag[T]], args): @unchecked) match { case (BooleanTag, _) => - setBoolean(argRest, args) + if sstate.wasChanged(idx) && preferPrevious then ignoreValue(args) + else setBoolean(argRest, args) case (OptionTag, _) => update(Some(propertyClass.get.getConstructor().newInstance()), args) case (ct, args) => @@ -216,7 +223,10 @@ object Settings: case StringTag => setString(arg, argsLeft) case OutputTag => - setOutput(arg, argsLeft) + if sstate.wasChanged(idx) && preferPrevious then + ignoreValue(argsLeft) // do not risk side effects e.g. overwriting a jar + else + setOutput(arg, argsLeft) case IntTag => setInt(arg, argsLeft) case VersionTag => @@ -333,8 +343,8 @@ object Settings: assert(!name.startsWith("-"), s"Setting $name cannot start with -") "-" + name - def BooleanSetting(category: SettingCategory, name: String, descr: String, initialValue: Boolean = false, aliases: List[String] = Nil): Setting[Boolean] = - publish(Setting(category, prependName(name), descr, initialValue, aliases = aliases)) + def BooleanSetting(category: SettingCategory, name: String, descr: String, initialValue: Boolean = false, aliases: List[String] = Nil, preferPrevious: Boolean = false): Setting[Boolean] = + publish(Setting(category, prependName(name), descr, initialValue, aliases = aliases, preferPrevious = preferPrevious)) def StringSetting(category: SettingCategory, name: String, helpArg: String, descr: String, default: String, aliases: List[String] = Nil): Setting[String] = publish(Setting(category, prependName(name), descr, default, helpArg, aliases = aliases)) @@ -357,8 +367,8 @@ object Settings: def MultiStringSetting(category: SettingCategory, name: String, helpArg: String, descr: String, default: List[String] = Nil, aliases: List[String] = Nil): Setting[List[String]] = publish(Setting(category, prependName(name), descr, default, helpArg, aliases = aliases)) - def OutputSetting(category: SettingCategory, name: String, helpArg: String, descr: String, default: AbstractFile, aliases: List[String] = Nil): Setting[AbstractFile] = - publish(Setting(category, prependName(name), descr, default, helpArg, aliases = aliases)) + def OutputSetting(category: SettingCategory, name: String, helpArg: String, descr: String, default: AbstractFile, aliases: List[String] = Nil, preferPrevious: Boolean = false): Setting[AbstractFile] = + publish(Setting(category, prependName(name), descr, default, helpArg, aliases = aliases, preferPrevious = preferPrevious)) def PathSetting(category: SettingCategory, name: String, descr: String, default: String, aliases: List[String] = Nil): Setting[String] = publish(Setting(category, prependName(name), descr, default, aliases = aliases)) diff --git a/sbt-test/pipelining/pipelining-test/a/src/main/scala/a/A.scala b/sbt-test/pipelining/pipelining-test/a/src/main/scala/a/A.scala new file mode 100644 index 000000000000..4b10db3eb385 --- /dev/null +++ b/sbt-test/pipelining/pipelining-test/a/src/main/scala/a/A.scala @@ -0,0 +1,5 @@ +package a + +object A { + val foo: (1,2,3) = (1,2,3) +} diff --git 
a/sbt-test/pipelining/pipelining-test/a/src/test/scala/a/Hello.scala b/sbt-test/pipelining/pipelining-test/a/src/test/scala/a/Hello.scala new file mode 100644 index 000000000000..1cfa3424bd98 --- /dev/null +++ b/sbt-test/pipelining/pipelining-test/a/src/test/scala/a/Hello.scala @@ -0,0 +1,12 @@ +package a + +import a.A + +import org.junit.Test + +class Hello { + + @Test def test(): Unit = { + assert(A.foo == (1,2,3)) + } +} diff --git a/sbt-test/pipelining/pipelining-test/build.sbt b/sbt-test/pipelining/pipelining-test/build.sbt new file mode 100644 index 000000000000..576ecc793ac6 --- /dev/null +++ b/sbt-test/pipelining/pipelining-test/build.sbt @@ -0,0 +1,7 @@ +ThisBuild / usePipelining := true + +lazy val a = project.in(file("a")) + .settings( + scalacOptions += "-Ycheck:all", + libraryDependencies += "com.novocode" % "junit-interface" % "0.11" % "test", + ) diff --git a/sbt-test/pipelining/pipelining-test/project/DottyInjectedPlugin.scala b/sbt-test/pipelining/pipelining-test/project/DottyInjectedPlugin.scala new file mode 100644 index 000000000000..69f15d168bfc --- /dev/null +++ b/sbt-test/pipelining/pipelining-test/project/DottyInjectedPlugin.scala @@ -0,0 +1,12 @@ +import sbt._ +import Keys._ + +object DottyInjectedPlugin extends AutoPlugin { + override def requires = plugins.JvmPlugin + override def trigger = allRequirements + + override val projectSettings = Seq( + scalaVersion := sys.props("plugin.scalaVersion"), + scalacOptions += "-source:3.0-migration" + ) +} diff --git a/sbt-test/pipelining/pipelining-test/test b/sbt-test/pipelining/pipelining-test/test new file mode 100644 index 000000000000..e2b8e39082b2 --- /dev/null +++ b/sbt-test/pipelining/pipelining-test/test @@ -0,0 +1,12 @@ +# run the tests on a project with pipelining # exercises the fact that -Ypickle-java and -Ypickle-write # flags are set twice. # steps: # - Compile scope is compiled with flags `-Ypickle-java -Ypickle-write early/a-early-7423784.jar` # - sbt copies `early/a-early-7423784.jar` to `early/a-early.jar` # - Test scope is compiled with flags `-Ypickle-java -Ypickle-write early-test/a-early-963232.jar -Ypickle-java -Ypickle-write early/a-early.jar -classpath early/a-early.jar` # i.e. for some reason the classpath has the same `a-early.jar` that # is passed with `-Ypickle-write`. # Therefore we MUST avoid even reading the second `-Ypickle-write` setting, # otherwise we will zero out `a-early.jar`, causing type errors because its contents are blank.
+> a/test From c19b67ed5322b7c40e89a7365ca854c5d22ef917 Mon Sep 17 00:00:00 2001 From: Jamie Thompson Date: Sat, 16 Mar 2024 01:53:40 +0100 Subject: [PATCH 7/7] add test to show incremental compilation works under pipelining --- .../pipelining/pipelining-changes/build.sbt | 27 +++++++++++++++++++ .../pipelining-changes/changes/A1.scala | 5 ++++ .../project/CompileState.scala | 4 +++ .../project/DottyInjectedPlugin.scala | 11 ++++++++ .../src/main/scala/a/A.scala | 5 ++++ .../src/main/scala/a/App.scala | 11 ++++++++ sbt-test/pipelining/pipelining-changes/test | 7 +++++ 7 files changed, 70 insertions(+) create mode 100644 sbt-test/pipelining/pipelining-changes/build.sbt create mode 100644 sbt-test/pipelining/pipelining-changes/changes/A1.scala create mode 100644 sbt-test/pipelining/pipelining-changes/project/CompileState.scala create mode 100644 sbt-test/pipelining/pipelining-changes/project/DottyInjectedPlugin.scala create mode 100644 sbt-test/pipelining/pipelining-changes/src/main/scala/a/A.scala create mode 100644 sbt-test/pipelining/pipelining-changes/src/main/scala/a/App.scala create mode 100644 sbt-test/pipelining/pipelining-changes/test diff --git a/sbt-test/pipelining/pipelining-changes/build.sbt b/sbt-test/pipelining/pipelining-changes/build.sbt new file mode 100644 index 000000000000..630bd4be5b3e --- /dev/null +++ b/sbt-test/pipelining/pipelining-changes/build.sbt @@ -0,0 +1,27 @@ +import sbt.internal.inc.Analysis +import complete.DefaultParsers._ + +ThisBuild / usePipelining := true + +// Reset compiler iterations, necessary because tests run in batch mode +val recordPreviousIterations = taskKey[Unit]("Record previous iterations.") +recordPreviousIterations := { + val log = streams.value.log + CompileState.previousIterations = { + val previousAnalysis = (previousCompile in Compile).value.analysis.asScala + previousAnalysis match { + case None => + log.info("No previous analysis detected") + 0 + case Some(a: Analysis) => a.compilations.allCompilations.size + } + } +} + +val checkIterations = inputKey[Unit]("Verifies the accumulated number of iterations of incremental compilation.") + +checkIterations := { + val expected: Int = (Space ~> NatBasic).parsed + val actual: Int = ((compile in Compile).value match { case a: Analysis => a.compilations.allCompilations.size }) - CompileState.previousIterations + assert(expected == actual, s"Expected $expected compilations, got $actual (previous: ${CompileState.previousIterations})") +} diff --git a/sbt-test/pipelining/pipelining-changes/changes/A1.scala b/sbt-test/pipelining/pipelining-changes/changes/A1.scala new file mode 100644 index 000000000000..db5605e419d1 --- /dev/null +++ b/sbt-test/pipelining/pipelining-changes/changes/A1.scala @@ -0,0 +1,5 @@ +package a + +enum A { + case A, B +} diff --git a/sbt-test/pipelining/pipelining-changes/project/CompileState.scala b/sbt-test/pipelining/pipelining-changes/project/CompileState.scala new file mode 100644 index 000000000000..078db9c7bf56 --- /dev/null +++ b/sbt-test/pipelining/pipelining-changes/project/CompileState.scala @@ -0,0 +1,4 @@ +// This is necessary because tests are run in batch mode +object CompileState { + @volatile var previousIterations: Int = -1 +} diff --git a/sbt-test/pipelining/pipelining-changes/project/DottyInjectedPlugin.scala b/sbt-test/pipelining/pipelining-changes/project/DottyInjectedPlugin.scala new file mode 100644 index 000000000000..1c6c00400f04 --- /dev/null +++ b/sbt-test/pipelining/pipelining-changes/project/DottyInjectedPlugin.scala @@ -0,0 +1,11 @@ +import 
sbt._ +import Keys._ + +object DottyInjectedPlugin extends AutoPlugin { + override def requires = plugins.JvmPlugin + override def trigger = allRequirements + + override val projectSettings = Seq( + scalaVersion := sys.props("plugin.scalaVersion"), + ) +} diff --git a/sbt-test/pipelining/pipelining-changes/src/main/scala/a/A.scala b/sbt-test/pipelining/pipelining-changes/src/main/scala/a/A.scala new file mode 100644 index 000000000000..4a0eec46ec7e --- /dev/null +++ b/sbt-test/pipelining/pipelining-changes/src/main/scala/a/A.scala @@ -0,0 +1,5 @@ +package a + +enum A { + case A +} diff --git a/sbt-test/pipelining/pipelining-changes/src/main/scala/a/App.scala b/sbt-test/pipelining/pipelining-changes/src/main/scala/a/App.scala new file mode 100644 index 000000000000..a9862cea9dc4 --- /dev/null +++ b/sbt-test/pipelining/pipelining-changes/src/main/scala/a/App.scala @@ -0,0 +1,11 @@ +package a + +import scala.deriving.Mirror + +object App { + val m = summon[Mirror.SumOf[a.A]] + def size = compiletime.constValue[Tuple.Size[m.MirroredElemTypes]] + + @main def test = + assert(size == 2, s"Expected size 2, got $size") +} diff --git a/sbt-test/pipelining/pipelining-changes/test b/sbt-test/pipelining/pipelining-changes/test new file mode 100644 index 000000000000..e6fb01d57f5a --- /dev/null +++ b/sbt-test/pipelining/pipelining-changes/test @@ -0,0 +1,7 @@ +# test the interaction of incremental compilation and pipelining > compile > recordPreviousIterations $ copy-file changes/A1.scala src/main/scala/a/A.scala # recompiling A.scala should trigger recompilation of App.scala, otherwise the test assertion will fail > run > checkIterations 2