diff --git a/build.sbt b/build.sbt index 9b01002ed2..55b511fc0c 100644 --- a/build.sbt +++ b/build.sbt @@ -367,6 +367,10 @@ lazy val zincCompileCore = (projectMatrix in internalPath / "zinc-compile-core") ) .settings( name := "zinc Compile Core", + libraryDependencies ++= (scalaVersion.value match { + case v if v.startsWith("2.12.") => List(compilerPlugin(silencerPlugin)) + case _ => List() + }), libraryDependencies ++= Seq( scalaCompiler.value % Test, launcherInterface, @@ -407,18 +411,18 @@ lazy val compilerInterface = (projectMatrix in internalPath / "compiler-interfac mimaSettings, mimaBinaryIssueFilters ++= Seq( // 1.4.0 changed to VirtualFile. These should be internal to Zinc. - exclude[Problem]("xsbti.AnalysisCallback.*"), + exclude[ReversedMissingMethodProblem]("xsbti.AnalysisCallback.*"), exclude[Problem]("xsbti.compile.PerClasspathEntryLookup.*"), exclude[Problem]("xsbti.compile.ExternalHooks*"), exclude[Problem]("xsbti.compile.FileHash.*"), - exclude[Problem]("xsbti.compile.Output.*"), - exclude[Problem]("xsbti.compile.OutputGroup.*"), - exclude[Problem]("xsbti.compile.SingleOutput.*"), - exclude[Problem]("xsbti.compile.MultipleOutput.*"), - exclude[Problem]("xsbti.compile.CachedCompiler.*"), + exclude[ReversedMissingMethodProblem]("xsbti.compile.Output.*"), + exclude[ReversedMissingMethodProblem]("xsbti.compile.OutputGroup.*"), + exclude[ReversedMissingMethodProblem]("xsbti.compile.SingleOutput.*"), + exclude[ReversedMissingMethodProblem]("xsbti.compile.MultipleOutput.*"), + exclude[ReversedMissingMethodProblem]("xsbti.compile.CachedCompiler.*"), exclude[Problem]("xsbti.compile.ClassFileManager.*"), exclude[Problem]("xsbti.compile.WrappedClassFileManager.*"), - exclude[Problem]("xsbti.compile.DependencyChanges.*"), + exclude[ReversedMissingMethodProblem]("xsbti.compile.DependencyChanges.*"), exclude[Problem]("xsbti.compile.ScalaCompiler.*"), exclude[Problem]("xsbti.compile.JavaTool.*"), exclude[Problem]("xsbti.compile.JavaTool.*"), diff --git a/internal/compiler-bridge-test/src/test/scala/sbt/internal/inc/CompilingSpecification.scala b/internal/compiler-bridge-test/src/test/scala/sbt/internal/inc/CompilingSpecification.scala index ed9fd32dc2..d54e1461bd 100644 --- a/internal/compiler-bridge-test/src/test/scala/sbt/internal/inc/CompilingSpecification.scala +++ b/internal/compiler-bridge-test/src/test/scala/sbt/internal/inc/CompilingSpecification.scala @@ -5,6 +5,7 @@ package inc import java.io.File import java.net.URLClassLoader import java.nio.file.{ Files, Path, Paths } +import java.util.Optional import sbt.internal.inc.classpath.ClassLoaderCache import sbt.io.IO @@ -12,7 +13,7 @@ import sbt.io.syntax._ import xsbti.compile._ import sbt.util.Logger import xsbti.TestCallback.ExtractedClassDependencies -import xsbti.{ TestCallback, UseScope, VirtualFile, VirtualFileRef } +import xsbti.{ ReporterUtil, TestCallback, UseScope, VirtualFile, VirtualFileRef } import xsbti.api.ClassLike import xsbti.api.DependencyContext._ @@ -199,18 +200,25 @@ trait CompilingSpecification extends BridgeProviderSpecification { val cp = (si.allJars ++ Array(targetDir)).map(_.toPath).map(converter.toVirtualFile) val emptyChanges: DependencyChanges = new DependencyChanges { override val modifiedLibraries = new Array[VirtualFileRef](0) + override val modifiedBinaries = new Array[File](0) override val modifiedClasses = new Array[String](0) override def isEmpty = true } - sc.apply( + val compArgs = new CompilerArguments(si, sc.classpathOptions) + val arguments = compArgs.makeArguments(Nil, cp, Nil) + val 
basicReporterConfig = ReporterUtil.getDefaultReporterConfig() + val reporterConfig = basicReporterConfig.withMaximumErrors(maxErrors) + val reporter = ReporterManager.getReporter(log, reporterConfig) + sc.compile( sources = sources, + converter = converter, changes = emptyChanges, - classpath = cp, - singleOutput = targetDir.toPath, - options = Array(), + options = arguments.toArray, + output = CompileOutput(targetDir.toPath), callback = analysisCallback, - maximumErrors = maxErrors, + reporter = reporter, cache = cache, + progressOpt = Optional.empty[CompileProgress], log = log ) srcFiles diff --git a/internal/compiler-bridge/src/main/scala/xsbt/CallbackGlobal.scala b/internal/compiler-bridge/src/main/scala/xsbt/CallbackGlobal.scala index 42091b2525..170b462f6c 100644 --- a/internal/compiler-bridge/src/main/scala/xsbt/CallbackGlobal.scala +++ b/internal/compiler-bridge/src/main/scala/xsbt/CallbackGlobal.scala @@ -39,9 +39,9 @@ sealed abstract class CallbackGlobal( lazy val outputDirs: Iterable[Path] = { output match { - case single: SingleOutput => List(single.getOutputDirectory) + case single: SingleOutput => List(single.getOutputDirectoryAsPath) // Use Stream instead of List because Analyzer maps intensively over the directories - case multi: MultipleOutput => multi.getOutputGroups.toStream map (_.getOutputDirectory) + case multi: MultipleOutput => multi.getOutputGroups.toStream map (_.getOutputDirectoryAsPath) } } diff --git a/internal/compiler-bridge/src/main/scala/xsbt/CompilerInterface.scala b/internal/compiler-bridge/src/main/scala/xsbt/CompilerInterface.scala index a97abcba41..f5222f73ab 100644 --- a/internal/compiler-bridge/src/main/scala/xsbt/CompilerInterface.scala +++ b/internal/compiler-bridge/src/main/scala/xsbt/CompilerInterface.scala @@ -19,23 +19,26 @@ import scala.reflect.io.AbstractFile import Log.debug import java.io.File -final class CompilerInterface { - def newCompiler( +/** + * This is the entry point for the compiler bridge (implementation of CompilerInterface) + */ +final class CompilerInterface extends CompilerInterface2 { + override def newCompiler( options: Array[String], output: Output, initialLog: Logger, initialDelegate: Reporter - ): CachedCompiler = + ): CachedCompiler2 = new CachedCompiler0(options, output, new WeakLog(initialLog, initialDelegate)) - def run( + override def run( sources: Array[VirtualFile], changes: DependencyChanges, callback: AnalysisCallback, log: Logger, delegate: Reporter, progress: CompileProgress, - cached: CachedCompiler + cached: CachedCompiler2 ): Unit = cached.run(sources, changes, callback, log, delegate, progress) } @@ -63,7 +66,7 @@ private final class WeakLog(private[this] var log: Logger, private[this] var del } private final class CachedCompiler0(args: Array[String], output: Output, initialLog: WeakLog) - extends CachedCompiler + extends CachedCompiler2 with CachedCompilerCompat with java.io.Closeable { @@ -77,11 +80,11 @@ private final class CachedCompiler0(args: Array[String], output: Output, initial for (out <- multi.getOutputGroups) settings.outputDirs .add( - out.getSourceDirectory.toAbsolutePath.toString, - out.getOutputDirectory.toAbsolutePath.toString + out.getSourceDirectoryAsPath.toAbsolutePath.toString, + out.getOutputDirectoryAsPath.toAbsolutePath.toString ) case single: SingleOutput => - val outputFilepath = single.getOutputDirectory.toAbsolutePath + val outputFilepath = single.getOutputDirectoryAsPath.toAbsolutePath settings.outputDirs.setSingleOutput(outputFilepath.toString) } @@ -115,18 +118,41 @@ private 
final class CachedCompiler0(args: Array[String], output: Output, initial def infoOnCachedCompiler(compilerId: String): String = s"[zinc] Running cached compiler $compilerId for Scala compiler $versionString" - def run( + override def run( + sources: Array[File], + changes: DependencyChanges, + callback: AnalysisCallback, + log: Logger, + delegate: Reporter, + progress: CompileProgress + ): Unit = { + val srcs = sources.toList.map(AbstractFile.getFile(_)).sortBy(_.path) + doRun(srcs, callback, log, delegate, progress) + } + + override def run( sources: Array[VirtualFile], changes: DependencyChanges, callback: AnalysisCallback, log: Logger, delegate: Reporter, progress: CompileProgress + ): Unit = { + val srcs = sources.toList.map(AbstractZincFile(_)).sortBy(_.underlying.id) + doRun(srcs, callback, log, delegate, progress) + } + + private[this] def doRun( + sources: List[AbstractFile], + callback: AnalysisCallback, + log: Logger, + delegate: Reporter, + progress: CompileProgress ): Unit = synchronized { debug(log, infoOnCachedCompiler(hashCode().toLong.toHexString)) val dreporter = DelegatingReporter(settings, delegate) try { - run(sources.toList, changes, callback, log, dreporter, progress) + run(sources, callback, log, dreporter, progress) } finally { dreporter.dropDelegate() } @@ -134,10 +160,11 @@ private final class CachedCompiler0(args: Array[String], output: Output, initial private def prettyPrintCompilationArguments(args: Array[String]) = args.mkString("[zinc] The Scala compiler is invoked with:\n\t", "\n\t", "") + private val StopInfoError = "Compiler option supplied that disabled Zinc compilation." + private[this] def run( - sources: List[VirtualFile], - changes: DependencyChanges, + sources: List[AbstractFile], callback: AnalysisCallback, log: Logger, underlyingReporter: DelegatingReporter, @@ -154,10 +181,7 @@ private final class CachedCompiler0(args: Array[String], output: Output, initial compiler.set(callback, underlyingReporter) val run = new compiler.ZincRun(compileProgress) - val wrappedFiles = sources.map(AbstractZincFile(_)) - val sortedSourceFiles: List[AbstractFile] = - wrappedFiles.sortWith(_.underlying.id < _.underlying.id) - run.compileFiles(sortedSourceFiles) + run.compileFiles(sources) processUnreportedWarnings(run) underlyingReporter.problems.foreach( p => callback.problem(p.category, p.position, p.message, p.severity, true) diff --git a/internal/compiler-bridge/src/main/scala/xsbt/ScaladocInterface.scala b/internal/compiler-bridge/src/main/scala/xsbt/ScaladocInterface.scala index f853561d06..dd512d8abd 100644 --- a/internal/compiler-bridge/src/main/scala/xsbt/ScaladocInterface.scala +++ b/internal/compiler-bridge/src/main/scala/xsbt/ScaladocInterface.scala @@ -15,7 +15,7 @@ import xsbti.{ Logger, VirtualFile } import scala.reflect.io.AbstractFile import Log.debug -class ScaladocInterface { +class ScaladocInterface extends xsbti.compile.ScaladocInterface2 { def run(sources: Array[VirtualFile], args: Array[String], log: Logger, delegate: xsbti.Reporter) = (new Runner(sources, args, log, delegate)).run } diff --git a/internal/compiler-interface/src/main/java/xsbti/AnalysisCallback.java b/internal/compiler-interface/src/main/java/xsbti/AnalysisCallback.java index 525e7ad461..e4c2096098 100644 --- a/internal/compiler-interface/src/main/java/xsbti/AnalysisCallback.java +++ b/internal/compiler-interface/src/main/java/xsbti/AnalysisCallback.java @@ -18,6 +18,14 @@ import java.util.EnumSet; public interface AnalysisCallback { + + /** + * This is kept around for sbt-dotty. 
+ * @deprecated Use @link{#startSource(VirtualFile)} instead. + */ + @Deprecated + void startSource(File source); + /** * Set the source file mapped to a concrete {@link AnalysisCallback}. * @param source Source file mapped to this instance of {@link AnalysisCallback}. @@ -45,6 +53,17 @@ void classDependency(String onClassName, String sourceClassName, DependencyContext context); + /** + * This is kept around for sbt-dotty. + * @deprecated Use @link{#binaryDependency(Path, String, String, VirtualFileRef, DependencyContext)} instead. + */ + @Deprecated + void binaryDependency(File onBinaryEntry, + String onBinaryClassName, + String fromClassName, + File fromSourceFile, + DependencyContext context); + /** * Indicate that the class fromClassName depends on a class * named onBinaryClassName coming from class file or jar @@ -77,6 +96,15 @@ void binaryDependency(Path onBinaryEntry, VirtualFileRef fromSourceFile, DependencyContext context); + /** + * This is kept around for sbt-dotty. + * @deprecated Use @link{#generatedNonLocalClass(VirtualFileRef, Path, String, String)} instead. + */ + @Deprecated + void generatedNonLocalClass(File source, + File classFile, + String binaryClassName, + String srcClassName); /** * Map the source class name (srcClassName) of a top-level * Scala class coming from a given source file to a binary class name @@ -99,6 +127,13 @@ void generatedNonLocalClass(VirtualFileRef source, String binaryClassName, String srcClassName); + /** + * This is kept around for sbt-dotty. + * @deprecated Use @link{#generatedLocalClass(VirtualFileRef, Path)} instead. + */ + @Deprecated + void generatedLocalClass(File source, File classFile); + /** * Map the product relation between classFile and * source to indicate that classFile is the @@ -109,6 +144,13 @@ void generatedNonLocalClass(VirtualFileRef source, */ void generatedLocalClass(VirtualFileRef source, Path classFile); + /** + * This is kept around for sbt-dotty. + * @deprecated Use @link{#api(VirtualFileRef, xsbti.api.ClassLike)} instead. + */ + @Deprecated + void api(File sourceFile, xsbti.api.ClassLike classApi); + /** * Register a public API entry coming from a given source file. * @@ -117,6 +159,13 @@ void generatedNonLocalClass(VirtualFileRef source, */ void api(VirtualFileRef sourceFile, xsbti.api.ClassLike classApi); + /** + * This is kept around for sbt-dotty. + * @deprecated Use @link{#mainClass(VirtualFileRef, String)} instead. + */ + @Deprecated + void mainClass(File sourceFile, String className); + /** * Register a class containing an entry point coming from a given source file. * diff --git a/internal/compiler-interface/src/main/java/xsbti/compile/CachedCompiler.java b/internal/compiler-interface/src/main/java/xsbti/compile/CachedCompiler.java index 53c3f501cb..64707d1940 100644 --- a/internal/compiler-interface/src/main/java/xsbti/compile/CachedCompiler.java +++ b/internal/compiler-interface/src/main/java/xsbti/compile/CachedCompiler.java @@ -14,7 +14,6 @@ import xsbti.AnalysisCallback; import xsbti.Logger; import xsbti.Reporter; -import xsbti.VirtualFile; import java.io.File; /** @@ -22,28 +21,31 @@ * * This cached compiler hides the implementation of a compiler by just * defining two operations: {@link #commandArguments(File[])} and - * + * {@link #run(File[], DependencyChanges, AnalysisCallback, Logger, Reporter, CompileProgress)}. 
*/ public interface CachedCompiler { - /** - * Return an array of arguments that represent a command-line like - * equivalent of a call to the Scala compiler, but without the command itself. - * - * @param sources The source files that the compiler must compile. - * - * @return The array of arguments of the Scala compiler. - */ - String[] commandArguments(File[] sources); + /** + * * Return an array of arguments that represent a command-line like + * equivalent of a call to the Scala compiler, but without the command itself. + * + * @param sources The source files that the compiler must compile. + * + * @return The array of arguments of the Scala compiler. + */ + String[] commandArguments(File[] sources); - /** - * Run the cached Scala compiler with inputs of incremental compilation. - * - * @param sources The source files to be compiled. - * @param changes The changes that have occurred since last compilation. - * @param callback The callback injected by the incremental compiler. - * @param logger The logger of the incremental compilation. - * @param delegate The reporter that informs on the compiler's output. - * @param progress The compiler progress associated with a Scala compiler. - */ - void run(VirtualFile[] sources, DependencyChanges changes, AnalysisCallback callback, Logger logger, Reporter delegate, CompileProgress progress); + /** + * Run the cached Scala compiler with inputs of incremental compilation. + * + * @param sources The source files to be compiled. + * @param changes The changes that have occurred since last compilation. + * @param callback The callback injected by the incremental compiler. + * @param logger The logger of the incremental compilation. + * @param delegate The reporter that informs on the compiler's output. + * @param progress The compiler progress associated with a Scala compiler. + * @deprecated Use CachedCompiler2#run with VirtualFile instead. + */ + @Deprecated + void run(File[] sources, DependencyChanges changes, AnalysisCallback callback, Logger logger, Reporter delegate, CompileProgress progress); } + diff --git a/internal/compiler-interface/src/main/java/xsbti/compile/CachedCompiler2.java b/internal/compiler-interface/src/main/java/xsbti/compile/CachedCompiler2.java new file mode 100644 index 0000000000..8625194330 --- /dev/null +++ b/internal/compiler-interface/src/main/java/xsbti/compile/CachedCompiler2.java @@ -0,0 +1,40 @@ +/* + * Zinc - The incremental compiler for Scala. + * Copyright Lightbend, Inc. and Mark Harrah + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package xsbti.compile; + +import xsbti.AnalysisCallback; +import xsbti.Logger; +import xsbti.Reporter; +import xsbti.VirtualFile; + +import java.io.Closeable; + +/** + * Define the interface of a cached Scala compiler that can be run. + * + * This cached compiler hides the implementation of a compiler by just + * defining one operation: + * {@link #run(VirtualFile[], DependencyChanges, AnalysisCallback, Logger, Reporter, CompileProgress)}. + */ +public interface CachedCompiler2 extends CachedCompiler, Closeable { + /** + * Run the cached Scala compiler with inputs of incremental compilation. + * + * @param sources The source files to be compiled. + * @param changes The changes that have occurred since last compilation. + * @param callback The callback injected by the incremental compiler. 
+ * @param logger The logger of the incremental compilation. + * @param delegate The reporter that informs on the compiler's output. + * @param progress The compiler progress associated with a Scala compiler. + */ + void run(VirtualFile[] sources, DependencyChanges changes, AnalysisCallback callback, Logger logger, Reporter delegate, CompileProgress progress); +} diff --git a/internal/compiler-interface/src/main/java/xsbti/compile/CompilerInterface1.java b/internal/compiler-interface/src/main/java/xsbti/compile/CompilerInterface1.java new file mode 100644 index 0000000000..12c3a9dc49 --- /dev/null +++ b/internal/compiler-interface/src/main/java/xsbti/compile/CompilerInterface1.java @@ -0,0 +1,39 @@ +/* + * Zinc - The incremental compiler for Scala. + * Copyright Lightbend, Inc. and Mark Harrah + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package xsbti.compile; + +import java.io.File; +import xsbti.AnalysisCallback; +import xsbti.Logger; +import xsbti.Reporter; +import xsbti.VirtualFile; + +/** Compiler interface as of Zinc 1.2.0. */ +public interface CompilerInterface1 { + CachedCompiler newCompiler( + String[] options, + Output output, + Logger initialLog, + Reporter initialDelegate + ); + + void run( + File[] sources, + DependencyChanges changes, + AnalysisCallback callback, + Logger log, + Reporter delegate, + CompileProgress progress, + CachedCompiler cached + ); +} + diff --git a/internal/compiler-interface/src/main/java/xsbti/compile/CompilerInterface2.java b/internal/compiler-interface/src/main/java/xsbti/compile/CompilerInterface2.java new file mode 100644 index 0000000000..b4e9d388f3 --- /dev/null +++ b/internal/compiler-interface/src/main/java/xsbti/compile/CompilerInterface2.java @@ -0,0 +1,37 @@ +/* + * Zinc - The incremental compiler for Scala. + * Copyright Lightbend, Inc. and Mark Harrah + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package xsbti.compile; + +import xsbti.AnalysisCallback; +import xsbti.Logger; +import xsbti.Reporter; +import xsbti.VirtualFile; + +public interface CompilerInterface2 { + CachedCompiler2 newCompiler( + String[] options, + Output output, + Logger initialLog, + Reporter initialDelegate + ); + + void run( + VirtualFile[] sources, + DependencyChanges changes, + AnalysisCallback callback, + Logger log, + Reporter delegate, + CompileProgress progress, + CachedCompiler2 cached + ); +} + diff --git a/internal/compiler-interface/src/main/java/xsbti/compile/ConsoleInterface1.java b/internal/compiler-interface/src/main/java/xsbti/compile/ConsoleInterface1.java new file mode 100644 index 0000000000..8f3ae3d04e --- /dev/null +++ b/internal/compiler-interface/src/main/java/xsbti/compile/ConsoleInterface1.java @@ -0,0 +1,33 @@ +/* + * Zinc - The incremental compiler for Scala. + * Copyright Lightbend, Inc. and Mark Harrah + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package xsbti.compile; + +import xsbti.Logger; +import xsbti.Reporter; + +/** Console Interface as of Zinc 1.2.0. 
*/ +public interface ConsoleInterface1 { + void run( + String[] args, + String bootClasspathString, + String classpathString, + String initialCommands, + String cleanupCommands, + ClassLoader loader, + String[] bindNames, + Object[] bindValues, + Logger log); + + String[] commandArguments( + String[] args, String bootClasspathString, String classpathString, Logger log); +} + diff --git a/internal/compiler-interface/src/main/java/xsbti/compile/DependencyChanges.java b/internal/compiler-interface/src/main/java/xsbti/compile/DependencyChanges.java index 7b55860c54..370af8d798 100644 --- a/internal/compiler-interface/src/main/java/xsbti/compile/DependencyChanges.java +++ b/internal/compiler-interface/src/main/java/xsbti/compile/DependencyChanges.java @@ -11,24 +11,34 @@ package xsbti.compile; +import java.io.File; import xsbti.VirtualFileRef; /** * Define the changes that can occur to the dependencies of a given compilation run. */ public interface DependencyChanges { - /** Check whether there have been any change in the compilation dependencies. */ - boolean isEmpty(); + /** Check whether there have been any change in the compilation dependencies. */ + boolean isEmpty(); - /** - * Return the modified binaries since the last compilation run. - * These modified binaries are either class files or jar files. - */ - VirtualFileRef[] modifiedLibraries(); + /** + * Return the modified binaries since the last compilation run. + * These modified binaries are either class files or jar files. + * @deprecated Use @link{#modifiedLibraries()} instead. + */ + @Deprecated + File[] modifiedBinaries(); - /** - * Return the modified class names since the last compilation run. + /** + * Return the modified binaries since the last compilation run. + * These modified binaries are either class files or jar files. + */ + VirtualFileRef[] modifiedLibraries(); + + /** + * Return the modified class names since the last compilation run. * These class names are mapped to sources and not binaries. 
- */ - String[] modifiedClasses(); -} \ No newline at end of file + */ + String[] modifiedClasses(); +} + diff --git a/internal/compiler-interface/src/main/java/xsbti/compile/MultipleOutput.java b/internal/compiler-interface/src/main/java/xsbti/compile/MultipleOutput.java index cbae48ce57..c51553feb6 100755 --- a/internal/compiler-interface/src/main/java/xsbti/compile/MultipleOutput.java +++ b/internal/compiler-interface/src/main/java/xsbti/compile/MultipleOutput.java @@ -11,6 +11,7 @@ package xsbti.compile; +import java.io.File; import java.nio.file.Path; import java.util.Optional; @@ -30,7 +31,12 @@ public interface MultipleOutput extends Output { public OutputGroup[] getOutputGroups(); @Override - public default Optional getSingleOutput() { + public default Optional getSingleOutput() { + return Optional.empty(); + } + + @Override + public default Optional getSingleOutputAsPath() { return Optional.empty(); } @@ -38,4 +44,4 @@ public default Optional getSingleOutput() { public default Optional getMultipleOutput() { return Optional.of(getOutputGroups()); } -} \ No newline at end of file +} diff --git a/internal/compiler-interface/src/main/java/xsbti/compile/Output.java b/internal/compiler-interface/src/main/java/xsbti/compile/Output.java index 8c6bc349ba..69e46bb1da 100755 --- a/internal/compiler-interface/src/main/java/xsbti/compile/Output.java +++ b/internal/compiler-interface/src/main/java/xsbti/compile/Output.java @@ -12,6 +12,7 @@ package xsbti.compile; import java.nio.file.Path; +import java.io.File; import java.io.Serializable; import java.util.Optional; @@ -46,6 +47,17 @@ public interface Output extends Serializable { * If multiple outputs are used, it returns {@link java.util.Optional#EMPTY}. * * @see xsbti.compile.SingleOutput + * @deprecated use {@link #getSingleOutputAsPath()} instead. */ - public Optional getSingleOutput(); + @Deprecated + public Optional getSingleOutput(); + + /** + * Returns the single output passed or to be passed to the Scala or Java compiler. + * If multiple outputs are used, it returns {@link java.util.Optional#EMPTY}. + * + * @see xsbti.compile.SingleOutput + */ + public Optional getSingleOutputAsPath(); } + diff --git a/internal/compiler-interface/src/main/java/xsbti/compile/OutputGroup.java b/internal/compiler-interface/src/main/java/xsbti/compile/OutputGroup.java index b054787ceb..6dcca12a8c 100644 --- a/internal/compiler-interface/src/main/java/xsbti/compile/OutputGroup.java +++ b/internal/compiler-interface/src/main/java/xsbti/compile/OutputGroup.java @@ -11,6 +11,7 @@ package xsbti.compile; +import java.io.File; import java.nio.file.Path; import java.io.Serializable; @@ -23,9 +24,22 @@ public interface OutputGroup extends Serializable { *
<p>
* Note that source directories should uniquely identify the group * for a certain source file. + * + * @deprecated use {@link #getSourceDirectoryAsPath()} instead. */ - public Path getSourceDirectory(); + @Deprecated + public File getSourceDirectory(); + /** + * Return the directory where source files are stored for this group. + *
<p>
+ * Note that source directories should uniquely identify the group + * for a certain source file. + */ + public default Path getSourceDirectoryAsPath() { + return getSourceDirectory().toPath(); + } + /** + * Return the directory where class files should be generated. + *
<p>
@@ -35,5 +49,20 @@ public interface OutputGroup extends Serializable { *
<p>
* This directory must be exclusively used for one set of sources. */ - public Path getOutputDirectory(); + @Deprecated + public File getOutputDirectory(); + + /** + * Return the directory where class files should be generated. + *
<p>
+ * Incremental compilation manages the class files in this directory, so + * don't play with this directory out of the Zinc API. Zinc already takes + * care of deleting classes before every compilation run. + *
<p>
+ * This directory must be exclusively used for one set of sources. + */ + public default Path getOutputDirectoryAsPath() { + return getOutputDirectory().toPath(); + } } + diff --git a/internal/compiler-interface/src/main/java/xsbti/compile/ScalaCompiler.java b/internal/compiler-interface/src/main/java/xsbti/compile/ScalaCompiler.java index a4a482beee..342a9d216f 100644 --- a/internal/compiler-interface/src/main/java/xsbti/compile/ScalaCompiler.java +++ b/internal/compiler-interface/src/main/java/xsbti/compile/ScalaCompiler.java @@ -12,6 +12,7 @@ package xsbti.compile; import xsbti.AnalysisCallback; +import xsbti.FileConverter; import xsbti.Logger; import xsbti.Reporter; import xsbti.VirtualFile; @@ -32,28 +33,6 @@ public interface ScalaCompiler { */ ClasspathOptions classpathOptions(); - /** - * Recompile the subset of sources impacted by the - * changes defined in changes and collect the new APIs. - * - * @param sources All the sources of the project. - * @param changes The changes that have been detected at the previous step. - * @param callback The callback to which the extracted information should be - * reported. - * @param log The logger in which the Scala compiler will log info. - * @param reporter The reporter to which errors and warnings should be - * reported during compilation. - * @param progress Where to report the file being currently compiled. - * @param compiler The actual compiler that will perform the compilation step. - */ - void compile(VirtualFile[] sources, - DependencyChanges changes, - AnalysisCallback callback, - Logger log, - Reporter reporter, - CompileProgress progress, - CachedCompiler compiler); - /** * Recompile the subset of sources impacted by the * changes defined in changes and collect the new APIs. @@ -73,12 +52,14 @@ void compile(VirtualFile[] sources, * will report on the file being compiled. */ void compile(VirtualFile[] sources, + FileConverter converter, DependencyChanges changes, String[] options, Output output, AnalysisCallback callback, Reporter reporter, GlobalsCache cache, - Logger log, - Optional progressOpt); + Optional progressOpt, + Logger log); } + diff --git a/internal/compiler-interface/src/main/java/xsbti/compile/ScaladocInterface1.java b/internal/compiler-interface/src/main/java/xsbti/compile/ScaladocInterface1.java new file mode 100644 index 0000000000..e0f8be1224 --- /dev/null +++ b/internal/compiler-interface/src/main/java/xsbti/compile/ScaladocInterface1.java @@ -0,0 +1,21 @@ +/* + * Zinc - The incremental compiler for Scala. + * Copyright Lightbend, Inc. and Mark Harrah + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package xsbti.compile; + +import xsbti.Logger; +import xsbti.Reporter; + +/** Scaladoc Interface as of Zinc 1.2.0. */ +public interface ScaladocInterface1 { + void run(String[] args, Logger log, Reporter delegate); +} + diff --git a/internal/compiler-interface/src/main/java/xsbti/compile/ScaladocInterface2.java b/internal/compiler-interface/src/main/java/xsbti/compile/ScaladocInterface2.java new file mode 100644 index 0000000000..0360322e1c --- /dev/null +++ b/internal/compiler-interface/src/main/java/xsbti/compile/ScaladocInterface2.java @@ -0,0 +1,22 @@ +/* + * Zinc - The incremental compiler for Scala. + * Copyright Lightbend, Inc. and Mark Harrah + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). 
+ * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package xsbti.compile; + +import xsbti.Logger; +import xsbti.Reporter; +import xsbti.VirtualFile; + +/** Scaladoc Interface as of Zinc 1.4.0. */ +public interface ScaladocInterface2 { + void run(VirtualFile[] sources, String[] args, Logger log, Reporter delegate); +} + diff --git a/internal/compiler-interface/src/main/java/xsbti/compile/SingleOutput.java b/internal/compiler-interface/src/main/java/xsbti/compile/SingleOutput.java index 454e13c77b..ad4e21187b 100755 --- a/internal/compiler-interface/src/main/java/xsbti/compile/SingleOutput.java +++ b/internal/compiler-interface/src/main/java/xsbti/compile/SingleOutput.java @@ -11,6 +11,7 @@ package xsbti.compile; +import java.io.File; import java.nio.file.Path; import java.util.Optional; @@ -29,16 +30,40 @@ public interface SingleOutput extends Output { * of deleting classes before every compilation run. *
<p>
* This file or directory must be exclusively used for one set of sources. + * + * @deprecated use {@link #getOutputDirectoryAsPath()} instead. */ - public Path getOutputDirectory(); + @Deprecated + public File getOutputDirectory(); + + /** + * Return the **directory or jar** where class files should be generated + * and written to. The method name is a misnomer since it can return a + * jar file when straight-to-jar compilation is enabled. + *
<p>
+ * Incremental compilation manages the class files in this file, so don't + * play with this directory out of the Zinc API. Zinc already takes care + * of deleting classes before every compilation run. + *
<p>
+ * This file or directory must be exclusively used for one set of sources. + */ + public default Path getOutputDirectoryAsPath() { + return getOutputDirectory().toPath(); + } @Override - public default Optional getSingleOutput() { + public default Optional getSingleOutput() { return Optional.of(getOutputDirectory()); } + @Override + public default Optional getSingleOutputAsPath() { + return Optional.of(getOutputDirectoryAsPath()); + } + @Override public default Optional getMultipleOutput() { return Optional.empty(); } -} \ No newline at end of file +} + diff --git a/internal/zinc-classfile/src/main/scala/sbt/internal/inc/JarUtils.scala b/internal/zinc-classfile/src/main/scala/sbt/internal/inc/JarUtils.scala index 81c10250df..6c7bdac48b 100644 --- a/internal/zinc-classfile/src/main/scala/sbt/internal/inc/JarUtils.scala +++ b/internal/zinc-classfile/src/main/scala/sbt/internal/inc/JarUtils.scala @@ -317,7 +317,7 @@ object JarUtils { def getOutputJar(output: Output): Option[Path] = { output match { case s: SingleOutput => - Some(s.getOutputDirectory).filter(_.toString.endsWith(".jar")) + Some(s.getOutputDirectoryAsPath).filter(_.toString.endsWith(".jar")) case _ => None } } diff --git a/internal/zinc-classfile/src/main/scala/sbt/internal/inc/classfile/JavaAnalyze.scala b/internal/zinc-classfile/src/main/scala/sbt/internal/inc/classfile/JavaAnalyze.scala index 4dfe165a13..47855451e0 100644 --- a/internal/zinc-classfile/src/main/scala/sbt/internal/inc/classfile/JavaAnalyze.scala +++ b/internal/zinc-classfile/src/main/scala/sbt/internal/inc/classfile/JavaAnalyze.scala @@ -45,7 +45,7 @@ private[sbt] object JavaAnalyze { .groupBy(_.name) // For performance reasons, precompute these as they are static throughout this analysis val outputJarOrNull: Path = finalJarOutput.getOrElse(null) - val singleOutputOrNull: Path = output.getSingleOutput.orElse(null) + val singleOutputOrNull: Path = output.getSingleOutputAsPath.orElse(null) def load(tpe: String, errMsg: => Option[String]): Option[Class[_]] = { if (tpe.endsWith("module-info")) None diff --git a/internal/zinc-classfile/src/test/scala/sbt/internal/inc/classfile/JavaCompilerForUnitTesting.scala b/internal/zinc-classfile/src/test/scala/sbt/internal/inc/classfile/JavaCompilerForUnitTesting.scala index 23e4656625..9ea4e47016 100644 --- a/internal/zinc-classfile/src/test/scala/sbt/internal/inc/classfile/JavaCompilerForUnitTesting.scala +++ b/internal/zinc-classfile/src/test/scala/sbt/internal/inc/classfile/JavaCompilerForUnitTesting.scala @@ -87,7 +87,10 @@ object JavaCompilerForUnitTesting { // - extract all base classes. 
// we extract just parents as this is enough for testing - val output = new SingleOutput { def getOutputDirectory: Path = classesDir.toPath } + val output = new SingleOutput { + override def getOutputDirectoryAsPath: Path = classesDir.toPath + override def getOutputDirectory: File = getOutputDirectoryAsPath.toFile + } JavaAnalyze(classFiles, srcFiles, logger, output, finalJarOutput = None)( analysisCallback, classloader, diff --git a/internal/zinc-compile-core/src/main/scala/sbt/internal/inc/AnalyzingCompiler.scala b/internal/zinc-compile-core/src/main/scala/sbt/internal/inc/AnalyzingCompiler.scala index f92ad94be0..5e17ddbfaf 100644 --- a/internal/zinc-compile-core/src/main/scala/sbt/internal/inc/AnalyzingCompiler.scala +++ b/internal/zinc-compile-core/src/main/scala/sbt/internal/inc/AnalyzingCompiler.scala @@ -18,19 +18,21 @@ import java.nio.file.Path import java.net.URLClassLoader import java.util.Optional +import com.github.ghik.silencer.silent import sbt.util.Logger import sbt.io.syntax._ import sbt.internal.inc.classpath.ClassLoaderCache import sbt.internal.util.ManagedLogger import xsbti.{ AnalysisCallback, + FileConverter, PathBasedFile, Reporter, - ReporterUtil, Logger => xLogger, VirtualFile } import xsbti.compile._ +import scala.language.existentials /** * Implement a cached incremental `ScalaCompiler` that has been instrumented @@ -52,6 +54,10 @@ final class AnalyzingCompiler( ) extends CachedCompilerProvider with ScalaCompiler { + private[this] final val compilerBridgeClassName = "xsbt.CompilerInterface" + private[this] final val scaladocBridgeClassName = "xsbt.ScaladocInterface" + private[this] final val consoleBridgeClassName = "xsbt.ConsoleInterface" + def onArgs(f: Seq[String] => Unit): AnalyzingCompiler = new AnalyzingCompiler(scalaInstance, provider, classpathOptions, f, classLoaderCache) @@ -64,42 +70,22 @@ final class AnalyzingCompiler( Some(classLoaderCache) ) - def apply( - sources: Array[VirtualFile], - changes: DependencyChanges, - classpath: Array[VirtualFile], - singleOutput: Path, - options: Array[String], - callback: AnalysisCallback, - maximumErrors: Int, - cache: GlobalsCache, - log: ManagedLogger - ): Unit = { - val compArgs = new CompilerArguments(scalaInstance, classpathOptions) - val arguments = compArgs.makeArguments(Nil, classpath, options) - val output = CompileOutput(singleOutput) - val basicReporterConfig = ReporterUtil.getDefaultReporterConfig() - val reporterConfig = basicReporterConfig.withMaximumErrors(maximumErrors) - val reporter = ReporterManager.getReporter(log, reporterConfig) - val progress = Optional.empty[CompileProgress] - compile(sources, changes, arguments.toArray, output, callback, reporter, cache, log, progress) - } - - def compile( + override def compile( sources: Array[VirtualFile], + converter: FileConverter, changes: DependencyChanges, options: Array[String], output: Output, callback: AnalysisCallback, reporter: Reporter, cache: GlobalsCache, - log: xLogger, - progressOpt: Optional[CompileProgress] + progressOpt: Optional[CompileProgress], + log: xLogger ): Unit = { val cached = cache(options, output, !changes.isEmpty, this, log, reporter) try { val progress = if (progressOpt.isPresent) progressOpt.get else IgnoreProgress - compile(sources, changes, callback, log, reporter, progress, cached) + compile(sources, converter, changes, callback, log, reporter, progress, cached) } finally { cached match { case c: java.io.Closeable => c.close() @@ -110,6 +96,7 @@ final class AnalyzingCompiler( def compile( sources: Array[VirtualFile], + 
converter: FileConverter, changes: DependencyChanges, callback: AnalysisCallback, log: xLogger, @@ -117,57 +104,63 @@ final class AnalyzingCompiler( progress: CompileProgress, compiler: CachedCompiler ): Unit = { - // onArgsHandler(compiler.commandArguments(sources)) - call("xsbt.CompilerInterface", "run", log)( - classOf[Array[VirtualFile]], - classOf[DependencyChanges], - classOf[AnalysisCallback], - classOf[xLogger], - classOf[Reporter], - classOf[CompileProgress], - classOf[CachedCompiler] - )(sources, changes, callback, log, reporter, progress, compiler) + val (bridge, bridgeClass) = bridgeInstance(compilerBridgeClassName, log) + (bridge, compiler) match { + case (intf: CompilerInterface2, compiler2: CachedCompiler2) => + intf.run(sources, changes, callback, log, reporter, progress, compiler2) + case _ => + // fall back to passing File array + val fileSources: Array[File] = sources.map(converter.toPath(_).toFile) + invoke(bridge, bridgeClass, "run", log)( + classOf[Array[File]], + classOf[DependencyChanges], + classOf[AnalysisCallback], + classOf[xLogger], + classOf[Reporter], + classOf[CompileProgress], + classOf[CachedCompiler] + )(fileSources, changes, callback, log, reporter, progress, compiler) + } () } - def newCachedCompiler( + override def newCachedCompiler( arguments: Array[String], output: Output, log: xLogger, reporter: Reporter - ): CachedCompiler = - newCachedCompiler(arguments: Seq[String], output, log, reporter) - - def newCachedCompiler( - arguments: Seq[String], - output: Output, - log: xLogger, - reporter: Reporter ): CachedCompiler = { - val compiler = call("xsbt.CompilerInterface", "newCompiler", log)( - classOf[Array[String]], - classOf[Output], - classOf[xLogger], - classOf[Reporter] - )(arguments.toArray[String], output, log, reporter) - compiler.asInstanceOf[CachedCompiler] + bridgeInstance(compilerBridgeClassName, log) match { + case (intf: CompilerInterface2, _) => + intf.newCompiler(arguments, output, log, reporter) + case (bridge, bridgeClass) => + // fall back to old reflection if CompilerInterface1 is not supported + invoke(bridge, bridgeClass, "newCompiler", log)( + classOf[Array[String]], + classOf[Output], + classOf[xLogger], + classOf[Reporter] + )(arguments, output, log, reporter).asInstanceOf[CachedCompiler]: @silent + } } def doc( sources: Seq[VirtualFile], classpath: Seq[VirtualFile], + converter: FileConverter, outputDirectory: Path, options: Seq[String], maximumErrors: Int, log: ManagedLogger ): Unit = { val reporter = new ManagedLoggedReporter(maximumErrors, log) - doc(sources, classpath, outputDirectory, options, log, reporter) + doc(sources, classpath, converter, outputDirectory, options, log, reporter) } def doc( sources: Seq[VirtualFile], classpath: Seq[VirtualFile], + converter: FileConverter, outputDirectory: Path, options: Seq[String], log: Logger, @@ -177,12 +170,20 @@ final class AnalyzingCompiler( val arguments = compArgs.makeArguments(Nil, classpath, Some(outputDirectory), options) onArgsHandler(arguments) - call("xsbt.ScaladocInterface", "run", log)( - classOf[Array[VirtualFile]], - classOf[Array[String]], - classOf[xLogger], - classOf[Reporter] - )(sources.toArray, arguments.toArray[String], log, reporter) + val (bridge, bridgeClass) = bridgeInstance(scaladocBridgeClassName, log) + bridge match { + case intf: ScaladocInterface2 => + intf.run(sources.toArray, arguments.toArray[String], log, reporter) + case _ => + // fall back to old reflection + val fileSources: Array[File] = sources.toArray.map(converter.toPath(_).toFile) + 
invoke(bridge, bridgeClass, "run", log)( + classOf[Array[File]], + classOf[Array[String]], + classOf[xLogger], + classOf[Reporter] + )(fileSources, arguments.toArray[String], log, reporter) + } () } @@ -195,28 +196,46 @@ final class AnalyzingCompiler( )(loader: Option[ClassLoader] = None, bindings: Seq[(String, Any)] = Nil): Unit = { onArgsHandler(consoleCommandArguments(classpath, options, log)) val (classpathString, bootClasspath) = consoleClasspaths(classpath) - val (names, values) = bindings.unzip - call("xsbt.ConsoleInterface", "run", log)( - classOf[Array[String]], - classOf[String], - classOf[String], - classOf[String], - classOf[String], - classOf[ClassLoader], - classOf[Array[String]], - classOf[Array[Any]], - classOf[xLogger] - )( - options.toArray[String]: Array[String], - bootClasspath, - classpathString, - initialCommands, - cleanupCommands, - loader.orNull, - names.toArray[String], - values.toArray[Any], - log - ) + val (names, values0) = bindings.unzip + val values = values0.toArray[Any].asInstanceOf[Array[AnyRef]] + val (bridge, bridgeClass) = bridgeInstance(consoleBridgeClassName, log) + bridge match { + case intf: ConsoleInterface1 => + intf.run( + options.toArray[String]: Array[String], + bootClasspath, + classpathString, + initialCommands, + cleanupCommands, + loader.orNull, + names.toArray[String], + values, + log + ) + case _ => + // fall back to old reflection if ConsoleInterface1 is not supported + invoke(bridge, bridgeClass, "run", log)( + classOf[Array[String]], + classOf[String], + classOf[String], + classOf[String], + classOf[String], + classOf[ClassLoader], + classOf[Array[String]], + classOf[Array[Any]], + classOf[xLogger] + )( + options.toArray[String]: Array[String], + bootClasspath, + classpathString, + initialCommands, + cleanupCommands, + loader.orNull, + names.toArray[String], + values.toArray[Any], + log + ) + } () } @@ -237,26 +256,31 @@ final class AnalyzingCompiler( log: Logger ): Seq[String] = { val (classpathString, bootClasspath) = consoleClasspaths(classpath) - val argsObj = call("xsbt.ConsoleInterface", "commandArguments", log)( - classOf[Array[String]], - classOf[String], - classOf[String], - classOf[xLogger] - )(options.toArray[String], bootClasspath, classpathString, log) + val (bridge, bridgeClass) = bridgeInstance(consoleBridgeClassName, log) + val argsObj = bridge match { + case intf: ConsoleInterface1 => + intf.commandArguments(options.toArray[String], bootClasspath, classpathString, log) + case _ => + invoke(bridge, bridgeClass, "commandArguments", log)( + classOf[Array[String]], + classOf[String], + classOf[String], + classOf[xLogger] + )(options.toArray[String], bootClasspath, classpathString, log) + } argsObj.asInstanceOf[Array[String]].toSeq } - def force(log: Logger): Unit = { provider.fetchCompiledBridge(scalaInstance, log); () } + private def bridgeInstance(bridgeClassName: String, log: Logger): (AnyRef, Class[_]) = { + val bridgeClass = getBridgeClass(bridgeClassName, log) + (bridgeClass.getDeclaredConstructor().newInstance().asInstanceOf[AnyRef], bridgeClass) + } - private def call( - interfaceClassName: String, - methodName: String, - log: Logger - )(argTypes: Class[_]*)(args: AnyRef*): AnyRef = { - val interfaceClass = getInterfaceClass(interfaceClassName, log) - val interface = interfaceClass.getDeclaredConstructor().newInstance().asInstanceOf[AnyRef] - val method = interfaceClass.getMethod(methodName, argTypes: _*) - try method.invoke(interface, args: _*) + private def invoke(bridge: AnyRef, bridgeClass: Class[_], 
methodName: String, log: Logger)( + argTypes: Class[_]* + )(args: AnyRef*): AnyRef = { + val method = bridgeClass.getMethod(methodName, argTypes: _*) + try method.invoke(bridge, args: _*) catch { case e: InvocationTargetException => e.getCause match { @@ -285,7 +309,7 @@ final class AnalyzingCompiler( } } - private[this] def getInterfaceClass(name: String, log: Logger) = + private[this] def getBridgeClass(name: String, log: Logger) = Class.forName(name, true, loader(log)) protected def createDualLoader( diff --git a/internal/zinc-compile-core/src/main/scala/sbt/internal/inc/CompileOutput.scala b/internal/zinc-compile-core/src/main/scala/sbt/internal/inc/CompileOutput.scala index 4994313ed3..3a541430f2 100755 --- a/internal/zinc-compile-core/src/main/scala/sbt/internal/inc/CompileOutput.scala +++ b/internal/zinc-compile-core/src/main/scala/sbt/internal/inc/CompileOutput.scala @@ -14,6 +14,7 @@ package internal package inc import xsbti.compile.{ Output, OutputGroup } +import java.io.File import java.nio.file.Path import java.util.Optional @@ -53,13 +54,15 @@ object CompileOutput { new ConcreteOutputGroup(source, output) private final class EmptyOutput extends xsbti.compile.Output { - override def getSingleOutput(): Optional[Path] = Optional.empty() + override def getSingleOutput(): Optional[File] = Optional.empty() + override def getSingleOutputAsPath(): Optional[Path] = Optional.empty() override def getMultipleOutput(): Optional[Array[OutputGroup]] = Optional.empty() override def toString: String = "EmptyOutput()" } - private final class ConcreteSingleOutput(val getOutputDirectory: Path) + private final class ConcreteSingleOutput(override val getOutputDirectoryAsPath: Path) extends xsbti.compile.SingleOutput { + override def getOutputDirectory: File = getOutputDirectoryAsPath.toFile override def toString: String = s"SingleOutput($getOutputDirectory)" } @@ -69,9 +72,11 @@ object CompileOutput { } private final class ConcreteOutputGroup( - val getSourceDirectory: Path, - val getOutputDirectory: Path + override val getSourceDirectoryAsPath: Path, + override val getOutputDirectoryAsPath: Path ) extends xsbti.compile.OutputGroup { - override def toString = s"OutputGroup($getSourceDirectory -> $getOutputDirectory)" + override def getSourceDirectory: File = getSourceDirectoryAsPath.toFile + override def getOutputDirectory: File = getOutputDirectoryAsPath.toFile + override def toString = s"OutputGroup($getSourceDirectoryAsPath -> $getOutputDirectoryAsPath)" } } diff --git a/internal/zinc-compile-core/src/main/scala/sbt/internal/inc/CompilerArguments.scala b/internal/zinc-compile-core/src/main/scala/sbt/internal/inc/CompilerArguments.scala index 3dda333549..c7ed65e631 100644 --- a/internal/zinc-compile-core/src/main/scala/sbt/internal/inc/CompilerArguments.scala +++ b/internal/zinc-compile-core/src/main/scala/sbt/internal/inc/CompilerArguments.scala @@ -190,7 +190,7 @@ object CompilerArguments { * make use of it (e.g. the Eclipse compiler does this via EJC). * See https://github.com/sbt/zinc/issues/163. 
*/ val target = output match { - case so: SingleOutput => Some(so.getOutputDirectory) + case so: SingleOutput => Some(so.getOutputDirectoryAsPath) case _: MultipleOutput => None } outputOption(target) diff --git a/internal/zinc-compile-core/src/main/scala/sbt/internal/inc/CompilerCache.scala b/internal/zinc-compile-core/src/main/scala/sbt/internal/inc/CompilerCache.scala index 05aa509352..5b62bf2a4a 100644 --- a/internal/zinc-compile-core/src/main/scala/sbt/internal/inc/CompilerCache.scala +++ b/internal/zinc-compile-core/src/main/scala/sbt/internal/inc/CompilerCache.scala @@ -16,6 +16,7 @@ package inc import java.io.File import java.util +import com.github.ghik.silencer.silent import xsbti.{ AnalysisCallback, Reporter, Logger => xLogger } import xsbti.compile._ import sbt.util.InterfaceUtil.{ toSupplier => f0 } @@ -53,24 +54,49 @@ final class CompilerCache(val maxInstances: Int) extends GlobalsCache { case null => log.debug(f0(s"Compiler cache miss. $key ")) val compiler = c.newCachedCompiler(args, output, log, reporter) - val newCompiler: CachedCompiler = new CachedCompiler with java.io.Closeable { + + class OpenCompiler extends CachedCompiler with java.io.Closeable { override def commandArguments(sources: Array[File]): Array[String] = { compiler.commandArguments(sources) } + + @silent // silence deprecation of run(Array[File], ...) override def run( - sources: Array[VirtualFile], + sources: Array[File], changes: DependencyChanges, callback: AnalysisCallback, logger: xLogger, delegate: Reporter, progress: CompileProgress ): Unit = { + // forward run to underlying cached compiler since it could be created by sbt-dotty compiler.run(sources, changes, callback, logger, delegate, progress) } + override def close(): Unit = { - // Dont' close the underlying Global. + // Don't close the underlying Global. 
} } + + class OpenCompiler2(compiler: CachedCompiler2) extends OpenCompiler with CachedCompiler2 { + override def run( + sources: Array[VirtualFile], + changes: DependencyChanges, + callback: AnalysisCallback, + logger: xLogger, + delegate: Reporter, + progress: CompileProgress + ): Unit = { + compiler + .run(sources, changes, callback, logger, delegate, progress) + } + } + + val newCompiler: CachedCompiler = compiler match { + case compiler: CachedCompiler2 => new OpenCompiler2(compiler) + case _ => new OpenCompiler + } + cache.put(key, newCompiler) newCompiler case cachedCompiler => @@ -93,7 +119,7 @@ final class CompilerCache(val maxInstances: Int) extends GlobalsCache { final class FreshCompilerCache extends GlobalsCache { def clear(): Unit = () - def apply( + override def apply( args: Array[String], output: Output, forceNew: Boolean, diff --git a/internal/zinc-core/src/main/scala/sbt/internal/inc/Analysis.scala b/internal/zinc-core/src/main/scala/sbt/internal/inc/Analysis.scala index 5872d01c7d..7be46d9f33 100644 --- a/internal/zinc-core/src/main/scala/sbt/internal/inc/Analysis.scala +++ b/internal/zinc-core/src/main/scala/sbt/internal/inc/Analysis.scala @@ -14,6 +14,7 @@ package internal package inc import sbt.internal.inc.Analysis.{ LocalProduct, NonLocalProduct } +import java.io.File import java.nio.file.{ Path, Paths } import xsbti.VirtualFileRef @@ -126,7 +127,8 @@ object Analysis { } lazy val dummyOutput: Output = new SingleOutput { - def getOutputDirectory: Path = Paths.get("/tmp/dummy") + override def getOutputDirectoryAsPath: Path = Paths.get("/tmp/dummy") + override def getOutputDirectory: File = getOutputDirectoryAsPath.toFile } } diff --git a/internal/zinc-core/src/main/scala/sbt/internal/inc/Incremental.scala b/internal/zinc-core/src/main/scala/sbt/internal/inc/Incremental.scala index 2828e1c0fb..8f3722eabe 100644 --- a/internal/zinc-core/src/main/scala/sbt/internal/inc/Incremental.scala +++ b/internal/zinc-core/src/main/scala/sbt/internal/inc/Incremental.scala @@ -13,6 +13,7 @@ package sbt package internal package inc +import java.io.File import java.nio.file.Path import java.util.EnumSet import sbt.internal.inc.Analysis.{ LocalProduct, NonLocalProduct } @@ -243,8 +244,10 @@ object Incremental { incremental.detectInitialChanges(sources, previous, current, lookup, converter, output) log.debug(s"> initialChanges = $initialChanges") val binaryChanges = new DependencyChanges { - val modifiedLibraries = initialChanges.libraryDeps.toArray - val modifiedClasses = initialChanges.external.allModified.toArray + override def modifiedBinaries: Array[File] = + modifiedLibraries.map(converter.toPath(_).toFile) + override val modifiedLibraries = initialChanges.libraryDeps.toArray + override val modifiedClasses = initialChanges.external.allModified.toArray def isEmpty = modifiedLibraries.isEmpty && modifiedClasses.isEmpty } val (initialInvClasses, initialInvSources0) = @@ -428,7 +431,8 @@ private final class AnalysisCallback( private[this] val compilation: Compilation = Compilation(compileStartTime, output) private val hooks = options.externalHooks - private val provenance = jo2o(output.getSingleOutput).fold("")(hooks.getProvenance.get(_)).intern + private val provenance = + jo2o(output.getSingleOutputAsPath).fold("")(hooks.getProvenance.get(_)).intern override def toString = (List("Class APIs", "Object APIs", "Library deps", "Products", "Source deps") zip @@ -487,7 +491,8 @@ private final class AnalysisCallback( currentSetup.options.scalacOptions.contains("-Ypickle-java") } - def 
startSource(source: VirtualFile): Unit = { + override def startSource(source: File): Unit = startSource(converter.toVirtualFile(source.toPath)) + override def startSource(source: VirtualFile): Unit = { if (options.strictMode()) { assert( !srcs.contains(source), @@ -540,15 +545,31 @@ private final class AnalysisCallback( add(extSrcDeps, sourceClassName, dependency) } + // Called by sbt-dotty + override def binaryDependency( + classFile: File, + onBinaryClassName: String, + fromClassName: String, + fromSourceFile: File, + context: DependencyContext + ): Unit = + binaryDependency( + classFile.toPath, + onBinaryClassName, + fromClassName, + converter.toVirtualFile(fromSourceFile.toPath), + context + ) + // since the binary at this point could either *.class files or // library JARs, we need to accept Path here. - def binaryDependency( + override def binaryDependency( classFile: Path, onBinaryClassName: String, fromClassName: String, fromSourceFile: VirtualFileRef, context: DependencyContext - ) = + ): Unit = internalBinaryToSourceClassName(onBinaryClassName) match { case Some(dependsOn) => // dependsOn is a source class name // dependency is a product of a source not included in this compilation @@ -593,7 +614,21 @@ private final class AnalysisCallback( } } - def generatedNonLocalClass( + // Called by sbt-dotty + override def generatedNonLocalClass( + source: File, + classFile: File, + binaryClassName: String, + srcClassName: String + ): Unit = + generatedNonLocalClass( + converter.toVirtualFile(source.toPath), + classFile.toPath, + binaryClassName, + srcClassName + ) + + override def generatedNonLocalClass( source: VirtualFileRef, classFile: Path, binaryClassName: String, @@ -607,14 +642,22 @@ private final class AnalysisCallback( () } - def generatedLocalClass(source: VirtualFileRef, classFile: Path): Unit = { + // Called by sbt-dotty + override def generatedLocalClass(source: File, classFile: File): Unit = + generatedLocalClass(converter.toVirtualFile(source.toPath), classFile.toPath) + + override def generatedLocalClass(source: VirtualFileRef, classFile: Path): Unit = { //println(s"Generated local class ${source}, ${classFile}") val vf = converter.toVirtualFile(classFile) add(localClasses, source, vf) () } - def api(sourceFile: VirtualFileRef, classApi: ClassLike): Unit = { + // Called by sbt-dotty + override def api(sourceFile: File, classApi: ClassLike): Unit = + api(converter.toVirtualFile(sourceFile.toPath), classApi) + + override def api(sourceFile: VirtualFileRef, classApi: ClassLike): Unit = { import xsbt.api.{ APIUtil, HashAPI } val className = classApi.name if (APIUtil.isScalaSourceName(sourceFile.id) && APIUtil.hasMacro(classApi)) @@ -638,7 +681,11 @@ private final class AnalysisCallback( } } - def mainClass(sourceFile: VirtualFileRef, className: String): Unit = { + // Called by sbt-dotty + override def mainClass(sourceFile: File, className: String): Unit = + mainClass(converter.toVirtualFile(sourceFile.toPath), className) + + override def mainClass(sourceFile: VirtualFileRef, className: String): Unit = { mainClasses.getOrElseUpdate(sourceFile, new ConcurrentLinkedQueue).add(className) () } @@ -844,7 +891,7 @@ private final class AnalysisCallback( } for { earlyO <- earlyOutput - pickleJarPath <- jo2o(earlyO.getSingleOutput()) + pickleJarPath <- jo2o(earlyO.getSingleOutputAsPath()) } { // List classes defined in the files that were compiled in this run. 
       val knownProducts = merged.relations.allSources
@@ -858,7 +905,7 @@
   }
 
   private def extractProductPath(product: VirtualFileRef): Option[String] = {
-    jo2o(output.getSingleOutput) match {
+    jo2o(output.getSingleOutputAsPath) match {
       case Some(so) if so.getFileName.toString.endsWith(".jar") =>
         new JarUtils.ClassInJar(product.id).toClassFilePath
       case Some(so) =>
diff --git a/internal/zinc-core/src/main/scala/sbt/internal/inc/IncrementalCommon.scala b/internal/zinc-core/src/main/scala/sbt/internal/inc/IncrementalCommon.scala
index 27edb352d2..ba52154444 100644
--- a/internal/zinc-core/src/main/scala/sbt/internal/inc/IncrementalCommon.scala
+++ b/internal/zinc-core/src/main/scala/sbt/internal/inc/IncrementalCommon.scala
@@ -860,7 +860,8 @@ object IncrementalCommon {
     }
   }
 
-  def emptyChanges: DependencyChanges = new DependencyChanges {
+  lazy val emptyChanges: DependencyChanges = new DependencyChanges {
+    override val modifiedBinaries = new Array[java.io.File](0)
     override val modifiedLibraries = new Array[VirtualFileRef](0)
     override val modifiedClasses = new Array[String](0)
     override def isEmpty = true
diff --git a/internal/zinc-core/src/main/scala/sbt/internal/inc/MiniSetupUtil.scala b/internal/zinc-core/src/main/scala/sbt/internal/inc/MiniSetupUtil.scala
index 71dc0756d6..4a0c627694 100644
--- a/internal/zinc-core/src/main/scala/sbt/internal/inc/MiniSetupUtil.scala
+++ b/internal/zinc-core/src/main/scala/sbt/internal/inc/MiniSetupUtil.scala
@@ -85,7 +85,7 @@ object MiniSetupUtil {
   implicit val equivOutput: Equiv[APIOutput] = {
     new Equiv[APIOutput] {
       implicit val outputGroupsOrdering =
-        Ordering.by((og: OutputGroup) => og.getSourceDirectory)
+        Ordering.by((og: OutputGroup) => og.getSourceDirectoryAsPath)
 
       def equiv(out1: APIOutput, out2: APIOutput) = (out1, out2) match {
         case (m1: MultipleOutput, m2: MultipleOutput) =>
@@ -93,11 +93,11 @@ object MiniSetupUtil {
           (m1.getOutputGroups.sorted zip m2.getOutputGroups.sorted forall {
             case (a, b) =>
               equivFile
-                .equiv(a.getSourceDirectory, b.getSourceDirectory) && equivFile
-                .equiv(a.getOutputDirectory, b.getOutputDirectory)
+                .equiv(a.getSourceDirectoryAsPath, b.getSourceDirectoryAsPath) && equivFile
+                .equiv(a.getOutputDirectoryAsPath, b.getOutputDirectoryAsPath)
           })
         case (s1: SingleOutput, s2: SingleOutput) =>
-          equivFile.equiv(s1.getOutputDirectory, s2.getOutputDirectory)
+          equivFile.equiv(s1.getOutputDirectoryAsPath, s2.getOutputDirectoryAsPath)
         case _ =>
           false
       }
diff --git a/internal/zinc-core/src/main/scala/sbt/internal/inc/VirtualFileUtil.scala b/internal/zinc-core/src/main/scala/sbt/internal/inc/VirtualFileUtil.scala
index b05eca6491..8798637ce3 100644
--- a/internal/zinc-core/src/main/scala/sbt/internal/inc/VirtualFileUtil.scala
+++ b/internal/zinc-core/src/main/scala/sbt/internal/inc/VirtualFileUtil.scala
@@ -26,7 +26,9 @@ object VirtualFileUtil {
   implicit val sbtInternalIncVirtualFileRefOrdering: Ordering[VirtualFileRef] = Ordering.by(_.id)
 
   def outputDirectory(output: Output): Path =
-    output.getSingleOutput.orElseThrow(() => new RuntimeException(s"unexpected output $output"))
+    output.getSingleOutputAsPath.orElseThrow(
+      () => new RuntimeException(s"unexpected output $output")
+    )
 
   def sourcePositionMapper(converter: FileConverter): Position => Position =
     new DelegatingPosition(_, converter)
diff --git a/internal/zinc-persist/src/main/scala/sbt/internal/inc/binary/converters/ProtobufWriters.scala b/internal/zinc-persist/src/main/scala/sbt/internal/inc/binary/converters/ProtobufWriters.scala
index 1047d3f602..615888ca8a 100644
--- a/internal/zinc-persist/src/main/scala/sbt/internal/inc/binary/converters/ProtobufWriters.scala
+++ b/internal/zinc-persist/src/main/scala/sbt/internal/inc/binary/converters/ProtobufWriters.scala
@@ -119,8 +119,8 @@ final class ProtobufWriters(mapper: WriteMapper) {
   }
 
   def toOutputGroup(outputGroup: OutputGroup): Schema.OutputGroup = {
-    val newSource = mapper.mapSourceDir(outputGroup.getSourceDirectory)
-    val newTarget = mapper.mapOutputDir(outputGroup.getOutputDirectory)
+    val newSource = mapper.mapSourceDir(outputGroup.getSourceDirectoryAsPath)
+    val newTarget = mapper.mapOutputDir(outputGroup.getOutputDirectoryAsPath)
     val sourcePath = toStringPath(newSource)
     val targetPath = toStringPath(newTarget)
     Schema.OutputGroup.newBuilder
@@ -135,7 +135,7 @@ final class ProtobufWriters(mapper: WriteMapper) {
   ): Schema.Compilation.Builder = {
     output match {
       case single0: SingleOutput =>
-        val newOutputDir = mapper.mapOutputDir(single0.getOutputDirectory)
+        val newOutputDir = mapper.mapOutputDir(single0.getOutputDirectoryAsPath)
         val targetPath = toStringPath(newOutputDir)
         val single = Schema.SingleOutput.newBuilder.setTarget(targetPath).build
         builder.setSingleOutput(single)
@@ -264,7 +264,7 @@ final class ProtobufWriters(mapper: WriteMapper) {
   ): Schema.MiniSetup.Builder =
     output match {
       case single0: SingleOutput =>
-        val newOutputDir = mapper.mapOutputDir(single0.getOutputDirectory)
+        val newOutputDir = mapper.mapOutputDir(single0.getOutputDirectoryAsPath)
         val targetPath = toStringPath(newOutputDir)
         val single = Schema.SingleOutput.newBuilder.setTarget(targetPath).build
         builder.setSingleOutput(single)
diff --git a/internal/zinc-persist/src/main/scala/sbt/internal/inc/text/TextAnalysisFormat.scala b/internal/zinc-persist/src/main/scala/sbt/internal/inc/text/TextAnalysisFormat.scala
index 12829cb7fb..fa50460d16 100644
--- a/internal/zinc-persist/src/main/scala/sbt/internal/inc/text/TextAnalysisFormat.scala
+++ b/internal/zinc-persist/src/main/scala/sbt/internal/inc/text/TextAnalysisFormat.scala
@@ -428,10 +428,12 @@ object TextAnalysisFormat extends TextAnalysisFormat(ReadWriteMappers.getEmptyMa
     val (mode, outputAsMap) = Analysis.dummyOutput match {
       case s: SingleOutput =>
         // just to be compatible with multipleOutputMode
-        val ignored = s.getOutputDirectory
-        (singleOutputMode, Map(ignored -> s.getOutputDirectory))
+        val ignored = s.getOutputDirectoryAsPath
+        (singleOutputMode, Map(ignored -> s.getOutputDirectoryAsPath))
       case m: MultipleOutput =>
-        val map = m.getOutputGroups.map(x => x.getSourceDirectory -> x.getOutputDirectory).toMap
+        val map = m.getOutputGroups
+          .map(x => x.getSourceDirectoryAsPath -> x.getOutputDirectoryAsPath)
+          .toMap
         (multipleOutputMode, map)
     }
     val mappedClasspathHash = setup.options.classpathHash
diff --git a/internal/zinc-persist/src/test/scala/sbt/inc/AnalysisFormatHelpers.scala b/internal/zinc-persist/src/test/scala/sbt/inc/AnalysisFormatHelpers.scala
index c83e85704e..e4d1e23f9d 100644
--- a/internal/zinc-persist/src/test/scala/sbt/inc/AnalysisFormatHelpers.scala
+++ b/internal/zinc-persist/src/test/scala/sbt/inc/AnalysisFormatHelpers.scala
@@ -32,7 +32,7 @@ object AnalysisFormatHelpers {
   val mappers: ReadWriteMappers = ReadWriteMappers.getMachineIndependentMappers(RootFilePath)
 
   val commonSetup: MiniSetup = {
-    val output: SingleOutput = () => RootFilePath.resolve("out")
+    val output: Output = CompileOutput(RootFilePath.resolve("out"))
     val opts = MiniOptions.of(Array(), Array(), Array())
     MiniSetup.of(output, opts, "2.10.4", Mixed, true, Array(t2("key" -> "value")))
   }
diff --git a/internal/zinc-testing/src/main/scala/xsbti/TestCallback.scala b/internal/zinc-testing/src/main/scala/xsbti/TestCallback.scala
index 0c196bc39a..c076ff1b8a 100644
--- a/internal/zinc-testing/src/main/scala/xsbti/TestCallback.scala
+++ b/internal/zinc-testing/src/main/scala/xsbti/TestCallback.scala
@@ -11,6 +11,7 @@
 
 package xsbti
 
+import java.io.File
 import java.nio.file.Path
 import java.util
 
@@ -37,6 +38,7 @@ class TestCallback extends AnalysisCallback {
 
   def usedNames = usedNamesAndScopes.mapValues(_.map(_.name))
 
+  override def startSource(source: File): Unit = ???
   override def startSource(source: VirtualFile): Unit = {
     assert(
       !apis.contains(source),
@@ -55,6 +57,14 @@ class TestCallback extends AnalysisCallback {
     ()
   }
 
+  override def binaryDependency(
+      classFile: File,
+      onBinaryClassName: String,
+      fromClassName: String,
+      fromSourceFile: File,
+      context: DependencyContext
+  ): Unit = ???
+
   override def binaryDependency(
       onBinary: Path,
       onBinaryClassName: String,
@@ -66,6 +76,13 @@
     ()
   }
+  override def generatedNonLocalClass(
+      sourceFile: File,
+      classFile: File,
+      binaryClassName: String,
+      srcClassName: String
+  ): Unit = ???
+
   override def generatedNonLocalClass(
       sourceFile: VirtualFileRef,
       classFile: Path,
       binaryClassName: String,
@@ -77,6 +94,11 @@
     ()
   }
 
+  override def generatedLocalClass(
+      sourceFile: File,
+      classFile: File
+  ): Unit = ???
+
   override def generatedLocalClass(
       sourceFile: VirtualFileRef,
       classFile: Path
@@ -88,11 +110,15 @@
   def usedName(className: String, name: String, scopes: util.EnumSet[UseScope]): Unit =
     usedNamesAndScopes(className) += TestUsedName(name, scopes)
 
+  override def api(source: File, api: ClassLike): Unit = ???
+
   override def api(source: VirtualFileRef, api: ClassLike): Unit = {
     apis(source) += api
     ()
   }
 
+  override def mainClass(source: File, className: String): Unit = ()
+
   override def mainClass(source: VirtualFileRef, className: String): Unit = ()
 
   override def enabled(): Boolean = true
diff --git a/zinc/src/main/scala/sbt/internal/inc/IncrementalCompilerImpl.scala b/zinc/src/main/scala/sbt/internal/inc/IncrementalCompilerImpl.scala
index eee9c263c9..2297fc9904 100644
--- a/zinc/src/main/scala/sbt/internal/inc/IncrementalCompilerImpl.scala
+++ b/zinc/src/main/scala/sbt/internal/inc/IncrementalCompilerImpl.scala
@@ -280,11 +280,11 @@ class IncrementalCompilerImpl extends IncrementalCompiler {
     val numberSources = s"$sourceCount sources"
     val outputString = output match {
       case singleOutput: SingleOutput =>
-        singleOutput.getOutputDirectory().toString
+        singleOutput.getOutputDirectoryAsPath().toString
       case multiOutput: MultipleOutput =>
         multiOutput
           .getOutputGroups()
-          .map(_.getOutputDirectory().toString)
+          .map(_.getOutputDirectoryAsPath().toString)
           .mkString("[", ", ", "]")
       case _ =>
         s"other output ($output)"
diff --git a/zinc/src/main/scala/sbt/internal/inc/MixedAnalyzingCompiler.scala b/zinc/src/main/scala/sbt/internal/inc/MixedAnalyzingCompiler.scala
index 567bb6473c..f83265e6de 100644
--- a/zinc/src/main/scala/sbt/internal/inc/MixedAnalyzingCompiler.scala
+++ b/zinc/src/main/scala/sbt/internal/inc/MixedAnalyzingCompiler.scala
@@ -81,7 +81,7 @@ final class MixedAnalyzingCompiler(
     logInputs(log, javaSrcs.size, scalaSrcs.size, outputDirs)
     val isPickleJava =
       config.currentSetup.order == Mixed && config.incOptions.pipelining && javaSrcs.nonEmpty
-    val earlyOut = config.earlyOutput.flatMap(_.getSingleOutput.toOption)
+    val earlyOut = config.earlyOutput.flatMap(_.getSingleOutputAsPath.toOption)
     val pickleWrite = earlyOut.toList.flatMap { out =>
       val sbv = scalac.scalaInstance.version.take(4)
       if (out.toString.endsWith(".jar") && !Files.exists(out))
@@ -105,14 +105,15 @@ final class MixedAnalyzingCompiler(
       timed("Scala compilation", log) {
         config.compiler.compile(
           sources.toArray,
+          config.converter,
           changes,
           arguments.toArray,
           output,
           callback,
           config.reporter,
           config.cache,
-          log,
-          config.progress.toOptional
+          config.progress.toOptional,
+          log
         )
       }
     }
@@ -197,8 +198,8 @@ final class MixedAnalyzingCompiler(
 
   private[this] def outputDirectories(output: Output): Seq[Path] = {
     output match {
-      case single: SingleOutput => List(single.getOutputDirectory)
-      case mult: MultipleOutput => mult.getOutputGroups map (_.getOutputDirectory)
+      case single: SingleOutput => List(single.getOutputDirectoryAsPath)
+      case mult: MultipleOutput => mult.getOutputGroups map (_.getOutputDirectoryAsPath)
     }
   }
 
diff --git a/zinc/src/main/scala/sbt/internal/inc/javac/AnalyzingJavaCompiler.scala b/zinc/src/main/scala/sbt/internal/inc/javac/AnalyzingJavaCompiler.scala
index 0e1a20b760..495b7eb7d1 100644
--- a/zinc/src/main/scala/sbt/internal/inc/javac/AnalyzingJavaCompiler.scala
+++ b/zinc/src/main/scala/sbt/internal/inc/javac/AnalyzingJavaCompiler.scala
@@ -117,18 +117,18 @@ final class AnalyzingJavaCompiler private[sbt] (
     // Outline chunks of compiles so that .class files end up in right location
     val chunks: Map[Option[Path], Seq[VirtualFile]] = output match {
       case single: SingleOutput =>
-        Map(Option(single.getOutputDirectory) -> sources)
+        Map(Option(single.getOutputDirectoryAsPath) -> sources)
       case multi: MultipleOutput =>
         sources.groupBy { src =>
           multi.getOutputGroups
             .find { out =>
               val sourceDir: VirtualFileRef =
                 sourceDirs.getOrElseUpdate(
-                  out.getSourceDirectory,
-                  converter.toVirtualFile(out.getSourceDirectory)
+                  out.getSourceDirectoryAsPath,
+                  converter.toVirtualFile(out.getSourceDirectoryAsPath)
                 )
               src.id.startsWith(sourceDir.id)
             }
-            .map(_.getOutputDirectory)
+            .map(_.getOutputDirectoryAsPath)
         }
     }
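
Note on the DependencyChanges shape used above (not part of the patch): a minimal sketch of an
implementation against the updated xsbti.compile.DependencyChanges, mirroring the emptyChanges
and binaryChanges instances in IncrementalCommon.scala and Incremental.scala. It assumes the
compiler-interface from this change is on the classpath; the object name NoChanges is
illustrative only.

    import java.io.File
    import xsbti.VirtualFileRef
    import xsbti.compile.DependencyChanges

    // A "no changes" stand-in: every member reports an empty modification set.
    object NoChanges extends DependencyChanges {
      // legacy java.io.File-based view still declared on the interface
      override val modifiedBinaries: Array[File] = new Array[File](0)
      // VirtualFileRef-based view read by Zinc itself
      override val modifiedLibraries: Array[VirtualFileRef] = new Array[VirtualFileRef](0)
      override val modifiedClasses: Array[String] = new Array[String](0)
      override def isEmpty: Boolean = true
    }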