getSingleOutputAsPath();
}
+
diff --git a/internal/compiler-interface/src/main/java/xsbti/compile/OutputGroup.java b/internal/compiler-interface/src/main/java/xsbti/compile/OutputGroup.java
index b054787ceb..6dcca12a8c 100644
--- a/internal/compiler-interface/src/main/java/xsbti/compile/OutputGroup.java
+++ b/internal/compiler-interface/src/main/java/xsbti/compile/OutputGroup.java
@@ -11,6 +11,7 @@
package xsbti.compile;
+import java.io.File;
import java.nio.file.Path;
import java.io.Serializable;
@@ -23,9 +24,22 @@ public interface OutputGroup extends Serializable {
*
* Note that source directories should uniquely identify the group
* for a certain source file.
+ *
+ * @deprecated use {@link #getSourceDirectoryAsPath()} instead.
*/
- public Path getSourceDirectory();
+ @Deprecated
+ public File getSourceDirectory();
+ /**
+ * Return the directory where source files are stored for this group.
+ *
+ * Note that source directories should uniquely identify the group
+ * for a certain source file.
+ */
+ public default Path getSourceDirectoryAsPath() {
+ return getSourceDirectory().toPath();
+ }
+
/**
* Return the directory where class files should be generated.
*
@@ -35,5 +49,20 @@ public interface OutputGroup extends Serializable {
*
* This directory must be exclusively used for one set of sources.
*/
- public Path getOutputDirectory();
+ @Deprecated
+ public File getOutputDirectory();
+
+ /**
+ * Return the directory where class files should be generated.
+ *
+ * Incremental compilation manages the class files in this directory, so
+ * don't play with this directory out of the Zinc API. Zinc already takes
+ * care of deleting classes before every compilation run.
+ *
+ * This directory must be exclusively used for one set of sources.
+ */
+ public default Path getOutputDirectoryAsPath() {
+ return getOutputDirectory().toPath();
+ }
}
+
diff --git a/internal/compiler-interface/src/main/java/xsbti/compile/ScalaCompiler.java b/internal/compiler-interface/src/main/java/xsbti/compile/ScalaCompiler.java
index a4a482beee..342a9d216f 100644
--- a/internal/compiler-interface/src/main/java/xsbti/compile/ScalaCompiler.java
+++ b/internal/compiler-interface/src/main/java/xsbti/compile/ScalaCompiler.java
@@ -12,6 +12,7 @@
package xsbti.compile;
import xsbti.AnalysisCallback;
+import xsbti.FileConverter;
import xsbti.Logger;
import xsbti.Reporter;
import xsbti.VirtualFile;
@@ -32,28 +33,6 @@ public interface ScalaCompiler {
*/
ClasspathOptions classpathOptions();
- /**
- * Recompile the subset of sources
impacted by the
- * changes defined in changes
and collect the new APIs.
- *
- * @param sources All the sources of the project.
- * @param changes The changes that have been detected at the previous step.
- * @param callback The callback to which the extracted information should be
- * reported.
- * @param log The logger in which the Scala compiler will log info.
- * @param reporter The reporter to which errors and warnings should be
- * reported during compilation.
- * @param progress Where to report the file being currently compiled.
- * @param compiler The actual compiler that will perform the compilation step.
- */
- void compile(VirtualFile[] sources,
- DependencyChanges changes,
- AnalysisCallback callback,
- Logger log,
- Reporter reporter,
- CompileProgress progress,
- CachedCompiler compiler);
-
/**
* Recompile the subset of sources
impacted by the
* changes defined in changes
and collect the new APIs.
@@ -73,12 +52,14 @@ void compile(VirtualFile[] sources,
* will report on the file being compiled.
*/
void compile(VirtualFile[] sources,
+ FileConverter converter,
DependencyChanges changes,
String[] options,
Output output,
AnalysisCallback callback,
Reporter reporter,
GlobalsCache cache,
- Logger log,
- Optional progressOpt);
+ Optional progressOpt,
+ Logger log);
}
+
diff --git a/internal/compiler-interface/src/main/java/xsbti/compile/ScaladocInterface1.java b/internal/compiler-interface/src/main/java/xsbti/compile/ScaladocInterface1.java
new file mode 100644
index 0000000000..e0f8be1224
--- /dev/null
+++ b/internal/compiler-interface/src/main/java/xsbti/compile/ScaladocInterface1.java
@@ -0,0 +1,21 @@
+/*
+ * Zinc - The incremental compiler for Scala.
+ * Copyright Lightbend, Inc. and Mark Harrah
+ *
+ * Licensed under Apache License 2.0
+ * (http://www.apache.org/licenses/LICENSE-2.0).
+ *
+ * See the NOTICE file distributed with this work for
+ * additional information regarding copyright ownership.
+ */
+
+package xsbti.compile;
+
+import xsbti.Logger;
+import xsbti.Reporter;
+
+/** Scaladoc Interface as of Zinc 1.2.0. */
+public interface ScaladocInterface1 {
+ void run(String[] args, Logger log, Reporter delegate);
+}
+
diff --git a/internal/compiler-interface/src/main/java/xsbti/compile/ScaladocInterface2.java b/internal/compiler-interface/src/main/java/xsbti/compile/ScaladocInterface2.java
new file mode 100644
index 0000000000..0360322e1c
--- /dev/null
+++ b/internal/compiler-interface/src/main/java/xsbti/compile/ScaladocInterface2.java
@@ -0,0 +1,22 @@
+/*
+ * Zinc - The incremental compiler for Scala.
+ * Copyright Lightbend, Inc. and Mark Harrah
+ *
+ * Licensed under Apache License 2.0
+ * (http://www.apache.org/licenses/LICENSE-2.0).
+ *
+ * See the NOTICE file distributed with this work for
+ * additional information regarding copyright ownership.
+ */
+
+package xsbti.compile;
+
+import xsbti.Logger;
+import xsbti.Reporter;
+import xsbti.VirtualFile;
+
+/** Scaladoc Interface as of Zinc 1.4.0. */
+public interface ScaladocInterface2 {
+ void run(VirtualFile[] sources, String[] args, Logger log, Reporter delegate);
+}
+
diff --git a/internal/compiler-interface/src/main/java/xsbti/compile/SingleOutput.java b/internal/compiler-interface/src/main/java/xsbti/compile/SingleOutput.java
index 454e13c77b..ad4e21187b 100755
--- a/internal/compiler-interface/src/main/java/xsbti/compile/SingleOutput.java
+++ b/internal/compiler-interface/src/main/java/xsbti/compile/SingleOutput.java
@@ -11,6 +11,7 @@
package xsbti.compile;
+import java.io.File;
import java.nio.file.Path;
import java.util.Optional;
@@ -29,16 +30,40 @@ public interface SingleOutput extends Output {
* of deleting classes before every compilation run.
*
* This file or directory must be exclusively used for one set of sources.
+ *
+ * @deprecated use {@link #getOutputDirectoryAsPath()} instead.
*/
- public Path getOutputDirectory();
+ @Deprecated
+ public File getOutputDirectory();
+
+ /**
+ * Return the **directory or jar** where class files should be generated
+ * and written to. The method name is a misnomer since it can return a
+ * jar file when straight-to-jar compilation is enabled.
+ *
+ * Incremental compilation manages the class files in this file, so don't
+ * play with this directory out of the Zinc API. Zinc already takes care
+ * of deleting classes before every compilation run.
+ *
+ * This file or directory must be exclusively used for one set of sources.
+ */
+ public default Path getOutputDirectoryAsPath() {
+ return getOutputDirectory().toPath();
+ }
@Override
- public default Optional getSingleOutput() {
+ public default Optional getSingleOutput() {
return Optional.of(getOutputDirectory());
}
+ @Override
+ public default Optional getSingleOutputAsPath() {
+ return Optional.of(getOutputDirectoryAsPath());
+ }
+
@Override
public default Optional getMultipleOutput() {
return Optional.empty();
}
-}
\ No newline at end of file
+}
+
diff --git a/internal/zinc-classfile/src/main/scala/sbt/internal/inc/JarUtils.scala b/internal/zinc-classfile/src/main/scala/sbt/internal/inc/JarUtils.scala
index 81c10250df..6c7bdac48b 100644
--- a/internal/zinc-classfile/src/main/scala/sbt/internal/inc/JarUtils.scala
+++ b/internal/zinc-classfile/src/main/scala/sbt/internal/inc/JarUtils.scala
@@ -317,7 +317,7 @@ object JarUtils {
def getOutputJar(output: Output): Option[Path] = {
output match {
case s: SingleOutput =>
- Some(s.getOutputDirectory).filter(_.toString.endsWith(".jar"))
+ Some(s.getOutputDirectoryAsPath).filter(_.toString.endsWith(".jar"))
case _ => None
}
}
diff --git a/internal/zinc-classfile/src/main/scala/sbt/internal/inc/classfile/JavaAnalyze.scala b/internal/zinc-classfile/src/main/scala/sbt/internal/inc/classfile/JavaAnalyze.scala
index 4dfe165a13..47855451e0 100644
--- a/internal/zinc-classfile/src/main/scala/sbt/internal/inc/classfile/JavaAnalyze.scala
+++ b/internal/zinc-classfile/src/main/scala/sbt/internal/inc/classfile/JavaAnalyze.scala
@@ -45,7 +45,7 @@ private[sbt] object JavaAnalyze {
.groupBy(_.name)
// For performance reasons, precompute these as they are static throughout this analysis
val outputJarOrNull: Path = finalJarOutput.getOrElse(null)
- val singleOutputOrNull: Path = output.getSingleOutput.orElse(null)
+ val singleOutputOrNull: Path = output.getSingleOutputAsPath.orElse(null)
def load(tpe: String, errMsg: => Option[String]): Option[Class[_]] = {
if (tpe.endsWith("module-info")) None
diff --git a/internal/zinc-classfile/src/test/scala/sbt/internal/inc/classfile/JavaCompilerForUnitTesting.scala b/internal/zinc-classfile/src/test/scala/sbt/internal/inc/classfile/JavaCompilerForUnitTesting.scala
index 23e4656625..9ea4e47016 100644
--- a/internal/zinc-classfile/src/test/scala/sbt/internal/inc/classfile/JavaCompilerForUnitTesting.scala
+++ b/internal/zinc-classfile/src/test/scala/sbt/internal/inc/classfile/JavaCompilerForUnitTesting.scala
@@ -87,7 +87,10 @@ object JavaCompilerForUnitTesting {
// - extract all base classes.
// we extract just parents as this is enough for testing
- val output = new SingleOutput { def getOutputDirectory: Path = classesDir.toPath }
+ val output = new SingleOutput {
+ override def getOutputDirectoryAsPath: Path = classesDir.toPath
+ override def getOutputDirectory: File = getOutputDirectoryAsPath.toFile
+ }
JavaAnalyze(classFiles, srcFiles, logger, output, finalJarOutput = None)(
analysisCallback,
classloader,
diff --git a/internal/zinc-compile-core/src/main/scala/sbt/internal/inc/AnalyzingCompiler.scala b/internal/zinc-compile-core/src/main/scala/sbt/internal/inc/AnalyzingCompiler.scala
index f92ad94be0..5e17ddbfaf 100644
--- a/internal/zinc-compile-core/src/main/scala/sbt/internal/inc/AnalyzingCompiler.scala
+++ b/internal/zinc-compile-core/src/main/scala/sbt/internal/inc/AnalyzingCompiler.scala
@@ -18,19 +18,21 @@ import java.nio.file.Path
import java.net.URLClassLoader
import java.util.Optional
+import com.github.ghik.silencer.silent
import sbt.util.Logger
import sbt.io.syntax._
import sbt.internal.inc.classpath.ClassLoaderCache
import sbt.internal.util.ManagedLogger
import xsbti.{
AnalysisCallback,
+ FileConverter,
PathBasedFile,
Reporter,
- ReporterUtil,
Logger => xLogger,
VirtualFile
}
import xsbti.compile._
+import scala.language.existentials
/**
* Implement a cached incremental `ScalaCompiler` that has been instrumented
@@ -52,6 +54,10 @@ final class AnalyzingCompiler(
) extends CachedCompilerProvider
with ScalaCompiler {
+ private[this] final val compilerBridgeClassName = "xsbt.CompilerInterface"
+ private[this] final val scaladocBridgeClassName = "xsbt.ScaladocInterface"
+ private[this] final val consoleBridgeClassName = "xsbt.ConsoleInterface"
+
def onArgs(f: Seq[String] => Unit): AnalyzingCompiler =
new AnalyzingCompiler(scalaInstance, provider, classpathOptions, f, classLoaderCache)
@@ -64,42 +70,22 @@ final class AnalyzingCompiler(
Some(classLoaderCache)
)
- def apply(
- sources: Array[VirtualFile],
- changes: DependencyChanges,
- classpath: Array[VirtualFile],
- singleOutput: Path,
- options: Array[String],
- callback: AnalysisCallback,
- maximumErrors: Int,
- cache: GlobalsCache,
- log: ManagedLogger
- ): Unit = {
- val compArgs = new CompilerArguments(scalaInstance, classpathOptions)
- val arguments = compArgs.makeArguments(Nil, classpath, options)
- val output = CompileOutput(singleOutput)
- val basicReporterConfig = ReporterUtil.getDefaultReporterConfig()
- val reporterConfig = basicReporterConfig.withMaximumErrors(maximumErrors)
- val reporter = ReporterManager.getReporter(log, reporterConfig)
- val progress = Optional.empty[CompileProgress]
- compile(sources, changes, arguments.toArray, output, callback, reporter, cache, log, progress)
- }
-
- def compile(
+ override def compile(
sources: Array[VirtualFile],
+ converter: FileConverter,
changes: DependencyChanges,
options: Array[String],
output: Output,
callback: AnalysisCallback,
reporter: Reporter,
cache: GlobalsCache,
- log: xLogger,
- progressOpt: Optional[CompileProgress]
+ progressOpt: Optional[CompileProgress],
+ log: xLogger
): Unit = {
val cached = cache(options, output, !changes.isEmpty, this, log, reporter)
try {
val progress = if (progressOpt.isPresent) progressOpt.get else IgnoreProgress
- compile(sources, changes, callback, log, reporter, progress, cached)
+ compile(sources, converter, changes, callback, log, reporter, progress, cached)
} finally {
cached match {
case c: java.io.Closeable => c.close()
@@ -110,6 +96,7 @@ final class AnalyzingCompiler(
def compile(
sources: Array[VirtualFile],
+ converter: FileConverter,
changes: DependencyChanges,
callback: AnalysisCallback,
log: xLogger,
@@ -117,57 +104,63 @@ final class AnalyzingCompiler(
progress: CompileProgress,
compiler: CachedCompiler
): Unit = {
- // onArgsHandler(compiler.commandArguments(sources))
- call("xsbt.CompilerInterface", "run", log)(
- classOf[Array[VirtualFile]],
- classOf[DependencyChanges],
- classOf[AnalysisCallback],
- classOf[xLogger],
- classOf[Reporter],
- classOf[CompileProgress],
- classOf[CachedCompiler]
- )(sources, changes, callback, log, reporter, progress, compiler)
+ val (bridge, bridgeClass) = bridgeInstance(compilerBridgeClassName, log)
+ (bridge, compiler) match {
+ case (intf: CompilerInterface2, compiler2: CachedCompiler2) =>
+ intf.run(sources, changes, callback, log, reporter, progress, compiler2)
+ case _ =>
+ // fall back to passing File array
+ val fileSources: Array[File] = sources.map(converter.toPath(_).toFile)
+ invoke(bridge, bridgeClass, "run", log)(
+ classOf[Array[File]],
+ classOf[DependencyChanges],
+ classOf[AnalysisCallback],
+ classOf[xLogger],
+ classOf[Reporter],
+ classOf[CompileProgress],
+ classOf[CachedCompiler]
+ )(fileSources, changes, callback, log, reporter, progress, compiler)
+ }
()
}
- def newCachedCompiler(
+ override def newCachedCompiler(
arguments: Array[String],
output: Output,
log: xLogger,
reporter: Reporter
- ): CachedCompiler =
- newCachedCompiler(arguments: Seq[String], output, log, reporter)
-
- def newCachedCompiler(
- arguments: Seq[String],
- output: Output,
- log: xLogger,
- reporter: Reporter
): CachedCompiler = {
- val compiler = call("xsbt.CompilerInterface", "newCompiler", log)(
- classOf[Array[String]],
- classOf[Output],
- classOf[xLogger],
- classOf[Reporter]
- )(arguments.toArray[String], output, log, reporter)
- compiler.asInstanceOf[CachedCompiler]
+ bridgeInstance(compilerBridgeClassName, log) match {
+ case (intf: CompilerInterface2, _) =>
+ intf.newCompiler(arguments, output, log, reporter)
+ case (bridge, bridgeClass) =>
+      // fall back to old reflection if CompilerInterface2 is not supported
+ invoke(bridge, bridgeClass, "newCompiler", log)(
+ classOf[Array[String]],
+ classOf[Output],
+ classOf[xLogger],
+ classOf[Reporter]
+ )(arguments, output, log, reporter).asInstanceOf[CachedCompiler]: @silent
+ }
}
def doc(
sources: Seq[VirtualFile],
classpath: Seq[VirtualFile],
+ converter: FileConverter,
outputDirectory: Path,
options: Seq[String],
maximumErrors: Int,
log: ManagedLogger
): Unit = {
val reporter = new ManagedLoggedReporter(maximumErrors, log)
- doc(sources, classpath, outputDirectory, options, log, reporter)
+ doc(sources, classpath, converter, outputDirectory, options, log, reporter)
}
def doc(
sources: Seq[VirtualFile],
classpath: Seq[VirtualFile],
+ converter: FileConverter,
outputDirectory: Path,
options: Seq[String],
log: Logger,
@@ -177,12 +170,20 @@ final class AnalyzingCompiler(
val arguments =
compArgs.makeArguments(Nil, classpath, Some(outputDirectory), options)
onArgsHandler(arguments)
- call("xsbt.ScaladocInterface", "run", log)(
- classOf[Array[VirtualFile]],
- classOf[Array[String]],
- classOf[xLogger],
- classOf[Reporter]
- )(sources.toArray, arguments.toArray[String], log, reporter)
+ val (bridge, bridgeClass) = bridgeInstance(scaladocBridgeClassName, log)
+ bridge match {
+ case intf: ScaladocInterface2 =>
+ intf.run(sources.toArray, arguments.toArray[String], log, reporter)
+ case _ =>
+ // fall back to old reflection
+ val fileSources: Array[File] = sources.toArray.map(converter.toPath(_).toFile)
+ invoke(bridge, bridgeClass, "run", log)(
+ classOf[Array[File]],
+ classOf[Array[String]],
+ classOf[xLogger],
+ classOf[Reporter]
+ )(fileSources, arguments.toArray[String], log, reporter)
+ }
()
}
@@ -195,28 +196,46 @@ final class AnalyzingCompiler(
)(loader: Option[ClassLoader] = None, bindings: Seq[(String, Any)] = Nil): Unit = {
onArgsHandler(consoleCommandArguments(classpath, options, log))
val (classpathString, bootClasspath) = consoleClasspaths(classpath)
- val (names, values) = bindings.unzip
- call("xsbt.ConsoleInterface", "run", log)(
- classOf[Array[String]],
- classOf[String],
- classOf[String],
- classOf[String],
- classOf[String],
- classOf[ClassLoader],
- classOf[Array[String]],
- classOf[Array[Any]],
- classOf[xLogger]
- )(
- options.toArray[String]: Array[String],
- bootClasspath,
- classpathString,
- initialCommands,
- cleanupCommands,
- loader.orNull,
- names.toArray[String],
- values.toArray[Any],
- log
- )
+ val (names, values0) = bindings.unzip
+ val values = values0.toArray[Any].asInstanceOf[Array[AnyRef]]
+ val (bridge, bridgeClass) = bridgeInstance(consoleBridgeClassName, log)
+ bridge match {
+ case intf: ConsoleInterface1 =>
+ intf.run(
+ options.toArray[String]: Array[String],
+ bootClasspath,
+ classpathString,
+ initialCommands,
+ cleanupCommands,
+ loader.orNull,
+ names.toArray[String],
+ values,
+ log
+ )
+ case _ =>
+ // fall back to old reflection if ConsoleInterface1 is not supported
+ invoke(bridge, bridgeClass, "run", log)(
+ classOf[Array[String]],
+ classOf[String],
+ classOf[String],
+ classOf[String],
+ classOf[String],
+ classOf[ClassLoader],
+ classOf[Array[String]],
+ classOf[Array[Any]],
+ classOf[xLogger]
+ )(
+ options.toArray[String]: Array[String],
+ bootClasspath,
+ classpathString,
+ initialCommands,
+ cleanupCommands,
+ loader.orNull,
+ names.toArray[String],
+ values.toArray[Any],
+ log
+ )
+ }
()
}
@@ -237,26 +256,31 @@ final class AnalyzingCompiler(
log: Logger
): Seq[String] = {
val (classpathString, bootClasspath) = consoleClasspaths(classpath)
- val argsObj = call("xsbt.ConsoleInterface", "commandArguments", log)(
- classOf[Array[String]],
- classOf[String],
- classOf[String],
- classOf[xLogger]
- )(options.toArray[String], bootClasspath, classpathString, log)
+ val (bridge, bridgeClass) = bridgeInstance(consoleBridgeClassName, log)
+ val argsObj = bridge match {
+ case intf: ConsoleInterface1 =>
+ intf.commandArguments(options.toArray[String], bootClasspath, classpathString, log)
+ case _ =>
+ invoke(bridge, bridgeClass, "commandArguments", log)(
+ classOf[Array[String]],
+ classOf[String],
+ classOf[String],
+ classOf[xLogger]
+ )(options.toArray[String], bootClasspath, classpathString, log)
+ }
argsObj.asInstanceOf[Array[String]].toSeq
}
- def force(log: Logger): Unit = { provider.fetchCompiledBridge(scalaInstance, log); () }
+ private def bridgeInstance(bridgeClassName: String, log: Logger): (AnyRef, Class[_]) = {
+ val bridgeClass = getBridgeClass(bridgeClassName, log)
+ (bridgeClass.getDeclaredConstructor().newInstance().asInstanceOf[AnyRef], bridgeClass)
+ }
- private def call(
- interfaceClassName: String,
- methodName: String,
- log: Logger
- )(argTypes: Class[_]*)(args: AnyRef*): AnyRef = {
- val interfaceClass = getInterfaceClass(interfaceClassName, log)
- val interface = interfaceClass.getDeclaredConstructor().newInstance().asInstanceOf[AnyRef]
- val method = interfaceClass.getMethod(methodName, argTypes: _*)
- try method.invoke(interface, args: _*)
+ private def invoke(bridge: AnyRef, bridgeClass: Class[_], methodName: String, log: Logger)(
+ argTypes: Class[_]*
+ )(args: AnyRef*): AnyRef = {
+ val method = bridgeClass.getMethod(methodName, argTypes: _*)
+ try method.invoke(bridge, args: _*)
catch {
case e: InvocationTargetException =>
e.getCause match {
@@ -285,7 +309,7 @@ final class AnalyzingCompiler(
}
}
- private[this] def getInterfaceClass(name: String, log: Logger) =
+ private[this] def getBridgeClass(name: String, log: Logger) =
Class.forName(name, true, loader(log))
protected def createDualLoader(
diff --git a/internal/zinc-compile-core/src/main/scala/sbt/internal/inc/CompileOutput.scala b/internal/zinc-compile-core/src/main/scala/sbt/internal/inc/CompileOutput.scala
index 4994313ed3..3a541430f2 100755
--- a/internal/zinc-compile-core/src/main/scala/sbt/internal/inc/CompileOutput.scala
+++ b/internal/zinc-compile-core/src/main/scala/sbt/internal/inc/CompileOutput.scala
@@ -14,6 +14,7 @@ package internal
package inc
import xsbti.compile.{ Output, OutputGroup }
+import java.io.File
import java.nio.file.Path
import java.util.Optional
@@ -53,13 +54,15 @@ object CompileOutput {
new ConcreteOutputGroup(source, output)
private final class EmptyOutput extends xsbti.compile.Output {
- override def getSingleOutput(): Optional[Path] = Optional.empty()
+ override def getSingleOutput(): Optional[File] = Optional.empty()
+ override def getSingleOutputAsPath(): Optional[Path] = Optional.empty()
override def getMultipleOutput(): Optional[Array[OutputGroup]] = Optional.empty()
override def toString: String = "EmptyOutput()"
}
- private final class ConcreteSingleOutput(val getOutputDirectory: Path)
+ private final class ConcreteSingleOutput(override val getOutputDirectoryAsPath: Path)
extends xsbti.compile.SingleOutput {
+ override def getOutputDirectory: File = getOutputDirectoryAsPath.toFile
override def toString: String = s"SingleOutput($getOutputDirectory)"
}
@@ -69,9 +72,11 @@ object CompileOutput {
}
private final class ConcreteOutputGroup(
- val getSourceDirectory: Path,
- val getOutputDirectory: Path
+ override val getSourceDirectoryAsPath: Path,
+ override val getOutputDirectoryAsPath: Path
) extends xsbti.compile.OutputGroup {
- override def toString = s"OutputGroup($getSourceDirectory -> $getOutputDirectory)"
+ override def getSourceDirectory: File = getSourceDirectoryAsPath.toFile
+ override def getOutputDirectory: File = getOutputDirectoryAsPath.toFile
+ override def toString = s"OutputGroup($getSourceDirectoryAsPath -> $getOutputDirectoryAsPath)"
}
}
diff --git a/internal/zinc-compile-core/src/main/scala/sbt/internal/inc/CompilerArguments.scala b/internal/zinc-compile-core/src/main/scala/sbt/internal/inc/CompilerArguments.scala
index 3dda333549..c7ed65e631 100644
--- a/internal/zinc-compile-core/src/main/scala/sbt/internal/inc/CompilerArguments.scala
+++ b/internal/zinc-compile-core/src/main/scala/sbt/internal/inc/CompilerArguments.scala
@@ -190,7 +190,7 @@ object CompilerArguments {
* make use of it (e.g. the Eclipse compiler does this via EJC).
* See https://github.com/sbt/zinc/issues/163. */
val target = output match {
- case so: SingleOutput => Some(so.getOutputDirectory)
+ case so: SingleOutput => Some(so.getOutputDirectoryAsPath)
case _: MultipleOutput => None
}
outputOption(target)
diff --git a/internal/zinc-compile-core/src/main/scala/sbt/internal/inc/CompilerCache.scala b/internal/zinc-compile-core/src/main/scala/sbt/internal/inc/CompilerCache.scala
index 05aa509352..5b62bf2a4a 100644
--- a/internal/zinc-compile-core/src/main/scala/sbt/internal/inc/CompilerCache.scala
+++ b/internal/zinc-compile-core/src/main/scala/sbt/internal/inc/CompilerCache.scala
@@ -16,6 +16,7 @@ package inc
import java.io.File
import java.util
+import com.github.ghik.silencer.silent
import xsbti.{ AnalysisCallback, Reporter, Logger => xLogger }
import xsbti.compile._
import sbt.util.InterfaceUtil.{ toSupplier => f0 }
@@ -53,24 +54,49 @@ final class CompilerCache(val maxInstances: Int) extends GlobalsCache {
case null =>
log.debug(f0(s"Compiler cache miss. $key "))
val compiler = c.newCachedCompiler(args, output, log, reporter)
- val newCompiler: CachedCompiler = new CachedCompiler with java.io.Closeable {
+
+ class OpenCompiler extends CachedCompiler with java.io.Closeable {
override def commandArguments(sources: Array[File]): Array[String] = {
compiler.commandArguments(sources)
}
+
+ @silent // silence deprecation of run(Array[File], ...)
override def run(
- sources: Array[VirtualFile],
+ sources: Array[File],
changes: DependencyChanges,
callback: AnalysisCallback,
logger: xLogger,
delegate: Reporter,
progress: CompileProgress
): Unit = {
+ // forward run to underlying cached compiler since it could be created by sbt-dotty
compiler.run(sources, changes, callback, logger, delegate, progress)
}
+
override def close(): Unit = {
- // Dont' close the underlying Global.
+ // Don't close the underlying Global.
}
}
+
+ class OpenCompiler2(compiler: CachedCompiler2) extends OpenCompiler with CachedCompiler2 {
+ override def run(
+ sources: Array[VirtualFile],
+ changes: DependencyChanges,
+ callback: AnalysisCallback,
+ logger: xLogger,
+ delegate: Reporter,
+ progress: CompileProgress
+ ): Unit = {
+ compiler
+ .run(sources, changes, callback, logger, delegate, progress)
+ }
+ }
+
+ val newCompiler: CachedCompiler = compiler match {
+ case compiler: CachedCompiler2 => new OpenCompiler2(compiler)
+ case _ => new OpenCompiler
+ }
+
cache.put(key, newCompiler)
newCompiler
case cachedCompiler =>
@@ -93,7 +119,7 @@ final class CompilerCache(val maxInstances: Int) extends GlobalsCache {
final class FreshCompilerCache extends GlobalsCache {
def clear(): Unit = ()
- def apply(
+ override def apply(
args: Array[String],
output: Output,
forceNew: Boolean,
diff --git a/internal/zinc-core/src/main/scala/sbt/internal/inc/Analysis.scala b/internal/zinc-core/src/main/scala/sbt/internal/inc/Analysis.scala
index 5872d01c7d..7be46d9f33 100644
--- a/internal/zinc-core/src/main/scala/sbt/internal/inc/Analysis.scala
+++ b/internal/zinc-core/src/main/scala/sbt/internal/inc/Analysis.scala
@@ -14,6 +14,7 @@ package internal
package inc
import sbt.internal.inc.Analysis.{ LocalProduct, NonLocalProduct }
+import java.io.File
import java.nio.file.{ Path, Paths }
import xsbti.VirtualFileRef
@@ -126,7 +127,8 @@ object Analysis {
}
lazy val dummyOutput: Output = new SingleOutput {
- def getOutputDirectory: Path = Paths.get("/tmp/dummy")
+ override def getOutputDirectoryAsPath: Path = Paths.get("/tmp/dummy")
+ override def getOutputDirectory: File = getOutputDirectoryAsPath.toFile
}
}
diff --git a/internal/zinc-core/src/main/scala/sbt/internal/inc/Incremental.scala b/internal/zinc-core/src/main/scala/sbt/internal/inc/Incremental.scala
index 2828e1c0fb..8f3722eabe 100644
--- a/internal/zinc-core/src/main/scala/sbt/internal/inc/Incremental.scala
+++ b/internal/zinc-core/src/main/scala/sbt/internal/inc/Incremental.scala
@@ -13,6 +13,7 @@ package sbt
package internal
package inc
+import java.io.File
import java.nio.file.Path
import java.util.EnumSet
import sbt.internal.inc.Analysis.{ LocalProduct, NonLocalProduct }
@@ -243,8 +244,10 @@ object Incremental {
incremental.detectInitialChanges(sources, previous, current, lookup, converter, output)
log.debug(s"> initialChanges = $initialChanges")
val binaryChanges = new DependencyChanges {
- val modifiedLibraries = initialChanges.libraryDeps.toArray
- val modifiedClasses = initialChanges.external.allModified.toArray
+ override def modifiedBinaries: Array[File] =
+ modifiedLibraries.map(converter.toPath(_).toFile)
+ override val modifiedLibraries = initialChanges.libraryDeps.toArray
+ override val modifiedClasses = initialChanges.external.allModified.toArray
def isEmpty = modifiedLibraries.isEmpty && modifiedClasses.isEmpty
}
val (initialInvClasses, initialInvSources0) =
@@ -428,7 +431,8 @@ private final class AnalysisCallback(
private[this] val compilation: Compilation = Compilation(compileStartTime, output)
private val hooks = options.externalHooks
- private val provenance = jo2o(output.getSingleOutput).fold("")(hooks.getProvenance.get(_)).intern
+ private val provenance =
+ jo2o(output.getSingleOutputAsPath).fold("")(hooks.getProvenance.get(_)).intern
override def toString =
(List("Class APIs", "Object APIs", "Library deps", "Products", "Source deps") zip
@@ -487,7 +491,8 @@ private final class AnalysisCallback(
currentSetup.options.scalacOptions.contains("-Ypickle-java")
}
- def startSource(source: VirtualFile): Unit = {
+ override def startSource(source: File): Unit = startSource(converter.toVirtualFile(source.toPath))
+ override def startSource(source: VirtualFile): Unit = {
if (options.strictMode()) {
assert(
!srcs.contains(source),
@@ -540,15 +545,31 @@ private final class AnalysisCallback(
add(extSrcDeps, sourceClassName, dependency)
}
+ // Called by sbt-dotty
+ override def binaryDependency(
+ classFile: File,
+ onBinaryClassName: String,
+ fromClassName: String,
+ fromSourceFile: File,
+ context: DependencyContext
+ ): Unit =
+ binaryDependency(
+ classFile.toPath,
+ onBinaryClassName,
+ fromClassName,
+ converter.toVirtualFile(fromSourceFile.toPath),
+ context
+ )
+
// since the binary at this point could either *.class files or
// library JARs, we need to accept Path here.
- def binaryDependency(
+ override def binaryDependency(
classFile: Path,
onBinaryClassName: String,
fromClassName: String,
fromSourceFile: VirtualFileRef,
context: DependencyContext
- ) =
+ ): Unit =
internalBinaryToSourceClassName(onBinaryClassName) match {
case Some(dependsOn) => // dependsOn is a source class name
// dependency is a product of a source not included in this compilation
@@ -593,7 +614,21 @@ private final class AnalysisCallback(
}
}
- def generatedNonLocalClass(
+ // Called by sbt-dotty
+ override def generatedNonLocalClass(
+ source: File,
+ classFile: File,
+ binaryClassName: String,
+ srcClassName: String
+ ): Unit =
+ generatedNonLocalClass(
+ converter.toVirtualFile(source.toPath),
+ classFile.toPath,
+ binaryClassName,
+ srcClassName
+ )
+
+ override def generatedNonLocalClass(
source: VirtualFileRef,
classFile: Path,
binaryClassName: String,
@@ -607,14 +642,22 @@ private final class AnalysisCallback(
()
}
- def generatedLocalClass(source: VirtualFileRef, classFile: Path): Unit = {
+ // Called by sbt-dotty
+ override def generatedLocalClass(source: File, classFile: File): Unit =
+ generatedLocalClass(converter.toVirtualFile(source.toPath), classFile.toPath)
+
+ override def generatedLocalClass(source: VirtualFileRef, classFile: Path): Unit = {
//println(s"Generated local class ${source}, ${classFile}")
val vf = converter.toVirtualFile(classFile)
add(localClasses, source, vf)
()
}
- def api(sourceFile: VirtualFileRef, classApi: ClassLike): Unit = {
+ // Called by sbt-dotty
+ override def api(sourceFile: File, classApi: ClassLike): Unit =
+ api(converter.toVirtualFile(sourceFile.toPath), classApi)
+
+ override def api(sourceFile: VirtualFileRef, classApi: ClassLike): Unit = {
import xsbt.api.{ APIUtil, HashAPI }
val className = classApi.name
if (APIUtil.isScalaSourceName(sourceFile.id) && APIUtil.hasMacro(classApi))
@@ -638,7 +681,11 @@ private final class AnalysisCallback(
}
}
- def mainClass(sourceFile: VirtualFileRef, className: String): Unit = {
+ // Called by sbt-dotty
+ override def mainClass(sourceFile: File, className: String): Unit =
+ mainClass(converter.toVirtualFile(sourceFile.toPath), className)
+
+ override def mainClass(sourceFile: VirtualFileRef, className: String): Unit = {
mainClasses.getOrElseUpdate(sourceFile, new ConcurrentLinkedQueue).add(className)
()
}
@@ -844,7 +891,7 @@ private final class AnalysisCallback(
}
for {
earlyO <- earlyOutput
- pickleJarPath <- jo2o(earlyO.getSingleOutput())
+ pickleJarPath <- jo2o(earlyO.getSingleOutputAsPath())
} {
// List classes defined in the files that were compiled in this run.
val knownProducts = merged.relations.allSources
@@ -858,7 +905,7 @@ private final class AnalysisCallback(
}
private def extractProductPath(product: VirtualFileRef): Option[String] = {
- jo2o(output.getSingleOutput) match {
+ jo2o(output.getSingleOutputAsPath) match {
case Some(so) if so.getFileName.toString.endsWith(".jar") =>
new JarUtils.ClassInJar(product.id).toClassFilePath
case Some(so) =>
diff --git a/internal/zinc-core/src/main/scala/sbt/internal/inc/IncrementalCommon.scala b/internal/zinc-core/src/main/scala/sbt/internal/inc/IncrementalCommon.scala
index 27edb352d2..ba52154444 100644
--- a/internal/zinc-core/src/main/scala/sbt/internal/inc/IncrementalCommon.scala
+++ b/internal/zinc-core/src/main/scala/sbt/internal/inc/IncrementalCommon.scala
@@ -860,7 +860,8 @@ object IncrementalCommon {
}
}
- def emptyChanges: DependencyChanges = new DependencyChanges {
+ lazy val emptyChanges: DependencyChanges = new DependencyChanges {
+ override val modifiedBinaries = new Array[java.io.File](0)
override val modifiedLibraries = new Array[VirtualFileRef](0)
override val modifiedClasses = new Array[String](0)
override def isEmpty = true
diff --git a/internal/zinc-core/src/main/scala/sbt/internal/inc/MiniSetupUtil.scala b/internal/zinc-core/src/main/scala/sbt/internal/inc/MiniSetupUtil.scala
index 71dc0756d6..4a0c627694 100644
--- a/internal/zinc-core/src/main/scala/sbt/internal/inc/MiniSetupUtil.scala
+++ b/internal/zinc-core/src/main/scala/sbt/internal/inc/MiniSetupUtil.scala
@@ -85,7 +85,7 @@ object MiniSetupUtil {
implicit val equivOutput: Equiv[APIOutput] = {
new Equiv[APIOutput] {
implicit val outputGroupsOrdering =
- Ordering.by((og: OutputGroup) => og.getSourceDirectory)
+ Ordering.by((og: OutputGroup) => og.getSourceDirectoryAsPath)
def equiv(out1: APIOutput, out2: APIOutput) = (out1, out2) match {
case (m1: MultipleOutput, m2: MultipleOutput) =>
@@ -93,11 +93,11 @@ object MiniSetupUtil {
(m1.getOutputGroups.sorted zip m2.getOutputGroups.sorted forall {
case (a, b) =>
equivFile
- .equiv(a.getSourceDirectory, b.getSourceDirectory) && equivFile
- .equiv(a.getOutputDirectory, b.getOutputDirectory)
+ .equiv(a.getSourceDirectoryAsPath, b.getSourceDirectoryAsPath) && equivFile
+ .equiv(a.getOutputDirectoryAsPath, b.getOutputDirectoryAsPath)
})
case (s1: SingleOutput, s2: SingleOutput) =>
- equivFile.equiv(s1.getOutputDirectory, s2.getOutputDirectory)
+ equivFile.equiv(s1.getOutputDirectoryAsPath, s2.getOutputDirectoryAsPath)
case _ =>
false
}
diff --git a/internal/zinc-core/src/main/scala/sbt/internal/inc/VirtualFileUtil.scala b/internal/zinc-core/src/main/scala/sbt/internal/inc/VirtualFileUtil.scala
index b05eca6491..8798637ce3 100644
--- a/internal/zinc-core/src/main/scala/sbt/internal/inc/VirtualFileUtil.scala
+++ b/internal/zinc-core/src/main/scala/sbt/internal/inc/VirtualFileUtil.scala
@@ -26,7 +26,9 @@ object VirtualFileUtil {
implicit val sbtInternalIncVirtualFileRefOrdering: Ordering[VirtualFileRef] = Ordering.by(_.id)
def outputDirectory(output: Output): Path =
- output.getSingleOutput.orElseThrow(() => new RuntimeException(s"unexpected output $output"))
+ output.getSingleOutputAsPath.orElseThrow(
+ () => new RuntimeException(s"unexpected output $output")
+ )
def sourcePositionMapper(converter: FileConverter): Position => Position =
new DelegatingPosition(_, converter)
diff --git a/internal/zinc-persist/src/main/scala/sbt/internal/inc/binary/converters/ProtobufWriters.scala b/internal/zinc-persist/src/main/scala/sbt/internal/inc/binary/converters/ProtobufWriters.scala
index 1047d3f602..615888ca8a 100644
--- a/internal/zinc-persist/src/main/scala/sbt/internal/inc/binary/converters/ProtobufWriters.scala
+++ b/internal/zinc-persist/src/main/scala/sbt/internal/inc/binary/converters/ProtobufWriters.scala
@@ -119,8 +119,8 @@ final class ProtobufWriters(mapper: WriteMapper) {
}
def toOutputGroup(outputGroup: OutputGroup): Schema.OutputGroup = {
- val newSource = mapper.mapSourceDir(outputGroup.getSourceDirectory)
- val newTarget = mapper.mapOutputDir(outputGroup.getOutputDirectory)
+ val newSource = mapper.mapSourceDir(outputGroup.getSourceDirectoryAsPath)
+ val newTarget = mapper.mapOutputDir(outputGroup.getOutputDirectoryAsPath)
val sourcePath = toStringPath(newSource)
val targetPath = toStringPath(newTarget)
Schema.OutputGroup.newBuilder
@@ -135,7 +135,7 @@ final class ProtobufWriters(mapper: WriteMapper) {
): Schema.Compilation.Builder = {
output match {
case single0: SingleOutput =>
- val newOutputDir = mapper.mapOutputDir(single0.getOutputDirectory)
+ val newOutputDir = mapper.mapOutputDir(single0.getOutputDirectoryAsPath)
val targetPath = toStringPath(newOutputDir)
val single = Schema.SingleOutput.newBuilder.setTarget(targetPath).build
builder.setSingleOutput(single)
@@ -264,7 +264,7 @@ final class ProtobufWriters(mapper: WriteMapper) {
): Schema.MiniSetup.Builder =
output match {
case single0: SingleOutput =>
- val newOutputDir = mapper.mapOutputDir(single0.getOutputDirectory)
+ val newOutputDir = mapper.mapOutputDir(single0.getOutputDirectoryAsPath)
val targetPath = toStringPath(newOutputDir)
val single = Schema.SingleOutput.newBuilder.setTarget(targetPath).build
builder.setSingleOutput(single)
diff --git a/internal/zinc-persist/src/main/scala/sbt/internal/inc/text/TextAnalysisFormat.scala b/internal/zinc-persist/src/main/scala/sbt/internal/inc/text/TextAnalysisFormat.scala
index 12829cb7fb..fa50460d16 100644
--- a/internal/zinc-persist/src/main/scala/sbt/internal/inc/text/TextAnalysisFormat.scala
+++ b/internal/zinc-persist/src/main/scala/sbt/internal/inc/text/TextAnalysisFormat.scala
@@ -428,10 +428,12 @@ object TextAnalysisFormat extends TextAnalysisFormat(ReadWriteMappers.getEmptyMa
val (mode, outputAsMap) = Analysis.dummyOutput match {
case s: SingleOutput =>
// just to be compatible with multipleOutputMode
- val ignored = s.getOutputDirectory
- (singleOutputMode, Map(ignored -> s.getOutputDirectory))
+ val ignored = s.getOutputDirectoryAsPath
+ (singleOutputMode, Map(ignored -> s.getOutputDirectoryAsPath))
case m: MultipleOutput =>
- val map = m.getOutputGroups.map(x => x.getSourceDirectory -> x.getOutputDirectory).toMap
+ val map = m.getOutputGroups
+ .map(x => x.getSourceDirectoryAsPath -> x.getOutputDirectoryAsPath)
+ .toMap
(multipleOutputMode, map)
}
val mappedClasspathHash = setup.options.classpathHash
diff --git a/internal/zinc-persist/src/test/scala/sbt/inc/AnalysisFormatHelpers.scala b/internal/zinc-persist/src/test/scala/sbt/inc/AnalysisFormatHelpers.scala
index c83e85704e..e4d1e23f9d 100644
--- a/internal/zinc-persist/src/test/scala/sbt/inc/AnalysisFormatHelpers.scala
+++ b/internal/zinc-persist/src/test/scala/sbt/inc/AnalysisFormatHelpers.scala
@@ -32,7 +32,7 @@ object AnalysisFormatHelpers {
val mappers: ReadWriteMappers = ReadWriteMappers.getMachineIndependentMappers(RootFilePath)
val commonSetup: MiniSetup = {
- val output: SingleOutput = () => RootFilePath.resolve("out")
+ val output: Output = CompileOutput(RootFilePath.resolve("out"))
val opts = MiniOptions.of(Array(), Array(), Array())
MiniSetup.of(output, opts, "2.10.4", Mixed, true, Array(t2("key" -> "value")))
}
diff --git a/internal/zinc-testing/src/main/scala/xsbti/TestCallback.scala b/internal/zinc-testing/src/main/scala/xsbti/TestCallback.scala
index 0c196bc39a..c076ff1b8a 100644
--- a/internal/zinc-testing/src/main/scala/xsbti/TestCallback.scala
+++ b/internal/zinc-testing/src/main/scala/xsbti/TestCallback.scala
@@ -11,6 +11,7 @@
package xsbti
+import java.io.File
import java.nio.file.Path
import java.util
@@ -37,6 +38,7 @@ class TestCallback extends AnalysisCallback {
def usedNames = usedNamesAndScopes.mapValues(_.map(_.name))
+ override def startSource(source: File): Unit = ???
override def startSource(source: VirtualFile): Unit = {
assert(
!apis.contains(source),
@@ -55,6 +57,14 @@ class TestCallback extends AnalysisCallback {
()
}
+ override def binaryDependency(
+ classFile: File,
+ onBinaryClassName: String,
+ fromClassName: String,
+ fromSourceFile: File,
+ context: DependencyContext
+ ): Unit = ???
+
override def binaryDependency(
onBinary: Path,
onBinaryClassName: String,
@@ -66,6 +76,13 @@ class TestCallback extends AnalysisCallback {
()
}
+ override def generatedNonLocalClass(
+ sourceFile: File,
+ classFile: File,
+ binaryClassName: String,
+ srcClassName: String
+ ): Unit = ???
+
override def generatedNonLocalClass(
sourceFile: VirtualFileRef,
classFile: Path,
@@ -77,6 +94,11 @@ class TestCallback extends AnalysisCallback {
()
}
+ override def generatedLocalClass(
+ sourceFile: File,
+ classFile: File
+ ): Unit = ???
+
override def generatedLocalClass(
sourceFile: VirtualFileRef,
classFile: Path
@@ -88,11 +110,15 @@ class TestCallback extends AnalysisCallback {
def usedName(className: String, name: String, scopes: util.EnumSet[UseScope]): Unit =
usedNamesAndScopes(className) += TestUsedName(name, scopes)
+ override def api(source: File, api: ClassLike): Unit = ???
+
override def api(source: VirtualFileRef, api: ClassLike): Unit = {
apis(source) += api
()
}
+ override def mainClass(source: File, className: String): Unit = ()
+
override def mainClass(source: VirtualFileRef, className: String): Unit = ()
override def enabled(): Boolean = true
diff --git a/zinc/src/main/scala/sbt/internal/inc/IncrementalCompilerImpl.scala b/zinc/src/main/scala/sbt/internal/inc/IncrementalCompilerImpl.scala
index eee9c263c9..2297fc9904 100644
--- a/zinc/src/main/scala/sbt/internal/inc/IncrementalCompilerImpl.scala
+++ b/zinc/src/main/scala/sbt/internal/inc/IncrementalCompilerImpl.scala
@@ -280,11 +280,11 @@ class IncrementalCompilerImpl extends IncrementalCompiler {
val numberSources = s"$sourceCount sources"
val outputString = output match {
case singleOutput: SingleOutput =>
- singleOutput.getOutputDirectory().toString
+ singleOutput.getOutputDirectoryAsPath().toString
case multiOutput: MultipleOutput =>
multiOutput
.getOutputGroups()
- .map(_.getOutputDirectory().toString)
+ .map(_.getOutputDirectoryAsPath().toString)
.mkString("[", ", ", "]")
case _ =>
s"other output ($output)"
diff --git a/zinc/src/main/scala/sbt/internal/inc/MixedAnalyzingCompiler.scala b/zinc/src/main/scala/sbt/internal/inc/MixedAnalyzingCompiler.scala
index 567bb6473c..f83265e6de 100644
--- a/zinc/src/main/scala/sbt/internal/inc/MixedAnalyzingCompiler.scala
+++ b/zinc/src/main/scala/sbt/internal/inc/MixedAnalyzingCompiler.scala
@@ -81,7 +81,7 @@ final class MixedAnalyzingCompiler(
logInputs(log, javaSrcs.size, scalaSrcs.size, outputDirs)
val isPickleJava = config.currentSetup.order == Mixed && config.incOptions.pipelining && javaSrcs.nonEmpty
- val earlyOut = config.earlyOutput.flatMap(_.getSingleOutput.toOption)
+ val earlyOut = config.earlyOutput.flatMap(_.getSingleOutputAsPath.toOption)
val pickleWrite = earlyOut.toList.flatMap { out =>
val sbv = scalac.scalaInstance.version.take(4)
if (out.toString.endsWith(".jar") && !Files.exists(out))
@@ -105,14 +105,15 @@ final class MixedAnalyzingCompiler(
timed("Scala compilation", log) {
config.compiler.compile(
sources.toArray,
+ config.converter,
changes,
arguments.toArray,
output,
callback,
config.reporter,
config.cache,
- log,
- config.progress.toOptional
+ config.progress.toOptional,
+ log
)
}
}
@@ -197,8 +198,8 @@ final class MixedAnalyzingCompiler(
private[this] def outputDirectories(output: Output): Seq[Path] = {
output match {
- case single: SingleOutput => List(single.getOutputDirectory)
- case mult: MultipleOutput => mult.getOutputGroups map (_.getOutputDirectory)
+ case single: SingleOutput => List(single.getOutputDirectoryAsPath)
+ case mult: MultipleOutput => mult.getOutputGroups map (_.getOutputDirectoryAsPath)
}
}
diff --git a/zinc/src/main/scala/sbt/internal/inc/javac/AnalyzingJavaCompiler.scala b/zinc/src/main/scala/sbt/internal/inc/javac/AnalyzingJavaCompiler.scala
index 0e1a20b760..495b7eb7d1 100644
--- a/zinc/src/main/scala/sbt/internal/inc/javac/AnalyzingJavaCompiler.scala
+++ b/zinc/src/main/scala/sbt/internal/inc/javac/AnalyzingJavaCompiler.scala
@@ -117,18 +117,18 @@ final class AnalyzingJavaCompiler private[sbt] (
// Outline chunks of compiles so that .class files end up in right location
val chunks: Map[Option[Path], Seq[VirtualFile]] = output match {
case single: SingleOutput =>
- Map(Option(single.getOutputDirectory) -> sources)
+ Map(Option(single.getOutputDirectoryAsPath) -> sources)
case multi: MultipleOutput =>
sources.groupBy { src =>
multi.getOutputGroups
.find { out =>
val sourceDir: VirtualFileRef = sourceDirs.getOrElseUpdate(
- out.getSourceDirectory,
- converter.toVirtualFile(out.getSourceDirectory)
+ out.getSourceDirectoryAsPath,
+ converter.toVirtualFile(out.getSourceDirectoryAsPath)
)
src.id.startsWith(sourceDir.id)
}
- .map(_.getOutputDirectory)
+ .map(_.getOutputDirectoryAsPath)
}
}