diff --git a/contrib/playlib/test/src/mill/playlib/PlayModuleTests.scala b/contrib/playlib/test/src/mill/playlib/PlayModuleTests.scala index 44238b5393c2..562a3fdbbe15 100644 --- a/contrib/playlib/test/src/mill/playlib/PlayModuleTests.scala +++ b/contrib/playlib/test/src/mill/playlib/PlayModuleTests.scala @@ -14,6 +14,7 @@ object PlayModuleTests extends TestSuite with PlayTestSuite { val (crossScalaVersion, crossPlayVersion) = (crossValue, crossValue2) override def playVersion = crossPlayVersion override def scalaVersion = crossScalaVersion + object test extends PlayTests override def mvnDeps = Task { super.mvnDeps() ++ Seq(ws()) } } diff --git a/contrib/playlib/test/src/mill/playlib/PlaySingleApiModuleTests.scala b/contrib/playlib/test/src/mill/playlib/PlaySingleApiModuleTests.scala index 553838139c01..588ebe77865d 100644 --- a/contrib/playlib/test/src/mill/playlib/PlaySingleApiModuleTests.scala +++ b/contrib/playlib/test/src/mill/playlib/PlaySingleApiModuleTests.scala @@ -10,7 +10,7 @@ object PlaySingleApiModuleTests extends TestSuite with PlayTestSuite { object playsingleapi extends TestRootModule with PlayApiModule { override val moduleDir = os.temp() // workaround problem in `SingleModule` override def playVersion = Task { testPlay28 } - override def scalaVersion = Task { "2.13.12" } + override def scalaVersion = Task { "2.13.17" } object test extends PlayTests lazy val millDiscover = Discover[this.type] diff --git a/core/api/daemon/src/mill/api/daemon/internal/SemanticDbJavaModuleApi.scala b/core/api/daemon/src/mill/api/daemon/internal/SemanticDbJavaModuleApi.scala index a1d8f74e5537..807c323207b9 100644 --- a/core/api/daemon/src/mill/api/daemon/internal/SemanticDbJavaModuleApi.scala +++ b/core/api/daemon/src/mill/api/daemon/internal/SemanticDbJavaModuleApi.scala @@ -9,17 +9,17 @@ trait SemanticDbJavaModuleApi { private[mill] def bspCompiledClassesAndSemanticDbFiles: TaskApi[UnresolvedPathApi[?]] } object SemanticDbJavaModuleApi { - val buildTimeJavaSemanticDbVersion = BuildInfo.semanticDbJavaVersion - val buildTimeSemanticDbVersion = BuildInfo.semanticDBVersion + val buildTimeJavaSemanticDbVersion: String = BuildInfo.semanticDbJavaVersion + val buildTimeSemanticDbVersion: String = BuildInfo.semanticDBVersion private[mill] val contextSemanticDbVersion: InheritableThreadLocal[Option[String]] = new InheritableThreadLocal[Option[String]] { - protected override def initialValue(): Option[String] = None.asInstanceOf[Option[String]] + protected override def initialValue(): Option[String] = None } private[mill] val contextJavaSemanticDbVersion: InheritableThreadLocal[Option[String]] = new InheritableThreadLocal[Option[String]] { - protected override def initialValue(): Option[String] = None.asInstanceOf[Option[String]] + protected override def initialValue(): Option[String] = None } private[mill] def resetContext(): Unit = { diff --git a/core/api/src/mill/api/BuildCtx.scala b/core/api/src/mill/api/BuildCtx.scala index d0188dfea75b..cf665e58a6d3 100644 --- a/core/api/src/mill/api/BuildCtx.scala +++ b/core/api/src/mill/api/BuildCtx.scala @@ -1,7 +1,8 @@ package mill.api import collection.mutable import mill.api.Watchable -import mill.constants.EnvVars +import mill.constants.{EnvVars, OutFiles, OutFolderMode} + import scala.util.DynamicVariable /** @@ -50,6 +51,7 @@ object BuildCtx { } } + /** As [[watchValue]] but watches a file path. 
*/ def watch(p: os.Path): os.Path = { val watchable = Watchable.Path(p.toNIO, false, PathRef(p).sig) watchedValues.append(watchable) @@ -59,4 +61,16 @@ object BuildCtx { def watch0(w: Watchable): Unit = watchedValues.append(w) def evalWatch0(w: Watchable): Unit = evalWatchedValues.append(w) + + /** + * Folder in the filesystem where Mill's BSP sessions that require semanticdb store an indicator file (name = + * process PID, contents are irrelevant) to communicate to the main Mill daemon and other BSP sessions that there is at + * least one Mill session that will need semanticdb. + * + * The reasoning is that if at least one of Mill's clients requests semanticdb, then there is no point in running + * regular `compile` without semanticdb, as eventually we will have to rerun it with semanticdb, and thus we should + * compile with semanticdb upfront to avoid paying the price of compiling twice (without semanticdb and then with it). + */ + private[mill] def bspSemanticDbSessionsFolder: os.Path = + workspaceRoot / os.SubPath(OutFiles.outFor(OutFolderMode.BSP)) / "semanticdb-sessions" } diff --git a/core/constants/src/mill/constants/EnvVars.java b/core/constants/src/mill/constants/EnvVars.java index 4c9de777f736..5a03be01254d 100644 --- a/core/constants/src/mill/constants/EnvVars.java +++ b/core/constants/src/mill/constants/EnvVars.java @@ -27,6 +27,12 @@ public class EnvVars { */ public static final String MILL_OUTPUT_DIR = "MILL_OUTPUT_DIR"; + /** + * Output directory where Mill workers' state and Mill tasks' output should be + * written to for the Mill instances running in BSP mode. + */ + public static final String MILL_BSP_OUTPUT_DIR = "MILL_BSP_OUTPUT_DIR"; + /** * If set to "1", Mill will re-use the regular @{Link OutFiles#out} folder instead of * using a separate one for BSP output. diff --git a/core/constants/src/mill/constants/OutFiles.java b/core/constants/src/mill/constants/OutFiles.java index 89ede26e9e42..64c795b0e4fa 100644 --- a/core/constants/src/mill/constants/OutFiles.java +++ b/core/constants/src/mill/constants/OutFiles.java @@ -12,6 +12,12 @@ public class OutFiles { */ private static final String envOutOrNull = System.getenv(EnvVars.MILL_OUTPUT_DIR); + /** + * Allows us to override the `out/mill-bsp-out` folder from the environment via the + * {@link EnvVars#MILL_BSP_OUTPUT_DIR} variable. + */ + private static final String envBspOutOrNull = System.getenv(EnvVars.MILL_BSP_OUTPUT_DIR); + /** @see EnvVars#MILL_NO_SEPARATE_BSP_OUTPUT_DIR */ public static final boolean mergeBspOut = "1".equals(System.getenv(EnvVars.MILL_NO_SEPARATE_BSP_OUTPUT_DIR)); @@ -28,11 +34,17 @@ public class OutFiles { */ public static final String out = envOutOrNull == null ? defaultOut : envOutOrNull; + /** + * Default hard-coded value for the Mill `out/` folder path when Mill is running in BSP mode. Unless you know + * what you are doing, you should favor using {@link #outFor} instead. + */ + public static final String defaultBspOut = "out/mill-bsp-out"; + /** * Path of the Mill `out/` folder when Mill is running in BSP mode. Unless you know * what you are doing, you should favor using {@link #outFor} instead. */ - public static final String bspOut = "out/mill-bsp-out"; + public static final String bspOut = envBspOutOrNull == null ? defaultBspOut : envBspOutOrNull; /** * Path of the Mill {@link #out} folder. * * @param outMode If {@link #envOutOrNull} is set, this parameter is ignored. 
*/ public static String outFor(OutFolderMode outMode) { - if (envOutOrNull != null) return envOutOrNull; switch (outMode) { case REGULAR: - return out; + return envOutOrNull != null ? envOutOrNull : out; case BSP: - return mergeBspOut ? out : bspOut; + if (envBspOutOrNull != null) return envBspOutOrNull; + return mergeBspOut ? outFor(OutFolderMode.REGULAR) : bspOut; default: throw new IllegalArgumentException("Unknown out folder mode: " + outMode); } diff --git a/core/exec/src/mill/exec/GroupExecution.scala b/core/exec/src/mill/exec/GroupExecution.scala index 8f06b855a655..a97d8c964a38 100644 --- a/core/exec/src/mill/exec/GroupExecution.scala +++ b/core/exec/src/mill/exec/GroupExecution.scala @@ -566,7 +566,7 @@ object GroupExecution { if (path.startsWith(workspace) && !validReadDests.exists(path.startsWith(_))) { sys.error( s"Reading from ${path.relativeTo(workspace)} not allowed during execution of `$terminal`.\n" + - "You can only read files referenced by `Task.Source` or `Task.Sources`, or within a `Task.Input" + "You can only read files referenced by `Task.Source` or `Task.Sources`, or within a `Task.Input`" ) } } diff --git a/example/scalalib/linting/3-acyclic/build.mill b/example/scalalib/linting/3-acyclic/build.mill index a2b03695cdcd..3d0b93adf86f 100644 --- a/example/scalalib/linting/3-acyclic/build.mill +++ b/example/scalalib/linting/3-acyclic/build.mill @@ -19,6 +19,7 @@ import mill.*, scalalib.* object `package` extends ScalaModule { def scalaVersion = "2.13.11" + def semanticDbVersion = "4.9.9" // last version to support this Scala def compileMvnDeps = Seq(mvn"com.lihaoyi:::acyclic:0.3.15") def scalacPluginMvnDeps = Seq(mvn"com.lihaoyi:::acyclic:0.3.15") def scalacOptions = Seq("-P:acyclic:force") diff --git a/example/scalalib/spark/1-hello-spark/build.mill b/example/scalalib/spark/1-hello-spark/build.mill index 2d5b96e88fca..252ac1c91611 100644 --- a/example/scalalib/spark/1-hello-spark/build.mill +++ b/example/scalalib/spark/1-hello-spark/build.mill @@ -3,6 +3,8 @@ import mill.*, scalalib.* object foo extends ScalaModule { def scalaVersion = "2.12.15" + def semanticDbVersion = "4.9.0" // last version to support this Scala + def mvnDeps = Seq( mvn"org.apache.spark::spark-core:3.5.4", mvn"org.apache.spark::spark-sql:3.5.4" diff --git a/example/scalalib/spark/3-semi-realistic/build.mill b/example/scalalib/spark/3-semi-realistic/build.mill index 5bcd51d68101..15de8948dea0 100644 --- a/example/scalalib/spark/3-semi-realistic/build.mill +++ b/example/scalalib/spark/3-semi-realistic/build.mill @@ -3,6 +3,8 @@ import mill.*, scalalib.* object `package` extends ScalaModule { def scalaVersion = "2.12.15" + def semanticDbVersion = "4.9.0" // last version to support this Scala + def mvnDeps = Seq( mvn"org.apache.spark::spark-core:3.5.6", mvn"org.apache.spark::spark-sql:3.5.6" diff --git a/integration/bsp-util/src/BspServerTestUtil.scala b/integration/bsp-util/src/BspServerTestUtil.scala index f834d0223f54..5b40de043394 100644 --- a/integration/bsp-util/src/BspServerTestUtil.scala +++ b/integration/bsp-util/src/BspServerTestUtil.scala @@ -11,6 +11,7 @@ import org.eclipse.lsp4j.jsonrpc.services.JsonRequest import java.io.ByteArrayOutputStream import java.util.concurrent.atomic.AtomicInteger import java.util.concurrent.{CompletableFuture, ExecutorService, Executors, ThreadFactory} +import scala.annotation.unused import scala.jdk.CollectionConverters.* import scala.reflect.ClassTag @@ -226,7 +227,7 @@ object BspServerTestUtil { workspacePath: os.Path, coursierCache: os.Path = 
os.Path(CacheDefaults.location), javaHome: os.Path = os.Path(sys.props("java.home")), - javaVersion: String = sys.props("java.version") + @unused javaVersion: String = sys.props("java.version") ): Seq[(String, String)] = Seq( workspacePath.toURI.toASCIIString.stripSuffix("/") -> "file:///workspace", diff --git a/integration/feature/scoverage/resources/build.mill b/integration/feature/scoverage/resources/build.mill index db0ebca789da..797038148103 100644 --- a/integration/feature/scoverage/resources/build.mill +++ b/integration/feature/scoverage/resources/build.mill @@ -17,7 +17,15 @@ object Deps { object core extends Cross[CoreCross]("2.13.11") trait CoreCross extends CrossScalaModule with ScoverageModule { override def scoverageVersion = "2.0.11" + override def semanticDbVersion = "4.9.9" // last version to support this Scala + + // customized scoverage data + override lazy val scoverage: ScoverageData = new ScoverageData { + override def semanticDbVersion = "4.9.9" // last version to support this Scala + } + object test extends ScoverageTests with TestModule.ScalaTest { + override def semanticDbVersion = "4.9.9" // last version to support this Scala override def mvnDeps = Seq(Deps.scalaTest, Deps.millMain) } } @@ -25,8 +33,11 @@ trait CoreCross extends CrossScalaModule with ScoverageModule { object extra extends ScalaModule with ScoverageModule { override def scoverageVersion = "2.0.11" override def scalaVersion = "2.13.11" + override def semanticDbVersion = "4.9.9" // last version to support this Scala + // customized scoverage data override lazy val scoverage: ScoverageData = new ScoverageData { // some customizations + override def semanticDbVersion = "4.9.9" // last version to support this Scala } } diff --git a/integration/feature/scoverage/src/ScoverageTests.scala b/integration/feature/scoverage/src/ScoverageTests.scala index b6f19364cd2a..7d3c0886ec24 100644 --- a/integration/feature/scoverage/src/ScoverageTests.scala +++ b/integration/feature/scoverage/src/ScoverageTests.scala @@ -9,8 +9,9 @@ object ScoverageTests extends UtestIntegrationTestSuite { test("test") - retry(3) { integrationTest { tester => import tester._ - assert(eval("__.compile").isSuccess) - assert(eval("core[2.13.11].scoverage.xmlReport").isSuccess) + + prepEval("__.compile").runWithClues(r => assert(r.isSuccess)) + prepEval("core[2.13.11].scoverage.xmlReport").runWithClues(r => assert(r.isSuccess)) } } } diff --git a/libs/androidlib/src/mill/androidlib/AndroidModule.scala b/libs/androidlib/src/mill/androidlib/AndroidModule.scala index fa19291a3d3b..c63694824c49 100644 --- a/libs/androidlib/src/mill/androidlib/AndroidModule.scala +++ b/libs/androidlib/src/mill/androidlib/AndroidModule.scala @@ -496,6 +496,7 @@ trait AndroidModule extends JavaModule { outer => .internalWorker() .compileJava( ZincCompileJava( + compileTo = Task.dest, upstreamCompileOutput = upstreamCompileOutput(), sources = androidLibsRClasses().map(_.path), compileClasspath = Seq.empty, @@ -665,6 +666,7 @@ trait AndroidModule extends JavaModule { outer => .internalWorker() .compileJava( ZincCompileJava( + compileTo = Task.dest, upstreamCompileOutput = upstreamCompileOutput(), sources = sources.map(_.path), compileClasspath = androidTransitiveLibRClasspath().map(_.path), diff --git a/libs/daemon/server/src/mill/server/Server.scala b/libs/daemon/server/src/mill/server/Server.scala index 339928980876..b79b0871f39b 100644 --- a/libs/daemon/server/src/mill/server/Server.scala +++ b/libs/daemon/server/src/mill/server/Server.scala @@ -126,147 +126,177 
@@ abstract class Server(args: Server.Args) { afterClose = () => { serverLog("daemonLock released") } - ) { locked => - serverLog("server file locked") - val serverSocket = new java.net.ServerSocket(0, 0, InetAddress.getByName(null)) - Server.watchProcessIdFile( - daemonDir / DaemonFiles.processId, - processId, - running = () => !serverSocket.isClosed, - exit = msg => { - serverLog(s"watchProcessIdFile: $msg") - serverSocket.close() - } + )(whenServerLockAcquired(_, socketPortFile, initialSystemProperties)).getOrElse { + val serverLog = Try(os.read(daemonDir / DaemonFiles.serverLog)).toOption + val processIdLog = Try(os.read(daemonDir / DaemonFiles.processId)).toOption + val socketPort = Try(os.read(daemonDir / DaemonFiles.socketPort)).toOption + val stdout = Try(os.read(daemonDir / DaemonFiles.stdout)).toOption + val stderr = Try(os.read(daemonDir / DaemonFiles.stderr)).toOption + + def render(opt: Option[String]) = opt match { + case Some(s) => "\n" + s.linesIterator.map(" " + _).mkString("\n") + "\n" + case None => "" + } + + throw IllegalStateException( + s"""[${timestampStr()}] Cannot launch Mill server (pid:$processId): + | the daemon lock is already taken by another process. + | + |Currently running server information: + | + | Process ID: ${render(processIdLog)} + | TCP socket port: ${render(socketPort)} + | Daemon STDOUT: ${render(stdout)} + | Daemon STDERR: ${render(stderr)} + | Server log: ${render(serverLog)} + """.stripMargin ) + } + } catch { + case e: Throwable => + serverLog("server loop error: " + e) + serverLog("server loop stack trace: " + e.getStackTrace.mkString("\n")) + throw e + } finally { + serverLog("exiting server") + } + } + + private def whenServerLockAcquired( + locked: AutoCloseable, + socketPortFile: os.Path, + initialSystemProperties: Map[String, String] + ): Unit = { + serverLog("server file locked") + val serverSocket = new java.net.ServerSocket(0, 0, InetAddress.getByName(null)) + Server.watchProcessIdFile( + daemonDir / DaemonFiles.processId, + processId, + running = () => !serverSocket.isClosed, + exit = msg => { + serverLog(s"watchProcessIdFile: $msg") + serverSocket.close() + } + ) - // Wrapper object to encapsulate `activeConnections` and `inactiveTimestampOpt`, - // ensuring they get incremented and decremented together across multiple threads - // and never get out of sync - object connectionTracker { - private var activeConnections = 0 - private var inactiveTimestampOpt: Option[Long] = None + // Wrapper object to encapsulate `activeConnections` and `inactiveTimestampOpt`, + // ensuring they get incremented and decremented together across multiple threads + // and never get out of sync + object connectionTracker { + private var activeConnections = 0 + private var inactiveTimestampOpt: Option[Long] = None - def wrap(t: => Unit): Unit = synchronized { - if (!serverSocket.isClosed) { - t - } - } + def wrap(t: => Unit): Unit = synchronized { + if (!serverSocket.isClosed) { + t + } + } - def increment(): Unit = wrap { - activeConnections += 1 - serverLog(s"$activeConnections active connections") - inactiveTimestampOpt = None - } + def increment(): Unit = wrap { + activeConnections += 1 + serverLog(s"$activeConnections active connections") + inactiveTimestampOpt = None + } - def decrement(): Unit = wrap { - activeConnections -= 1 - serverLog(s"$activeConnections active connections") - if (activeConnections == 0) { - inactiveTimestampOpt = Some(System.currentTimeMillis()) - } - } + def decrement(): Unit = wrap { + activeConnections -= 1 + 
serverLog(s"$activeConnections active connections") + if (activeConnections == 0) { + inactiveTimestampOpt = Some(System.currentTimeMillis()) + } + } - def closeIfTimedOut(): Unit = wrap { - // Explicit matching as we're doing this every 1ms. - acceptTimeoutMillis match { + def closeIfTimedOut(): Unit = wrap { + // Explicit matching as we're doing this every 1ms. + acceptTimeoutMillis match { + case None => // Do nothing + case Some(acceptTimeoutMillis) => + inactiveTimestampOpt match { case None => // Do nothing - case Some(acceptTimeoutMillis) => - inactiveTimestampOpt match { - case None => // Do nothing - case Some(inactiveTimestamp) => - if (System.currentTimeMillis() - inactiveTimestamp > acceptTimeoutMillis) { - serverLog(s"shutting down due inactivity") - serverSocket.close() - } + case Some(inactiveTimestamp) => + if (System.currentTimeMillis() - inactiveTimestamp > acceptTimeoutMillis) { + serverLog(s"shutting down due inactivity") + serverSocket.close() } } - } } + } + } - try { - os.write.over(socketPortFile, serverSocket.getLocalPort.toString) - serverLog("listening on port " + serverSocket.getLocalPort) - - def systemExit(reason: String, exitCode: Int) = { - serverLog( - s"`systemExit` invoked (reason: $reason), shutting down with exit code $exitCode" - ) + try { + os.write.over(socketPortFile, serverSocket.getLocalPort.toString) + serverLog("listening on port " + serverSocket.getLocalPort) - // Explicitly close serverSocket before exiting otherwise it can keep the - // server alive 500-1000ms before letting it exit properly - serverSocket.close() - serverLog("serverSocket closed") + def systemExit(reason: String, exitCode: Int) = { + serverLog( + s"`systemExit` invoked (reason: $reason), shutting down with exit code $exitCode" + ) - // Explicitly release process lock to indicate this server will not be - // taking any more requests, and a new server should be spawned if necessary. - // Otherwise, launchers may continue trying to connect to the server and - // failing since the socket is closed. - locked.close() + // Explicitly close serverSocket before exiting otherwise it can keep the + // server alive 500-1000ms before letting it exit properly + serverSocket.close() + serverLog("serverSocket closed") - sys.exit(exitCode) - } + // Explicitly release process lock to indicate this server will not be + // taking any more requests, and a new server should be spawned if necessary. + // Otherwise, launchers may continue trying to connect to the server and + // failing since the socket is closed. 
+ locked.close() - val timeoutThread = new Thread( - () => { - while (!serverSocket.isClosed) { - Thread.sleep(1) - connectionTracker.closeIfTimedOut() - } - }, - "MillServerTimeoutThread" - ) - timeoutThread.start() + sys.exit(exitCode) + } + val timeoutThread = new Thread( + () => { while (!serverSocket.isClosed) { - val socketOpt = - try Some(serverSocket.accept()) - catch { - case _: java.net.SocketException => None - } + Thread.sleep(1) + connectionTracker.closeIfTimedOut() + } + }, + "MillServerTimeoutThread" + ) + timeoutThread.start() - socketOpt match { - case Some(sock) => - val socketInfo = Server.SocketInfo(sock) - serverLog(s"handling run for $socketInfo") - new Thread( - () => - try { - connectionTracker.increment() - runForSocket( - systemExit, - sock, - socketInfo, - initialSystemProperties, - () => serverSocket.close() - ) - } catch { - case e: Throwable => - serverLog( - s"""$socketInfo error: $e - | - |${e.getStackTrace.mkString("\n")} - |""".stripMargin - ) - } finally { - connectionTracker.decrement() - sock.close() - }, - s"HandleRunThread-$socketInfo" - ).start() - case None => - } + while (!serverSocket.isClosed) { + val socketOpt = + try Some(serverSocket.accept()) + catch { + case _: java.net.SocketException => None } - } finally serverSocket.close() - }.getOrElse(throw new Exception("Mill server process already present")) - } catch { - case e: Throwable => - serverLog("server loop error: " + e) - serverLog("server loop stack trace: " + e.getStackTrace.mkString("\n")) - throw e - } finally { - serverLog("exiting server") - } + socketOpt match { + case Some(sock) => + val socketInfo = Server.SocketInfo(sock) + serverLog(s"handling run for $socketInfo") + new Thread( + () => + try { + connectionTracker.increment() + runForSocket( + systemExit, + sock, + socketInfo, + initialSystemProperties, + () => serverSocket.close() + ) + } catch { + case e: Throwable => + serverLog( + s"""$socketInfo error: $e + | + |${e.getStackTrace.mkString("\n")} + |""".stripMargin + ) + } finally { + connectionTracker.decrement() + sock.close() + }, + s"HandleRunThread-$socketInfo" + ).start() + case None => + } + } + } finally serverSocket.close() } /** @@ -314,6 +344,7 @@ abstract class Server(args: Server.Args) { @volatile var lastClientAlive = true val stopServerFromCheckClientAlive: Server.StopServer = (reason, exitCode) => stopServer("checkClientAlive", reason, exitCode, Some(data)) + def checkClientAlive() = { val result = try checkIfClientAlive(connectionData, stopServerFromCheckClientAlive, data) @@ -420,12 +451,13 @@ abstract class Server(args: Server.Args) { } } } + object Server { /** - * @param daemonDir directory used for exchanging pre-TCP data with a client + * @param daemonDir directory used for exchanging pre-TCP data with a client * @param acceptTimeout shuts down after this timeout if no clients are connected - * @param bufferSize size of the buffer used to read/write from/to the client + * @param bufferSize size of the buffer used to read/write from/to the client */ case class Args( daemonDir: os.Path, @@ -437,11 +469,12 @@ object Server { /** * @param remote the address of the client - * @param local the address of the server + * @param local the address of the server */ case class SocketInfo(remote: SocketAddress, local: SocketAddress) { override def toString: String = s"SocketInfo(remote=$remote, local=$local)" } + object SocketInfo { def apply(socket: Socket): SocketInfo = apply(socket.getRemoteSocketAddress, socket.getLocalSocketAddress) diff --git 
a/libs/javalib/api/src/mill/javalib/api/CompilationResult.scala b/libs/javalib/api/src/mill/javalib/api/CompilationResult.scala index ccf639b3692c..92857bc0ad59 100644 --- a/libs/javalib/api/src/mill/javalib/api/CompilationResult.scala +++ b/libs/javalib/api/src/mill/javalib/api/CompilationResult.scala @@ -1,10 +1,19 @@ package mill.javalib.api +import com.lihaoyi.unroll import mill.api.PathRef -import mill.api.JsonFormatters._ +import mill.api.JsonFormatters.* -// analysisFile is represented by os.Path, so we won't break caches after file changes -case class CompilationResult(analysisFile: os.Path, classes: PathRef) +/** + * @param analysisFile represented by os.Path, so we won't break caches after file changes + * @param classes path to the compilation classes + * @param semanticDbFiles path to semanticdb files, if they were produced + */ +case class CompilationResult( + analysisFile: os.Path, + classes: PathRef, + @unroll semanticDbFiles: Option[PathRef] = None +) object CompilationResult { implicit val jsonFormatter: upickle.ReadWriter[CompilationResult] = diff --git a/libs/javalib/api/src/mill/javalib/api/JvmWorkerApi.scala b/libs/javalib/api/src/mill/javalib/api/JvmWorkerApi.scala index 806cfc46e659..d4d804af2350 100644 --- a/libs/javalib/api/src/mill/javalib/api/JvmWorkerApi.scala +++ b/libs/javalib/api/src/mill/javalib/api/JvmWorkerApi.scala @@ -7,8 +7,6 @@ object JvmWorkerApi { type Ctx = mill.api.TaskCtx.Dest & mill.api.TaskCtx.Log & mill.api.TaskCtx.Env } trait JvmWorkerApi { - - /** Compile a Java-only project */ def compileJava( upstreamCompileOutput: Seq[CompilationResult], sources: Seq[os.Path], @@ -20,7 +18,6 @@ trait JvmWorkerApi { incrementalCompilation: Boolean )(using ctx: JvmWorkerApi.Ctx): mill.api.Result[CompilationResult] - /** Compile a mixed Scala/Java or Scala-only project */ def compileMixed( upstreamCompileOutput: Seq[CompilationResult], sources: Seq[os.Path], @@ -38,7 +35,6 @@ trait JvmWorkerApi { auxiliaryClassFileExtensions: Seq[String] )(using ctx: JvmWorkerApi.Ctx): mill.api.Result[CompilationResult] - /** Compiles a Scaladoc jar. 
*/ def docJar( scalaVersion: String, scalaOrganization: String, diff --git a/libs/javalib/api/src/mill/javalib/api/internal/JvmWorkerApi.scala b/libs/javalib/api/src/mill/javalib/api/internal/JvmWorkerApi.scala index 8b4d41d536a9..719087552b9f 100644 --- a/libs/javalib/api/src/mill/javalib/api/internal/JvmWorkerApi.scala +++ b/libs/javalib/api/src/mill/javalib/api/internal/JvmWorkerApi.scala @@ -4,8 +4,6 @@ import mill.api.{PathRef, Result} import mill.api.daemon.internal.CompileProblemReporter import mill.javalib.api.CompilationResult import mill.javalib.api.JvmWorkerApi as PublicJvmWorkerApi -import mill.javalib.api.JvmWorkerApi.Ctx -import os.Path trait JvmWorkerApi extends PublicJvmWorkerApi { @@ -36,17 +34,18 @@ trait JvmWorkerApi extends PublicJvmWorkerApi { // public API forwarder override def compileJava( upstreamCompileOutput: Seq[CompilationResult], - sources: Seq[Path], - compileClasspath: Seq[Path], - javaHome: Option[Path], + sources: Seq[os.Path], + compileClasspath: Seq[os.Path], + javaHome: Option[os.Path], javacOptions: Seq[String], reporter: Option[CompileProblemReporter], reportCachedProblems: Boolean, incrementalCompilation: Boolean - )(using ctx: Ctx): Result[CompilationResult] = { + )(using ctx: PublicJvmWorkerApi.Ctx): Result[CompilationResult] = { val jOpts = JavaCompilerOptions(javacOptions) compileJava( ZincCompileJava( + compileTo = ctx.dest, upstreamCompileOutput = upstreamCompileOutput, sources = sources, compileClasspath = compileClasspath, @@ -63,9 +62,9 @@ trait JvmWorkerApi extends PublicJvmWorkerApi { // public API forwarder override def compileMixed( upstreamCompileOutput: Seq[CompilationResult], - sources: Seq[Path], - compileClasspath: Seq[Path], - javaHome: Option[Path], + sources: Seq[os.Path], + compileClasspath: Seq[os.Path], + javaHome: Option[os.Path], javacOptions: Seq[String], scalaVersion: String, scalaOrganization: String, @@ -76,10 +75,11 @@ trait JvmWorkerApi extends PublicJvmWorkerApi { reportCachedProblems: Boolean, incrementalCompilation: Boolean, auxiliaryClassFileExtensions: Seq[String] - )(using ctx: Ctx): Result[CompilationResult] = { + )(using ctx: PublicJvmWorkerApi.Ctx): Result[CompilationResult] = { val jOpts = JavaCompilerOptions(javacOptions) compileMixed( ZincCompileMixed( + compileTo = ctx.dest, upstreamCompileOutput = upstreamCompileOutput, sources = sources, compileClasspath = compileClasspath, @@ -105,9 +105,9 @@ trait JvmWorkerApi extends PublicJvmWorkerApi { scalaOrganization: String, compilerClasspath: Seq[PathRef], scalacPluginClasspath: Seq[PathRef], - javaHome: Option[Path], + javaHome: Option[os.Path], args: Seq[String] - )(using ctx: Ctx): Boolean = { + )(using ctx: PublicJvmWorkerApi.Ctx): Boolean = { scaladocJar( ZincScaladocJar( scalaVersion = scalaVersion, diff --git a/libs/javalib/api/src/mill/javalib/api/internal/zinc_operations.scala b/libs/javalib/api/src/mill/javalib/api/internal/zinc_operations.scala index be2e95bd51e7..88890b8e136a 100644 --- a/libs/javalib/api/src/mill/javalib/api/internal/zinc_operations.scala +++ b/libs/javalib/api/src/mill/javalib/api/internal/zinc_operations.scala @@ -6,6 +6,7 @@ import mill.api.JsonFormatters.* /** Compiles Java-only sources. */ case class ZincCompileJava( + compileTo: os.Path, upstreamCompileOutput: Seq[CompilationResult], sources: Seq[os.Path], compileClasspath: Seq[os.Path], @@ -15,6 +16,7 @@ case class ZincCompileJava( /** Compiles Java and Scala sources. 
*/ case class ZincCompileMixed( + compileTo: os.Path, upstreamCompileOutput: Seq[CompilationResult], sources: Seq[os.Path], compileClasspath: Seq[os.Path], diff --git a/libs/javalib/package.mill b/libs/javalib/package.mill index e9b8baf0b020..9ecf91f93b01 100644 --- a/libs/javalib/package.mill +++ b/libs/javalib/package.mill @@ -1,5 +1,11 @@ package build.libs.javalib -import com.github.lolgab.mill.mima.{Problem, ProblemFilter} + +import com.github.lolgab.mill.mima.{ + DirectMissingMethodProblem, + MissingClassProblem, + Problem, + ProblemFilter +} import scala.util.Properties import scala.util.chaining.* @@ -52,7 +58,12 @@ object `package` extends MillStableScalaModule { // This was `private[mill]`, package private doesn't have the JVM bytecode equivalent, so mima can't check it. ProblemFilter.exclude[Problem]("mill.javalib.PublishModule.checkSonatypeCreds"), ProblemFilter.exclude[Problem]("mill.javalib.publish.SonatypeHelpers.getArtifactMappings"), - ProblemFilter.exclude[Problem]("mill.javalib.publish.PublishInfo.parseFromFile") + ProblemFilter.exclude[Problem]("mill.javalib.publish.PublishInfo.parseFromFile"), + ProblemFilter.exclude[DirectMissingMethodProblem]( + "mill.javalib.JavaModule.resolveRelativeToOut$default$2$" + ), + ProblemFilter.exclude[MissingClassProblem]("mill.javalib.CompileFor"), + ProblemFilter.exclude[MissingClassProblem]("mill.javalib.CompileFor$") ) object backgroundwrapper extends MillPublishJavaModule with MillJavaModule { @@ -119,6 +130,12 @@ object `package` extends MillStableScalaModule { BuildInfo.Value("pmdVersion", Deps.RuntimeDeps.pmdDist.version), BuildInfo.Value("comLihaoyiSourcecodeVersion", Deps.sourcecode.version) ) + + override def mimaBinaryIssueFilters: T[Seq[ProblemFilter]] = + super.mimaBinaryIssueFilters() ++ Seq( + // Ignore internal namespace. + ProblemFilter.exclude[Problem]("mill.javalib.api.internal.*") + ) } object worker extends MillPublishScalaModule with BuildInfo { diff --git a/libs/javalib/src/mill/javalib/CompileFor.scala b/libs/javalib/src/mill/javalib/CompileFor.scala deleted file mode 100644 index 24f6cc7d44a3..000000000000 --- a/libs/javalib/src/mill/javalib/CompileFor.scala +++ /dev/null @@ -1,10 +0,0 @@ -package mill.javalib - -private[mill] enum CompileFor { - - /** This is a regular compilation, for example for `compile`. */ - case Regular - - /** This is a compilation for SemanticDB, for example for `semanticDbData`. */ - case SemanticDb -} diff --git a/libs/javalib/src/mill/javalib/JavaModule.scala b/libs/javalib/src/mill/javalib/JavaModule.scala index 4455f003674a..f5fd06b5f2bc 100644 --- a/libs/javalib/src/mill/javalib/JavaModule.scala +++ b/libs/javalib/src/mill/javalib/JavaModule.scala @@ -21,7 +21,6 @@ import mill.javalib.* import mill.api.daemon.internal.idea.GenIdeaInternalApi import mill.api.{DefaultTaskModule, ModuleRef, PathRef, Segment, Task, TaskCtx} import mill.javalib.api.CompilationResult -import mill.javalib.api.internal.{JavaCompilerOptions, ZincCompileJava} import mill.javalib.bsp.{BspJavaModule, BspModule} import mill.javalib.internal.ModuleUtils import mill.javalib.publish.Artifact @@ -728,10 +727,8 @@ trait JavaModule Seq(internalDependenciesRepository()) } - /** - * The upstream compilation output of all this module's upstream modules - */ - def upstreamCompileOutput: T[Seq[CompilationResult]] = Task { + /** See [[SemanticDbJavaModule.upstreamCompileOutput]] for documentation. 
*/ + override def upstreamCompileOutput: T[Seq[CompilationResult]] = Task { Task.traverse(transitiveModuleCompileModuleDeps)(_.compile)() } @@ -753,19 +750,11 @@ trait JavaModule * The transitive version of [[compileClasspath]] */ def transitiveCompileClasspath: T[Seq[PathRef]] = Task { - transitiveCompileClasspathTask(CompileFor.Regular)() + Task.traverse(transitiveModuleCompileModuleDeps)(m => + Task.Anon(m.localCompileClasspath() :+ m.compile().classes) + )().flatten } - /** - * The transitive version of [[compileClasspathTask]] - */ - private[mill] def transitiveCompileClasspathTask(compileFor: CompileFor): Task[Seq[PathRef]] = - Task.Anon { - Task.traverse(transitiveModuleCompileModuleDeps)(m => - Task.Anon { m.localCompileClasspath() ++ Seq(m.compileFor(compileFor)().classes) } - )().flatten - } - /** * Same as [[transitiveCompileClasspath]], but with all dependencies on [[compile]] * replaced by their non-compiling [[bspCompileClassesPath]] variants. @@ -826,13 +815,7 @@ trait JavaModule */ def generatedSources: T[Seq[PathRef]] = Task { Seq.empty[PathRef] } - /** - * Path to sources generated as part of the `compile` step, eg. by Java annotation - * processors which often generate source code alongside classfiles during compilation. - * - * Typically these do not need to be compiled again, and are only used by IDEs - */ - def compileGeneratedSources: T[os.Path] = Task(persistent = true) { Task.dest } + override def compileGeneratedSources: T[os.Path] = Task(persistent = true) { Task.dest } /** * The folders containing all source files fed into the compiler @@ -861,57 +844,14 @@ trait JavaModule true } - /** - * Compiles the current module to generate compiled classfiles/bytecode. - * - * When you override this, you probably also want/need to override [[bspCompileClassesPath]], - * as that needs to point to the same compilation output path. - * - * Keep in sync with [[bspCompileClassesPath]] - */ - def compile: T[mill.javalib.api.CompilationResult] = Task(persistent = true) { - // Prepare an empty `compileGeneratedSources` folder for java annotation processors - // to write generated sources into, that can then be picked up by IDEs like IntelliJ - val compileGenSources = compileGeneratedSources() - mill.api.BuildCtx.withFilesystemCheckerDisabled { - os.remove.all(compileGenSources) - os.makeDir.all(compileGenSources) - } - - val jOpts = JavaCompilerOptions(Seq( - "-s", - compileGenSources.toString - ) ++ javacOptions() ++ mandatoryJavacOptions()) - - jvmWorker() - .internalWorker() - .compileJava( - ZincCompileJava( - upstreamCompileOutput = upstreamCompileOutput(), - sources = allSourceFiles().map(_.path), - compileClasspath = compileClasspath().map(_.path), - javacOptions = jOpts.compiler, - incrementalCompilation = zincIncrementalCompilation() - ), - javaHome = javaHome().map(_.path), - javaRuntimeOptions = jOpts.runtime, - reporter = Task.reporter.apply(hashCode), - reportCachedProblems = zincReportCachedProblems() - ) + override def compile: T[mill.javalib.api.CompilationResult] = Task(persistent = true) { + SemanticDbJavaModule.compile(this)() } - /** Resolves paths relative to the `out` folder. */ - @internal - private[mill] def resolveRelativeToOut( - task: Task.Named[?], - mkPath: os.SubPath => os.SubPath = identity - ): UnresolvedPath.DestPath = - UnresolvedPath.DestPath(mkPath(os.sub), task.ctx.segments) - /** The path where the compiled classes produced by [[compile]] are stored. 
*/ @internal private[mill] def compileClassesPath: UnresolvedPath.DestPath = - resolveRelativeToOut(compile, _ / "classes") + UnresolvedPath.resolveRelativeToOut(compile, _ / "classes") /** * The path to the compiled classes by [[compile]] without forcing to actually run the compilation. @@ -936,14 +876,15 @@ trait JavaModule needsToMergeResourcesIntoCompileDest: Boolean ): Task[UnresolvedPath] = Task.Anon { - if (needsToMergeResourcesIntoCompileDest) resolveRelativeToOut(bspBuildTargetCompileMerged) + if (needsToMergeResourcesIntoCompileDest) + UnresolvedPath.resolveRelativeToOut(bspBuildTargetCompileMerged) else compileClassesPath } /** * The part of the [[localClasspath]] which is available "after compilation". * - * Keep in sync with [[bspLocalRunClasspath]] + * Keep the return value in sync with [[bspLocalRunClasspath]] */ override def localRunClasspath: T[Seq[PathRef]] = Task { super.localRunClasspath() ++ resources() ++ Seq(compile().classes) @@ -992,21 +933,15 @@ trait JavaModule bspLocalRunClasspath(needsToMergeResourcesIntoCompileDest)() } - /** - * [[compileClasspathTask]] for regular compilations. - * - * Keep return value in sync with [[bspCompileClasspath]]. - */ - def compileClasspath: T[Seq[PathRef]] = Task { compileClasspathTask(CompileFor.Regular)() } - /** * All classfiles and resources from upstream modules and dependencies * necessary to compile this module. + * + * Keep return value in sync with [[bspCompileClasspath]]. */ - override private[mill] def compileClasspathTask(compileFor: CompileFor): Task[Seq[PathRef]] = - Task.Anon { - resolvedMvnDeps() ++ transitiveCompileClasspathTask(compileFor)() ++ localCompileClasspath() - } + def compileClasspath: T[Seq[PathRef]] = Task { + resolvedMvnDeps() ++ transitiveCompileClasspath() ++ localCompileClasspath() + } /** * Same as [[compileClasspath]], but does not trigger compilation targets, if possible. diff --git a/libs/javalib/src/mill/javalib/JvmWorkerModule.scala b/libs/javalib/src/mill/javalib/JvmWorkerModule.scala index 1dd0a075e0bc..18b1a6a629b9 100644 --- a/libs/javalib/src/mill/javalib/JvmWorkerModule.scala +++ b/libs/javalib/src/mill/javalib/JvmWorkerModule.scala @@ -48,6 +48,7 @@ trait JvmWorkerModule extends OfflineSupportModule with CoursierModule { /** Whether Zinc debug logging is enabled. 
*/ def zincLogDebug: T[Boolean] = Task.Input(Task.ctx().log.debugEnabled) + @deprecated("Public API of JvmWorkerApi is deprecated.", "1.0.7") def worker: Worker[JvmWorkerApi] = internalWorker private[mill] def internalWorker: Worker[InternalJvmWorkerApi] = Task.Worker { diff --git a/libs/javalib/src/mill/javalib/SemanticDbJavaModule.scala b/libs/javalib/src/mill/javalib/SemanticDbJavaModule.scala index 5ee87af6d17a..4e13565942f5 100644 --- a/libs/javalib/src/mill/javalib/SemanticDbJavaModule.scala +++ b/libs/javalib/src/mill/javalib/SemanticDbJavaModule.scala @@ -1,16 +1,16 @@ package mill.javalib -import mill.api.{BuildCtx, Discover, ExternalModule, ModuleRef, PathRef, Result, experimental} +import mill.api.* import mill.api.daemon.internal.SemanticDbJavaModuleApi +import mill.api.daemon.internal.bsp.BspBuildTarget import mill.constants.CodeGenConstants -import mill.util.BuildInfo +import mill.javalib.api.internal.{JavaCompilerOptions, ZincCompileJava} import mill.javalib.api.{CompilationResult, JvmWorkerUtil} -import mill.util.Version +import mill.util.{BuildInfo, Version} import mill.{T, Task} +import java.nio.file.NoSuchFileException import scala.jdk.CollectionConverters.* -import mill.api.daemon.internal.bsp.BspBuildTarget -import mill.javalib.api.internal.{JavaCompilerOptions, ZincCompileJava} @experimental trait SemanticDbJavaModule extends CoursierModule with SemanticDbJavaModuleApi @@ -18,6 +18,13 @@ trait SemanticDbJavaModule extends CoursierModule with SemanticDbJavaModuleApi def jvmWorker: ModuleRef[JvmWorkerModule] + /** + * The upstream compilation output of all this module's upstream modules + * + * @note the implementation is in [[JavaModule.upstreamCompileOutput]] for binary compatibility reasons. + */ + def upstreamCompileOutput: T[Seq[CompilationResult]] + def upstreamSemanticDbDatas: Task[Seq[SemanticDbJavaModule.SemanticDbData]] = Task.sequence(transitiveModuleCompileModuleDeps.map(_.semanticDbDataDetailed)) @@ -25,20 +32,94 @@ trait SemanticDbJavaModule extends CoursierModule with SemanticDbJavaModuleApi def zincReportCachedProblems: T[Boolean] def zincIncrementalCompilation: T[Boolean] def allSourceFiles: T[Seq[PathRef]] - def compile: T[mill.javalib.api.CompilationResult] - private[mill] def compileFor(compileFor: CompileFor): Task[mill.javalib.api.CompilationResult] = - compileFor match { - case CompileFor.Regular => compile - case CompileFor.SemanticDb => semanticDbDataDetailed.map(_.compilationResult) + /** + * Compiles the current module to generate compiled classfiles/bytecode. + * + * When you override this, you probably also want/need to override [[JavaModule.bspCompileClassesPath]], + * as that needs to point to the same compilation output path. + * + * Keep the paths in sync with [[JavaModule.bspCompileClassesPath]]. + * + * @note the implementation is in [[JavaModule.compile]] for binary compatibility reasons. 
+ */ + def compile: Task.Simple[mill.javalib.api.CompilationResult] + + private[mill] def compileInternal(compileSemanticDb: Boolean) = { + val (semanticDbJavacOptionsTask, semanticDbJavaPluginMvnDepsTask) = + if (compileSemanticDb) ( + Task.Anon { SemanticDbJavaModule.javacOptionsTask(semanticDbJavaVersion()) }, + resolvedSemanticDbJavaPluginMvnDeps + ) + else (Task.Anon(Seq.empty), Task.Anon(Seq.empty)) + + Task.Anon { + // Prepare an empty `compileGeneratedSources` folder for java annotation processors + // to write generated sources into, that can then be picked up by IDEs like IntelliJ + val compileGenSources = compileGeneratedSources() + mill.api.BuildCtx.withFilesystemCheckerDisabled { + os.remove.all(compileGenSources) + os.makeDir.all(compileGenSources) + } + + val jOpts = JavaCompilerOptions(Seq( + "-s", + compileGeneratedSources().toString + ) ++ javacOptions() ++ mandatoryJavacOptions() ++ semanticDbJavacOptionsTask()) + + val sources = allSourceFiles().map(_.path) + + val compileJavaOp = ZincCompileJava( + compileTo = Task.dest, + upstreamCompileOutput = upstreamCompileOutput(), + sources = sources, + compileClasspath = (compileClasspath() ++ semanticDbJavaPluginMvnDepsTask()).map(_.path), + javacOptions = jOpts.compiler, + incrementalCompilation = zincIncrementalCompilation() + ) + + Task.log.debug(s"compiling to: ${compileJavaOp.compileTo}") + Task.log.debug(s"semantic db enabled: $compileSemanticDb") + Task.log.debug(s"effective javac options: ${jOpts.compiler}") + Task.log.debug(s"effective java runtime options: ${jOpts.runtime}") + + val compileJavaResult = jvmWorker() + .internalWorker() + .compileJava( + compileJavaOp, + javaHome = javaHome().map(_.path), + javaRuntimeOptions = jOpts.runtime, + reporter = Task.reporter.apply(hashCode), + reportCachedProblems = zincReportCachedProblems() + ) + + compileJavaResult.map { compilationResult => + if (compileSemanticDb) SemanticDbJavaModule.enhanceCompilationResultWithSemanticDb( + compileTo = compileJavaOp.compileTo, + sources = sources, + workerClasspath = SemanticDbJavaModule.workerClasspath().map(_.path), + compilationResult = compilationResult + ) + else compilationResult + } } + } + + /** + * Path to sources generated as part of the `compile` step, e.g. by Java annotation + * processors which often generate source code alongside classfiles during compilation. + * + * Typically, these do not need to be compiled again, and are only used by IDEs + */ + def compileGeneratedSources: T[os.Path] private[mill] def bspBuildTarget: BspBuildTarget def javacOptions: T[Seq[String]] def mandatoryJavacOptions: T[Seq[String]] - private[mill] def compileClasspathTask(compileFor: CompileFor): Task[Seq[PathRef]] + def compileClasspath: Task[Seq[PathRef]] def moduleDeps: Seq[JavaModule] + /** The version of SemanticDB plugin. 
*/ def semanticDbVersion: T[String] = Task.Input { val builtin = SemanticDbJavaModuleApi.buildTimeSemanticDbVersion val requested = Task.env.getOrElse[String]( @@ -48,7 +129,7 @@ trait SemanticDbJavaModule extends CoursierModule with SemanticDbJavaModuleApi Version.chooseNewest(requested, builtin)(using Version.IgnoreQualifierOrdering) } - def semanticDbJavaVersion: T[String] = Task.Input { + def semanticDbJavaVersion: Task.Simple[String] = Task.Input { val builtin = SemanticDbJavaModuleApi.buildTimeJavaSemanticDbVersion val requested = Task.env.getOrElse[String]( "JAVASEMANTICDB_VERSION", @@ -59,6 +140,14 @@ trait SemanticDbJavaModule extends CoursierModule with SemanticDbJavaModuleApi def semanticDbScalaVersion: T[String] = BuildInfo.scalaVersion + private def semanticDbScalaArtifactOrganization: String = "org.scalameta" + + private def semanticDbScalaArtifactName(scalaVersion: String): String = + s"semanticdb-scalac_$scalaVersion" + + private def semanticDbScalaArtifact(scalaVersion: String, semanticDbVersion: String): Dep = + mvn"$semanticDbScalaArtifactOrganization:${semanticDbScalaArtifactName(scalaVersion)}:$semanticDbVersion" + protected def semanticDbPluginMvnDeps: T[Seq[Dep]] = Task { val sv = semanticDbScalaVersion() val semDbVersion = semanticDbVersion() @@ -73,9 +162,7 @@ trait SemanticDbJavaModule extends CoursierModule with SemanticDbJavaModuleApi } else if (JvmWorkerUtil.isScala3(sv)) { Seq.empty[Dep] } else { - Seq( - mvn"org.scalameta:semanticdb-scalac_${sv}:${semDbVersion}" - ) + Seq(semanticDbScalaArtifact(sv, semDbVersion)) } } @@ -99,67 +186,89 @@ trait SemanticDbJavaModule extends CoursierModule with SemanticDbJavaModuleApi /** * Scalac options to activate the compiler plugins. */ - protected def semanticDbEnablePluginScalacOptions: T[Seq[String]] = Task { - val resolvedJars = defaultResolver().classpath( - semanticDbPluginMvnDeps().map(_.exclude("*" -> "*")) - ) + protected def semanticDbEnablePluginScalacOptions: Task.Simple[Seq[String]] = Task { + val resolvedJars = + try { + defaultResolver().classpath( + semanticDbPluginMvnDeps().map(_.exclude("*" -> "*")) + ) + } catch { + case t + if t.getMessage.contains( + s"$semanticDbScalaArtifactOrganization:${semanticDbScalaArtifactName("")}" + ) => + Task.log.error( + s"""!!! It seems that your SemanticDB version is not compatible with your Scala version !!! + | + |Specify the version that is compatible with your scala version in your build: + | + | ``` + | object myScalaApp extends ScalaModule { + | def semanticDbVersion = "" + | } + | ``` + | + |One option to find the last SemanticDB version that is compatible with your Scala version is to visit + |https://mvnrepository.com/artifact/org.scalameta/semanticdb-scalac and find the value in "Version" + |column that has your Scala version next to it in the "Scala" column. 
+ |""".stripMargin + ) + throw t + } resolvedJars.iterator.map(jar => s"-Xplugin:${jar.path}").toSeq } - protected def semanticDbPluginClasspath: T[Seq[PathRef]] = Task { + protected def semanticDbPluginClasspath: Task.Simple[Seq[PathRef]] = Task { defaultResolver().classpath(semanticDbPluginMvnDeps()) } - protected def resolvedSemanticDbJavaPluginMvnDeps: T[Seq[PathRef]] = Task { + protected def resolvedSemanticDbJavaPluginMvnDeps: Task.Simple[Seq[PathRef]] = Task { defaultResolver().classpath(semanticDbJavaPluginMvnDeps()) } - def semanticDbDataDetailed: T[SemanticDbJavaModule.SemanticDbData] = Task(persistent = true) { - val javacOpts = SemanticDbJavaModule.javacOptionsTask( - javacOptions() ++ mandatoryJavacOptions(), - semanticDbJavaVersion() - ) + /** + * Initializes the filesystem watcher for the semanticdb sessions directory. + * + * This is `lazy val` because we don't want to initialize the watcher until it's actually needed. + */ + private lazy val semanticDbSessionsDirWatch: os.Path = + BuildCtx.watch(BuildCtx.bspSemanticDbSessionsFolder) - Task.log.debug(s"effective javac options: ${javacOpts}") - - val jOpts = JavaCompilerOptions(javacOpts) - - jvmWorker().internalWorker() - .compileJava( - ZincCompileJava( - upstreamCompileOutput = upstreamSemanticDbDatas().map(_.compilationResult), - sources = allSourceFiles().map(_.path), - compileClasspath = - (compileClasspathTask( - CompileFor.SemanticDb - )() ++ resolvedSemanticDbJavaPluginMvnDeps()).map( - _.path - ), - javacOptions = jOpts.compiler, - incrementalCompilation = zincIncrementalCompilation() - ), - javaHome = javaHome().map(_.path), - javaRuntimeOptions = jOpts.runtime, - reporter = Task.reporter.apply(hashCode), - reportCachedProblems = zincReportCachedProblems() - ) - .map { compilationResult => - val semanticDbFiles = BuildCtx.withFilesystemCheckerDisabled { - SemanticDbJavaModule.copySemanticdbFiles( - compilationResult.classes.path, - BuildCtx.workspaceRoot, - Task.dest / "data", - SemanticDbJavaModule.workerClasspath().map(_.path), - allSourceFiles().map(_.path) - ) + /** + * Returns true if the semanticdb will be needed by the BSP client or any of the other Mill daemons that are using + * the same `out/` directory. + * + * @note if this value changes the whole module will be reinstantiated. + */ + private[mill] lazy val bspAnyClientNeedsSemanticDb: Boolean = { + // Allows accessing files outside of normal task scope. + BuildCtx.withFilesystemCheckerDisabled { + val directory = semanticDbSessionsDirWatch + + val bspHasClientsThatNeedSemanticDb = + try { + os.list(directory).exists { path => + // Check if the sessions are not stale. + val maybePid = path.last.toLongOption + maybePid match { + case None => false // malformatted pid + case Some(pid) => ProcessHandle.of(pid).isPresent + } + } + } catch { + case _: NoSuchFileException => false } - SemanticDbJavaModule.SemanticDbData(compilationResult, semanticDbFiles) - } + bspHasClientsThatNeedSemanticDb + } + } + + def semanticDbDataDetailed: T[SemanticDbJavaModule.SemanticDbData] = Task { + SemanticDbJavaModule.semanticDbDataDetailed(this)() } def semanticDbData: T[PathRef] = Task { - semanticDbDataDetailed().semanticDbFiles + SemanticDbJavaModule.semanticDbData(this)() } /** @@ -218,20 +327,59 @@ object SemanticDbJavaModule extends ExternalModule with CoursierModule { semanticDbFiles: PathRef ) derives upickle.ReadWriter + /** @note extracted code to be invoked from multiple places for binary compatibility reasons. 
*/ + private[mill] def compile(mod: SemanticDbJavaModule): Task[mill.javalib.api.CompilationResult] = { + mod.compileInternal(mod.bspAnyClientNeedsSemanticDb) + } + + /** @note extracted code to be invoked from multiple places for binary compatibility reasons. */ + private[mill] def semanticDbDataDetailed(mod: SemanticDbJavaModule): Task[SemanticDbData] = { + + /** + * If any of the clients needs semanticdb, regular [[compile]] will produce that, so let's reuse the tasks output + * to save resources. + */ + val task = + if (mod.bspAnyClientNeedsSemanticDb) mod.compile + else mod.compileInternal(compileSemanticDb = true) + + Task.Anon { + val compilationResult = task() + val semanticDbData = + compilationResult.semanticDbFiles.getOrElse(throw IllegalStateException( + "SemanticDB files were not produced, this is a bug in Mill." + )) + SemanticDbData(compilationResult, semanticDbData) + } + } + + /** @note extracted code to be invoked from multiple places for binary compatibility reasons. */ + private[mill] def semanticDbData(mod: SemanticDbJavaModule): Task[PathRef] = Task.Anon { + mod.semanticDbDataDetailed().semanticDbFiles + } + private[mill] def workerClasspath: T[Seq[PathRef]] = Task { defaultResolver().classpath(Seq( Dep.millProjectModule("mill-libs-javalib-scalameta-worker") )) } + /** + * This overload just prepends the given `javacOptions`, so it's kind of pointless, but it's already there, so we + * have to keep it. + */ def javacOptionsTask(javacOptions: Seq[String], semanticDbJavaVersion: String)(using ctx: mill.api.TaskCtx ): Seq[String] = { + javacOptions ++ javacOptionsTask(semanticDbJavaVersion) + } + + def javacOptionsTask(semanticDbJavaVersion: String)(using ctx: mill.api.TaskCtx): Seq[String] = { val isNewEnough = Version.isAtLeast(semanticDbJavaVersion, "0.8.10")(using Version.IgnoreQualifierOrdering) val buildTool = s" -build-tool:${if (isNewEnough) "mill" else "sbt"}" val verbose = if (ctx.log.debugEnabled) " -verbose" else "" - javacOptions ++ Seq( + Seq( s"-Xplugin:semanticdb -sourceroot:${ctx.workspace} -targetroot:${ctx.dest / "classes"}${buildTool}${verbose}" ) } @@ -286,6 +434,25 @@ object SemanticDbJavaModule extends ExternalModule with CoursierModule { } } + private[mill] def enhanceCompilationResultWithSemanticDb( + compileTo: os.Path, + sources: Seq[os.Path], + workerClasspath: Seq[os.Path], + compilationResult: CompilationResult + ): CompilationResult = { + val semanticDbFiles = BuildCtx.withFilesystemCheckerDisabled { + copySemanticdbFiles( + classesDir = compilationResult.classes.path, + sourceroot = BuildCtx.workspaceRoot, + targetDir = compileTo / "semanticdb-data", + workerClasspath = workerClasspath, + sources = sources + ) + } + + compilationResult.copy(semanticDbFiles = Some(semanticDbFiles)) + } + // The semanticdb-javac plugin has issues with the -sourceroot setting, so we correct this on the fly private[mill] def copySemanticdbFiles( classesDir: os.Path, diff --git a/libs/javalib/src/mill/javalib/UnresolvedPath.scala b/libs/javalib/src/mill/javalib/UnresolvedPath.scala index ef59583c4067..fce08d0642c2 100644 --- a/libs/javalib/src/mill/javalib/UnresolvedPath.scala +++ b/libs/javalib/src/mill/javalib/UnresolvedPath.scala @@ -1,19 +1,29 @@ package mill.javalib -import mill.api.daemon.internal.UnresolvedPathApi -import mill.api.{ExecutionPaths, Segment, Segments} +import mill.api.daemon.internal.{UnresolvedPathApi, internal} +import mill.api.{ExecutionPaths, Segment, Segments, Task} import upickle.{ReadWriter, macroRW} /** * An unresolved path is 
relative to some unspecified destination * which depends on the actual configuration at evaluation time. - * Hence, you need to call [[#resolve]] with an instance of - * [[ExecutionPathsResolver]] to get the final [[os.Path]]. + * + * Hence, you need to call [[resolve]] with Mill's 'out/' path (for example from `EvaluatorApi.outPathJava`) to + * get the final [[os.Path]]. */ sealed trait UnresolvedPath extends UnresolvedPathApi[os.Path] { def resolve(outPath: os.Path): os.Path } object UnresolvedPath { + + /** Resolves paths relative to the `out` folder. */ + @internal + private[mill] def resolveRelativeToOut( + task: Task.Named[?], + mkPath: os.SubPath => os.SubPath = identity + ): UnresolvedPath.DestPath = + UnresolvedPath.DestPath(mkPath(os.sub), task.ctx.segments) + case class ResolvedPath private (path: String) extends UnresolvedPath { override def resolve(outPath: os.Path): os.Path = os.Path(path) } diff --git a/libs/javalib/worker/src/mill/javalib/worker/JvmWorkerImpl.scala b/libs/javalib/worker/src/mill/javalib/worker/JvmWorkerImpl.scala index 2cd6e126d0b9..72635524bf64 100644 --- a/libs/javalib/worker/src/mill/javalib/worker/JvmWorkerImpl.scala +++ b/libs/javalib/worker/src/mill/javalib/worker/JvmWorkerImpl.scala @@ -105,7 +105,6 @@ class JvmWorkerImpl(args: JvmWorkerArgs) extends JvmWorkerApi with AutoCloseable val log = ctx.log val zincCtx = ZincWorker.InvocationContext( env = ctx.env, - dest = ctx.dest, logDebugEnabled = log.debugEnabled, logPromptColored = log.prompt.colored, zincLogDebug = zincLogDebug diff --git a/libs/javalib/worker/src/mill/javalib/zinc/ZincWorker.scala b/libs/javalib/worker/src/mill/javalib/zinc/ZincWorker.scala index d0814146fffa..1205465b8069 100644 --- a/libs/javalib/worker/src/mill/javalib/zinc/ZincWorker.scala +++ b/libs/javalib/worker/src/mill/javalib/zinc/ZincWorker.scala @@ -1,6 +1,5 @@ package mill.javalib.zinc -import mill.api.JsonFormatters.* import mill.api.PathRef import mill.api.daemon.internal.CompileProblemReporter import mill.api.daemon.{Logger, Result} @@ -182,6 +181,7 @@ val cacheKey = JavaCompilerCacheKey(javacOptions) javaOnlyCompilerCache.withValue(cacheKey) { compilers => compileInternal( + compileTo = compileTo, upstreamCompileOutput = upstreamCompileOutput, sources = sources, compileClasspath = compileClasspath, @@ -215,6 +215,7 @@ deps.compilerBridge ) { compilers => compileInternal( + compileTo = compileTo, upstreamCompileOutput = upstreamCompileOutput, sources = sources, compileClasspath = compileClasspath, @@ -333,6 +334,7 @@ } private def compileInternal( + compileTo: os.Path, upstreamCompileOutput: Seq[CompilationResult], sources: Seq[os.Path], compileClasspath: Seq[os.Path], @@ -361,9 +363,9 @@ |""".stripMargin ) - os.makeDir.all(ctx.dest) + os.makeDir.all(compileTo) - val classesDir = ctx.dest / "classes" + val classesDir = compileTo / "classes" if (ctx.logDebugEnabled) { deps.log.debug( @@ -417,7 +419,7 @@ val lookup = MockedLookup(analysisMap) - val store = fileAnalysisStore(ctx.dest / zincCache) + val store = fileAnalysisStore(compileTo / zincCache) // Fix jdk classes marked as binary dependencies, see https://github.com/com-lihaoyi/mill/pull/1904 val converter = MappedFileConverter.empty @@ -516,7 +518,7 @@ newResult.setup() ) ) - Result.Success(CompilationResult(ctx.dest / zincCache, PathRef(classesDir))) + Result.Success(CompilationResult(compileTo / zincCache, PathRef(classesDir))) } catch { case e: 
CompileFailed => Result.Failure(e.toString) @@ -605,7 +607,6 @@ object ZincWorker { /** The invocation context, always comes from the Mill's process. */ case class InvocationContext( env: Map[String, String], - dest: os.Path, logDebugEnabled: Boolean, logPromptColored: Boolean, zincLogDebug: Boolean diff --git a/libs/kotlinlib/src/mill/kotlinlib/KotlinModule.scala b/libs/kotlinlib/src/mill/kotlinlib/KotlinModule.scala index 8ce82ef9fc8c..dd02c9701212 100644 --- a/libs/kotlinlib/src/mill/kotlinlib/KotlinModule.scala +++ b/libs/kotlinlib/src/mill/kotlinlib/KotlinModule.scala @@ -438,6 +438,7 @@ trait KotlinModule extends JavaModule with KotlinModuleApi { outer => val jOpts = JavaCompilerOptions(javacOptions) worker.compileJava( ZincCompileJava( + compileTo = Task.dest, upstreamCompileOutput = upstreamCompileOutput, sources = javaSourceFiles, compileClasspath = compileCp, diff --git a/libs/scalajslib/test/src/mill/scalajslib/FullOptESModuleTests.scala b/libs/scalajslib/test/src/mill/scalajslib/FullOptESModuleTests.scala index 462b2f5b1c5e..d21ffe673865 100644 --- a/libs/scalajslib/test/src/mill/scalajslib/FullOptESModuleTests.scala +++ b/libs/scalajslib/test/src/mill/scalajslib/FullOptESModuleTests.scala @@ -12,6 +12,7 @@ object FullOptESModuleTests extends TestSuite { object fullOptESModuleModule extends ScalaJSModule { override def scalaVersion = "2.13.4" + override def semanticDbVersion = "4.8.4" // last compatible version override def scalaJSVersion = "1.7.0" override def moduleKind = ModuleKind.ESModule } diff --git a/libs/scalalib/package.mill b/libs/scalalib/package.mill index acb0ed5daa80..2827bb5cb4e8 100644 --- a/libs/scalalib/package.mill +++ b/libs/scalalib/package.mill @@ -1,12 +1,14 @@ package build.libs.scalalib -import scala.util.Properties -import scala.util.chaining._ +import com.github.lolgab.mill.mima.{DirectMissingMethodProblem, ProblemFilter} + +import scala.util.Properties +import scala.util.chaining.* import coursier.maven.MavenRepository -import mill._ +import mill.* import mill.util.Tasks -import mill.scalalib._ -import mill.scalalib.publish._ +import mill.scalalib.* +import mill.scalalib.publish.* import mill.util.Jvm import mill.api.SelectMode import mill.contrib.buildinfo.BuildInfo @@ -32,4 +34,24 @@ object `package` extends MillStableScalaModule { val locale = if (Properties.isMac) "en_US.UTF-8" else "C.utf8" super.testForkEnv() ++ Map("LC_ALL" -> locale) } + + override def mimaBinaryIssueFilters: T[Seq[ProblemFilter]] = + super.mimaBinaryIssueFilters() ++ Seq( + // This was `private[mill]`, package private doesn't have the JVM bytecode equivalent, so mima can't check it. 
+ ProblemFilter.exclude[DirectMissingMethodProblem]( + "mill.scalalib.scalafmt.ScalafmtWorkerModule.resolveRelativeToOut" + ), + ProblemFilter.exclude[DirectMissingMethodProblem]( + "mill.scalalib.scalafmt.ScalafmtWorkerModule.resolveRelativeToOut$default$2" + ), + ProblemFilter.exclude[DirectMissingMethodProblem]( + "mill.scalalib.scalafmt.ScalafmtWorkerModule.compileClasspathTask" + ), + ProblemFilter.exclude[DirectMissingMethodProblem]( + "mill.scalalib.scalafmt.ScalafmtWorkerModule.compileFor" + ), + ProblemFilter.exclude[DirectMissingMethodProblem]( + "mill.scalalib.scalafmt.ScalafmtWorkerModule.transitiveCompileClasspathTask" + ) + ) } diff --git a/libs/scalalib/src/mill/scalalib/ScalaModule.scala b/libs/scalalib/src/mill/scalalib/ScalaModule.scala index a48eb678e040..be4d293d1182 100644 --- a/libs/scalalib/src/mill/scalalib/ScalaModule.scala +++ b/libs/scalalib/src/mill/scalalib/ScalaModule.scala @@ -1,22 +1,19 @@ package mill package scalalib -import mill.util.JarManifest -import mill.api.{BuildCtx, DummyInputStream, ModuleRef, PathRef, Result, Task} -import mill.util.BuildInfo -import mill.util.Jvm -import mill.javalib.api.{CompilationResult, JvmWorkerUtil, Versions} import mainargs.Flag import mill.api.daemon.internal.bsp.{BspBuildTarget, BspModuleApi, ScalaBuildTarget} import mill.api.daemon.internal.{ScalaModuleApi, ScalaPlatform, internal} -import mill.javalib.dependency.versions.{ValidVersion, Version} -import mill.javalib.{CompileFor, SemanticDbJavaModule} +import mill.api.{BuildCtx, DummyInputStream, ModuleRef, PathRef, Result, Task} +import mill.javalib.SemanticDbJavaModule import mill.javalib.api.internal.{JavaCompilerOptions, ZincCompileMixed, ZincScaladocJar} +import mill.javalib.api.{CompilationResult, JvmWorkerUtil, Versions} +import mill.javalib.dependency.versions.{ValidVersion, Version} +import mill.util.{BuildInfo, JarManifest, Jvm} // this import requires scala-reflect library to be on the classpath // it was duplicated to scala3-compiler, but is that too powerful to add as a dependency? import scala.reflect.internal.util.ScalaClassLoader - import scala.util.Using /** @@ -269,38 +266,97 @@ trait ScalaModule extends JavaModule with TestModule.ScalaModuleBase ) } - // Keep in sync with [[bspCompileClassesPath]] - override def compile: T[CompilationResult] = Task(persistent = true) { - val sv = scalaVersion() - if (sv == "2.12.4") Task.log.warn( - """Attention: Zinc is known to not work properly for Scala version 2.12.4. - |You may want to select another version. Upgrading to a more recent Scala version is recommended. 
- |For details, see: https://github.com/sbt/zinc/issues/1010""".stripMargin - ) + override def compile: T[mill.javalib.api.CompilationResult] = Task(persistent = true) { + SemanticDbJavaModule.compile(this)() + } - val jOpts = JavaCompilerOptions(javacOptions() ++ mandatoryJavacOptions()) - - jvmWorker() - .internalWorker() - .compileMixed( - ZincCompileMixed( - upstreamCompileOutput = upstreamCompileOutput(), - sources = allSourceFiles().map(_.path), - compileClasspath = compileClasspath().map(_.path), - javacOptions = jOpts.compiler, - scalaVersion = sv, - scalaOrganization = scalaOrganization(), - scalacOptions = allScalacOptions(), - compilerClasspath = scalaCompilerClasspath(), - scalacPluginClasspath = scalacPluginClasspath(), - incrementalCompilation = zincIncrementalCompilation(), - auxiliaryClassFileExtensions = zincAuxiliaryClassFileExtensions() - ), - javaHome = javaHome().map(_.path), - javaRuntimeOptions = jOpts.runtime, - reporter = Task.reporter.apply(hashCode), - reportCachedProblems = zincReportCachedProblems() + /** + * Keep the return paths in sync with [[bspCompileClassesPath]]. + */ + override private[mill] def compileInternal(compileSemanticDb: Boolean) = { + val ( + semanticDbJavacOptionsTask, + semanticDbEnablePluginScalacOptionsTask, + semanticDbJavaPluginMvnDepsTask + ) = + if (compileSemanticDb) ( + Task.Anon { SemanticDbJavaModule.javacOptionsTask(semanticDbJavaVersion()) }, + semanticDbEnablePluginScalacOptions, + resolvedSemanticDbJavaPluginMvnDeps + ) + else (Task.Anon(Seq.empty), Task.Anon(Seq.empty), Task.Anon(Seq.empty)) + + Task.Anon { + val sv = scalaVersion() + if (sv == "2.12.4") Task.log.warn( + """Attention: Zinc is known to not work properly for Scala version 2.12.4. + |You may want to select another version. Upgrading to a more recent Scala version is recommended. + |For details, see: https://github.com/sbt/zinc/issues/1010""".stripMargin + ) + + val jOpts = JavaCompilerOptions( + javacOptions() ++ mandatoryJavacOptions() ++ semanticDbJavacOptionsTask() ) + + val scalacOptions = { + def semanticDbOptions = + if (JvmWorkerUtil.isScala3(sv)) + Seq("-Xsemanticdb", s"-sourceroot:${BuildCtx.workspaceRoot}") + else Seq("-Yrangepos", s"-P:semanticdb:sourceroot:${BuildCtx.workspaceRoot}") + + if (compileSemanticDb) { + // Filter out -Xfatal-warnings so that the semanticdb compilation does not fail the build.
+ allScalacOptions().filterNot(_ == "-Xfatal-warnings") ++ + semanticDbEnablePluginScalacOptionsTask() ++ semanticDbOptions + } else allScalacOptions() + } + + val compileClasspath = this.compileClasspath() ++ semanticDbJavaPluginMvnDepsTask() + + val compileTo = Task.dest + + Task.log.debug(s"compiling to: $compileTo") + Task.log.debug(s"semantic db enabled: $compileSemanticDb") + Task.log.debug(s"effective scalac options: $scalacOptions") + Task.log.debug(s"effective javac options: ${jOpts.compiler}") + Task.log.debug(s"effective java runtime options: ${jOpts.runtime}") + + val sources = allSourceFiles().map(_.path) + val compileMixedOp = ZincCompileMixed( + compileTo = compileTo, + upstreamCompileOutput = upstreamCompileOutput(), + sources = sources, + compileClasspath = compileClasspath.map(_.path), + javacOptions = jOpts.compiler, + scalaVersion = sv, + scalaOrganization = scalaOrganization(), + scalacOptions = scalacOptions, + compilerClasspath = scalaCompilerClasspath(), + scalacPluginClasspath = scalacPluginClasspath(), + incrementalCompilation = zincIncrementalCompilation(), + auxiliaryClassFileExtensions = zincAuxiliaryClassFileExtensions() + ) + + val compileMixedResult = jvmWorker() + .internalWorker() + .compileMixed( + compileMixedOp, + javaHome = javaHome().map(_.path), + javaRuntimeOptions = jOpts.runtime, + reporter = Task.reporter.apply(hashCode), + reportCachedProblems = zincReportCachedProblems() + ) + + compileMixedResult.map { compilationResult => + if (compileSemanticDb) SemanticDbJavaModule.enhanceCompilationResultWithSemanticDb( + compileTo = compileMixedOp.compileTo, + sources = sources, + workerClasspath = SemanticDbJavaModule.workerClasspath().map(_.path), + compilationResult = compilationResult + ) + else compilationResult + } + } } override def docSources: T[Seq[PathRef]] = Task { @@ -592,77 +648,19 @@ trait ScalaModule extends JavaModule with TestModule.ScalaModuleBase override def semanticDbScalaVersion: T[String] = scalaVersion() - override protected def semanticDbPluginClasspath = Task { + override protected def semanticDbPluginClasspath: T[Seq[PathRef]] = Task { defaultResolver().classpath( scalacPluginMvnDeps() ++ semanticDbPluginMvnDeps() ) } - override def semanticDbDataDetailed: T[SemanticDbJavaModule.SemanticDbData] = - Task(persistent = true) { - val sv = scalaVersion() - - val additionalScalacOptions = if (JvmWorkerUtil.isScala3(sv)) { - Seq("-Xsemanticdb", s"-sourceroot:${BuildCtx.workspaceRoot}") - } else { - Seq("-Yrangepos", s"-P:semanticdb:sourceroot:${BuildCtx.workspaceRoot}") - } - - val scalacOptions = ( - allScalacOptions() ++ - semanticDbEnablePluginScalacOptions() ++ - additionalScalacOptions - ) - .filterNot(_ == "-Xfatal-warnings") - - val javacOpts = SemanticDbJavaModule.javacOptionsTask(javacOptions(), semanticDbJavaVersion()) - - Task.log.debug(s"effective scalac options: ${scalacOptions}") - Task.log.debug(s"effective javac options: ${javacOpts}") - - val jOpts = JavaCompilerOptions(javacOpts) - - jvmWorker().internalWorker() - .compileMixed( - ZincCompileMixed( - upstreamCompileOutput = upstreamSemanticDbDatas().map(_.compilationResult), - sources = allSourceFiles().map(_.path), - compileClasspath = - (compileClasspathTask( - CompileFor.SemanticDb - )() ++ resolvedSemanticDbJavaPluginMvnDeps()).map(_.path), - javacOptions = jOpts.compiler, - scalaVersion = sv, - scalaOrganization = scalaOrganization(), - scalacOptions = scalacOptions, - compilerClasspath = scalaCompilerClasspath(), - scalacPluginClasspath = 
semanticDbPluginClasspath(), - incrementalCompilation = zincIncrementalCompilation(), - auxiliaryClassFileExtensions = zincAuxiliaryClassFileExtensions() - ), - javaHome = javaHome().map(_.path), - javaRuntimeOptions = jOpts.runtime, - reporter = Task.reporter.apply(hashCode), - reportCachedProblems = zincReportCachedProblems() - ) - .map { compilationResult => - val semanticDbFiles = BuildCtx.withFilesystemCheckerDisabled { - SemanticDbJavaModule.copySemanticdbFiles( - compilationResult.classes.path, - BuildCtx.workspaceRoot, - Task.dest / "data", - SemanticDbJavaModule.workerClasspath().map(_.path), - allSourceFiles().map(_.path) - ) - } - - SemanticDbJavaModule.SemanticDbData(compilationResult, semanticDbFiles) - } - } + // binary compatibility forwarder + override def semanticDbDataDetailed: T[SemanticDbJavaModule.SemanticDbData] = Task { + SemanticDbJavaModule.semanticDbDataDetailed(this)() + } // binary compatibility forwarder - override def semanticDbData: T[PathRef] = - // This is the same as `super.semanticDbData()`, but we can't call it directly - // because then it generates a forwarder which breaks binary compatibility. - Task { semanticDbDataDetailed().semanticDbFiles } + override def semanticDbData: T[PathRef] = Task { + SemanticDbJavaModule.semanticDbData(this)() + } } diff --git a/libs/scalalib/test/src/mill/scalalib/CrossVersionTests.scala b/libs/scalalib/test/src/mill/scalalib/CrossVersionTests.scala index f275de7b2078..2f0f0d9f11df 100644 --- a/libs/scalalib/test/src/mill/scalalib/CrossVersionTests.scala +++ b/libs/scalalib/test/src/mill/scalalib/CrossVersionTests.scala @@ -25,6 +25,7 @@ object CrossVersionTests extends TestSuite { |└─ org.scala-lang:scala-library:2.13.10 |""".stripMargin override def scalaVersion = "2.13.10" + override def semanticDbVersion = "4.9.3" // last version to support Scala 2.13.10 override def mvnDeps = Seq(mvn"com.lihaoyi::upickle:1.4.0") } diff --git a/libs/scalalib/test/src/mill/scalalib/HelloWorldTests.scala b/libs/scalalib/test/src/mill/scalalib/HelloWorldTests.scala index 9255d0cc6cd3..27686a993471 100644 --- a/libs/scalalib/test/src/mill/scalalib/HelloWorldTests.scala +++ b/libs/scalalib/test/src/mill/scalalib/HelloWorldTests.scala @@ -23,10 +23,6 @@ object HelloWorldTests extends TestSuite { trait HelloWorldModule extends scalalib.ScalaModule { def scalaVersion = scala212Version - override def semanticDbVersion: T[String] = Task { - // The latest semanticDB release for Scala 2.12.6 - "4.1.9" - } } trait SemanticModule extends scalalib.ScalaModule { def scalaVersion = scala213Version @@ -42,6 +38,9 @@ object HelloWorldTests extends TestSuite { object HelloWorldNonPrecompiledBridge extends TestRootModule { object core extends HelloWorldModule { override def scalaVersion = "2.12.1" + + // Hack to disable semanticdb, because there's no semanticdb for this ancient scala version. 
+ override protected def semanticDbEnablePluginScalacOptions = Seq.empty } lazy val millDiscover = Discover[this.type] @@ -52,7 +51,16 @@ object HelloWorldTests extends TestSuite { scala212Version, scala213Version ) - trait HelloWorldCross extends CrossScalaModule + + trait HelloWorldCross extends CrossScalaModule { + override def semanticDbVersion = Task { + scalaVersion() match { + case `scala2123Version` => "2.1.2" + case _ => super.semanticDbVersion() + } + } + } + lazy val millDiscover = Discover[this.type] } diff --git a/libs/scalalib/test/src/mill/scalalib/LargeAssemblyExeTests.scala b/libs/scalalib/test/src/mill/scalalib/LargeAssemblyExeTests.scala index f1756212fe1e..ef75d1bfc0b1 100644 --- a/libs/scalalib/test/src/mill/scalalib/LargeAssemblyExeTests.scala +++ b/libs/scalalib/test/src/mill/scalalib/LargeAssemblyExeTests.scala @@ -19,6 +19,7 @@ object LargeAssemblyExeTests extends TestSuite { trait ExtraDeps extends ScalaModule { def scalaVersion = "2.13.11" + def semanticDbVersion = "4.9.9" // last version to support this Scala def sources = Task.Sources(mill.api.BuildCtx.workspaceRoot / "src") diff --git a/libs/scalalib/test/src/mill/scalalib/ScalaCrossVersionTests.scala b/libs/scalalib/test/src/mill/scalalib/ScalaCrossVersionTests.scala index d91007c783f5..b30695c38f3e 100644 --- a/libs/scalalib/test/src/mill/scalalib/ScalaCrossVersionTests.scala +++ b/libs/scalalib/test/src/mill/scalalib/ScalaCrossVersionTests.scala @@ -30,7 +30,7 @@ object ScalaCrossVersionTests extends TestSuite { CrossModuleDeps.cuttingEdge(scala213Version).moduleDeps ).getMessage assert( - message == s"Unable to find compatible cross version between ${scala213Version} and 2.12.6,3.2.0" + message == s"Unable to find compatible cross version between ${scala213Version} and 2.12.20,3.2.0" ) } } diff --git a/libs/scalalib/test/src/mill/scalalib/ScalaDotty213Tests.scala b/libs/scalalib/test/src/mill/scalalib/ScalaDotty213Tests.scala index d1f5f1f096d6..2a0f85387cc0 100644 --- a/libs/scalalib/test/src/mill/scalalib/ScalaDotty213Tests.scala +++ b/libs/scalalib/test/src/mill/scalalib/ScalaDotty213Tests.scala @@ -9,6 +9,10 @@ object ScalaDotty213Tests extends TestSuite { object Dotty213 extends TestRootModule { object foo extends ScalaModule { def scalaVersion = "0.18.1-RC1" + + // Hack to disable semanticdb, because there's no semanticdb for this ancient scala version. 
+ override protected def semanticDbEnablePluginScalacOptions = Seq.empty + override def mvnDeps = Seq( mvn"org.scala-lang.modules::scala-xml:1.2.0".withDottyCompat(scalaVersion()) ) diff --git a/libs/scalalib/test/src/mill/scalalib/ScalaMixedProjectSemanticDbTests.scala b/libs/scalalib/test/src/mill/scalalib/ScalaMixedProjectSemanticDbTests.scala index e9aaeeb14415..00a87ec12003 100644 --- a/libs/scalalib/test/src/mill/scalalib/ScalaMixedProjectSemanticDbTests.scala +++ b/libs/scalalib/test/src/mill/scalalib/ScalaMixedProjectSemanticDbTests.scala @@ -31,7 +31,7 @@ object ScalaMixedProjectSemanticDbTests extends TestSuite { println("first - expected full compile") val Right(result) = eval.apply(SemanticWorld.core.semanticDbData): @unchecked - val dataPath = eval.outPath / "core/semanticDbDataDetailed.dest/data" + val dataPath = eval.outPath / "core/semanticDbDataDetailed.dest/semanticdb-data" val outputFiles = os.walk(result.value.path).filter(os.isFile).map(_.relativeTo(result.value.path)) @@ -39,7 +39,7 @@ object ScalaMixedProjectSemanticDbTests extends TestSuite { assert( result.value.path == dataPath, outputFiles.nonEmpty, - outputFiles.toSet == expectedSemFiles, + outputFiles.toVector.sorted == expectedSemFiles.toVector.sorted, result.evalCount > 0, os.exists(dataPath / os.up / "zinc") ) diff --git a/libs/scalalib/test/src/mill/scalalib/ScalaMultiModuleClasspathsTests.scala b/libs/scalalib/test/src/mill/scalalib/ScalaMultiModuleClasspathsTests.scala index 57366c58dc48..c7c4d91ae517 100644 --- a/libs/scalalib/test/src/mill/scalalib/ScalaMultiModuleClasspathsTests.scala +++ b/libs/scalalib/test/src/mill/scalalib/ScalaMultiModuleClasspathsTests.scala @@ -12,7 +12,7 @@ object ScalaMultiModuleClasspathsTests extends TestSuite { object MultiModuleClasspaths extends TestRootModule { trait FooModule extends ScalaModule { def scalaVersion = "2.13.12" - + def semanticDbVersion = "4.8.4" // Last version that supports 2.13.12 def mvnDeps = Seq(mvn"com.lihaoyi::sourcecode:0.2.2") def compileMvnDeps = Seq(mvn"com.lihaoyi::geny:0.4.2") def runMvnDeps = Seq(mvn"com.lihaoyi::utest:0.8.5") @@ -20,6 +20,7 @@ object ScalaMultiModuleClasspathsTests extends TestSuite { } trait BarModule extends ScalaModule { def scalaVersion = "2.13.12" + def semanticDbVersion = "4.8.4" // Last version that supports 2.13.12 def mvnDeps = Seq(mvn"com.lihaoyi::sourcecode:0.2.1") def compileMvnDeps = Seq(mvn"com.lihaoyi::geny:0.4.1") @@ -28,6 +29,7 @@ object ScalaMultiModuleClasspathsTests extends TestSuite { } trait QuxModule extends ScalaModule { def scalaVersion = "2.13.12" + def semanticDbVersion = "4.8.4" // Last version that supports 2.13.12 def mvnDeps = Seq(mvn"com.lihaoyi::sourcecode:0.2.0") def compileMvnDeps = Seq(mvn"com.lihaoyi::geny:0.4.0") diff --git a/libs/scalalib/test/src/mill/scalalib/ScalaSemanticDbTests.scala b/libs/scalalib/test/src/mill/scalalib/ScalaSemanticDbTests.scala index 85af62315b6e..ca102a1ebfef 100644 --- a/libs/scalalib/test/src/mill/scalalib/ScalaSemanticDbTests.scala +++ b/libs/scalalib/test/src/mill/scalalib/ScalaSemanticDbTests.scala @@ -34,7 +34,7 @@ object ScalaSemanticDbTests extends TestSuite { println("first - expected full compile") val Right(result) = eval.apply(SemanticWorld.core.semanticDbData): @unchecked - val dataPath = eval.outPath / "core/semanticDbDataDetailed.dest/data" + val dataPath = eval.outPath / "core/semanticDbDataDetailed.dest/semanticdb-data" val outputFiles = os.walk(result.value.path).filter(os.isFile).map(_.relativeTo(result.value.path)) @@ -42,7 +42,7 @@ object 
ScalaSemanticDbTests extends TestSuite { assert( result.value.path == dataPath, outputFiles.nonEmpty, - outputFiles.toSet == expectedSemFiles, + outputFiles.toVector.sorted == expectedSemFiles.toVector.sorted, result.evalCount > 0, os.exists(dataPath / os.up / "zinc") ) @@ -79,7 +79,7 @@ object ScalaSemanticDbTests extends TestSuite { println("first - expected full compile") val Right(result) = eval.apply(SemanticWorld.core.semanticDbData): @unchecked - val dataPath = eval.outPath / "core/semanticDbDataDetailed.dest/data" + val dataPath = eval.outPath / "core/semanticDbDataDetailed.dest/semanticdb-data" val outputFiles = os.walk(result.value.path).filter(os.isFile).map(_.relativeTo(result.value.path)) @@ -87,7 +87,7 @@ object ScalaSemanticDbTests extends TestSuite { val filteredOutputFiles = outputFiles.toSet.filter(_.ext != "class") assert( result.value.path == dataPath, - filteredOutputFiles == expectedSemFiles, + filteredOutputFiles.toVector.sorted == expectedSemFiles.toVector.sorted, result.evalCount > 0 ) } diff --git a/libs/scalalib/test/src/mill/scalalib/ScalaTypeLevelTests.scala b/libs/scalalib/test/src/mill/scalalib/ScalaTypeLevelTests.scala index 0f1bdbc4512c..41713f6923a7 100644 --- a/libs/scalalib/test/src/mill/scalalib/ScalaTypeLevelTests.scala +++ b/libs/scalalib/test/src/mill/scalalib/ScalaTypeLevelTests.scala @@ -12,6 +12,10 @@ object ScalaTypeLevelTests extends TestSuite { object foo extends ScalaModule { override def scalaVersion = "2.11.8" override def scalaOrganization = "org.typelevel" + + // Hack to disable semanticdb, because there's no semanticdb for this ancient scala version. + override protected def semanticDbEnablePluginScalacOptions = Seq.empty + override def ammoniteVersion = "1.6.7" override def mvnDeps = Seq( diff --git a/libs/scalalib/test/src/mill/scalalib/ScalaVersionsRangesTests.scala b/libs/scalalib/test/src/mill/scalalib/ScalaVersionsRangesTests.scala index 60a2134bd378..855c79b82755 100644 --- a/libs/scalalib/test/src/mill/scalalib/ScalaVersionsRangesTests.scala +++ b/libs/scalalib/test/src/mill/scalalib/ScalaVersionsRangesTests.scala @@ -10,10 +10,16 @@ object ScalaVersionsRangesTests extends TestSuite { object ScalaVersionsRanges extends TestRootModule { object core extends Cross[CoreCrossModule]("2.12.13", "2.13.5", "3.3.3") + trait CoreCrossModule extends CrossScalaModule with CrossScalaVersionRanges { + + override def semanticDbVersion = "4.8.4" // last version to support these Scala versions + object test extends ScalaTests with TestModule.Utest { override def utestVersion = "0.8.5" + + override def semanticDbVersion = "4.8.4" // last version to support these Scala versions } } diff --git a/libs/scalanativelib/test/src/mill/scalanativelib/FeaturesTests.scala b/libs/scalanativelib/test/src/mill/scalanativelib/FeaturesTests.scala index 2205225a34c9..c6549b8a23d0 100644 --- a/libs/scalanativelib/test/src/mill/scalanativelib/FeaturesTests.scala +++ b/libs/scalanativelib/test/src/mill/scalanativelib/FeaturesTests.scala @@ -10,6 +10,7 @@ object FeaturesTests extends TestSuite { object Features extends TestRootModule with ScalaNativeModule { def scalaNativeVersion = "0.5.0" def scalaVersion = "2.13.10" + def semanticDbVersion = "4.9.3" // last compatible version def nativeIncrementalCompilation = true override lazy val millDiscover = Discover[this.type] } diff --git a/mill-build/src/millbuild/Deps.scala b/mill-build/src/millbuild/Deps.scala index bd0afb0bbbc8..1f7890664fb2 100644 --- a/mill-build/src/millbuild/Deps.scala +++ 
b/mill-build/src/millbuild/Deps.scala @@ -17,7 +17,7 @@ object Deps { val testScala213Version = "2.13.16" // Scala Native 4.2 will not get releases for new Scala version val testScala213VersionForScalaNative42 = "2.13.16" - val testScala212Version = "2.12.6" + val testScala212Version = "2.12.20" val testScala32Version = "3.2.0" val testScala33Version = "3.3.1" diff --git a/runner/bsp/worker/src/mill/bsp/worker/MillBuildServer.scala b/runner/bsp/worker/src/mill/bsp/worker/MillBuildServer.scala index 511fb5e9f563..0b667d6267c3 100644 --- a/runner/bsp/worker/src/mill/bsp/worker/MillBuildServer.scala +++ b/runner/bsp/worker/src/mill/bsp/worker/MillBuildServer.scala @@ -264,6 +264,12 @@ private class MillBuildServer( ) clientWantsSemanticDb = true SemanticDbJavaModuleApi.contextSemanticDbVersion.set(Option(version)) + + // Inform other BSP clients that we want to use SemanticDB + val pid = ProcessHandle.current().pid() + val pidFile = BuildCtx.bspSemanticDbSessionsFolder / pid.toString + os.write.over(pidFile, "", createFolders = true) + pidFile.toNIO.toFile.deleteOnExit() } readVersion(d, "javaSemanticdbVersion").foreach { version => SemanticDbJavaModuleApi.contextJavaSemanticDbVersion.set(Option(version)) diff --git a/runner/meta/src/mill/meta/MillBuildRootModule.scala b/runner/meta/src/mill/meta/MillBuildRootModule.scala index 8439c022a00d..11a46aa5f133 100644 --- a/runner/meta/src/mill/meta/MillBuildRootModule.scala +++ b/runner/meta/src/mill/meta/MillBuildRootModule.scala @@ -288,6 +288,7 @@ trait MillBuildRootModule()(using .internalWorker() .compileMixed( ZincCompileMixed( + compileTo = Task.dest, upstreamCompileOutput = upstreamCompileOutput(), sources = Seq.from(allSourceFiles().map(_.path)), compileClasspath = compileClasspath().map(_.path), diff --git a/testkit/src/mill/testkit/IntegrationTester.scala b/testkit/src/mill/testkit/IntegrationTester.scala index 196c9a1b22a0..7e9a4fd3043d 100644 --- a/testkit/src/mill/testkit/IntegrationTester.scala +++ b/testkit/src/mill/testkit/IntegrationTester.scala @@ -77,6 +77,11 @@ object IntegrationTester { asTestValue(propagateEnv), asTestValue(shutdownGracePeriod) ) + + /** Enhances the test with clues and then runs the evaluation. */ + def runWithClues[A](f: EvalResult => A): A = { + withTestClues(clues*)(f(run())) + } } trait Impl extends AutoCloseable with IntegrationTesterBase {