Skip to content

Commit 5f3e1d7

Browse files
authored
write pipelined tasty in parallel. (#20153)
2 parents d148973 + 4bfc43f commit 5f3e1d7

File tree

18 files changed

+507
-106
lines changed

18 files changed

+507
-106
lines changed

compiler/src/dotty/tools/backend/jvm/ClassfileWriters.scala

+5-1
Original file line numberDiff line numberDiff line change
@@ -20,7 +20,11 @@ import dotty.tools.io.JarArchive
2020

2121
import scala.language.unsafeNulls
2222

23-
23+
/** !!! This file is now copied in `dotty.tools.io.FileWriters` in a more general way that does not rely upon
24+
* `PostProcessorFrontendAccess`, this should probably be changed to wrap that class instead.
25+
*
26+
* Until then, any changes to this file should be copied to `dotty.tools.io.FileWriters` as well.
27+
*/
2428
class ClassfileWriters(frontendAccess: PostProcessorFrontendAccess) {
2529
type NullableFile = AbstractFile | Null
2630
import frontendAccess.{compilerSettings, backendReporting}

compiler/src/dotty/tools/backend/jvm/GenBCode.scala

+13
Original file line numberDiff line numberDiff line change
@@ -10,6 +10,10 @@ import Symbols.*
1010
import dotty.tools.io.*
1111
import scala.collection.mutable
1212
import scala.compiletime.uninitialized
13+
import java.util.concurrent.TimeoutException
14+
15+
import scala.concurrent.duration.Duration
16+
import scala.concurrent.Await
1317

1418
class GenBCode extends Phase { self =>
1519

@@ -90,6 +94,15 @@ class GenBCode extends Phase { self =>
9094
try
9195
val result = super.runOn(units)
9296
generatedClassHandler.complete()
97+
try
98+
for
99+
async <- ctx.run.nn.asyncTasty
100+
bufferedReporter <- async.sync()
101+
do
102+
bufferedReporter.relayReports(frontendAccess.backendReporting)
103+
catch
104+
case ex: Exception =>
105+
report.error(s"exception from future: $ex, (${Option(ex.getCause())})")
93106
result
94107
finally
95108
// frontendAccess and postProcessor are created lazily, clean them up only if they were initialized

compiler/src/dotty/tools/dotc/CompilationUnit.scala

+7-3
Original file line numberDiff line numberDiff line change
@@ -98,11 +98,15 @@ class CompilationUnit protected (val source: SourceFile, val info: CompilationUn
9898
depRecorder.clear()
9999
if !suspended then
100100
suspended = true
101-
ctx.run.nn.suspendedUnits += this
101+
val currRun = ctx.run.nn
102+
currRun.suspendedUnits += this
103+
val isInliningPhase = ctx.phase == Phases.inliningPhase
102104
if ctx.settings.XprintSuspension.value then
103-
ctx.run.nn.suspendedHints += (this -> hint)
104-
if ctx.phase == Phases.inliningPhase then
105+
currRun.suspendedHints += (this -> (hint, isInliningPhase))
106+
if isInliningPhase then
105107
suspendedAtInliningPhase = true
108+
else
109+
currRun.suspendedAtTyperPhase = true
106110
throw CompilationUnit.SuspendException()
107111

108112
private var myAssignmentSpans: Map[Int, List[Span]] | Null = null

compiler/src/dotty/tools/dotc/Driver.scala

+3-3
Original file line numberDiff line numberDiff line change
@@ -54,10 +54,10 @@ class Driver {
5454
if (ctx.settings.XprintSuspension.value)
5555
val suspendedHints = run.suspendedHints.toList
5656
report.echo(i"compiling suspended $suspendedUnits%, %")
57-
for (unit, hint) <- suspendedHints do
58-
report.echo(s" $unit: $hint")
57+
for (unit, (hint, atInlining)) <- suspendedHints do
58+
report.echo(s" $unit at ${if atInlining then "inlining" else "typer"}: $hint")
5959
val run1 = compiler.newRun
60-
run1.compileSuspendedUnits(suspendedUnits)
60+
run1.compileSuspendedUnits(suspendedUnits, !run.suspendedAtTyperPhase)
6161
finish(compiler, run1)(using MacroClassLoader.init(ctx.fresh))
6262

6363
protected def initCtx: Context = (new ContextBase).initialCtx

compiler/src/dotty/tools/dotc/Run.scala

+33-3
Original file line numberDiff line numberDiff line change
@@ -37,6 +37,7 @@ import scala.io.Codec
3737
import Run.Progress
3838
import scala.compiletime.uninitialized
3939
import dotty.tools.dotc.transform.MegaPhase
40+
import dotty.tools.dotc.transform.Pickler.AsyncTastyHolder
4041

4142
/** A compiler run. Exports various methods to compile source files */
4243
class Run(comp: Compiler, ictx: Context) extends ImplicitRunInfo with ConstraintRunInfo {
@@ -130,7 +131,10 @@ class Run(comp: Compiler, ictx: Context) extends ImplicitRunInfo with Constraint
130131
myUnits = us
131132

132133
var suspendedUnits: mutable.ListBuffer[CompilationUnit] = mutable.ListBuffer()
133-
var suspendedHints: mutable.Map[CompilationUnit, String] = mutable.HashMap()
134+
var suspendedHints: mutable.Map[CompilationUnit, (String, Boolean)] = mutable.HashMap()
135+
136+
/** Were any units suspended in the typer phase? If so, then pipelined TASTy cannot complete. */
137+
var suspendedAtTyperPhase: Boolean = false
134138

135139
def checkSuspendedUnits(newUnits: List[CompilationUnit])(using Context): Unit =
136140
if newUnits.isEmpty && suspendedUnits.nonEmpty && !ctx.reporter.errorsReported then
@@ -231,6 +235,22 @@ class Run(comp: Compiler, ictx: Context) extends ImplicitRunInfo with Constraint
231235
if !progress.isCancelled() then
232236
progress.tickSubphase()
233237

238+
/** If true, then we are done writing pipelined TASTy files (i.e. they were finished in a previous run). */
239+
private var myAsyncTastyWritten = false
240+
241+
private var _asyncTasty: Option[AsyncTastyHolder] = None
242+
243+
/** Populated when this run needs to write pipelined TASTy files. */
244+
def asyncTasty: Option[AsyncTastyHolder] = _asyncTasty
245+
246+
private def initializeAsyncTasty()(using Context): () => Unit =
247+
// should we provide a custom ExecutionContext?
248+
// currently it is just used to call the `apiPhaseCompleted` and `dependencyPhaseCompleted` callbacks in Zinc
249+
import scala.concurrent.ExecutionContext.Implicits.global
250+
val async = AsyncTastyHolder.init
251+
_asyncTasty = Some(async)
252+
() => async.cancel()
253+
234254
/** Will be set to true if any of the compiled compilation units contains
235255
* a pureFunctions language import.
236256
*/
@@ -348,7 +368,14 @@ class Run(comp: Compiler, ictx: Context) extends ImplicitRunInfo with Constraint
348368
runCtx.setProperty(CyclicReference.Trace, new CyclicReference.Trace())
349369
runCtx.withProgressCallback: cb =>
350370
_progress = Progress(cb, this, fusedPhases.map(_.traversals).sum)
371+
val cancelAsyncTasty: () => Unit =
372+
if !myAsyncTastyWritten && Phases.picklerPhase.exists && !ctx.settings.YearlyTastyOutput.isDefault then
373+
initializeAsyncTasty()
374+
else () => {}
375+
351376
runPhases(allPhases = fusedPhases)(using runCtx)
377+
cancelAsyncTasty()
378+
352379
ctx.reporter.finalizeReporting()
353380
if (!ctx.reporter.hasErrors)
354381
Rewrites.writeBack()
@@ -365,9 +392,12 @@ class Run(comp: Compiler, ictx: Context) extends ImplicitRunInfo with Constraint
365392
/** Is this run started via a compilingSuspended? */
366393
def isCompilingSuspended: Boolean = myCompilingSuspended
367394

368-
/** Compile units `us` which were suspended in a previous run */
369-
def compileSuspendedUnits(us: List[CompilationUnit]): Unit =
395+
/** Compile units `us` which were suspended in a previous run,
396+
* also signal whether all necessary async TASTy files were written in a previous run.
397+
*/
398+
def compileSuspendedUnits(us: List[CompilationUnit], asyncTastyWritten: Boolean): Unit =
370399
myCompilingSuspended = true
400+
myAsyncTastyWritten = asyncTastyWritten
371401
for unit <- us do unit.suspended = false
372402
compileUnits(us)
373403

compiler/src/dotty/tools/dotc/core/Contexts.scala

+1
Original file line numberDiff line numberDiff line change
@@ -685,6 +685,7 @@ object Contexts {
685685
updateStore(compilationUnitLoc, compilationUnit)
686686
}
687687

688+
688689
def setCompilerCallback(callback: CompilerCallback): this.type = updateStore(compilerCallbackLoc, callback)
689690
def setIncCallback(callback: IncrementalCallback): this.type = updateStore(incCallbackLoc, callback)
690691
def setProgressCallback(callback: ProgressCallback): this.type = updateStore(progressCallbackLoc, callback)

compiler/src/dotty/tools/dotc/sbt/ExtractAPI.scala

+1-52
Original file line numberDiff line numberDiff line change
@@ -70,19 +70,13 @@ class ExtractAPI extends Phase {
7070

7171
override def runOn(units: List[CompilationUnit])(using Context): List[CompilationUnit] =
7272
val doZincCallback = ctx.runZincPhases
73-
val sigWriter: Option[Pickler.EarlyFileWriter] = ctx.settings.YearlyTastyOutput.value match
74-
case earlyOut if earlyOut.isDirectory && earlyOut.exists =>
75-
Some(Pickler.EarlyFileWriter(earlyOut))
76-
case _ =>
77-
None
7873
val nonLocalClassSymbols = new mutable.HashSet[Symbol]
7974
val units0 =
8075
if doZincCallback then
8176
val ctx0 = ctx.withProperty(NonLocalClassSymbolsInCurrentUnits, Some(nonLocalClassSymbols))
8277
super.runOn(units)(using ctx0)
8378
else
8479
units // still run the phase for the side effects (writing TASTy files to -Yearly-tasty-output)
85-
sigWriter.foreach(writeSigFiles(units0, _))
8680
if doZincCallback then
8781
ctx.withIncCallback(recordNonLocalClasses(nonLocalClassSymbols, _))
8882
if ctx.settings.YjavaTasty.value then
@@ -91,57 +85,12 @@ class ExtractAPI extends Phase {
9185
units0
9286
end runOn
9387

94-
// Why we only write to early output in the first run?
95-
// ===================================================
96-
// TL;DR the point of pipeline compilation is to start downstream projects early,
97-
// so we don't want to wait for suspended units to be compiled.
98-
//
99-
// But why is it safe to ignore suspended units?
100-
// If this project contains a transparent macro that is called in the same project,
101-
// the compilation unit of that call will be suspended (if the macro implementation
102-
// is also in this project), causing a second run.
103-
// However before we do that run, we will have already requested sbt to begin
104-
// early downstream compilation. This means that the suspended definitions will not
105-
// be visible in *early* downstream compilation.
106-
//
107-
// However, sbt will by default prevent downstream compilation happening in this scenario,
108-
// due to the existence of macro definitions. So we are protected from failure if user tries
109-
// to use the suspended definitions.
110-
//
111-
// Additionally, it is recommended for the user to move macro implementations to another project
112-
// if they want to force early output. In this scenario the suspensions will no longer occur, so now
113-
// they will become visible in the early-output.
114-
//
115-
// See `sbt-test/pipelining/pipelining-scala-macro` and `sbt-test/pipelining/pipelining-scala-macro-force`
116-
// for examples of this in action.
117-
//
118-
// Therefore we only need to write to early output in the first run. We also provide the option
119-
// to diagnose suspensions with the `-Yno-suspended-units` flag.
120-
private def writeSigFiles(units: List[CompilationUnit], writer: Pickler.EarlyFileWriter)(using Context): Unit = {
121-
try
122-
for
123-
unit <- units
124-
(cls, pickled) <- unit.pickled
125-
if cls.isDefinedInCurrentRun
126-
do
127-
val internalName =
128-
if cls.is(Module) then cls.binaryClassName.stripSuffix(str.MODULE_SUFFIX).nn
129-
else cls.binaryClassName
130-
val _ = writer.writeTasty(internalName, pickled())
131-
finally
132-
writer.close()
133-
if ctx.settings.verbose.value then
134-
report.echo("[sig files written]")
135-
end try
136-
}
137-
13888
private def recordNonLocalClasses(nonLocalClassSymbols: mutable.HashSet[Symbol], cb: interfaces.IncrementalCallback)(using Context): Unit =
13989
for cls <- nonLocalClassSymbols do
14090
val sourceFile = cls.source
14191
if sourceFile.exists && cls.isDefinedInCurrentRun then
14292
recordNonLocalClass(cls, sourceFile, cb)
143-
cb.apiPhaseCompleted()
144-
cb.dependencyPhaseCompleted()
93+
ctx.run.nn.asyncTasty.foreach(_.signalAPIComplete())
14594

14695
private def recordNonLocalClass(cls: Symbol, sourceFile: SourceFile, cb: interfaces.IncrementalCallback)(using Context): Unit =
14796
def registerProductNames(fullClassName: String, binaryClassName: String) =

compiler/src/dotty/tools/dotc/sbt/package.scala

+19
Original file line numberDiff line numberDiff line change
@@ -6,10 +6,29 @@ import dotty.tools.dotc.core.NameOps.stripModuleClassSuffix
66
import dotty.tools.dotc.core.Names.Name
77
import dotty.tools.dotc.core.Names.termName
88

9+
import interfaces.IncrementalCallback
10+
import dotty.tools.io.FileWriters.BufferingReporter
11+
import dotty.tools.dotc.core.Decorators.em
12+
13+
import scala.util.chaining.given
14+
import scala.util.control.NonFatal
15+
916
inline val TermNameHash = 1987 // 300th prime
1017
inline val TypeNameHash = 1993 // 301st prime
1118
inline val InlineParamHash = 1997 // 302nd prime
1219

20+
def asyncZincPhasesCompleted(cb: IncrementalCallback, pending: Option[BufferingReporter]): BufferingReporter =
21+
val zincReporter = pending match
22+
case Some(buffered) => buffered
23+
case None => BufferingReporter()
24+
try
25+
cb.apiPhaseCompleted()
26+
cb.dependencyPhaseCompleted()
27+
catch
28+
case NonFatal(t) =>
29+
zincReporter.exception(em"signaling API and Dependencies phases completion", t)
30+
zincReporter
31+
1332
extension (sym: Symbol)
1433

1534
/** Mangle a JVM symbol name in a format better suited for internal uses by sbt.

0 commit comments

Comments
 (0)