@@ -9,7 +9,7 @@ import tasty.*
 import config.Printers.{noPrinter, pickling}
 import config.Feature
 import java.io.PrintStream
-import io.FileWriters.TastyWriter
+import io.FileWriters.{TastyWriter, ReadOnlyContext}
 import StdNames.{str, nme}
 import Periods.*
 import Phases.*
@@ -22,6 +22,11 @@ import compiletime.uninitialized
 import dotty.tools.io.{JarArchive, AbstractFile}
 import dotty.tools.dotc.printing.OutlinePrinter
 import scala.annotation.constructorOnly
+import scala.concurrent.Promise
+import dotty.tools.dotc.transform.Pickler.writeSigFilesAsync
+
+import scala.util.chaining.given
+import dotty.tools.io.FileWriters.BufferingDelayedReporting

 object Pickler {
   val name: String = "pickler"
@@ -33,8 +38,62 @@ object Pickler {
    */
   inline val ParallelPickling = true

+  class AsyncTastyHolder(val earlyOut: AbstractFile, val promise: Promise[AsyncTastyState])
+  class AsyncTastyState(val hasErrors: Boolean, val pending: Option[BufferingDelayedReporting])
+
+  // Why do we only write to early output in the first run?
+  // =======================================================
+  // TL;DR the point of pipeline compilation is to start downstream projects early,
+  // so we don't want to wait for suspended units to be compiled.
+  //
+  // But why is it safe to ignore suspended units?
+  // If this project contains a transparent macro that is called in the same project,
+  // the compilation unit of that call will be suspended (if the macro implementation
+  // is also in this project), causing a second run.
+  // However, before we do that run, we will have already requested sbt to begin
+  // early downstream compilation. This means that the suspended definitions will not
+  // be visible in *early* downstream compilation.
+  //
+  // However, sbt will by default prevent downstream compilation from happening in this scenario,
+  // due to the existence of macro definitions. So we are protected from failure if the user tries
+  // to use the suspended definitions.
+  //
+  // Additionally, it is recommended that the user move macro implementations to another project
+  // if they want to force early output. In this scenario the suspensions will no longer occur, so
+  // the definitions will become visible in the early output.
+  //
+  // See `sbt-test/pipelining/pipelining-scala-macro` and `sbt-test/pipelining/pipelining-scala-macro-force`
+  // for examples of this in action.
+  //
+  // Therefore we only need to write to early output in the first run. We also provide the option
+  // to diagnose suspensions with the `-Yno-suspended-units` flag.
+  def writeSigFilesAsync(
+      tasks: List[(String, Array[Byte])],
+      writer: EarlyFileWriter,
+      promise: Promise[AsyncTastyState])(using ctx: ReadOnlyContext): Unit = {
+    try
+      for (internalName, pickled) <- tasks do
+        val _ = writer.writeTasty(internalName, pickled)
+    finally
+      try
+        writer.close()
+      finally
+        promise.success(
+          AsyncTastyState(
+            hasErrors = ctx.reporter.hasErrors,
+            pending = (
+              ctx.reporter match
+                case buffered: BufferingDelayedReporting => Some(buffered)
+                case _ => None
+            )
+          )
+        )
+      end try
+    end try
+  }
+
   class EarlyFileWriter private (writer: TastyWriter, origin: AbstractFile):
-    def this(dest: AbstractFile)(using @constructorOnly ctx: Context) = this(TastyWriter(dest), dest)
+    def this(dest: AbstractFile)(using @constructorOnly ctx: ReadOnlyContext) = this(TastyWriter(dest), dest)

     export writer.writeTasty

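For orientation, the sketch below shows how the two ends of this handshake are expected to meet: something on the driver side creates the `Promise[AsyncTastyState]` and makes it reachable as `ctx.asyncTastyPromise`, and the build-tool side later waits on the future to learn whether errors occurred and whether buffered diagnostics are still pending. `AsyncTastyHolder`, `AsyncTastyState` and the promise completion in `writeSigFilesAsync` come from the hunk above; the `awaitEarlyTasty` helper, the blocking `Await`, and the way the holder is installed are assumptions made only for illustration.

import scala.concurrent.{Await, Promise}
import scala.concurrent.duration.Duration
import dotty.tools.io.AbstractFile
import dotty.tools.dotc.transform.Pickler.{AsyncTastyHolder, AsyncTastyState}

// Hypothetical consumer of the async TASTy handshake (assumed wiring, illustration only).
def awaitEarlyTasty(earlyOut: AbstractFile): Boolean =
  val promise = Promise[AsyncTastyState]()
  val holder  = AsyncTastyHolder(earlyOut, promise)
  // ... hand `holder` to the compiler so that `ctx.asyncTastyPromise` is defined,
  // then run the compiler; the Pickler phase completes the promise from `runOn` ...
  val state = Await.result(promise.future, Duration.Inf)
  // If the write ran on a worker thread, `state.pending` carries the
  // BufferingDelayedReporting instance whose reports still need to be replayed.
  !state.hasErrors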
@@ -50,13 +109,15 @@ object Pickler {
 class Pickler extends Phase {
   import ast.tpd.*

+  def doAsyncTasty(using Context): Boolean = ctx.asyncTastyPromise.isDefined
+
   override def phaseName: String = Pickler.name

   override def description: String = Pickler.description

   // No need to repickle trees coming from TASTY
   override def isRunnable(using Context): Boolean =
-    super.isRunnable && !ctx.settings.fromTasty.value
+    super.isRunnable && (!ctx.settings.fromTasty.value || doAsyncTasty)

   // when `-Yjava-tasty` is set we actually want to run this phase on Java sources
   override def skipIfJava(using Context): Boolean = false
@@ -86,11 +147,20 @@ class Pickler extends Phase {
    */
   object serialized:
     val scratch = new ScratchData
+    private val buf = mutable.ListBuffer.empty[(String, Array[Byte])]
     def run(body: ScratchData => Array[Byte]): Array[Byte] =
       synchronized {
         scratch.reset()
         body(scratch)
       }
+    def commit(internalName: String, tasty: Array[Byte]): Unit = synchronized {
+      buf += ((internalName, tasty))
+    }
+    def result(): List[(String, Array[Byte])] = synchronized {
+      val res = buf.toList
+      buf.clear()
+      res
+    }

   private val executor = Executor[Array[Byte]]()

@@ -100,10 +170,29 @@ class Pickler extends Phase {
     if isOutline then ctx.fresh.setPrinterFn(OutlinePrinter(_))
     else ctx

+  /** Only run under -Ypickle-write and -from-tasty */
+  private def runFromTasty(unit: CompilationUnit)(using Context): Unit = {
+    val pickled = unit.pickled
+    for (cls, bytes) <- pickled do
+      serialized.commit(computeInternalName(cls), bytes())
+  }
+
+  private def computeInternalName(cls: ClassSymbol)(using Context): String =
+    if cls.is(Module) then cls.binaryClassName.stripSuffix(str.MODULE_SUFFIX).nn
+    else cls.binaryClassName
+
   override def run(using Context): Unit = {
     val unit = ctx.compilationUnit
     pickling.println(i"unpickling in run ${ctx.runId}")

+    if ctx.settings.fromTasty.value then
+      // skip the rest of the phase, as tasty is already "pickled",
+      // however we still need to set up tasks to write TASTy to
+      // early output when pipelining is enabled.
+      if doAsyncTasty then
+        runFromTasty(unit)
+      return ()
+
     for
       cls <- dropCompanionModuleClasses(topLevelClasses(unit.tpdTree))
       tree <- sliceTopLevel(unit.tpdTree, cls)
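The internal name computed by `computeInternalName` above is the key under which each class's TASTy is committed for the early output. As a string-level illustration of the module handling (assuming the conventional "$" value for `str.MODULE_SUFFIX`, and leaving out the `ClassSymbol` machinery):

// String-level illustration only; the real code goes through ClassSymbol and str.MODULE_SUFFIX.
val ModuleSuffix = "$" // assumed value of str.MODULE_SUFFIX

def internalNameOf(binaryClassName: String, isModule: Boolean): String =
  if isModule then binaryClassName.stripSuffix(ModuleSuffix)
  else binaryClassName

@main def demoInternalName(): Unit =
  // `object Foo` compiles to the class "Foo$", but its TASTy entry is keyed as "Foo".
  assert(internalNameOf("Foo$", isModule = true) == "Foo")
  // A plain `class Bar` keeps its binary name unchanged.
  assert(internalNameOf("Bar", isModule = false) == "Bar")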
@@ -137,6 +226,8 @@ class Pickler extends Phase {
       val positionWarnings = new mutable.ListBuffer[Message]()
       def reportPositionWarnings() = positionWarnings.foreach(report.warning(_))

+      val internalName = if doAsyncTasty then computeInternalName(cls) else ""
+
       def computePickled(): Array[Byte] = inContext(ctx.fresh) {
         serialized.run { scratch =>
           treePkl.compactify(scratch)
@@ -166,6 +257,10 @@ class Pickler extends Phase {
             println(i"**** pickled info of $cls")
             println(TastyPrinter.showContents(pickled, ctx.settings.color.value == "never"))
             println(i"**** end of pickled info of $cls")
+
+          if doAsyncTasty then
+            serialized.commit(internalName, pickled)
+
           pickled
         }
       }
@@ -194,13 +289,27 @@ class Pickler extends Phase {
   }

   override def runOn(units: List[CompilationUnit])(using Context): List[CompilationUnit] = {
+    val isConcurrent = useExecutor
+
+    val writeTask: Option[() => Unit] = ctx.asyncTastyPromise.map: holder =>
+      () =>
+        given ReadOnlyContext = if isConcurrent then ReadOnlyContext.buffered else ReadOnlyContext.eager
+        val writer = Pickler.EarlyFileWriter(holder.earlyOut)
+        writeSigFilesAsync(serialized.result(), writer, holder.promise)
+
+    def runPhase(writeCB: (doWrite: () => Unit) => Unit) =
+      super.runOn(units).tap(_ => writeTask.foreach(writeCB))
+
     val result =
-      if useExecutor then
+      if isConcurrent then
         executor.start()
-        try super.runOn(units)
+        try
+          runPhase: doWrite =>
+            // unless we redesign executor to have a "Unit" schedule overload, we need some sentinel value.
+            executor.schedule(() => { doWrite(); Array.emptyByteArray })
         finally executor.close()
       else
-        super.runOn(units)
+        runPhase(_())
     if ctx.settings.YtestPickler.value then
       val ctx2 = ctx.fresh
         .setSetting(ctx.settings.YreadComments, true)
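A note on the `runPhase(_())` call in the last hunk: `runPhase` runs the phase body and then hands the deferred early-output write to a caller-supplied callback, so the two branches differ only in where that write executes. The concurrent branch schedules it on the executor (returning `Array.emptyByteArray` as a sentinel, since the executor is typed for pickle buffers), while the sequential branch applies it immediately. Below is a stripped-down sketch of the same shape, using only standard-library types; `runUnits`, `demoRunOn`, and the `Thread` stand-in for the executor are made-up names for illustration, not part of the compiler.

import scala.util.chaining.given

// Toy stand-ins (assumptions for illustration): a "phase body" and an optional deferred write.
def runUnits(): List[String] = List("unit1", "unit2")
val writeTask: Option[() => Unit] = Some(() => println("writing early TASTy"))

// Same shape as Pickler.runOn: run the body, then let the caller decide where the write runs.
def runPhase(writeCB: (() => Unit) => Unit): List[String] =
  runUnits().tap(_ => writeTask.foreach(writeCB))

@main def demoRunOn(): Unit =
  // "Concurrent" path: submit the write somewhere else (a plain thread stands in for the executor).
  runPhase(doWrite => new Thread(() => doWrite()).start())
  // Sequential path: `_()` simply invokes the write task right away, on this thread.
  runPhase(_())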