Skip to content

Commit b0f709c

Browse files
authored
Merge pull request #118 from SethTisue/prepare-for-dotty-part-2
more source tweaks to prepare for Dotty crossbuild
2 parents 39357a3 + f30d7d0 commit b0f709c

File tree

9 files changed

+15
-17
lines changed

9 files changed

+15
-17
lines changed

core/src/main/scala/scala/collection/immutable/OldHashMap.scala

+1-1
Original file line number | Diff line number | Diff line change
@@ -117,7 +117,7 @@ object OldHashMap extends MapFactory[OldHashMap] {
117117

118118
def from[K, V](it: collection.IterableOnce[(K, V)]): OldHashMap[K, V] =
119119
it match {
120-
case hm: OldHashMap[K, V] => hm
120+
case hm: OldHashMap[K @unchecked, V @unchecked] => hm
121121
case _ => (newBuilder[K, V] ++= it).result()
122122
}
123123

core/src/main/scala/scala/collection/immutable/OldHashSet.scala

+2-2
Original file line number | Diff line number | Diff line change
@@ -61,7 +61,7 @@ sealed abstract class OldHashSet[A]
6161
}
6262

6363
override def concat(that: collection.IterableOnce[A]): OldHashSet[A] = that match {
64-
case that: OldHashSet[A] =>
64+
case that: OldHashSet[A @unchecked] =>
6565
val buffer = new Array[OldHashSet[A]](bufferSize(this.size + that.size))
6666
nullToEmpty(union0(that, 0, buffer, 0))
6767
case _ => super.concat(that)
@@ -167,7 +167,7 @@ object OldHashSet extends IterableFactory[OldHashSet] {
167167

168168
def from[A](it: collection.IterableOnce[A]): OldHashSet[A] =
169169
it match {
170-
case hs: OldHashSet[A] => hs
170+
case hs: OldHashSet[A @unchecked] => hs
171171
case _ => (newBuilder[A] ++= it).result()
172172
}
173173

core/src/main/scala/scala/collection/parallel/CollectionConverters.scala

+5-5
Original file line number | Diff line number | Diff line change
@@ -26,10 +26,10 @@ object CollectionConverters {
2626
implicit class IterableIsParallelizable[A](private val coll: sc.Iterable[A]) extends AnyVal with sc.CustomParallelizable[A, ParIterable[A]] {
2727
def seq = coll
2828
override def par = coll match {
29-
case coll: sc.Set[A] => new SetIsParallelizable(coll).par
29+
case coll: sc.Set[A @unchecked] => new SetIsParallelizable(coll).par
3030
case coll: sc.Map[_, _] => new MapIsParallelizable(coll).par.asInstanceOf[ParIterable[A]]
3131
case coll: sci.Iterable[A] => new ImmutableIterableIsParallelizable(coll).par
32-
case coll: scm.Iterable[A] => new MutableIterableIsParallelizable(coll).par
32+
case coll: scm.Iterable[A @unchecked] => new MutableIterableIsParallelizable(coll).par
3333
case _ => ParIterable.newCombiner[A].fromSequential(seq) // builds ParArray, same as for scm.Iterable
3434
}
3535
}
@@ -48,15 +48,15 @@ object CollectionConverters {
4848
def seq = coll
4949
override def par = coll match {
5050
case coll: sci.Seq[A] => new ImmutableSeqIsParallelizable(coll).par
51-
case coll: sci.Set[A] => new ImmutableSetIsParallelizable(coll).par
51+
case coll: sci.Set[A @unchecked] => new ImmutableSetIsParallelizable(coll).par
5252
case coll: sci.Map[_, _] => new ImmutableMapIsParallelizable(coll).par.asInstanceOf[immutable.ParIterable[A]]
5353
case _ => immutable.ParIterable.newCombiner[A].fromSequential(seq) // builds ParVector
5454
}
5555
}
5656

5757
// Seq
5858
implicit def seqIsParallelizable[A](coll: sc.Seq[A]): sc.Parallelizable[A, ParSeq[A]] = coll match {
59-
case it: scm.Seq[A] => new MutableSeqIsParallelizable(it)
59+
case it: scm.Seq[A @unchecked] => new MutableSeqIsParallelizable(it)
6060
case it: sci.Seq[A] => new ImmutableSeqIsParallelizable(it)
6161
case _ => throw new IllegalArgumentException("Unexpected type "+coll.getClass.getName+" - every scala.collection.Seq must be a scala.collection.mutable.Seq or scala.collection.immutable.Seq")
6262
}
@@ -144,7 +144,7 @@ object CollectionConverters {
144144
def seq = coll
145145
override def par = coll match {
146146
case coll: sci.Map[K, V] => new ImmutableMapIsParallelizable(coll).par
147-
case coll: scm.Map[K, V] => new MutableMapIsParallelizable(coll).par
147+
case coll: scm.Map[K @unchecked, V @unchecked] => new MutableMapIsParallelizable(coll).par
148148
case _ => ParMap.newCombiner[K, V].fromSequential(seq)
149149
}
150150
}

core/src/main/scala/scala/collection/parallel/ParIterableLike.scala

+1-2
Original file line number | Diff line number | Diff line change
@@ -149,8 +149,7 @@ extends IterableOnce[T]
149149
with CustomParallelizable[T, Repr]
150150
with Parallel
151151
with HasNewCombiner[T, Repr]
152-
{
153-
self =>
152+
{ self =>
154153

155154
def size: Int
156155
def stringPrefix: String

core/src/main/scala/scala/collection/parallel/ParSeqLike.scala

+1-2
Original file line number | Diff line number | Diff line change
@@ -47,8 +47,7 @@ import scala.collection.parallel.ParallelCollectionImplicits._
4747
*/
4848
trait ParSeqLike[+T, +CC[X] <: ParSeq[X], +Repr <: ParSeq[T], +Sequential <: scala.collection.Seq[T] with SeqOps[T, AnyConstr, Sequential]]
4949
extends ParIterableLike[T, CC, Repr, Sequential]
50-
with Equals {
51-
self =>
50+
with Equals { self =>
5251

5352
def length: Int
5453
def apply(index: Int): T

core/src/main/scala/scala/collection/parallel/immutable/ParHashMap.scala

+1-1
Original file line number | Diff line number | Diff line change
@@ -94,7 +94,7 @@ self =>
9494
def split: Seq[IterableSplitter[(K, V)]] = if (remaining < 2) Seq(this) else triter match {
9595
case t: TrieIterator[_] =>
9696
val previousRemaining = remaining
97-
val ((fst, fstlength), snd) = t.split
97+
val ((fst: Iterator[(K, V) @unchecked], fstlength), snd: Iterator[(K, V) @unchecked]) = t.split
9898
val sndlength = previousRemaining - fstlength
9999
Seq(
100100
new ParHashMapIterator(fst, fstlength),

core/src/main/scala/scala/collection/parallel/mutable/ParHashMap.scala

+1-1
Original file line number | Diff line number | Diff line change
@@ -229,7 +229,7 @@ extends scala.collection.parallel.BucketCombiner[(K, V), ParHashMap[K, V], Defau
229229
_loadFactor = lf
230230
table = new Array[HashEntry[K, DefaultEntry[K, V]]](capacity(sizeForThreshold(_loadFactor, numelems)))
231231
tableSize = 0
232-
seedvalue = _seedvalue
232+
this.seedvalue = _seedvalue
233233
threshold = newThreshold(_loadFactor, table.length)
234234
sizeMapInit(table.length)
235235
def setSize(sz: Int) = tableSize = sz

core/src/main/scala/scala/collection/parallel/mutable/ParHashSet.scala

+2-2
Original file line number | Diff line number | Diff line change
@@ -162,7 +162,7 @@ with scala.collection.mutable.FlatHashTable.HashUtils[T] {
162162
// TODO parallelize by keeping separate size maps and merging them
163163
val tbl = new FlatHashTable[T] {
164164
sizeMapInit(table.length)
165-
seedvalue = ParHashSetCombiner.this.seedvalue
165+
this.seedvalue = ParHashSetCombiner.this.seedvalue
166166
for {
167167
buffer <- buckets
168168
if buffer ne null
@@ -184,7 +184,7 @@ with scala.collection.mutable.FlatHashTable.HashUtils[T] {
184184
table = new Array[AnyRef](capacity(FlatHashTable.sizeForThreshold(numelems, _loadFactor)))
185185
tableSize = 0
186186
threshold = FlatHashTable.newThreshold(_loadFactor, table.length)
187-
seedvalue = inseedvalue
187+
this.seedvalue = inseedvalue
188188
sizeMapInit(table.length)
189189

190190
override def toString = "AFHT(%s)".format(table.length)

core/src/main/scala/scala/collection/parallel/mutable/ParTrieMap.scala

+1-1
Original file line number | Diff line number | Diff line change
@@ -104,7 +104,7 @@ extends ParMap[K, V]
104104
while (i < until) {
105105
array(i) match {
106106
case sn: SNode[_, _] => sz += 1
107-
case in: INode[K, V] => sz += in.cachedSize(ctrie)
107+
case in: INode[K @unchecked, V @unchecked] => sz += in.cachedSize(ctrie)
108108
}
109109
i += 1
110110
}

0 commit comments

Comments (0)