diff --git a/.gitattributes b/.gitattributes
index da4421cb78ed..99eca173f23e 100644
--- a/.gitattributes
+++ b/.gitattributes
@@ -21,6 +21,9 @@ text eol=lf
 *.txt eol=lf
 *.xml eol=lf
 
+# Some sbt launcher scripts can't handle CR in .jvmopts
+.jvmopts eol=lf
+
 # Windows-specific files get windows endings
 *.bat eol=crlf
 *.cmd eol=crlf
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
new file mode 100644
index 000000000000..867bf52ff2db
--- /dev/null
+++ b/.github/workflows/ci.yml
@@ -0,0 +1,51 @@
+name: Scala Merge CI
+
+on:
+  push:
+    branches: ['2.*.x']
+
+defaults:
+  run:
+    shell: bash
+
+jobs:
+  build_and_test:
+    name: Windows
+    runs-on: windows-latest
+    strategy:
+      fail-fast: false
+      matrix:
+        include:
+          - java: 8
+          - java: 17
+    steps:
+      - run: git config --global core.autocrlf false
+      - name: Checkout
+        uses: actions/checkout@v2
+
+      # Note that we don't use olafurpg/setup-scala; it wouldn't buy us anything
+      # over setup-java. (We don't want csbt or xsbt; we prefer the standard
+      # sbt launch script, which comes preinstalled on Windows (and Ubuntu).)
+      - name: Setup Java
+        uses: actions/setup-java@v2
+        with:
+          distribution: temurin
+          java-version: ${{matrix.java}}
+
+      - name: Cache
+        uses: actions/cache@v2
+        with:
+          path: |
+            ~/.sbt
+            ~/.ivy2/cache
+            ~/.cache/coursier
+          key: ${{ runner.os }}-sbt-cache-v2-${{ hashFiles('**/*.sbt') }}-${{ hashFiles('project/build.properties') }}
+
+      - name: Build
+        run: |
+          sbt setupPublishCore generateBuildCharacterPropertiesFile headerCheck publishLocal
+
+      - name: Test
+        run: |
+          STARR=`cat buildcharacter.properties | grep ^maven.version.number | cut -d= -f2` && echo $STARR
+          sbt -Dstarr.version=$STARR setupValidateTest test:compile info testAll
diff --git a/.travis.yml b/.travis.yml
index 994d9c446ecc..7da7ef852558 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -8,130 +8,158 @@ stages:
   - build
   - test
 
+templates: # this has no effect on travis, it's just a place to put our templates
+  pr-jdk8: &pr-jdk8
+    if: type = pull_request OR repo != scala/scala
+
+  cron-jdk17: &cron-jdk17
+    if: type = cron AND repo = scala/scala
+    env: ADOPTOPENJDK=17
+
+  build-for-testing: &build-for-testing
+    # pull request validation (w/ bootstrap)
+    # differs from the build that publishes releases / integration builds:
+    #  - not using bash script setup, but just the underlying sbt calls
+    #  - publishing locally rather than to Artifactory
+    # the bootstrap above is older historically; this way of doing it is newer
+    # and also simpler. we should aim to reduce/eliminate the duplication.
+    stage: build
+    name: build, publishLocal, build again
+    script:
+      - set -e
+      - sbt setupPublishCore generateBuildCharacterPropertiesFile headerCheck publishLocal
+      - STARR=$(sed -n 's/^maven\.version\.number=//p' buildcharacter.properties) && echo $STARR
+      - sbt -Dstarr.version=$STARR setupValidateTest compile
+    workspaces:
+      create:
+        name: bootstrapped
+        paths:
+          # so new STARR will be available
+          - "buildcharacter.properties"
+          - "$HOME/.ivy2/local/org.scala-lang"
+          # so build products built using new STARR are kept
+          - "target"
+          - "project/target"
+          - "project/project/target"
+          - "project/project/project/target"
+          - "dist"
+          - "build"
+
+  test1: &test1
+    stage: test
+    name: tests (junit, scalacheck, et al)
+    workspaces:
+      use: bootstrapped
+    script:
+      - set -e
+      - STARR=$(sed -n 's/^maven\.version\.number=//p' buildcharacter.properties) && echo $STARR
+      - sbt -Dstarr.version=$STARR setupValidateTest Test/compile testAll1
+
+  test2: &test2
+    stage: test
+    name: tests (partest)
+    workspaces:
+      use: bootstrapped
+    script:
+      - set -e
+      - STARR=$(sed -n 's/^maven\.version\.number=//p' buildcharacter.properties) && echo $STARR
+      - sbt -Dstarr.version=$STARR setupValidateTest testAll2
+
 jobs:
-  include:
-    - stage: build
-      if: type != pull_request AND repo = scala/scala
-      name: bootstrap and publish
-      script:
-        # see comment in `bootstrap_fun` for details on the procedure
-        # env available in each stage
-        #  - by travis config (see below): secret env vars
-        #  - by `common` script: WORKSPACE, IVY2_DIR, SBT_CMD, integrationRepoUrl
-        #  - by `bootstrap_fun`: publishPrivateTask, ...
-        - set -e
-        - (cd admin && ./init.sh)
-        - source scripts/common
-        - source scripts/bootstrap_fun
-        - determineScalaVersion
-        - removeExistingBuilds $integrationRepoUrl
-        - if [ ! -z "$STARR_REF" ]; then buildStarr; fi
-        - buildLocker
-        - buildQuick
-        - triggerScalaDist
-
-    # pull request validation (w/ bootstrap)
-    # differs from the bootstrap above by:
-    #  - not using bash script setup, but just the underlying sbt calls
-    #  - publishing locally rather than to Artifactory
-    # the bootstrap above is older historically; this way of doing it is newer
-    # and also simpler. we should aim to reduce/eliminate the duplication.
- - stage: build - name: build, publishLocal, build again - if: type = pull_request OR repo != scala/scala - script: - - set -e - - sbt setupPublishCore generateBuildCharacterPropertiesFile headerCheck publishLocal - - STARR=$(sed -n 's/^maven\.version\.number=//p' buildcharacter.properties) && echo $STARR - - sbt -Dstarr.version=$STARR setupValidateTest compile - workspaces: - create: - name: bootstrapped - paths: - # so new STARR will be available - - "buildcharacter.properties" - - "$HOME/.ivy2/local/org.scala-lang" - # so build products built using new STARR are kept - - "target" - - "project/target" - - "project/project/target" - - "project/project/project/target" - - "dist" - - "build" - - - stage: test - name: tests (junit, scalacheck, et al) - if: type = pull_request OR repo != scala/scala - workspaces: - use: bootstrapped - script: - - set -e - - STARR=$(sed -n 's/^maven\.version\.number=//p' buildcharacter.properties) && echo $STARR - - sbt -Dstarr.version=$STARR setupValidateTest Test/compile testAll1 - - - name: tests (partest) - if: type = pull_request OR repo != scala/scala - workspaces: - use: bootstrapped - script: - - set -e - - STARR=$(sed -n 's/^maven\.version\.number=//p' buildcharacter.properties) && echo $STARR - - sbt -Dstarr.version=$STARR setupValidateTest testAll2 - - - name: ensure standard library is buildable by Scala 3 - if: type = pull_request OR repo != scala/scala - workspaces: - use: bootstrapped - script: - - set -e - - STARR=$(sed -n 's/^maven\.version\.number=//p' buildcharacter.properties) && echo $STARR - - sbt -Dscala.build.compileWithDotty=true library/compile - - - stage: test - name: build benchmarks (bootstrapped) - if: type = pull_request OR repo != scala/scala - workspaces: - use: bootstrapped - script: - - set -e - - STARR=$(sed -n 's/^maven\.version\.number=//p' buildcharacter.properties) && echo $STARR - - sbt bench/Jmh/compile - - - stage: build - name: language spec (Jekyll) - # wkhtmltopdf requires libssl1.1, which we can't install on xenial - dist: bionic - language: ruby - install: - - ruby -v - - gem install bundler - - bundler --version - - bundle install - # cribbed from https://github.com/SebastiaanKlippert/go-wkhtmltopdf/blob/master/.travis.yml - - sudo apt-get update - - sudo apt-get install -y build-essential xorg xfonts-75dpi libpng16-16 libssl1.1 - - wget --quiet "https://github.com/wkhtmltopdf/packaging/releases/download/0.12.6-1/wkhtmltox_0.12.6-1.bionic_amd64.deb" - - sudo dpkg -i "wkhtmltox_0.12.6-1.bionic_amd64.deb" - - rm "wkhtmltox_0.12.6-1.bionic_amd64.deb" - script: - - set -e - - (cd admin && ./init.sh) - - bundle exec jekyll build -s spec/ -d build/spec - - export JEKYLL_ENV=spec-pdf - - bundle exec jekyll build -s spec/ -d build/spec-pdf - - ./scripts/generate-spec-pdf.sh - after_success: - - ./scripts/travis-publish-spec.sh + include: + - stage: build + if: (type = push OR type = api) AND repo = scala/scala # api for manually triggered release builds + name: publish (bootstrapped) to scala-integration or sonatype + script: + # see comment in `bootstrap_fun` for details on the procedure + # env available in each stage + # - by travis config (see below): secret env vars + # - by `common` script: WORKSPACE, IVY2_DIR, SBT_CMD, integrationRepoUrl + # - by `bootstrap_fun`: publishPrivateTask, ... + - set -e + - (cd admin && ./init.sh) + - source scripts/common + - source scripts/bootstrap_fun + - determineScalaVersion + - removeExistingBuilds $integrationRepoUrl + - if [ ! 
-z "$STARR_REF" ]; then buildStarr; fi + - buildLocker + - buildQuick + - triggerScalaDist + + - <<: *build-for-testing + <<: *pr-jdk8 + + - <<: *test1 + <<: *pr-jdk8 + + - <<: *test2 + <<: *pr-jdk8 + + - <<: *build-for-testing + <<: *cron-jdk17 + + - <<: *test1 + <<: *cron-jdk17 + + - <<: *test2 + <<: *cron-jdk17 + + - stage: test + name: build library with Scala 3 + if: type = pull_request OR repo != scala/scala + workspaces: + use: bootstrapped + script: + - set -e + - STARR=$(sed -n 's/^maven\.version\.number=//p' buildcharacter.properties) && echo $STARR + - sbt -Dscala.build.compileWithDotty=true library/compile + + - name: build benchmarks + if: type = pull_request OR repo != scala/scala + workspaces: + use: bootstrapped + script: + - set -e + - STARR=$(sed -n 's/^maven\.version\.number=//p' buildcharacter.properties) && echo $STARR + - sbt bench/Jmh/compile + + - stage: build + if: type = pull_request OR type = push + name: language spec + # wkhtmltopdf requires libssl1.1, which we can't install on xenial + dist: bionic + language: ruby + install: + - ruby -v + - gem install bundler + - bundler --version + - bundle install + # cribbed from https://github.com/SebastiaanKlippert/go-wkhtmltopdf/blob/master/.travis.yml + - sudo apt-get update + - sudo apt-get install -y build-essential xorg xfonts-75dpi libpng16-16 libssl1.1 + - wget --quiet "https://github.com/wkhtmltopdf/packaging/releases/download/0.12.6-1/wkhtmltox_0.12.6-1.bionic_amd64.deb" + - sudo dpkg -i "wkhtmltox_0.12.6-1.bionic_amd64.deb" + - rm "wkhtmltox_0.12.6-1.bionic_amd64.deb" + script: + - set -e + - (cd admin && ./init.sh) + - bundle exec jekyll build -s spec/ -d build/spec + - export JEKYLL_ENV=spec-pdf + - bundle exec jekyll build -s spec/ -d build/spec-pdf + - ./scripts/generate-spec-pdf.sh + after_success: + - ./scripts/travis-publish-spec.sh env: global: - ADOPTOPENJDK=8 - secure: "P8EqpZoin/YTnwel9TTxSSAHtXfZ4M262BKXlYUZmjoQsjyvXDAeZ7yAqgAvX5BeRFrGkBToPiE+V60stdWkPKs3+9COw2BDUB1CULBHhRY9Lxordmz0xVhgEfsoH4f6r6wOlIQ9kuaWhmP+JdB/mzOHZhLch9ziPi8O46Z8t4k=" # PRIV_KEY_SECRET, for scripts/travis-publish-spec.sh - - secure: "T1fxtvLTxioyXJYiC/zVYdNYsBOt+0Piw+xE04rB1pzeKahm9+G2mISdcAyqv6/vze9eIJt6jNHHpKX32/Z3Cs1/Ruha4m3k+jblj3S0SbxV6ht2ieJXLT5WoUPFRrU68KXI8wqUadXpjxeJJV53qF2FC4lhfMUsw1IwwMhdaE8=" # PRIVATE_REPO_PASS - - secure: "feE5A8mYNpkNQKVwCj3aXrwjVrJWh/4ENpRfFlr2HOD9ORk1GORD5Yq907WZd+dTkYK54Lh1gA+qHOCIDgJHbi9ZLU+kjzEjtYKF6lQy6Wb0LI8smTOnAA6IWVVYifiXw8d66MI2MKZb2jjGeIzy8Q00SZjLhEGjLyTeCIB88Ws=" # SONA_USER - - secure: "ek3As5q2tL8UBXcxSBbv4v5YgsoPD41SCzPOSu72kzfbngyxgQxrcziU5pIM+Lib9KaWex7hVVWNL38tMyDbu+0OpDv8bPjMujzlDx5I2pJUfuOJo7QRYsJE1nsXcY4cA72cCLfbRcLEkvtDAhcdLSaUOqlyQe5BY4X4fY5eoPA=" # SONA_PASS - - secure: "dbAvl6KEuLwZ0MVQPZihFsPzCdiLbX0EFk3so+hcfEbksrmLQ1tn4X5ZM7Wy1UDR8uN9lxngEwHch7a7lKqpugzmXMew9Wnikr9WBWbJT77Z+XJ/jHI6YuiCRpRo+nvxXGp9Ry80tSIgx5eju0J83IaJL41BWlBkvyAd7YAHORI=" # GPG_SUBKEY_SECRET - - secure: "RTyzS6nUgthupw5M0fPwTlcOym1sWgBo8eXYepB2xGiQnRu4g583BGuNBW1UZ3vIjRETi/UKQ1HtMR+i7D8ptF1cNpomopncVJA1iy7pU2w0MJ0xgIPMuvtkIa3kxocd/AnxAp+UhUad3nC8lDpkvZsUhhyA0fb4iPKipd2b2xY=" # TRAVIS_TOKEN (login with GitHub as SethTisue) + - secure: "T1fxtvLTxioyXJYiC/zVYdNYsBOt+0Piw+xE04rB1pzeKahm9+G2mISdcAyqv6/vze9eIJt6jNHHpKX32/Z3Cs1/Ruha4m3k+jblj3S0SbxV6ht2ieJXLT5WoUPFRrU68KXI8wqUadXpjxeJJV53qF2FC4lhfMUsw1IwwMhdaE8=" # PRIVATE_REPO_PASS, for publishing to scala-ci Artifactory + - secure: 
"dbAvl6KEuLwZ0MVQPZihFsPzCdiLbX0EFk3so+hcfEbksrmLQ1tn4X5ZM7Wy1UDR8uN9lxngEwHch7a7lKqpugzmXMew9Wnikr9WBWbJT77Z+XJ/jHI6YuiCRpRo+nvxXGp9Ry80tSIgx5eju0J83IaJL41BWlBkvyAd7YAHORI=" # GPG_SUBKEY_SECRET, so we can sign JARs + - secure: "RTyzS6nUgthupw5M0fPwTlcOym1sWgBo8eXYepB2xGiQnRu4g583BGuNBW1UZ3vIjRETi/UKQ1HtMR+i7D8ptF1cNpomopncVJA1iy7pU2w0MJ0xgIPMuvtkIa3kxocd/AnxAp+UhUad3nC8lDpkvZsUhhyA0fb4iPKipd2b2xY=" # TRAVIS_TOKEN (login with GitHub as SethTisue), for triggering scala-dist job + - secure: "FvhicbSeys7VNTj9ZP/aNT0NhiQP/NNV0KRfK7IHxi3uOeaxFVfaQsln4lzqZn8dkntgzzNrE/VhvMIknfnISAPX7bShy6SRyj3V2BlcUpuem8WtwmkCaZ42xlCJteBL7NW0auG/8rxrNIAJXbRObqF+YdK6XsRMWaBMQHky+ss=" # SONA_USER, token username for publishing to Sonatype + - secure: "Y8CTlEdQbAS+P+LgkY05al/KSbccbX5BATm9N2GI9C6wH7oQuUU/VtU+bwvzeiF9DCsZPjrWXsa0JCuIQE+UzK1NWXxlkhUdGCaCBZ/nUecouBtMk2x/h7uIGpeYInxA041r5SuBecZuZQI79nhl+BwZSLu82Vy1QtP0/Cd8oRM=" # SONA_PASS, token password for publishing to Sonatype # caching for sdkman / sbt / ivy / coursier imported from scala-dev cache: @@ -139,4 +167,10 @@ cache: - $HOME/.rvm notifications: + slack: + rooms: + - typesafe:WoewGgHil2FkdGzJyV3phAhj + if: (type = cron OR type = push) AND repo = scala/scala + on_success: never + on_failure: change webhooks: https://scala-ci.typesafe.com/benchq/webhooks/travis diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 78db0a59d6d8..197f841d78db 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -10,9 +10,9 @@ In 2014, you -- the Scala community -- matched the core team at EPFL in number o We are super happy about this, and are eager to make your experience contributing to Scala productive and satisfying, so that we can keep up this growth. We can't do this alone (nor do we want to)! -This is why we're collecting these notes on how to contribute, and we hope you'll share your experience to improve the process for the next contributor! (Feel free to send a PR for this note, send your thoughts to scala/contributors (Gitter) or contributors.scala-lang.org (Discourse).) +This is why we're collecting these notes on how to contribute, and we hope you'll share your experience to improve the process for the next contributor! (Feel free to send a PR for this note, send your thoughts to \#scala-contributors (on [Discord](https://discord.com/invite/scala)) or contributors.scala-lang.org (Discourse).) -By the way, the team at Lightbend is: @lrytz, @retronym, @SethTisue, and @dwijnand. +By the way, the team at Lightbend is: @lrytz, @retronym, @SethTisue, and @dwijnand. ## What kind of PR are you submitting? @@ -115,8 +115,25 @@ To run a single negative test from sbt shell: root> partest --verbose test/files/neg/delayed-init-ref.scala ``` -To specify compiler flags such as `-Werror -Xlint`, you can add a comment -at the top of your source file of the form: `// scalac: -Werror -Xlint`. +A test can be either a single `.scala` file or a directory containing multiple `.scala` and `.java` files. +For testing separate compilation, files can be grouped using `_N` suffixes in the filename. For example, a test +with files (`A.scala`, `B_1.scala`, `C_1.java`, `Test_2.scala`) does: +``` +scalac A.scala -d out +scalac -cp out B_1.scala C_1.java -d out +javac -cp out C_1.java -d out +scalac -cp out Test_2.scala -d out +scala -cp out Test +``` + +**Flags** + - To specify compiler flags such as `-Werror -Xlint`, you can add a comment at the top of your source file of the form: `// scalac: -Werror -Xlint`. 
+ - Similarly, a `// javac: ` comment in a Java source file passes flags to the Java compiler. + - A `// filter: ` comment eliminates output lines that match the filter before comparing to the `.check` file. + - A `// java: ` comment makes a `run` test execute in a separate JVM and passes the additional flags to the `java` command. + - A `// javaVersion ` comment makes partest skip the test if the java version is outside the requested range (e.g. `8`, `15+`, `9 - 11`) + +**Common Usage** To test that no warnings are emitted while compiling a `pos` test, use `-Werror`. That will fail a `pos` test if there are warnings. Note that `pos` tests do not have `.check` files. @@ -171,7 +188,7 @@ See `--help` for more info: root> partest --help ``` -Partests are compiled by the `quick` compiler (and `run` partests executed with the `quick` library), +Partests are compiled by the bootstrapped `quick` compiler (and `run` partests executed with the `quick` library), and therefore: * if you're working on the compiler, you must write a partest, or a `BytecodeTesting` JUnit test which invokes the compiler programmatically; however @@ -268,8 +285,7 @@ See the [scala-jenkins-infra repo](https://github.com/scala/scala-jenkins-infra) ### Pass code review Your PR will need to be assigned to one or more reviewers. You can suggest reviewers -yourself; if you're not sure, see the list in [README.md](README.md) or ask on scala/contributors (Gitter) -or contributors.scala-lang.org (Discourse). +yourself; if you're not sure, see the list in [README.md](README.md) or ask on \#scala-contributors (on [Discord](https://discord.com/invite/scala)) or contributors.scala-lang.org (Discourse). To assign a reviewer, add a "review by @reviewer" to the PR description or in a comment on your PR. @@ -283,8 +299,8 @@ and `push -f` to the branch. This is to keep the git history clean. Additional c are OK if they stand on their own. Once all these conditions are met, we will merge your changes -- if we -agree with it! We are available on scala/contributors (Gitter) or -contributors.scala-lang.org (Discourse) to discuss changes beforehand, +agree with it! We are available on \#scala-contributors (on [Discord](https://discord.com/invite/scala)) +or contributors.scala-lang.org (Discourse) to discuss changes beforehand, before you put in the coding work. diff --git a/NOTICE b/NOTICE index ac3a26b40f48..ba6f890b920f 100644 --- a/NOTICE +++ b/NOTICE @@ -1,6 +1,6 @@ Scala -Copyright (c) 2002-2020 EPFL -Copyright (c) 2011-2020 Lightbend, Inc. +Copyright (c) 2002-2021 EPFL +Copyright (c) 2011-2021 Lightbend, Inc. Scala includes software developed at LAMP/EPFL (https://lamp.epfl.ch/) and diff --git a/README.md b/README.md index 1fefc3f11305..e7613b0abed5 100644 --- a/README.md +++ b/README.md @@ -26,8 +26,8 @@ For more information on building and developing the core of Scala, read the rest # Get in touch! -In order to get in touch with other Scala contributors, join -[scala/contributors](https://gitter.im/scala/contributors) (Gitter) or post on +In order to get in touch with other Scala contributors, join the +\#scala-contributors channel on the [Scala Discord](https://discord.com/invite/scala) chat, or post on [contributors.scala-lang.org](https://contributors.scala-lang.org) (Discourse). 
If you need some help with your PR at any time, please feel free to @-mention anyone from the list below, and we will do our best to help you out: diff --git a/build.sbt b/build.sbt index 4bffeca7cf91..4a5eafa9998a 100644 --- a/build.sbt +++ b/build.sbt @@ -3,7 +3,7 @@ * * What you see below is very much work-in-progress. The following features are implemented: * - Compiling all classes for the compiler and library ("compile" in the respective subprojects) - * - Running JUnit ("junit/test"), ScalaCheck ("scalacheck/test"), and partest ("test/it:test") tests + * - Running JUnit ("junit/test"), ScalaCheck ("scalacheck/test"), and partest ("test/IntegrationTest/test") tests * - Creating build/quick with all compiled classes and launcher scripts ("dist/mkQuick") * - Creating build/pack with all JARs and launcher scripts ("dist/mkPack") * - Building all scaladoc sets ("doc") @@ -36,8 +36,8 @@ import scala.build._, VersionUtil._ // Non-Scala dependencies: val junitDep = "junit" % "junit" % "4.13.2" -val junitInterfaceDep = "com.novocode" % "junit-interface" % "0.11" % Test -val scalacheckDep = "org.scalacheck" %% "scalacheck" % "1.15.3" % Test +val junitInterfaceDep = "com.github.sbt" % "junit-interface" % "0.13.2" % Test +val scalacheckDep = "org.scalacheck" %% "scalacheck" % "1.15.4" % Test val jolDep = "org.openjdk.jol" % "jol-core" % "0.13" val asmDep = "org.scala-lang.modules" % "scala-asm" % versionProps("scala-asm.version") val jlineDep = "org.jline" % "jline" % versionProps("jline.version") @@ -46,6 +46,8 @@ val jlineDeps = Seq(jlineDep, jnaDep) val testInterfaceDep = "org.scala-sbt" % "test-interface" % "1.0" val diffUtilsDep = "com.googlecode.java-diff-utils" % "diffutils" % "1.3.0" +val projectFolder = settingKey[String]("subfolder in src when using configureAsSubproject, else the project name") + // `set Global / fatalWarnings := true` to enable -Werror for the certain modules // currently, many modules cannot support -Werror; ideally this setting will eventually // enable -Werror for all modules @@ -54,12 +56,13 @@ val fatalWarnings = settingKey[Boolean]("whether or not warnings should be fatal // enable fatal warnings automatically on CI Global / fatalWarnings := insideCI.value +Global / credentials ++= { + val file = Path.userHome / ".credentials" + if (file.exists && !file.isDirectory) List(Credentials(file)) + else Nil +} + lazy val publishSettings : Seq[Setting[_]] = Seq( - credentials ++= { - val file = Path.userHome / ".credentials" - if (file.exists && !file.isDirectory) List(Credentials(file)) - else Nil - }, // Add a "default" Ivy configuration because sbt expects the Scala distribution to have one: ivyConfigurations += Configuration.of("Default", "default", "Default", true, Vector(Configurations.Runtime), true), publishMavenStyle := true @@ -70,7 +73,7 @@ lazy val publishSettings : Seq[Setting[_]] = Seq( // should not be set directly. It is the same as the Maven version and derived automatically from `baseVersion` and // `baseVersionSuffix`. 
globalVersionSettings -Global / baseVersion := "2.13.6" +Global / baseVersion := "2.13.8" Global / baseVersionSuffix := "SNAPSHOT" ThisBuild / organization := "org.scala-lang" ThisBuild / homepage := Some(url("https://www.scala-lang.org")) @@ -130,20 +133,21 @@ lazy val commonSettings = instanceSettings ++ clearSourceAndResourceDirectories // we always assume that Java classes are standalone and do not have any dependency // on Scala classes compileOrder := CompileOrder.JavaThenScala, + projectFolder := thisProject.value.id, // overridden in configureAsSubproject Compile / javacOptions ++= Seq("-g", "-source", "1.8", "-target", "1.8", "-Xlint:unchecked"), Compile / unmanagedJars := Seq.empty, // no JARs in version control! Compile / sourceDirectory := baseDirectory.value, Compile / unmanagedSourceDirectories := List(baseDirectory.value), - Compile / unmanagedResourceDirectories += (ThisBuild / baseDirectory).value / "src" / thisProject.value.id, + Compile / unmanagedResourceDirectories += (ThisBuild / baseDirectory).value / "src" / projectFolder.value, sourcesInBase := false, Compile / scalaSource := (Compile / sourceDirectory).value, // for some reason sbt 1.4 issues unused-settings warnings for this, it seems to me incorrectly Global / excludeLintKeys ++= Set(scalaSource), // each subproject has to ask specifically for files they want to include Compile / unmanagedResources / includeFilter := NothingFilter, - target := (ThisBuild / target).value / thisProject.value.id, - Compile / classDirectory := buildDirectory.value / "quick/classes" / thisProject.value.id, - Compile / doc / target := buildDirectory.value / "scaladoc" / thisProject.value.id, + target := (ThisBuild / target).value / projectFolder.value, + Compile / classDirectory := buildDirectory.value / "quick/classes" / projectFolder.value, + Compile / doc / target := buildDirectory.value / "scaladoc" / projectFolder.value, // given that classDirectory and doc target are overridden to be _outside_ of target directory, we have // to make sure they are being cleaned properly cleanFiles += (Compile / classDirectory).value, @@ -157,6 +161,9 @@ lazy val commonSettings = instanceSettings ++ clearSourceAndResourceDirectories // we don't want optimizer warnings to interfere with `-Werror`. 
we have hundreds of such warnings // when the optimizer is enabled (as it is in CI and release builds, though not in local development) Compile / scalacOptions += "-Wconf:cat=optimizer:is", + // We use @nowarn for some methods that are deprecated in Java > 8 + Compile / scalacOptions += "-Wconf:cat=unused-nowarn:s", + Compile / scalacOptions ++= Seq("-deprecation", "-feature"), Compile / doc / scalacOptions ++= Seq( "-doc-footer", "epfl", "-diagrams", @@ -225,7 +232,11 @@ lazy val commonSettings = instanceSettings ++ clearSourceAndResourceDirectories lazy val fatalWarningsSettings = Seq( Compile / scalacOptions ++= { - if (fatalWarnings.value) Seq("-Werror", "-Wconf:cat=unused-nowarn:is") + if (fatalWarnings.value) Seq("-Werror") + else Nil + }, + Compile / javacOptions ++= { + if (fatalWarnings.value) Seq("-Werror") else Nil }, Compile / doc / scalacOptions -= "-Werror", // there are too many doc errors to enable this right now @@ -346,7 +357,7 @@ def setForkedWorkingDirectory: Seq[Setting[_]] = { } // This project provides the STARR scalaInstance for bootstrapping -lazy val bootstrap = project in file("target/bootstrap") +lazy val bootstrap = project.in(file("target/bootstrap")).settings(bspEnabled := false) lazy val library = configureAsSubproject(project) .settings(generatePropertiesFileSettings) @@ -417,7 +428,7 @@ lazy val reflect = configureAsSubproject(project) "/project/description" -> Compiler for the Scala Programming Language, "/project/packaging" -> jar ), - apiURL := Some(url(s"https://www.scala-lang.org/api/${versionProperties.value.mavenVersion}/scala-${thisProject.value.id}/")), + apiURL := Some(url(s"https://www.scala-lang.org/api/${versionProperties.value.mavenVersion}/scala-${projectFolder.value}/")), MimaFilters.mimaSettings, ) .dependsOn(library) @@ -487,6 +498,7 @@ lazy val compiler = configureAsSubproject(project) |org.jline.terminal.impl.jna.*;resolution:=optional |org.jline.terminal.spi;resolution:=optional |org.jline.utils;resolution:=optional + |org.jline.builtins;resolution:=optional |scala.*;version="$${range;[==,=+);$${ver}}" |*""".stripMargin.linesIterator.mkString(","), "Class-Path" -> "scala-reflect.jar scala-library.jar" @@ -500,7 +512,7 @@ lazy val compiler = configureAsSubproject(project) "/project/description" -> Compiler for the Scala Programming Language, "/project/packaging" -> jar ), - apiURL := Some(url(s"https://www.scala-lang.org/api/${versionProperties.value.mavenVersion}/scala-${thisProject.value.id}/")), + apiURL := Some(url(s"https://www.scala-lang.org/api/${versionProperties.value.mavenVersion}/scala-${projectFolder.value}/")), pomDependencyExclusions += (("org.scala-lang.modules", "scala-asm")) ) .dependsOn(library, reflect) @@ -615,7 +627,7 @@ lazy val tastytest = configureAsSubproject(project) .settings( name := "scala-tastytest", description := "Scala TASTy Integration Testing Tool", - libraryDependencies ++= List(diffUtilsDep, TastySupport.scala3Compiler), + libraryDependencies += diffUtilsDep, Compile / scalacOptions ++= Seq("-feature", "-Xlint"), ) @@ -625,8 +637,9 @@ lazy val specLib = project.in(file("test") / "instrumented") .settings(commonSettings) .settings(disableDocs) .settings(fatalWarningsSettings) - .settings(publish / skip := true) .settings( + publish / skip := true, + bspEnabled := false, Compile / sourceGenerators += Def.task { import scala.collection.JavaConverters._ val srcBase = (library / Compile / sourceDirectories).value.head / "scala/runtime" @@ -663,12 +676,16 @@ lazy val bench = project.in(file("test") / 
"benchmarks") name := "test-benchmarks", autoScalaLibrary := false, crossPaths := true, // needed to enable per-scala-version source directories (https://github.com/sbt/sbt/pull/1799) + compileOrder := CompileOrder.JavaThenScala, // to allow inlining from Java ("... is defined in a Java source (mixed compilation), no bytecode is available") libraryDependencies += "org.openjdk.jol" % "jol-core" % "0.10", libraryDependencies ++= { if (benchmarkScalaVersion == "") Nil else "org.scala-lang" % "scala-compiler" % benchmarkScalaVersion :: Nil }, - scalacOptions ++= Seq("-feature", "-opt:l:inline", "-opt-inline-from:scala.**") + scalacOptions ++= Seq("-feature", "-opt:l:inline", "-opt-inline-from:scala/**", "-opt-warnings"), + // Skips JMH source generators during IDE import to avoid needing to compile scala-library during the import + // should not be needed once sbt-jmh 0.4.3 is out (https://github.com/sbt/sbt-jmh/pull/207) + Jmh / bspEnabled := false ).settings(inConfig(JmhPlugin.JmhKeys.Jmh)(scalabuild.JitWatchFilePlugin.jitwatchSettings)) @@ -690,6 +707,12 @@ lazy val testkit = configureAsSubproject(project) ) ) +// Jigsaw: reflective access between modules (`setAccessible(true)`) requires an `opens` directive. +// This is enforced by error (not just by warning) since JDK 16. In our tests we use reflective access +// from the unnamed package (the classpath) to JDK modules in testing utilities like `assertNotReachable`. +// `add-exports=jdk.jdeps/com.sun.tools.javap` is tests that use `:javap` in the REPL, see scala/bug#12378 +val addOpensForTesting = "-XX:+IgnoreUnrecognizedVMOptions" +: "--add-exports=jdk.jdeps/com.sun.tools.javap=ALL-UNNAMED" +: + Seq("java.util.concurrent.atomic", "java.lang", "java.lang.reflect", "java.net").map(p => s"--add-opens=java.base/$p=ALL-UNNAMED") lazy val junit = project.in(file("test") / "junit") .dependsOn(testkit, compiler, replFrontend, scaladoc) @@ -699,13 +722,14 @@ lazy val junit = project.in(file("test") / "junit") .settings(publish / skip := true) .settings( Test / fork := true, - Test / javaOptions += "-Xss1M", + Test / javaOptions ++= "-Xss1M" +: addOpensForTesting, (Test / forkOptions) := (Test / forkOptions).value.withWorkingDirectory((ThisBuild / baseDirectory).value), (Test / testOnly / forkOptions) := (Test / testOnly / forkOptions).value.withWorkingDirectory((ThisBuild / baseDirectory).value), Compile / scalacOptions ++= Seq( "-feature", "-Xlint:-valpattern,_", "-Wconf:msg=match may not be exhaustive:s", // if we missed a case, all that happens is the test fails + "-Wconf:cat=lint-nullary-unit&site=.*Test:s", // normal unit test style "-Ypatmat-exhaust-depth", "40", // despite not caring about patmat exhaustiveness, we still get warnings for this ), Compile / javacOptions ++= Seq("-Xlint"), @@ -722,7 +746,7 @@ lazy val tasty = project.in(file("test") / "tasty") .settings(publish / skip := true) .settings( Test / fork := true, - libraryDependencies += junitInterfaceDep, + libraryDependencies ++= Seq(junitInterfaceDep, TastySupport.scala3Library), testOptions += Tests.Argument(TestFrameworks.JUnit, "-a", "-v"), Test / testOptions += Tests.Argument( s"-Dtastytest.src=${baseDirectory.value}", @@ -731,10 +755,32 @@ lazy val tasty = project.in(file("test") / "tasty") Compile / unmanagedSourceDirectories := Nil, Test / unmanagedSourceDirectories := List(baseDirectory.value/"test"), ) + .configs(TastySupport.CompilerClasspath, TastySupport.LibraryClasspath) + .settings( + 
inConfig(TastySupport.CompilerClasspath)(Defaults.configSettings), + inConfig(TastySupport.LibraryClasspath)(Defaults.configSettings), + libraryDependencies ++= Seq( + TastySupport.scala3Compiler % TastySupport.CompilerClasspath, + TastySupport.scala3Library % TastySupport.LibraryClasspath, + ), + javaOptions ++= { + import java.io.File.pathSeparator + val scalaLibrary = (library / Compile / classDirectory).value.getAbsoluteFile() + val scalaReflect = (reflect / Compile / classDirectory).value.getAbsoluteFile() + val dottyCompiler = (TastySupport.CompilerClasspath / managedClasspath).value.seq.map(_.data) :+ scalaLibrary + val dottyLibrary = (TastySupport.LibraryClasspath / managedClasspath).value.seq.map(_.data) :+ scalaLibrary + Seq( + s"-Dtastytest.classpaths.dottyCompiler=${dottyCompiler.mkString(pathSeparator)}", + s"-Dtastytest.classpaths.dottyLibrary=${dottyLibrary.mkString(pathSeparator)}", + s"-Dtastytest.classpaths.scalaReflect=$scalaReflect", + ) + }, + ) lazy val scalacheck = project.in(file("test") / "scalacheck") .dependsOn(library, reflect, compiler, scaladoc) .settings(commonSettings) + .settings(fatalWarningsSettings) .settings(disableDocs) .settings(publish / skip := true) .settings( @@ -742,12 +788,12 @@ lazy val scalacheck = project.in(file("test") / "scalacheck") Test / fork := true, // Instead of forking above, it should be possible to set: // Test / classLoaderLayeringStrategy := ClassLoaderLayeringStrategy.Flat, - Test / javaOptions += "-Xss1M", + Test / javaOptions ++= "-Xss1M" +: addOpensForTesting, Test / testOptions += Tests.Argument( // Full stack trace on failure: "-verbosity", "2" ), - libraryDependencies ++= Seq(scalacheckDep), + libraryDependencies ++= Seq(scalacheckDep, junitDep), Compile / unmanagedSourceDirectories := Nil, Test / unmanagedSourceDirectories := List(baseDirectory.value) ) @@ -764,8 +810,9 @@ def osgiTestProject(p: Project, framework: ModuleID) = p .dependsOn(library, reflect, compiler) .settings(commonSettings) .settings(disableDocs) - .settings(publish / skip := true) .settings( + publish / skip := true, + bspEnabled := false, Test / fork := true, Test / parallelExecution := false, libraryDependencies ++= { @@ -778,16 +825,16 @@ def osgiTestProject(p: Project, framework: ModuleID) = p "org.ops4j.pax.exam" % "pax-exam-link-assembly" % paxExamVersion, "org.ops4j.pax.url" % "pax-url-aether" % "2.4.1", "org.ops4j.pax.swissbox" % "pax-swissbox-tracker" % "1.8.1", - "ch.qos.logback" % "logback-core" % "1.1.3", - "ch.qos.logback" % "logback-classic" % "1.1.3", - "org.slf4j" % "slf4j-api" % "1.7.12", + "ch.qos.logback" % "logback-core" % "1.2.8", + "ch.qos.logback" % "logback-classic" % "1.2.8", + "org.slf4j" % "slf4j-api" % "1.7.32", framework % Test ) }, Test / Keys.test := (Test / Keys.test).dependsOn(Compile / packageBin).value, Test / Keys.testOnly := (Test / Keys.testOnly).dependsOn(Compile / packageBin).evaluated, testOptions += Tests.Argument(TestFrameworks.JUnit, "-a", "-v", "-q"), - Test / javaOptions += "-Dscala.bundle.dir=" + (ThisBuild / buildDirectory).value / "osgi", + Test / javaOptions ++= ("-Dscala.bundle.dir=" + (ThisBuild / buildDirectory).value / "osgi") +: addOpensForTesting, Test / Keys.test / forkOptions := (Test / Keys.test / forkOptions).value.withWorkingDirectory((ThisBuild / baseDirectory).value), Test / unmanagedSourceDirectories := List((ThisBuild / baseDirectory).value / "test" / "osgi" / "src"), Compile / unmanagedResourceDirectories := (Test / unmanagedSourceDirectories).value, @@ -803,9 +850,8 @@ def 
osgiTestProject(p: Project, framework: ModuleID) = p cleanFiles += (ThisBuild / buildDirectory).value / "osgi" ) -lazy val partestJavaAgent = Project("partestJavaAgent", file(".") / "src" / "partest-javaagent") - .settings(commonSettings) - .settings(generatePropertiesFileSettings) +lazy val partestJavaAgent = configureAsSubproject(project, srcdir = Some("partest-javaagent")) + .settings(fatalWarningsSettings) .settings(disableDocs) .settings( libraryDependencies += asmDep, @@ -838,10 +884,10 @@ lazy val test = project IntegrationTest / sources := Nil, IntegrationTest / fork := true, Compile / scalacOptions += "-Yvalidate-pos:parser,typer", - IntegrationTest / javaOptions ++= List("-Xmx2G", "-Dpartest.exec.in.process=true", "-Dfile.encoding=UTF-8", "-Duser.language=en", "-Duser.country=US"), + IntegrationTest / javaOptions ++= List("-Xmx2G", "-Dpartest.exec.in.process=true", "-Dfile.encoding=UTF-8", "-Duser.language=en", "-Duser.country=US") ++ addOpensForTesting, IntegrationTest / testOptions += Tests.Argument("-Dfile.encoding=UTF-8", "-Duser.language=en", "-Duser.country=US"), testFrameworks += new TestFramework("scala.tools.partest.sbt.Framework"), - IntegrationTest / testOptions += Tests.Argument("-Dpartest.java_opts=-Xmx1024M -Xms64M"), + IntegrationTest / testOptions += Tests.Argument(s"-Dpartest.java_opts=-Xmx1024M -Xms64M ${addOpensForTesting.mkString(" ")}"), IntegrationTest / testOptions += Tests.Argument("-Dpartest.scalac_opts=" + (Compile / scalacOptions).value.mkString(" ")), (IntegrationTest / forkOptions) := (IntegrationTest / forkOptions).value.withWorkingDirectory((ThisBuild / baseDirectory).value), IntegrationTest / testOptions += { @@ -879,6 +925,7 @@ lazy val test = project lazy val manual = configureAsSubproject(project) .settings(disableDocs) .settings(publish / skip := true) + .settings(fatalWarningsSettings) .settings( libraryDependencies += "org.scala-lang" % "scala-library" % scalaVersion.value, Compile / classDirectory := (Compile / target).value / "classes" @@ -888,6 +935,7 @@ lazy val scalaDist = Project("scalaDist", file(".") / "target" / "scala-dist-dis .settings(commonSettings) .settings(disableDocs) .settings( + bspEnabled := false, name := "scala-dist", Compile / packageBin / mappings ++= { val binBaseDir = buildDirectory.value / "pack" @@ -939,9 +987,9 @@ def partestDesc(in: String): Def.Initialize[Task[(Result[Unit], String)]] = lazy val root: Project = (project in file(".")) .settings(disableDocs) - .settings(publish / skip := true) .settings(generateBuildCharacterFileSettings) .settings( + publish / skip := true, commands ++= ScriptCommands.all, extractBuildCharacterPropertiesFile := { val jar = (bootstrap / scalaInstance).value.allJars.find(_.getName contains "-compiler").get @@ -1080,6 +1128,7 @@ lazy val distDependencies = Seq(replFrontend, compiler, library, reflect, scalap lazy val dist = (project in file("dist")) .settings(commonSettings) .settings( + bspEnabled := false, libraryDependencies ++= jlineDeps, mkBin := mkBinImpl.value, mkQuick := Def.task { @@ -1091,7 +1140,7 @@ lazy val dist = (project in file("dist")) (ThisBuild / buildDirectory).value / "quick" }.dependsOn((distDependencies.map(_ / Runtime / products) :+ mkBin): _*).value, mkPack := Def.task { (ThisBuild / buildDirectory).value / "pack" }.dependsOn(Compile / packageBin / packagedArtifact, mkBin).value, - target := (ThisBuild / target).value / thisProject.value.id, + target := (ThisBuild / target).value / projectFolder.value, Compile / packageBin := { val targetDir = (ThisBuild / 
buildDirectory).value / "pack" / "lib" val jlineJAR = findJar((Compile / dependencyClasspath).value, jlineDep).get.data @@ -1128,6 +1177,7 @@ def configureAsSubproject(project: Project, srcdir: Option[String] = None): Proj (project in base) .settings(scalaSubprojectSettings) .settings(generatePropertiesFileSettings) + .settings(projectFolder := srcdir.getOrElse(project.id)) } lazy val mkBin = taskKey[Seq[File]]("Generate shell script (bash or Windows batch).") @@ -1199,7 +1249,7 @@ def generateServiceProviderResources(services: (String, String)*): Setting[_] = // Add tab completion to partest commands += Command("partest")(_ => PartestUtil.partestParser((ThisBuild / baseDirectory).value, (ThisBuild / baseDirectory).value / "test")) { (state, parsed) => - ("test/it:testOnly -- " + parsed) :: state + ("test/IntegrationTest/testOnly -- " + parsed) :: state } // Watch the test files also so ~partest triggers on test case changes @@ -1386,13 +1436,6 @@ def findJar(files: Seq[Attributed[File]], dep: ModuleID): Option[Attributed[File files.find(_.get(moduleID.key).map(extract _) == Some(extract(dep))) } -// WhiteSource -whitesourceProduct := "Lightbend Reactive Platform" -whitesourceAggregateProjectName := "scala-2.13-stable" -whitesourceIgnoredScopes := Vector("test", "scala-tool") -// for some reason sbt 1.4 issues an unused-setting warning for this, I don't understand why -Global / excludeLintKeys += whitesourceIgnoredScopes - { scala.build.TravisOutput.installIfOnTravis() Nil diff --git a/doc/LICENSE.md b/doc/LICENSE.md index 83ef781d15f1..8a8e78738ff2 100644 --- a/doc/LICENSE.md +++ b/doc/LICENSE.md @@ -2,9 +2,9 @@ Scala is licensed under the [Apache License Version 2.0](https://www.apache.org/ ## Scala License -Copyright (c) 2002-2020 EPFL +Copyright (c) 2002-2021 EPFL -Copyright (c) 2011-2020 Lightbend, Inc. +Copyright (c) 2011-2021 Lightbend, Inc. All rights reserved. diff --git a/doc/License.rtf b/doc/License.rtf index 376ec02cb530..8f266ee71b4b 100644 --- a/doc/License.rtf +++ b/doc/License.rtf @@ -23,8 +23,8 @@ Scala is licensed under the\'a0{\field{\*\fldinst{HYPERLINK "https://www.apache. \fs48 \cf2 Scala License\ \pard\pardeftab720\sl360\sa320\partightenfactor0 -\f0\b0\fs28 \cf2 Copyright (c) 2002-2020 EPFL\ -Copyright (c) 2011-2020 Lightbend, Inc.\ +\f0\b0\fs28 \cf2 Copyright (c) 2002-2021 EPFL\ +Copyright (c) 2011-2021 Lightbend, Inc.\ All rights reserved.\ \pard\pardeftab720\sl360\sa320\partightenfactor0 \cf2 \cb4 Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. 
You may obtain a copy of the License at {\field{\*\fldinst{HYPERLINK "http://www.apache.org/licenses/LICENSE-2.0"}}{\fldrslt http://www.apache.org/licenses/LICENSE-2.0}}.\ diff --git a/project/DottySupport.scala b/project/DottySupport.scala index da1bb5985189..ac3aa7faff9e 100644 --- a/project/DottySupport.scala +++ b/project/DottySupport.scala @@ -12,8 +12,12 @@ import sbt.librarymanagement.{ * Settings to support validation of TastyUnpickler against the release of dotty with the matching TASTy version */ object TastySupport { - val supportedTASTyRelease = "3.0.0-RC1" // TASTy version 28.0.1 - val scala3Compiler = "org.scala-lang" % "scala3-compiler_3.0.0-RC1" % supportedTASTyRelease + val supportedTASTyRelease = "3.1.0" // TASTy version 28.1-0 + val scala3Compiler = "org.scala-lang" % "scala3-compiler_3" % supportedTASTyRelease + val scala3Library = "org.scala-lang" % "scala3-library_3" % supportedTASTyRelease + + val CompilerClasspath = Configuration.of("TastySupport.CompilerClasspath", "TastySupport.CompilerClasspath") + val LibraryClasspath = Configuration.of("TastySupport.LibraryClasspath", "TastySupport.LibraryClasspath") } /** Settings needed to compile with Dotty, @@ -22,7 +26,7 @@ object TastySupport { * Dotty in .travis.yml. */ object DottySupport { - val dottyVersion = "3.0.0-RC1" + val dottyVersion = "3.0.0" val compileWithDotty: Boolean = Option(System.getProperty("scala.build.compileWithDotty")).map(_.toBoolean).getOrElse(false) lazy val commonSettings = Seq( diff --git a/project/MimaFilters.scala b/project/MimaFilters.scala index 71d9d7c65c03..c263e18c278c 100644 --- a/project/MimaFilters.scala +++ b/project/MimaFilters.scala @@ -13,7 +13,7 @@ object MimaFilters extends AutoPlugin { import autoImport._ override val globalSettings = Seq( - mimaReferenceVersion := Some("2.13.5"), + mimaReferenceVersion := Some("2.13.7"), ) val mimaFilters: Seq[ProblemFilter] = Seq[ProblemFilter]( @@ -25,14 +25,11 @@ object MimaFilters extends AutoPlugin { // don't publish the artifact built with JDK 11 anyways ProblemFilters.exclude[DirectMissingMethodProblem]("scala.collection.convert.JavaCollectionWrappers#IteratorWrapper.asIterator"), - // PR: https://github.com/scala/scala/pull/9336; remove after re-STARR - ProblemFilters.exclude[MissingTypesProblem]("scala.deprecatedOverriding"), - ProblemFilters.exclude[MissingTypesProblem]("scala.deprecatedInheritance"), - ProblemFilters.exclude[MissingTypesProblem]("scala.deprecated"), - ProblemFilters.exclude[MissingTypesProblem]("scala.annotation.elidable"), - ProblemFilters.exclude[MissingTypesProblem]("scala.annotation.implicitAmbiguous"), - ProblemFilters.exclude[MissingTypesProblem]("scala.annotation.implicitNotFound"), - ProblemFilters.exclude[MissingTypesProblem]("scala.annotation.migration"), + // KEEP: when building on a recent JDK, classes implementing `CharSequence` get a mixin forwarder for + // the `isEmpty` default method that was added in JDK 15 + ProblemFilters.exclude[DirectMissingMethodProblem]("scala.Predef#SeqCharSequence.isEmpty"), + ProblemFilters.exclude[DirectMissingMethodProblem]("scala.Predef#ArrayCharSequence.isEmpty"), + ProblemFilters.exclude[DirectMissingMethodProblem]("scala.runtime.ArrayCharSequence.isEmpty"), ) override val buildSettings = Seq( diff --git a/project/Osgi.scala b/project/Osgi.scala index 029ecdf82f7c..4dade3aa03a4 100644 --- a/project/Osgi.scala +++ b/project/Osgi.scala @@ -8,11 +8,11 @@ import sbt.Keys._ import collection.JavaConverters._ import VersionUtil.versionProperties -/** 
OSGi packaging for the Scala build, distilled from sbt-osgi. We do not use sbt-osgi because it - * depends on a newer version of BND which gives slightly different output (probably OK to upgrade - * in the future, now that the Ant build has been removed) and does not allow a crucial bit of +/** OSGi packaging for the Scala build, distilled from sbt-osgi. + * + * We don't use sbt-osgi (yet) because it does not allow a crucial bit of * configuration that we need: Setting the classpath for BND. In sbt-osgi this is always - * `fullClasspath in Compile` whereas we want `products in Compile in packageBin`. */ + * `fullClasspath in Compile` whereas we want `products in Compile in packageBin`. */ object Osgi { val bundle = TaskKey[File]("osgiBundle", "Create an OSGi bundle.") val bundleName = SettingKey[String]("osgiBundleName", "The Bundle-Name for the manifest.") @@ -29,11 +29,30 @@ object Osgi { "Bundle-Name" -> bundleName.value, "Bundle-SymbolicName" -> bundleSymbolicName.value, "ver" -> v, - "Export-Package" -> "*;version=${ver};-split-package:=merge-first", + + // bnd 3.0 fixes for https://github.com/bndtools/bnd/issues/971. This changes our OSGi + // metadata by adding Import-Package automatically for all of our exported packages. + // Supposedly this is the right thing to do: https://blog.osgi.org/2007/04/importance-of-exporting-nd-importing.html + // but I'm disabling the feature (`-noimport:=true`) to avoid changing this detail of + // our little understood OSGi metadata for now. + "Export-Package" -> "*;version=${ver};-noimport:=true;-split-package:=merge-first", + "Import-Package" -> raw"""scala.*;version="$${range;[==,=+);$${ver}}",*""", "Bundle-Version" -> v, "Bundle-RequiredExecutionEnvironment" -> "JavaSE-1.8", - "-eclipse" -> "false" + "-eclipse" -> "false", + + // Great new feature in modern bnd versions: reproducible builds. + // Omits the Bundle-LastModified header and avoids using System.currentTimeMillis + // for ZIP metadata. + "-reproducible" -> "true", + + // https://github.com/bndtools/bnd/commit/2f1d89428559d21857b87b6d5b465a18a300becc (bndlib 4.2.0) + // seems to have fixed a bug in its detection class references in Class.forName("some.Class") + // For our build, this adds an import on the package "com.cloudius.util" (referred to by an optional + // part of JLine. This directive disables the Class.forName scanning. An alternative fix would be + // direct this to be an optional dependency (as we do for jline itself with `"Import-Package" -> ("jline.*;resolution:=optional," + ... 
)`) + "-noclassforname" -> "true" // ) }, jarlist := false, diff --git a/project/ScalaOptionParser.scala b/project/ScalaOptionParser.scala index 367805199cc7..64d9db857982 100644 --- a/project/ScalaOptionParser.scala +++ b/project/ScalaOptionParser.scala @@ -82,7 +82,7 @@ object ScalaOptionParser { } // TODO retrieve these data programmatically, ala https://github.com/scala/scala-tool-support/blob/master/bash-completion/src/main/scala/BashCompletion.scala - private def booleanSettingNames = List("-X", "-Xasync", "-Xcheckinit", "-Xdev", "-Xdisable-assertions", "-Xexperimental", "-Xfatal-warnings", "-Xlog-free-terms", "-Xlog-free-types", "-Xlog-implicit-conversions", "-Xlog-implicits", "-Xlog-reflective-calls", + private def booleanSettingNames = List("-X", "-Xasync", "-Xcheckinit", "-Xdev", "-Xdisable-assertions", "-Xexperimental", "-Xfatal-warnings", "-Xlog-free-terms", "-Xlog-free-types", "-Xlog-implicit-conversions", "-Xlog-reflective-calls", "-Xno-forwarders", "-Xno-patmat-analysis", "-Xnon-strict-patmat-analysis", "-Xprint-pos", "-Xprint-types", "-Xprompt", "-Xresident", "-Xshow-phases", "-Xverify", "-Y", "-Ybreak-cycles", "-Ydebug", "-Ycompact-trees", "-YdisableFlatCpCaching", "-Ydoc-debug", "-Yide-debug", @@ -97,7 +97,7 @@ object ScalaOptionParser { "-Vhot-statistics", "-Vide", "-Vimplicit-conversions", "-Vimplicits", "-Vissue", "-Vmacro", "-Vmacro-lite", "-Vpatmat", "-Vphases", "-Vpos", "-Vprint-pos", "-Vprint-types", "-Vquasiquote", "-Vreflective-calls", "-Vreify", - "-Vshow-member-pos", "-Vshow-symkinds", "-Vshow-symowners", "-Vsymbols", "-Vtyper", + "-Vshow-member-pos", "-Vshow-symkinds", "-Vshow-symowners", "-Vsymbols", "-Vtype-diffs", "-Vtyper", "-W", "-Wdead-code", "-Werror", "-Wextra-implicit", "-Wnumeric-widen", "-Woctal-literal", "-Wvalue-discard", "-Wself-implicit", @@ -140,5 +140,5 @@ object ScalaOptionParser { private def scaladocPathSettingNames = List("-doc-root-content", "-diagrams-dot-path") private def scaladocMultiStringSettingNames = List("-doc-external-doc") - private val targetSettingNames = (8 to 17).map(_.toString).flatMap(v => v :: s"jvm-1.$v" :: s"jvm-$v" :: s"1.$v" :: Nil).toList + private val targetSettingNames = (8 to 18).map(_.toString).flatMap(v => v :: s"jvm-1.$v" :: s"jvm-$v" :: s"1.$v" :: Nil).toList } diff --git a/project/ScaladocSettings.scala b/project/ScaladocSettings.scala index ed4b8a188f38..ead5d1680a97 100644 --- a/project/ScaladocSettings.scala +++ b/project/ScaladocSettings.scala @@ -1,7 +1,7 @@ package scala.build import sbt._ -import sbt.Keys.{ artifact, dependencyClasspath, moduleID, resourceManaged } +import sbt.Keys.{ artifact, externalDependencyClasspath, moduleID, resourceManaged } object ScaladocSettings { @@ -15,7 +15,9 @@ object ScaladocSettings { s.get(artifact.key).isDefined && s.get(moduleID.key).exists(_.organization == "org.webjars") val dest = (resourceManaged.value / "webjars").getAbsoluteFile IO.createDirectory(dest) - val classpathes = (Compile / dependencyClasspath).value + // externalDependencyClasspath (not dependencyClasspath) to avoid compiling + // upstream projects (library, reflect, compiler) on bsp `buildTarget/resources` + val classpathes = (Compile / externalDependencyClasspath).value val files: Seq[File] = classpathes.filter(isWebjar).flatMap { classpathEntry => val jarFile = classpathEntry.data IO.unzip(jarFile, dest) diff --git a/project/ScriptCommands.scala b/project/ScriptCommands.scala index 82cc51f38561..973d23053218 100644 --- a/project/ScriptCommands.scala +++ 
b/project/ScriptCommands.scala @@ -107,7 +107,13 @@ object ScriptCommands { Global / baseVersionSuffix := "SPLIT", Global / resolvers += "scala-pr" at url, Global / publishTo := Some("sonatype-releases" at "https://oss.sonatype.org/service/local/staging/deploy/maven2"), - Global / credentials += Credentials("Sonatype Nexus Repository Manager", "oss.sonatype.org", env("SONA_USER"), env("SONA_PASS")) + Global / credentials ++= { + val user = env("SONA_USER") + val pass = env("SONA_PASS") + if (user != "" && pass != "") + List(Credentials("Sonatype Nexus Repository Manager", "oss.sonatype.org", user, pass)) + else Nil + } // pgpSigningKey and pgpPassphrase are set externally by travis / the bootstrap script, as the sbt-pgp plugin is not enabled by default ) ++ enableOptimizer } @@ -168,7 +174,12 @@ object ScriptCommands { Seq( Global / publishTo := Some("scala-pr-publish" at url2), - Global / credentials += Credentials("Artifactory Realm", "scala-ci.typesafe.com", "scala-ci", env("PRIVATE_REPO_PASS")) + Global / credentials ++= { + val pass = env("PRIVATE_REPO_PASS") + if (pass != "") + List(Credentials("Artifactory Realm", "scala-ci.typesafe.com", "scala-ci", pass)) + else Nil + } ) } diff --git a/project/VersionUtil.scala b/project/VersionUtil.scala index 6b4e659cc7a2..dd2f3cd9b1d7 100644 --- a/project/VersionUtil.scala +++ b/project/VersionUtil.scala @@ -2,13 +2,13 @@ package scala.build import sbt._ import Keys._ + import java.util.{Date, Locale, Properties, TimeZone} -import java.io.{File, FileInputStream} +import java.io.{File, FileInputStream, StringWriter} import java.text.SimpleDateFormat import java.time.Instant import java.time.format.DateTimeFormatter import java.time.temporal.{TemporalAccessor, TemporalQueries, TemporalQuery} - import scala.collection.JavaConverters._ import BuildSettings.autoImport._ @@ -30,7 +30,7 @@ object VersionUtil { ) lazy val generatePropertiesFileSettings = Seq[Setting[_]]( - copyrightString := "Copyright 2002-2020, LAMP/EPFL and Lightbend, Inc.", + copyrightString := "Copyright 2002-2021, LAMP/EPFL and Lightbend, Inc.", shellBannerString := """ | ________ ___ / / ___ | / __/ __// _ | / / / _ | @@ -173,13 +173,18 @@ object VersionUtil { } private def writeProps(m: Map[String, String], propFile: File): File = { - val props = new Properties - m.foreach { case (k, v) => props.put(k, v) } - // unfortunately, this will write properties in arbitrary order - // this makes it harder to test for stability of generated artifacts - // consider using https://github.com/etiennestuder/java-ordered-properties - // instead of java.util.Properties - IO.write(props, null, propFile) + // Like: + // IO.write(props, null, propFile) + // But with deterministic key ordering and no timestamp + val fullWriter = new StringWriter() + for (k <- m.keySet.toVector.sorted) { + val writer = new StringWriter() + val props = new Properties() + props.put(k, m(k)) + props.store(writer, null) + writer.toString.linesIterator.drop(1).foreach{line => fullWriter.write(line); fullWriter.write("\n")} + } + IO.write(propFile, fullWriter.toString) propFile } diff --git a/project/build.properties b/project/build.properties index e67343ae796c..bb3a9b7dc6d2 100644 --- a/project/build.properties +++ b/project/build.properties @@ -1 +1 @@ -sbt.version=1.5.0 +sbt.version=1.5.6 diff --git a/project/plugins.sbt b/project/plugins.sbt index 73ce8dc22df5..fd522665f074 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -1,9 +1,9 @@ scalacOptions ++= Seq("-unchecked", "-feature", 
"-deprecation", "-Xlint:-unused,_", "-Xfatal-warnings") -libraryDependencies += "org.apache.commons" % "commons-lang3" % "3.3.2" +libraryDependencies += "org.apache.commons" % "commons-lang3" % "3.12.0" -libraryDependencies += "biz.aQute.bnd" % "biz.aQute.bnd" % "2.4.1" +libraryDependencies += "biz.aQute.bnd" % "biz.aQute.bndlib" % "5.3.0" enablePlugins(BuildInfoPlugin) @@ -17,20 +17,18 @@ buildInfoKeys := Seq[BuildInfoKey](buildClasspath) buildInfoPackage := "scalabuild" -addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "0.8.1") +addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "0.9.0") libraryDependencies ++= Seq( "org.eclipse.jgit" % "org.eclipse.jgit" % "4.6.0.201612231935-r", - "org.slf4j" % "slf4j-nop" % "1.7.23", + "org.slf4j" % "slf4j-nop" % "1.7.32", "com.googlecode.java-diff-utils" % "diffutils" % "1.3.0", -) + ) Global / concurrentRestrictions := Seq( Tags.limitAll(1) // workaround for https://github.com/sbt/sbt/issues/2970 ) -addSbtPlugin("pl.project13.scala" % "sbt-jmh" % "0.3.4") +addSbtPlugin("pl.project13.scala" % "sbt-jmh" % "0.4.2") addSbtPlugin("de.heikoseeberger" % "sbt-header" % "5.0.0") - -addSbtPlugin("com.lightbend" % "sbt-whitesource" % "0.1.18") diff --git a/scripts/common b/scripts/common index 2584d10574ef..2fc012cbe8c4 100644 --- a/scripts/common +++ b/scripts/common @@ -11,14 +11,13 @@ else fi SBT_CMD=${SBT_CMD-sbt} -SBT_CMD="$SBT_CMD -sbt-version 1.5.0" +SBT_CMD="$SBT_CMD -sbt-version 1.5.6" # repo to publish builds integrationRepoUrl=${integrationRepoUrl-"https://scala-ci.typesafe.com/artifactory/scala-integration/"} # only used on jenkins sbtRepositoryConfig="$WORKSPACE/scripts/sbt-repositories-config" -jcenterCacheUrl=${jcenterCacheUrl-"https://scala-ci.typesafe.com/artifactory/jcenter/"} # used by `checkAvailability` TMP_ROOT_DIR=$(mktemp -d -t pr-scala.XXXX) @@ -92,11 +91,9 @@ function generateRepositoriesConfig() { fi cat >> "$sbtRepositoryConfig" << EOF - jcenter-cache: $jcenterCacheUrl local maven-central - typesafe-ivy-releases-boot: https://repo.lightbend.com/typesafe/ivy-releases/, [organisation]/[module]/[revision]/[type]s/[artifact](-[classifier]).[ext], bootOnly - typesafe-ivy-releases: https://dl.bintray.com/typesafe/ivy-releases/, [organisation]/[module]/(scala_[scalaVersion]/)(sbt_[sbtVersion]/)[revision]/[type]s/[artifact](-[classifier]).[ext] + typesafe-ivy-releases: https://repo.lightbend.com/typesafe/ivy-releases/, [organisation]/[module]/[revision]/[type]s/[artifact](-[classifier]).[ext] sbt-plugin-releases: https://repo.scala-sbt.org/scalasbt/sbt-plugin-releases/, [organisation]/[module]/(scala_[scalaVersion]/)(sbt_[sbtVersion]/)[revision]/[type]s/[artifact](-[classifier]).[ext] EOF } diff --git a/scripts/jobs/integrate/ide b/scripts/jobs/integrate/ide deleted file mode 100755 index 1dc7b43139e8..000000000000 --- a/scripts/jobs/integrate/ide +++ /dev/null @@ -1,35 +0,0 @@ -#!/bin/bash -e -# requires checkout: root is a scala checkout with which to integrate (actually, only required file is versions.properties, as documented below) -# requires env: scalaVersion (specifies binary already built from above checkout), WORKSPACE (provided by jenkins), repo_ref (HEAD of the scala checkout), -# requires files: $WORKSPACE/versions.properties (from checkout -- defines version numbers for modules used to build scala for dbuild...) - -echo "IDE integration not yet available on 2.12.x. Punting." 
-exit 0 - -# TODO: remove when integration is up and running -if [ "woele$_scabot_last" != "woele1" ]; then echo "Scabot didn't mark this as last commit -- skipping."; exit 0; fi - -baseDir=${WORKSPACE-`pwd`} -uberBuildUrl=${uberBuildUrl-"https://github.com/scala-ide/uber-build.git"} -uberBuildConfig=${uberBuildConfig-"validator.conf"} # TODO: backport to 2.10.x: uberBuildConfig="validator-2.10.conf" - -uberBuildDir="$WORKSPACE/uber-build/" - -cd $WORKSPACE -if [[ -d $uberBuildDir ]]; then - ( cd $uberBuildDir && git fetch $uberBuildUrl HEAD && git checkout -f FETCH_HEAD && git clean -fxd ) -else - git clone $uberBuildUrl -fi - -echo "maven.version.number=$scalaVersion" >> versions.properties - -# pass prRepoUrl in, which uber-build passes along to dbuild (in sbt-builds-for-ide) -# the "-P pr-scala" maven arg accomplishes the same thing for maven (directly used in uber-build) -BASEDIR="$WORKSPACE" prRepoUrl="$prRepoUrl" IDE_M2_REPO="$prRepoUrl" MAVEN_ARGS="-P pr-scala"\ - $uberBuildDir/uber-build.sh $uberBuildDir/config/$uberBuildConfig $repo_ref $scalaVersion - -# uber-build puts its local repo under target/m2repo -# wipe the org/scala-lang part, which otherwise just keeps -# growing and growing due to the -$sha-SNAPSHOT approach -[[ -d $WORKSPACE/target/m2repo/org/scala-lang ]] && rm -rf $WORKSPACE/target/m2repo/org/scala-lang diff --git a/scripts/jobs/integrate/windows b/scripts/jobs/integrate/windows deleted file mode 100755 index 964b70383c09..000000000000 --- a/scripts/jobs/integrate/windows +++ /dev/null @@ -1,22 +0,0 @@ -#!/bin/bash - -source scripts/common - -java -version -javac -version - -generateRepositoriesConfig - -# it may not be necessary to set both COURSIER_HOME and sbt.coursier.home, -# but at least for now, doing it just in case; see discussion at -# https://github.com/scala/scala-dev/issues/666 -export COURSIER_HOME=$WORKSPACE/.coursier - -SBT="java $JAVA_OPTS -Dsbt.ivy.home=$WORKSPACE/.ivy2 -Dsbt.coursier.home=$WORKSPACE/.coursier -jar $sbtLauncher -Dsbt.override.build.repos=true -Dsbt.repository.config=$sbtRepositoryConfig" - -# Build locker with STARR -$SBT -warn "setupPublishCore" generateBuildCharacterPropertiesFile publishLocal - -# Build quick and run the tests -parseScalaProperties buildcharacter.properties -$SBT -Dstarr.version=$maven_version_number -warn "setupValidateTest" testAll diff --git a/spec/01-lexical-syntax.md b/spec/01-lexical-syntax.md index 718950b171a1..7f3d2887238d 100644 --- a/spec/01-lexical-syntax.md +++ b/spec/01-lexical-syntax.md @@ -6,20 +6,18 @@ chapter: 1 # Lexical Syntax -Scala programs are written using the Unicode Basic Multilingual Plane -(_BMP_) character set; Unicode supplementary characters are not -presently supported. This chapter defines the two modes of Scala's -lexical syntax, the Scala mode, and the _XML mode_. If not -otherwise mentioned, the following descriptions of Scala tokens refer -to _Scala mode_, and literal characters ā€˜c’ refer to the ASCII fragment -`\u0000` – `\u007F`. +Scala source code consists of Unicode text. + +The program text is tokenized as described in this chapter. +See the last section for special support for XML literals, +which are parsed in _XML mode_. To construct tokens, characters are distinguished according to the following classes (Unicode general category given in parentheses): 1. Whitespace characters. `\u0020 | \u0009 | \u000D | \u000A`. 1. 
Letters, which include lower case letters (`Ll`), upper case letters (`Lu`), - title case letters (`Lt`), other letters (`Lo`), modifier letters (`Ml`), + title case letters (`Lt`), other letters (`Lo`), modifier letters (`Lm`), letter numerals (`Nl`) and the two characters `\u0024 ā€˜$’` and `\u005F ā€˜_’`. 1. Digits `ā€˜0’ | … | ā€˜9’`. 1. Parentheses `ā€˜(’ | ā€˜)’ | ā€˜[’ | ā€˜]’ | ā€˜{’ | ā€˜}’ `. @@ -74,7 +72,7 @@ or `_`, and _constant identifiers_, which do not. For this purpose, lower case letters include not only a-z, but also all characters in Unicode category Ll (lowercase letter), as well as all letters that have contributory property -Other_Lowercase, except characters in category Nl (letter numerals) +Other_Lowercase, except characters in category Nl (letter numerals), which are never taken as lower case. The following are examples of variable identifiers: @@ -579,16 +577,7 @@ string literal does not start a valid escape sequence. symbolLiteral ::= ā€˜'’ plainid ``` -A symbol literal `'x` is a shorthand for the expression `scala.Symbol("x")` and -is of the [literal type](03-types.html#literal-types) `'x`. -`Symbol` is a [case class](05-classes-and-objects.html#case-classes), which is defined as follows. - -```scala -package scala -final case class Symbol private (name: String) { - override def toString: String = "'" + name -} -``` +A symbol literal `'x` is deprecated shorthand for the expression `scala.Symbol("x")`. The `apply` method of `Symbol`'s companion object caches weak references to `Symbol`s, thus ensuring that diff --git a/spec/03-types.md b/spec/03-types.md index 2f898d8acb39..b4bdb7cb2e07 100644 --- a/spec/03-types.md +++ b/spec/03-types.md @@ -131,7 +131,7 @@ determined by evaluating `e == lit`. Literal types are available for all types for which there is dedicated syntax except `Unit`. This includes the numeric types (other than `Byte` and `Short` -which don't currently have syntax), `Boolean`, `Char`, `String` and `Symbol`. +which don't currently have syntax), `Boolean`, `Char` and `String`. ### Stable Types A _stable type_ is a singleton type, a literal type, diff --git a/spec/06-expressions.md b/spec/06-expressions.md index c13c9b13a200..49687a2bf97e 100644 --- a/spec/06-expressions.md +++ b/spec/06-expressions.md @@ -295,6 +295,11 @@ must be the same). Furthermore, the type of Ā“eĀ“ must conform to sequence Ā“eĀ“ with its elements. When the application uses named arguments, the vararg parameter has to be specified exactly once. +If only a single argument is supplied, it may be supplied as a block expression +and parentheses can be omitted, in the form `Ā“fĀ“ { block }`. This is valid when +`f` has a single formal parameter or when all other formal parameters have +default values. + A function application usually allocates a new frame on the program's run-time stack. However, if a local method or a final method calls itself as its last action, the call is executed using the stack-frame @@ -654,7 +659,7 @@ character. Characters are listed below in increasing order of precedence, with characters on the same line having the same precedence. ```scala -(all letters) +(all letters, as defined in [chapter 1](01-lexical-syntax.html), including `_` and `$`) | ^ & @@ -663,7 +668,7 @@ precedence, with characters on the same line having the same precedence. 
: + - * / % -(all other special characters) +(other operator characters, as defined in [chapter 1](01-lexical-syntax.html), including Unicode categories `Sm` and `So`) ``` That is, operators starting with a letter have lowest precedence, @@ -914,7 +919,7 @@ A semicolon preceding the `while` symbol of a do loop expression is ignored. Expr1 ::= ā€˜for’ (ā€˜(’ Enumerators ā€˜)’ | ā€˜{’ Enumerators ā€˜}’) {nl} [ā€˜yield’] Expr Enumerators ::= Generator {semi Generator} -Generator ::= Pattern1 ā€˜<-’ Expr {[semi] Guard | semi Pattern1 ā€˜=’ Expr} +Generator ::= [ā€˜case’] Pattern1 ā€˜<-’ Expr {[semi] Guard | semi Pattern1 ā€˜=’ Expr} Guard ::= ā€˜if’ PostfixExpr ``` @@ -924,9 +929,15 @@ A _for comprehension_ `for (Ā“\mathit{enums}\,Ā“) yield Ā“eĀ“` evaluates expression Ā“eĀ“ for each binding generated by the enumerators Ā“\mathit{enums}Ā“ and collects the results. An enumerator sequence always starts with a generator; this can be followed by further generators, value -definitions, or guards. A _generator_ `Ā“pĀ“ <- Ā“eĀ“` -produces bindings from an expression Ā“eĀ“ which is matched in some way -against pattern Ā“pĀ“. A _value definition_ `Ā“pĀ“ = Ā“eĀ“` +definitions, or guards. + +A _generator_ `Ā“pĀ“ <- Ā“eĀ“` produces bindings from an expression Ā“eĀ“ which is +matched in some way against pattern Ā“pĀ“. Optionally, `case` can appear in front +of a generator pattern, this has no meaning in Scala 2 but will be [required in +Scala 3 if `p` is not +irrefutable](https://docs.scala-lang.org/scala3/reference/changed-features/pattern-bindings.html). + +A _value definition_ `Ā“pĀ“ = Ā“eĀ“` binds the value name Ā“pĀ“ (or several names in a pattern Ā“pĀ“) to the result of evaluating the expression Ā“eĀ“. A _guard_ `if Ā“eĀ“` contains a boolean expression which restricts diff --git a/spec/13-syntax-summary.md b/spec/13-syntax-summary.md index aec631beb45f..cda92a3b3e5f 100644 --- a/spec/13-syntax-summary.md +++ b/spec/13-syntax-summary.md @@ -14,15 +14,20 @@ The lexical syntax of Scala is given by the following grammar in EBNF form: ```ebnf whiteSpace ::= ā€˜\u0020’ | ā€˜\u0009’ | ā€˜\u000D’ | ā€˜\u000A’ -upper ::= ā€˜A’ | … | ā€˜Z’ | ā€˜$’ // and any character in Unicode category Lu, Lt or Nl, and any character in Lo and Ml that don't have contributory property Other_Lowercase -lower ::= ā€˜a’ | … | ā€˜z’ | ā€˜_’ // and any character in Unicode category Ll, and and any character in Lo or Ml that has contributory property Other_Lowercase +upper ::= ā€˜A’ | … | ā€˜Z’ | ā€˜$’ and any character in Unicode categories Lu, Lt or Nl, + and any character in Unicode categories Lo and Lm that don't have + contributory property Other_Lowercase +lower ::= ā€˜a’ | … | ā€˜z’ | ā€˜_’ and any character in Unicode category Ll, + and any character in Unicode categories Lo or Lm that has contributory + property Other_Lowercase letter ::= upper | lower digit ::= ā€˜0’ | … | ā€˜9’ paren ::= ā€˜(’ | ā€˜)’ | ā€˜[’ | ā€˜]’ | ā€˜{’ | ā€˜}’ delim ::= ā€˜`’ | ā€˜'’ | ā€˜"’ | ā€˜.’ | ā€˜;’ | ā€˜,’ -opchar ::= // printableChar not matched by (whiteSpace | upper | lower | - // letter | digit | paren | delim | Unicode_Sm | Unicode_So) -printableChar ::= // all characters in [\u0020, \u007F] inclusive +opchar ::= ā€˜!’ | ā€˜#’ | ā€˜%’ | ā€˜&’ | ā€˜*’ | ā€˜+’ | ā€˜-’ | ā€˜/’ | ā€˜:’ | + ā€˜<’ | ā€˜=’ | ā€˜>’ | ā€˜?’ | ā€˜@’ | ā€˜\’ | ā€˜^’ | ā€˜|’ | ā€˜~’ + and any character in Unicode categories Sm or So +printableChar ::= all characters in [\u0020, \u007E] inclusive UnicodeEscape ::= ā€˜\’ ā€˜u’ {ā€˜u’} hexDigit hexDigit hexDigit 
hexDigit hexDigit ::= ā€˜0’ | … | ā€˜9’ | ā€˜A’ | … | ā€˜F’ | ā€˜a’ | … | ā€˜f’ charEscapeSeq ::= ā€˜\’ (ā€˜b’ | ā€˜t’ | ā€˜n’ | ā€˜f’ | ā€˜r’ | ā€˜"’ | ā€˜'’ | ā€˜\’) @@ -179,7 +184,7 @@ grammar: | (Bindings | ([ā€˜implicit’] id | ā€˜_’) ā€˜:’ CompoundType) ā€˜=>’ Block Enumerators ::= Generator {semi Generator} - Generator ::= Pattern1 ā€˜<-’ Expr {[semi] Guard | semi Pattern1 ā€˜=’ Expr} + Generator ::= [ā€˜case’] Pattern1 ā€˜<-’ Expr {[semi] Guard | semi Pattern1 ā€˜=’ Expr} CaseClauses ::= CaseClause { CaseClause } CaseClause ::= ā€˜case’ Pattern [Guard] ā€˜=>’ Block diff --git a/src/compiler/scala/reflect/reify/Taggers.scala b/src/compiler/scala/reflect/reify/Taggers.scala index 41eddd796c85..adff7a293503 100644 --- a/src/compiler/scala/reflect/reify/Taggers.scala +++ b/src/compiler/scala/reflect/reify/Taggers.scala @@ -102,8 +102,6 @@ abstract class Taggers { val tpe = tpeTree.tpe val PolyType(_, MethodType(_, tagTpe)) = fun.tpe: @unchecked val tagModule = tagTpe.typeSymbol.companionSymbol - if (c.compilerSettings.contains("-Xlog-implicits")) - c.echo(c.enclosingPosition, s"cannot materialize ${tagModule.name}[$tpe] as $result because:\n$reason") c.abort(c.enclosingPosition, "No %s available for %s".format(tagModule.name, tpe)) } diff --git a/src/compiler/scala/tools/nsc/CompilerCommand.scala b/src/compiler/scala/tools/nsc/CompilerCommand.scala index 5601f96459cc..44c107f55dad 100644 --- a/src/compiler/scala/tools/nsc/CompilerCommand.scala +++ b/src/compiler/scala/tools/nsc/CompilerCommand.scala @@ -113,7 +113,7 @@ class CompilerCommand(arguments: List[String], val settings: Settings) { else if (Yhelp) yusageMsg else if (showPlugins) global.pluginDescriptions else if (showPhases) global.phaseDescriptions + ( - if (debug) "\n" + global.phaseFlagDescriptions else "" + if (settings.isDebug) "\n" + global.phaseFlagDescriptions else "" ) else if (genPhaseGraph.isSetByUser) { val components = global.phaseNames // global.phaseDescriptors // one initializes diff --git a/src/compiler/scala/tools/nsc/Global.scala b/src/compiler/scala/tools/nsc/Global.scala index a80c5dbf4d51..abfdbe9fe069 100644 --- a/src/compiler/scala/tools/nsc/Global.scala +++ b/src/compiler/scala/tools/nsc/Global.scala @@ -81,6 +81,11 @@ class Global(var currentSettings: Settings, reporter0: Reporter) import definitions.findNamedMember def findMemberFromRoot(fullName: Name): Symbol = rootMirror.findMemberFromRoot(fullName) + override def openPackageModule(pkgClass: Symbol, force: Boolean): Unit = { + if (force || isPast(currentRun.namerPhase)) super.openPackageModule(pkgClass, true) + else analyzer.packageObjects.deferredOpen.add(pkgClass) + } + // alternate constructors ------------------------------------------ override def settings = currentSettings @@ -282,7 +287,7 @@ class Global(var currentSettings: Settings, reporter0: Reporter) // ------------------ Debugging ------------------------------------- @inline final def ifDebug(body: => Unit): Unit = { - if (settings.debug) + if (settings.isDebug) body } @@ -313,7 +318,7 @@ class Global(var currentSettings: Settings, reporter0: Reporter) } @inline final override def debuglog(msg: => String): Unit = { - if (settings.debug) + if (settings.isDebug) log(msg) } @@ -417,7 +422,7 @@ class Global(var currentSettings: Settings, reporter0: Reporter) if ((unit ne null) && unit.exists) lastSeenSourceFile = unit.source - if (settings.debug && (settings.verbose || currentRun.size < 5)) + if (settings.isDebug && (settings.verbose || currentRun.size < 5)) inform("[running phase " + 
name + " on " + unit + "]") } @@ -713,7 +718,7 @@ class Global(var currentSettings: Settings, reporter0: Reporter) protected def computePhaseDescriptors: List[SubComponent] = { /* Allow phases to opt out of the phase assembly. */ def cullPhases(phases: List[SubComponent]) = { - val enabled = if (settings.debug && settings.isInfo) phases else phases filter (_.enabled) + val enabled = if (settings.isDebug && settings.isInfo) phases else phases filter (_.enabled) def isEnabled(q: String) = enabled exists (_.phaseName == q) val (satisfied, unhappy) = enabled partition (_.requires forall isEnabled) unhappy foreach (u => globalError(s"Phase '${u.phaseName}' requires: ${u.requires filterNot isEnabled}")) @@ -744,7 +749,7 @@ class Global(var currentSettings: Settings, reporter0: Reporter) } /** A description of the phases that will run in this configuration, or all if -Vdebug. */ - def phaseDescriptions: String = phaseHelp("description", elliptically = !settings.debug, phasesDescMap) + def phaseDescriptions: String = phaseHelp("description", elliptically = !settings.isDebug, phasesDescMap) /** Summary of the per-phase values of nextFlags and newFlags, shown under -Vphases -Vdebug. */ def phaseFlagDescriptions: String = { @@ -755,7 +760,7 @@ class Global(var currentSettings: Settings, reporter0: Reporter) else if (ph.phaseNewFlags != 0L && ph.phaseNextFlags != 0L) fstr1 + " " + fstr2 else fstr1 + fstr2 } - phaseHelp("new flags", elliptically = !settings.debug, fmt) + phaseHelp("new flags", elliptically = !settings.isDebug, fmt) } /** Emit a verbose phase table. @@ -1113,7 +1118,7 @@ class Global(var currentSettings: Settings, reporter0: Reporter) def echoPhaseSummary(ph: Phase) = { /* Only output a summary message under debug if we aren't echoing each file. */ - if (settings.debug && !(settings.verbose || currentRun.size < 5)) + if (settings.isDebug && !(settings.verbose || currentRun.size < 5)) inform("[running phase " + ph.name + " on " + currentRun.size + " compilation units]") } @@ -1154,9 +1159,7 @@ class Global(var currentSettings: Settings, reporter0: Reporter) keepPhaseStack = settings.log.isSetByUser // We hit these checks regularly. They shouldn't change inside the same run, so cache the comparisons here. 
- val isScala212: Boolean = settings.isScala212 - val isScala213: Boolean = settings.isScala213 - val isScala3: Boolean = settings.isScala3 + val isScala3 = settings.isScala3 // used in sbt def uncheckedWarnings: List[(Position, String)] = reporting.uncheckedWarnings @@ -1283,11 +1286,8 @@ class Global(var currentSettings: Settings, reporter0: Reporter) checkPhaseSettings(including = true, inclusions.toSeq: _*) checkPhaseSettings(including = false, exclusions map (_.value): _*) - // Enable or disable depending on the current setting -- useful for interactive behaviour - statistics.initFromSettings(settings) - // Report the overhead of statistics measurements per every run - if (statistics.areStatisticsLocallyEnabled) + if (settings.areStatisticsEnabled) statistics.reportStatisticsOverhead(reporter) phase = first //parserPhase @@ -1512,7 +1512,7 @@ class Global(var currentSettings: Settings, reporter0: Reporter) warnDeprecatedAndConflictingSettings() globalPhase = fromPhase - val timePhases = statistics.areStatisticsLocallyEnabled + val timePhases = settings.areStatisticsEnabled val startTotal = if (timePhases) statistics.startTimer(totalCompileTime) else null while (globalPhase.hasNext && !reporter.hasErrors) { @@ -1562,13 +1562,12 @@ class Global(var currentSettings: Settings, reporter0: Reporter) if (settings.YstatisticsEnabled && settings.Ystatistics.contains(phase.name)) printStatisticsFor(phase) - if (!globalPhase.hasNext || reporter.hasErrors) - runReporting.warnUnusedSuppressions() - advancePhase() } profiler.finished() + runReporting.runFinished(hasErrors = reporter.hasErrors) + reporting.summarizeErrors() // val allNamesArray: Array[String] = allNames().map(_.toString).toArray.sorted @@ -1654,8 +1653,7 @@ class Global(var currentSettings: Settings, reporter0: Reporter) compileLate(new CompilationUnit(scripted(getSourceFile(file)))) } - /** Compile abstract file until `globalPhase`, but at least to phase "namer". - */ + /** Compile the unit until `globalPhase`, but at least to phase "typer". 
*/ def compileLate(unit: CompilationUnit): Unit = { addUnit(unit) diff --git a/src/compiler/scala/tools/nsc/MainBench.scala b/src/compiler/scala/tools/nsc/MainBench.scala index ca78db7e2dfe..84b3b6e603e7 100644 --- a/src/compiler/scala/tools/nsc/MainBench.scala +++ b/src/compiler/scala/tools/nsc/MainBench.scala @@ -29,9 +29,8 @@ object MainBench extends Driver with EvalLoop { var start = System.nanoTime() for (i <- 0 until NIter) { if (i == NIter-1) { - theCompiler.settings.Ystatistics.value = List("all") - theCompiler.statistics.enabled = true - theCompiler.statistics.hotEnabled = true + theCompiler.settings.Ystatistics.value = List("all") + theCompiler.settings.YhotStatisticsEnabled.value = true } process(args) val end = System.nanoTime() diff --git a/src/compiler/scala/tools/nsc/MainTokenMetric.scala b/src/compiler/scala/tools/nsc/MainTokenMetric.scala index c43683118028..ff8fcfa5c24f 100644 --- a/src/compiler/scala/tools/nsc/MainTokenMetric.scala +++ b/src/compiler/scala/tools/nsc/MainTokenMetric.scala @@ -50,7 +50,7 @@ object MainTokenMetric { tokenMetric(compiler, command.files) } catch { case ex @ FatalError(msg) => - if (command.settings.debug) + if (command.settings.isDebug) ex.printStackTrace() reporter.error(null, "fatal error: " + msg) } diff --git a/src/compiler/scala/tools/nsc/Parsing.scala b/src/compiler/scala/tools/nsc/Parsing.scala index 673d30bf237c..7d48e27678d8 100644 --- a/src/compiler/scala/tools/nsc/Parsing.scala +++ b/src/compiler/scala/tools/nsc/Parsing.scala @@ -22,7 +22,6 @@ trait Parsing { self : Positions with Reporting => trait RunParsing { val parsing: PerRunParsing = new PerRunParsing - def isScala213: Boolean } class PerRunParsing { diff --git a/src/compiler/scala/tools/nsc/Reporting.scala b/src/compiler/scala/tools/nsc/Reporting.scala index c69a60f3f8be..f113a3789ad2 100644 --- a/src/compiler/scala/tools/nsc/Reporting.scala +++ b/src/compiler/scala/tools/nsc/Reporting.scala @@ -80,11 +80,11 @@ trait Reporting extends internal.Reporting { self: ast.Positions with Compilatio def suppressionExists(pos: Position): Boolean = suppressions.getOrElse(pos.source, Nil).exists(_.annotPos.point == pos.point) - def warnUnusedSuppressions(): Unit = { - // if we stop before typer completes (errors in parser, Ystop), report all suspended messages + def runFinished(hasErrors: Boolean): Unit = { + // report suspended messages (in case the run finished before typer) suspendedMessages.valuesIterator.foreach(_.foreach(issueWarning)) - // scaladoc doesn't run all phases, so not all warnings are emitted - if (settings.warnUnusedNowarn && !settings.isScaladoc) + // report unused nowarns only if all all phases are done. scaladoc doesn't run all phases. 
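(Editorial note, not part of the patch: a minimal sketch of the behaviour gated just above. With `-Wunused:nowarn`, a `@nowarn` annotation that ends up suppressing nothing is itself reported, and under the new `runFinished` logic that report is only emitted for runs that finished without errors. The method name and warning filter below are only examples.)

```scala
import scala.annotation.nowarn

// Nothing deprecated is referenced here, so the annotation suppresses no warning;
// with -Wunused:nowarn the compiler flags the annotation itself, but per the change
// above only once the run has completed and produced no errors.
@nowarn("cat=deprecation")
def plainAddition(x: Int): Int = x + 1
```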
+ if (!hasErrors && settings.warnUnusedNowarn && !settings.isScaladoc) for { source <- suppressions.keysIterator.toList sups <- suppressions.remove(source) @@ -246,8 +246,12 @@ trait Reporting extends internal.Reporting { self: ast.Positions with Compilatio && parentFileName(pos.source).getOrElse("") == "xsbt" && Thread.currentThread.getStackTrace.exists(_.getClassName.startsWith("sbt.")) ) - if (required && !isSbtCompat) reporter.error(pos, msg) - else warning(pos, msg, featureCategory(featureTrait.nameString), site) + // on postfix error, include interesting infix warning + def isXfix = featureName == "postfixOps" && suspendedMessages.get(pos.source).map(_.exists(w => pos.includes(w.pos))).getOrElse(false) + if (required && !isSbtCompat) { + val amended = if (isXfix) s"$msg\n${suspendedMessages(pos.source).filter(pos includes _.pos).map(_.msg).mkString("\n")}" else msg + reporter.error(pos, amended) + } else warning(pos, msg, featureCategory(featureTrait.nameString), site) } // Used in the optimizer where we don't have no symbols, the site string is created from the class internal name and method name. diff --git a/src/compiler/scala/tools/nsc/ast/Positions.scala b/src/compiler/scala/tools/nsc/ast/Positions.scala index 8cc1858297b4..6cda189bd82c 100644 --- a/src/compiler/scala/tools/nsc/ast/Positions.scala +++ b/src/compiler/scala/tools/nsc/ast/Positions.scala @@ -39,6 +39,6 @@ trait Positions extends scala.reflect.internal.Positions { } override protected[this] lazy val posAssigner: PosAssigner = - if (settings.Yrangepos && settings.debug || settings.Yposdebug) new ValidatingPosAssigner + if (settings.Yrangepos && settings.isDebug || settings.Yposdebug) new ValidatingPosAssigner else new DefaultPosAssigner } diff --git a/src/compiler/scala/tools/nsc/ast/TreeBrowsers.scala b/src/compiler/scala/tools/nsc/ast/TreeBrowsers.scala index cccd4326c375..ae55c09c3387 100644 --- a/src/compiler/scala/tools/nsc/ast/TreeBrowsers.scala +++ b/src/compiler/scala/tools/nsc/ast/TreeBrowsers.scala @@ -15,16 +15,14 @@ package tools.nsc package ast import scala.language.implicitConversions - import java.awt.{List => _, _} import java.awt.event._ import java.io.{StringWriter, Writer} import javax.swing._ import javax.swing.event.TreeModelListener import javax.swing.tree._ - import java.util.concurrent.CountDownLatch -import scala.annotation.tailrec +import scala.annotation.{nowarn, tailrec} /** * Tree browsers can show the AST in a graphical and interactive @@ -217,8 +215,8 @@ abstract class TreeBrowsers { } class ASTMenuBar extends JMenuBar { - val menuKey = Toolkit.getDefaultToolkit().getMenuShortcutKeyMask() - val shiftKey = InputEvent.SHIFT_MASK + val menuKey = Toolkit.getDefaultToolkit().getMenuShortcutKeyMask(): @nowarn("cat=deprecation") // deprecated since JDK 10, replacement only available in 10+ + val shiftKey = InputEvent.SHIFT_DOWN_MASK val jmFile = new JMenu("File") // val jmiSaveImage = new JMenuItem( // new AbstractAction("Save Tree Image") { diff --git a/src/compiler/scala/tools/nsc/ast/TreeDSL.scala b/src/compiler/scala/tools/nsc/ast/TreeDSL.scala index 7281d66aa807..f3979f6c94a2 100644 --- a/src/compiler/scala/tools/nsc/ast/TreeDSL.scala +++ b/src/compiler/scala/tools/nsc/ast/TreeDSL.scala @@ -65,16 +65,14 @@ trait TreeDSL { * a member called nme.EQ. Not sure if that should happen, but we can be * robust by dragging in Any regardless. 
*/ - def MEMBER_== (other: Tree) = { - val opSym = if (target.tpe == null) NoSymbol else target.tpe member nme.EQ - if (opSym == NoSymbol) ANY_==(other) - else fn(target, opSym, other) - } + def MEMBER_== (other: Tree) = fn(target, (if (target.tpe == null) NoSymbol else target.tpe member nme.EQ).orElse(Any_==), other) def ANY_EQ (other: Tree) = OBJ_EQ(other AS ObjectTpe) def ANY_== (other: Tree) = fn(target, Any_==, other) def ANY_!= (other: Tree) = fn(target, Any_!=, other) - def OBJ_EQ (other: Tree) = fn(target, Object_eq, other) - def OBJ_NE (other: Tree) = fn(target, Object_ne, other) + def OBJ_EQ (other: Tree) = fn(target, Object_eq, other) + def OBJ_NE (other: Tree) = fn(target, Object_ne, other) + def OBJ_== (other: Tree) = fn(target, Object_equals, other) + def OBJ_## = fn(target, Object_hashCode) def INT_>= (other: Tree) = fn(target, getMember(IntClass, nme.GE), other) def INT_== (other: Tree) = fn(target, getMember(IntClass, nme.EQ), other) @@ -142,7 +140,17 @@ trait TreeDSL { def NEW(tpt: Tree, args: Tree*): Tree = New(tpt, List(args.toList)) def NOT(tree: Tree) = Select(tree, Boolean_not) - def AND(guards: Tree*) = if (guards.isEmpty) EmptyTree else guards reduceLeft gen.mkAnd + def AND(guards: Tree*) = { + def binaryTreeAnd(tests: Seq[Tree]): Tree = tests match{ + case Seq() => EmptyTree + case Seq(single) => single + case multiple => + val (before, after) = multiple.splitAt(tests.size / 2) + gen.mkAnd(binaryTreeAnd(before), binaryTreeAnd(after)) + } + + binaryTreeAnd(guards) + } def IF(tree: Tree) = new IfStart(tree, EmptyTree) def TRY(tree: Tree) = new TryStart(tree, Nil, EmptyTree) diff --git a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala index 055ed8c8fb39..baa23aca5c18 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala @@ -264,7 +264,7 @@ self => if (syntaxErrors.isEmpty) firstTry else in.healBraces() match { case Nil => showSyntaxErrors() ; firstTry - case patches => (this withPatches patches).parse() + case patches => withPatches(patches).parse() } } } @@ -274,6 +274,14 @@ self => final val InBlock: Location = 1 final val InTemplate: Location = 2 + type ParamOwner = Int + object ParamOwner { + final val Class = 0 + final val Type = 1 + final val TypeParam = 2 // unused + final val Def = 3 + } + // These symbols may not yet be loaded (e.g. in the ide) so don't go // through definitions to obtain the names. lazy val ScalaValueClassNames = Seq(tpnme.AnyVal, @@ -307,25 +315,6 @@ self => } } - /** Perform an operation while peeking ahead. - * Pushback if the operation yields an empty tree or blows to pieces. - */ - @inline def peekingAhead(tree: => Tree): Tree = { - @inline def peekahead() = { - in.prev copyFrom in - in.nextToken() - } - @inline def pushback() = { - in.next copyFrom in - in copyFrom in.prev - } - peekahead() - // try it, in case it is recoverable - val res = try tree catch { case e: Exception => pushback() ; throw e } - if (res.isEmpty) pushback() - res - } - class ParserTreeBuilder extends TreeBuilder { val global: self.global.type = self.global def unit = parser.unit @@ -674,6 +663,24 @@ self => case _ => false } + def isSoftModifier: Boolean = + currentRun.isScala3 && in.token == IDENTIFIER && softModifierNames.contains(in.name) + + /** Is the current token a soft modifier in a position where such a modifier is allowed? 
*/ + def isValidSoftModifier: Boolean = + isSoftModifier && { + val mod = in.name + lookingAhead { + while (in.token == NEWLINE || isModifier || isSoftModifier) in.nextToken() + + in.token match { + case CLASS | CASECLASS => true + case DEF | TRAIT | TYPE => mod == nme.infix + case _ => false + } + } + } + def isAnnotation: Boolean = in.token == AT def isLocalModifier: Boolean = in.token match { @@ -705,6 +712,20 @@ self => def isRawBar = isRawIdent && in.name == raw.BAR def isRawIdent = in.token == IDENTIFIER + def isWildcardType = in.token == USCORE || isScala3WildcardType + def isScala3WildcardType = settings.isScala3 && isRawIdent && in.name == raw.QMARK + def checkQMarkUsage() = + if (!settings.isScala3 && isRawIdent && in.name == raw.QMARK) + deprecationWarning(in.offset, + "Wrap `?` in backticks to continue to use it as an identifier, or use `-Xsource:3` to use it as a wildcard like in Scala 3.", "2.13.6") + def checkQMarkDefinition() = + if (isRawIdent && in.name == raw.QMARK) + syntaxError(in.offset, "using `?` as a type name requires backticks.") + def checkKeywordDefinition() = + if (isRawIdent && scala3Keywords.contains(in.name)) + deprecationWarning(in.offset, + s"Wrap `${in.name}` in backticks to use it as an identifier, it will become a keyword in Scala 3.", "2.13.7") + def isIdent = in.token == IDENTIFIER || in.token == BACKQUOTED_IDENT def isMacro = in.token == IDENTIFIER && in.name == nme.MACROkw @@ -715,12 +736,13 @@ self => } def isLiteral = isLiteralToken(in.token) - def isExprIntroToken(token: Token): Boolean = isLiteralToken(token) || (token match { - case IDENTIFIER | BACKQUOTED_IDENT | - THIS | SUPER | IF | FOR | NEW | USCORE | TRY | WHILE | - DO | RETURN | THROW | LPAREN | LBRACE | XMLSTART => true - case _ => false - }) + def isExprIntroToken(token: Token): Boolean = + !isValidSoftModifier && (isLiteralToken(token) || (token match { + case IDENTIFIER | BACKQUOTED_IDENT | + THIS | SUPER | IF | FOR | NEW | USCORE | TRY | WHILE | + DO | RETURN | THROW | LPAREN | LBRACE | XMLSTART => true + case _ => false + })) def isExprIntro: Boolean = isExprIntroToken(in.token) @@ -918,6 +940,16 @@ self => mkApply(Ident(op.encode), stripParens(left) :: arguments) } + /** Is current ident a `*`, and is it followed by a `)` or `, )`? */ + def followingIsScala3Vararg(): Boolean = + currentRun.isScala3 && isRawStar && lookingAhead { + in.token == RPAREN || + in.token == COMMA && { + in.nextToken() + in.token == RPAREN + } + } + /* --------- OPERAND/OPERATOR STACK --------------------------------------- */ /** Modes for infix types. 
*/ @@ -952,14 +984,14 @@ self => if (opinfo.targs.nonEmpty) syntaxError(opinfo.offset, "type application is not allowed for postfix operators") - val od = stripParens(reduceExprStack(base, opinfo.lhs)) - makePostfixSelect(start, opinfo.offset, od, opinfo.operator) + val lhs = reduceExprStack(base, opinfo.lhs) + makePostfixSelect(if (lhs.pos.isDefined) lhs.pos.start else start, opinfo.offset, stripParens(lhs), opinfo.operator) } def finishBinaryOp(isExpr: Boolean, opinfo: OpInfo, rhs: Tree): Tree = { import opinfo._ val operatorPos: Position = Position.range(rhs.pos.source, offset, offset, offset + operator.length) - val pos = lhs.pos union rhs.pos union operatorPos withPoint offset + val pos = lhs.pos.union(rhs.pos).union(operatorPos).withEnd(in.lastOffset).withPoint(offset) atPos(pos)(makeBinop(isExpr, lhs, operator, rhs, operatorPos, opinfo.targs)) } @@ -1023,13 +1055,14 @@ self => else { ts foreach checkNotByNameOrVarargs val tuple = atPos(start) { makeSafeTupleType(ts) } - infixTypeRest( + val tpt = infixTypeRest( compoundTypeRest( annotTypeRest( simpleTypeRest( tuple))), InfixMode.FirstOp ) + if (currentRun.isScala3) andType(tpt) else tpt } } private def makeExistentialTypeTree(t: Tree) = { @@ -1105,11 +1138,22 @@ self => } else atPos(start)(makeSafeTupleType(inParens(types()))) - case USCORE => wildcardType(in.skipToken()) case _ => - path(thisOK = false, typeOK = true) match { - case r @ SingletonTypeTree(_) => r - case r => convertToTypeId(r) + if (settings.isScala3 && (in.name == raw.PLUS || in.name == raw.MINUS) && lookingAhead(in.token == USCORE)) { + val start = in.offset + val identName = in.name.encode.append("_").toTypeName + in.nextToken() + in.nextToken() + atPos(start)(Ident(identName)) + } else if (isWildcardType) { + val scala3Wildcard = isScala3WildcardType + wildcardType(in.skipToken(), scala3Wildcard) + } else { + checkQMarkUsage() + path(thisOK = false, typeOK = true) match { + case r @ SingletonTypeTree(_) => r + case r => convertToTypeId(r) + } } }) } @@ -1193,12 +1237,44 @@ self => else t } + def andType(tpt: Tree): Tree = { + val parents = ListBuffer.empty[Tree] + var otherInfixOp: Tree = EmptyTree + def collect(tpt: Tree): Unit = tpt match { + case AppliedTypeTree(op @ Ident(tpnme.AND), List(left, right)) => + collect(left) + collect(right) + case AppliedTypeTree(op, args) if args.exists(arg => arg.pos.start < op.pos.point) => + otherInfixOp = op + parents += treeCopy.AppliedTypeTree(tpt, op, args.map(andType)) + case _ => + parents += tpt + } + collect(tpt) + if (parents.lengthCompare(1) > 0) { + if (!otherInfixOp.isEmpty) { + // TODO: Unlike Scala 3, we do not take precedence into account when + // parsing infix types, there's an unmerged PR that attempts to + // change that (#6147), but until that's merged we cannot accurately + // parse things like `A Map B & C`, so give up and emit an error + // rather than continuing with an incorrect parse tree. 
+ syntaxError(otherInfixOp.pos.point, + s"Cannot parse infix type combining `&` and `$otherInfixOp`, please use `$otherInfixOp` as the head of a regular type application.") + } + atPos(tpt.pos.start)(CompoundTypeTree(Template(parents.toList, noSelfType, Nil))) + } + else + parents.head + } + /** {{{ * InfixType ::= CompoundType {id [nl] CompoundType} * }}} */ - def infixType(mode: InfixMode.Value): Tree = - placeholderTypeBoundary { infixTypeRest(compoundType(), mode) } + def infixType(mode: InfixMode.Value): Tree = { + val tpt = placeholderTypeBoundary { infixTypeRest(compoundType(), mode) } + if (currentRun.isScala3) andType(tpt) else tpt + } /** {{{ * Types ::= Type {`,` Type} @@ -1222,16 +1298,20 @@ self => def rawIdent(): Name = try in.name finally in.nextToken() /** For when it's known already to be a type name. */ - def identForType(): TypeName = ident().toTypeName - def identForType(skipIt: Boolean): TypeName = ident(skipIt).toTypeName + def identForType(): TypeName = identForType(skipIt = true) + def identForType(skipIt: Boolean): TypeName = { + checkQMarkDefinition() + ident(skipIt).toTypeName + } def identOrMacro(): Name = if (isMacro) rawIdent() else ident() - def selector(t: Tree): Tree = { + def selector(t0: Tree): Tree = { + val t = stripParens(t0) val point = if (isIdent) in.offset else in.lastOffset //scala/bug#8459 //assert(t.pos.isDefined, t) if (t != EmptyTree) - Select(t, ident(skipIt = false)) setPos r2p(t.pos.start, point, in.lastOffset) + Select(t, ident(skipIt = false)) setPos r2p(t0.pos.start, point, in.lastOffset) else errorTermTree // has already been reported } @@ -1353,8 +1433,7 @@ self => else if (in.token == SYMBOLLIT) { def msg(what: String) = s"""symbol literal is $what; use Symbol("${in.strVal}") instead""" - if (settings.isScala3) syntaxError(in.offset, msg("unsupported")) - else deprecationWarning(in.offset, msg("deprecated"), "2.13.0") + deprecationWarning(in.offset, msg("deprecated"), "2.13.0") Apply(scalaDot(nme.Symbol), List(finish(in.strVal))) } else finish(in.token match { @@ -1472,8 +1551,8 @@ self => * WildcardType ::= `_` TypeBounds * }}} */ - def wildcardType(start: Offset) = { - val pname = freshTypeName("_$") + def wildcardType(start: Offset, qmark: Boolean) = { + val pname = if (qmark) freshTypeName("?$") else freshTypeName("_$") val t = atPos(start)(Ident(pname)) val bounds = typeBounds() val param = atPos(t.pos union bounds.pos) { makeSyntheticTypeParam(pname, bounds) } @@ -1710,7 +1789,7 @@ self => val base = opstack @tailrec - def loop(top: Tree): Tree = if (!isIdent) top else { + def loop(top: Tree): Tree = if (!isIdent || followingIsScala3Vararg()) top else { pushOpInfo(reduceExprStack(base, top)) newLineOptWhenFollowing(isExprIntroToken) if (isExprIntro) @@ -1721,7 +1800,12 @@ self => else finishPostfixOp(start, base, popOpInfo()) } - reduceExprStack(base, loop(prefixExpr())) + val expr = reduceExprStack(base, loop(prefixExpr())) + if (followingIsScala3Vararg()) + atPos(expr.pos.start) { + Typed(stripParens(expr), atPos(in.skipToken()) { Ident(tpnme.WILDCARD_STAR) }) + } + else expr } /** {{{ @@ -1795,14 +1879,14 @@ self => in.token match { case DOT => in.nextToken() - simpleExprRest(selector(stripParens(t)), canApply = true) + simpleExprRest(selector(t), canApply = true) case LBRACKET => val t1 = stripParens(t) t1 match { case Ident(_) | Select(_, _) | Apply(_, _) | Literal(_) => var app: Tree = t1 while (in.token == LBRACKET) - app = atPos(app.pos.start, in.offset)(TypeApply(app, exprTypeArgs())) + app = atPos(t.pos.start, 
in.offset)(TypeApply(app, exprTypeArgs())) simpleExprRest(app, canApply = true) case _ => @@ -1921,11 +2005,15 @@ self => } /** {{{ - * Generator ::= Pattern1 (`<-` | `=`) Expr [Guard] + * Generator ::= [`case`] Pattern1 (`<-` | `=`) Expr [Guard] * }}} */ def generator(eqOK: Boolean, allowNestedIf: Boolean = true): List[Tree] = { val start = in.offset + val hasCase = in.token == CASE + if (hasCase) + in.skipCASE() + val hasVal = in.token == VAL if (hasVal) in.nextToken() @@ -1944,7 +2032,7 @@ self => else syntaxError(in.offset, msg("unsupported", "just remove `val`")) } - if (hasEq && eqOK) in.nextToken() + if (hasEq && eqOK && !hasCase) in.nextToken() else accept(LARROW) val rhs = expr() @@ -1976,17 +2064,18 @@ self => final def functionArgType(): Tree = argType() final def argType(): Tree = { val start = in.offset - in.token match { - case USCORE => + if (isWildcardType) { + val scala3Wildcard = isScala3WildcardType in.nextToken() - if (in.token == SUBTYPE || in.token == SUPERTYPE) wildcardType(start) + if (in.token == SUBTYPE || in.token == SUPERTYPE) wildcardType(start, scala3Wildcard) else atPos(start) { Bind(tpnme.WILDCARD, EmptyTree) } - case _ => - typ() match { - case Ident(name: TypeName) if nme.isVariableName(name) => - atPos(start) { Bind(name, EmptyTree) } - case t => t - } + } else { + checkQMarkUsage() + typ() match { + case Ident(name: TypeName) if nme.isVariableName(name) => + atPos(start) { Bind(name, EmptyTree) } + case t => t + } } } @@ -2071,13 +2160,19 @@ self => case COMMA => !isXML && in.isTrailingComma(RPAREN) case _ => false } - def checkWildStar: Tree = top match { - case Ident(nme.WILDCARD) if isSequenceOK && isRawStar => peekingAhead ( - if (isCloseDelim) atPos(top.pos.start, in.prev.offset)(Star(stripParens(top))) - else EmptyTree - ) - case _ => EmptyTree - } + def checkWildStar: Tree = + if (isSequenceOK) { + top match { + case Ident(nme.WILDCARD) if isRawStar && lookingAhead(isCloseDelim) => + atPos(top.pos.start, in.skipToken()) { Star(top) } + case Ident(name) if followingIsScala3Vararg() => + atPos(top.pos.start) { + Bind(name, atPos(in.skipToken()) { Star(Ident(nme.WILDCARD)) }) + } + case _ => EmptyTree + } + } + else EmptyTree @tailrec def loop(top: Tree): Tree = reducePatternStack(base, top) match { case next if isIdent && !isRawBar => pushOpInfo(next) ; loop(simplePattern(() => badPattern3())) @@ -2255,8 +2350,11 @@ self => */ def accessModifierOpt(): Modifiers = normalizeModifiers { in.token match { - case m @ (PRIVATE | PROTECTED) => in.nextToken() ; accessQualifierOpt(Modifiers(flagTokens(m))) - case _ => NoMods + case m @ (PRIVATE | PROTECTED) => + in.nextToken() + accessQualifierOpt(Modifiers(flagTokens(m))) + case _ => + NoMods } } @@ -2278,7 +2376,10 @@ self => in.nextToken() loop(mods) case _ => - mods + if (isValidSoftModifier) { + in.nextToken() + loop(mods) + } else mods } loop(NoMods) } @@ -2362,14 +2463,7 @@ self => if (vds.isEmpty) syntaxError(start, s"case classes must have a parameter list; try 'case class $name()' or 'case object $name'") else if (vds.head.nonEmpty && vds.head.head.mods.isImplicit) { - if (currentRun.isScala213) - syntaxError(start, s"case classes must have a non-implicit parameter list; try 'case class $name()$elliptical'") - else { - deprecationWarning(start, s"case classes should have a non-implicit parameter list; adapting to 'case class $name()$elliptical'", "2.12.2") - vds.insert(0, List.empty[ValDef]) - vds(1) = vds(1).map(vd => copyValDef(vd)(mods = vd.mods & ~Flags.CASEACCESSOR)) - if (implicitSection != 
-1) implicitSection += 1 - } + syntaxError(start, s"case classes must have a non-implicit parameter list; try 'case class $name()$elliptical'") } } if (implicitSection != -1 && implicitSection != vds.length - 1) @@ -2434,6 +2528,7 @@ self => if (caseParam) mods |= Flags.CASEACCESSOR } val nameOffset = in.offset + checkKeywordDefinition() val name = ident() var bynamemod = 0L val tpt = { @@ -2468,8 +2563,9 @@ self => * TypeParam ::= Id TypeParamClauseOpt TypeBounds {`<%` Type} {`:` Type} * }}} */ - def typeParamClauseOpt(owner: Name, contextBoundBuf: ListBuffer[Tree]): List[TypeDef] = { + def typeParamClauseOpt(owner: Name, contextBoundBuf: ListBuffer[Tree], ownerKind: ParamOwner): List[TypeDef] = { def typeParam(ms: Modifiers): TypeDef = { + val isAbstractOwner = ownerKind == ParamOwner.Type //|| ownerKind == ParamOwner.TypeParam var mods = ms | Flags.PARAM val start = in.offset if (owner.isTypeName && isIdent) { @@ -2482,10 +2578,18 @@ self => } } val nameOffset = in.offset - // TODO AM: freshTermName(o2p(in.skipToken()), "_$$"), will need to update test suite - val pname: TypeName = wildcardOrIdent().toTypeName + checkQMarkDefinition() + checkKeywordDefinition() + val pname: TypeName = + if (in.token == USCORE && (isAbstractOwner || !currentRun.isScala3)) { + if (!isAbstractOwner) + deprecationWarning(in.offset, "Top-level wildcard is not allowed and will error under -Xsource:3", "2.13.7") + in.nextToken() + freshTypeName("_$$") + } + else ident(skipIt = false).toTypeName val param = atPos(start, nameOffset) { - val tparams = typeParamClauseOpt(pname, null) // @M TODO null --> no higher-order context bounds for now + val tparams = typeParamClauseOpt(pname, null, ParamOwner.Type) // @M TODO null --> no higher-order context bounds for now TypeDef(mods, pname, tparams, typeBounds()) } if (contextBoundBuf ne null) { @@ -2504,7 +2608,6 @@ self => } param } - newLineOptWhenFollowedBy(LBRACKET) if (in.token == LBRACKET) inBrackets(commaSeparated(typeParam(NoMods withAnnotations annotations(skipNewLines = true)))) else Nil } @@ -2567,19 +2670,27 @@ self => def loop(expr: Tree): Tree = { expr setPos expr.pos.makeTransparent val selectors: List[ImportSelector] = in.token match { - case USCORE => List(wildImportSelector()) // import foo.bar._; - case LBRACE => importSelectors() // import foo.bar.{ x, y, z } - case _ => - val nameOffset = in.offset - val name = ident() - if (in.token == DOT) { - // import foo.bar.ident. and so create a select node and recurse. - val t = atPos(start, if (name == nme.ERROR) in.offset else nameOffset)(Select(expr, name)) - in.nextToken() - return loop(t) + case USCORE => + List(wildImportSelector()) // import foo.bar._ + case IDENTIFIER if currentRun.isScala3 && in.name == raw.STAR => + List(wildImportSelector()) // import foo.bar.* + case LBRACE => + importSelectors() // import foo.bar.{ x, y, z } + case _ => + if (settings.isScala3 && lookingAhead { isRawIdent && in.name == nme.as }) + List(importSelector()) // import foo.bar as baz + else { + val nameOffset = in.offset + val name = ident() + if (in.token == DOT) { + // import foo.bar.ident. and so create a select node and recurse. + val t = atPos(start, if (name == nme.ERROR) in.offset else nameOffset)(Select(expr, name)) + in.nextToken() + return loop(t) + } + // import foo.bar.Baz; + else List(makeImportSelector(name, nameOffset)) } - // import foo.bar.Baz; - else List(makeImportSelector(name, nameOffset)) } // reaching here means we're done walking. 
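(Editorial note: the selector handling above lets `-Xsource:3` code use the Scala 3 import spellings; the imported packages below are arbitrary examples.)

```scala
import scala.collection.mutable.*                 // `*` wildcard, equivalent to `_`
import scala.collection.mutable.{Map as MutMap}   // `as` rename, equivalent to `Map => MutMap`
import scala.concurrent.duration.{given, *}       // `given` next to a wildcard is dropped, leaving a plain wildcard import
```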
atPos(start)(Import(expr, selectors)) @@ -2603,15 +2714,23 @@ self => * }}} */ def importSelectors(): List[ImportSelector] = { - val selectors = inBracesOrNil(commaSeparated(importSelector())) + val selectors0 = inBracesOrNil(commaSeparated(importSelector())) + + // Treat an import of `*, given` or `given, *` as if it was an import of `*` + // since the former in Scala 3 has the same semantics as the latter in Scala 2. + val selectors = + if (currentRun.isScala3 && selectors0.exists(_.isWildcard)) + selectors0.filterNot(sel => sel.name == nme.`given` && sel.rename == sel.name) + else + selectors0 + for (t <- selectors.init if t.isWildcard) syntaxError(t.namePos, "Wildcard import must be in last position") selectors } - def wildcardOrIdent() = { - if (in.token == USCORE) { in.nextToken() ; nme.WILDCARD } + def wildcardOrIdent() = + if (in.token == USCORE || settings.isScala3 && isRawStar) { in.nextToken() ; nme.WILDCARD } else ident() - } /** {{{ * ImportSelector ::= Id [`=>` Id | `=>` `_`] @@ -2622,17 +2741,20 @@ self => val bbq = in.token == BACKQUOTED_IDENT val name = wildcardOrIdent() var renameOffset = -1 - val rename = in.token match { - case ARROW => + + val rename = + if (in.token == ARROW || (settings.isScala3 && isRawIdent && in.name == nme.as)) { in.nextToken() renameOffset = in.offset if (name == nme.WILDCARD && !bbq) syntaxError(renameOffset, "Wildcard import cannot be renamed") wildcardOrIdent() - case _ if name == nme.WILDCARD && !bbq => null - case _ => + } + else if (name == nme.WILDCARD && !bbq) null + else { renameOffset = start name - } + } + ImportSelector(name, start, rename, renameOffset) } @@ -2687,6 +2809,7 @@ self => def patDefOrDcl(pos : Int, mods: Modifiers): List[Tree] = { var newmods = mods in.nextToken() + checkKeywordDefinition() val lhs = commaSeparated(stripParens(noSeq.pattern2())) val tp = typedOpt() val (rhs, rhsPos) = @@ -2782,6 +2905,7 @@ self => } else { val nameOffset = in.offset + checkKeywordDefinition() val name = identOrMacro() funDefRest(start, nameOffset, mods, name) } @@ -2794,7 +2918,7 @@ self => // [T : B] or [T : => B]; it contains the equivalent implicit parameter type, // i.e. (B[T] or T => B) val contextBoundBuf = new ListBuffer[Tree] - val tparams = typeParamClauseOpt(name, contextBoundBuf) + val tparams = typeParamClauseOpt(name, contextBoundBuf, ParamOwner.Def) val vparamss = paramClauses(name, contextBoundBuf.toList, ofCaseClass = false) newLineOptWhenFollowedBy(LBRACE) var restype = fromWithinReturnType(typedOpt()) @@ -2866,8 +2990,7 @@ self => t = Apply(t, argumentExprs()) newLineOptWhenFollowedBy(LBRACE) } - if (classContextBounds.isEmpty) t - else Apply(t, vparamss.last.map(vp => Ident(vp.name))) + t } /** {{{ @@ -2894,9 +3017,10 @@ self => in.nextToken() newLinesOpt() atPos(start, in.offset) { + checkKeywordDefinition() val name = identForType() // @M! 
a type alias as well as an abstract type may declare type parameters - val tparams = typeParamClauseOpt(name, null) + val tparams = typeParamClauseOpt(name, null, ParamOwner.Type) in.token match { case EQUALS => in.nextToken() @@ -2954,13 +3078,17 @@ self => * }}} */ def classDef(start: Offset, mods: Modifiers): ClassDef = { + def isAfterLineEnd: Boolean = in.lastOffset < in.lineStartOffset && (in.lineStartOffset <= in.offset || in.lastOffset < in.lastLineStartOffset && in.lastLineStartOffset <= in.offset) in.nextToken() + checkKeywordDefinition() val nameOffset = in.offset val name = identForType() + if (currentRun.isScala3 && in.token == LBRACKET && isAfterLineEnd) + deprecationWarning(in.offset, "type parameters should not follow newline", "2.13.7") atPos(start, if (name == tpnme.ERROR) start else nameOffset) { savingClassContextBounds { val contextBoundBuf = new ListBuffer[Tree] - val tparams = typeParamClauseOpt(name, contextBoundBuf) + val tparams = typeParamClauseOpt(name, contextBoundBuf, ParamOwner.Class) classContextBounds = contextBoundBuf.toList val tstart = (in.offset :: classContextBounds.map(_.pos.start)).min if (!classContextBounds.isEmpty && mods.isTrait) { @@ -2990,6 +3118,7 @@ self => def objectDef(start: Offset, mods: Modifiers, isPackageObject: Boolean = false): ModuleDef = { in.nextToken() val nameOffset = in.offset + checkKeywordDefinition() val name = ident() val tstart = in.offset atPos(start, if (name == nme.ERROR) start else nameOffset) { @@ -3104,7 +3233,7 @@ self => deprecationWarning(in.offset, "Using `<:` for `extends` is deprecated", since = "2.12.5") true } - val (parents, self, body) = ( + val (parents, self, body) = if (in.token == EXTENDS || in.token == SUBTYPE && mods.isTrait && deprecatedUsage()) { in.nextToken() template() @@ -3114,27 +3243,25 @@ self => val (self, body) = templateBodyOpt(parenMeansSyntaxError = mods.isTrait || name.isTermName) (List(), self, body) } - ) - def anyvalConstructor() = ( - // Not a well-formed constructor, has to be finished later - see note - // regarding AnyVal constructor in AddInterfaces. - DefDef(NoMods, nme.CONSTRUCTOR, Nil, ListOfNil, TypeTree(), Block(Nil, literalUnit)) - ) - val parentPos = o2p(in.offset) - val tstart1 = if (body.isEmpty && in.lastOffset < tstart) in.lastOffset else tstart + // Not a well-formed constructor, has to be finished later - see note + // regarding AnyVal constructor in AddInterfaces. + def anyvalConstructor() = DefDef(NoMods, nme.CONSTRUCTOR, Nil, ListOfNil, TypeTree(), Block(Nil, literalUnit)) + // tstart is the offset of the token after `class C[A]` (which may be LPAREN, EXTENDS, LBRACE). + // if there is no template body, then tstart may be in the next program element, so back up to just after the `class C[A]`. 
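(Editorial note: a small, hypothetical example of the layout that now draws the deprecation warning added in `classDef` above when compiling with `-Xsource:3`.)

```scala
class Config
[A](value: A)   // deprecation (since 2.13.7): type parameters should not follow newline
```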
+ val templateOffset = if (body.isEmpty && in.lastOffset < tstart) in.lastOffset else tstart + val templatePos = o2p(templateOffset) - // we can't easily check this later, because `gen.mkParents` adds the default AnyRef parent, and we need to warn based on what the user wrote - if (name == nme.PACKAGEkw && parents.nonEmpty && settings.isScala3) - deprecationWarning(tstart, s"package object inheritance is deprecated (https://github.com/scala/scala-dev/issues/441);\n" + - s"drop the `extends` clause or use a regular object instead", "3.0.0") + // warn now if user wrote parents for package object; `gen.mkParents` adds AnyRef to parents + if (currentRun.isScala3 && name == nme.PACKAGEkw && !parents.isEmpty) + deprecationWarning(tstart, """package object inheritance is deprecated (https://github.com/scala/scala-dev/issues/441); + |drop the `extends` clause or use a regular object instead""".stripMargin, "3.0.0") - atPos(tstart1) { + atPos(templateOffset) { // Exclude only the 9 primitives plus AnyVal. if (inScalaRootPackage && ScalaValueClassNames.contains(name)) Template(parents, self, anyvalConstructor() :: body) else - gen.mkTemplate(gen.mkParents(mods, parents, parentPos), - self, constrMods, vparamss, body, o2p(tstart)) + gen.mkTemplate(gen.mkParents(mods, parents, templatePos), self, constrMods, vparamss, body, templatePos) } } @@ -3208,7 +3335,7 @@ self => case IMPORT => in.flushDoc() importClause() - case _ if isAnnotation || isTemplateIntro || isModifier => + case _ if isAnnotation || isTemplateIntro || isModifier || isValidSoftModifier => joinComment(topLevelTmplDef :: Nil) } @@ -3258,7 +3385,7 @@ self => case IMPORT => in.flushDoc() importClause() - case _ if isDefIntro || isModifier || isAnnotation => + case _ if isDefIntro || isModifier || isAnnotation || isValidSoftModifier => joinComment(nonLocalDefOrDcl) case _ if isExprIntro => in.flushDoc() diff --git a/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala b/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala index 5c165a6dfed0..8010fd2756a0 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala @@ -13,6 +13,7 @@ package scala.tools.nsc package ast.parser +import scala.tools.nsc.settings.ScalaVersion import scala.tools.nsc.util.{CharArrayReader, CharArrayReaderData} import scala.reflect.internal.util._ import scala.reflect.internal.Chars._ @@ -171,7 +172,45 @@ trait Scanners extends ScannersCommon { /** A switch whether operators at the start of lines can be infix operators. */ private var allowLeadingInfixOperators = true - private def isDigit(c: Char) = java.lang.Character isDigit c + private def isDigit(c: Char) = Character.isDigit(c) + + import Character.{isHighSurrogate, isLowSurrogate, isUnicodeIdentifierPart, isUnicodeIdentifierStart, isValidCodePoint, toCodePoint} + + // given char (ch) is high surrogate followed by low, codepoint passes predicate. + // true means supplementary chars were put to buffer. + // strict to require low surrogate (if not in string literal). 
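(Editorial note: a sketch of what the surrogate-pair handling introduced here enables; the Deseret letter is just one example of a supplementary character.)

```scala
val 𐐨 = "ok"          // U+10428, a letter outside the BMP, is now accepted in an identifier
val n  = "𐐨".length   // 2: one supplementary character is a surrogate pair in a String
// val c = '𐐨'        // error: illegal codepoint in Char constant (a Char holds a single UTF-16 unit)
```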
+ private def isSupplementary(high: Char, test: Int => Boolean, strict: Boolean = true): Boolean = + isHighSurrogate(high) && { + var res = false + nextChar() + val low = ch + if (isLowSurrogate(low)) { + nextChar() + val codepoint = toCodePoint(high, low) + if (isValidCodePoint(codepoint) && test(codepoint)) { + putChar(high) + putChar(low) + res = true + } else + syntaxError(f"illegal character '\\u$high%04x\\u$low%04x'") + } else if (!strict) { + putChar(high) + res = true + } else + syntaxError(f"illegal character '\\u$high%04x' missing low surrogate") + res + } + private def atSupplementary(ch: Char, f: Int => Boolean): Boolean = + isHighSurrogate(ch) && { + val hi = ch + val r = lookaheadReader + r.nextRawChar() + val lo = r.ch + isLowSurrogate(lo) && { + val codepoint = toCodePoint(hi, lo) + isValidCodePoint(codepoint) && f(codepoint) + } + } private var openComments = 0 final protected def putCommentChar(): Unit = { processCommentChar(); nextChar() } @@ -393,6 +432,19 @@ trait Scanners extends ScannersCommon { case _ => } + /** Advance beyond a case token without marking the CASE in sepRegions. + * This method should be called to skip beyond CASE tokens that are + * not part of matches, i.e. no ARROW is expected after them. + */ + def skipCASE(): Unit = { + assert(token == CASE, s"Internal error: skipCASE() called on non-case token $token") + nextToken() + sepRegions = sepRegions.tail + } + + /** True to warn about migration change in infix syntax. */ + private val infixMigration = settings.Xmigration.value <= ScalaVersion("2.13.2") + /** Produce next token, filling TokenData fields of Scanner. */ def nextToken(): Unit = { @@ -432,18 +484,35 @@ trait Scanners extends ScannersCommon { token = nl } + def isOperator: Boolean = token == BACKQUOTED_IDENT || token == IDENTIFIER && isOperatorPart(name.charAt(name.length - 1)) + + /* A leading infix operator must be followed by a lexically suitable expression. + * Usually any simple expr will do. However, a backquoted identifier may serve as + * either an op or a reference. So the additional constraint is that the following + * token can't be an assignment operator. (Dotty disallows binary ops, hence the + * test for unary.) See run/multiLineOps.scala for 42 + `x` on 3 lines, where + + * is not leading infix because backquoted x is non-unary op. + */ + def followedByInfixRHS: Boolean = { + //def isCandidateInfixRHS: Boolean = isSimpleExprIntroToken(token) && (!isOperator || nme.raw.isUnary(name) || token == BACKQUOTED_IDENT) + def isAssignmentOperator: Boolean = + name.endsWith('=') && !name.startsWith('=') && isOperatorPart(name.startChar) && + (name.length != 2 || (name.startChar match { case '!' | '<' | '>' => false case _ => true })) + def isCandidateInfixRHS: Boolean = isSimpleExprIntroToken(token) && (!isOperator || token == BACKQUOTED_IDENT || !isAssignmentOperator) + lookingAhead { + isCandidateInfixRHS || token == NEWLINE && { nextToken() ; isCandidateInfixRHS } + } + } + /* A leading symbolic or backquoted identifier is treated as an infix operator * if it is followed by at least one ' ' and a token on the same line * that can start an expression. 
*/ def isLeadingInfixOperator = allowLeadingInfixOperators && - (token == BACKQUOTED_IDENT || - token == IDENTIFIER && isOperatorPart(name.charAt(name.length - 1))) && - (ch == ' ') && lookingAhead { - // force a NEWLINE after current token if it is on its own line - isSimpleExprIntroToken(token) - } + isOperator && + (isWhitespace(ch) || ch == LF) && + followedByInfixRHS /* Insert NEWLINE or NEWLINES if * - we are after a newline @@ -459,8 +528,8 @@ trait Scanners extends ScannersCommon { val msg = """|Line starts with an operator that in future |will be taken as an infix expression continued from the previous line. |To force the previous interpretation as a separate statement, - |add an explicit `;`, add an empty line, or remove spaces after the operator.""".stripMargin - deprecationWarning(msg, "2.13.2") + |add an explicit `;`, add an empty line, or remove spaces after the operator.""" + if (infixMigration) deprecationWarning(msg.stripMargin, "2.13.2") insertNL(NEWLINE) } } @@ -670,20 +739,22 @@ trait Scanners extends ScannersCommon { val isEmptyCharLit = (ch == '\'') getLitChar() if (ch == '\'') { - if (isEmptyCharLit && currentRun.isScala213) + if (isEmptyCharLit) syntaxError("empty character literal (use '\\'' for single quote)") else { - if (isEmptyCharLit) - deprecationWarning("deprecated syntax for character literal (use '\\'' for single quote)", "2.12.2") nextChar() - token = CHARLIT - setStrVal() + if (cbuf.length != 1) + syntaxError("illegal codepoint in Char constant: " + cbuf.toString.map(c => f"\\u$c%04x").mkString("'", "", "'")) + else { + token = CHARLIT + setStrVal() + } } - } else if (isEmptyCharLit) { + } + else if (isEmptyCharLit) syntaxError("empty character literal") - } else { + else unclosedCharLit() - } } else unclosedCharLit() } @@ -726,7 +797,7 @@ trait Scanners extends ScannersCommon { } else if (ch == '\u2190') { deprecationWarning("The unicode arrow `←` is deprecated, use `<-` instead. If you still wish to display it as one character, consider using a font with programming ligatures such as Fira Code.", "2.13.0") nextChar(); token = LARROW - } else if (Character.isUnicodeIdentifierStart(ch)) { + } else if (isUnicodeIdentifierStart(ch)) { putChar(ch) nextChar() getIdentRest() @@ -734,8 +805,10 @@ trait Scanners extends ScannersCommon { putChar(ch) nextChar() getOperatorRest() + } else if (isSupplementary(ch, isUnicodeIdentifierStart)) { + getIdentRest() } else { - syntaxError("illegal character '" + ("" + '\\' + 'u' + "%04x".format(ch.toInt)) + "'") + syntaxError(f"illegal character '\\u$ch%04x'") nextChar() } } @@ -802,13 +875,15 @@ trait Scanners extends ScannersCommon { case SU => // strangely enough, Character.isUnicodeIdentifierPart(SU) returns true! 
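(Editorial note, reaching back to the leading-infix handling earlier in this hunk: a minimal example of a line-leading operator that is taken as a continuation of the previous line rather than as a separate statement; the value name is arbitrary.)

```scala
val sum = 1
  + 2   // a leading `+` followed by an expression continues the line above: sum == 3
```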
finishNamed() case _ => - if (Character.isUnicodeIdentifierPart(ch)) { + if (isUnicodeIdentifierPart(ch)) { putChar(ch) nextChar() getIdentRest() - } else { - finishNamed() } + else if (isSupplementary(ch, isUnicodeIdentifierPart)) + getIdentRest() + else + finishNamed() } @tailrec @@ -926,6 +1001,25 @@ trait Scanners extends ScannersCommon { } getStringPart(multiLine, seenEscapedQuote || q) } else if (ch == '$') { + @tailrec def getInterpolatedIdentRest(): Unit = + if (ch != SU && isUnicodeIdentifierPart(ch)) { + putChar(ch) + nextRawChar() + getInterpolatedIdentRest() + } else if (atSupplementary(ch, isUnicodeIdentifierPart)) { + putChar(ch) + nextRawChar() + putChar(ch) + nextRawChar() + getInterpolatedIdentRest() + } else { + next.token = IDENTIFIER + next.name = newTermName(cbuf.toCharArray) + cbuf.clear() + val idx = next.name.start - kwOffset + if (idx >= 0 && idx < kwArray.length) + next.token = kwArray(idx) + } nextRawChar() if (ch == '$' || ch == '"') { putChar(ch) @@ -939,32 +1033,29 @@ trait Scanners extends ScannersCommon { finishStringPart() nextRawChar() next.token = USCORE - } else if (Character.isUnicodeIdentifierStart(ch)) { + } else if (isUnicodeIdentifierStart(ch)) { finishStringPart() - do { - putChar(ch) - nextRawChar() - } while (ch != SU && Character.isUnicodeIdentifierPart(ch)) - next.token = IDENTIFIER - next.name = newTermName(cbuf.toString) - cbuf.clear() - val idx = next.name.start - kwOffset - if (idx >= 0 && idx < kwArray.length) { - next.token = kwArray(idx) - } + putChar(ch) + nextRawChar() + getInterpolatedIdentRest() + } else if (atSupplementary(ch, isUnicodeIdentifierStart)) { + finishStringPart() + putChar(ch) + nextRawChar() + putChar(ch) + nextRawChar() + getInterpolatedIdentRest() } else { val expectations = "$$, $\", $identifier or ${expression}" syntaxError(s"invalid string interpolation $$$ch, expected: $expectations") } } else { val isUnclosedLiteral = (ch == SU || (!multiLine && (ch == CR || ch == LF))) - if (isUnclosedLiteral) { + if (isUnclosedLiteral) if (multiLine) incompleteInputError("unclosed multi-line string literal") - else { + else unclosedStringLit(seenEscapedQuote) - } - } else { putChar(ch) nextRawChar() @@ -998,53 +1089,38 @@ trait Scanners extends ScannersCommon { false } - /** copy current character into cbuf, interpreting any escape sequences, - * and advance to next character. + /** Copy current character into cbuf, interpreting any escape sequences, + * and advance to next character. Surrogate pairs are consumed (see check + * at fetchSingleQuote), but orphan surrogate is allowed. 
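+     *
+     *  For example (illustrative): `\n` yields a newline, `\u0041` the character `A`,
+     *  an octal escape such as `\101` is reported as unsupported, and a well-formed
+     *  surrogate pair is stored as its two chars.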
*/ protected def getLitChar(): Unit = if (ch == '\\') { nextChar() - if ('0' <= ch && ch <= '7') { - val start = charOffset - 2 - val leadch: Char = ch - var oct: Int = digit2int(ch, 8) - nextChar() - if ('0' <= ch && ch <= '7') { - oct = oct * 8 + digit2int(ch, 8) - nextChar() - if (leadch <= '3' && '0' <= ch && ch <= '7') { - oct = oct * 8 + digit2int(ch, 8) - nextChar() - } - } - val alt = if (oct == LF) "\\n" else "\\u%04x" format oct - syntaxError(start, s"octal escape literals are unsupported: use $alt instead") - putChar(oct.toChar) - } else { - if (ch == 'u') { - if (getUEscape()) nextChar() - } - else { - ch match { - case 'b' => putChar('\b') - case 't' => putChar('\t') - case 'n' => putChar('\n') - case 'f' => putChar('\f') - case 'r' => putChar('\r') - case '\"' => putChar('\"') - case '\'' => putChar('\'') - case '\\' => putChar('\\') - case _ => invalidEscape() - } - nextChar() - } - } - } else { + charEscape() + } else if (!isSupplementary(ch, _ => true, strict = false)) { putChar(ch) nextChar() } - private def getUEscape(): Boolean = { + private def charEscape(): Unit = { + var bump = true + ch match { + case 'b' => putChar('\b') + case 't' => putChar('\t') + case 'n' => putChar('\n') + case 'f' => putChar('\f') + case 'r' => putChar('\r') + case '\"' => putChar('\"') + case '\'' => putChar('\'') + case '\\' => putChar('\\') + case 'u' => bump = uEscape() + case x if '0' <= x && x <= '7' => bump = octalEscape() + case _ => invalidEscape() + } + if (bump) nextChar() + } + + private def uEscape(): Boolean = { while (ch == 'u') nextChar() var codepoint = 0 var digitsRead = 0 @@ -1065,7 +1141,25 @@ trait Scanners extends ScannersCommon { putChar(found) true } - + + private def octalEscape(): Boolean = { + val start = charOffset - 2 + val leadch: Char = ch + var oct: Int = digit2int(ch, 8) + nextChar() + if ('0' <= ch && ch <= '7') { + oct = oct * 8 + digit2int(ch, 8) + nextChar() + if (leadch <= '3' && '0' <= ch && ch <= '7') { + oct = oct * 8 + digit2int(ch, 8) + nextChar() + } + } + val alt = if (oct == LF) "\\n" else f"\\u$oct%04x" + syntaxError(start, s"octal escape literals are unsupported: use $alt instead") + putChar(oct.toChar) + false + } protected def invalidEscape(): Unit = { syntaxError(charOffset - 1, "invalid escape character") @@ -1430,6 +1524,10 @@ trait Scanners extends ScannersCommon { final val token2name = (allKeywords map (_.swap)).toMap + final val softModifierNames = Set(nme.open, nme.infix) + + final val scala3Keywords = Set(nme.`enum`, nme.`export`, nme.`given`) + // Token representation ---------------------------------------------------- /** Returns the string representation of given token. */ diff --git a/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala b/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala index 2b05b600b598..8d6a750fbbb6 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala @@ -56,8 +56,7 @@ abstract class TreeBuilder { ValDef(Modifiers(PRIVATE), name, tpt, EmptyTree) /** Tree for `od op`, start is start0 if od.pos is borked. 
*/ - def makePostfixSelect(start0: Int, end: Int, od: Tree, op: Name): Tree = { - val start = if (od.pos.isDefined) od.pos.start else start0 + def makePostfixSelect(start: Int, end: Int, od: Tree, op: Name): Tree = { atPos(r2p(start, end, end + op.length)) { new PostfixSelect(od, op.encode) } } diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BCodeBodyBuilder.scala b/src/compiler/scala/tools/nsc/backend/jvm/BCodeBodyBuilder.scala index 6bba2f75190d..b9ea86288ad9 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/BCodeBodyBuilder.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/BCodeBodyBuilder.scala @@ -33,7 +33,7 @@ abstract class BCodeBodyBuilder extends BCodeSkelBuilder { import bTypes._ import coreBTypes._ import definitions._ - import genBCode.postProcessor.backendUtils.addIndyLambdaImplMethod + import genBCode.postProcessor.backendUtils.{addIndyLambdaImplMethod, classfileVersion} import genBCode.postProcessor.callGraph.{inlineAnnotatedCallsites, noInlineAnnotatedCallsites} /* @@ -79,9 +79,23 @@ abstract class BCodeBodyBuilder extends BCodeSkelBuilder { case Assign(lhs, rhs) => val s = lhs.symbol val Local(tk, _, idx, _) = locals.getOrMakeLocal(s) - genLoad(rhs, tk) - lineNumber(tree) - bc.store(idx, tk) + + rhs match { + case Apply(Select(larg: Ident, nme.ADD), Literal(x) :: Nil) + if larg.symbol == s && tk.isIntSizedType && x.isShortRange => + lineNumber(tree) + bc.iinc(idx, x.intValue) + + case Apply(Select(larg: Ident, nme.SUB), Literal(x) :: Nil) + if larg.symbol == s && tk.isIntSizedType && Constant(-x.intValue).isShortRange => + lineNumber(tree) + bc.iinc(idx, -x.intValue) + + case _ => + genLoad(rhs, tk) + lineNumber(tree) + bc.store(idx, tk) + } case _ => genLoad(tree, UNIT) @@ -303,8 +317,8 @@ abstract class BCodeBodyBuilder extends BCodeSkelBuilder { generatedType = genApply(app, expectedType) case app @ ApplyDynamic(qual, Literal(Constant(bootstrapMethodRef: Symbol)) :: staticAndDynamicArgs) => - val numStaticArgs = bootstrapMethodRef.paramss.head.size - 3 /*JVM provided args*/ - val (staticArgs, dynamicArgs) = staticAndDynamicArgs.splitAt(numStaticArgs) + val numDynamicArgs = qual.symbol.info.params.length + val (staticArgs, dynamicArgs) = staticAndDynamicArgs.splitAt(staticAndDynamicArgs.length - numDynamicArgs) val bootstrapDescriptor = staticHandleFromSymbol(bootstrapMethodRef) val bootstrapArgs = staticArgs.map({case t @ Literal(c: Constant) => bootstrapMethodArg(c, t.pos) case x => throw new MatchError(x)}) val descriptor = methodBTypeFromMethodType(qual.symbol.info, false) @@ -953,12 +967,11 @@ abstract class BCodeBodyBuilder extends BCodeSkelBuilder { mbt.descriptor ) } - module.attachments.get[DottyEnumSingleton] match { // TODO [tasty]: dotty enum singletons are not modules. - case Some(enumAttach) => - val enumCompanion = symInfoTK(module.originalOwner).asClassBType - visitAccess(enumCompanion, enumAttach.name) - - case _ => visitAccess(mbt, strMODULE_INSTANCE_FIELD) + if (module.isScala3Defined && module.hasAttachment[DottyEnumSingleton.type]) { // TODO [tasty]: dotty enum singletons are not modules. 
+ val enumCompanion = symInfoTK(module.originalOwner).asClassBType + visitAccess(enumCompanion, module.rawname.toString) + } else { + visitAccess(mbt, strMODULE_INSTANCE_FIELD) } } } @@ -991,44 +1004,110 @@ abstract class BCodeBodyBuilder extends BCodeSkelBuilder { } } + /* Generate string concatenation + * + * On JDK 8: create and append using `StringBuilder` + * On JDK 9+: use `invokedynamic` with `StringConcatFactory` + */ def genStringConcat(tree: Tree): BType = { lineNumber(tree) liftStringConcat(tree) match { - // Optimization for expressions of the form "" + x. We can avoid the StringBuilder. + // Optimization for expressions of the form "" + x case List(Literal(Constant("")), arg) => genLoad(arg, ObjectRef) genCallMethod(String_valueOf, InvokeStyle.Static, arg.pos) case concatenations => - val approxBuilderSize = concatenations.map { - case Literal(Constant(s: String)) => s.length - case Literal(c @ Constant(value)) if c.isNonUnitAnyVal => String.valueOf(c).length - case _ => - // could add some guess based on types of primitive args. - // or, we could stringify all the args onto the stack, compute the exact size of - // the StringBuilder. - // or, just let https://openjdk.java.net/jeps/280 (or a re-implementation thereof in our 2.13.x stdlib) do all the hard work at link time - 0 - }.sum - bc.genStartConcat(tree.pos, approxBuilderSize) - def isEmptyString(t: Tree) = t match { - case Literal(Constant("")) => true - case _ => false - } - for (elem <- concatenations if !isEmptyString(elem)) { - val loadedElem = elem match { + + val concatArguments = concatenations.view + .filter { + case Literal(Constant("")) => false // empty strings are no-ops in concatenation + case _ => true + } + .map { case Apply(boxOp, value :: Nil) if currentRun.runDefinitions.isBox(boxOp.symbol) => // Eliminate boxing of primitive values. Boxing is introduced by erasure because // there's only a single synthetic `+` method "added" to the string class. value + case other => other + } + .toList + + // `StringConcatFactory` only got added in JDK 9, so use `StringBuilder` for lower + if (classfileVersion.get < asm.Opcodes.V9) { + + // Estimate capacity needed for the string builder + val approxBuilderSize = concatArguments.view.map { + case Literal(Constant(s: String)) => s.length + case Literal(c @ Constant(_)) if c.isNonUnitAnyVal => String.valueOf(c).length + case _ => 0 + }.sum + bc.genNewStringBuilder(tree.pos, approxBuilderSize) + + for (elem <- concatArguments) { + val elemType = tpeTK(elem) + genLoad(elem, elemType) + bc.genStringBuilderAppend(elemType, elem.pos) + } + bc.genStringBuilderEnd(tree.pos) + } else { + + /* `StringConcatFactory#makeConcatWithConstants` accepts max 200 argument slots. If + * the string concatenation is longer (unlikely), we spill into multiple calls + */ + val MaxIndySlots = 200 + val TagArg = '\u0001' // indicates a hole (in the recipe string) for an argument + val TagConst = '\u0002' // indicates a hole (in the recipe string) for a constant + + val recipe = new StringBuilder() + val argTypes = Seq.newBuilder[asm.Type] + val constVals = Seq.newBuilder[String] + var totalArgSlots = 0 + var countConcats = 1 // ie. 
1 + how many times we spilled + + for (elem <- concatArguments) { + val tpe = tpeTK(elem) + val elemSlots = tpe.size + + // Unlikely spill case + if (totalArgSlots + elemSlots >= MaxIndySlots) { + bc.genIndyStringConcat(recipe.toString, argTypes.result(), constVals.result()) + countConcats += 1 + totalArgSlots = 0 + recipe.setLength(0) + argTypes.clear() + constVals.clear() + } - case _ => elem + elem match { + case Literal(Constant(s: String)) => + if (s.contains(TagArg) || s.contains(TagConst)) { + totalArgSlots += elemSlots + recipe.append(TagConst) + constVals += s + } else { + recipe.append(s) + } + + case other => + totalArgSlots += elemSlots + recipe.append(TagArg) + val tpe = tpeTK(elem) + argTypes += tpe.toASMType + genLoad(elem, tpe) + } + } + bc.genIndyStringConcat(recipe.toString, argTypes.result(), constVals.result()) + + // If we spilled, generate one final concat + if (countConcats > 1) { + bc.genIndyStringConcat( + TagArg.toString * countConcats, + Seq.fill(countConcats)(StringRef.toASMType), + Seq.empty + ) } - val elemType = tpeTK(loadedElem) - genLoad(loadedElem, elemType) - bc.genConcat(elemType, loadedElem.pos) } - bc.genEndConcat(tree.pos) } StringRef } diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BCodeHelpers.scala b/src/compiler/scala/tools/nsc/backend/jvm/BCodeHelpers.scala index da8f110d5be6..c1eb637d8ae4 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/BCodeHelpers.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/BCodeHelpers.scala @@ -14,14 +14,15 @@ package scala package tools.nsc package backend.jvm +import scala.PartialFunction.cond +import scala.annotation.tailrec import scala.tools.asm -import BackendReporting._ -import scala.tools.asm.ClassWriter +import scala.tools.asm.{ClassWriter, Label} +import scala.tools.nsc.Reporting.WarningCategory import scala.tools.nsc.backend.jvm.BCodeHelpers.ScalaSigBytes +import scala.tools.nsc.backend.jvm.BackendReporting._ import scala.tools.nsc.reporters.NoReporter -import PartialFunction.cond -import scala.annotation.tailrec -import scala.tools.nsc.Reporting.WarningCategory +import scala.util.chaining.scalaUtilChainingOps /* * Traits encapsulating functionality to convert Scala AST Trees into ASM ClassNodes. @@ -31,9 +32,9 @@ import scala.tools.nsc.Reporting.WarningCategory */ abstract class BCodeHelpers extends BCodeIdiomatic { import global._ - import definitions._ import bTypes._ import coreBTypes._ + import definitions._ import genBCode.postProcessor.backendUtils /** @@ -282,7 +283,7 @@ abstract class BCodeHelpers extends BCodeIdiomatic { |""".stripMargin, WarningCategory.Other, sym) - val possibles = (sym.tpe nonPrivateMember nme.main).alternatives + val possibles = sym.tpe.nonPrivateMember(nme.main).alternatives val hasApproximate = possibles.exists(m => cond(m.info) { case MethodType(p :: Nil, _) => p.tpe.typeSymbol == definitions.ArrayClass }) // Before erasure so we can identify generic mains. 
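As a rough, illustrative sketch (hypothetical object names) of the `main` shapes the
advice below distinguishes:

    object ExactMain   { def main(args: Array[String]): Unit = () }   // exact `(Array[String]): Unit` shape
    object GenericMain { def main[T](args: Array[String]): Unit = () } // polymorphic: "main methods cannot be generic"
    object IntMain     { def main(args: Array[String]): Int = 0 }      // non-Unit result: tolerated by Scala runners, per the message below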
@@ -307,16 +308,19 @@ abstract class BCodeHelpers extends BCodeIdiomatic { val mainAdvice = if (hasExact) Nil else possibles.map { m => - m.info match { + val msg = m.info match { case PolyType(_, _) => - ("main methods cannot be generic", m) + "main methods cannot be generic" case MethodType(params, res) if res.typeSymbol :: params exists (_.isAbstractType) => - ("main methods cannot refer to type parameters or abstract types", m) + "main methods cannot refer to type parameters or abstract types" + case MethodType(param :: Nil, _) if definitions.isArrayOfSymbol(param.tpe, StringClass) => + "main methods must have the exact signature `(Array[String]): Unit`, though Scala runners will forgive a non-Unit result" case MethodType(_, _) => - ("main methods must have the exact signature (Array[String])Unit", m) + "main methods must have the exact signature `(Array[String]): Unit`" case tp => - (s"don't know what this is: $tp", m) + s"don't know what this is: $tp" } + (msg, m) } companionAdvice.foreach(msg => warnNoForwarder(msg, hasExact, exactly.fold(alternate)(_.info))) @@ -365,7 +369,7 @@ abstract class BCodeHelpers extends BCodeIdiomatic { */ trait BCPickles { - import scala.reflect.internal.pickling.{ PickleFormat, PickleBuffer } + import scala.reflect.internal.pickling.{PickleBuffer, PickleFormat} val versionPickle = { val vp = new PickleBuffer(new Array[Byte](16), -1, 0) @@ -794,6 +798,7 @@ abstract class BCodeHelpers extends BCodeIdiomatic { mirrorMethod.visitCode() + val codeStart: Label = new Label().tap(mirrorMethod.visitLabel) mirrorMethod.visitFieldInsn(asm.Opcodes.GETSTATIC, moduleName, strMODULE_INSTANCE_FIELD, classBTypeFromSymbol(moduleClass).descriptor) var index = 0 @@ -805,6 +810,13 @@ abstract class BCodeHelpers extends BCodeIdiomatic { mirrorMethod.visitMethodInsn(asm.Opcodes.INVOKEVIRTUAL, moduleName, mirrorMethodName, methodBTypeFromSymbol(m).descriptor, false) mirrorMethod.visitInsn(jReturnType.typedOpcode(asm.Opcodes.IRETURN)) + val codeEnd = new Label().tap(mirrorMethod.visitLabel) + + methodInfo.params.lazyZip(paramJavaTypes).foldLeft(0) { + case (idx, (p, tp)) => + mirrorMethod.visitLocalVariable(p.name.encoded, tp.descriptor, null, codeStart, codeEnd, idx) + idx + tp.size + } mirrorMethod.visitMaxs(0, 0) // just to follow protocol, dummy arguments mirrorMethod.visitEnd() diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BCodeIdiomatic.scala b/src/compiler/scala/tools/nsc/backend/jvm/BCodeIdiomatic.scala index 86c0b83671c4..a2b2a21b365c 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/BCodeIdiomatic.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/BCodeIdiomatic.scala @@ -175,10 +175,11 @@ abstract class BCodeIdiomatic { } // end of method genPrimitiveShift() - /* + /* Creates a new `StringBuilder` instance with the requested capacity + * * can-multi-thread */ - final def genStartConcat(pos: Position, size: Int): Unit = { + final def genNewStringBuilder(pos: Position, size: Int): Unit = { jmethod.visitTypeInsn(Opcodes.NEW, JavaStringBuilderClassName) jmethod.visitInsn(Opcodes.DUP) jmethod.visitLdcInsn(Integer.valueOf(size)) @@ -191,10 +192,11 @@ abstract class BCodeIdiomatic { ) } - /* + /* Issue a call to `StringBuilder#append` for the right element type + * * can-multi-thread */ - def genConcat(elemType: BType, pos: Position): Unit = { + final def genStringBuilderAppend(elemType: BType, pos: Position): Unit = { val paramType: BType = elemType match { case ct: ClassBType if ct.isSubtypeOf(StringRef).get => StringRef case ct: ClassBType if 
ct.isSubtypeOf(jlStringBufferRef).get => jlStringBufferRef @@ -211,13 +213,38 @@ abstract class BCodeIdiomatic { invokevirtual(JavaStringBuilderClassName, "append", bt.descriptor, pos) } - /* + /* Extract the built `String` from the `StringBuilder` + *: * can-multi-thread */ - final def genEndConcat(pos: Position): Unit = { + final def genStringBuilderEnd(pos: Position): Unit = { invokevirtual(JavaStringBuilderClassName, "toString", "()Ljava/lang/String;", pos) } + /* Concatenate top N arguments on the stack with `StringConcatFactory#makeConcatWithConstants` + * (only works for JDK 9+) + * + * can-multi-thread + */ + final def genIndyStringConcat( + recipe: String, + argTypes: Seq[asm.Type], + constants: Seq[String] + ): Unit = { + jmethod.visitInvokeDynamicInsn( + "makeConcatWithConstants", + asm.Type.getMethodDescriptor(StringRef.toASMType, argTypes:_*), + new asm.Handle( + asm.Opcodes.H_INVOKESTATIC, + "java/lang/invoke/StringConcatFactory", + "makeConcatWithConstants", + "(Ljava/lang/invoke/MethodHandles$Lookup;Ljava/lang/String;Ljava/lang/invoke/MethodType;Ljava/lang/String;[Ljava/lang/Object;)Ljava/lang/invoke/CallSite;", + false + ), + (recipe +: constants):_* + ) + } + /* * Emits one or more conversion instructions based on the types given as arguments. * @@ -365,6 +392,7 @@ abstract class BCodeIdiomatic { final def load( idx: Int, tk: BType): Unit = { emitVarInsn(Opcodes.ILOAD, idx, tk) } // can-multi-thread final def store(idx: Int, tk: BType): Unit = { emitVarInsn(Opcodes.ISTORE, idx, tk) } // can-multi-thread + final def iinc( idx: Int, increment: Int): Unit = jmethod.visitIincInsn(idx, increment) // can-multi-thread final def aload( tk: BType): Unit = { emitTypeBased(JCodeMethodN.aloadOpcodes, tk) } // can-multi-thread final def astore(tk: BType): Unit = { emitTypeBased(JCodeMethodN.astoreOpcodes, tk) } // can-multi-thread diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BCodeSkelBuilder.scala b/src/compiler/scala/tools/nsc/backend/jvm/BCodeSkelBuilder.scala index 41c34d056ad2..952a33fc6554 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/BCodeSkelBuilder.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/BCodeSkelBuilder.scala @@ -478,8 +478,7 @@ abstract class BCodeSkelBuilder extends BCodeHelpers { def isAtProgramPoint(lbl: asm.Label): Boolean = { (lastInsn match { case labnode: asm.tree.LabelNode => (labnode.getLabel == lbl); case _ => false } ) } - def lineNumber(tree: Tree): Unit = { - if (!emitLines || !tree.pos.isDefined) return + def lineNumber(tree: Tree): Unit = if (emitLines && tree.pos.isDefined) { val nr = tree.pos.finalPosition.line if (nr != lastEmittedLineNr) { lastEmittedLineNr = nr @@ -644,7 +643,7 @@ abstract class BCodeSkelBuilder extends BCodeHelpers { case Return(_) | Block(_, Return(_)) | Throw(_) | Block(_, Throw(_)) => () case EmptyTree => globalError("Concrete method has no definition: " + dd + ( - if (settings.debug) "(found: " + methSymbol.owner.info.decls.toList.mkString(", ") + ")" + if (settings.isDebug) "(found: " + methSymbol.owner.info.decls.toList.mkString(", ") + ")" else "")) case _ => bc emitRETURN returnType diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BTypesFromSymbols.scala b/src/compiler/scala/tools/nsc/backend/jvm/BTypesFromSymbols.scala index 149c29a96ac2..e29cd3e02492 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/BTypesFromSymbols.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/BTypesFromSymbols.scala @@ -91,9 +91,9 @@ abstract class BTypesFromSymbols[G <: Global](val global: G) extends BTypes { 
assert(classSym != NoSymbol, "Cannot create ClassBType from NoSymbol") assert(classSym.isClass, s"Cannot create ClassBType from non-class symbol $classSym") - if (global.settings.debug) { - // OPT these assertions have too much performance overhead to run unconditionally - assertClassNotArrayNotPrimitive(classSym) + // note: classSym can be scala.Array, see https://github.com/scala/bug/issues/12225#issuecomment-729687859 + if (global.settings.isDebug) { + // OPT this assertion has too much performance overhead to run unconditionally assert(!primitiveTypeToBType.contains(classSym) || isCompilingPrimitive, s"Cannot create ClassBType for primitive class symbol $classSym") } @@ -138,8 +138,15 @@ abstract class BTypesFromSymbols[G <: Global](val global: G) extends BTypes { } def bootstrapMethodArg(t: Constant, pos: Position): AnyRef = t match { - case Constant(mt: Type) => methodBTypeFromMethodType(transformedType(mt), isConstructor = false).toASMType - case c @ Constant(sym: Symbol) => staticHandleFromSymbol(sym) + case Constant(mt: Type) => + transformedType(mt) match { + case mt1: MethodType => + methodBTypeFromMethodType(mt1, isConstructor = false).toASMType + case t => + typeToBType(t).toASMType + } + case c @ Constant(sym: Symbol) if sym.owner.isJavaDefined && sym.isStaticMember => staticHandleFromSymbol(sym) + case c @ Constant(sym: Symbol) => handleFromMethodSymbol(sym) case c @ Constant(value: String) => value case c @ Constant(value) if c.isNonUnitAnyVal => c.value.asInstanceOf[AnyRef] case _ => reporter.error(pos, "Unable to convert static argument of ApplyDynamic into a classfile constant: " + t); null @@ -157,6 +164,23 @@ abstract class BTypesFromSymbols[G <: Global](val global: G) extends BTypes { new asm.Handle(asm.Opcodes.H_INVOKESTATIC, ownerInternalName, sym.name.encoded, descriptor, isInterface) } + def handleFromMethodSymbol(sym: Symbol): asm.Handle = { + val isConstructor = (sym.isClassConstructor) + val descriptor = methodBTypeFromMethodType(sym.info, isConstructor).descriptor + val ownerBType = classBTypeFromSymbol(sym.owner) + val rawInternalName = ownerBType.internalName + val ownerInternalName = rawInternalName + val isInterface = sym.owner.isTraitOrInterface + val tag = + if (sym.isStaticMember) { + if (sym.owner.isJavaDefined) throw new UnsupportedOperationException("handled by staticHandleFromSymbol") + else asm.Opcodes.H_INVOKESTATIC + } else if (isConstructor) asm.Opcodes.H_NEWINVOKESPECIAL + else if (isInterface) asm.Opcodes.H_INVOKEINTERFACE + else asm.Opcodes.H_INVOKEVIRTUAL + new asm.Handle(tag, ownerInternalName, if (isConstructor) sym.name.toString else sym.name.encoded, descriptor, isInterface) + } + /** * This method returns the BType for a type reference, for example a parameter type. 
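+   * For example (illustrative): `Int` maps to the primitive `INT`, `String` to the
+   * `ClassBType` for `java/lang/String`, and `Array[Int]` to the array type with
+   * descriptor `[I`.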
*/ @@ -221,11 +245,6 @@ abstract class BTypesFromSymbols[G <: Global](val global: G) extends BTypes { assert(sym != definitions.ArrayClass || isCompilingArray, sym) } - def assertClassNotArrayNotPrimitive(sym: Symbol): Unit = { - assertClassNotArray(sym) - assert(!primitiveTypeToBType.contains(sym) || isCompilingPrimitive, sym) - } - def implementedInterfaces(classSym: Symbol): List[Symbol] = { def isInterfaceOrTrait(sym: Symbol) = sym.isInterface || sym.isTrait diff --git a/src/compiler/scala/tools/nsc/backend/jvm/ClassfileWriters.scala b/src/compiler/scala/tools/nsc/backend/jvm/ClassfileWriters.scala index 716a1d6de31f..15bce5921204 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/ClassfileWriters.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/ClassfileWriters.scala @@ -63,7 +63,7 @@ abstract class ClassfileWriters { def apply(global: Global): ClassfileWriter = { //Note dont import global._ - its too easy to leak non threadsafe structures - import global.{cleanup, log, settings, statistics} + import global.{ cleanup, log, settings } def jarManifestMainClass: Option[String] = settings.mainClass.valueSetByUser.orElse { cleanup.getEntryPoints match { case List(name) => Some(name) @@ -91,7 +91,7 @@ abstract class ClassfileWriters { new DebugClassWriter(basicClassWriter, asmp, dump) } - val enableStats = statistics.enabled && settings.YaddBackendThreads.value == 1 + val enableStats = settings.areStatisticsEnabled && settings.YaddBackendThreads.value == 1 if (enableStats) new WithStatsWriter(withAdditionalFormats) else withAdditionalFormats } diff --git a/src/compiler/scala/tools/nsc/backend/jvm/CodeGen.scala b/src/compiler/scala/tools/nsc/backend/jvm/CodeGen.scala index 72cd7a0d5ca7..2765c063f17f 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/CodeGen.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/CodeGen.scala @@ -50,7 +50,7 @@ abstract class CodeGen[G <: Global](val global: G) extends PerRunInit { } catch { case ex: InterruptedException => throw ex case ex: Throwable => - if (settings.debug) ex.printStackTrace() + if (settings.isDebug) ex.printStackTrace() globalError(s"Error while emitting ${unit.source}\n${ex.getMessage}") } diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GeneratedClassHandler.scala b/src/compiler/scala/tools/nsc/backend/jvm/GeneratedClassHandler.scala index beec1ade9d06..945d9b539bca 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/GeneratedClassHandler.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/GeneratedClassHandler.scala @@ -59,8 +59,14 @@ private[jvm] object GeneratedClassHandler { new SyncWritingClassHandler(postProcessor) case maxThreads => - if (statistics.enabled) - runReporting.warning(NoPosition, "jvm statistics are not reliable with multi-threaded jvm class writing", WarningCategory.Other, site = "") + if (settings.areStatisticsEnabled) + runReporting.warning( + NoPosition, + "JVM statistics are not reliable with multi-threaded JVM class writing.\n" + + "To collect compiler statistics remove the " + settings.YaddBackendThreads.name + " setting.", + WarningCategory.Other, + site = "" + ) val additionalThreads = maxThreads - 1 // The thread pool queue is limited in size. When it's full, the `CallerRunsPolicy` causes // a new task to be executed on the main thread, which provides back-pressure. 
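A minimal, self-contained sketch of the back-pressure pattern described above; the
queue bound and thread count are made-up values, not the compiler's:

    import java.util.concurrent.{ArrayBlockingQueue, ThreadPoolExecutor, TimeUnit}

    object BackPressureSketch {
      def main(args: Array[String]): Unit = {
        val workers = 3                                    // e.g. maxThreads - 1
        val queue   = new ArrayBlockingQueue[Runnable](64) // bounded task queue
        val pool = new ThreadPoolExecutor(
          workers, workers, 0L, TimeUnit.MILLISECONDS, queue,
          // When the queue is full, the task runs on the submitting thread,
          // slowing the producer down instead of growing the queue.
          new ThreadPoolExecutor.CallerRunsPolicy)
        (1 to 1000).foreach(_ => pool.execute(() => Thread.sleep(1)))
        pool.shutdown()
      }
    }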
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/PostProcessorFrontendAccess.scala b/src/compiler/scala/tools/nsc/backend/jvm/PostProcessorFrontendAccess.scala index b9ec6a85f060..748a8f3cc75a 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/PostProcessorFrontendAccess.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/PostProcessorFrontendAccess.scala @@ -184,7 +184,7 @@ object PostProcessorFrontendAccess { private def buildCompilerSettings(): CompilerSettings = new CompilerSettings { import global.{settings => s} - val debug: Boolean = s.debug + @inline def debug: Boolean = s.isDebug val target: String = s.target.value diff --git a/src/compiler/scala/tools/nsc/backend/jvm/analysis/BackendUtils.scala b/src/compiler/scala/tools/nsc/backend/jvm/analysis/BackendUtils.scala index 0e6939a97fd3..b86d33a16ce2 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/analysis/BackendUtils.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/analysis/BackendUtils.scala @@ -81,6 +81,7 @@ abstract class BackendUtils extends PerRunInit { case "15" => asm.Opcodes.V15 case "16" => asm.Opcodes.V16 case "17" => asm.Opcodes.V17 + case "18" => asm.Opcodes.V18 // to be continued... }) diff --git a/src/compiler/scala/tools/nsc/classpath/DirectoryClassPath.scala b/src/compiler/scala/tools/nsc/classpath/DirectoryClassPath.scala index cc197035ede9..2f4cd0941477 100644 --- a/src/compiler/scala/tools/nsc/classpath/DirectoryClassPath.scala +++ b/src/compiler/scala/tools/nsc/classpath/DirectoryClassPath.scala @@ -130,6 +130,8 @@ trait JFileDirectoryLookup[FileEntryType <: ClassRepresentation] extends Directo object JrtClassPath { import java.nio.file._, java.net.URI + private val jrtClassPathCache = new FileBasedCache[Unit, JrtClassPath]() + private val ctSymClassPathCache = new FileBasedCache[String, CtSymClassPath]() def apply(release: Option[String], closeableRegistry: CloseableRegistry): Option[ClassPath] = { import scala.util.Properties._ if (!isJavaAtLeast("9")) None @@ -148,8 +150,7 @@ object JrtClassPath { val ctSym = Paths.get(javaHome).resolve("lib").resolve("ct.sym") if (Files.notExists(ctSym)) None else { - val classPath = new CtSymClassPath(ctSym, v.toInt) - closeableRegistry.registerCloseable(classPath) + val classPath = ctSymClassPathCache.getOrCreate(v, ctSym :: Nil, () => new CtSymClassPath(ctSym, v.toInt), closeableRegistry, true) Some(classPath) } } catch { @@ -158,7 +159,8 @@ object JrtClassPath { case _ => try { val fs = FileSystems.getFileSystem(URI.create("jrt:/")) - Some(new JrtClassPath(fs)) + val classPath = jrtClassPathCache.getOrCreate((), Nil, () => new JrtClassPath(fs), closeableRegistry, false) + Some(classPath) } catch { case _: ProviderNotFoundException | _: FileSystemNotFoundException => None } diff --git a/src/compiler/scala/tools/nsc/classpath/ZipAndJarFileLookupFactory.scala b/src/compiler/scala/tools/nsc/classpath/ZipAndJarFileLookupFactory.scala index c8c9d7354213..fcf73b72cb29 100644 --- a/src/compiler/scala/tools/nsc/classpath/ZipAndJarFileLookupFactory.scala +++ b/src/compiler/scala/tools/nsc/classpath/ZipAndJarFileLookupFactory.scala @@ -18,7 +18,6 @@ import java.nio.file.Files import java.nio.file.attribute.{BasicFileAttributes, FileTime} import java.util.{Timer, TimerTask} import java.util.concurrent.atomic.AtomicInteger - import scala.annotation.tailrec import scala.collection.mutable import scala.reflect.io.{AbstractFile, FileZipArchive, ManifestResources} @@ -33,21 +32,23 @@ import scala.tools.nsc.io.Jar * when there are a lot of projects having a lot of 
common dependencies. */ sealed trait ZipAndJarFileLookupFactory { - private val cache = new FileBasedCache[ClassPath with Closeable] + case class ZipSettings(releaseValue: Option[String]) + private val cache = new FileBasedCache[ZipSettings, ClassPath with Closeable] def create(zipFile: AbstractFile, settings: Settings, closeableRegistry: CloseableRegistry): ClassPath = { val disabled = (settings.YdisableFlatCpCaching.value && !settings.YforceFlatCpCaching.value) || zipFile.file == null + val zipSettings = ZipSettings(settings.releaseValue) cache.checkCacheability(zipFile.toURL :: Nil, checkStamps = true, disableCache = disabled) match { case Left(_) => - val result: ClassPath with Closeable = createForZipFile(zipFile, settings.releaseValue) + val result: ClassPath with Closeable = createForZipFile(zipFile, zipSettings) closeableRegistry.registerCloseable(result) result case Right(paths) => - cache.getOrCreate(paths, () => createForZipFile(zipFile, settings.releaseValue), closeableRegistry, checkStamps = true) + cache.getOrCreate(zipSettings, paths, () => createForZipFile(zipFile, zipSettings), closeableRegistry, checkStamps = true) } } - protected def createForZipFile(zipFile: AbstractFile, release: Option[String]): ClassPath with Closeable + protected def createForZipFile(zipFile: AbstractFile, zipSettings: ZipSettings): ClassPath with Closeable } /** @@ -153,12 +154,9 @@ object ZipAndJarClassPathFactory extends ZipAndJarFileLookupFactory { case class PackageInfo(packageName: String, subpackages: List[AbstractFile]) } - override protected def createForZipFile(zipFile: AbstractFile, release: Option[String]): ClassPath with Closeable = + override protected def createForZipFile(zipFile: AbstractFile, zipSettings: ZipSettings): ClassPath with Closeable = if (zipFile.file == null) createWithoutUnderlyingFile(zipFile) - else { - JrtClassPath - ZipArchiveClassPath(zipFile.file, release) - } + else ZipArchiveClassPath(zipFile.file, zipSettings.releaseValue) private def createWithoutUnderlyingFile(zipFile: AbstractFile) = zipFile match { case manifestRes: ManifestResources => @@ -187,15 +185,15 @@ object ZipAndJarSourcePathFactory extends ZipAndJarFileLookupFactory { override protected def isRequiredFileType(file: AbstractFile): Boolean = file.isScalaOrJavaSource } - override protected def createForZipFile(zipFile: AbstractFile, release: Option[String]): ClassPath with Closeable = ZipArchiveSourcePath(zipFile.file) + override protected def createForZipFile(zipFile: AbstractFile, zipSettings: ZipSettings): ClassPath with Closeable = ZipArchiveSourcePath(zipFile.file) } -final class FileBasedCache[T] { +final class FileBasedCache[K, T] { import java.nio.file.Path private val NoFileKey = new Object private case class Stamp(lastModified: FileTime, size: Long, fileKey: Object) - private case class Entry(stamps: Seq[Stamp], t: T) { + private case class Entry(k: K, stamps: Seq[Stamp], t: T) { val referenceCount: AtomicInteger = new AtomicInteger(1) var timerTask: TimerTask = null def cancelTimer(): Unit = { @@ -205,9 +203,9 @@ final class FileBasedCache[T] { } } } - private val cache = collection.mutable.Map.empty[Seq[Path], Entry] + private val cache = collection.mutable.Map.empty[(K, Seq[Path]), Entry] - private def referenceCountDecrementer(e: Entry, paths: Seq[Path]): Closeable = { + private def referenceCountDecrementer(e: Entry, key: (K, Seq[Path])): Closeable = { // Cancel the deferred close timer (if any) that was started when the reference count // last dropped to zero. 
e.cancelTimer() @@ -227,7 +225,7 @@ final class FileBasedCache[T] { override def run(): Unit = { cache.synchronized { if (e.referenceCount.compareAndSet(0, -1)) { - cache.remove(paths) + cache.remove(key) cl.close() } } @@ -259,7 +257,7 @@ final class FileBasedCache[T] { } } - def getOrCreate(paths: Seq[Path], create: () => T, closeableRegistry: CloseableRegistry, checkStamps: Boolean): T = cache.synchronized { + def getOrCreate(k: K, paths: Seq[Path], create: () => T, closeableRegistry: CloseableRegistry, checkStamps: Boolean): T = cache.synchronized { val stamps = if (!checkStamps) Nil else paths.map { path => try { val attrs = Files.readAttributes(path, classOf[BasicFileAttributes]) @@ -273,14 +271,15 @@ final class FileBasedCache[T] { Stamp(FileTime.fromMillis(0), -1, new Object) } } + val key = (k, paths) - cache.get(paths) match { - case Some(e@Entry(cachedStamps, cached)) => + cache.get(key) match { + case Some(e@Entry(k1, cachedStamps, cached)) => if (!checkStamps || cachedStamps == stamps) { // Cache hit val count = e.referenceCount.incrementAndGet() assert(count > 0, (stamps, count)) - closeableRegistry.registerCloseable(referenceCountDecrementer(e, paths)) + closeableRegistry.registerCloseable(referenceCountDecrementer(e, (k1, paths))) cached } else { // Cache miss: we found an entry but the underlying files have been modified @@ -294,17 +293,17 @@ final class FileBasedCache[T] { case x => throw new MatchError(x) } val value = create() - val entry = Entry(stamps, value) - cache.put(paths, entry) - closeableRegistry.registerCloseable(referenceCountDecrementer(entry, paths)) + val entry = Entry(k, stamps, value) + cache.put(key, entry) + closeableRegistry.registerCloseable(referenceCountDecrementer(entry, key)) value } case _ => // Cache miss val value = create() - val entry = Entry(stamps, value) - cache.put(paths, entry) - closeableRegistry.registerCloseable(referenceCountDecrementer(entry, paths)) + val entry = Entry(k, stamps, value) + cache.put(key, entry) + closeableRegistry.registerCloseable(referenceCountDecrementer(entry, key)) value } } diff --git a/src/compiler/scala/tools/nsc/javac/JavaParsers.scala b/src/compiler/scala/tools/nsc/javac/JavaParsers.scala index f2b820256630..2049693a81f3 100644 --- a/src/compiler/scala/tools/nsc/javac/JavaParsers.scala +++ b/src/compiler/scala/tools/nsc/javac/JavaParsers.scala @@ -118,6 +118,8 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners { def javaLangObject(): Tree = javaLangDot(tpnme.Object) + def javaLangRecord(): Tree = javaLangDot(tpnme.Record) + def arrayOf(tpt: Tree) = AppliedTypeTree(scalaDot(tpnme.Array), List(tpt)) @@ -564,6 +566,16 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners { def definesInterface(token: Int) = token == INTERFACE || token == AT + /** If the next token is the identifier "record", convert it into a proper + * token. Technically, "record" is just a restricted identifier. However, + * once we've figured out that it is in a position where it identifies a + * "record" class, it is much more convenient to promote it to a token. 
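+   *
+   * For example (illustrative): in `record Point(int x, int y) {}` the identifier is
+   * promoted to a RECORD token, while an ordinary identifier spelled `record` (for
+   * example a field name) is left alone.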
+ */ + def adaptRecordIdentifier(): Unit = { + if (in.token == IDENTIFIER && in.name == nme.javaRestrictedIdentifiers.RECORD) + in.token = RECORD + } + def termDecl(mods: Modifiers, parentToken: Int): List[Tree] = { val inInterface = definesInterface(parentToken) val tparams = if (in.token == LT) typeParams() else List() @@ -587,6 +599,10 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners { DefDef(mods, nme.CONSTRUCTOR, tparams, List(vparams), TypeTree(), methodBody()) } } + } else if (in.token == LBRACE && rtptName != nme.EMPTY && parentToken == RECORD) { + // compact constructor + methodBody() + List.empty } else { var mods1 = mods if (mods hasFlag Flags.ABSTRACT) mods1 = mods &~ Flags.ABSTRACT | Flags.DEFERRED @@ -597,7 +613,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners { val vparams = formalParams() if (!isVoid) rtpt = optArrayBrackets(rtpt) optThrows() - val isConcreteInterfaceMethod = !inInterface || (mods hasFlag Flags.JAVA_DEFAULTMETHOD) || (mods hasFlag Flags.STATIC) + val isConcreteInterfaceMethod = !inInterface || (mods hasFlag Flags.JAVA_DEFAULTMETHOD) || (mods hasFlag Flags.STATIC) || (mods hasFlag Flags.PRIVATE) val bodyOk = !(mods1 hasFlag Flags.DEFERRED) && isConcreteInterfaceMethod val body = if (bodyOk && in.token == LBRACE) { @@ -721,11 +737,13 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners { } } - def memberDecl(mods: Modifiers, parentToken: Int): List[Tree] = in.token match { - case CLASS | ENUM | INTERFACE | AT => - typeDecl(if (definesInterface(parentToken)) mods | Flags.STATIC else mods) - case _ => - termDecl(mods, parentToken) + def memberDecl(mods: Modifiers, parentToken: Int): List[Tree] = { + in.token match { + case CLASS | ENUM | RECORD | INTERFACE | AT => + typeDecl(mods) + case _ => + termDecl(mods, parentToken) + } } def makeCompanionObject(cdef: ClassDef, statics: List[Tree]): Tree = @@ -734,11 +752,6 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners { makeTemplate(List(), statics)) } - def importCompanionObject(cdef: ClassDef): Tree = - atPos(cdef.pos) { - Import(Ident(cdef.name.toTermName), ImportSelector.wildList) - } - def addCompanionObject(statics: List[Tree], cdef: ClassDef): List[Tree] = List(makeCompanionObject(cdef, statics), cdef) @@ -802,12 +815,57 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners { javaLangObject() } val interfaces = interfacesOpt() - val (statics, body) = typeBody(CLASS, name) + val (statics, body) = typeBody(CLASS) addCompanionObject(statics, atPos(pos) { ClassDef(mods, name, tparams, makeTemplate(superclass :: interfaces, body)) }) } + def recordDecl(mods: Modifiers): List[Tree] = { + accept(RECORD) + val pos = in.currentPos + val name = identForType() + val tparams = typeParams() + val header = formalParams() + val superclass = javaLangRecord() + val interfaces = interfacesOpt() + val (statics, body) = typeBody(RECORD) + + // Generate accessors, if not already manually specified + var generateAccessors = header + .view + .map { case ValDef(mods, name, tpt, _) => (name, (tpt, mods.annotations)) } + .toMap + for (DefDef(_, name, List(), List(params), _, _) <- body if generateAccessors.contains(name) && params.isEmpty) + generateAccessors -= name + + val accessors = generateAccessors + .map { case (name, (tpt, annots)) => + DefDef(Modifiers(Flags.JAVA) withAnnotations annots, name, List(), List(), tpt.duplicate, blankExpr) + } + .toList + + // Generate canonical constructor. 
During parsing this is done unconditionally but the symbol + // is unlinked in Namer if it is found to clash with a manually specified constructor. + val canonicalCtor = DefDef( + mods | Flags.SYNTHETIC, + nme.CONSTRUCTOR, + List(), + List(header.map(_.duplicate)), + TypeTree(), + blankExpr + ) + + addCompanionObject(statics, atPos(pos) { + ClassDef( + mods | Flags.FINAL, + name, + tparams, + makeTemplate(superclass :: interfaces, canonicalCtor :: accessors ::: body) + ) + }) + } + def interfaceDecl(mods: Modifiers): List[Tree] = { accept(INTERFACE) val pos = in.currentPos @@ -820,7 +878,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners { } else { List(javaLangObject()) } - val (statics, body) = typeBody(INTERFACE, name) + val (statics, body) = typeBody(INTERFACE) addCompanionObject(statics, atPos(pos) { ClassDef(mods | Flags.TRAIT | Flags.INTERFACE | Flags.ABSTRACT, name, tparams, @@ -828,14 +886,14 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners { }) } - def typeBody(leadingToken: Int, parentName: Name): (List[Tree], List[Tree]) = { + def typeBody(leadingToken: Int): (List[Tree], List[Tree]) = { accept(LBRACE) - val defs = typeBodyDecls(leadingToken, parentName) + val defs = typeBodyDecls(leadingToken) accept(RBRACE) defs } - def typeBodyDecls(parentToken: Int, parentName: Name): (List[Tree], List[Tree]) = { + def typeBodyDecls(parentToken: Int): (List[Tree], List[Tree]) = { val inInterface = definesInterface(parentToken) val statics = new ListBuffer[Tree] val members = new ListBuffer[Tree] @@ -847,7 +905,11 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners { } else if (in.token == SEMI) { in.nextToken() } else { - if (in.token == ENUM || definesInterface(in.token)) mods |= Flags.STATIC + + // See "14.3. 
Local Class and Interface Declarations" + adaptRecordIdentifier() + if (in.token == ENUM || in.token == RECORD || definesInterface(in.token)) + mods |= Flags.STATIC val decls = joinComment(memberDecl(mods, parentToken)) @tailrec @@ -871,7 +933,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners { accept(INTERFACE) val pos = in.currentPos val name = identForType() - val (statics, body) = typeBody(AT, name) + val (statics, body) = typeBody(AT) val templ = makeTemplate(annotationParents, body) addCompanionObject(statics, atPos(pos) { import Flags._ @@ -908,7 +970,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners { val (statics, body) = if (in.token == SEMI) { in.nextToken() - typeBodyDecls(ENUM, name) + typeBodyDecls(ENUM) } else { (List(), List()) } @@ -956,12 +1018,16 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners { (res, hasClassBody) } - def typeDecl(mods: Modifiers): List[Tree] = in.token match { - case ENUM => joinComment(enumDecl(mods)) - case INTERFACE => joinComment(interfaceDecl(mods)) - case AT => annotationDecl(mods) - case CLASS => joinComment(classDecl(mods)) - case _ => in.nextToken(); syntaxError("illegal start of type declaration", skipIt = true); List(errorTypeTree) + def typeDecl(mods: Modifiers): List[Tree] = { + adaptRecordIdentifier() + in.token match { + case ENUM => joinComment(enumDecl(mods)) + case INTERFACE => joinComment(interfaceDecl(mods)) + case AT => annotationDecl(mods) + case CLASS => joinComment(classDecl(mods)) + case RECORD => joinComment(recordDecl(mods)) + case _ => in.nextToken(); syntaxError("illegal start of type declaration", skipIt = true); List(errorTypeTree) + } } def tryLiteral(negate: Boolean = false): Option[Constant] = { diff --git a/src/compiler/scala/tools/nsc/javac/JavaScanners.scala b/src/compiler/scala/tools/nsc/javac/JavaScanners.scala index 3f8ee1166a08..770e680012c0 100644 --- a/src/compiler/scala/tools/nsc/javac/JavaScanners.scala +++ b/src/compiler/scala/tools/nsc/javac/JavaScanners.scala @@ -239,6 +239,9 @@ trait JavaScanners extends ast.parser.ScannersCommon { */ protected def putChar(c: Char): Unit = { cbuf.append(c) } + /** Remove the last N characters from the buffer */ + private def popNChars(n: Int): Unit = if (n > 0) cbuf.setLength(cbuf.length - n) + /** Clear buffer and set name */ private def setName(): Unit = { name = newTermName(cbuf.toString()) @@ -322,15 +325,26 @@ trait JavaScanners extends ast.parser.ScannersCommon { case '\"' => in.next() - while (in.ch != '\"' && (in.isUnicode || in.ch != CR && in.ch != LF && in.ch != SU)) { - getlitch() - } - if (in.ch == '\"') { - token = STRINGLIT - setName() - in.next() + if (in.ch != '\"') { // "..." 
non-empty string literal + while (in.ch != '\"' && (in.isUnicode || in.ch != CR && in.ch != LF && in.ch != SU)) { + getlitch() + } + if (in.ch == '\"') { + token = STRINGLIT + setName() + in.next() + } else { + syntaxError("unclosed string literal") + } } else { - syntaxError("unclosed string literal") + in.next() + if (in.ch != '\"') { // "" empty string literal + token = STRINGLIT + setName() + } else { + in.next() + getTextBlock() + } } return @@ -664,9 +678,12 @@ trait JavaScanners extends ast.parser.ScannersCommon { // Literals ----------------------------------------------------------------- /** read next character in character or string literal: - */ - protected def getlitch() = - if (in.ch == '\\') { + * + * @param scanOnly skip emitting errors or adding to the literal buffer + * @param inTextBlock is this for a text block? + */ + protected def getlitch(scanOnly: Boolean = false, inTextBlock: Boolean = false): Unit = { + val c: Char = if (in.ch == '\\') { in.next() if ('0' <= in.ch && in.ch <= '7') { val leadch: Char = in.ch @@ -680,27 +697,147 @@ trait JavaScanners extends ast.parser.ScannersCommon { in.next() } } - putChar(oct.asInstanceOf[Char]) + oct.asInstanceOf[Char] } else { - in.ch match { - case 'b' => putChar('\b') - case 't' => putChar('\t') - case 'n' => putChar('\n') - case 'f' => putChar('\f') - case 'r' => putChar('\r') - case '\"' => putChar('\"') - case '\'' => putChar('\'') - case '\\' => putChar('\\') + val c: Char = in.ch match { + case 'b' => '\b' + case 's' => ' ' + case 't' => '\t' + case 'n' => '\n' + case 'f' => '\f' + case 'r' => '\r' + case '\"' => '\"' + case '\'' => '\'' + case '\\' => '\\' + case CR | LF if inTextBlock => + in.next() + return case _ => - syntaxError(in.cpos - 1, "invalid escape character") - putChar(in.ch) + if (!scanOnly) syntaxError(in.cpos - 1, "invalid escape character") + in.ch } in.next() + c } } else { - putChar(in.ch) + val c = in.ch in.next() + c } + if (!scanOnly) putChar(c) + } + + /** read a triple-quote delimited text block, starting after the first three + * double quotes + */ + private def getTextBlock(): Unit = { + // Open delimiter is followed by optional space, then a newline + while (in.ch == ' ' || in.ch == '\t' || in.ch == FF) { + in.next() + } + if (in.ch != LF && in.ch != CR) { // CR-LF is already normalized into LF by `JavaCharArrayReader` + syntaxError("illegal text block open delimiter sequence, missing line terminator") + return + } + in.next() + + /* Do a lookahead scan over the full text block to: + * - compute common white space prefix + * - find the offset where the text block ends + */ + var commonWhiteSpacePrefix = Int.MaxValue + var blockEndOffset = 0 + val backtrackTo = in.copy + var blockClosed = false + var lineWhiteSpacePrefix = 0 + var lineIsOnlyWhitespace = true + while (!blockClosed && (in.isUnicode || in.ch != SU)) { + if (in.ch == '\"') { // Potential end of the block + in.next() + if (in.ch == '\"') { + in.next() + if (in.ch == '\"') { + blockClosed = true + commonWhiteSpacePrefix = commonWhiteSpacePrefix min lineWhiteSpacePrefix + blockEndOffset = in.cpos - 2 + } + } + + // Not the end of the block - just a single or double " character + if (!blockClosed) { + lineIsOnlyWhitespace = false + } + } else if (in.ch == CR || in.ch == LF) { // new line in the block + in.next() + if (!lineIsOnlyWhitespace) { + commonWhiteSpacePrefix = commonWhiteSpacePrefix min lineWhiteSpacePrefix + } + lineWhiteSpacePrefix = 0 + lineIsOnlyWhitespace = true + } else if (lineIsOnlyWhitespace && 
Character.isWhitespace(in.ch)) { // extend white space prefix + in.next() + lineWhiteSpacePrefix += 1 + } else { + lineIsOnlyWhitespace = false + getlitch(scanOnly = true, inTextBlock = true) + } + } + + // Bail out if the block never did have an end + if (!blockClosed) { + syntaxError("unclosed text block") + return + } + + // Second pass: construct the literal string value this time + in = backtrackTo + while (in.cpos < blockEndOffset) { + // Drop the line's leading whitespace + var remainingPrefix = commonWhiteSpacePrefix + while (remainingPrefix > 0 && in.ch != CR && in.ch != LF && in.cpos < blockEndOffset) { + in.next() + remainingPrefix -= 1 + } + + var trailingWhitespaceLength = 0 + var escapedNewline = false // Does the line end with `\`? + while (in.ch != CR && in.ch != LF && in.cpos < blockEndOffset && !escapedNewline) { + if (Character.isWhitespace(in.ch)) { + trailingWhitespaceLength += 1 + } else { + trailingWhitespaceLength = 0 + } + + // Detect if the line is about to end with `\` + if (in.ch == '\\' && { + val lookahead = in.copy + lookahead.next() + lookahead.ch == CR || lookahead.ch == LF + }) { + escapedNewline = true + } + + getlitch(scanOnly = false, inTextBlock = true) + } + + // Drop the line's trailing whitespace + popNChars(trailingWhitespaceLength) + + // Normalize line terminators + if ((in.ch == CR || in.ch == LF) && !escapedNewline) { + in.next() + putChar('\n') + } + } + + token = STRINGLIT + setName() + + // Trailing """ + in.next() + in.next() + in.next() + } /** read fractional part and exponent of floating point number * if one is present. diff --git a/src/compiler/scala/tools/nsc/javac/JavaTokens.scala b/src/compiler/scala/tools/nsc/javac/JavaTokens.scala index 855fe19e6706..a124d1b90aaa 100644 --- a/src/compiler/scala/tools/nsc/javac/JavaTokens.scala +++ b/src/compiler/scala/tools/nsc/javac/JavaTokens.scala @@ -20,6 +20,7 @@ object JavaTokens extends ast.parser.CommonTokens { /** identifiers */ final val IDENTIFIER = 10 + final val RECORD = 12 // restricted identifier, so not lexed directly def isIdentifier(code: Int) = code == IDENTIFIER diff --git a/src/compiler/scala/tools/nsc/plugins/Plugin.scala b/src/compiler/scala/tools/nsc/plugins/Plugin.scala index 888c707a7c25..5588f1c8ff6a 100644 --- a/src/compiler/scala/tools/nsc/plugins/Plugin.scala +++ b/src/compiler/scala/tools/nsc/plugins/Plugin.scala @@ -111,7 +111,7 @@ object Plugin { val PluginXML = "scalac-plugin.xml" - private[nsc] val pluginClassLoadersCache = new FileBasedCache[ScalaClassLoader.URLClassLoader]() + private[nsc] val pluginClassLoadersCache = new FileBasedCache[Unit, ScalaClassLoader.URLClassLoader]() type AnyClass = Class[_] @@ -142,19 +142,14 @@ object Plugin { ignoring: List[String], findPluginClassloader: (Seq[Path] => ClassLoader)): List[Try[AnyClass]] = { - def targeted(targets: List[List[Path]]) = targets.map { path => - val loader = findPluginClassloader(path) + def pluginResource(classpath: List[Path], loader: ClassLoader) = loader.getResource(PluginXML) match { - case null => Failure(new MissingPluginException(path)) + case null => Failure(new MissingPluginException(classpath)) case url => val inputStream = url.openStream - try { - Try((PluginDescription.fromXML(inputStream), loader)) - } finally { - inputStream.close() - } + try Try((PluginDescription.fromXML(inputStream), loader)) finally inputStream.close() } - } + def targeted(targets: List[List[Path]]) = targets.filter(_.nonEmpty).map(classpath => pluginResource(classpath, findPluginClassloader(classpath))) def 
dirList(dir: Path) = if (dir.isDirectory) dir.toDirectory.files.filter(Jar.isJarOrZip).toList.sortBy(_.name) else Nil // ask plugin loaders for plugin resources, but ignore if none in -Xpluginsdir @@ -179,9 +174,8 @@ object Plugin { /** Instantiate a plugin class, given the class and * the compiler it is to be used in. */ - def instantiate(clazz: AnyClass, global: Global): Plugin = { - (clazz getConstructor classOf[Global] newInstance global).asInstanceOf[Plugin] - } + def instantiate(clazz: AnyClass, global: Global): Plugin = + clazz.getConstructor(classOf[Global]).newInstance(global).asInstanceOf[Plugin] } class PluginLoadException(val path: String, message: String, cause: Exception) extends Exception(message, cause) { diff --git a/src/compiler/scala/tools/nsc/plugins/Plugins.scala b/src/compiler/scala/tools/nsc/plugins/Plugins.scala index 89da75e9628e..250b62634689 100644 --- a/src/compiler/scala/tools/nsc/plugins/Plugins.scala +++ b/src/compiler/scala/tools/nsc/plugins/Plugins.scala @@ -102,7 +102,7 @@ trait Plugins { global: Global => closeableRegistry.registerCloseable(loader) loader case Right(paths) => - cache.getOrCreate(paths, newLoader, closeableRegistry, checkStamps) + cache.getOrCreate((), paths, newLoader, closeableRegistry, checkStamps) } } @@ -150,7 +150,7 @@ trait Plugins { global: Global => } globalError("bad option: -P:" + opt) // Plugins may opt out, unless we just want to show info - plugs filter (p => p.init(p.options, globalError) || (settings.debug && settings.isInfo)) + plugs filter (p => p.init(p.options, globalError) || (settings.isDebug && settings.isInfo)) } lazy val plugins: List[Plugin] = loadPlugins() @@ -202,7 +202,7 @@ trait Plugins { global: Global => closeableRegistry.registerCloseable(loader) loader case Right(paths) => - cache.getOrCreate(paths, newLoader, closeableRegistry, checkStamps) + cache.getOrCreate((), paths, newLoader, closeableRegistry, checkStamps) } } } diff --git a/src/compiler/scala/tools/nsc/reporters/Reporter.scala b/src/compiler/scala/tools/nsc/reporters/Reporter.scala index 4262ec054914..219906e77fd8 100644 --- a/src/compiler/scala/tools/nsc/reporters/Reporter.scala +++ b/src/compiler/scala/tools/nsc/reporters/Reporter.scala @@ -119,7 +119,7 @@ abstract class FilteringReporter extends Reporter { // Invoked when an error or warning is filtered by position. 
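+      // With -Xprompt the message is still shown; in debug mode it is shown tagged as suppressed.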
@inline def suppress = { if (settings.prompt) doReport(pos, msg, severity) - else if (settings.debug) doReport(pos, s"[ suppressed ] $msg", severity) + else if (settings.isDebug) doReport(pos, s"[ suppressed ] $msg", severity) Suppress } if (!duplicateOk(pos, severity, msg)) suppress else if (!maxOk) Count else Display diff --git a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala index d070a7870652..1b25f95f46c6 100644 --- a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala +++ b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala @@ -23,7 +23,7 @@ import scala.language.existentials import scala.annotation.elidable import scala.tools.util.PathResolver.Defaults import scala.collection.mutable -import scala.reflect.internal.util.StringContextStripMarginOps +import scala.reflect.internal.util.{ StatisticsStatics, StringContextStripMarginOps } import scala.tools.nsc.util.DefaultJarFactory import scala.util.chaining._ @@ -92,25 +92,13 @@ trait ScalaSettings extends StandardScalaSettings with Warnings { _: MutableSett } withAbbreviation "--release" def releaseValue: Option[String] = Option(release.value).filter(_ != "") - /* - * The previous "-Xsource" option is intended to be used mainly - * though this helper. - */ - private[this] val version212 = ScalaVersion("2.12.0") - def isScala212: Boolean = source.value >= version212 - private[this] val version213 = ScalaVersion("2.13.0") - def isScala213: Boolean = source.value >= version213 - private[this] val version214 = ScalaVersion("2.14.0") - private[this] val version3 = ScalaVersion("3.0.0") - def isScala3: Boolean = source.value >= version3 - /** * -X "Advanced" settings */ val Xhelp = BooleanSetting ("-X", "Print a synopsis of advanced options.") val async = BooleanSetting ("-Xasync", "Enable the async phase for scala.async.Async.{async,await}.") val checkInit = BooleanSetting ("-Xcheckinit", "Wrap field accessors to throw an exception on uninitialized access.") - val developer = BooleanSetting ("-Xdev", "Issue warnings about anything which seems amiss in compiler internals. Intended for compiler developers") + val developer = BooleanSetting ("-Xdev", "Issue warnings about anything which seems amiss in compiler internals. 
Intended for compiler developers").withPostSetHook(s => if (s.value) StatisticsStatics.enableDeveloperAndDeoptimize()) val noassertions = BooleanSetting ("-Xdisable-assertions", "Generate no assertions or assumptions.") andThen (flag => if (flag) elidebelow.value = elidable.ASSERTION + 1) val elidebelow = IntSetting ("-Xelide-below", "Calls to @elidable methods are omitted if method priority is lower than argument", @@ -145,10 +133,16 @@ trait ScalaSettings extends StandardScalaSettings with Warnings { _: MutableSett val mainClass = StringSetting ("-Xmain-class", "path", "Class for manifest's Main-Class entry (only useful with -d )", "") val sourceReader = StringSetting ("-Xsource-reader", "classname", "Specify a custom method for reading source files.", "") val reporter = StringSetting ("-Xreporter", "classname", "Specify a custom subclass of FilteringReporter for compiler messages.", "scala.tools.nsc.reporters.ConsoleReporter") - val source = ScalaVersionSetting ("-Xsource", "version", "Enable features that will be available in a future version of Scala, for purposes of early migration and alpha testing.", initial = version213).withPostSetHook { s => - if (s.value < version213) errorFn.apply(s"-Xsource must be at least the current major version (${version213.versionString})") - if (s.value >= version214 && s.value < version3) s.withDeprecationMessage("instead of -Xsource:2.14, use -Xsource:3").value = version3 + val source = ScalaVersionSetting ("-Xsource", "version", "Enable features that will be available in a future version of Scala, for purposes of early migration and alpha testing.", initial = ScalaVersion("2.13")).withPostSetHook { s => + if (s.value >= ScalaVersion("3")) + isScala3.value = true + else if (s.value >= ScalaVersion("2.14")) + s.withDeprecationMessage("instead of -Xsource:2.14, use -Xsource:3").value = ScalaVersion("3") + else if (s.value < ScalaVersion("2.13")) + errorFn.apply(s"-Xsource must be at least the current major version (${ScalaVersion("2.13").versionString})") } + val isScala3 = BooleanSetting ("isScala3", "Is -Xsource Scala 3?").internalOnly() + // The previous "-Xsource" option is intended to be used mainly through the `isScala3` helper above val XnoPatmatAnalysis = BooleanSetting ("-Xno-patmat-analysis", "Don't perform exhaustivity/unreachability analysis. 
Also, ignore @switch annotation.") @@ -459,7 +453,7 @@ trait ScalaSettings extends StandardScalaSettings with Warnings { _: MutableSett */ val Vhelp = BooleanSetting("-V", "Print a synopsis of verbose options.") val browse = PhasesSetting("-Vbrowse", "Browse the abstract syntax tree after") withAbbreviation "-Ybrowse" - val debug = BooleanSetting("-Vdebug", "Increase the quantity of debugging output.") withAbbreviation "-Ydebug" + val debug = BooleanSetting("-Vdebug", "Increase the quantity of debugging output.") withAbbreviation "-Ydebug" withPostSetHook (s => if (s.value) StatisticsStatics.enableDebugAndDeoptimize()) val YdebugTasty = BooleanSetting("-Vdebug-tasty", "Increase the quantity of debugging output when unpickling tasty.") withAbbreviation "-Ydebug-tasty" val Ydocdebug = BooleanSetting("-Vdoc", "Trace scaladoc activity.") withAbbreviation "-Ydoc-debug" val Yidedebug = BooleanSetting("-Vide", "Generate, validate and output trees using the interactive compiler.") withAbbreviation "-Yide-debug" @@ -502,13 +496,15 @@ trait ScalaSettings extends StandardScalaSettings with Warnings { _: MutableSett val Ystatistics = PhasesSetting("-Vstatistics", "Print compiler statistics for specific phases", "parser,typer,patmat,erasure,cleanup,jvm") .withPostSetHook(s => YstatisticsEnabled.value = s.value.nonEmpty) .withAbbreviation("-Ystatistics") - val YstatisticsEnabled = BooleanSetting("-Ystatistics-enabled", "Internal setting, indicating that statistics are enabled for some phase.").internalOnly() + val YstatisticsEnabled = BooleanSetting("-Ystatistics-enabled", "Internal setting, indicating that statistics are enabled for some phase.").internalOnly().withPostSetHook(s => if (s) StatisticsStatics.enableColdStatsAndDeoptimize()) val YhotStatisticsEnabled = BooleanSetting("-Vhot-statistics", s"Enable `${Ystatistics.name}` to also print hot statistics.") - .withAbbreviation("-Yhot-statistics") + .withAbbreviation("-Yhot-statistics").withPostSetHook(s => if (s && YstatisticsEnabled) StatisticsStatics.enableHotStatsAndDeoptimize()) val Yshowsyms = BooleanSetting("-Vsymbols", "Print the AST symbol hierarchy after each phase.") withAbbreviation "-Yshow-syms" val Ytyperdebug = BooleanSetting("-Vtyper", "Trace type assignments.") withAbbreviation "-Ytyper-debug" - val XlogImplicits = BooleanSetting("-Vimplicits", "Show more detail on why some implicits are not applicable.") - .withAbbreviation("-Xlog-implicits") + val Vimplicits = BooleanSetting("-Vimplicits", "Print dependent missing implicits.").withAbbreviation("-Xlog-implicits") + val VimplicitsVerboseTree = BooleanSetting("-Vimplicits-verbose-tree", "Display all intermediate implicits in a chain.") + val VimplicitsMaxRefined = IntSetting("-Vimplicits-max-refined", "max chars for printing refined types, abbreviate to `F {...}`", Int.MaxValue, Some((0, Int.MaxValue)), _ => None) + val VtypeDiffs = BooleanSetting("-Vtype-diffs", "Print found/required error messages as colored diffs.") val logImplicitConv = BooleanSetting("-Vimplicit-conversions", "Print a message whenever an implicit conversion is inserted.") .withAbbreviation("-Xlog-implicit-conversions") val logReflectiveCalls = BooleanSetting("-Vreflective-calls", "Print a message when a reflective method call is generated") diff --git a/src/compiler/scala/tools/nsc/settings/StandardScalaSettings.scala b/src/compiler/scala/tools/nsc/settings/StandardScalaSettings.scala index 02e6da5afe0e..15527257a3b5 100644 --- a/src/compiler/scala/tools/nsc/settings/StandardScalaSettings.scala +++ 
b/src/compiler/scala/tools/nsc/settings/StandardScalaSettings.scala @@ -48,7 +48,7 @@ trait StandardScalaSettings { _: MutableSettings => else Wconf.tryToSet(List(s"cat=feature:s")) } val g = ChoiceSetting ("-g", "level", "Set level of generated debugging info.", List("none", "source", "line", "vars", "notailcalls"), "vars") - val help = BooleanSetting ("-help", "Print a synopsis of standard options") withAbbreviation "--help" + val help = BooleanSetting ("-help", "Print a synopsis of standard options") withAbbreviation "--help" withAbbreviation("-h") val nowarn = BooleanSetting ("-nowarn", "Generate no warnings.") withAbbreviation "--no-warnings" withPostSetHook { s => if (s) maxwarns.value = 0 } val optimise: BooleanSetting // depends on post hook which mutates other settings val print = BooleanSetting ("-print", "Print program with Scala-specific features removed.") withAbbreviation "--print" @@ -73,7 +73,7 @@ trait StandardScalaSettings { _: MutableSettings => object StandardScalaSettings { // not final in case some separately compiled client code wanted to depend on updated values val MinTargetVersion = 8 - val MaxTargetVersion = 17 + val MaxTargetVersion = 18 private val AllTargetVersions = (MinTargetVersion to MaxTargetVersion).map(_.toString).to(List) } diff --git a/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala b/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala index 4e8ad9ab2f59..8d547bae5a8f 100644 --- a/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala +++ b/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala @@ -19,7 +19,7 @@ import java.io.IOException import scala.reflect.internal.MissingRequirementError import scala.reflect.io.{AbstractFile, NoAbstractFile} import scala.tools.nsc.util.{ClassPath, ClassRepresentation} -import scala.reflect.internal.util.{ReusableInstance, StatisticsStatics} +import scala.reflect.internal.util.ReusableInstance import scala.tools.nsc.Reporting.WarningCategory /** This class ... @@ -57,7 +57,7 @@ abstract class SymbolLoaders { } protected def signalError(root: Symbol, ex: Throwable): Unit = { - if (settings.debug) ex.printStackTrace() + if (settings.isDebug) ex.printStackTrace() globalError(ex.getMessage() match { case null => "i/o error while loading " + root.name case msg => "error while loading " + root.name + ", " + msg @@ -198,15 +198,16 @@ abstract class SymbolLoaders { } } private def nameOf(classRep: ClassRepresentation): TermName = { - while(true) { - val len = classRep.nameChars(nameCharBuffer) - if (len == -1) nameCharBuffer = new Array[Char](nameCharBuffer.length * 2) - else return newTermName(nameCharBuffer, 0, len) + val name = classRep.name + val nameLength = name.length + if (nameLength <= nameCharBuffer.length) { + name.getChars(0, nameLength, nameCharBuffer, 0) + newTermName(nameCharBuffer, 0, nameLength) + } else { + newTermName(name) } - throw new IllegalStateException() } - private var nameCharBuffer = new Array[Char](256) - + private val nameCharBuffer = new Array[Char](512) /** * A lazy type that completes itself by calling parameter doComplete. 
@@ -312,7 +313,7 @@ abstract class SymbolLoaders { } } } - private lazy val classFileDataReader: ReusableInstance[ReusableDataReader] = ReusableInstance[ReusableDataReader](new ReusableDataReader(), enabled = isCompilerUniverse) + private lazy val classFileDataReader: ReusableInstance[ReusableDataReader] = ReusableInstance[ReusableDataReader](new ReusableDataReader(), initialSize = 1, enabled = isCompilerUniverse) class ClassfileLoader(val classfile: AbstractFile, clazz: ClassSymbol, module: ModuleSymbol) extends SymbolLoader with FlagAssigningCompleter { private object classfileParser extends { val symbolTable: SymbolLoaders.this.symbolTable.type = SymbolLoaders.this.symbolTable @@ -337,11 +338,11 @@ abstract class SymbolLoaders { protected def description = "class file "+ classfile.toString protected def doComplete(root: Symbol): Unit = { - val start = if (StatisticsStatics.areSomeColdStatsEnabled) statistics.startTimer(statistics.classReadNanos) else null + val start = if (settings.areStatisticsEnabled) statistics.startTimer(statistics.classReadNanos) else null classfileParser.parse(classfile, clazz, module) if (clazz.associatedFile eq NoAbstractFile) clazz.associatedFile = classfile if (module.associatedFile eq NoAbstractFile) module.associatedFile = classfile - if (StatisticsStatics.areSomeColdStatsEnabled) statistics.stopTimer(statistics.classReadNanos, start) + if (settings.areStatisticsEnabled) statistics.stopTimer(statistics.classReadNanos, start) } override def sourcefile: Option[AbstractFile] = classfileParser.srcfile override def associatedFile(self: Symbol): AbstractFile = classfile diff --git a/src/compiler/scala/tools/nsc/symtab/SymbolTrackers.scala b/src/compiler/scala/tools/nsc/symtab/SymbolTrackers.scala index e99ed0858a03..7a0af81ee22a 100644 --- a/src/compiler/scala/tools/nsc/symtab/SymbolTrackers.scala +++ b/src/compiler/scala/tools/nsc/symtab/SymbolTrackers.scala @@ -133,7 +133,7 @@ trait SymbolTrackers { else " (" + Flags.flagsToString(masked) + ")" } def symString(sym: Symbol) = ( - if (settings.debug && sym.hasCompleteInfo) { + if (settings.isDebug && sym.hasCompleteInfo) { val s = sym.defString take 240 if (s.length == 240) s + "..." 
else s } diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/AbstractFileReader.scala b/src/compiler/scala/tools/nsc/symtab/classfile/AbstractFileReader.scala index ca1378e6c87e..faf69d5769e3 100644 --- a/src/compiler/scala/tools/nsc/symtab/classfile/AbstractFileReader.scala +++ b/src/compiler/scala/tools/nsc/symtab/classfile/AbstractFileReader.scala @@ -27,9 +27,7 @@ import scala.tools.nsc.io.AbstractFile */ final class AbstractFileReader(val buf: Array[Byte]) extends DataReader { @deprecated("Use other constructor", "2.13.0") - def this(file: AbstractFile) = { - this(file.toByteArray) - } + def this(file: AbstractFile) = this(file.toByteArray) /** the current input pointer */ @@ -67,9 +65,8 @@ final class AbstractFileReader(val buf: Array[Byte]) extends DataReader { def getByte(mybp: Int): Byte = buf(mybp) - def getBytes(mybp: Int, bytes: Array[Byte]): Unit = { + def getBytes(mybp: Int, bytes: Array[Byte]): Unit = System.arraycopy(buf, mybp, bytes, 0, bytes.length) - } /** extract a character at position bp from buf */ @@ -95,9 +92,8 @@ final class AbstractFileReader(val buf: Array[Byte]) extends DataReader { */ def getDouble(mybp: Int): Double = longBitsToDouble(getLong(mybp)) - def getUTF(mybp: Int, len: Int): String = { + def getUTF(mybp: Int, len: Int): String = new DataInputStream(new ByteArrayInputStream(buf, mybp, len)).readUTF - } /** skip next 'n' bytes */ diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala index e1a218f5df4b..da93a90d72c1 100644 --- a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala +++ b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala @@ -117,11 +117,11 @@ abstract class ClassfileParser(reader: ReusableInstance[ReusableDataReader]) { } private def handleMissing(e: MissingRequirementError) = { - if (settings.debug) e.printStackTrace + if (settings.isDebug) e.printStackTrace throw new IOException(s"Missing dependency '${e.req}', required by $file") } private def handleError(e: Exception) = { - if (settings.debug) e.printStackTrace() + if (settings.isDebug) e.printStackTrace() throw new IOException(s"class file '$file' is broken\n(${e.getClass}/${e.getMessage})") } private def mismatchError(c: Symbol) = { @@ -420,7 +420,8 @@ abstract class ClassfileParser(reader: ReusableInstance[ReusableDataReader]) { // - better owner than `NoSymbol` // - remove eager warning val msg = s"Class $name not found - continuing with a stub." - if ((!settings.isScaladoc) && (settings.verbose || settings.developer)) loaders.warning(NoPosition, msg, WarningCategory.OtherDebug, clazz.fullNameString) + if ((!settings.isScaladoc) && (settings.verbose || settings.isDeveloper)) + loaders.warning(NoPosition, msg, WarningCategory.OtherDebug, clazz.fullNameString) NoSymbol.newStubSymbol(name.toTypeName, msg) } @@ -471,7 +472,7 @@ abstract class ClassfileParser(reader: ReusableInstance[ReusableDataReader]) { case ex: FatalError => // getClassByName can throw a MissingRequirementError (which extends FatalError) // definitions.getMember can throw a FatalError, for example in pos/t5165b - if (settings.debug) + if (settings.isDebug) ex.printStackTrace() stubClassSymbol(newTypeName(name)) } @@ -1007,7 +1008,7 @@ abstract class ClassfileParser(reader: ReusableInstance[ReusableDataReader]) { // with a `FatalError` exception, handled above. Here you'd end up after a NPE (for example), // and that should never be swallowed silently. 
loaders.warning(NoPosition, s"Caught: $ex while parsing annotations in ${file}", WarningCategory.Other, clazz.fullNameString) - if (settings.debug) ex.printStackTrace() + if (settings.isDebug) ex.printStackTrace() None // ignore malformed annotations } @@ -1264,7 +1265,12 @@ abstract class ClassfileParser(reader: ReusableInstance[ReusableDataReader]) { val expectedUUID = new UUID(reader.readUncompressedLong(), reader.readUncompressedLong()) val tastyUUID = new TastyHeaderUnpickler(TASTYBytes).readHeader() if (expectedUUID != tastyUUID) { - reporter.error(NoPosition, s"Tasty UUID ($tastyUUID) file did not correspond the tasty UUID ($expectedUUID) declared in the classfile $file.") + loaders.warning( + NoPosition, + s"$file is out of sync with its TASTy file; the TASTy file was loaded instead. Try cleaning the project to fix this issue.", + WarningCategory.Other, + clazz.fullNameString + ) } TASTYBytes } diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala b/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala index 23ef2573d91a..029be7dd30c5 100644 --- a/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala +++ b/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala @@ -112,7 +112,7 @@ abstract class Pickler extends SubComponent { // // OPT: do this only as a recovery after fatal error. Checking in advance was expensive. if (t.isErroneous) { - if (settings.debug) e.printStackTrace() + if (settings.isDebug) e.printStackTrace() reporter.error(t.pos, "erroneous or inaccessible type") return } } diff --git a/src/compiler/scala/tools/nsc/tasty/ForceKinds.scala b/src/compiler/scala/tools/nsc/tasty/ForceKinds.scala new file mode 100644 index 000000000000..137bbfe854bc --- /dev/null +++ b/src/compiler/scala/tools/nsc/tasty/ForceKinds.scala @@ -0,0 +1,60 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.tools.nsc.tasty + +import scala.language.implicitConversions + +import ForceKinds._ + +object ForceKinds { + + /** When forcing the companion of a module */ + final val DeepForce: ForceKinds.Single = of(1 << 1) + /** When forcing the owner of a symbol */ + final val CompleteOwner: ForceKinds.Single = of(1 << 2) + /** When forcing an overloaded signature */ + final val OverloadedSym: ForceKinds.Single = of(1 << 3) + /** When forcing a symbol that will be copied */ + final val CopySym: ForceKinds.Single = of(1 << 4) + /** When forcing the underlying symbol of some type space */ + final val SpaceForce: ForceKinds.Single = of(1 << 5) + /** When forcing the enum singleton from its "fake" module class */ + final val EnumProxy: ForceKinds.Single = of(1 << 6) + + private def of(mask: Int): ForceKinds.Single = new ForceKinds.Single(mask) + + class Single(val toInt: Int) extends AnyVal { mode => + def |(single: ForceKinds.Single): ForceKinds = new ForceKinds(toInt | single.toInt) + } + + @inline implicit def single2ForceKinds(single: ForceKinds.Single): ForceKinds = new ForceKinds(single.toInt) + +} + +/**A static type representing a bitset of modes that are for debugging why a symbol may have been forced + */ +class ForceKinds(val toInt: Int) extends AnyVal { + def is(single: ForceKinds.Single): Boolean = (toInt & single.toInt) == single.toInt + def |(single: ForceKinds.Single): ForceKinds = new ForceKinds(toInt | single.toInt) + + def describe: List[String] = { + var xs = List.empty[String] + if (is(DeepForce)) xs ::= "deep" + if (is(CompleteOwner)) xs ::= "class owner is required" + if (is(OverloadedSym)) xs ::= "overload resolution" + if (is(CopySym)) xs ::= "copying its info" + if (is(SpaceForce)) xs ::= "space" + if (is(EnumProxy)) xs ::= "forcing enum value from fake object" + xs + } +} diff --git a/src/compiler/scala/tools/nsc/tasty/TastyModes.scala b/src/compiler/scala/tools/nsc/tasty/TastyModes.scala index d826e367db7d..a8e5e8454599 100644 --- a/src/compiler/scala/tools/nsc/tasty/TastyModes.scala +++ b/src/compiler/scala/tools/nsc/tasty/TastyModes.scala @@ -34,10 +34,14 @@ object TastyModes { final val InnerScope: TastyMode = TastyMode(1 << 5) /** When reading the tree of an Opaque type */ final val OpaqueTypeDef: TastyMode = TastyMode(1 << 6) + /** When reading trees of an annotation */ + final val ReadAnnotationCtor: TastyMode = TastyMode(1 << 7) /** The union of `IndexStats` and `InnerScope` */ final val IndexScopedStats: TastyMode = IndexStats | InnerScope + final val ReadAnnotTopLevel: TastyMode = ReadAnnotation | ReadAnnotationCtor + case class TastyMode(val toInt: Int) extends AnyVal { mode => def |(other: TastyMode): TastyMode = TastyMode(toInt | other.toInt) @@ -58,6 +62,7 @@ object TastyModes { if (mode.is(ReadMacro)) sb += "ReadMacro" if (mode.is(InnerScope)) sb += "InnerScope" if (mode.is(OpaqueTypeDef)) sb += "OpaqueTypeDef" + if (mode.is(ReadAnnotationCtor)) sb += "ReadAnnotationCtor" sb.mkString(" | ") } } diff --git a/src/compiler/scala/tools/nsc/tasty/TreeUnpickler.scala b/src/compiler/scala/tools/nsc/tasty/TreeUnpickler.scala index 8a10f400b61a..4f38b9dd6d86 100644 --- a/src/compiler/scala/tools/nsc/tasty/TreeUnpickler.scala +++ b/src/compiler/scala/tools/nsc/tasty/TreeUnpickler.scala @@ -12,13 +12,16 @@ package scala.tools.nsc.tasty -import scala.tools.tasty.{TastyRefs, TastyReader, TastyName, TastyFormat, TastyFlags}, TastyRefs._, TastyFlags._, TastyFormat._ +import scala.tools.tasty.{TastyRefs, TastyReader, TastyName, TastyFormat, TastyFlags} 
+import TastyRefs._, TastyFlags._, TastyFormat._ +import ForceKinds._ import scala.annotation.switch import scala.collection.mutable import scala.reflect.io.AbstractFile import scala.reflect.internal.Variance import scala.util.chaining._ +import scala.collection.immutable.ArraySeq /**`TreeUnpickler` is responsible for traversing all trees in the "ASTs" section of a TASTy file, which represent the * definitions inside the classfile associated with the root class/module. `TreeUnpickler` will enter the public api @@ -36,7 +39,6 @@ class TreeUnpickler[Tasty <: TastyUniverse]( nameAtRef: NameRef => TastyName)(implicit val tasty: Tasty) { self => import tasty._ - import FlagSets._ import TreeUnpickler._ import MaybeCycle._ import TastyModes._ @@ -70,8 +72,12 @@ class TreeUnpickler[Tasty <: TastyUniverse]( //---------------- unpickling trees ---------------------------------------------------------------------------------- - private def registerSym(addr: Addr, sym: Symbol)(implicit ctx: Context) = { - ctx.log(s"$addr registered ${showSym(sym)} in ${location(sym.owner)}") + private def registerSym(addr: Addr, sym: Symbol, rejected: Boolean)(implicit ctx: Context) = { + assert(!(rejected && isSymbol(sym)), "expected no symbol when rejected") + ctx.log( + if (isSymbol(sym)) s"$addr registered ${showSym(sym)}" + else s"$addr registering symbol was rejected" + ) symAtAddr(addr) = sym } @@ -81,16 +87,31 @@ class TreeUnpickler[Tasty <: TastyUniverse]( this.roots = Set(objectRoot, classRoot) val rdr = new TreeReader(reader).fork ownerTree = new OwnerTree(NoAddr, 0, rdr.fork, reader.endAddr) - def indexTopLevel(implicit ctx: Context): Unit = rdr.indexStats(reader.endAddr) - if (rdr.isTopLevel) - inIndexScopedStatsContext(indexTopLevel(_)) + def indexTopLevel()(implicit ctx: Context): Unit = rdr.indexStats(reader.endAddr) + if (rdr.isTopLevel) { + inIndexScopedStatsContext { ctx0 => + ctx0.trace(traceTopLevel(classRoot, objectRoot)) { + indexTopLevel()(ctx0) + } + } + } } + private def traceTopLevel(classRoot: Symbol, objectRoot: Symbol) = TraceInfo[Unit]( + query = s"reading top level roots", + qual = s"${showSym(classRoot)}, ${showSym(objectRoot)}", + res = _ => "entered top level roots" + ) + /** A completer that captures the current position and context, which then uses the position to discover the symbol * to compute the info for. */ - class Completer(isClass: Boolean, reader: TastyReader, originalFlagSet: TastyFlagSet)(implicit ctx: Context) - extends TastyCompleter(isClass, originalFlagSet) { self => + class Completer( + isClass: Boolean, + reader: TastyReader, + tflags: TastyFlagSet + )(implicit ctx: Context) + extends TastyCompleter(isClass, tflags) { private val symAddr = reader.currentAddr @@ -200,7 +221,7 @@ class TreeUnpickler[Tasty <: TastyUniverse]( /** Read names in an interleaved sequence of types/bounds and (parameter) names, * possibly followed by a sequence of modifiers. 
*/ - def readParamNamesAndMods(end: Addr): (List[TastyName], TastyFlagSet) = { + def readParamNamesAndMods(end: Addr): (ArraySeq[TastyName], TastyFlagSet) = { val names = collectWhile(currentAddr != end && !isModifierTag(nextByte)) { skipTree() @@ -214,17 +235,23 @@ class TreeUnpickler[Tasty <: TastyUniverse]( case GIVEN => mods |= Given } } - (names, mods) + (names.to(ArraySeq), mods) } /** Read `n` parameter types or bounds which are interleaved with names */ - def readParamTypes[T <: Type](n: Int)(implicit ctx: Context): List[T] = { - if (n == 0) Nil - else { - val t = readType().asInstanceOf[T] - readNat() // skip name - t :: readParamTypes(n - 1) + def readParamTypes(ps: ArraySeq[Symbol])(implicit ctx: Context): ArraySeq[Type] = { + def inner(ps1: Iterator[Symbol], buf: mutable.ArrayBuffer[Type]): ArraySeq[Type] = { + if (ps1.isEmpty) buf.to(ArraySeq) + else { + val p = ps1.next() + val rest = ps1 + val localCtx = ctx.withOwner(p) + val t = readType()(localCtx) + readNat() // skip name + inner(rest, buf += t) + } } + inner(ps.iterator, new mutable.ArrayBuffer) } /** Read reference to definition and return symbol created at that definition */ @@ -235,23 +262,35 @@ class TreeUnpickler[Tasty <: TastyUniverse]( case Some(sym) => sym case None => - ctx.log(s"<<< No symbol found at forward reference $addr, ensuring one exists:") - val ctxAtOwner = ctx.withOwner(ownerTree.findOwner(addr)) - val sym = forkAt(addr).createSymbol()(ctxAtOwner) - ctx.log(s">>> $addr forward reference to ${showSym(sym)}") - sym + ctx.trace(traceForwardReference(addr)) { + val ctxAtOwner = ctx.withOwner(ownerTree.findOwner(addr)) + forkAt(addr).createSymbol()(ctxAtOwner) + } } + private def traceForwardReference(addr: Addr) = TraceInfo[Symbol]( + query = s"creating forward reference", + qual = s"at $addr", + res = sym => s"$addr forward reference to ${showSym(sym)}" + ) + /** The symbol defined by current definition */ def symbolAtCurrent()(implicit ctx: Context): Symbol = symAtAddr.get(currentAddr) match { case Some(sym) => assert(ctx.owner === sym.owner, s"owner discrepancy for ${showSym(sym)}, expected: ${showSym(ctx.owner)}, found: ${showSym(sym.owner)}") sym case None => - ctx.log(s"$currentAddr No symbol found at current address, ensuring one exists:") - createSymbol() + ctx.trace(traceCurrentSymbol(currentAddr)) { + createSymbol() + } } + private def traceCurrentSymbol(addr: Addr) = TraceInfo[Symbol]( + query = "create symbol at current address", + qual = s"$addr", + res = sym => if (!isSymbol(sym)) s"evicted symbol at $addr" else s"created ${showSym(sym)} at $addr" + ) + def readConstant(tag: Int)(implicit ctx: Context): Constant = (tag: @switch) match { case UNITconst => tpd.Constant(()) @@ -285,7 +324,12 @@ class TreeUnpickler[Tasty <: TastyUniverse]( def readType()(implicit ctx: Context): Type = { val start = currentAddr val tag = readByte() - ctx.log(s"$start reading type ${astTagToString(tag)}:") + + def traceReadType = TraceInfo[Type]( + query = "reading type", + qual = s"${astTagToString(tag)} $start", + res = tpe => s"exit ${showType(tpe)} ${astTagToString(tag)} $start" + ) def registeringTypeWith[T](tp: Type, op: => T): T = { typeAtAddr(start) = tp @@ -295,18 +339,27 @@ class TreeUnpickler[Tasty <: TastyUniverse]( def readLengthType(): Type = { val end = readEnd() - def readMethodic[N <: TastyName] - (companionOp: TastyFlagSet => LambdaTypeCompanion[N], nameMap: TastyName => N)(implicit ctx: Context): Type = { + def readMethodic[N <: TastyName]( + factory: LambdaFactory[N], + parseFlags: 
FlagSets.FlagParser, + nameMap: TastyName => N + )(implicit ctx: Context): Type = { val result = typeAtAddr.getOrElse(start, { + // TODO [tasty]: can we share LambdaTypes/RecType/RefinedType safely + // under a new context owner? (aka when referenced by a `SHAREDtype`). + // So far this has been safe to do, but perhaps with macros comparing the + // owners of the symbols of PolyTypes maybe not? + // one concrete example where TypeLambdaType is shared between two unrelated classes: + // - test/tasty/run/src-3/tastytest/issue12420/ShareLambda.scala val nameReader = fork nameReader.skipTree() // skip result val paramReader = nameReader.fork val (paramNames, mods) = nameReader.readParamNamesAndMods(end) - companionOp(mods)(paramNames.map(nameMap))( - pt => typeAtAddr(start) = pt, - () => paramReader.readParamTypes(paramNames.length), - () => readType() - ).tap(typeAtAddr(start) = _) + LambdaFactory.parse(factory, paramNames.map(nameMap), parseFlags(mods)(ctx))( + ps => paramReader.readParamTypes(ps), + () => readType(), + pt => typeAtAddr(start) = pt, // register the lambda so that we can access its parameters + ) }) goto(end) result @@ -327,36 +380,31 @@ class TreeUnpickler[Tasty <: TastyUniverse]( val result = (tag: @switch) match { - case TERMREFin => selectTerm(readTastyName(), readType(), readType()) - case TYPEREFin => selectType(readTastyName().toTypeName, readType(), readType()) + case TERMREFin => + defn.TermRefIn(name = readTastyName(), prefix = readType(), space = readType()) + case TYPEREFin => + defn.TypeRefIn(name = readTastyName().toTypeName, prefix = readType(), space = readType()) case REFINEDtype => var name = readTastyName() val parent = readType() if (nextUnsharedTag === TYPEBOUNDS) name = name.toTypeName - ctx.enterRefinement(parent)(refinedCtx => defn.RefinedType(parent, name, refinedCtx.owner, readType())) + ctx.enterRefinement(parent)(refinedCtx => + defn.RefinedType(parent, name, refinedCtx.owner, readType()) + ) case APPLIEDtype => defn.AppliedType(readType(), until(end)(readType())) case TYPEBOUNDS => val lo = readType() - if (nothingButMods(end)) - typeRef(readVariances(lo)) + if (nothingButMods(end)) readVariances(lo) else defn.TypeBounds(lo, readVariances(readType())) - case ANNOTATEDtype => defn.AnnotatedType(readType(), readTerm()(ctx.addMode(ReadAnnotation))) + case ANNOTATEDtype => defn.AnnotatedType(readType(), readTerm()(ctx.addMode(ReadAnnotTopLevel))) case ANDtype => defn.IntersectionType(readType(), readType()) case ORtype => unionIsUnsupported case SUPERtype => defn.SuperType(readType(), readType()) case MATCHtype | MATCHCASEtype => matchTypeIsUnsupported - case POLYtype => readMethodic(Function.const(PolyType), _.toTypeName) - case METHODtype => - def companion(mods0: TastyFlagSet) = { - var mods = EmptyTastyFlags - if (mods0.is(Erased)) erasedRefinementIsUnsupported[Unit] - if (mods0.isOneOf(Given | Implicit)) mods |= Implicit - methodTypeCompanion(mods) - } - readMethodic(companion, id) - case TYPELAMBDAtype => readMethodic(Function.const(HKTypeLambda), _.toTypeName) - case PARAMtype => // reference to a type parameter within a LambdaType - readTypeRef().typeParams(readNat()).ref + case POLYtype => readMethodic(PolyTypeLambda, FlagSets.addDeferred, _.toTypeName) + case METHODtype => readMethodic(MethodTermLambda, FlagSets.parseMethod, id) + case TYPELAMBDAtype => readMethodic(HKTypeLambda, FlagSets.addDeferred, _.toTypeName) + case PARAMtype => defn.ParamRef(readTypeRef(), readNat()) // reference to a parameter within a LambdaType } assert(currentAddr 
=== end, s"$start $currentAddr $end ${astTagToString(tag)}") result @@ -364,14 +412,14 @@ class TreeUnpickler[Tasty <: TastyUniverse]( def readSimpleType(): Type = { (tag: @switch) match { - case TYPEREFdirect => readSymRef().termRef - case TERMREFdirect => readSymRef().singleRef - case TYPEREFsymbol | TERMREFsymbol => readSymNameRef() - case TYPEREFpkg => readPackageRef().objectImplementation.ref - case TERMREFpkg => readPackageRef().termRef - case TYPEREF => selectType(readTastyName().toTypeName, readType()) - case TERMREF => selectTerm(readTastyName(), readType()) - case THIS => defn.ThisType(singletonLike(readType())) + case TYPEREFdirect => defn.NamedType(defn.NoPrefix, readSymRef()) + case TERMREFdirect => defn.NamedType(defn.NoPrefix, readSymRef()) + case TYPEREFsymbol | TERMREFsymbol => defn.NamedType(sym = readSymRef(), prefix = readType()) + case TYPEREFpkg => defn.NamedType(defn.NoPrefix, sym = readPackageRef().objectImplementation) + case TERMREFpkg => defn.NamedType(defn.NoPrefix, sym = readPackageRef()) + case TYPEREF => defn.TypeRef(name = readTastyName().toTypeName, prefix = readType()) + case TERMREF => defn.TermRef(name = readTastyName(), prefix = readType()) + case THIS => defn.ThisType(readType()) case RECtype => typeAtAddr.get(start) match { case Some(tp) => @@ -382,7 +430,7 @@ class TreeUnpickler[Tasty <: TastyUniverse]( registeringTypeWith(rt, readType()(ctx.withOwner(rt.refinementClass))) ).tap(typeAtAddr(start) = _) } - case RECthis => recThis(readTypeRef()) + case RECthis => defn.RecThis(readTypeRef()) case SHAREDtype => val ref = readAddr() typeAtAddr.getOrElseUpdate(ref, forkAt(ref).readType()) @@ -390,13 +438,9 @@ class TreeUnpickler[Tasty <: TastyUniverse]( case _ => defn.ConstantType(readConstant(tag)) } } - if (tag < firstLengthTreeTag) readSimpleType() else readLengthType() - } - - private def readSymNameRef()(implicit ctx: Context): Type = { - val sym = readSymRef() - val prefix = readType() - prefixedRef(prefix, sym) + ctx.traceV(traceReadType) { + if (tag < firstLengthTreeTag) readSimpleType() else readLengthType() + } } private def readPackageRef()(implicit ctx: Context): Symbol = { @@ -415,20 +459,25 @@ class TreeUnpickler[Tasty <: TastyUniverse]( if (isType) prior.toTypeName else prior } - private def normalizeFlags(tag: Int, tastyFlags: TastyFlagSet, name: TastyName, isAbsType: Boolean, isClass: Boolean, rhsIsEmpty: Boolean)(implicit ctx: Context): TastyFlagSet = { + private def addInferredFlags(tag: Int, tastyFlags: TastyFlagSet, name: TastyName, isAbsType: Boolean, isClass: Boolean, rhsIsEmpty: Boolean)(implicit ctx: Context): TastyFlagSet = { var flags = tastyFlags + if (flags.is(Given)) + flags |= Implicit val lacksDefinition = rhsIsEmpty && - name.isTermName && !name.isConstructorName && !flags.isOneOf(TermParamOrAccessor) || + name.isTermName && !name.isConstructorName && !flags.isOneOf(FlagSets.TermParamOrAccessor) || isAbsType || flags.is(Opaque) && !isClass if (lacksDefinition && tag != PARAM) flags |= Deferred if (isClass && flags.is(Trait)) flags |= Abstract if (tag === DEFDEF) flags |= Method if (tag === VALDEF) { - if (flags.is(Inline) || ctx.owner.is(Trait)) flags |= FieldAccessor - if (flags.not(Mutable)) flags |= Stable - if (flags.is(SingletonEnumFlags)) flags |= Object // we will encode dotty enum constants as objects (this needs to be corrected in bytecode) + if (flags.is(Inline) || ctx.owner.is(Trait)) + flags |= FieldAccessor + if (flags.not(Mutable)) + flags |= Stable + if (flags.is(Case | Enum)) // singleton enum case + flags 
|= Object | Stable // encode as a module (this needs to be corrected in bytecode) } if (ctx.owner.isClass) { if (tag === TYPEPARAM) flags |= Param @@ -439,7 +488,7 @@ class TreeUnpickler[Tasty <: TastyUniverse]( } } else if (isParamTag(tag)) flags |= Param - if (flags.is(Object)) flags |= (if (tag === VALDEF) ObjectCreationFlags else ObjectClassCreationFlags) + if (flags.is(Object)) flags |= (if (tag === VALDEF) FlagSets.Creation.ObjectDef else FlagSets.Creation.ObjectClassDef) flags } @@ -462,7 +511,7 @@ class TreeUnpickler[Tasty <: TastyUniverse]( createMemberSymbol() case TEMPLATE => val localDummy = ctx.newLocalDummy - registerSym(currentAddr, localDummy) + registerSym(currentAddr, localDummy, rejected = false) localDummy case tag => assert(tag != BIND, "bind pattern symbol creation from TASTy") @@ -473,12 +522,22 @@ class TreeUnpickler[Tasty <: TastyUniverse]( * @return the created symbol */ def createMemberSymbol()(implicit ctx: Context): Symbol = { + + def rejectSymbol(owner: Symbol, name: TastyName, flags: TastyFlagSet): Boolean = { + def isPureMixinCtor = + name == TastyName.MixinConstructor && owner.isTrait && flags.is(Stable) + def isInvisible = + flags.is(Invisible) + + isPureMixinCtor || isInvisible + } + val start = currentAddr val tag = readByte() def isTypeTag = tag === TYPEDEF || tag === TYPEPARAM val end = readEnd() val parsedName: TastyName = readTastyName() - ctx.log(s"$start ::: => create ${astTagToString(tag)} ${parsedName.debug}") + ctx.log(s"${astTagToString(tag)} ${parsedName.debug} in ${location(ctx.owner)}") skipParams() val ttag = nextUnsharedTag val isAbsType = isAbstractType(ttag) @@ -487,54 +546,76 @@ class TreeUnpickler[Tasty <: TastyUniverse]( skipTree() // tpt val rhsIsEmpty = nothingButMods(end) if (!rhsIsEmpty) skipTree() - val (name, flags, annotations, privateWithin) = { - val (parsedFlags, annotations, privateWithin) = - readModifiers(end, readTypedAnnot, readTypedWithin, noSymbol) - val name = normalizeName(isTypeTag, parsedName) - val flags = normalizeFlags(tag, parsedFlags, name, isAbsType, isClass, rhsIsEmpty) - (name, flags, annotations, privateWithin) - } + val (parsedFlags0, annotations, privateWithin) = + readModifiers(end, readTypedAnnot, readTypedWithin, noSymbol) + val name = normalizeName(isTypeTag, parsedName) + val flags = addInferredFlags(tag, parsedFlags0, name, isAbsType, isClass, rhsIsEmpty) + def mkCompleter = new Completer(isClass, subReader(start, end), flags)(ctx.retractMode(IndexScopedStats)) def isTypeParameter = flags.is(Param) && isTypeTag def canEnterInClass = !isTypeParameter ctx.log { - val privateFlag = if (isSymbol(privateWithin)) s"private[$privateWithin] " else "" + val privateFlag = { + if (isSymbol(privateWithin)) { + if (flags.is(Protected)) s"Protected[$privateWithin]" + else s"Private[$privateWithin]" + } + else { + "" + } + } val debugFlags = { if (privateFlag.nonEmpty) { - val given = if (!flags) "" else " " + (flags &~ Private).debug - privateFlag + given + val flags0 = flags &~ Protected + val rest = if (!flags0) "" else s" ${flags0.debug}" + privateFlag + rest } else flags.debug } s"""$start parsed flags $debugFlags""" } + val rejected = rejectSymbol(ctx.owner, name, flags) val sym = { if (tag === TYPEPARAM && ctx.owner.isConstructor) { - ctx.findOuterClassTypeParameter(name.toTypeName) + // TASTy encodes type parameters for constructors + // nsc only has class type parameters + val tparam = ctx.findOuterClassTypeParameter(name.toTypeName) + ctx.log(s"$start reusing class type param ${showSym(tparam)}") + 
tparam } else { - val completer = new Completer(isClass, subReader(start, end), flags)(ctx.retractMode(IndexScopedStats)) ctx.findRootSymbol(roots, name) match { case Some(rootd) => - ctx.adjustSymbol(rootd, flags, completer, privateWithin) // dotty "removes one completion" here from the flags, which is not possible in nsc - ctx.log(s"$start replaced info of ${showSym(rootd)}") - rootd + roots -= rootd + if (rejected) { + ctx.evict(rootd) + noSymbol + } + else { + ctx.redefineSymbol(rootd, flags, mkCompleter, privateWithin) + ctx.log(s"$start replaced info of root ${showSym(rootd)}") + rootd + } case _ => - if (isClass) ctx.delayClassCompletion(ctx.owner, name.toTypeName, completer, privateWithin) - else ctx.delayCompletion(ctx.owner, name, completer, privateWithin) + if (rejected) noSymbol + else if (isClass) ctx.delayClassCompletion(ctx.owner, name.toTypeName, mkCompleter, privateWithin) + else ctx.delayCompletion(ctx.owner, name, mkCompleter, privateWithin) } } - }.ensuring(isSymbol(_), s"${ctx.classRoot}: Could not create symbol at $start") - if (tag == VALDEF && flags.is(SingletonEnumFlags)) - ctx.markAsEnumSingleton(sym) - registerSym(start, sym) - if (canEnterInClass && ctx.owner.isClass) - ctx.enterIfUnseen(sym) - if (isClass) { - val localCtx = ctx.withOwner(sym) - forkAt(templateStart).indexTemplateParams()(localCtx) + } + registerSym(start, sym, rejected) + if (isSymbol(sym)) { + if (tag == VALDEF && flags.is(FlagSets.SingletonEnum)) + ctx.markAsEnumSingleton(sym) + if (canEnterInClass && ctx.owner.isClass) + ctx.enterIfUnseen(sym) + if (isClass) { + ctx.log(s"$templateStart indexing params (may be empty):") + val localCtx = ctx.withOwner(sym) + forkAt(templateStart).indexTemplateParams()(localCtx) + } + ctx.adjustAnnotations(sym, annotations) } goto(start) - ctx.adjustAnnotations(sym, annotations) sym } @@ -554,7 +635,6 @@ class TreeUnpickler[Tasty <: TastyUniverse]( } nextByte match { case PRIVATE => addFlag(Private) - case INTERNAL => addFlag(Internal) case PROTECTED => addFlag(Protected) case ABSTRACT => readByte() @@ -590,11 +670,12 @@ class TreeUnpickler[Tasty <: TastyUniverse]( case HASDEFAULT => addFlag(HasDefault) case STABLE => addFlag(Stable) case EXTENSION => addFlag(Extension) - case GIVEN => addFlag(Implicit) + case GIVEN => addFlag(Given) case PARAMsetter => addFlag(ParamSetter) case PARAMalias => addFlag(ParamAlias) case EXPORTED => addFlag(Exported) case OPEN => addFlag(Open) + case INVISIBLE => addFlag(Invisible) case PRIVATEqualified => readByte() privateWithin = readWithin(ctx) @@ -613,20 +694,27 @@ class TreeUnpickler[Tasty <: TastyUniverse]( private val readTypedWithin: Context => Symbol = implicit ctx => readType().typeSymbolDirect private val readTypedAnnot: Context => DeferredAnnotation = { implicit ctx => - val annotCtx = ctx.addMode(ReadAnnotation) + val annotCtx = ctx.addMode(ReadAnnotTopLevel) val start = currentAddr - ctx.log(s"<<< $start reading annotation:") readByte() // tag val end = readEnd() val annotSym = readType()(annotCtx).typeSymbolDirect - val deferred = readLaterWithOwner(end, rdr => ctx => { - ctx.log(s"${rdr.reader.currentAddr} reading LazyAnnotationRef[${annotSym.fullName}]()") - rdr.readTerm()(ctx) - })(annotCtx.retractMode(IndexScopedStats)) - ctx.log(s">>> $start LazyAnnotationRef[${annotSym.fullName}]()") - DeferredAnnotation.fromTree(deferred) + val annotStart = currentAddr + ctx.log(s"$annotStart collected annotation ${showSym(annotSym)}, starting at $start, ending at $end") + val mkTree = readLaterWithOwner(end, rdr => ctx => 
+ ctx.trace(traceAnnotation(annotStart, annotSym, ctx.owner)) { + rdr.readTerm()(ctx) + } + )(annotCtx.retractMode(IndexScopedStats)) + DeferredAnnotation.fromTree(mkTree) } + private def traceAnnotation(annotStart: Addr, annotSym: Symbol, annotee: Symbol) = TraceInfo[Tree]( + query = s"reading annotation tree", + qual = s"${showSym(annotSym)} at $annotStart", + res = atree => s"annotation of ${showSym(annotee)} = ${showTree(atree)}" + ) + /** Create symbols for the definitions in the statement sequence between * current address and `end`. */ @@ -716,8 +804,9 @@ class TreeUnpickler[Tasty <: TastyUniverse]( } def DefDef(repr: TastyRepr, localCtx: Context)(implicit ctx: Context): Unit = { - val isMacro = repr.originalFlagSet.is(Erased | Macro) - checkUnsupportedFlags(repr.tastyOnlyFlags &~ (Extension | Exported | Infix | optFlag(isMacro)(Erased))) + val isMacro = repr.tflags.is(Erased | Macro) + val supportedFlags = Extension | Exported | Infix | Given | optFlag(isMacro)(Erased) + checkUnsupportedFlags(repr.unsupportedFlags &~ supportedFlags) val isCtor = sym.isConstructor val paramDefss = readParamss()(localCtx).map(_.map(symFromNoCycle)) val typeParams = { @@ -732,7 +821,7 @@ class TreeUnpickler[Tasty <: TastyUniverse]( unsupportedWhen(hasTypeParams, { val noun = ( if (isCtor) "constructor" - else if (repr.tastyOnlyFlags.is(Extension)) "extension method" + else if (repr.unsupportedFlags.is(Extension)) "extension method" else "method" ) s"$noun with unmergeable type parameters: $tname" @@ -750,27 +839,18 @@ class TreeUnpickler[Tasty <: TastyUniverse]( sym.addAnnotation(annot) } val valueParamss = normalizeIfConstructor(vparamss, isCtor) - val resType = effectiveResultType(sym, typeParams, tpt.tpe) + val resType = effectiveResultType(sym, tpt.tpe) ctx.setInfo(sym, defn.DefDefType(if (isCtor) Nil else typeParams, valueParamss, resType)) } def ValDef(repr: TastyRepr, localCtx: Context)(implicit ctx: Context): Unit = { // valdef in TASTy is either a singleton object or a method forwarder to a local value. 
- checkUnsupportedFlags(repr.tastyOnlyFlags &~ (Enum | Extension | Exported)) + checkUnsupportedFlags(repr.unsupportedFlags &~ (Enum | Extension | Exported | Given)) val tpe = readTpt()(localCtx).tpe ctx.setInfo(sym, - if (repr.originalFlagSet.is(SingletonEnumFlags)) { - val enumClass = sym.objectImplementation - val selfTpe = defn.SingleType(sym.owner.thisPrefix, sym) - val ctor = ctx.unsafeNewSymbol( - owner = enumClass, - name = TastyName.Constructor, - flags = Method, - info = defn.DefDefType(Nil, Nil :: Nil, selfTpe) - ) - enumClass.typeOfThis = selfTpe - ctx.setInfo(enumClass, defn.ClassInfoType(intersectionParts(tpe), ctor :: Nil, enumClass)) - prefixedRef(sym.owner.thisPrefix, enumClass) + if (repr.tflags.is(FlagSets.SingletonEnum)) { + ctx.completeEnumSingleton(sym, tpe) + defn.NamedType(sym.owner.thisPrefix, sym.objectImplementation) } else if (sym.isFinal && isConstantType(tpe)) defn.InlineExprType(tpe) else if (sym.isMethod) defn.ExprType(tpe) @@ -779,19 +859,20 @@ class TreeUnpickler[Tasty <: TastyUniverse]( } def TypeDef(repr: TastyRepr, localCtx: Context)(implicit ctx: Context): Unit = { - val allowedShared = Enum | Opaque | Infix + val allowedShared = Enum | Opaque | Infix | Given val allowedTypeFlags = allowedShared | Exported val allowedClassFlags = allowedShared | Open | Transparent if (sym.isClass) { - checkUnsupportedFlags(repr.tastyOnlyFlags &~ allowedClassFlags) - sym.owner.ensureCompleted() + checkUnsupportedFlags(repr.unsupportedFlags &~ allowedClassFlags) + sym.owner.ensureCompleted(CompleteOwner) readTemplate()(localCtx) } else { - checkUnsupportedFlags(repr.tastyOnlyFlags &~ allowedTypeFlags) - val rhs = readTpt()(if (repr.originalFlagSet.is(Opaque)) localCtx.addMode(OpaqueTypeDef) else localCtx) + checkUnsupportedFlags(repr.unsupportedFlags &~ allowedTypeFlags) + sym.info = defn.InitialTypeInfo // needed to avoid cyclic references when unpickling rhs, see dotty_i3816.scala + val rhs = readTpt()(if (repr.tflags.is(Opaque)) localCtx.addMode(OpaqueTypeDef) else localCtx) val info = - if (repr.originalFlagSet.is(Opaque)) { + if (repr.tflags.is(Opaque)) { val (info, alias) = defn.OpaqueTypeToBounds(rhs.tpe) ctx.markAsOpaqueType(sym, alias) info @@ -803,45 +884,81 @@ class TreeUnpickler[Tasty <: TastyUniverse]( } def TermParam(repr: TastyRepr, localCtx: Context)(implicit ctx: Context): Unit = { - checkUnsupportedFlags(repr.tastyOnlyFlags &~ (ParamAlias | Exported)) + checkUnsupportedFlags(repr.unsupportedFlags &~ (ParamAlias | Exported | Given)) val tpt = readTpt()(localCtx) ctx.setInfo(sym, if (nothingButMods(end) && sym.not(ParamSetter)) tpt.tpe else defn.ExprType(tpt.tpe)) } - def initialize()(implicit ctx: Context): Unit = { - val repr = sym.rawInfo match { - case repr: TastyRepr => repr - case _ => return () // nothing to do here (assume correctly initalised) - } - ctx.log(s"$symAddr completing ${showSym(sym)} in scope ${showSym(ctx.owner)}") - val localCtx = ctx.withOwner(sym) - tag match { - case DEFDEF => DefDef(repr, localCtx) - case VALDEF => ValDef(repr, localCtx) - case TYPEDEF | TYPEPARAM => TypeDef(repr, localCtx) - case PARAM => TermParam(repr, localCtx) + def initialize(localCtx: Context)(implicit ctx: Context): Unit = ctx.trace(traceCompletion(symAddr, sym)) { + sym.rawInfo match { + case repr: TastyRepr => + tag match { + case DEFDEF => DefDef(repr, localCtx) + case VALDEF => ValDef(repr, localCtx) + case TYPEDEF | TYPEPARAM => TypeDef(repr, localCtx) + case PARAM => TermParam(repr, localCtx) + } + case _ => // nothing to do here (assume correctly 
initialised) + ctx.log(s"${showSym(sym)} is already initialised, in owner ${showSym(sym.owner)}") } } try { - initialize() - ctx.log(s"$symAddr @@@ ${showSym(sym)}.tpe =:= '[${if (sym.isType) sym.tpe else sym.info}]; owned by ${location(sym.owner)}") + val localCtx = ctx.withOwner(sym) + if (sym.isClass) { + inIndexScopedStatsContext(localCtx0 => initialize(localCtx0)(ctx))(localCtx) + } + else { + initialize(localCtx) + } NoCycle(at = symAddr) } catch ctx.onCompletionError(sym) finally goto(end) } + private def traceCompletion(addr: Addr, sym: Symbol)(implicit ctx: Context) = TraceInfo[Unit]( + query = "begin completion", + qual = s"${showSym(sym)} in context ${showSym(ctx.owner)} $addr", + res = _ => s"completed ${showSym(sym)}: ${showType(sym.info)}" + ) + private def readTemplate()(implicit ctx: Context): Unit = { val cls = ctx.enterClassCompletion() val localDummy = symbolAtCurrent() assert(readByte() === TEMPLATE) val end = readEnd() - def completeTypeParameters()(implicit ctx: Context): List[Symbol] = { - ctx.log(s"$currentAddr Template: reading parameters of $cls:") + def traceCompleteParams = TraceInfo[List[Symbol]]( + query = "force template parameters", + qual = s"${showSym(cls)} $currentAddr", + res = _ => "forced template parameters" + ) + + def traceIndexMembers = TraceInfo[Unit]( + query = "index template body", + qual = s"${showSym(cls)} $currentAddr", + res = _ => "indexed template body" + ) + + def traceCollectParents = TraceInfo[List[Type]]( + query = "collect template parents", + qual = s"${showSym(cls)} $currentAddr", + res = { parentTypes => + val addendum = parentTypes.map(lzyShow).mkString(s"`${cls.fullName} extends ", " with ", "`") + s"collected template parents $addendum" + } + ) + + def traceReadSelf = TraceInfo[Type]( + query = "reading template self-type", + qual = s"${showSym(cls)} $currentAddr", + res = tpe => s"template self-type is $tpe" + ) + + def completeParameters()(implicit ctx: Context): List[Symbol] = ctx.trace(traceCompleteParams) { val tparams = readIndexedParams[NoCycle](TYPEPARAM).map(symFromNoCycle) if (tparams.nonEmpty) { cls.info = defn.PolyType(tparams, cls.info) @@ -850,41 +967,35 @@ tparams } - def indexMembers()(implicit ctx: Context): Unit = { - ctx.log(s"$currentAddr Template: indexing members of $cls:") + def indexMembers()(implicit ctx: Context): Unit = ctx.trace(traceIndexMembers) { val bodyIndexer = fork while (bodyIndexer.reader.nextByte != DEFDEF) bodyIndexer.skipTree() // skip until primary ctor bodyIndexer.indexStats(end) } - def traverseParents()(implicit ctx: Context): List[Type] = { - ctx.log(s"$currentAddr Template: adding parents of $cls:") + def collectParents()(implicit ctx: Context): List[Type] = ctx.trace(traceCollectParents) { val parentCtx = ctx.withOwner(localDummy).addMode(ReadParents) val parentWithOuter = parentCtx.addMode(OuterTerm) collectWhile(nextByte != SELFDEF && nextByte != DEFDEF) { - nextUnsharedTag match { - case APPLY | TYPEAPPLY | BLOCK => readTerm()(parentWithOuter).tpe - case _ => readTpt()(parentCtx).tpe - } + defn.adjustParent( + nextUnsharedTag match { + case APPLY | TYPEAPPLY | BLOCK => readTerm()(parentWithOuter).tpe + case _ => readTpt()(parentCtx).tpe + } + ) } } def addSelfDef()(implicit ctx: Context): Unit = { - ctx.log(s"$currentAddr Template: adding self-type of $cls:") - readByte() // read SELFDEF tag - readLongNat() // skip Name - val selfTpe = readTpt().tpe - ctx.log(s"$currentAddr Template: self-type is $selfTpe") + val selfTpe = 
ctx.trace(traceReadSelf) { + readByte() // read SELFDEF tag + readLongNat() // skip Name + readTpt().tpe + } cls.typeOfThis = selfTpe } def setInfoWithParents(tparams: List[Symbol], parentTypes: List[Type])(implicit ctx: Context): Unit = { - def debugMsg = { - val addendum = - if (parentTypes.isEmpty) "" - else parentTypes.map(lzyShow).mkString(" extends ", " with ", "") // don't force types - s"$currentAddr Template: Updated info of $cls$addendum" - } val info = { val classInfo = defn.ClassInfoType(parentTypes, cls) // TODO [tasty]: if support opaque types, refine the self type with any opaque members here @@ -892,21 +1003,19 @@ class TreeUnpickler[Tasty <: TastyUniverse]( else defn.PolyType(tparams, classInfo) } ctx.setInfo(cls, info) - ctx.log(debugMsg) } def traverseTemplate()(implicit ctx: Context): Unit = { - val tparams = completeTypeParameters() + val tparams = completeParameters() indexMembers() - val parents = traverseParents() + val parents = collectParents() if (nextByte === SELFDEF) { addSelfDef() } - val parentTypes = ctx.adjustParents(cls, parents) - setInfoWithParents(tparams, parentTypes) + setInfoWithParents(tparams, ctx.processParents(cls, parents)) } - inIndexScopedStatsContext(traverseTemplate()(_)) + traverseTemplate() } @@ -953,7 +1062,12 @@ class TreeUnpickler[Tasty <: TastyUniverse]( def readTerm()(implicit ctx: Context): Tree = { val start = currentAddr val tag = readByte() - ctx.log(s"$start reading term ${astTagToString(tag)}:") + + def traceReadTerm = TraceInfo[Tree]( + query = "reading term", + qual = s"${astTagToString(tag)} $start", + res = tree => s"exit term `${showTree(tree)}` ${astTagToString(tag)} $start" + ) def inParentCtor = ctx.mode.is(ReadParents | OuterTerm) @@ -964,7 +1078,7 @@ class TreeUnpickler[Tasty <: TastyUniverse]( def readQualId(): (TastyName.TypeName, Type) = { val qual = readTerm() - (qual.typeIdent, defn.ThisType(symOfTypeRef(qual.tpe))) + (qual.typeIdent, defn.ThisType(qual.tpe)) } def completeSelectType(name: TastyName.TypeName)(implicit ctx: Context): Tree = completeSelect(name) @@ -1001,7 +1115,7 @@ class TreeUnpickler[Tasty <: TastyUniverse]( (tag: @switch) match { case SELECTin => val name = readTastyName() - val qual = readTerm() + val qual = readTerm() if (inParentCtor) { assert(name.isSignedConstructor, s"Parent of ${ctx.owner} is not a constructor.") skipTree() @@ -1020,7 +1134,8 @@ class TreeUnpickler[Tasty <: TastyUniverse]( until(end)(skipTree()) tpd.TypeTree(fnResult(fn.tpe)) } else { - tpd.Apply(fn, until(end)(readTerm())) + val argsCtx = ctx.argumentCtx(fn) + tpd.Apply(fn, until(end)(readTerm()(argsCtx))) } case TYPEAPPLY => tpd.TypeApply(readTerm(), until(end)(readTpt())) case TYPED => tpd.Typed(readTerm(), readTpt()) @@ -1032,7 +1147,7 @@ class TreeUnpickler[Tasty <: TastyUniverse]( tpd.SeqLiteral(until(end)(readTerm()), elemtpt) case REFINEDtpt => val refineCls = symAtAddr.getOrElse(start, ctx.newRefinementClassSymbol) - registerSym(start, refineCls) + registerSym(start, refineCls, rejected = false) typeAtAddr(start) = refineCls.ref val parent = readTpt() ctx.withOwner(refineCls).enterRefinement(parent.tpe) { refinedCtx => @@ -1044,7 +1159,7 @@ class TreeUnpickler[Tasty <: TastyUniverse]( // wrong number of arguments in some scenarios reading F-bounded // types. This came up in #137 of collection strawman. 
tpd.AppliedTypeTree(readTpt(), until(end)(readTpt())) - case ANNOTATEDtpt => tpd.Annotated(readTpt(), readTerm()(ctx.addMode(ReadAnnotation))) + case ANNOTATEDtpt => tpd.Annotated(readTpt(), readTerm()(ctx.addMode(ReadAnnotTopLevel))) case LAMBDAtpt => tpd.LambdaTypeTree(readParams[NoCycle](TYPEPARAM).map(symFromNoCycle), readTpt()) case MATCHtpt => matchTypeIsUnsupported case TYPEBOUNDStpt => @@ -1081,14 +1196,16 @@ class TreeUnpickler[Tasty <: TastyUniverse]( case UNAPPLY => unsupportedTermTreeError("unapply pattern") case INLINED => unsupportedTermTreeError("inlined expression") case SELECTouter => metaprogrammingIsUnsupported // only within inline - case HOLE => assertNoMacroHole + case HOLE => abortMacroHole case _ => readPathTerm() } assert(currentAddr === end, s"$start $currentAddr $end ${astTagToString(tag)}") result } - if (tag < firstLengthTreeTag) readSimpleTerm() else readLengthTerm() // dotty sets span of tree to start + ctx.traceV(traceReadTerm) { + if (tag < firstLengthTreeTag) readSimpleTerm() else readLengthTerm() // dotty sets span of tree to start + } } def readTpt()(implicit ctx: Context): Tree = { @@ -1098,7 +1215,7 @@ class TreeUnpickler[Tasty <: TastyUniverse]( forkAt(readAddr()).readTpt() case BLOCK => // BLOCK appears in type position when quoting a type, but only in the body of a method metaprogrammingIsUnsupported - case HOLE => assertNoMacroHole + case HOLE => abortMacroHole case tag => if (isTypeTreeTag(tag)) readTerm()(ctx.retractMode(OuterTerm)) else { @@ -1112,7 +1229,7 @@ class TreeUnpickler[Tasty <: TastyUniverse]( /** * A HOLE should never appear in TASTy for a top level class, only in quotes. */ - private def assertNoMacroHole[T]: T = assertError("Scala 3 macro hole in pickled TASTy") + private def abortMacroHole[T]: T = abortWith(msg = "Scala 3 macro hole in pickled TASTy") private def metaprogrammingIsUnsupported[T](implicit ctx: Context): T = unsupportedError("Scala 3 metaprogramming features") @@ -1126,21 +1243,27 @@ class TreeUnpickler[Tasty <: TastyUniverse]( } def readWith[T <: AnyRef]( - reader: TreeReader, + treader: TreeReader, owner: Symbol, mode: TastyMode, source: AbstractFile, op: TreeReader => Context => T)( implicit ctx: Context - ): T = + ): T = ctx.trace[T](traceReadWith(treader, mode, owner)) { ctx.withPhaseNoLater("pickler") { ctx0 => - ctx0.log(s"${reader.reader.currentAddr} starting to read with owner ${location(owner)}:") - op(reader)(ctx0 + op(treader)(ctx0 .withOwner(owner) .withMode(mode) .withSource(source) ) } + } + + private def traceReadWith[T](treader: TreeReader, mode: TastyMode, owner: Symbol) = TraceInfo[T]( + query = "read within owner", + qual = s"${showSym(owner)} with modes `${mode.debug}` at ${treader.reader.currentAddr}", + res = t => s"exiting sub reader" + ) /** A lazy datastructure that records how definitions are nested in TASTY data. 
* The structure is lazy because it needs to be computed only for forward references diff --git a/src/compiler/scala/tools/nsc/tasty/bridge/AnnotationOps.scala b/src/compiler/scala/tools/nsc/tasty/bridge/AnnotationOps.scala index 948bbc2868e7..da033324bd42 100644 --- a/src/compiler/scala/tools/nsc/tasty/bridge/AnnotationOps.scala +++ b/src/compiler/scala/tools/nsc/tasty/bridge/AnnotationOps.scala @@ -47,7 +47,6 @@ trait AnnotationOps { self: TastyUniverse => class FromTree(tree: Symbol => Context => Tree) extends DeferredAnnotation { private[bridge] def eager(annotee: Symbol)(implicit ctx: Context): u.AnnotationInfo = { val atree = tree(annotee)(ctx) - ctx.log(s"annotation on $annotee: $atree") val annot = mkAnnotation(atree) val annotSym = annot.tpe.typeSymbol if ((annotSym eq defn.TargetNameAnnotationClass) || (annotSym eq defn.StaticMethodAnnotationClass)) { diff --git a/src/compiler/scala/tools/nsc/tasty/bridge/ContextOps.scala b/src/compiler/scala/tools/nsc/tasty/bridge/ContextOps.scala index de66f846786e..c4f5aeec6fc9 100644 --- a/src/compiler/scala/tools/nsc/tasty/bridge/ContextOps.scala +++ b/src/compiler/scala/tools/nsc/tasty/bridge/ContextOps.scala @@ -13,12 +13,17 @@ package scala.tools.nsc.tasty.bridge import scala.annotation.tailrec + +import scala.collection.mutable import scala.reflect.io.AbstractFile +import scala.reflect.internal.MissingRequirementError import scala.tools.tasty.{TastyName, TastyFlags}, TastyFlags._, TastyName.ObjectName import scala.tools.nsc.tasty.{TastyUniverse, TastyModes, SafeEq}, TastyModes._ -import scala.reflect.internal.MissingRequirementError -import scala.collection.mutable +import scala.tools.nsc.tasty.{cyan, yellow, magenta, blue, green} + +import scala.util.chaining._ + /**This contains the definition for `Context`, along with standard error throwing capabilities with user friendly * formatted errors that can change their output depending on the context mode. @@ -59,26 +64,32 @@ trait ContextOps { self: TastyUniverse => } final def location(owner: Symbol): String = { - if (owner.isClass) s"${owner.kindString} ${owner.fullNameString}" - else s"${describeOwner(owner)} in ${location(owner.owner)}" + if (!isSymbol(owner)) + "" + else if (owner.isClass || owner.isPackageClass || owner.isPackageObjectOrClass) + s"${owner.kindString} ${owner.fullNameString}" + else + s"${describeOwner(owner)} in ${location(owner.owner)}" } @inline final def typeError[T](msg: String): T = throw new u.TypeError(msg) - @inline final def assertError[T](msg: String): T = - throw new AssertionError(s"assertion failed: ${u.supplementErrorMessage(msg)}") + final def abortWith[T](msg: String): T = { + u.assert(false, msg) + ??? + } @inline final def assert(assertion: Boolean, msg: => Any): Unit = - if (!assertion) assertError(String.valueOf(msg)) + u.assert(assertion, msg) @inline final def assert(assertion: Boolean): Unit = - if (!assertion) assertError("") + u.assert(assertion, "") private final def findObject(owner: Symbol, name: u.Name): Symbol = { val scope = if (owner != null && owner.isClass) owner.rawInfo.decls else u.EmptyScope - val it = scope.lookupAll(name).filter(_.isModule) + val it = scope.lookupAll(name).withFilter(_.isModule) if (it.hasNext) it.next() else u.NoSymbol //throw new AssertionError(s"no module $name in ${location(owner)}") } @@ -108,16 +119,51 @@ trait ContextOps { self: TastyUniverse => * sealed child. 
*/ private def analyseAnnotations(sym: Symbol)(implicit ctx: Context): Unit = { + + def lookupChild(childTpe: Type): Symbol = { + val child = symOfType(childTpe) + assert(isSymbol(child), s"did not find symbol of sealed child ${showType(childTpe)}") + if (child.isClass) { + child + } + else { + assert(child.isModule, s"sealed child was not class or object ${showSym(child)}") + child.moduleClass + } + } + for (annot <- sym.annotations) { annot.completeInfo() if (annot.tpe.typeSymbolDirect === defn.ChildAnnot) { - val child = annot.tpe.typeArgs.head.typeSymbolDirect - sym.addChild(child) + val child = { + val child0 = lookupChild(annot.tpe.typeArgs.head) + if (child0 eq sym) { + // dotty represents a local sealed child of `C` with a child annotation + // that directly references `C`; this causes an infinite loop in + // `sealedDescendants`. See the tests: + // - test/tasty/neg/src-3/dottyi3149/dotty_i3149.scala + // - test/tasty/neg/src-2/Testdotty_i3149_fail.scala + // TODO [tasty] - fix assumption in compiler that sealed children cannot + // contain the parent class + ctx.newLocalSealedChildProxy(sym) + } + else { + child0 + } + } ctx.log(s"adding sealed child ${showSym(child)} to ${showSym(sym)}") + sym.addChild(child) } } } + final case class TraceInfo[-T](query: String, qual: String, res: T => String, modifiers: List[String] = Nil) + + trait TraceFrame { + def parent: TraceFrame + def id: String + } + /**Maintains state through traversal of a TASTy file, such as the outer scope of the definition being traversed, the * traversal mode, and the root owners and source path for the TASTy file. * It also provides all operations for manipulation of the symbol table, such as creating/updating symbols and @@ -144,21 +190,50 @@ trait ContextOps { self: TastyUniverse => final def globallyVisibleOwner: Symbol = owner.logicallyEnclosingMember final def ignoreAnnotations: Boolean = u.settings.YtastyNoAnnotations - final def verboseDebug: Boolean = u.settings.debug def requiresLatentEntry(decl: Symbol): Boolean = decl.isScala3Inline - def neverEntered(decl: Symbol): Boolean = decl.isPureMixinCtor def canEnterOverload(decl: Symbol): Boolean = { !(decl.isModule && isSymbol(findObject(thisCtx.owner, decl.name))) } final def log(str: => String): Unit = { - if (u.settings.YdebugTasty) - u.reporter.echo( - pos = u.NoPosition, - msg = str.linesIterator.map(line => s"#[$classRoot]: $line").mkString(System.lineSeparator) + if (u.settings.YdebugTasty) { + logImpl(str) + } + } + + private final def logImpl(str: => String): Unit = u.reporter.echo( + pos = u.NoPosition, + msg = str + .linesIterator + .map(line => s"${blue(s"${showSymStable(classRoot)}:")} $line") + .mkString(System.lineSeparator) + ) + + @inline final def trace[T](info: => TraceInfo[T])(op: => T): T = { + + def addInfo(i: TraceInfo[T], op: => T)(frame: TraceFrame): T = { + val id0 = frame.id + val modStr = ( + if (i.modifiers.isEmpty) "" + else " " + green(i.modifiers.mkString("[", ",", "]")) ) + logImpl(s"${yellow(id0)} ${cyan(s"<<< ${i.query}:")} ${magenta(i.qual)}$modStr") + op.tap(eval => logImpl(s"${yellow(id0)} ${cyan(s">>>")} ${magenta(i.res(eval))}$modStr")) + } + + if (u.settings.YdebugTasty) initialContext.subTrace(addInfo(info, op)) + else op + } + + /** Trace only when `-Vdebug` is set + */ + @inline final def traceV[T](info: => TraceInfo[T])(op: => T): T = { + if (u.settings.debug.value) { + trace(info)(op) + } + else op } def owner: Symbol @@ -190,8 +265,40 @@ trait ContextOps { self: TastyUniverse => final def newLocalDummy: Symbol =
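// ---------------------------------------------------------------------------
// Editor's sketch (illustrative only, not part of the patch): how the tracing
// API introduced above is meant to be used. A call site builds a `TraceInfo`
// lazily and wraps the traced operation in `ctx.trace`; the entry ("<<<") and
// exit (">>>") lines are only rendered under `-Ydebug-tasty`, and `ctx.traceV`
// additionally requires `-Vdebug`. The names `traceResolveName`, `resolveName`
// and `resolveNameImpl` below are hypothetical.
//
//   private def traceResolveName(name: TastyName) = TraceInfo[Symbol](
//     query = "resolving name",                      // printed on the entry line
//     qual  = s"$name",                              // qualifier for the entry line
//     res   = sym => s"resolved to ${showSym(sym)}"  // printed on the exit line
//   )
//
//   def resolveName(name: TastyName)(implicit ctx: Context): Symbol =
//     ctx.trace(traceResolveName(name)) {
//       resolveNameImpl(name)                        // hypothetical traced body
//     }
// ---------------------------------------------------------------------------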
owner.newLocalDummy(u.NoPosition) - final def newWildcardSym(info: Type): Symbol = - owner.newTypeParameter(u.nme.WILDCARD.toTypeName, u.NoPosition, u.NoFlags).setInfo(info) + final def newWildcard(info: Type): Symbol = + owner.newTypeParameter( + name = u.freshTypeName("_$")(u.currentFreshNameCreator), + pos = u.NoPosition, + newFlags = FlagSets.Creation.Wildcard + ).setInfo(info) + + final def newConstructor(owner: Symbol, info: Type): Symbol = unsafeNewSymbol( + owner = owner, + name = TastyName.Constructor, + flags = Method, + info = info + ) + + final def newLocalSealedChildProxy(cls: Symbol): Symbol = { + val tflags = Private | Local + unsafeNewClassSymbol( + owner = cls, + typeName = TastyName.SimpleName(cls.fullName('$') + "$$localSealedChildProxy").toTypeName, + flags = tflags, + info = defn.LocalSealedChildProxyInfo(cls, tflags), + privateWithin = u.NoSymbol + ) + } + + final def newLambdaParameter(tname: TastyName, flags: TastyFlagSet, idx: Int, infoDb: Int => Type): Symbol = { + val flags1 = flags | Param + unsafeNewSymbol( + owner = owner, + name = tname, + flags = flags1, + info = defn.LambdaParamInfo(flags1, idx, infoDb) + ) + } final def findRootSymbol(roots: Set[Symbol], name: TastyName): Option[Symbol] = { import TastyName.TypeName @@ -218,7 +325,8 @@ trait ContextOps { self: TastyUniverse => final def newRefinementSymbol(parent: Type, owner: Symbol, name: TastyName, tpe: Type): Symbol = { val overridden = parent.member(encodeTastyName(name)) val isOverride = isSymbol(overridden) - var flags = if (isOverride && overridden.isType) Override else EmptyTastyFlags + var flags = EmptyTastyFlags + if (isOverride && overridden.isType) flags |= Override val info = { if (name.isTermName) { flags |= Method | Deferred @@ -243,11 +351,11 @@ trait ContextOps { self: TastyUniverse => /** Guards the creation of an object val by checking for an existing definition in the owner's scope */ final def delayCompletion(owner: Symbol, name: TastyName, completer: TastyCompleter, privateWithin: Symbol = noSymbol): Symbol = { - def default() = unsafeNewSymbol(owner, name, completer.originalFlagSet, completer, privateWithin) - if (completer.originalFlagSet.is(Object)) { + def default() = unsafeNewSymbol(owner, name, completer.tflags, completer, privateWithin) + if (completer.tflags.is(Object)) { val sourceObject = findObject(owner, encodeTermName(name)) if (isSymbol(sourceObject)) - adjustSymbol(sourceObject, completer.originalFlagSet, completer, privateWithin) + redefineSymbol(sourceObject, completer.tflags, completer, privateWithin) else default() } @@ -259,11 +367,11 @@ trait ContextOps { self: TastyUniverse => /** Guards the creation of an object class by checking for an existing definition in the owner's scope */ final def delayClassCompletion(owner: Symbol, typeName: TastyName.TypeName, completer: TastyCompleter, privateWithin: Symbol): Symbol = { - def default() = unsafeNewClassSymbol(owner, typeName, completer.originalFlagSet, completer, privateWithin) - if (completer.originalFlagSet.is(Object)) { + def default() = unsafeNewClassSymbol(owner, typeName, completer.tflags, completer, privateWithin) + if (completer.tflags.is(Object)) { val sourceObject = findObject(owner, encodeTermName(typeName.toTermName)) if (isSymbol(sourceObject)) - adjustSymbol(sourceObject.objectImplementation, completer.originalFlagSet, completer, privateWithin) + redefineSymbol(sourceObject.objectImplementation, completer.tflags, completer, privateWithin) else default() } @@ -272,11 +380,18 @@ trait ContextOps { self: 
TastyUniverse => } } + def evict(sym: Symbol): Unit = { + if (isSymbol(sym)) { + sym.owner.rawInfo.decls.unlink(sym) + sym.info = u.NoType + } + } + final def enterIfUnseen(sym: Symbol): Unit = { - if (mode.is(IndexScopedStats)) - initialContext.collectLatentEvidence(owner, sym) val decl = declaringSymbolOf(sym) - if (!(requiresLatentEntry(decl) || neverEntered(decl))) + if (mode.is(IndexScopedStats)) + initialContext.collectLatentEvidence(owner, decl) + if (!requiresLatentEntry(decl)) enterIfUnseen0(owner.rawInfo.decls, decl) } @@ -293,84 +408,88 @@ trait ContextOps { self: TastyUniverse => /** Unsafe to call for creation of a object val, prefer `delayCompletion` if info is a LazyType */ - final def unsafeNewSymbol(owner: Symbol, name: TastyName, flags: TastyFlagSet, info: Type, privateWithin: Symbol = noSymbol): Symbol = - adjustSymbol(unsafeNewUntypedSymbol(owner, name, flags), info, privateWithin) + private def unsafeNewSymbol(owner: Symbol, name: TastyName, flags: TastyFlagSet, info: Type, privateWithin: Symbol = noSymbol): Symbol = + unsafeSetInfoAndPrivate(unsafeNewUntypedSymbol(owner, name, flags), info, privateWithin) /** Unsafe to call for creation of a object class, prefer `delayClassCompletion` if info is a LazyType */ - final def unsafeNewClassSymbol(owner: Symbol, typeName: TastyName.TypeName, flags: TastyFlagSet, info: Type, privateWithin: Symbol): Symbol = - adjustSymbol(unsafeNewUntypedClassSymbol(owner, typeName, flags), info, privateWithin) + private def unsafeNewClassSymbol(owner: Symbol, typeName: TastyName.TypeName, flags: TastyFlagSet, info: Type, privateWithin: Symbol): Symbol = + unsafeSetInfoAndPrivate(unsafeNewUntypedClassSymbol(owner, typeName, flags), info, privateWithin) private final def unsafeNewUntypedSymbol(owner: Symbol, name: TastyName, flags: TastyFlagSet): Symbol = { if (flags.isOneOf(Param | ParamSetter)) { if (name.isTypeName) { - owner.newTypeParameter(encodeTypeName(name.toTypeName), u.NoPosition, encodeFlagSet(flags)) + owner.newTypeParameter(encodeTypeName(name.toTypeName), u.NoPosition, newSymbolFlagSet(flags)) } else { - if (owner.isClass && flags.is(FlagSets.FieldAccessorFlags)) { - val fieldFlags = flags &~ FlagSets.FieldAccessorFlags | FlagSets.LocalFieldFlags + if (owner.isClass && flags.is(FlagSets.FieldGetter)) { + val fieldFlags = flags &~ FlagSets.FieldGetter | FlagSets.LocalField val termName = encodeTermName(name) - val getter = owner.newMethodSymbol(termName, u.NoPosition, encodeFlagSet(flags)) - val fieldSym = owner.newValue(termName, u.NoPosition, encodeFlagSet(fieldFlags)) + val getter = owner.newMethodSymbol(termName, u.NoPosition, newSymbolFlagSet(flags)) + val fieldSym = owner.newValue(termName, u.NoPosition, newSymbolFlagSet(fieldFlags)) fieldSym.info = defn.CopyInfo(getter, fieldFlags) owner.rawInfo.decls.enter(fieldSym) getter } else { - owner.newValueParameter(encodeTermName(name), u.NoPosition, encodeFlagSet(flags)) + owner.newValueParameter(encodeTermName(name), u.NoPosition, newSymbolFlagSet(flags)) } } } - else if (name === TastyName.Constructor) { - owner.newConstructor(u.NoPosition, encodeFlagSet(flags &~ Stable)) - } - else if (name === TastyName.MixinConstructor) { - owner.newMethodSymbol(u.nme.MIXIN_CONSTRUCTOR, u.NoPosition, encodeFlagSet(flags &~ Stable)) - } - else if (flags.is(FlagSets.ObjectCreationFlags)) { - log(s"!!! 
visited module value $name first") - assert(!owner.rawInfo.decls.lookupAll(encodeTermName(name)).exists(_.isModule)) - val module = owner.newModule(encodeTermName(name), u.NoPosition, encodeFlagSet(flags)) - module.moduleClass.info = defn.DefaultInfo + else if (flags.is(FlagSets.Creation.ObjectDef)) { + val isEnum = flags.is(FlagSets.SingletonEnum) + if (!isEnum) { + log(s"!!! visited module value $name first") + } + val module = owner.newModule(encodeTermName(name), u.NoPosition, newSymbolFlagSet(flags)) + module.moduleClass.info = + if (isEnum) defn.SingletonEnumClassInfo(module, flags) + else defn.DefaultInfo module } else if (name.isTypeName) { - owner.newTypeSymbol(encodeTypeName(name.toTypeName), u.NoPosition, encodeFlagSet(flags)) + owner.newTypeSymbol(encodeTypeName(name.toTypeName), u.NoPosition, newSymbolFlagSet(flags)) + } + else if (name === TastyName.Constructor) { + owner.newConstructor(u.NoPosition, newSymbolFlagSet(flags &~ Stable)) + } + else if (name === TastyName.MixinConstructor) { + owner.newMethodSymbol(u.nme.MIXIN_CONSTRUCTOR, u.NoPosition, newSymbolFlagSet(flags &~ Stable)) } else { - owner.newMethodSymbol(encodeTermName(name), u.NoPosition, encodeFlagSet(flags)) + owner.newMethodSymbol(encodeTermName(name), u.NoPosition, newSymbolFlagSet(flags)) } } private final def unsafeNewUntypedClassSymbol(owner: Symbol, typeName: TastyName.TypeName, flags: TastyFlagSet): Symbol = { - if (flags.is(FlagSets.ObjectClassCreationFlags)) { + if (flags.is(FlagSets.Creation.ObjectClassDef)) { log(s"!!! visited module class $typeName first") - val module = owner.newModule(encodeTermName(typeName), u.NoPosition, encodeFlagSet(FlagSets.ObjectCreationFlags)) + val module = owner.newModule(encodeTermName(typeName), u.NoPosition, FlagSets.Creation.Default) module.info = defn.DefaultInfo - module.moduleClass.flags = encodeFlagSet(flags) + module.moduleClass.flags = newSymbolFlagSet(flags) module.moduleClass } else { - owner.newClassSymbol(encodeTypeName(typeName), u.NoPosition, encodeFlagSet(flags)) + owner.newClassSymbol(encodeTypeName(typeName), u.NoPosition, newSymbolFlagSet(flags)) } } final def enterClassCompletion(): Symbol = { val cls = globallyVisibleOwner.asClass - val assumedSelfType = - if (cls.is(Object) && cls.owner.isClass) defn.SingleType(cls.owner.thisType, cls.sourceModule) - else u.NoType - cls.info = u.ClassInfoType(cls.repr.parents, cls.repr.decls, assumedSelfType.typeSymbolDirect) + val assumedSelfSym = { + if (cls.is(Object) && cls.owner.isClass) { + cls.sourceModule + } + else { + u.NoSymbol + } + } + cls.info = u.ClassInfoType(cls.repr.parents, cls.repr.decls, assumedSelfSym) cls } - /** Normalises the parents and sets up value class machinery */ - final def adjustParents(cls: Symbol, parents: List[Type]): List[Type] = { - val parentTypes = parents.map { tp => - val tpe = tp.dealias - if (tpe.typeSymbolDirect === u.definitions.ObjectClass) u.definitions.AnyRefTpe - else tpe - } + /** sets up value class machinery */ + final def processParents(cls: Symbol, parentTypes: List[Type]): parentTypes.type = { if (parentTypes.head.typeSymbolDirect === u.definitions.AnyValClass) { // TODO [tasty]: please reconsider if there is some shared optimised logic that can be triggered instead. 
withPhaseNoLater("extmethods") { ctx0 => @@ -388,16 +507,29 @@ trait ContextOps { self: TastyUniverse => parentTypes } - final def removeFlags(symbol: Symbol, flags: TastyFlagSet): symbol.type = - symbol.resetFlag(encodeFlagSet(flags)) + private[bridge] final def resetFlag0(symbol: Symbol, flags: u.FlagSet): symbol.type = + symbol.resetFlag(flags) - final def addFlags(symbol: Symbol, flags: TastyFlagSet): symbol.type = - symbol.setFlag(encodeFlagSet(flags)) + final def completeEnumSingleton(sym: Symbol, tpe: Type): Unit = { + val moduleCls = sym.moduleClass + val moduleClsFlags = FlagSets.withAccess( + flags = FlagSets.Creation.ObjectClassDef, + inheritedAccess = sym.repr.tflags + ) + val selfTpe = defn.SingleType(sym.owner.thisPrefix, sym) + val ctor = newConstructor(moduleCls, selfTpe) + moduleCls.typeOfThis = selfTpe + moduleCls.flags = newSymbolFlagSet(moduleClsFlags) + moduleCls.info = defn.ClassInfoType(intersectionParts(tpe), ctor :: Nil, moduleCls) + moduleCls.privateWithin = sym.privateWithin + } - final def adjustSymbol(symbol: Symbol, flags: TastyFlagSet, info: Type, privateWithin: Symbol): symbol.type = - adjustSymbol(addFlags(symbol, flags), info, privateWithin) + final def redefineSymbol(symbol: Symbol, flags: TastyFlagSet, completer: TastyCompleter, privateWithin: Symbol): symbol.type = { + symbol.flags = newSymbolFlagSet(flags) + unsafeSetInfoAndPrivate(symbol, completer, privateWithin) + } - final def adjustSymbol(symbol: Symbol, info: Type, privateWithin: Symbol): symbol.type = { + private def unsafeSetInfoAndPrivate(symbol: Symbol, info: Type, privateWithin: Symbol): symbol.type = { symbol.privateWithin = privateWithin symbol.info = info symbol @@ -425,10 +557,13 @@ trait ContextOps { self: TastyUniverse => final def newRefinementClassSymbol: Symbol = owner.newRefinementClass(u.NoPosition) + final def argumentCtx(fn: Tree): Context = + if (fn.symbol.isPrimaryConstructor) retractMode(ReadAnnotationCtor) else thisCtx + final def setInfo(sym: Symbol, info: Type): Unit = sym.info = info final def markAsEnumSingleton(sym: Symbol): Unit = - sym.updateAttachment(new u.DottyEnumSingleton(sym.name.toString)) + sym.updateAttachment(u.DottyEnumSingleton) final def markAsOpaqueType(sym: Symbol, alias: Type): Unit = sym.updateAttachment(new u.DottyOpaqueTypeAlias(alias)) @@ -480,6 +615,35 @@ trait ContextOps { self: TastyUniverse => def mode: TastyMode = EmptyTastyMode def owner: Symbol = topLevelClass.owner + private class TraceFrameImpl(val worker: Int, val parent: TraceFrameImpl) extends TraceFrame { + + var nextChild: Int = 0 + + val id: String = { + val buf = mutable.ArrayDeque.empty[Int] + var cur = this + while (cur.worker != -1) { + buf.prepend(cur.worker) + cur = cur.parent + } + buf.mkString("[", " ", ")") + } + + } + + private[this] var _trace: TraceFrameImpl = new TraceFrameImpl(worker = -1, parent = null) + + private[ContextOps] def subTrace[T](op: TraceFrame => T): T = { + val parent = _trace + val child = new TraceFrameImpl(worker = parent.nextChild, parent) + _trace = child + try op(child) + finally { + parent.nextChild += 1 + _trace = parent + } + } + private[this] var mySymbolsToForceAnnots: mutable.LinkedHashSet[Symbol] = _ private[ContextOps] def stageSymbolToForceAnnots(sym: Symbol): Unit = { @@ -502,13 +666,20 @@ trait ContextOps { self: TastyUniverse => val toForce = mySymbolsToForceAnnots.toList mySymbolsToForceAnnots.clear() for (sym <- toForce) { - log(s"!!! 
forcing annotations on ${showSym(sym)}") - analyseAnnotations(sym) + trace(traceForceAnnotations(sym)) { + analyseAnnotations(sym) + } } assert(mySymbolsToForceAnnots.isEmpty, "more symbols added while forcing") } } + private def traceForceAnnotations(sym: Symbol) = TraceInfo[Unit]( + query = "forcing annotations of symbol", + qual = s"${showSym(sym)}", + res = _ => s"annotations were forced on ${showSym(sym)}" + ) + private[this] var myInlineDefs: mutable.Map[Symbol, mutable.ArrayBuffer[Symbol]] = null private[this] var myMacros: mutable.Map[Symbol, mutable.ArrayBuffer[Symbol]] = null private[this] var myTraitParamAccessors: mutable.Map[Symbol, mutable.ArrayBuffer[Symbol]] = null @@ -548,7 +719,7 @@ trait ContextOps { self: TastyUniverse => * Reports illegal definitions: * - trait constructors with parameters * - * @param cls should be a symbol associated with a non-empty scope + * @param cls should be a class symbol associated with a non-empty scope */ private[ContextOps] def enterLatentDefs(cls: Symbol): Unit = { diff --git a/src/compiler/scala/tools/nsc/tasty/bridge/FlagOps.scala b/src/compiler/scala/tools/nsc/tasty/bridge/FlagOps.scala index b4f88b88c886..28fc84e1657e 100644 --- a/src/compiler/scala/tools/nsc/tasty/bridge/FlagOps.scala +++ b/src/compiler/scala/tools/nsc/tasty/bridge/FlagOps.scala @@ -16,29 +16,72 @@ import scala.tools.tasty.TastyFlags._ import scala.tools.nsc.tasty.TastyUniverse import scala.reflect.internal.{Flags, ModifierFlags} -/**Handles encoding of `TastyFlagSet` to `scala.reflect` flags and witnessing which flags do not map directly - * from TASTy. +/** Handles encoding of `TastyFlagSet` to `scala.reflect` flags and witnessing which flags do not map directly + * from TASTy. */ trait FlagOps { self: TastyUniverse => import self.{symbolTable => u} object FlagSets { + val TastyOnlyFlags: TastyFlagSet = ( - Erased | Internal | Inline | InlineProxy | Opaque | Extension | Given | Exported | Transparent | Enum | Infix - | Open | ParamAlias + Erased | Inline | InlineProxy | Opaque | Extension | Given | Exported | Transparent + | Enum | Infix | Open | ParamAlias | Invisible ) + + type FlagParser = TastyFlagSet => Context => TastyFlagSet + + val addDeferred: FlagParser = flags => _ => flags | Deferred + val parseMethod: FlagParser = { mods0 => implicit ctx => + var mods = EmptyTastyFlags + if (mods0.is(Erased)) erasedRefinementIsUnsupported[Unit] + if (mods0.isOneOf(Given | Implicit)) mods |= Implicit + mods + } + + object Creation { + val ObjectDef: TastyFlagSet = Object | Lazy | Final | Stable + val ObjectClassDef: TastyFlagSet = Object | Final + val Wildcard: u.FlagSet = newSymbolFlagSetFromEncoded(Flags.EXISTENTIAL) + val Default: u.FlagSet = newSymbolFlagSet(EmptyTastyFlags) + } + def withAccess(flags: TastyFlagSet, inheritedAccess: TastyFlagSet): TastyFlagSet = + flags | (inheritedAccess & (Private | Local | Protected)) + val SingletonEnum: TastyFlagSet = Case | Static | Enum | Stable val TermParamOrAccessor: TastyFlagSet = Param | ParamSetter - val ObjectCreationFlags: TastyFlagSet = Object | Lazy | Final | Stable - val ObjectClassCreationFlags: TastyFlagSet = Object | Final - val SingletonEnumFlags: TastyFlagSet = Case | Static | Enum | Stable - val FieldAccessorFlags: TastyFlagSet = FieldAccessor | Stable - val LocalFieldFlags: TastyFlagSet = Private | Local + val FieldGetter: TastyFlagSet = FieldAccessor | Stable + val ParamGetter: TastyFlagSet = FieldGetter | ParamSetter + val LocalField: TastyFlagSet = Private | Local + val Scala2Macro: TastyFlagSet = Erased 
| Macro + } + + /** For purpose of symbol initialisation, encode a `TastyFlagSet` as a `symbolTable.FlagSet`. */ + private[bridge] def newSymbolFlagSet(tflags: TastyFlagSet): u.FlagSet = + newSymbolFlagSetFromEncoded(unsafeEncodeTastyFlagSet(tflags)) + + private[bridge] def newSymbolFlagSetFromEncoded(flags: u.FlagSet): u.FlagSet = + flags | ModifierFlags.SCALA3X + + implicit final class SymbolFlagOps(val sym: Symbol) { + def reset(tflags: TastyFlagSet)(implicit ctx: Context): sym.type = + ctx.resetFlag0(sym, unsafeEncodeTastyFlagSet(tflags)) + def isOneOf(mask: TastyFlagSet): Boolean = + sym.hasFlag(unsafeEncodeTastyFlagSet(mask)) + def is(mask: TastyFlagSet): Boolean = + sym.hasAllFlags(unsafeEncodeTastyFlagSet(mask)) + def is(mask: TastyFlagSet, butNot: TastyFlagSet): Boolean = + if (!butNot) + sym.is(mask) + else + sym.is(mask) && sym.not(butNot) + def not(mask: TastyFlagSet): Boolean = + sym.hasNoFlags(unsafeEncodeTastyFlagSet(mask)) } - /**encodes a `TastyFlagSet` as `scala.reflect` flags and will ignore flags that can't be converted, such as - * members of `FlagSets.TastyOnlyFlags` + /** encodes a `TastyFlagSet` as a `symbolTable.FlagSet`, the flags in `FlagSets.TastyOnlyFlags` are ignored. + * @note Do not use directly to initialise symbol flags, use `newSymbolFlagSet` */ - private[bridge] def encodeFlagSet(tflags: TastyFlagSet): u.FlagSet = { + private def unsafeEncodeTastyFlagSet(tflags: TastyFlagSet): u.FlagSet = { import u.Flag var flags = u.NoFlags if (tflags.is(Private)) flags |= Flag.PRIVATE @@ -78,7 +121,6 @@ trait FlagOps { self: TastyUniverse => else { val sb = collection.mutable.ArrayBuffer.empty[String] if (flags.is(Erased)) sb += "erased" - if (flags.is(Internal)) sb += "" if (flags.is(Inline)) sb += "inline" if (flags.is(InlineProxy)) sb += "" if (flags.is(Opaque)) sb += "opaque" @@ -90,6 +132,7 @@ trait FlagOps { self: TastyUniverse => if (flags.is(Open)) sb += "open" if (flags.is(ParamAlias)) sb += "" if (flags.is(Infix)) sb += "infix" + if (flags.is(Invisible)) sb += "" sb.mkString(" | ") } } diff --git a/src/compiler/scala/tools/nsc/tasty/bridge/SymbolOps.scala b/src/compiler/scala/tools/nsc/tasty/bridge/SymbolOps.scala index 2dccefa5a129..543bbb72d469 100644 --- a/src/compiler/scala/tools/nsc/tasty/bridge/SymbolOps.scala +++ b/src/compiler/scala/tools/nsc/tasty/bridge/SymbolOps.scala @@ -12,9 +12,8 @@ package scala.tools.nsc.tasty.bridge -import scala.tools.nsc.tasty.SafeEq - -import scala.tools.nsc.tasty.{TastyUniverse, TastyModes}, TastyModes._ +import scala.annotation.tailrec +import scala.tools.nsc.tasty.{SafeEq, TastyUniverse, ForceKinds, TastyModes}, TastyModes._, ForceKinds._ import scala.tools.tasty.{TastyName, Signature, TastyFlags}, TastyName.SignedName, Signature.MethodSignature, TastyFlags._ import scala.tools.tasty.ErasedTypeRef @@ -34,28 +33,46 @@ trait SymbolOps { self: TastyUniverse => final def declaringSymbolOf(sym: Symbol): Symbol = if (sym.isModuleClass) sym.sourceModule else sym - private final def deepComplete(tpe: Type): Unit = { - val asTerm = tpe.termSymbol - if (asTerm ne u.NoSymbol) { - asTerm.ensureCompleted() - deepComplete(tpe.widen) - } else { - tpe.typeSymbol.ensureCompleted() + private final def deepComplete(space: Type)(implicit ctx: Context): Unit = { + symOfType(space) match { + case u.NoSymbol => + ctx.log(s"could not retrieve symbol from type ${showType(space)}") + case termSym if termSym.isTerm => + if (termSym.is(Object)) { + termSym.ensureCompleted(SpaceForce) + termSym.moduleClass.ensureCompleted(DeepForce | SpaceForce) + 
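// (editor's note, illustrative) e.g. when a member is selected from a module,
// say a space of the form `p.Foo.type` with `p.Foo` hypothetical, this branch
// completes the module value (SpaceForce) and then deep-forces its module
// class (DeepForce | SpaceForce), so that `Foo`'s declarations are populated
// before `lookupSymbol` searches the space for the selected name.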
} + else { + ctx.log(s"deep complete on non-module term ${showSym(termSym)}, not taking action") + } + case typeSym => + typeSym.ensureCompleted(SpaceForce) } } - implicit final class SymbolDecorator(val sym: Symbol) { - - def isScala3Inline: Boolean = repr.originalFlagSet.is(Inline) - def isScala2Macro: Boolean = repr.originalFlagSet.is(Erased | Macro) + /** Fetch the symbol of a path type without forcing the symbol, + * `NoSymbol` if not a path. + */ + @tailrec + private[bridge] final def symOfType(tpe: Type): Symbol = tpe match { + case tpe: u.TypeRef => tpe.sym + case tpe: u.SingleType => tpe.sym + case tpe: u.ThisType => tpe.sym + case tpe: u.ConstantType => symOfType(tpe.value.tpe) + case tpe: u.ClassInfoType => tpe.typeSymbol + case tpe: u.RefinedType0 => tpe.typeSymbol + case tpe: u.ExistentialType => symOfType(tpe.underlying) + case _ => u.NoSymbol + } - def isPureMixinCtor: Boolean = isMixinCtor && repr.originalFlagSet.is(Stable) - def isMixinCtor: Boolean = u.nme.MIXIN_CONSTRUCTOR == sym.name && sym.owner.isTrait + implicit final class SymbolDecorator(val sym: Symbol) { - def isTraitParamAccessor: Boolean = sym.owner.isTrait && repr.originalFlagSet.is(FieldAccessor|ParamSetter) + def isScala3Inline: Boolean = repr.tflags.is(Inline) + def isScala2Macro: Boolean = repr.tflags.is(FlagSets.Scala2Macro) + def isTraitParamAccessor: Boolean = sym.owner.isTrait && repr.tflags.is(FieldAccessor|ParamSetter) def isParamGetter: Boolean = - sym.isMethod && sym.repr.originalFlagSet.is(FlagSets.FieldAccessorFlags) + sym.isMethod && sym.repr.tflags.is(FlagSets.ParamGetter) /** A computed property that should only be called on a symbol which is known to have been initialised by the * Tasty Unpickler and is not yet completed. @@ -72,30 +89,32 @@ trait SymbolOps { self: TastyUniverse => } } - def ensureCompleted(): Unit = { - sym.info - sym.annotations.foreach(_.completeInfo()) + def ensureCompleted(forceKinds: ForceKinds)(implicit ctx: Context): Unit = { + val raw = sym.rawInfo + if (raw.isInstanceOf[u.LazyType]) { + ctx.trace(traceForceInfo(sym, forceKinds)) { + sym.info + sym.annotations.foreach(_.completeInfo()) + } + } else { + assert(!raw.isInstanceOf[TastyRepr], s"${showSym(sym)} has incorrectly initialised info $raw") + } } + + private def traceForceInfo( + sym: Symbol, + forceKinds: ForceKinds + )(implicit ctx: Context) = TraceInfo[Unit]( + query = "force symbol info", + qual = s"${showSym(sym)} in context ${showSym(ctx.owner)}", + res = _ => s"${showSym(sym)} was forced", + modifiers = forceKinds.describe + ) + def objectImplementation: Symbol = sym.moduleClass def sourceObject: Symbol = sym.sourceModule - def ref(args: List[Type]): Type = u.appliedType(sym, args) - def ref: Type = sym.ref(Nil) - def singleRef: Type = u.singleType(u.NoPrefix, sym) - def termRef: Type = sym.preciseRef(u.NoPrefix) - def preciseRef(pre: Type): Type = u.typeRef(pre, sym, Nil) + def ref: Type = u.appliedType(sym, Nil) def safeOwner: Symbol = if (sym.owner eq sym) sym else sym.owner - - def set(mask: TastyFlagSet)(implicit ctx: Context): sym.type = ctx.addFlags(sym, mask) - def reset(mask: TastyFlagSet)(implicit ctx: Context): sym.type = ctx.removeFlags(sym, mask) - - def isOneOf(mask: TastyFlagSet): Boolean = sym.hasFlag(encodeFlagSet(mask)) - def is(mask: TastyFlagSet): Boolean = sym.hasAllFlags(encodeFlagSet(mask)) - def is(mask: TastyFlagSet, butNot: TastyFlagSet): Boolean = - if (!butNot) - sym.is(mask) - else - sym.is(mask) && sym.not(butNot) - def not(mask: TastyFlagSet): Boolean = 
sym.hasNoFlags(encodeFlagSet(mask)) } /** if isConstructor, make sure it has one non-implicit parameter list */ @@ -106,15 +125,15 @@ trait SymbolOps { self: TastyUniverse => else termParamss - def namedMemberOfType(space: Type, tname: TastyName)(implicit ctx: Context): Symbol = { + private[bridge] def lookupSymbol(space: Type, tname: TastyName)(implicit ctx: Context): Symbol = { deepComplete(space) tname match { - case SignedName(qual, sig, target) => signedMemberOfSpace(space, qual, sig.map(_.encode), target) - case _ => memberOfSpace(space, tname) + case SignedName(qual, sig, target) => lookupSigned(space, qual, sig.map(_.encode), target) + case _ => lookupSimple(space, tname) } } - private def memberOfSpace(space: Type, tname: TastyName)(implicit ctx: Context): Symbol = { + private def lookupSimple(space: Type, tname: TastyName)(implicit ctx: Context): Symbol = { // TODO [tasty]: dotty uses accessibleDenot which asserts that `fetched.isAccessibleFrom(pre)`, // or else filters for non private. // There should be an investigation to see what code makes that false, and what is an equivalent check. @@ -129,7 +148,11 @@ trait SymbolOps { self: TastyUniverse => space.member(selector).orElse(lookInTypeCtor) } } - else space.member(encodeTermName(tname)) + else { + val firstTry = space.member(encodeTermName(tname)) + if (firstTry.isOverloaded) firstTry.filter(!_.isPrivateLocal) + else firstTry + } } if (isSymbol(member) && hasType(member)) member else errorMissing(space, tname) @@ -143,13 +166,13 @@ trait SymbolOps { self: TastyUniverse => val kind = if (tname.isTypeName) "type" else "term" def typeToString(tpe: Type) = { def inner(sb: StringBuilder, tpe: Type): StringBuilder = tpe match { - case u.SingleType(pre, sym) => inner(sb, pre) append '.' append ( - if (sym.isPackageObjectOrClass) s"`${sym.name}`" - else String valueOf sym.name - ) - case u.TypeRef(pre, sym, _) if sym.isTerm => - if ((pre eq u.NoPrefix) || (pre eq u.NoType)) sb append sym.name - else inner(sb, pre) append '.' append sym.name + case u.ThisType(cls) => sb append cls.fullNameString + case u.SingleType(pre, sym) => + if ((pre eq u.NoPrefix) || (pre eq u.NoType)) sb append sym.nameString + else inner(sb, pre) append '.' append sym.nameString + case u.TypeRef(pre, sym, _) => + if ((pre eq u.NoPrefix) || (pre eq u.NoType)) sb append sym.nameString + else inner(sb, pre) append '.' 
append sym.nameString case tpe => sb append tpe } inner(new StringBuilder(), tpe).toString @@ -162,44 +185,62 @@ trait SymbolOps { self: TastyUniverse => typeError(s"can't find $missing; perhaps it is missing from the classpath.") } - private def signedMemberOfSpace(space: Type, qual: TastyName, sig: MethodSignature[ErasedTypeRef], target: TastyName)(implicit ctx: Context): Symbol = { + private def lookupSigned( + space: Type, + qual: TastyName, + sig: MethodSignature[ErasedTypeRef], + target: TastyName + )(implicit ctx: Context): Symbol = { if (target ne qual) { unsupportedError(s"selection of method $qual with @targetName(" + '"' + target + '"' + ")") } else { - ctx.log(s"""<<< looking for overload in symbolOf[$space] @@ $qual: ${showSig(sig)}""") - val member = space.member(encodeTermName(qual)) - if (!(isSymbol(member) && hasType(member))) errorMissing(space, qual) - val (tyParamCount, argTpeRefs) = { - val (tyParamCounts, params) = sig.params.partitionMap(identity) - if (tyParamCounts.length > 1) { - unsupportedError(s"method with unmergeable type parameters: $qual") - } - (tyParamCounts.headOption.getOrElse(0), params) - } - def compareSym(sym: Symbol): Boolean = sym match { - case sym: u.MethodSymbol => - val method = sym.tpe.asSeenFrom(space, sym.owner) - ctx.log(s">>> trying $sym: $method") - val params = method.paramss.flatten - val isJava = sym.isJavaDefined - NameErasure.sigName(method.finalResultType, isJava) === sig.result && - params.length === argTpeRefs.length && - (qual === TastyName.Constructor && tyParamCount === member.owner.typeParams.length - || tyParamCount === sym.typeParams.length) && - params.zip(argTpeRefs).forall { case (param, tpe) => NameErasure.sigName(param.tpe, isJava) === tpe } && { - ctx.log(s">>> selected ${showSym(sym)}: ${sym.tpe}") - true + ctx.trace(traceOverload(space, qual, sig)) { + val member = space.member(encodeTermName(qual)) + if (!(isSymbol(member) && hasType(member))) errorMissing(space, qual) + val (tyParamCount, paramRefs) = { + val (tyParamCounts, params) = sig.params.partitionMap(identity) + if (tyParamCounts.length > 1) { + unsupportedError(s"method with unmergeable type parameters: $qual") } - case _ => - ctx.log(s"""! member[$space]("$qual") ${showSym(sym)} is not a method""") - false + (tyParamCounts.headOption.getOrElse(0), params) + } + def compareSym(sym: Symbol): Boolean = sym match { + case sym: u.MethodSymbol => + sym.ensureCompleted(OverloadedSym) + // TODO [tasty]: we should cache signatures for symbols and compare against `sig` + val meth0 = u.unwrapWrapperTypes(sym.tpe.asSeenFrom(space, sym.owner)) + val paramSyms = meth0.paramss.flatten + val resTpe = meth0.finalResultType + val sameParamSize = paramSyms.length === paramRefs.length + def sameTyParamSize = tyParamCount === ({ + // the signature of a class/mixin constructor includes + // type parameters, in nsc these come from the parent. + val tyParamOwner = if (qual.isConstructorName) member.owner else sym + tyParamOwner.typeParams.length + }) + def sameParams = paramSyms.lazyZip(paramRefs).forall({ + case (paramSym, paramRef) => sameErasure(sym)(paramSym.tpe, paramRef) + }) + sameParamSize && sameTyParamSize && sameParams && sameErasure(sym)(resTpe, sig.result) + case _ => + ctx.log(s"""! 
member[$space]("$qual") ${showSym(sym)} is not a method""") + false + } + member.asTerm.alternatives.find(compareSym).getOrElse( + typeError(s"No matching overload of $space.$qual with signature ${showSig(sig)}") + ) } - member.asTerm.alternatives.find(compareSym).getOrElse( - typeError(s"No matching overload of $space.$qual with signature ${showSig(sig)}")) } } + private def traceOverload(space: Type, tname: TastyName, sig: MethodSignature[ErasedTypeRef]) = TraceInfo[Symbol]( + query = s"looking for overload", + qual = s"symbolOf[$space] @@ $tname: ${showSig(sig)}", + res = overload => s"selected overload ${showSym(overload)}" + ) + def showSig(sig: MethodSignature[ErasedTypeRef]): String = sig.map(_.signature).show - def showSym(sym: Symbol): String = s"Symbol($sym, #${sym.id})" + def showSym(sym: Symbol): String = s"`(#${sym.id}) ${sym.accurateKindString} ${sym.name}`" + def showSymStable(sym: Symbol): String = s"#[${sym.id}, ${sym.name}]" } diff --git a/src/compiler/scala/tools/nsc/tasty/bridge/TastyCore.scala b/src/compiler/scala/tools/nsc/tasty/bridge/TastyCore.scala index 6af38a66246d..01ca7a60fffe 100644 --- a/src/compiler/scala/tools/nsc/tasty/bridge/TastyCore.scala +++ b/src/compiler/scala/tools/nsc/tasty/bridge/TastyCore.scala @@ -34,6 +34,5 @@ abstract class TastyCore { self: TastyUniverse => private val Identity = (x: Any) => x def id[T]: T => T = Identity.asInstanceOf[T => T] - def map[T, U](ts: List[T], f: T => U): List[U] = if (f `eq` Identity) ts.asInstanceOf[List[U]] else ts.map(f) } diff --git a/src/compiler/scala/tools/nsc/tasty/bridge/TreeOps.scala b/src/compiler/scala/tools/nsc/tasty/bridge/TreeOps.scala index 6f6edd0de981..82d82af03a44 100644 --- a/src/compiler/scala/tools/nsc/tasty/bridge/TreeOps.scala +++ b/src/compiler/scala/tools/nsc/tasty/bridge/TreeOps.scala @@ -35,6 +35,19 @@ trait TreeOps { self: TastyUniverse => } } + def showTree(tree: Tree): String = { + // here we want to avoid forcing the symbols of type trees, + // so instead substitute the type tree with an Identifier + // of the `showType`, which does not force. 
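// (editor's note, illustrative) e.g. a `TypeTree` whose type is `Option[Int]`
// is replaced below by an `Ident` named after the unwrapped `showType`
// rendering, roughly `scala.Option[scala.Int]`, so printing the tree never
// completes the symbols that the type mentions.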
+ val tree1 = tree.transform(new u.Transformer { + override def transform(tree: Tree) = tree match { + case tree: u.TypeTree => u.Ident(s"${showType(tree.tpe, wrap = false)}") // ident prints its name directly + case tree => super.transform(tree) + } + }) + u.show(tree1) + } + object tpd { @inline final def Constant(value: Any): Constant = @@ -44,10 +57,10 @@ trait TreeOps { self: TastyUniverse => new TastyIdent(name).setType(tpe) @inline final def Select(qual: Tree, name: TastyName)(implicit ctx: Context): Tree = - selectImpl(qual, name)(implicit ctx => namedMemberOfPrefix(qual.tpe, name)) + selectImpl(qual, name)(implicit ctx => lookupTypeFrom(qual.tpe)(qual.tpe, name)) @inline final def Select(owner: Type)(qual: Tree, name: TastyName)(implicit ctx: Context): Tree = - selectImpl(qual, name)(implicit ctx => namedMemberOfTypeWithPrefix(qual.tpe, owner, name)) + selectImpl(qual, name)(implicit ctx => lookupTypeFrom(owner)(qual.tpe, name)) private def selectImpl(qual: Tree, name: TastyName)(lookup: Context => Type)(implicit ctx: Context): Tree = { @@ -57,17 +70,10 @@ trait TreeOps { self: TastyUniverse => def selectCtor(qual: Tree) = u.Select(qual, u.nme.CONSTRUCTOR).setType(qual.tpe.typeSymbol.primaryConstructor.tpe) - if (ctx.mode.is(ReadAnnotation) && name.isSignedConstructor) { - val cls = qual.tpe.typeSymbol - cls.ensureCompleted() // need to force flags - if (cls.isJavaAnnotation) - selectCtor(qual) - else - selectName(qual, name)(lookup) - } - else { + if (ctx.mode.is(ReadAnnotationCtor) && name.isSignedConstructor) + selectCtor(qual) + else selectName(qual, name)(lookup) - } } diff --git a/src/compiler/scala/tools/nsc/tasty/bridge/TypeOps.scala b/src/compiler/scala/tools/nsc/tasty/bridge/TypeOps.scala index 94d9645b8ca3..513a2bf01cee 100644 --- a/src/compiler/scala/tools/nsc/tasty/bridge/TypeOps.scala +++ b/src/compiler/scala/tools/nsc/tasty/bridge/TypeOps.scala @@ -12,7 +12,7 @@ package scala.tools.nsc.tasty.bridge -import scala.tools.nsc.tasty.{TastyUniverse, SafeEq, TastyModes}, TastyModes._ +import scala.tools.nsc.tasty.{TastyUniverse, SafeEq, TastyModes, ForceKinds}, TastyModes._, ForceKinds._ import scala.tools.tasty.{TastyName, ErasedTypeRef, TastyFlags}, TastyFlags._ @@ -20,6 +20,8 @@ import scala.reflect.internal.Variance import scala.util.chaining._ import scala.collection.mutable +import scala.collection.immutable.ArraySeq + import scala.reflect.internal.Flags /**This layer adds factories that construct `scala.reflect` Types in the shapes that TASTy expects. 
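The hunk below rewrites `lzyShow` and adds `showType`, printers that render a type without forcing the symbols it mentions. The following sketch of the intended output is the editor's illustration only; `optionOfInt` and `predefPath` are hypothetical values, and the exact package prefixes depend on the types being printed.

    // optionOfInt: a u.TypeRef for scala.Option[Int]
    // predefPath:  the u.SingleType of the scala.Predef module
    lzyShow(optionOfInt)                 // roughly: scala.Option[scala.Int]
    showType(optionOfInt)                // roughly: tpelazy"scala.Option[scala.Int]"
    showType(predefPath)                 // roughly: path"scala.Predef.type"
    showType(optionOfInt, wrap = false)  // as above, without the tpelazy"..." wrapper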
@@ -52,9 +54,70 @@ trait TypeOps { self: TastyUniverse => } } - def lzyShow(tpe: Type): String = tpe match { - case u.TypeRef(_, sym, args) => s"$sym${if (args.nonEmpty) args.map(lzyShow).mkString("[", ",","]") else ""}" - case tpe => tpe.typeSymbolDirect.toString + def lzyShow(tpe: Type): String = { + val sym = symOfType(tpe) + if (isSymbol(sym)) { + val args = tpe.typeArgs + s"${sym.fullName}${if (args.nonEmpty) args.map(lzyShow).mkString("[", ",", "]") else ""}" + } + else { + s"${tpe.typeSymbolDirect.fullName}" + } + } + + def showType(tpe: Type, wrap: Boolean = true): String = { + def prefixed(prefix: String)(op: => String) = { + val raw = op + if (wrap) s"""$prefix"$raw"""" + else raw + } + def parameterised(tparams: List[Symbol], prefix: String)(f: String => String) = prefixed(prefix) { + f(if (tparams.isEmpty) "" else tparams.map(p => s"${p.name}").mkString("[", ", ", "]")) + } + def cls(tparams: List[Symbol], tpe: u.ClassInfoType) = parameterised(tparams, "cls") { paramStr => + s"$paramStr${tpe.typeSymbol.fullName}$paramStr" + } + def meth(tparams: List[Symbol], tpe: u.MethodType) = parameterised(tparams, "meth") { paramStr => + s"$paramStr$tpe" + } + def preStr(pre: Type): String = { + val preSym = symOfType(pre) + val thisStr = { + if (pre.isInstanceOf[u.ThisType] && !pre.typeSymbol.isPackageClass && !pre.typeSymbol.isModuleClass) + ".this" + else + "" + } + if (isSymbol(preSym)) s"${preSym.fullName}$thisStr." else "" + } + tpe match { + case tpe: u.ClassInfoType => cls(Nil, tpe) + case u.PolyType(tparams, tpe: u.ClassInfoType) => cls(tparams, tpe) + case u.PolyType(tparams, tpe: u.MethodType) => meth(tparams, tpe) + case tpe: u.MethodType => meth(Nil, tpe) + case tpe: u.ThisType => prefixed("path") { s"${tpe.sym.fullName}.this" } + + case tpe: u.SingleType => + prefixed("path") { + if (tpe.sym.isModule) tpe.sym.fullName + ".type" + else s"${preStr(tpe.pre)}${tpe.sym.name}.type" + } + + case tpe: u.TypeRef => + if (tpe.sym.is(Object)) prefixed("path") { + s"${tpe.sym.fullName}.type" + } + else prefixed("tpelazy") { + val pre = preStr(tpe.pre) + val argsStrs = tpe.args.map(showType(_, wrap = false)) + val argsStr = if (argsStrs.nonEmpty) argsStrs.mkString("[", ", ", "]") else "" + s"$pre${tpe.sym.name}$argsStr" + } + + case tpe: u.TypeBounds => prefixed("tpebounds") { s"$tpe"} + + case tpe => prefixed("tpe") { s"$tpe" } + } } def fnResult(fn: Type): Type = fn.dealiasWiden.finalResultType @@ -92,17 +155,40 @@ trait TypeOps { self: TastyUniverse => } final val NoType: Type = u.NoType + final val NoPrefix: Type = u.NoPrefix + + def adjustParent(tp: Type): Type = { + val tpe = tp.dealias + if (tpe.typeSymbolDirect === u.definitions.ObjectClass) u.definitions.AnyRefTpe + else tpe + } /** Represents a symbol that has been initialised by TastyUnpickler, but can not be in a state of completion * because its definition has not yet been seen. 
*/ object DefaultInfo extends TastyRepr { override def isTrivial: Boolean = true - def originalFlagSet: TastyFlagSet = EmptyTastyFlags + def tflags: TastyFlagSet = EmptyTastyFlags } - private[bridge] def CopyInfo(underlying: u.TermSymbol, originalFlagSet: TastyFlagSet): TastyRepr = - new CopyCompleter(underlying, originalFlagSet) + private[bridge] def CopyInfo(underlying: u.TermSymbol, tflags: TastyFlagSet)(implicit ctx: Context): TastyRepr = + new CopyCompleter(underlying, tflags) + + private[bridge] def SingletonEnumClassInfo( + enumValue: u.TermSymbol, + originalFlagSet: TastyFlagSet + )(implicit ctx: Context): TastyRepr = + new SingletonEnumModuleClassCompleter(enumValue, originalFlagSet) + + private[bridge] def LocalSealedChildProxyInfo(parent: Symbol, tflags: TastyFlagSet)(implicit ctx: Context): Type = + new LocalSealedChildProxyCompleter(parent, tflags) + + private[bridge] def LambdaParamInfo( + tflags: TastyFlagSet, + idx: Int, + infoDb: Int => Type + )(implicit ctx: Context): Type = + new LambdaParamCompleter(tflags, idx, infoDb) def OpaqueTypeToBounds(tpe: Type): (Type, Type) = tpe match { case u.PolyType(tparams, tpe) => @@ -118,6 +204,7 @@ trait TypeOps { self: TastyUniverse => } def ByNameType(arg: Type): Type = u.definitions.byNameType(arg) def TypeBounds(lo: Type, hi: Type): Type = u.TypeBounds.apply(lo, hi) + def InitialTypeInfo: Type = u.TypeBounds.empty def SingleType(pre: Type, sym: Symbol): Type = u.singleType(pre, sym) def ExprType(res: Type): Type = u.NullaryMethodType(res) def InlineExprType(res: Type): Type = res match { @@ -127,7 +214,7 @@ trait TypeOps { self: TastyUniverse => def PolyType(params: List[Symbol], res: Type): Type = u.PolyType(params, res) def ClassInfoType(parents: List[Type], clazz: Symbol): Type = u.ClassInfoType(parents, clazz.rawInfo.decls, clazz.asType) def ClassInfoType(parents: List[Type], decls: List[Symbol], clazz: Symbol): Type = u.ClassInfoType(parents, u.newScopeWith(decls:_*), clazz.asType) - def ThisType(sym: Symbol): Type = u.ThisType(sym) + def ThisType(tpe: Type): Type = u.ThisType(symOfType(tpe)) def ConstantType(c: Constant): Type = u.ConstantType(c) def IntersectionType(tps: Type*): Type = u.intersectionType(tps.toList) def IntersectionType(tps: List[Type]): Type = u.intersectionType(tps) @@ -140,6 +227,7 @@ trait TypeOps { self: TastyUniverse => def SuperType(thisTpe: Type, superTpe: Type): Type = u.SuperType(thisTpe, superTpe) def LambdaFromParams(typeParams: List[Symbol], ret: Type): Type = u.PolyType(typeParams, lambdaResultType(ret)) def RecType(run: RecType => Type)(implicit ctx: Context): Type = new RecType(run).parent + def RecThis(tpe: Type): Type = tpe.asInstanceOf[RecType].recThis /** The method type corresponding to given parameters and result type */ def DefDefType(typeParams: List[Symbol], valueParamss: List[List[Symbol]], resultType: Type): Type = { @@ -192,7 +280,7 @@ trait TypeOps { self: TastyUniverse => if (args.exists(tpe => tpe.isInstanceOf[u.TypeBounds] | tpe.isInstanceOf[LambdaPolyType])) { val syms = mutable.ListBuffer.empty[Symbol] def bindWildcards(tpe: Type) = tpe match { - case tpe: u.TypeBounds => ctx.newWildcardSym(tpe).tap(syms += _).pipe(_.ref) + case tpe: u.TypeBounds => ctx.newWildcard(tpe).tap(syms += _).pipe(_.ref) case tpe: LambdaPolyType => tpe.toNested case tpe => tpe } @@ -205,6 +293,64 @@ trait TypeOps { self: TastyUniverse => } } + + def ParamRef(binder: Type, idx: Int): Type = + binder.asInstanceOf[LambdaType].lambdaParams(idx).ref + + def NamedType(prefix: Type, sym: Symbol): Type = { 
+ if (sym.isType) { + prefix match { + case tp: u.ThisType if !sym.isTypeParameter => u.typeRef(prefix, sym, Nil) + case _:u.SingleType | _:u.RefinedType => u.typeRef(prefix, sym, Nil) + case _ => u.appliedType(sym, Nil) + } + } + else { // is a term + if (sym.hasAllFlags(Flags.PackageFlags)) { + u.typeRef(u.NoPrefix, sym, Nil) + } else { + u.singleType(prefix, sym) + } + } + } + + def TypeRef(prefix: Type, name: TastyName.TypeName)(implicit ctx: Context): Type = + TypeRefIn(prefix, prefix, name) + + def TypeRefIn(prefix: Type, space: Type, name: TastyName.TypeName)(implicit ctx: Context): Type = { + import scala.tools.tasty.TastyName._ + + def doLookup = lookupTypeFrom(space)(prefix, name) + + // we escape some types in the scala package especially + if (prefix.typeSymbol === u.definitions.ScalaPackage) { + name match { + case TypeName(SimpleName(raw @ SyntheticScala3Type())) => raw match { + case tpnme.And => AndTpe + case tpnme.Or => unionIsUnsupported + case tpnme.ContextFunctionN(n) if (n.toInt > 0) => ContextFunctionType(n.toInt) + case tpnme.FunctionN(n) if (n.toInt > 22) => FunctionXXLType(n.toInt) + case tpnme.TupleCons => genTupleIsUnsupported("scala.*:") + case tpnme.Tuple if !ctx.mode.is(ReadParents) => genTupleIsUnsupported("scala.Tuple") + case tpnme.AnyKind => u.definitions.AnyTpe + case tpnme.Matchable => u.definitions.AnyTpe + case _ => doLookup + } + + case _ => doLookup + } + } + else { + doLookup + } + } + + def TermRef(prefix: Type, name: TastyName)(implicit ctx: Context): Type = + TermRefIn(prefix, prefix, name) + + def TermRefIn(prefix: Type, space: Type, name: TastyName)(implicit ctx: Context): Type = + lookupTypeFrom(space)(prefix, name.toTermName) + } private[bridge] def mkRefinedTypeWith(parents: List[Type], clazz: Symbol, decls: u.Scope): Type = @@ -240,9 +386,12 @@ trait TypeOps { self: TastyUniverse => bounds } + private[bridge] def sameErasure(sym: Symbol)(tpe: Type, ref: ErasedTypeRef) = + NameErasure.sigName(tpe, sym) === ref + /** This is a port from Dotty of transforming a Method type to an ErasedTypeRef */ - private[bridge] object NameErasure { + private object NameErasure { def isRepeatedParam(self: Type): Boolean = self.typeSymbol eq u.definitions.RepeatedParamClass @@ -251,9 +400,9 @@ trait TypeOps { self: TastyUniverse => * `from` and `to` must be static classes, both with one type parameter, and the same variance. 
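 * (editor's illustration, hedged) e.g. a repeated-parameter type `Int*`, i.e.
 * `scala.<repeated>[Int]`, translated with `from = RepeatedParamClass`,
 * `to = ArrayClass` and `wildcardArg = true` yields roughly `Array[_ <: Int]`,
 * which is how `sigName` below normalises varargs of Java-defined methods.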
* Do the same for by name types => From[T] and => To[T] */ - def translateParameterized(self: Type)(from: u.ClassSymbol, to: u.ClassSymbol, wildcardArg: Boolean = false)(implicit ctx: Context): Type = self match { + def translateParameterized(self: Type)(from: u.ClassSymbol, to: u.ClassSymbol, wildcardArg: Boolean): Type = self match { case self @ u.NullaryMethodType(tp) => - u.NullaryMethodType(translateParameterized(tp)(from, to, wildcardArg=false)) + u.NullaryMethodType(translateParameterized(tp)(from, to, wildcardArg = false)) case _ => if (self.typeSymbol.isSubClass(from)) { def elemType(tp: Type): Type = tp.dealiasWiden match { @@ -263,28 +412,28 @@ trait TypeOps { self: TastyUniverse => } val arg = elemType(self) val arg1 = if (wildcardArg) u.TypeBounds.upper(arg) else arg - to.ref(arg1 :: Nil) + u.appliedType(to, arg1 :: Nil) } else self } - def translateFromRepeated(self: Type)(toArray: Boolean, translateWildcard: Boolean = false)(implicit ctx: Context): Type = { + def translateFromRepeated(self: Type)(toArray: Boolean): Type = { val seqClass = if (toArray) u.definitions.ArrayClass else u.definitions.SeqClass - if (translateWildcard && self === u.WildcardType) - seqClass.ref(u.WildcardType :: Nil) - else if (isRepeatedParam(self)) + if (isRepeatedParam(self)) // We want `Array[? <: T]` because arrays aren't covariant until after // erasure. See `tests/pos/i5140`. translateParameterized(self)(u.definitions.RepeatedParamClass, seqClass, wildcardArg = toArray) else self } - def sigName(tp: Type, isJava: Boolean)(implicit ctx: Context): ErasedTypeRef = { - val normTp = translateFromRepeated(tp)(toArray = isJava) - erasedSigName(normTp.erasure) + def sigName(tp: Type, sym: Symbol): ErasedTypeRef = { + val normTp = translateFromRepeated(tp)(toArray = sym.isJavaDefined) + erasedSigName( + u.erasure.erasure(sym)(normTp) + ) } - private def erasedSigName(erased: Type)(implicit ctx: Context): ErasedTypeRef = erased match { + private def erasedSigName(erased: Type): ErasedTypeRef = erased match { case erased: u.ExistentialType => erasedSigName(erased.underlying) case erased: u.TypeRef => import TastyName._ @@ -348,56 +497,71 @@ trait TypeOps { self: TastyUniverse => private val SyntheticScala3Type = raw"^(?:&|\||AnyKind|(?:Context)?Function\d+|\*:|Tuple|Matchable)$$".r - def selectType(name: TastyName.TypeName, prefix: Type)(implicit ctx: Context): Type = selectType(name, prefix, prefix) - def selectType(name: TastyName.TypeName, prefix: Type, space: Type)(implicit ctx: Context): Type = { - import scala.tools.tasty.TastyName._ - - def lookupType = namedMemberOfTypeWithPrefix(prefix, space, name) - - // we escape some types in the scala package especially - if (prefix.typeSymbol === u.definitions.ScalaPackage) { - name match { - case TypeName(SimpleName(raw @ SyntheticScala3Type())) => raw match { - case tpnme.And => AndTpe - case tpnme.Or => unionIsUnsupported - case tpnme.ContextFunctionN(n) if (n.toInt > 0) => ContextFunctionType(n.toInt) - case tpnme.FunctionN(n) if (n.toInt > 22) => FunctionXXLType(n.toInt) - case tpnme.TupleCons => genTupleIsUnsupported("scala.*:") - case tpnme.Tuple if !ctx.mode.is(ReadParents) => genTupleIsUnsupported("scala.Tuple") - case tpnme.AnyKind => u.definitions.AnyTpe - case tpnme.Matchable => u.definitions.AnyTpe - case _ => lookupType - } - - case _ => lookupType - } - } - else { - lookupType - } + sealed abstract trait TastyRepr extends u.Type { + def tflags: TastyFlagSet + final def unsupportedFlags: TastyFlagSet = tflags & FlagSets.TastyOnlyFlags } - def 
selectTerm(name: TastyName, prefix: Type)(implicit ctx: Context): Type = selectTerm(name, prefix, prefix) - def selectTerm(name: TastyName, prefix: Type, space: Type)(implicit ctx: Context): Type = - namedMemberOfTypeWithPrefix(prefix, space, name.toTermName) + abstract class TastyCompleter( + isClass: Boolean, + tflags: TastyFlagSet + )(implicit capturedCtx: Context) + extends BaseTastyCompleter(tflags) { + override final val decls: u.Scope = if (isClass) u.newScope else u.EmptyScope + } - def singletonLike(tpe: Type): Symbol = tpe match { - case u.SingleType(_, sym) => sym - case u.TypeRef(_,sym,_) => sym - case x => throw new MatchError(x) + private[TypeOps] class CopyCompleter( + underlying: u.TermSymbol, + tflags: TastyFlagSet + )(implicit ctx: Context) + extends BaseTastyCompleter(tflags) { + def computeInfo(sym: Symbol)(implicit ctx: Context): Unit = { + underlying.ensureCompleted(CopySym) + sym.info = underlying.tpe + underlying.attachments.all.foreach(sym.updateAttachment(_)) + } } - private[TypeOps] val NoSymbolFn = (_: Context) => u.NoSymbol + /** This completer ensures that if the "fake" singleton enum module class + * is completed first, that it completes the module symbol which + * then completes the module class. + */ + private[TypeOps] class SingletonEnumModuleClassCompleter( + enumValue: u.TermSymbol, + tflags: TastyFlagSet + )(implicit ctx: Context) + extends BaseTastyCompleter(tflags) { + def computeInfo(sym: Symbol)(implicit ctx: Context): Unit = { + enumValue.ensureCompleted(EnumProxy) + } + } - sealed abstract trait TastyRepr extends u.Type { - def originalFlagSet: TastyFlagSet - final def tastyOnlyFlags: TastyFlagSet = originalFlagSet & FlagSets.TastyOnlyFlags + private[TypeOps] class LocalSealedChildProxyCompleter( + parent: Symbol, + tflags: TastyFlagSet + )(implicit ctx: Context) + extends BaseTastyCompleter(tflags) { + def computeInfo(sym: Symbol)(implicit ctx: Context): Unit = { + sym.info = defn.ClassInfoType(parent.tpe_* :: Nil, sym) // TODO [tasty]: check if tpe_* forces + } } - abstract class TastyCompleter(isClass: Boolean, final val originalFlagSet: TastyFlagSet)(implicit - capturedCtx: Context) extends u.LazyType with TastyRepr with u.FlagAgnosticCompleter { + private[TypeOps] final class LambdaParamCompleter( + flags: TastyFlagSet, + idx: Int, + infoDb: Int => Type, + )(implicit ctx: Context) + extends BaseTastyCompleter(flags) { + override def computeInfo(denot: Symbol)(implicit ctx: Context): Unit = + denot.info = infoDb(idx) + } - override final val decls: u.Scope = if (isClass) u.newScope else u.EmptyScope + abstract class BaseTastyCompleter( + final val tflags: TastyFlagSet + )(implicit capturedCtx: Context) + extends u.LazyType + with TastyRepr + with u.FlagAgnosticCompleter { override final def load(sym: Symbol): Unit = complete(sym) @@ -412,56 +576,14 @@ trait TypeOps { self: TastyUniverse => def computeInfo(sym: Symbol)(implicit ctx: Context): Unit } - private[TypeOps] class CopyCompleter(underlying: u.TermSymbol, final val originalFlagSet: TastyFlagSet) - extends u.LazyType with TastyRepr with u.FlagAgnosticCompleter { - override final def complete(sym: Symbol): Unit = { - underlying.ensureCompleted() - sym.info = underlying.tpe - } - } - - def prefixedRef(prefix: Type, sym: Symbol): Type = { - if (sym.isType) { - prefix match { - case tp: u.ThisType if tp.sym.isRefinementClass => sym.preciseRef(prefix) - case _:u.SingleType | _:u.RefinedType => sym.preciseRef(prefix) - case _ => sym.ref - } - } - else if (sym.isConstructor) { - 
normaliseConstructorRef(sym) - } - else { - u.singleType(prefix, sym) - } - } - - def normaliseConstructorRef(ctor: Symbol): Type = { - var tpe = ctor.tpe - val tParams = ctor.owner.typeParams - if (tParams.nonEmpty) tpe = u.PolyType(tParams, tpe) - tpe - } - - def namedMemberOfPrefix(pre: Type, name: TastyName)(implicit ctx: Context): Type = - namedMemberOfTypeWithPrefix(pre, pre, name) + private[bridge] def lookupTypeFrom(owner: Type)(pre: Type, tname: TastyName)(implicit ctx: Context): Type = + defn.NamedType(pre, lookupSymbol(owner, tname)) - def namedMemberOfTypeWithPrefix(pre: Type, space: Type, tname: TastyName)(implicit ctx: Context): Type = { - prefixedRef(pre, namedMemberOfType(space, tname)) - } - - def lambdaResultType(resType: Type): Type = resType match { + private def lambdaResultType(resType: Type): Type = resType match { case res: LambdaPolyType => res.toNested case res => res } - abstract class LambdaTypeCompanion[N <: TastyName] { - def factory(params: List[N])(registerCallback: Type => Unit, paramInfosOp: () => List[Type], resultTypeOp: () => Type)(implicit ctx: Context): LambdaType - - final def apply(params: List[N])(registerCallback: Type => Unit, paramInfosOp: () => List[Type], resultTypeOp: () => Type)(implicit ctx: Context): Type = - factory(params)(registerCallback, paramInfosOp, resultTypeOp).canonical - } - final class LambdaPolyType(typeParams: List[Symbol], val resType: Type) extends u.PolyType(typeParams, LambdaPolyType.addLower(resType)) { def toNested: u.PolyType = resType match { case _: u.TypeBounds => this @@ -488,196 +610,259 @@ trait TypeOps { self: TastyUniverse => private[bridge] final class OpaqueTypeBounds(lo: Type, hi: Type, val alias: Type) extends u.TypeBounds(lo, hi) - def typeRef(tpe: Type): Type = u.appliedType(tpe, Nil) - /** The given type, unless `sym` is a constructor, in which case the * type of the constructed instance is returned */ - def effectiveResultType(sym: Symbol, typeParams: List[Symbol], givenTp: Type): Type = + def effectiveResultType(sym: Symbol, givenTp: Type): Type = if (sym.name == u.nme.CONSTRUCTOR) sym.owner.tpe else givenTp - private[TypeOps] type LambdaType = Type with Lambda - private[TypeOps] type TypeLambda = LambdaType with TypeLike - private[TypeOps] type TermLambda = LambdaType with TermLike - - private[TypeOps] trait TypeLike { self: Type with Lambda => - type ThisTName = TastyName.TypeName - type ThisName = u.TypeName - } + /** Lazy thread unsafe non-nullable value that can not be re-entered */ + private[bridge] final class SyncRef[A](private var compute: () => A) { + private var out: A = _ + private var entered: Boolean = false - private[TypeOps] trait TermLike { self: Type with Lambda => - type ThisTName = TastyName - type ThisName = u.TermName - type PInfo = Type + def apply(): A = { + if (entered) { + assert(out != null, "cyclic completion of SyncRef") + } + else { + entered = true + val result = compute() + compute = null + assert(result != null, "SyncRef is non-nullable") + out = result + } + out + } } - private[TypeOps] trait Lambda extends Product with Serializable { self: Type => - type ThisTName <: TastyName - type ThisName <: u.Name - type This <: Type - - val paramNames: List[ThisName] - val paramInfos: List[Type] - val resType: Type + object MethodTermLambda extends TermLambdaFactory { - def typeParams: List[Symbol] // deferred to final implementation + type ThisLambda = MethodTermLambda - final protected def validateThisLambda(): Unit = { - assert(resType.isComplete, self) - 
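// ---------------------------------------------------------------------------
// Editor's sketch (illustrative only, not part of the patch): `SyncRef`, added
// above, memoises a single non-null computation and turns re-entrant (cyclic)
// evaluation into an assertion failure instead of looping forever.
// `computeParents` is a hypothetical thunk.
//
//   val parents = new SyncRef(() => computeParents())
//   parents()   // runs computeParents() once and caches the non-null result
//   parents()   // returns the cached value; re-entering apply() while the
//               // first call is still running trips "cyclic completion of SyncRef"
// ---------------------------------------------------------------------------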
assert(paramInfos.length == paramNames.length, self) + protected def apply( + params: ArraySeq[TastyName], + flags: TastyFlagSet, + paramInfosOp: ArraySeq[Symbol] => ArraySeq[Type], + resultTypeOp: () => Type, + registerCallback: Type => Unit, + )(implicit ctx: Context): ThisLambda = { + new MethodTermLambda(params, paramInfosOp, resultTypeOp, flags, registerCallback) } - override final def productArity: Int = 2 - - override final def productElement(n: Int): Any = n match { - case 0 => paramNames - case 1 => resType - case _ => throw new IndexOutOfBoundsException(n.toString) - } - - def canEqual(that: Any): Boolean = that.isInstanceOf[Lambda] - - def canonical: This - - override final def equals(that: Any): Boolean = that match { - case that: Lambda => - (that.canEqual(self) - && that.paramNames == paramNames - && that.resType == resType) - case _ => false - } } - object HKTypeLambda extends TypeLambdaCompanion { - def factory(params: List[TastyName.TypeName])(registerCallback: Type => Unit, - paramInfosOp: () => List[Type], resultTypeOp: () => Type)(implicit ctx: Context): LambdaType = - new HKTypeLambda(params)(registerCallback, paramInfosOp, resultTypeOp) - } + private[TypeOps] final class MethodTermLambda( + paramTNames: ArraySeq[TastyName], + paramInfosOp: ArraySeq[Symbol] => ArraySeq[Type], + resultTypeOp: () => Type, + flags: TastyFlagSet, + registerCallback: Type => Unit, + )(implicit ctx: Context) + extends TermLambda("MethodTermLambda")(paramTNames, paramInfosOp, resultTypeOp, flags)(registerCallback) { - object PolyType extends TypeLambdaCompanion { - def factory(params: List[TastyName.TypeName])(registerCallback: Type => Unit, - paramInfosOp: () => List[Type], resultTypeOp: () => Type)(implicit ctx: Context): LambdaType = - new PolyTypeLambda(params)(registerCallback, paramInfosOp, resultTypeOp) - } + protected def canonical(ps: List[Symbol], res: Type): Type = u.MethodType(ps, res) - final class MethodTypeCompanion(defaultFlags: TastyFlagSet) extends TermLambdaCompanion { self => - def factory(params: List[TastyName])(registerCallback: Type => Unit, - paramInfosOp: () => List[Type], resultTypeOp: () => Type)(implicit ctx: Context): LambdaType = - new MethodTermLambda(params, defaultFlags)(registerCallback, paramInfosOp, resultTypeOp) + override def canEqual(that: Any): Boolean = that.isInstanceOf[MethodTermLambda] } - def recThis(tpe: Type): Type = tpe.asInstanceOf[RecType].recThis - def symOfTypeRef(tpe: Type): Symbol = tpe.asInstanceOf[u.TypeRef].sym + object HKTypeLambda extends TypeLambdaFactory { - private[TypeOps] final class RecType(run: RecType => Type)(implicit ctx: Context) extends Type with Product { - - override val productPrefix = "RecType" - override val productArity = 2 - - val refinementClass = ctx.newRefinementClassSymbol - val recThis: Type = u.ThisType(refinementClass) - val parent: Type = run(this) + type ThisLambda = HKTypeLambda - def canEqual(that: Any): Boolean = that.isInstanceOf[RecType] - def productElement(n: Int): Any = n match { - case 0 => if (parent == null) "" else parent - case 1 => hashCode - case _ => throw new IndexOutOfBoundsException(n.toString) + protected def apply( + params: ArraySeq[TastyName.TypeName], + flags: TastyFlagSet, + paramInfosOp: ArraySeq[Symbol] => ArraySeq[Type], + resultTypeOp: () => Type, + registerCallback: Type => Unit, + )(implicit ctx: Context): ThisLambda = { + new HKTypeLambda(params, flags, paramInfosOp, resultTypeOp, registerCallback) } + } - override def equals(that: Any): Boolean = this eq 
that.asInstanceOf[AnyRef] - override def safeToString: String = s"RecType(rt @ $hashCode => ${if (parent == null) "" else parent})" + private[TypeOps] final class HKTypeLambda( + paramTNames: ArraySeq[TastyName.TypeName], + flags: TastyFlagSet, + paramInfosOp: ArraySeq[Symbol] => ArraySeq[Type], + resultTypeOp: () => Type, + registerCallback: Type => Unit + )(implicit ctx: Context) + extends TypeLambda("HKTypeLambda")(paramTNames, flags, paramInfosOp, resultTypeOp)(registerCallback) { - } + final override protected def normaliseResult(resType: Type): Type = lambdaResultType(resType) - def methodTypeCompanion(initialFlags: TastyFlagSet): MethodTypeCompanion = new MethodTypeCompanion(initialFlags) + protected def canonical(ps: List[Symbol], res: Type): Type = new LambdaPolyType(ps, res) - abstract class TermLambdaCompanion - extends LambdaTypeCompanion[TastyName] + override def canEqual(that: Any): Boolean = that.isInstanceOf[HKTypeLambda] + } - abstract class TypeLambdaCompanion - extends LambdaTypeCompanion[TastyName.TypeName] + object PolyTypeLambda extends TypeLambdaFactory { - private[TypeOps] final class MethodTermLambda(paramTNames: List[TastyName], defaultFlags: TastyFlagSet)(registerCallback: MethodTermLambda => Unit, - paramInfosOp: () => List[Type], resultTypeOp: () => Type)(implicit ctx: Context) - extends Type with Lambda with TermLike { methodLambda => - type This = u.MethodType + type ThisLambda = PolyTypeLambda - val paramNames: List[u.TermName] = paramTNames.map(encodeTermName) + protected def apply( + params: ArraySeq[TastyName.TypeName], + flags: TastyFlagSet, + paramInfosOp: ArraySeq[Symbol] => ArraySeq[Type], + resultTypeOp: () => Type, + registerCallback: Type => Unit, + )(implicit ctx: Context): ThisLambda = { + new PolyTypeLambda(params, flags, paramInfosOp, resultTypeOp, registerCallback) + } + } - override val productPrefix = "MethodTermLambda" + private[TypeOps] final class PolyTypeLambda( + paramTNames: ArraySeq[TastyName.TypeName], + flags: TastyFlagSet, + paramInfosOp: ArraySeq[Symbol] => ArraySeq[Type], + resultTypeOp: () => Type, + registerCallback: Type => Unit + )(implicit ctx: Context) + extends TypeLambda("PolyTypeLambda")(paramTNames, flags, paramInfosOp, resultTypeOp)(registerCallback) { - registerCallback(this) + protected def canonical(ps: List[Symbol], res: Type): Type = u.PolyType(ps, res) - val paramInfos: List[Type] = paramInfosOp() + override def canEqual(that: Any): Boolean = that.isInstanceOf[PolyTypeLambda] + } - override val params: List[Symbol] = paramNames.lazyZip(paramInfos).map { - case (name, argInfo) => ctx.owner.newValueParameter(name, u.NoPosition, encodeFlagSet(defaultFlags)).setInfo(argInfo) + private[TypeOps] abstract class TypeLambda( + kind: String)( + paramTNames: ArraySeq[TastyName.TypeName], + flags: TastyFlagSet, + paramInfosOp: ArraySeq[Symbol] => ArraySeq[Type], + resultTypeOp: () => Type)( + registerCallback: Type => Unit + )(implicit ctx: Context) + extends LambdaType(kind)(paramTNames, paramInfosOp, resultTypeOp, flags)(registerCallback) { + final override def typeParams: List[Symbol] = lambdaParams.toList + final protected def normaliseParam(info: Type): Type = normaliseIfBounds(info) + } + + private[TypeOps] abstract class TermLambda( + kind: String)( + paramTNames: ArraySeq[TastyName], + paramInfosOp: ArraySeq[Symbol] => ArraySeq[Type], + resultTypeOp: () => Type, + flags: TastyFlagSet)( + registerCallback: Type => Unit + )(implicit ctx: Context) + extends LambdaType(kind)(paramTNames, paramInfosOp, resultTypeOp, 
flags)(registerCallback) { + final override def params: List[Symbol] = lambdaParams.toList + final protected def normaliseParam(info: Type): Type = info + } + + private[TypeOps] abstract class LambdaType( + kind: String)( + paramTNames: ArraySeq[TastyName], + paramInfosOp: ArraySeq[Symbol] => ArraySeq[Type], + resultTypeOp: () => Type, + flags: TastyFlagSet)( + registerCallback: Type => Unit + )(implicit ctx: Context) extends AbstractLambdaType(kind) { + + protected def normaliseParam(info: Type): Type + protected def normaliseResult(resType: Type): Type = resType + + final val lambdaParams: ArraySeq[Symbol] = { + val paramInfoDb = new SyncRef(() => paramInfosOp(this.lambdaParams)) + def infoAt(idx: Int) = normaliseParam(paramInfoDb()(idx)) + + paramTNames.zipWithIndex.map { case (tname, idx) => + ctx.newLambdaParameter(tname, flags, idx, infoAt) + } } - val resType: Type = resultTypeOp() - - validateThisLambda() + registerCallback(this) - def canonical: u.MethodType = u.MethodType(params, resType) + final val resType: Type = normaliseResult(resultTypeOp()) - override def canEqual(that: Any): Boolean = that.isInstanceOf[MethodTermLambda] } - private[TypeOps] final class HKTypeLambda(paramTNames: List[TastyName.TypeName])(registerCallback: HKTypeLambda => Unit, - paramInfosOp: () => List[Type], resultTypeOp: () => Type)(implicit ctx: Context) - extends Type with Lambda with TypeLike { + private[TypeOps] abstract class AbstractLambdaType(override val productPrefix: String) + extends Type + with Product + with Serializable { - type This = LambdaPolyType - val paramNames: List[u.TypeName] = paramTNames.map(encodeTypeName) + def lambdaParams: ArraySeq[Symbol] + def resType: Type - override val productPrefix = "HKTypeLambda" + final override def etaExpand: Type = { + lambdaParams.foreach(_.info) // force locally + canonical(lambdaParams.toList, resType) + } - registerCallback(this) + protected def canonical(ps: List[Symbol], res: Type): Type - val paramInfos: List[Type] = paramInfosOp() + override final def productArity: Int = 2 - override val typeParams: List[Symbol] = paramNames.lazyZip(paramInfos).map { - case (name, bounds) => - val argInfo = normaliseIfBounds(bounds) - ctx.owner.newTypeParameter(name, u.NoPosition, u.Flag.DEFERRED).setInfo(argInfo) + override final def productElement(n: Int): Any = n match { + case 0 => lambdaParams + case 1 => resType + case _ => throw new IndexOutOfBoundsException(n.toString) } - val resType: Type = lambdaResultType(resultTypeOp()) - - validateThisLambda() - - def canonical: LambdaPolyType = new LambdaPolyType(typeParams, resType) + override final def equals(that: Any): Boolean = that match { + case that: AbstractLambdaType => + (that.canEqual(self) + && that.lambdaParams == lambdaParams + && that.resType == resType) + case _ => false + } - override def canEqual(that: Any): Boolean = that.isInstanceOf[HKTypeLambda] } - private[TypeOps] final class PolyTypeLambda(paramTNames: List[TastyName.TypeName])(registerCallback: PolyTypeLambda => Unit, - paramInfosOp: () => List[Type], resultTypeOp: () => Type)(implicit ctx: Context) - extends Type with Lambda with TypeLike { + abstract class LambdaFactory[N <: TastyName] { - type This = u.PolyType + type ThisLambda <: LambdaType - val paramNames: List[u.TypeName] = paramTNames.map(encodeTypeName) + protected def apply( + params: ArraySeq[N], + flags: TastyFlagSet, + paramInfosOp: ArraySeq[Symbol] => ArraySeq[Type], + resultTypeOp: () => Type, + registerCallback: Type => Unit, + )(implicit ctx: Context): ThisLambda 
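// A plain-Scala sketch of the deferred parameter-info scheme used by
// `lambdaParams` above: parameter "symbols" are created knowing only their
// index, and their infos are looked up later through a memoised table, so an
// info can mention an already-created parameter. `Param`, `params` and
// `infos` are hypothetical stand-ins for the compiler's symbols and types.
object DeferredParamInfoSketch {
  final class Param(val name: String, idx: Int, infoAt: Int => String) {
    lazy val info: String = infoAt(idx)            // computed on demand, not at creation
  }

  lazy val params: Vector[Param] = Vector("xs", "n").zipWithIndex.map {
    case (name, i) => new Param(name, i, idx => infos(idx))
  }
  // produced only after all Params exist, so the second entry may refer to the first
  lazy val infos: Vector[String] = Vector("Array[Int]", s"an index into ${params(0).name}")

  def main(args: Array[String]): Unit =
    params.foreach(p => println(s"${p.name}: ${p.info}"))
}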
- override val productPrefix = "PolyTypeLambda" + } - registerCallback(this) + object LambdaFactory { + final def parse[N <: TastyName]( + factory: LambdaFactory[N], + params: ArraySeq[N], + flags: TastyFlagSet)( + paramInfosOp: ArraySeq[Symbol] => ArraySeq[Type], + resultTypeOp: () => Type, + registerCallback: Type => Unit, + )(implicit ctx: Context): Type = + factory(params, flags, paramInfosOp, resultTypeOp, registerCallback) + .etaExpand // turn the LambdaType into something the compiler understands + .tap(registerCallback) // we should replace the type at start as it has been expanded + } - val paramInfos: List[Type] = paramInfosOp() + abstract class TermLambdaFactory extends LambdaFactory[TastyName] + abstract class TypeLambdaFactory extends LambdaFactory[TastyName.TypeName] - override val typeParams: List[Symbol] = paramNames.lazyZip(paramInfos).map { - case (name, argInfo) => ctx.owner.newTypeParameter(name, u.NoPosition, u.Flag.DEFERRED).setInfo(argInfo) - } + private[TypeOps] final class RecType(run: RecType => Type)(implicit ctx: Context) extends Type with Product { - val resType: Type = resultTypeOp() // potentially need to flatten? (probably not, happens in typer in dotty) + override val productPrefix = "RecType" + override val productArity = 2 + + val refinementClass = ctx.newRefinementClassSymbol + val recThis: Type = u.ThisType(refinementClass) + val parent: Type = run(this) - validateThisLambda() + def canEqual(that: Any): Boolean = that.isInstanceOf[RecType] + def productElement(n: Int): Any = n match { + case 0 => if (parent == null) "" else parent + case 1 => hashCode + case _ => throw new IndexOutOfBoundsException(n.toString) + } - def canonical: u.PolyType = u.PolyType(typeParams, resType) + override def equals(that: Any): Boolean = this eq that.asInstanceOf[AnyRef] + override def safeToString: String = s"RecType(rt @ $hashCode => ${if (parent == null) "" else parent})" - override def canEqual(that: Any): Boolean = that.isInstanceOf[PolyTypeLambda] } } diff --git a/src/compiler/scala/tools/nsc/tasty/package.scala b/src/compiler/scala/tools/nsc/tasty/package.scala index 5122e0711a88..a490d74d6ede 100644 --- a/src/compiler/scala/tools/nsc/tasty/package.scala +++ b/src/compiler/scala/tools/nsc/tasty/package.scala @@ -22,4 +22,11 @@ package object tasty { @inline final def !==(u: T): Boolean = t != u } + def cyan(str: String): String = Console.CYAN + str + Console.RESET + def yellow(str: String): String = Console.YELLOW + str + Console.RESET + def magenta(str: String): String = Console.MAGENTA + str + Console.RESET + def red(str: String): String = Console.RED + str + Console.RESET + def green(str: String): String = Console.GREEN + str + Console.RESET + def blue(str: String): String = Console.BLUE + str + Console.RESET + } diff --git a/src/compiler/scala/tools/nsc/transform/CleanUp.scala b/src/compiler/scala/tools/nsc/transform/CleanUp.scala index aaec0a0a314b..41922c945662 100644 --- a/src/compiler/scala/tools/nsc/transform/CleanUp.scala +++ b/src/compiler/scala/tools/nsc/transform/CleanUp.scala @@ -17,14 +17,14 @@ import symtab._ import Flags._ import scala.collection._ import scala.tools.nsc.Reporting.WarningCategory +import scala.util.chaining._ abstract class CleanUp extends Statics with Transform with ast.TreeDSL { import global._ import definitions._ import CODE._ - import treeInfo.StripCast + import treeInfo.{ SYNTH_CASE_FLAGS, isDefaultCase, StripCast } - /** the following two members override abstract members in Transform */ val phaseName: String = "cleanup" /* 
used in GenBCode: collects ClassDef symbols owning a main(Array[String]) method */ @@ -369,7 +369,7 @@ abstract class CleanUp extends Statics with Transform with ast.TreeDSL { /* For testing purposes, the dynamic application's condition * can be printed-out in great detail. Remove? */ - if (settings.debug) { + if (settings.isDebug) { def paramsToString(xs: Any*) = xs map (_.toString) mkString ", " val mstr = ad.symbol.tpe match { case MethodType(mparams, resType) => @@ -398,105 +398,94 @@ abstract class CleanUp extends Statics with Transform with ast.TreeDSL { } } - // transform scrutinee of all matches to ints - def transformSwitch(sw: Match): Tree = { import CODE._ - sw.selector.tpe.widen match { - case IntTpe => sw // can switch directly on ints - case StringTpe => - // these assumptions about the shape of the tree are justified by the codegen in MatchOptimization - val Match(Typed(selTree, _), cases) = sw: @unchecked - def selArg = selTree match { - case x: Ident => REF(x.symbol) - case x: Literal => x - case x => throw new MatchError(x) - } - val restpe = sw.tpe - val swPos = sw.pos.focus - - /* From this: - * string match { case "AaAa" => 1 case "BBBB" | "c" => 2 case _ => 3} - * Generate this: - * string.## match { - * case 2031744 => - * if ("AaAa" equals string) goto match1 - * else if ("BBBB" equals string) goto match2 - * else goto matchFailure - * case 99 => - * if ("c" equals string) goto match2 - * else goto matchFailure - * case _ => goto matchFailure - * } - * match1: goto matchSuccess (1) - * match2: goto matchSuccess (2) - * matchFailure: goto matchSuccess (3) // would be throw new MatchError(string) if no default was given - * matchSuccess(res: Int): res - * This proliferation of labels is needed to handle alternative patterns, since multiple branches in the - * resulting switch may need to correspond to a single case body. - */ - - val stats = mutable.ListBuffer.empty[Tree] - var failureBody = Throw(New(definitions.MatchErrorClass.tpe_*, selArg)) : Tree - - // genbcode isn't thrilled about seeing labels with Unit arguments, so `success`'s type is one of - // `${sw.tpe} => ${sw.tpe}` or `() => Unit` depending. 
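// A source-level illustration, reusing the example strings from the comments
// around this hunk, of why the generated switch keys on `String.##` and then
// re-checks candidates with `equals`: distinct case strings can share a hash
// code, so one hash bucket may cover several cases.
object StringSwitchHashSketch {
  def classify(string: String): Int = string match {
    case "AaAa"       => 1
    case "BBBB" | "c" => 2
    case _            => 3
  }

  def main(args: Array[String]): Unit = {
    println("AaAa".## == "BBBB".##)                // true: both hash to 2031744
    println(classify("AaAa"))                      // 1
    println(classify("c"))                         // 2
    println(classify("other"))                     // 3
  }
}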
- val success = { - val lab = currentOwner.newLabel(unit.freshTermName("matchEnd"), swPos) - if (restpe =:= UnitTpe) { - lab.setInfo(MethodType(Nil, restpe)) - } else { - lab.setInfo(MethodType(lab.newValueParameter(nme.x_1).setInfo(restpe) :: Nil, restpe)) - } - } - def succeed(res: Tree): Tree = - if (restpe =:= UnitTpe) BLOCK(res, REF(success) APPLY Nil) else REF(success) APPLY res - - val failure = currentOwner.newLabel(unit.freshTermName("matchEnd"), swPos).setInfo(MethodType(Nil, restpe)) - def fail(): Tree = atPos(swPos) { Apply(REF(failure), Nil) } - - val ifNull = LIT(0) - val noNull = Apply(selArg DOT Object_hashCode, Nil) - - val newSel = selTree match { - case _: Ident => atPos(selTree.symbol.pos) { IF(selTree.symbol OBJ_EQ NULL) THEN ifNull ELSE noNull } - case x: Literal => atPos(selTree.pos) { if (x.value.value == null) ifNull else noNull } - case x => throw new MatchError(x) + private def transformStringSwitch(sw: Match): Tree = { import CODE._ + // these assumptions about the shape of the tree are justified by the codegen in MatchOptimization + val Match(Typed(selTree, _), cases) = sw: @unchecked + def selArg = selTree match { + case x: Ident => REF(x.symbol) + case x: Literal => x + case x => throw new MatchError(x) + } + val newSel = selTree match { + case x: Ident => atPos(x.symbol.pos)(IF (x.symbol OBJ_EQ NULL) THEN ZERO ELSE selArg.OBJ_##) + case x: Literal => atPos(x.pos) (if (x.value.value == null) ZERO else selArg.OBJ_##) + case x => throw new MatchError(x) + } + val restpe = sw.tpe + val resUnit = restpe =:= UnitTpe + val swPos = sw.pos.focus + + /* From this: + * string match { case "AaAa" => 1 case "BBBB" | "c" => 2 case _ => 3 } + * Generate this: + * string.## match { + * case 2031744 => + * if ("AaAa" equals string) goto matchEnd (1) + * else if ("BBBB" equals string) goto case2 + * else goto defaultCase + * case 99 => + * if ("c" equals string) goto case2 + * else goto defaultCase + * case _ => goto defaultCase + * } + * case2: goto matchEnd (2) + * defaultCase: goto matchEnd (3) // or `goto matchEnd (throw new MatchError(string))` if no default was given + * matchEnd(res: Int): res + * Extra labels are added for alternative patterns branches, since multiple branches in the + * resulting switch may need to correspond to a single case body. + */ + + val labels = mutable.ListBuffer.empty[LabelDef] + var defaultCaseBody = Throw(New(MatchErrorClass.tpe_*, selArg)): Tree + + def LABEL(name: String) = currentOwner.newLabel(unit.freshTermName(name), swPos).setFlag(SYNTH_CASE_FLAGS) + def newCase() = LABEL( "case").setInfo(MethodType(Nil, restpe)) + val defaultCase = LABEL("defaultCase").setInfo(MethodType(Nil, restpe)) + val matchEnd = LABEL("matchEnd").tap { lab => + // genbcode isn't thrilled about seeing labels with Unit arguments, so `success`'s type is one of + // `${sw.tpe} => ${sw.tpe}` or `() => Unit` depending. + lab.setInfo(MethodType(if (resUnit) Nil else List(lab.newSyntheticValueParam(restpe)), restpe)) + } + def goto(sym: Symbol, params: Tree*) = REF(sym) APPLY (params: _*) + def gotoEnd(body: Tree) = if (resUnit) BLOCK(body, goto(matchEnd)) else goto(matchEnd, body) + + val casesByHash = cases.flatMap { + case cd@CaseDef(StringsPattern(strs), _, body) => + val jump = newCase() // always create a label so when its used it matches the source case (e.g. 
`case4()`) + strs match { + case str :: Nil => List((str, gotoEnd(body), cd.pat.pos)) + case _ => + labels += LabelDef(jump, Nil, gotoEnd(body)) + strs.map((_, goto(jump), cd.pat.pos)) } - val casesByHash = - cases.flatMap { - case cd@CaseDef(StringsPattern(strs), _, body) => - val jump = currentOwner.newLabel(unit.freshTermName("case"), swPos).setInfo(MethodType(Nil, restpe)) - stats += LabelDef(jump, Nil, succeed(body)) - strs.map((_, jump, cd.pat.pos)) - case cd@CaseDef(Ident(nme.WILDCARD), _, body) => - failureBody = succeed(body) - None - case cd => globalError(s"unhandled in switch: $cd"); None - }.groupBy(_._1.##) - val newCases = casesByHash.toList.sortBy(_._1).map { - case (hash, cases) => - val newBody = cases.foldLeft(fail()) { - case (next, (pat, jump, pos)) => - val comparison = if (pat == null) Object_eq else Object_equals - atPos(pos) { - IF(LIT(pat) DOT comparison APPLY selArg) THEN (REF(jump) APPLY Nil) ELSE next - } - } - CaseDef(LIT(hash), EmptyTree, newBody) + case cd if isDefaultCase(cd) => defaultCaseBody = gotoEnd(cd.body); None + case cd => globalError(s"unhandled in switch: $cd"); None + }.groupBy(_._1.##) + + val newCases = casesByHash.toList.sortBy(_._1).map { + case (hash, cases) => + val newBody = cases.foldRight(atPos(swPos)(goto(defaultCase): Tree)) { + case ((null, rhs, pos), next) => atPos(pos)(IF (NULL OBJ_EQ selArg) THEN rhs ELSE next) + case ((str, rhs, pos), next) => atPos(pos)(IF (LIT(str) OBJ_== selArg) THEN rhs ELSE next) } + CASE(LIT(hash)) ==> newBody + } - stats += LabelDef(failure, Nil, failureBody) + labels += LabelDef(defaultCase, Nil, defaultCaseBody) + labels += LabelDef(matchEnd, matchEnd.info.params, matchEnd.info.params.headOption.fold(UNIT: Tree)(REF)) - stats += (if (restpe =:= UnitTpe) { - LabelDef(success, Nil, gen.mkLiteralUnit) - } else { - LabelDef(success, success.info.params.head :: Nil, REF(success.info.params.head)) - }) + val stats = Match(newSel, newCases :+ (DEFAULT ==> goto(defaultCase))) :: labels.toList - stats prepend Match(newSel, newCases :+ CaseDef(Ident(nme.WILDCARD), EmptyTree, fail())) + val res = Block(stats: _*) + localTyper.typedPos(sw.pos)(res) + } - val res = Block(stats.result() : _*) - localTyper.typedPos(sw.pos)(res) - case _ => globalError(s"unhandled switch scrutinee type ${sw.selector.tpe}: $sw"); sw + // transform scrutinee of all matches to switchable types (ints, strings) + def transformSwitch(sw: Match): Tree = { + sw.selector.tpe.widen match { + case IntTpe => sw // can switch directly on ints + case StringTpe => transformStringSwitch(sw) + case _ => globalError(s"unhandled switch scrutinee type ${sw.selector.tpe}: $sw"); sw } } diff --git a/src/compiler/scala/tools/nsc/transform/Constructors.scala b/src/compiler/scala/tools/nsc/transform/Constructors.scala index f6dfa26851de..d24618ce507f 100644 --- a/src/compiler/scala/tools/nsc/transform/Constructors.scala +++ b/src/compiler/scala/tools/nsc/transform/Constructors.scala @@ -363,39 +363,41 @@ abstract class Constructors extends Statics with Transform with TypingTransforme adapter.transform(tree) } + def rewriteUnspecialized(assignee: Symbol, stat: Tree): Tree = { + assert(ctorParams(genericClazz).length == primaryConstrParams.length, "Bad param len") + // this is just to make private fields public + (new specializeTypes.ImplementationAdapter(ctorParams(genericClazz), primaryConstrParams, null, true))(stat) + // also make assigned fields mutable so they don't end up final in bytecode + // and mark the specialized class constructor for a release fence 
addition + if (assignee.isField) + assignee.setFlag(MUTABLE) + + val rewritten = rewriteArrayUpdate(stat) + // statements coming from the original class need retyping in the current context + debuglog("retyping " + rewritten) + val duplicator = new specializeTypes.Duplicator(Map.empty) + val context = localTyper.context1.asInstanceOf[duplicator.Context] + duplicator.retyped(context, rewritten, genericClazz, clazz, Map.empty) + } + log("merging: " + originalStats.mkString("\n") + "\nwith\n" + specializedStats.mkString("\n")) - for (s <- originalStats; stat = s.duplicate) yield { + for (stat <- originalStats) yield { log("merge: looking at " + stat) - val stat1 = stat match { - case Assign(sel @ Select(This(_), field), _) => - specializedAssignFor(sel.symbol).getOrElse(stat) - case _ => stat - } - if (stat1 ne stat) { - log("replaced " + stat + " with " + stat1) - specBuf -= stat1 + stat.duplicate match { + case assign @ Assign(select @ Select(This(_), _), _) => + val assignee = select.symbol + specializedAssignFor(assignee) match { + case Some(specialized) => + log("replaced " + assign + " with " + specialized) + specBuf -= specialized + specialized + case None => + rewriteUnspecialized(assignee, assign) + } + case other => + rewriteUnspecialized(NoSymbol, other) } - - if (stat1 eq stat) { - assert(ctorParams(genericClazz).length == primaryConstrParams.length, "Bad param len") - // this is just to make private fields public - (new specializeTypes.ImplementationAdapter(ctorParams(genericClazz), primaryConstrParams, null, true))(stat1) - - val stat2 = rewriteArrayUpdate(stat1) - // statements coming from the original class need retyping in the current context - debuglog("retyping " + stat2) - - val d = new specializeTypes.Duplicator(Map[Symbol, Type]()) - d.retyped(localTyper.context1.asInstanceOf[d.Context], - stat2, - genericClazz, - clazz, - Map.empty) - } else - stat1 } -// if (specBuf.nonEmpty) -// println("residual specialized constructor statements: " + specBuf) } /* Add an 'if' around the statements coming after the super constructor. 
This @@ -759,18 +761,20 @@ abstract class Constructors extends Statics with Transform with TypingTransforme } else (Nil, remainingConstrStats) + val specializedStats = guardSpecializedInitializer(remainingConstrStatsDelayedInit) val fence = if (needFenceForDelayedInit || clazz.primaryConstructor.hasAttachment[ConstructorNeedsFence.type]) { val tree = localTyper.typedPos(clazz.primaryConstructor.pos)(gen.mkMethodCall(RuntimeStaticsModule, nme.releaseFence, Nil)) tree :: Nil } else Nil // Assemble final constructor - val primaryConstructor = deriveDefDef(primaryConstr)(_ => { + val primaryConstructor = deriveDefDef(primaryConstr) { _ => treeCopy.Block( primaryConstrBody, - paramInits ::: constructorPrefix ::: uptoSuperStats ::: guardSpecializedInitializer(remainingConstrStatsDelayedInit) ::: fence, - primaryConstrBody.expr) - }) + paramInits ::: constructorPrefix ::: uptoSuperStats ::: specializedStats ::: fence, + primaryConstrBody.expr + ) + } if ((exitingPickler(clazz.isAnonymousClass) || clazz.originalOwner.isTerm) && omittableAccessor.exists(_.isOuterField) && !constructorStats.exists(_.exists { case i: Ident if i.symbol.isOuterParam => true; case _ => false})) primaryConstructor.symbol.updateAttachment(OuterArgCanBeElided) diff --git a/src/compiler/scala/tools/nsc/transform/Erasure.scala b/src/compiler/scala/tools/nsc/transform/Erasure.scala index daf574fcabe8..9109bff9945f 100644 --- a/src/compiler/scala/tools/nsc/transform/Erasure.scala +++ b/src/compiler/scala/tools/nsc/transform/Erasure.scala @@ -94,7 +94,7 @@ abstract class Erasure extends InfoTransform if (! ts.isEmpty && ! result) { apply(ts.head) ; untilApply(ts.tail) } } - override protected def verifyJavaErasure = settings.Xverify || settings.debug + override protected def verifyJavaErasure = settings.Xverify || settings.isDebug private def needsJavaSig(sym: Symbol, tp: Type, throwsArgs: List[Type]) = !settings.Ynogenericsig && { def needs(tp: Type) = NeedsSigCollector(sym.isClassConstructor).collect(tp) needs(tp) || throwsArgs.exists(needs) @@ -518,7 +518,7 @@ abstract class Erasure extends InfoTransform clashErrors += Tuple2(pos, msg) } for (bc <- root.baseClasses) { - if (settings.debug) + if (settings.isDebug) exitingPostErasure(println( sm"""check bridge overrides in $bc |${bc.info.nonPrivateDecl(bridge.name)} @@ -652,7 +652,7 @@ abstract class Erasure extends InfoTransform val rhs = member.tpe match { case MethodType(Nil, FoldableConstantType(c)) => Literal(c) case _ => - val sel: Tree = Select(This(root), member) + val sel: Tree = gen.mkAttributedSelect(gen.mkAttributedThis(root), member) val bridgingCall = bridge.paramss.foldLeft(sel)((fun, vparams) => Apply(fun, vparams map Ident)) maybeWrap(bridgingCall) @@ -1297,7 +1297,6 @@ abstract class Erasure extends InfoTransform if (ct.tag == ClazzTag && ct.typeValue.typeSymbol != definitions.UnitClass) { val typeValue = ct.typeValue.dealiasWiden val erased = erasure(typeValue.typeSymbol) applyInArray typeValue - treeCopy.Literal(cleanLiteral, Constant(erased)) } else cleanLiteral diff --git a/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala b/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala index bb2778927539..3971302b1c98 100644 --- a/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala +++ b/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala @@ -488,6 +488,31 @@ abstract class ExplicitOuter extends InfoTransform transform(treeCopy.Apply(tree, treeCopy.Select(eqsel, outerSelect, eq), args)) } + // (t12312) 
C.this.a().X().isInstanceOf[C.this.a.X.type]() --> + // D.this.$outer().a().X().isInstanceOf[D.this.$outer.a.X.type]() + case TypeApply(fun, targs) => + val rewriteTypeToExplicitOuter = new TypeMap { typeMap => + def apply(tp: Type) = tp match { + case ThisType(sym) if sym != currentClass && !(sym.hasModuleFlag && sym.isStatic) => + var cls = currentClass + var tpe = cls.thisType + do { + tpe = singleType(tpe, outerAccessor(cls)) + cls = cls.outerClass + } while (cls != NoSymbol && sym != cls) + tpe.mapOver(typeMap) + case tp => tp.mapOver(typeMap) + } + } + val fun2 = transform(fun) + val targs2 = targs.mapConserve { targ0 => + val targ = transform(targ0) + val targTp = targ.tpe + val targTp2 = rewriteTypeToExplicitOuter(targTp.dealias) + if (targTp eq targTp2) targ else TypeTree(targTp2).setOriginal(targ) + } + treeCopy.TypeApply(tree, fun2, targs2) + case _ => val x = super.transform(tree) if (x.tpe eq null) x diff --git a/src/compiler/scala/tools/nsc/transform/OverridingPairs.scala b/src/compiler/scala/tools/nsc/transform/OverridingPairs.scala index 6387ddde49d7..fc9592732517 100644 --- a/src/compiler/scala/tools/nsc/transform/OverridingPairs.scala +++ b/src/compiler/scala/tools/nsc/transform/OverridingPairs.scala @@ -37,7 +37,7 @@ abstract class OverridingPairs extends SymbolPairs { * including bridges. But it may be refined in subclasses. */ override protected def exclude(sym: Symbol) = ( - sym.isPrivateLocal + (sym.isPrivateLocal && sym.isParamAccessor) || sym.isArtifact || sym.isConstructor || (sym.isPrivate && sym.owner != base) // Privates aren't inherited. Needed for pos/t7475a.scala @@ -54,8 +54,20 @@ abstract class OverridingPairs extends SymbolPairs { && (lowMemberType matches (self memberType high)) ) // TODO we don't call exclude(high), should we? - override def skipOwnerPair(lowClass: Symbol, highClass: Symbol): Boolean = - lowClass.isJavaDefined && highClass.isJavaDefined // javac is already checking this better than we could + override protected def skipOwnerPair(lowClass: Symbol, highClass: Symbol): Boolean = { + // Two Java-defined methods can be skipped if javac will check the overrides. Skipping is actually necessary to + // avoid false errors, as Java doesn't have the Scala's linearization rules and subtyping rules + // (`Array[String] <:< Array[Object]`). However, when a Java interface is mixed into a Scala class, mixed-in + // methods need to go through override checking (neg/t12394, neg/t12380). 
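// A toy model of the skip condition described in the comment above, outside
// the compiler: override pairs whose owners are both Java-defined are skipped
// only when javac already checks them (low is a subclass of high) or when no
// mixin parent of the checked class is related to either owner. `Cls` and the
// sample hierarchy are hypothetical stand-ins for class symbols.
object SkipOwnerPairSketch {
  final case class Cls(name: String, isJavaDefined: Boolean, parents: List[Cls] = Nil) {
    def isSubClassOf(other: Cls): Boolean =
      this == other || parents.exists(_.isSubClassOf(other))
  }

  def skipOwnerPair(base: Cls, low: Cls, high: Cls): Boolean =
    low.isJavaDefined && high.isJavaDefined && (
      low.isSubClassOf(high) ||                    // javac checks this pair itself
      base.parents.drop(1).forall { p =>           // or no mixin parent brings low/high in
        !p.isSubClassOf(low) && !p.isSubClassOf(high)
      }
    )

  def main(args: Array[String]): Unit = {
    val jIface   = Cls("JIface", isJavaDefined = true)
    val jSuper   = Cls("JSuper", isJavaDefined = true)               // unrelated to JIface
    val scalaMix = Cls("ScalaMix", isJavaDefined = false, parents = List(jSuper, jIface))
    println(skipOwnerPair(scalaMix, jSuper, jIface))                 // false: JIface is mixed in, must check
    val jSub  = Cls("JSub", isJavaDefined = true, parents = List(jIface))
    val plain = Cls("Plain", isJavaDefined = false, parents = List(jSub))
    println(skipOwnerPair(plain, jSub, jIface))                      // true: javac already checks JSub vs JIface
  }
}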
+ lowClass.isJavaDefined && highClass.isJavaDefined && { // skip if both are java-defined, and + lowClass.isNonBottomSubClass(highClass) || { // - low <:< high, which means they are overrides in Java and javac is doing the check; or + base.info.parents.tail.forall(p => { // - every mixin parent is unrelated to (not a subclass of) low and high, i.e., + val psym = p.typeSymbol // we're not mixing in high or low, both are coming from the superclass + !psym.isNonBottomSubClass(lowClass) && !psym.isNonBottomSubClass(highClass) + }) + } + } + } } private def bothJavaOwnedAndEitherIsField(low: Symbol, high: Symbol): Boolean = { diff --git a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala index 0e68021ae7ca..14077bb69e49 100644 --- a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala +++ b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala @@ -14,7 +14,7 @@ package scala package tools.nsc package transform -import scala.annotation.nowarn +import scala.annotation.{nowarn, tailrec} import scala.collection.mutable import scala.tools.nsc.symtab.Flags import scala.tools.nsc.Reporting.WarningCategory @@ -600,14 +600,22 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { * specialized subclass of "clazz" throughout this file. */ + val clazzName = specializedName(clazz, env0).toTypeName // scala/bug#5545: Eliminate classes with the same name loaded from the bytecode already present - all we need to do is // to force .info on them, as their lazy type will be evaluated and the symbols will be eliminated. Unfortunately // evaluating the info after creating the specialized class will mess the specialized class signature, so we'd - // better evaluate it before creating the new class symbol - val clazzName = specializedName(clazz, env0).toTypeName + // better unlink the the class-file backed symbol before creating the new class symbol val bytecodeClazz = clazz.owner.info.decl(clazzName) // debuglog("Specializing " + clazz + ", but found " + bytecodeClazz + " already there") - bytecodeClazz.info + def unlink(sym: Symbol): Unit = if (sym != NoSymbol) { + devWarningIf(sym.hasCompleteInfo)("Stale specialized symbol has been accessed: " + sym) + sym.setInfo(NoType) + sym.owner.info.decls.unlink(sym) + } + unlink(bytecodeClazz) + val companionModule = bytecodeClazz.companionModule + unlink(companionModule.moduleClass) + unlink(companionModule) val sClass = { val sc = clazz.owner.newClass(clazzName, clazz.pos, (clazz.flags | SPECIALIZED) & ~CASE) @@ -736,124 +744,139 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { enterMember(om) } - for (m <- normMembers if needsSpecialization(fullEnv, m) && satisfiable(fullEnv)) { - if (!m.isDeferred) - addConcreteSpecMethod(m) - // specialized members have to be overridable. 
- if (m.isPrivate) - m.resetFlag(PRIVATE).setFlag(PROTECTED) - - if (m.isConstructor) { - val specCtor = enterMember(cloneInSpecializedClass(m, x => x)) - info(specCtor) = Forward(m) - } - else if (isNormalizedMember(m)) { // methods added by normalization - val NormalizedMember(original) = info(m): @unchecked - if (nonConflicting(env ++ typeEnv(m))) { - if (info(m).degenerate) { - debuglog("degenerate normalized member " + m.defString) - val specMember = enterMember(cloneInSpecializedClass(m, _ & ~DEFERRED)) - - info(specMember) = Implementation(original) - typeEnv(specMember) = env ++ typeEnv(m) - } else { - val om = forwardToOverload(m) - debuglog("normalizedMember " + m + " om: " + om + " " + pp(typeEnv(om))) - } + @tailrec def isTraitValSetter(sym: Symbol): Boolean = + sym.isSetter && sym.getterIn(sym.owner).isStable && + (sym.hasFlag(SYNTHESIZE_IMPL_IN_SUBCLASS) || isTraitValSetter(sym.nextOverriddenSymbol)) + + for (m <- normMembers) { + if (!needsSpecialization(fullEnv, m)) { + if (m.isValue && !m.isMutable && !m.isMethod && !m.isDeferred && !m.isLazy && !m.isParamAccessor) { + // non-specialized `val` fields are made mutable (in Constructors) and assigned from the + // constructors of specialized subclasses. See PR scala/scala#9704. + clazz.primaryConstructor.updateAttachment(ConstructorNeedsFence) + sClass.primaryConstructor.updateAttachment(ConstructorNeedsFence) } - else - debuglog("conflicting env for " + m + " env: " + env) - } - else if (m.isDeferred && m.isSpecialized) { // abstract methods - val specMember = enterMember(cloneInSpecializedClass(m, _ | DEFERRED)) - // debuglog("deferred " + specMember.fullName + " remains abstract") - - info(specMember) = Abstract(specMember) - // was: new Forward(specMember) { - // override def target = m.owner.info.member(specializedName(m, env)) - // } - } else if (m.hasFlag(SUPERACCESSOR)) { // basically same as abstract case - // we don't emit a specialized overload for the super accessor because we can't jump back and forth - // between specialized and non-specialized methods during an invokespecial for the super call, - // so, we must jump immediately into the non-specialized world to find our super - val specMember = enterMember(cloneInSpecializedClass(m, f => f)) - - // rebindSuper in mixins knows how to rejigger this - // (basically it skips this specialized class in the base class seq, and then also never rebinds to a specialized method) - specMember.asInstanceOf[TermSymbol].referenced = m.alias - - info(specMember) = SpecialSuperAccessor(specMember) - } else if (m.isMethod && !m.hasFlag(DEFERRED) && (!m.hasFlag(ACCESSOR) || m.hasFlag(LAZY))) { // other concrete methods - forwardToOverload(m) - } else if (m.isValue && !m.isMethod) { // concrete value definition - def mkAccessor(field: Symbol, name: Name) = { - val newFlags = (SPECIALIZED | m.getterIn(clazz).flags) & ~(LOCAL | CASEACCESSOR | PARAMACCESSOR) - // we rely on the super class to initialize param accessors - val sym = sClass.newMethod(name.toTermName, field.pos, newFlags) - info(sym) = SpecializedAccessor(field) - sym + } else if (satisfiable(fullEnv)) { + if (!m.isDeferred) + addConcreteSpecMethod(m) + // specialized members have to be overridable. 
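// A hand-written analogue (not compiler output) of the arrangement that the
// `ConstructorNeedsFence` attachment above leads to: a former `val` in the
// generic class is made mutable and re-assigned from the specialized
// subclass constructor, and `scala.runtime.Statics.releaseFence()` (the call
// the attachment causes Constructors to append) restores the publication
// guarantee the final field used to provide. The class names are made up.
object ReleaseFenceSketch {
  class Generic[T](init: T) {
    var value: T = init                            // originally a val; made mutable for specialization
    scala.runtime.Statics.releaseFence()
  }
  class IntSpecialized(init: Int) extends Generic[Int](init) {
    value = init                                   // assigned again from the subclass constructor
    scala.runtime.Statics.releaseFence()
  }
  def main(args: Array[String]): Unit =
    println(new IntSpecialized(5).value)
}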
+ if (m.isPrivate) + m.resetFlag(PRIVATE).setFlag(PROTECTED) + + if (m.isConstructor) { + val specCtor = enterMember(cloneInSpecializedClass(m, x => x)) + info(specCtor) = Forward(m) } - def overrideIn(clazz: Symbol, sym: Symbol) = { - val newFlags = (sym.flags | OVERRIDE | SPECIALIZED) & ~(DEFERRED | CASEACCESSOR | PARAMACCESSOR) - val sym1 = sym.cloneSymbol(clazz, newFlags) - sym1.modifyInfo(_.asSeenFrom(clazz.tpe, sym1.owner)) + else if (isNormalizedMember(m)) { // methods added by normalization + val NormalizedMember(original) = info(m): @unchecked + if (nonConflicting(env ++ typeEnv(m))) { + if (info(m).degenerate) { + debuglog("degenerate normalized member " + m.defString) + val specMember = enterMember(cloneInSpecializedClass(m, _ & ~DEFERRED)) + + info(specMember) = Implementation(original) + typeEnv(specMember) = env ++ typeEnv(m) + } else { + val om = forwardToOverload(m) + debuglog("normalizedMember " + m + " om: " + om + " " + pp(typeEnv(om))) + } + } + else + debuglog("conflicting env for " + m + " env: " + env) } - val specVal = specializedOverload(sClass, m, env) + else if (m.isDeferred && m.isSpecialized) { // abstract methods + val specMember = enterMember(cloneInSpecializedClass(m, _ | DEFERRED)) + // debuglog("deferred " + specMember.fullName + " remains abstract") + + info(specMember) = Abstract(specMember) + // was: new Forward(specMember) { + // override def target = m.owner.info.member(specializedName(m, env)) + // } + } else if (m.hasFlag(SUPERACCESSOR)) { // basically same as abstract case + // we don't emit a specialized overload for the super accessor because we can't jump back and forth + // between specialized and non-specialized methods during an invokespecial for the super call, + // so, we must jump immediately into the non-specialized world to find our super + val specMember = enterMember(cloneInSpecializedClass(m, f => f)) + + // rebindSuper in mixins knows how to rejigger this + // (basically it skips this specialized class in the base class seq, and then also never rebinds to a specialized method) + specMember.asInstanceOf[TermSymbol].referenced = m.alias + + info(specMember) = SpecialSuperAccessor(specMember) + } else if (m.isMethod && !m.isDeferred && (!m.isAccessor || m.isLazy || isTraitValSetter(m))) { // other concrete methods + forwardToOverload(m) + } else if (m.isValue && !m.isMethod) { // concrete value definition + def mkAccessor(field: Symbol, name: Name) = { + val newFlags = (SPECIALIZED | m.getterIn(clazz).flags) & ~(LOCAL | CASEACCESSOR | PARAMACCESSOR) + // we rely on the super class to initialize param accessors + val sym = sClass.newMethod(name.toTermName, field.pos, newFlags) + info(sym) = SpecializedAccessor(field) + sym + } - addConcreteSpecMethod(m) - specVal.asInstanceOf[TermSymbol].setAlias(m) + def overrideIn(clazz: Symbol, sym: Symbol) = { + val newFlags = (sym.flags | OVERRIDE | SPECIALIZED) & ~(DEFERRED | CASEACCESSOR | PARAMACCESSOR) + val sym1 = sym.cloneSymbol(clazz, newFlags) + sym1.modifyInfo(_.asSeenFrom(clazz.tpe, sym1.owner)) + } - enterMember(specVal) - // create accessors + val specVal = specializedOverload(sClass, m, env) - if (m.isLazy) { - // no getters needed (we'll specialize the compute method and accessor separately), can stay private - // m.setFlag(PRIVATE) -- TODO: figure out how to leave the non-specialized lazy var private - // (the implementation needs it to be visible while duplicating and retypechecking, - // but it really could be private in bytecode) - specVal.setFlag(PRIVATE) - } - else if 
(nme.isLocalName(m.name)) { - val specGetter = mkAccessor(specVal, specVal.getterName) setInfo MethodType(Nil, specVal.info) - val origGetter = overrideIn(sClass, m.getterIn(clazz)) - info(origGetter) = Forward(specGetter) - enterMember(specGetter) - enterMember(origGetter) - debuglog("specialize accessor in %s: %s -> %s".format(sClass.name.decode, origGetter.name.decode, specGetter.name.decode)) - - clazz.caseFieldAccessors.find(_.name.startsWith(m.name)) foreach { cfa => - val cfaGetter = overrideIn(sClass, cfa) - info(cfaGetter) = SpecializedAccessor(specVal) - enterMember(cfaGetter) - debuglog("override case field accessor %s -> %s".format(m.name.decode, cfaGetter.name.decode)) + addConcreteSpecMethod(m) + specVal.asInstanceOf[TermSymbol].setAlias(m) + + enterMember(specVal) + // create accessors + + if (m.isLazy) { + // no getters needed (we'll specialize the compute method and accessor separately), can stay private + // m.setFlag(PRIVATE) -- TODO: figure out how to leave the non-specialized lazy var private + // (the implementation needs it to be visible while duplicating and retypechecking, + // but it really could be private in bytecode) + specVal.setFlag(PRIVATE) } + else if (nme.isLocalName(m.name)) { + val specGetter = mkAccessor(specVal, specVal.getterName) setInfo MethodType(Nil, specVal.info) + val origGetter = overrideIn(sClass, m.getterIn(clazz)) + info(origGetter) = Forward(specGetter) + enterMember(specGetter) + enterMember(origGetter) + debuglog("specialize accessor in %s: %s -> %s".format(sClass.name.decode, origGetter.name.decode, specGetter.name.decode)) + + clazz.caseFieldAccessors.find(_.name.startsWith(m.name)) foreach { cfa => + val cfaGetter = overrideIn(sClass, cfa) + info(cfaGetter) = SpecializedAccessor(specVal) + enterMember(cfaGetter) + debuglog("override case field accessor %s -> %s".format(m.name.decode, cfaGetter.name.decode)) + } - if (specVal.isVariable && m.setterIn(clazz) != NoSymbol) { - val specSetter = mkAccessor(specVal, specGetter.setterName) - .resetFlag(STABLE) - specSetter.setInfo(MethodType(specSetter.newSyntheticValueParams(List(specVal.info)), - UnitTpe)) - val origSetter = overrideIn(sClass, m.setterIn(clazz)) - info(origSetter) = Forward(specSetter) - enterMember(specSetter) - enterMember(origSetter) + if (specVal.isVariable && m.setterIn(clazz) != NoSymbol) { + val specSetter = mkAccessor(specVal, specGetter.setterName) + .resetFlag(STABLE) + specSetter.setInfo(MethodType(specSetter.newSyntheticValueParams(List(specVal.info)), + UnitTpe)) + val origSetter = overrideIn(sClass, m.setterIn(clazz)) + info(origSetter) = Forward(specSetter) + enterMember(specSetter) + enterMember(origSetter) + } + } + else { // if there are no accessors, specialized methods will need to access this field in specialized subclasses + m.resetFlag(PRIVATE) + specVal.resetFlag(PRIVATE) + debuglog("no accessors for %s/%s, specialized methods must access field in subclass".format( + m.name.decode, specVal.name.decode)) } } - else { // if there are no accessors, specialized methods will need to access this field in specialized subclasses - m.resetFlag(PRIVATE) - specVal.resetFlag(PRIVATE) - debuglog("no accessors for %s/%s, specialized methods must access field in subclass".format( - m.name.decode, specVal.name.decode)) + else if (m.isClass) { + val specClass: Symbol = cloneInSpecializedClass(m, x => x) + typeEnv(specClass) = fullEnv + specClass setName specializedName(specClass, fullEnv).toTypeName + enterMember(specClass) + debuglog("entered specialized class " + 
specClass.fullName) + info(specClass) = SpecializedInnerClass(m, fullEnv) } } - else if (m.isClass) { - val specClass: Symbol = cloneInSpecializedClass(m, x => x) - typeEnv(specClass) = fullEnv - specClass setName specializedName(specClass, fullEnv).toTypeName - enterMember(specClass) - debuglog("entered specialized class " + specClass.fullName) - info(specClass) = SpecializedInnerClass(m, fullEnv) - } } sClass } @@ -1393,7 +1416,16 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { } } - protected override def newBodyDuplicator(context: Context): SpecializeBodyDuplicator = new SpecializeBodyDuplicator(context) + private class SpecializeNamer(context: Context) extends Namer(context) { + // Avoid entering synthetic trees during specialization because the duplicated trees already contain them. + override def enterSyntheticSym(tree: Tree): Symbol = tree.symbol + } + + protected override def newBodyDuplicator(context: Context): SpecializeBodyDuplicator = + new SpecializeBodyDuplicator(context) + + override def newNamer(context: Context): Namer = + new SpecializeNamer(context) } /** Introduced to fix scala/bug#7343: Phase ordering problem between Duplicators and Specialization. @@ -1459,7 +1491,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { * in order to be accessible from specialized subclasses. */ override def transform(tree: Tree): Tree = tree match { - case Select(qual, name) => + case Select(_, _) => val sym = tree.symbol if (sym.isPrivate) debuglog( "seeing private member %s, currentClass: %s, owner: %s, isAccessible: %b, isLocalName: %b".format( @@ -1903,32 +1935,35 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { debuglog("specializing body of" + symbol.defString) val DefDef(_, _, tparams, vparams :: Nil, tpt, _) = tree: @unchecked val env = typeEnv(symbol) - val origtparams = source.typeParams.filter(tparam => !env.contains(tparam) || !isPrimitiveValueType(env(tparam))) - if (origtparams.nonEmpty || symbol.typeParams.nonEmpty) - debuglog("substituting " + origtparams + " for " + symbol.typeParams) + + val srcVparams = parameters(source) + val srcTparams = source.typeParams.filter(tparam => !env.contains(tparam) || !isPrimitiveValueType(env(tparam))) + if (settings.isDebug && (srcTparams.nonEmpty || symbol.typeParams.nonEmpty)) + debuglog("substituting " + srcTparams + " for " + symbol.typeParams) // skolemize type parameters - val oldtparams = tparams map (_.symbol) - val newtparams = deriveFreshSkolems(oldtparams) - map2(tparams, newtparams)(_ setSymbol _) + val oldTparams = tparams.map(_.symbol) + val newTparams = deriveFreshSkolems(oldTparams) + map2(tparams, newTparams)(_ setSymbol _) // create fresh symbols for value parameters to hold the skolem types - val newSyms = cloneSymbolsAtOwnerAndModify(vparams map (_.symbol), symbol, _.substSym(oldtparams, newtparams)) + val oldVparams = vparams.map(_.symbol) + val newVparams = cloneSymbolsAtOwnerAndModify(oldVparams, symbol, _.substSym(oldTparams, newTparams)) + + val srcParams = srcVparams ::: srcTparams + val oldParams = oldVparams ::: oldTparams + val newParams = newVparams ::: newTparams // replace value and type parameters of the old method with the new ones // log("Adding body for " + tree.symbol + " - origtparams: " + origtparams + "; tparams: " + tparams) // log("Type vars of: " + source + ": " + source.typeParams) // log("Type env of: " + tree.symbol + ": " + boundTvars) // log("newtparams: " + newtparams) - val symSubstituter 
= new ImplementationAdapter( - parameters(source) ::: origtparams, - newSyms ::: newtparams, - source.enclClass, - false) // don't make private fields public - - val newBody = symSubstituter(body(source).duplicate) - tpt modifyType (_.substSym(oldtparams, newtparams)) - copyDefDef(tree)(vparamss = List(newSyms map ValDef.apply), rhs = newBody) + // don't make private fields public + val substituter = new ImplementationAdapter(srcParams, newParams, source.enclClass, false) + val newRhs = substituter(body(source).duplicate) + tpt.modifyType(_.substSym(oldParams, newParams)) + copyDefDef(tree)(vparamss = newVparams.map(ValDef.apply) :: Nil, rhs = newRhs) } /** Create trees for specialized members of 'sClass', based on the diff --git a/src/compiler/scala/tools/nsc/transform/TypeAdaptingTransformer.scala b/src/compiler/scala/tools/nsc/transform/TypeAdaptingTransformer.scala index 0f327b540fa8..93eb50dc6939 100644 --- a/src/compiler/scala/tools/nsc/transform/TypeAdaptingTransformer.scala +++ b/src/compiler/scala/tools/nsc/transform/TypeAdaptingTransformer.scala @@ -97,7 +97,7 @@ trait TypeAdaptingTransformer { self: TreeDSL => case ArrayClass => assert(pt.typeSymbol != ArrayClass, "array") ; tree case _ => val unboxer = currentRun.runDefinitions.unboxMethod(pt.typeSymbol) - if (settings.developer) assert(boxedClass(pt.typeSymbol).tpe <:< tree.tpe, s"${tree.tpe} is not a boxed ${pt}") + if (settings.isDeveloper) assert(boxedClass(pt.typeSymbol).tpe <:< tree.tpe, s"${tree.tpe} is not a boxed ${pt}") Apply(unboxer, tree) // don't `setType pt` the Apply tree, as the Apply's fun won't be typechecked if the Apply tree already has a type } } @@ -116,7 +116,7 @@ trait TypeAdaptingTransformer { self: TreeDSL => * @note Pre-condition: pt eq pt.normalize */ final def cast(tree: Tree, pt: Type): Tree = { - if (settings.debug && (tree.tpe ne null) && !(tree.tpe =:= ObjectTpe)) { + if (settings.isDebug && (tree.tpe ne null) && !(tree.tpe =:= ObjectTpe)) { def word = if (tree.tpe <:< pt) "upcast" else if (pt <:< tree.tpe) "downcast" diff --git a/src/compiler/scala/tools/nsc/transform/UnCurry.scala b/src/compiler/scala/tools/nsc/transform/UnCurry.scala index f3428ca3c677..0ee1246b3357 100644 --- a/src/compiler/scala/tools/nsc/transform/UnCurry.scala +++ b/src/compiler/scala/tools/nsc/transform/UnCurry.scala @@ -445,7 +445,7 @@ abstract class UnCurry extends InfoTransform if (sym.isMethod) level < settings.elidebelow.value else { // TODO: report error? It's already done in RefChecks. 
https://github.com/scala/scala/pull/5539#issuecomment-331376887 - if (currentRun.isScala213) reporter.error(sym.pos, s"${sym.name}: Only methods can be marked @elidable.") + reporter.error(sym.pos, s"${sym.name}: Only methods can be marked @elidable.") false } } diff --git a/src/compiler/scala/tools/nsc/transform/async/AsyncPhase.scala b/src/compiler/scala/tools/nsc/transform/async/AsyncPhase.scala index 60d7c510723f..e58c4dc8c914 100644 --- a/src/compiler/scala/tools/nsc/transform/async/AsyncPhase.scala +++ b/src/compiler/scala/tools/nsc/transform/async/AsyncPhase.scala @@ -27,7 +27,9 @@ abstract class AsyncPhase extends Transform with TypingTransformers with AnfTran val phaseName: String = "async" override def enabled: Boolean = settings.async - private final case class AsyncAttachment(awaitSymbol: Symbol, postAnfTransform: Block => Block, stateDiagram: ((Symbol, Tree) => Option[String => Unit])) extends PlainAttachment + private final case class AsyncAttachment(awaitSymbol: Symbol, postAnfTransform: Block => Block, + stateDiagram: ((Symbol, Tree) => Option[String => Unit]), + allowExceptionsToPropagate: Boolean) extends PlainAttachment // Optimization: avoid the transform altogether if there are no async blocks in a unit. private val sourceFilesToTransform = perRunCaches.newSet[SourceFile]() @@ -45,7 +47,8 @@ abstract class AsyncPhase extends Transform with TypingTransformers with AnfTran sourceFilesToTransform += pos.source val postAnfTransform = config.getOrElse("postAnfTransform", (x: Block) => x).asInstanceOf[Block => Block] val stateDiagram = config.getOrElse("stateDiagram", (sym: Symbol, tree: Tree) => None).asInstanceOf[(Symbol, Tree) => Option[String => Unit]] - method.updateAttachment(new AsyncAttachment(awaitMethod, postAnfTransform, stateDiagram)) + val allowExceptionsToPropagate = config.contains("allowExceptionsToPropagate") + method.updateAttachment(new AsyncAttachment(awaitMethod, postAnfTransform, stateDiagram, allowExceptionsToPropagate)) // Wrap in `{ expr: Any }` to force value class boxing before calling `completeSuccess`, see test/async/run/value-class.scala deriveDefDef(method) { rhs => Block(Apply(gen.mkAttributedRef(definitions.Predef_locally), rhs :: Nil), Literal(Constant(()))) @@ -112,9 +115,8 @@ abstract class AsyncPhase extends Transform with TypingTransformers with AnfTran deriveTemplate(impl)(liftedTrees ::: _) }) } - assert(localTyper.context.owner == cd.symbol.owner, - "local typer context's owner must be ClassDef symbol's owner") - val withFields = new UseFields(localTyper, cd.symbol, applySym, liftedSyms).transform(cd1) + assert(localTyper.context.owner == cd.symbol.owner, "local typer context's owner must be ClassDef symbol's owner") + val withFields = new UseFields(localTyper, cd.symbol, applySym, liftedSyms, NoSymbol).transform(cd1) withFields case dd: DefDef if dd.hasAttachment[AsyncAttachment] => @@ -124,14 +126,25 @@ abstract class AsyncPhase extends Transform with TypingTransformers with AnfTran } atOwner(dd, dd.symbol) { - val trSym = dd.vparamss.head.head.symbol + val trSym = dd.vparamss.head.last.symbol + val selfSym = if (dd.symbol.owner.isTerm) dd.vparamss.head.head.symbol else NoSymbol val saved = currentTransformState - currentTransformState = new AsyncTransformState(asyncAttachment.awaitSymbol, - asyncAttachment.postAnfTransform, asyncAttachment.stateDiagram, this, trSym, asyncBody.tpe, asyncNames) + currentTransformState = new AsyncTransformState( + asyncAttachment.awaitSymbol, + asyncAttachment.postAnfTransform, + 
asyncAttachment.stateDiagram, + asyncAttachment.allowExceptionsToPropagate, + this, + selfSym, + trSym, + asyncBody.tpe, + asyncNames) try { - val (newRhs, liftableFields) = asyncTransform(asyncBody) - liftableMap(dd.symbol.owner) = (dd.symbol, liftableFields) - deriveDefDef(dd)(_ => newRhs) + val (newRhs, liftedTrees) = asyncTransform(asyncBody) + liftableMap(currentTransformState.stateMachineClass) = (dd.symbol, liftedTrees) + val liftedSyms = liftedTrees.iterator.map(_.symbol).toSet + val withFields = new UseFields(localTyper, currentTransformState.stateMachineClass, dd.symbol, liftedSyms, selfSym).transform(newRhs) + deriveDefDef(dd)(_ => withFields) } finally { currentTransformState = saved } @@ -169,7 +182,7 @@ abstract class AsyncPhase extends Transform with TypingTransformers with AnfTran if (nullOut) { for ((state, (preNulls, postNulls)) <- fieldsToNullOut(asyncBlock.asyncStates, asyncBlock.asyncStates.last, liftedFields)) { val asyncState = asyncBlock.asyncStates.find(_.state == state).get - if (asyncState.nextStates.nonEmpty) + if (asyncState.hasNonTerminalNextState) asyncState.insertNullAssignments(preNulls.iterator, postNulls.iterator) } } @@ -178,7 +191,7 @@ abstract class AsyncPhase extends Transform with TypingTransformers with AnfTran val applyBody = atPos(asyncPos)(asyncBlock.onCompleteHandler) // Logging - if ((settings.debug.value && shouldLogAtThisPhase)) + if ((settings.isDebug && shouldLogAtThisPhase)) logDiagnostics(anfTree, asyncBlock, asyncBlock.asyncStates.map(_.toString)) // Offer async frontends a change to produce the .dot diagram transformState.dotDiagram(applySym, asyncBody).foreach(f => f(asyncBlock.toDot)) @@ -193,15 +206,20 @@ abstract class AsyncPhase extends Transform with TypingTransformers with AnfTran // - references to them are rewritten as referencs to the fields. // - the rhs of ValDefs that initialize such fields is turned into an assignment to the field private class UseFields(initLocalTyper: analyzer.Typer, stateMachineClass: Symbol, - applySym: Symbol, liftedSyms: Set[Symbol]) extends explicitOuter.OuterPathTransformer(initLocalTyper) { + applySym: Symbol, liftedSyms: Set[Symbol], selfSym: Symbol) extends explicitOuter.OuterPathTransformer(initLocalTyper) { private def fieldSel(tree: Tree) = { assert(currentOwner != NoSymbol, "currentOwner cannot be NoSymbol") - val outerOrThis = if (stateMachineClass == currentClass) gen.mkAttributedThis(stateMachineClass) else { - // These references need to be selected from an outer reference, because explicitouter - // has already run we must perform this transform explicitly here. - tree.symbol.makeNotPrivate(tree.symbol.owner) - outerPath(outerValue, currentClass.outerClass, stateMachineClass) - } + val outerOrThis = + if (selfSym != NoSymbol) + gen.mkAttributedIdent(selfSym) + else if (stateMachineClass == currentClass) + gen.mkAttributedThis(stateMachineClass) + else { + // These references need to be selected from an outer reference, because explicitouter + // has already run we must perform this transform explicitly here. 
+ tree.symbol.makeNotPrivate(tree.symbol.owner) + outerPath(outerValue, currentClass.outerClass, stateMachineClass) + } atPos(tree.pos)(Select(outerOrThis.setType(stateMachineClass.tpe), tree.symbol).setType(tree.symbol.tpe)) } override def transform(tree: Tree): Tree = tree match { diff --git a/src/compiler/scala/tools/nsc/transform/async/AsyncTransformStates.scala b/src/compiler/scala/tools/nsc/transform/async/AsyncTransformStates.scala index 8bfc6651be23..a1da31110c77 100644 --- a/src/compiler/scala/tools/nsc/transform/async/AsyncTransformStates.scala +++ b/src/compiler/scala/tools/nsc/transform/async/AsyncTransformStates.scala @@ -24,7 +24,9 @@ trait AsyncTransformStates extends TypingTransformers { class AsyncTransformState(val awaitSymbol: Symbol, val postAnfTransform: Block => Block, val dotDiagram: (Symbol, Tree) => Option[String => Unit], + val allowExceptionsToPropagate: Boolean, val typingTransformer: TypingTransformer, + val exteralFsmSelfParam: Symbol, val applyTrParam: Symbol, val asyncType: Type, val asyncNames: AsyncNames[global.type]) { @@ -39,7 +41,7 @@ trait AsyncTransformStates extends TypingTransformers { val applySym: Symbol = applyTr.owner var currentPos: Position = applySym.pos - lazy val stateMachineClass: Symbol = applySym.owner + lazy val stateMachineClass: Symbol = if (exteralFsmSelfParam != NoSymbol) exteralFsmSelfParam.info.typeSymbol else applySym.owner lazy val stateGetter: Symbol = stateMachineMember(nme.state) lazy val stateSetter: Symbol = stateMachineMember(nme.state.setterName) lazy val stateOnComplete: Symbol = stateMachineMember(TermName("onComplete")) @@ -49,10 +51,21 @@ trait AsyncTransformStates extends TypingTransformers { lazy val stateTryGet: Symbol = stateMachineMember(TermName("tryGet")) lazy val whileLabel: Symbol = applySym.newLabel(TermName(nme.WHILE_PREFIX)).setInfo(MethodType(Nil, definitions.UnitTpe)) + lazy val tryGetIsIdentity: Boolean = exitingTyper { + stateTryGet.info.finalResultType.termSymbol == stateTryGet.firstParam + } def stateMachineMember(name: TermName): Symbol = stateMachineClass.info.member(name) def memberRef(sym: Symbol): Tree = - gen.mkAttributedRef(stateMachineClass.typeConstructor, sym) + if (exteralFsmSelfParam == NoSymbol) + gen.mkAttributedRef(stateMachineClass.typeConstructor, sym) + else + gen.mkAttributedSelect(gen.mkAttributedIdent(exteralFsmSelfParam), sym) + def stateMachineRef(): Tree = + if (exteralFsmSelfParam == NoSymbol) + gen.mkAttributedThis(stateMachineClass) + else + gen.mkAttributedIdent(exteralFsmSelfParam) } } \ No newline at end of file diff --git a/src/compiler/scala/tools/nsc/transform/async/ExprBuilder.scala b/src/compiler/scala/tools/nsc/transform/async/ExprBuilder.scala index 9761bf0ed6dd..fd2affe54268 100644 --- a/src/compiler/scala/tools/nsc/transform/async/ExprBuilder.scala +++ b/src/compiler/scala/tools/nsc/transform/async/ExprBuilder.scala @@ -40,6 +40,15 @@ trait ExprBuilder extends TransformUtils with AsyncAnalysis { } } final class AsyncState(var stats: List[Tree], val state: Int, var nextStates: Array[Int], val isEmpty: Boolean) { + def hasNonTerminalNextState: Boolean = { + var i = 0 + val ns = nextStates + while (i < ns.length) { + if (ns(i) != StateAssigner.Terminal) return true + i += 1 + } + false + } def mkHandlerCaseForState: CaseDef = { replaceResidualJumpsWithStateTransitions.transform(CaseDef(Literal(Constant(state)), EmptyTree, adaptToUnit(stats))).asInstanceOf[CaseDef] } @@ -51,7 +60,7 @@ trait ExprBuilder extends TransformUtils with AsyncAnalysis { val stats1 = 
mutable.ListBuffer[Tree]() def addNullAssigments(syms: Iterator[Symbol]): Unit = { for (fieldSym <- syms) { - stats1 += typed(Assign(gen.mkAttributedStableRef(fieldSym.owner.thisPrefix, fieldSym), gen.mkZero(fieldSym.info))) + stats1 += typed(Assign(currentTransformState.memberRef(fieldSym), gen.mkZero(fieldSym.info))) } } // Add pre-state null assigments at the beginning. @@ -303,7 +312,7 @@ trait ExprBuilder extends TransformUtils with AsyncAnalysis { buildStateAndOpenNextState(afterLabelState, style = StateTransitionStyle.None) } } else if (containsAwait(rhs)) { - // A while loop containg an await. We assuming that the the backward branch is reachable across the async + // A while loop containing an await. We assuming that the the backward branch is reachable across the async // code path and create a state for the `while` label. // // In theory we could avoid creating this state in code like: @@ -449,25 +458,28 @@ trait ExprBuilder extends TransformUtils with AsyncAnalysis { def onCompleteHandler: Tree = { val transformState = currentTransformState def stateMemberRef = gen.mkApplyIfNeeded(transformState.memberRef(transformState.stateGetter)) - val throww = Throw(Apply(Select(New(Ident(IllegalStateExceptionClass)), IllegalStateExceptionClass_NEW_String), List(gen.mkMethodCall(currentRun.runDefinitions.String_valueOf_Int, stateMemberRef :: Nil)))) + val asyncStatesInit = asyncStates.init // drop the terminal state which has no code. - val body = + val throww = Throw(Apply(Select(New(Ident(IllegalStateExceptionClass)), IllegalStateExceptionClass_NEW_String), List(gen.mkMethodCall(currentRun.runDefinitions.String_valueOf_Int, stateMemberRef :: Nil)))) + val body = typed(Match(stateMemberRef, - asyncStatesInit.map(_.mkHandlerCaseForState) ++ - List(CaseDef(Ident(nme.WILDCARD), EmptyTree, - throww)))) - - val body1 = compactStates(body.asInstanceOf[Match]) - - val stateMatch = Try( - body1, - List( - CaseDef( - Bind(nme.t, Typed(Ident(nme.WILDCARD), Ident(definitions.ThrowableClass))), - EmptyTree, - Block(Apply(currentTransformState.memberRef(currentTransformState.stateCompleteFailure), Ident(nme.t) :: Nil) :: Nil, Return(literalUnit)) - ) - ), EmptyTree) + asyncStatesInit.map(_.mkHandlerCaseForState) ++ + List(CaseDef(Ident(nme.WILDCARD), EmptyTree, + throww)))) + val body1 = compactStates(body.asInstanceOf[Match]) + val stateMatch = if (transformState.allowExceptionsToPropagate) { + body1 + } else { + Try( + body1, + List( + CaseDef( + Bind(nme.t, Typed(Ident(nme.WILDCARD), Ident(definitions.ThrowableClass))), + EmptyTree, + Block(Apply(currentTransformState.memberRef(currentTransformState.stateCompleteFailure), Ident(nme.t) :: Nil) :: Nil, Return(literalUnit)) + ) + ), EmptyTree) + } typed(LabelDef(transformState.whileLabel, Nil, Block(stateMatch :: Nil, Apply(Ident(transformState.whileLabel), Nil)))) } @@ -540,15 +552,19 @@ trait ExprBuilder extends TransformUtils with AsyncAnalysis { private def resumeTree(awaitableResult: ValDef): Tree = { def tryyReference = gen.mkAttributedIdent(currentTransformState.applyTrParam) deriveValDef(awaitableResult) { _ => - val temp = awaitableResult.symbol.newTermSymbol(TermName("tryGetResult$" + "async")).setInfo(definitions.ObjectTpe) - val tempVd = ValDef(temp, gen.mkMethodCall(currentTransformState.memberRef(currentTransformState.stateTryGet), tryyReference :: Nil)) - typed(Block( - tempVd :: Nil, - If(Apply(gen.mkAttributedSelect(gen.mkAttributedThis(currentTransformState.stateMachineClass), definitions.Any_==), gen.mkAttributedIdent(temp) :: Nil), 
- Return(literalUnit), - gen.mkCast(gen.mkAttributedIdent(temp), tempVd.symbol.info) - ) - )) + if (currentTransformState.tryGetIsIdentity) { + tryyReference + } else { + val temp = awaitableResult.symbol.newTermSymbol(nme.trGetResult).setInfo(definitions.ObjectTpe) + val tempVd = ValDef(temp, gen.mkMethodCall(currentTransformState.memberRef(currentTransformState.stateTryGet), tryyReference :: Nil)) + typed(Block( + tempVd :: Nil, + If(Apply(gen.mkAttributedSelect(currentTransformState.stateMachineRef(), definitions.Object_eq), gen.mkAttributedIdent(temp) :: Nil), + Return(literalUnit), + gen.mkCast(gen.mkAttributedIdent(temp), tempVd.symbol.info) + ) + )) + } } } @@ -565,16 +581,7 @@ trait ExprBuilder extends TransformUtils with AsyncAnalysis { protected def mkStateTree(nextState: Int): Tree = { val transformState = currentTransformState val callSetter = Apply(transformState.memberRef(transformState.stateSetter), Literal(Constant(nextState)) :: Nil) - val printStateUpdates = false - val tree = if (printStateUpdates) { - Block( - callSetter :: Nil, - gen.mkMethodCall(definitions.PredefModule.info.member(TermName("println")), - currentTransformState.localTyper.typed(gen.mkApplyIfNeeded(transformState.memberRef(transformState.stateGetter)), definitions.ObjectTpe) :: Nil) - ) - } - else callSetter - typed(tree.updateAttachment(StateTransitionTree)) + typed(callSetter.updateAttachment(StateTransitionTree)) } } @@ -603,7 +610,7 @@ trait ExprBuilder extends TransformUtils with AsyncAnalysis { // (_without_ consuming an extra stack frome!) def callOnComplete(fut: Tree): Tree = - Apply(Select(This(currentTransformState.stateMachineClass), transformState.stateOnComplete), fut :: Nil) + Apply(currentTransformState.memberRef(transformState.stateOnComplete), fut :: Nil) val runCompletedOnSameThread = transformState.stateGetCompleted != NoSymbol if (runCompletedOnSameThread) { diff --git a/src/compiler/scala/tools/nsc/transform/async/Lifter.scala b/src/compiler/scala/tools/nsc/transform/async/Lifter.scala index b1d947aa21d5..e1a2519af5d8 100644 --- a/src/compiler/scala/tools/nsc/transform/async/Lifter.scala +++ b/src/compiler/scala/tools/nsc/transform/async/Lifter.scala @@ -150,15 +150,19 @@ trait Lifter extends ExprBuilder { val treeLifted = t match { case vd@ValDef(_, _, tpt, rhs) => val isLazy = sym.isLazy - sym.setFlag(STABLE | PRIVATE | LOCAL) - if (isLazy) sym.resetFlag(LAZY) else sym.setFlag(MUTABLE) + sym.setFlag(STABLE) + if (currentTransformState.exteralFsmSelfParam == NoSymbol) + sym.setFlag(PRIVATE | LOCAL) + + if (isLazy) sym.resetFlag(LAZY) + sym.setFlag(MUTABLE) sym.setName(currentTransformState.name.freshenIfNeeded(sym.name.toTermName)) sym.setInfo(sym.info.deconst) - val rhs1 = if (isLazy) rhs else EmptyTree - treeCopy.ValDef(vd, Modifiers(sym.flags), sym.name, TypeTree(sym.info).setPos(t.pos), rhs1) + treeCopy.ValDef(vd, Modifiers(sym.flags), sym.name, TypeTree(sym.info).setPos(t.pos), EmptyTree) case dd@DefDef(_, _, tparams, vparamss, tpt, rhs) => sym.setName(currentTransformState.name.freshen(sym.name.toTermName)) - sym.setFlag(PRIVATE | LOCAL) + if (currentTransformState.exteralFsmSelfParam == NoSymbol) + sym.setFlag(PRIVATE | LOCAL) // Was `DefDef(sym, rhs)`, but this ran afoul of `ToughTypeSpec.nestedMethodWithInconsistencyTreeAndInfoParamSymbols` // due to the handling of type parameter skolems in `thisMethodType` in `Namers` treeCopy.DefDef(dd, Modifiers(sym.flags), sym.name, tparams, vparamss, tpt, rhs) diff --git a/src/compiler/scala/tools/nsc/transform/patmat/Logic.scala 
b/src/compiler/scala/tools/nsc/transform/patmat/Logic.scala index a06f648680ce..551a54f9cf02 100644 --- a/src/compiler/scala/tools/nsc/transform/patmat/Logic.scala +++ b/src/compiler/scala/tools/nsc/transform/patmat/Logic.scala @@ -16,7 +16,7 @@ package tools.nsc.transform.patmat import scala.collection.mutable import scala.collection.immutable.ArraySeq import scala.reflect.internal.util.Collections._ -import scala.reflect.internal.util.{HashSet, StatisticsStatics} +import scala.reflect.internal.util.HashSet trait Logic extends Debugging { import global._ @@ -395,7 +395,7 @@ trait Logic extends Debugging { // according to subtyping, e.g., V = ConstantType(1) and V = Int are valid assignments // we rewrite V = C to a fresh boolean symbol, and model what we know about the variable's domain // in a prelude (the equality axioms) - // 1. a variable with a closed domain (of a sealed type) must be assigned one of the instantiatable types in its domain + // 1. a variable with a closed domain (of a sealed type) must be assigned one of the instantiable types in its domain // 2. for each variable V in props, and each constant C it is compared to, // compute which assignments imply each other (as in the example above: V = 1 implies V = Int) // and which assignments are mutually exclusive (V = String implies -(V = Int)) @@ -408,7 +408,7 @@ trait Logic extends Debugging { // V1 = Nil implies -(V2 = Ci) for all Ci in V2's domain (i.e., it is unassignable) // may throw an AnalysisBudget.Exception def removeVarEq(props: List[Prop], modelNull: Boolean = false): (Prop, List[Prop]) = { - val start = if (StatisticsStatics.areSomeColdStatsEnabled) statistics.startTimer(statistics.patmatAnaVarEq) else null + val start = if (settings.areStatisticsEnabled) statistics.startTimer(statistics.patmatAnaVarEq) else null val vars = new mutable.LinkedHashSet[Var] @@ -491,7 +491,7 @@ trait Logic extends Debugging { debug.patmat(s"eqAxioms:\n${eqAxiomsSeq.mkString("\n")}") debug.patmat(s"pure:${pure.mkString("\n")}") - if (StatisticsStatics.areSomeColdStatsEnabled) statistics.stopTimer(statistics.patmatAnaVarEq, start) + if (settings.areStatisticsEnabled) statistics.stopTimer(statistics.patmatAnaVarEq, start) (And(eqAxiomsSeq: _*), pure) } diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchAnalysis.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchAnalysis.scala index 3730a5668bcb..99aafbee6a03 100644 --- a/src/compiler/scala/tools/nsc/transform/patmat/MatchAnalysis.scala +++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchAnalysis.scala @@ -14,7 +14,6 @@ package scala.tools.nsc.transform.patmat import scala.annotation.tailrec import scala.collection.mutable -import scala.reflect.internal.util.StatisticsStatics import scala.tools.nsc.Reporting.WarningCategory trait TreeAndTypeAnalysis extends Debugging { @@ -459,7 +458,7 @@ trait MatchAnalysis extends MatchApproximation { // or, equivalently, P \/ -C, or C => P def unreachableCase(prevBinder: Symbol, cases: List[List[TreeMaker]], pt: Type): Option[Int] = { debug.patmat("reachability analysis") - val start = if (StatisticsStatics.areSomeColdStatsEnabled) statistics.startTimer(statistics.patmatAnaReach) else null + val start = if (settings.areStatisticsEnabled) statistics.startTimer(statistics.patmatAnaReach) else null // use the same approximator so we share variables, // but need different conditions depending on whether we're conservatively looking for failure or success @@ -503,7 +502,7 @@ trait MatchAnalysis extends MatchApproximation { } } - 
if (StatisticsStatics.areSomeColdStatsEnabled) statistics.stopTimer(statistics.patmatAnaReach, start) + if (settings.areStatisticsEnabled) statistics.stopTimer(statistics.patmatAnaReach, start) if (reachable) None else Some(caseIndex) } catch { @@ -521,7 +520,7 @@ trait MatchAnalysis extends MatchApproximation { // - approximate the pattern `List()` (unapplySeq on List with empty length) as `Nil`, // otherwise the common (xs: List[Any]) match { case List() => case x :: xs => } is deemed unexhaustive // - back off (to avoid crying exhaustive too often) in unhandled cases - val start = if (StatisticsStatics.areSomeColdStatsEnabled) statistics.startTimer(statistics.patmatAnaExhaust) else null + val start = if (settings.areStatisticsEnabled) statistics.startTimer(statistics.patmatAnaExhaust) else null var backoff = false val strict = !settings.nonStrictPatmatAnalysis.value @@ -578,7 +577,7 @@ trait MatchAnalysis extends MatchApproximation { // and make sure the strings are distinct, see Shmeez & TestSequence06 in run/patmatnew.scala val pruned = CounterExample.prune(counterExamples.sortBy(_.toString)).map(_.toString).distinct - if (StatisticsStatics.areSomeColdStatsEnabled) statistics.stopTimer(statistics.patmatAnaExhaust, start) + if (settings.areStatisticsEnabled) statistics.stopTimer(statistics.patmatAnaExhaust, start) pruned } catch { case ex: AnalysisBudget.Exception => diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala index 6d5a8eab3919..c02bf8d339d6 100644 --- a/src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala +++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala @@ -12,8 +12,6 @@ package scala.tools.nsc.transform.patmat -import scala.reflect.internal.util.StatisticsStatics - /** Translate typed Trees that represent pattern matches into the patternmatching IR, defined by TreeMakers. 
*/ trait MatchTranslation { @@ -125,16 +123,9 @@ trait MatchTranslation { // TODO: the outer check is mandated by the spec for case classes, but we do it for user-defined unapplies as well [SPEC] // (the prefix of the argument passed to the unapply must equal the prefix of the type of the binder) val typeTest = TypeTestTreeMaker(binder, binder, paramType, paramType)(pos, extractorArgTypeTest = true) - val binderKnownNonNull = typeTest impliesBinderNonNull binder - // skip null test if it's implied - if (binderKnownNonNull) { - val unappBinder = typeTest.nextBinder - (typeTest :: treeMakers(unappBinder, pos), unappBinder) - } else { - val nonNullTest = NonNullTestTreeMaker(typeTest.nextBinder, paramType, pos) - val unappBinder = nonNullTest.nextBinder - (typeTest :: nonNullTest :: treeMakers(unappBinder, pos), unappBinder) - } + // binder is known non-null because the type test would not succeed on `null` + val unappBinder = typeTest.nextBinder + (typeTest :: treeMakers(unappBinder, pos), unappBinder) } } @@ -209,7 +200,7 @@ trait MatchTranslation { debug.patmat("translating "+ cases.mkString("{", "\n", "}")) - val start = if (StatisticsStatics.areSomeColdStatsEnabled) statistics.startTimer(statistics.patmatNanos) else null + val start = if (settings.areStatisticsEnabled) statistics.startTimer(statistics.patmatNanos) else null val selectorTp = repeatedToSeq(elimAnonymousClass(selector.tpe.withoutAnnotations)) @@ -225,7 +216,7 @@ trait MatchTranslation { // pt = Any* occurs when compiling test/files/pos/annotDepMethType.scala val combined = combineCases(selector, selectorSym, nonSyntheticCases map translateCase(selectorSym, pt), pt, selectorPos, matchOwner, defaultOverride, getSuppression(selector)) - if (StatisticsStatics.areSomeColdStatsEnabled) statistics.stopTimer(statistics.patmatNanos, start) + if (settings.areStatisticsEnabled) statistics.stopTimer(statistics.patmatNanos, start) combined } diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchTreeMaking.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchTreeMaking.scala index 414407141b26..bda182568a2d 100644 --- a/src/compiler/scala/tools/nsc/transform/patmat/MatchTreeMaking.scala +++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchTreeMaking.scala @@ -214,11 +214,11 @@ trait MatchTreeMaking extends MatchCodeGen with Debugging { val nullCheck = REF(prevBinder) OBJ_NE NULL lazy val localSubstitution = Substitution(Nil, Nil) - def isExpectedPrimitiveType = isPrimitiveValueType(expectedTp) + def skipNullTest = isPrimitiveValueType(expectedTp) || expectedTp.typeSymbol.isDerivedValueClass def chainBefore(next: Tree)(casegen: Casegen): Tree = atPos(pos) { - if (isExpectedPrimitiveType) next + if (skipNullTest) next else casegen.ifThenElseZero(nullCheck, next) } @@ -347,9 +347,6 @@ trait MatchTreeMaking extends MatchCodeGen with Debugging { def eqTest(pat: Tree, testedBinder: Symbol) = REF(testedBinder) OBJ_EQ pat override def withOuterTest(orig: Tree)(testedBinder: Symbol, expectedTp: Type): Tree = { - val expectedPrefix = expectedTp.prefix - val testedPrefix = testedBinder.info.prefix - // Check if a type is defined in a static location. Unlike `tp.isStatic` before `flatten`, // this also includes methods and (possibly nested) objects inside of methods. 
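A concrete illustration of the outer test decided here (a sketch; whether the comparison can be elided is exactly what the logic below computes). Matching against a path-dependent inner-class type cannot rely on erasure alone, so the generated test also compares the value's outer reference with the prefix of the pattern type.

    class Outer { class Inner }

    object OuterTestDemo {
      def belongsTo(o: Outer)(x: Outer#Inner): Boolean = x match {
        case _: o.Inner => true    // erasure alone cannot tell o.Inner from another Outer's Inner,
        case _          => false   // so an outer-pointer comparison against `o` is emitted
      }
      def main(args: Array[String]): Unit = {
        val o1 = new Outer
        val o2 = new Outer
        println(belongsTo(o1)(new o1.Inner))   // expected: true
        println(belongsTo(o1)(new o2.Inner))   // expected: false, the outer references differ
      }
    }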
def definedInStaticLocation(tp: Type): Boolean = { @@ -361,20 +358,88 @@ trait MatchTreeMaking extends MatchCodeGen with Debugging { tp.typeSymbol.owner == tp.prefix.typeSymbol && isStatic(tp.prefix) } - if ((expectedPrefix eq NoPrefix) - || expectedTp.typeSymbol.isJava - || definedInStaticLocation(expectedTp) - || testedPrefix =:= expectedPrefix) orig - else gen.mkAttributedQualifierIfPossible(expectedPrefix) match { - case None => orig - case Some(expectedOuterRef) => - // ExplicitOuter replaces `Select(q, outerSym) OBJ_EQ expectedPrefix` - // by `Select(q, outerAccessor(outerSym.owner)) OBJ_EQ expectedPrefix` - // if there's an outer accessor, otherwise the condition becomes `true` - // TODO: centralize logic whether there's an outer accessor and use here? - val synthOuterGetter = expectedTp.typeSymbol.newMethod(nme.OUTER_SYNTH, newFlags = SYNTHETIC | ARTIFACT) setInfo expectedPrefix - val outerTest = (Select(codegen._asInstanceOf(testedBinder, expectedTp), synthOuterGetter)) OBJ_EQ expectedOuterRef - and(orig, outerTest) + // In `def foo(a: b.B) = a match { case _: p.P }` + // testedBinder.symbol.info = b.B + // expectedTp = p.P + + expectedTp.dealias match { + case RefinedType(Nil, _) => orig + case rt@RefinedType(parent :: rest, scope) => + // If the pattern type is refined type, emit outer tests for each component. + withOuterTest(withOuterTest(orig)(testedBinder, parent))(testedBinder, copyRefinedType(rt, rest, scope)) + case expectedTp => + val expectedClass = expectedTp.typeSymbol + // .typeSymbol dealiases, so look at the prefix of the base type at the dealiased symbol, + // not of expectedTp itself. + val expectedPrefix = expectedTp.baseType(expectedClass).prefix + + + // Given `(a: x.B) match { case _: x.P }` where P is subclass of B, is it possible + // that a value conforms to both x.B and x1.P where `x ne x1`? + // + // To answer this, we create a new prefix based on a fresh symbol and check the + // base type of TypeRef(freshPrefix, typePatternSymbol (P), args) at the binder + // symbol (B). If that is prefixed by the fresh symbol, they are statically the + // same. + // + // It is not sufficient to show that x.P is a subtype of x.B, as this + // would incorrectly elide the outer test in: + // + // class P extends p1.B + // def test(b: p1.B) = b match { case _: p1.P } + // test(new p2.P) + def prefixAligns: Boolean = { + expectedTp match { + case TypeRef(pre, _, _) if !pre.isStable => // e.g. 
_: Outer#Inner + false + case TypeRef(pre, sym, args) => + val testedBinderClass = testedBinder.info.baseClasses.find { sym => + sym.isClass && !sym.isRefinementClass + }.getOrElse(NoSymbol) + val testedBinderType = testedBinder.info.baseType(testedBinderClass) + + val testedPrefixIsExpectedTypePrefix = pre =:= testedBinderType.prefix + val testedPrefixAndExpectedPrefixAreStaticallyIdentical: Boolean = { + def check(freshPrefix: Type): Boolean = { + val expectedTpFromFreshPrefix = TypeRef(freshPrefix, sym, args) + val baseTypeFromFreshPrefix = expectedTpFromFreshPrefix.baseType(testedBinderClass) + freshPrefix eq baseTypeFromFreshPrefix.prefix + } + pre match { + case ThisType(thissym) => + check(ThisType(thissym.cloneSymbol(thissym.owner))) + case _ => + pre.termSymbol match { + case NoSymbol => false + case preSym => + val freshPreSym = preSym.cloneSymbol(preSym.owner).setInfo(preSym.info) + check(singleType(pre.prefix, freshPreSym)) + } + } + + } + testedPrefixAndExpectedPrefixAreStaticallyIdentical && testedPrefixIsExpectedTypePrefix + case _ => + false + } + } + + if ((expectedPrefix eq NoPrefix) + || expectedTp.typeSymbol.isJava + || definedInStaticLocation(expectedTp) + || testedBinder.info <:< expectedTp + || prefixAligns) orig + else gen.mkAttributedQualifierIfPossible(expectedPrefix) match { + case None => orig + case Some(expectedOuterRef) => + // ExplicitOuter replaces `Select(q, outerSym) OBJ_EQ expectedPrefix` + // by `Select(q, outerAccessor(outerSym.owner)) OBJ_EQ expectedPrefix` + // if there's an outer accessor, otherwise the condition becomes `true` + // TODO: centralize logic whether there's an outer accessor and use here? + val synthOuterGetter = expectedTp.typeSymbol.newMethod(nme.OUTER_SYNTH, newFlags = SYNTHETIC | ARTIFACT) setInfo expectedPrefix + val outerTest = (Select(codegen._asInstanceOf(testedBinder, expectedTp), synthOuterGetter)) OBJ_EQ expectedOuterRef + and(orig, outerTest) + } } } } @@ -390,17 +455,6 @@ trait MatchTreeMaking extends MatchCodeGen with Debugging { def and(a: Result, b: Result): Result = false // we don't and type tests, so the conjunction must include at least one false def tru = true } - - def nonNullImpliedByTestChecker(binder: Symbol) = new TypeTestCondStrategy { - type Result = Boolean - - def typeTest(testedBinder: Symbol, expectedTp: Type): Result = testedBinder eq binder - def nonNullTest(testedBinder: Symbol): Result = testedBinder eq binder - def equalsTest(pat: Tree, testedBinder: Symbol): Result = false // could in principle analyse pat and see if it's statically known to be non-null - def eqTest(pat: Tree, testedBinder: Symbol): Result = false // could in principle analyse pat and see if it's statically known to be non-null - def and(a: Result, b: Result): Result = a || b - def tru = false - } } /** implements the run-time aspects of (§8.2) (typedPattern has already done the necessary type transformations) @@ -497,8 +551,6 @@ trait MatchTreeMaking extends MatchCodeGen with Debugging { // is this purely a type test, e.g. 
no outer check, no equality tests (used in switch emission) def isPureTypeTest = renderCondition(pureTypeTestChecker) - def impliesBinderNonNull(binder: Symbol) = renderCondition(nonNullImpliedByTestChecker(binder)) - override def toString = "TT"+((expectedTp, testedBinder.name, nextBinderTp)) } diff --git a/src/compiler/scala/tools/nsc/transform/patmat/PatternExpansion.scala b/src/compiler/scala/tools/nsc/transform/patmat/PatternExpansion.scala index 58e35abbfd1d..de10983e95a4 100644 --- a/src/compiler/scala/tools/nsc/transform/patmat/PatternExpansion.scala +++ b/src/compiler/scala/tools/nsc/transform/patmat/PatternExpansion.scala @@ -16,9 +16,10 @@ package nsc package transform package patmat -import scala.tools.nsc.typechecker.Contexts import scala.reflect.internal.util import scala.tools.nsc.Reporting.WarningCategory +import scala.tools.nsc.typechecker.Contexts +import scala.util.chaining._ /** An 'extractor' can be a case class or an unapply or unapplySeq method. * @@ -205,15 +206,17 @@ trait PatternExpansion { if (isUnapply || equivConstrParamTypes.isEmpty) notRepeated else { val lastParamTp = equivConstrParamTypes.last - if (isUnapplySeq) { - val elementTp = elementTypeFromApply(lastParamTp) - (elementTp, scalaRepeatedType(elementTp)) - } else { + if (isUnapplySeq) + elementTypeFromApply(lastParamTp) match { + case NoType => notRepeated.tap(_ => + err(s"${unapplyResultType()} is not a valid result type of an unapplySeq method of an extractor.")) + case elementTp => (elementTp, scalaRepeatedType(elementTp)) + } + else definitions.elementType(RepeatedParamClass, lastParamTp) match { - case NoType => notRepeated + case NoType => notRepeated case elementTp => (elementTp, lastParamTp) } - } } // errors & warnings @@ -248,10 +251,11 @@ trait PatternExpansion { // emit error/warning on mismatch if (isStar && !isSeq) err("Star pattern must correspond with varargs or unapplySeq") - else if (equivConstrParamTypes == List(NoType) && unapplyResultType().isNothing) - err(s"${fun.symbol.owner} can't be used as an extractor: The result type of an ${fun.symbol.name} method may not be Nothing") else if (equivConstrParamTypes == List(NoType)) - err(s"The result type of an ${fun.symbol.name} method must contain a member `get` to be used as an extractor pattern, no such member exists in ${unapplyResultType()}") + if (unapplyResultType().isNothing) + err(s"${fun.symbol.owner} can't be used as an extractor: The result type of an ${fun.symbol.name} method may not be Nothing") + else + err(s"The result type of an ${fun.symbol.name} method must contain a member `get` to be used as an extractor pattern, no such member exists in ${unapplyResultType()}") else if (elementArity < 0) arityError("not enough") else if (elementArity > 0 && !isSeq) arityError("too many") else if (settings.warnStarsAlign && isSeq && productArity > 0 && elementArity > 0) warn( diff --git a/src/compiler/scala/tools/nsc/transform/patmat/Solving.scala b/src/compiler/scala/tools/nsc/transform/patmat/Solving.scala index 4146db459b4e..dd6a524549dc 100644 --- a/src/compiler/scala/tools/nsc/transform/patmat/Solving.scala +++ b/src/compiler/scala/tools/nsc/transform/patmat/Solving.scala @@ -15,7 +15,6 @@ package scala.tools.nsc.transform.patmat import scala.annotation.tailrec import scala.collection.mutable.ArrayBuffer import scala.collection.{immutable, mutable} -import scala.reflect.internal.util.StatisticsStatics /** Solve pattern matcher exhaustivity problem via DPLL. 
*/ trait Solving extends Logic { @@ -479,12 +478,12 @@ trait Solving extends Logic { def hasModel(solvable: Solvable): Boolean = findTseitinModelFor(solvable.cnf) != NoTseitinModel def findTseitinModelFor(clauses: Array[Clause]): TseitinModel = { - val start = if (StatisticsStatics.areSomeColdStatsEnabled) statistics.startTimer(statistics.patmatAnaDPLL) else null + val start = if (settings.areStatisticsEnabled) statistics.startTimer(statistics.patmatAnaDPLL) else null debug.patmat(s"DPLL\n${cnfString(clauses)}") val satisfiableWithModel = findTseitinModel0((java.util.Arrays.copyOf(clauses, clauses.length), Nil) :: Nil) - if (StatisticsStatics.areSomeColdStatsEnabled) statistics.stopTimer(statistics.patmatAnaDPLL, start) + if (settings.areStatisticsEnabled) statistics.stopTimer(statistics.patmatAnaDPLL, start) satisfiableWithModel } diff --git a/src/compiler/scala/tools/nsc/typechecker/Adaptations.scala b/src/compiler/scala/tools/nsc/typechecker/Adaptations.scala index 133b299e5412..4fc3c1fdddd8 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Adaptations.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Adaptations.scala @@ -36,6 +36,10 @@ trait Adaptations { case Apply(_, arg :: Nil) => arg case _ => EmptyTree } + def isInfix = t match { + case Apply(_, arg :: Nil) => t.hasAttachment[MultiargInfixAttachment.type] + case _ => false + } def callString = ( ( if (t.symbol.isConstructor) "new " else "" ) + ( t.symbol.owner.decodedName ) + @@ -86,15 +90,17 @@ trait Adaptations { true // keep adaptation } @inline def warnAdaptation = { - if (settings.warnAdaptedArgs) context.warning(t.pos, adaptWarningMessage( + if (settings.warnAdaptedArgs && !isInfix) context.warning(t.pos, adaptWarningMessage( s"adapted the argument list to the expected ${args.size}-tuple: add additional parens instead"), WarningCategory.LintAdaptedArgs) true // keep adaptation } - if (args.isEmpty) { - if (currentRun.isScala3) noAdaptation else deprecatedAdaptation - } else + if (args.nonEmpty) warnAdaptation + else if (currentRun.isScala3) + noAdaptation + else + deprecatedAdaptation } } } diff --git a/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala b/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala index cd5278776a57..65e669a7743e 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala @@ -13,8 +13,6 @@ package scala.tools.nsc package typechecker -import scala.reflect.internal.util.StatisticsStatics - /** Defines the sub-components for the namer, packageobjects, and typer phases. */ trait Analyzer extends AnyRef @@ -54,6 +52,7 @@ trait Analyzer extends AnyRef object packageObjects extends { val global: Analyzer.this.global.type = Analyzer.this.global } with SubComponent { + val deferredOpen = perRunCaches.newSet[Symbol]() val phaseName = "packageobjects" val runsAfter = List[String]() val runsRightAfter= Some("namer") @@ -66,6 +65,9 @@ trait Analyzer extends AnyRef override def traverse(tree: Tree): Unit = tree match { case ModuleDef(_, _, _) => if (tree.symbol.name == nme.PACKAGEkw) { + // we've actually got a source file + deferredOpen.remove(tree.symbol.owner) + openPackageModule(tree.symbol, tree.symbol.owner) } case ClassDef(_, _, _, _) => () // make it fast @@ -75,6 +77,7 @@ trait Analyzer extends AnyRef def apply(unit: CompilationUnit): Unit = { openPackageObjectsTraverser(unit.body) + deferredOpen.foreach(openPackageModule(_)) } } } @@ -96,7 +99,7 @@ trait Analyzer extends AnyRef // compiler run). 
This is good enough for the resident compiler, which was the most affected. undoLog.clear() override def run(): Unit = { - val start = if (StatisticsStatics.areSomeColdStatsEnabled) statistics.startTimer(statistics.typerNanos) else null + val start = if (settings.areStatisticsEnabled) statistics.startTimer(statistics.typerNanos) else null global.echoPhaseSummary(this) val units = currentRun.units while (units.hasNext) { @@ -106,7 +109,7 @@ trait Analyzer extends AnyRef finishComputeParamAlias() // defensive measure in case the bookkeeping in deferred macro expansion is buggy clearDelayed() - if (StatisticsStatics.areSomeColdStatsEnabled) statistics.stopTimer(statistics.typerNanos, start) + if (settings.areStatisticsEnabled) statistics.stopTimer(statistics.typerNanos, start) } def apply(unit: CompilationUnit): Unit = { try { diff --git a/src/compiler/scala/tools/nsc/typechecker/AnalyzerPlugins.scala b/src/compiler/scala/tools/nsc/typechecker/AnalyzerPlugins.scala index a86f2c409151..2557867ea966 100644 --- a/src/compiler/scala/tools/nsc/typechecker/AnalyzerPlugins.scala +++ b/src/compiler/scala/tools/nsc/typechecker/AnalyzerPlugins.scala @@ -16,7 +16,7 @@ package typechecker /** * @author Lukas Rytz */ -trait AnalyzerPlugins { self: Analyzer => +trait AnalyzerPlugins { self: Analyzer with splain.SplainData => import global._ trait AnalyzerPlugin { @@ -179,6 +179,16 @@ trait AnalyzerPlugins { self: Analyzer => * @param result The result to a given implicit search. */ def pluginsNotifyImplicitSearchResult(result: SearchResult): Unit = () + + /** + * Construct a custom error message for implicit parameters that could not be resolved. + * + * @param param The implicit parameter that was resolved + * @param errors The chain of intermediate implicits that lead to this error + * @param previous The error message constructed by the previous analyzer plugin, or the builtin default + */ + def noImplicitFoundError(param: Symbol, errors: List[ImplicitError], previous: String): String = + previous } /** @@ -390,6 +400,13 @@ trait AnalyzerPlugins { self: Analyzer => def accumulate = (_, p) => p.pluginsNotifyImplicitSearchResult(result) }) + /** @see AnalyzerPlugin.noImplicitFoundError */ + def pluginsNoImplicitFoundError(param: Symbol, errors: List[ImplicitError], initial: String): String = + invoke(new CumulativeOp[String] { + def default = initial + def accumulate = (previous, p) => p.noImplicitFoundError(param, errors, previous) + }) + /** A list of registered macro plugins */ private var macroPlugins: List[MacroPlugin] = Nil diff --git a/src/compiler/scala/tools/nsc/typechecker/Checkable.scala b/src/compiler/scala/tools/nsc/typechecker/Checkable.scala index c3bb3f65fbd4..481531a5951d 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Checkable.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Checkable.scala @@ -13,8 +13,6 @@ package scala.tools.nsc package typechecker -import Checkability._ -import scala.collection.mutable.ListBuffer import scala.tools.nsc.Reporting.WarningCategory /** On pattern matcher checkability: @@ -39,7 +37,7 @@ import scala.tools.nsc.Reporting.WarningCategory * * There are four possibilities to consider: * [P1] X will always conform to P - * [P2] x will never conform to P + * [P2] x will never be a P, because it is an X * [P3] X will conform to P if some runtime test is true * [P4] X cannot be checked against P * @@ -52,7 +50,7 @@ import scala.tools.nsc.Reporting.WarningCategory * which is essentially the intersection of X and |P|, where |P| is * the erasure of P. 
If XR <: P, then no warning is emitted. * - * We evaluate "X with conform to P" by checking `X <: P_wild`, where + * We evaluate "X will conform to P" by checking `X <: P_wild`, where * P_wild is the result of substituting wildcard types in place of * pattern type variables. This is intentionally stricter than * (X matchesPattern P), see scala/bug#8597 for motivating test cases. @@ -76,7 +74,22 @@ trait Checkable { import global._ import definitions._ - import CheckabilityChecker.{ isNeverSubType, isNeverSubClass } + + type Checkability = Int + object Checkability { + final val StaticallyTrue = 0 + final val StaticallyFalse = 1 + final val RuntimeCheckable = 2 + final val Uncheckable = 3 + final val CheckabilityError = 4 + lazy val describe: (Checkability => String) = List( + "statically true", + "statically false", + "runtime checkable", + "uncheckable", + "error", + ) + } /** The applied type of class 'to' after inferring anything * possible from the knowledge that 'to' must also be of the @@ -115,63 +128,30 @@ trait Checkable { appliedType(to, resArgs) } - private def isUnwarnableTypeArgSymbol(sym: Symbol) = ( - sym.isTypeParameter // dummy - || (sym.name.toTermName == nme.WILDCARD) // _ - || nme.isVariableName(sym.name) // type variable - ) - private def isUnwarnableTypeArg(arg: Type) = ( - uncheckedOk(arg) // @unchecked T - || isUnwarnableTypeArgSymbol(arg.typeSymbolDirect) // has to be direct: see pos/t1439 - ) - private def uncheckedOk(tp: Type) = tp hasAnnotation UncheckedClass - - private def typeArgsInTopLevelType(tp: Type): List[Type] = { - val res: ListBuffer[Type] = ListBuffer.empty[Type] - def add(t: Type) = if (!isUnwarnableTypeArg(t)) res += t - def loop(tp: Type): Unit = tp match { - case RefinedType(parents, _) => - parents foreach loop - case TypeRef(_, ArrayClass, arg :: Nil) => - if (arg.typeSymbol.isAbstractType) add(arg) else loop(arg) - case TypeRef(pre, sym, args) => - loop(pre) - args.foreach(add) - case ExistentialType(tparams, underlying) => - tparams.foreach(tp => add(tp.tpe)) - loop(underlying) - case _ => () - } - loop(tp) - res.toList - } + private def uncheckedOk(tp: Type) = tp.hasAnnotation(UncheckedClass) private def scrutConformsToPatternType(scrut: Type, pattTp: Type): Boolean = { - def typeVarToWildcard(tp: Type) = { - // The need for typeSymbolDirect is demonstrated in neg/t8597b.scala - if (tp.typeSymbolDirect.isPatternTypeVariable) WildcardType else tp - } + // The need for typeSymbolDirect is demonstrated in neg/t8597b.scala + def typeVarToWildcard(tp: Type) = if (tp.typeSymbolDirect.isPatternTypeVariable) WildcardType else tp val pattTpWild = pattTp.map(typeVarToWildcard) scrut <:< pattTpWild } - private class CheckabilityChecker(val X: Type, val P: Type) { + private class CheckabilityChecker(val X: Type, val P: Type, isRecheck: Boolean = false) { + import Checkability._ + import erasure.GenericArray def Xsym = X.typeSymbol def Psym = P.typeSymbol - def PErased = { + def PErased = P match { - case erasure.GenericArray(n, core) => existentialAbstraction(core.typeSymbol :: Nil, P) - case _ => existentialAbstraction(Psym.typeParams, Psym.tpe_*) + case GenericArray(n, core) => existentialAbstraction(core.typeSymbol :: Nil, P) + case _ => existentialAbstraction(Psym.typeParams, Psym.tpe_*) } - } - def XR = if (Xsym == AnyClass) PErased else propagateKnownTypes(X, Psym) - - - // sadly the spec says (new java.lang.Boolean(true)).isInstanceOf[scala.Boolean] - def P1 = scrutConformsToPatternType(X, P) - def P2 = !Psym.isPrimitiveValueClass && 
isNeverSubType(X, P) - def P3 = isNonRefinementClassType(P) && scrutConformsToPatternType(XR, P) - def P4 = !(P1 || P2 || P3) + def XR = if (Xsym == AnyClass) PErased else propagateKnownTypes(X, Psym) + def P1 = scrutConformsToPatternType(X, P) + def P2 = !Psym.isPrimitiveValueClass && isNeverSubType(X, P) + def P3 = isNonRefinementClassType(P) && scrutConformsToPatternType(XR, P) + def P4 = !(P1 || P2 || P3) def summaryString = f""" |Checking checkability of (x: $X) against pattern $P @@ -181,58 +161,61 @@ trait Checkable { |[P4] $P4%-6s None of the above // !(P1 || P2 || P3) """.stripMargin.trim - val result = ( + val result: Checkability = if (X.isErroneous || P.isErroneous) CheckabilityError else if (P1) StaticallyTrue else if (P2) StaticallyFalse else if (P3) RuntimeCheckable - else if (uncheckableType == NoType) { - // Avoid warning (except ourselves) if we can't pinpoint the uncheckable type - debuglog("Checkability checker says 'Uncheckable', but uncheckable type cannot be found:\n" + summaryString) + else if (uncheckableType != NoType) Uncheckable + else { // Avoid warning (except ourselves) if we can't pinpoint the uncheckable type + debuglog(s"Checkability checker says 'Uncheckable', but uncheckable type cannot be found:\n$summaryString") CheckabilityError } - else Uncheckable - ) - lazy val uncheckableType = if (Psym.isAbstractType) P else { - val possibles = typeArgsInTopLevelType(P).toSet - val opt = possibles find { targ => + // collect type args which are candidates for warning because uncheckable + private def typeArgsInTopLevelType(tp: Type): Set[Type] = { + def isUnwarnableTypeArg(arg: Type) = + uncheckedOk(arg) || { // @unchecked T + val sym = arg.typeSymbolDirect // has to be direct: see pos/t1439 + sym.name.toTermName == nme.WILDCARD || // don't warn for `case l: List[_]`. Here, `List[_]` is a TypeRef, the arg refers an abstract type symbol `_` + nme.isVariableName(sym.name) // don't warn for `x.isInstanceOf[List[_]]`. Here, `List[_]` is an existential, quantified sym has `isVariableName` + } + var res: Set[Type] = Set.empty[Type] + def add(t: Type): Unit = if (!isUnwarnableTypeArg(t)) res += t + def loop(tp: Type): Unit = tp match { + case RefinedType(parents, _) => + parents.foreach(loop) + case TypeRef(_, ArrayClass, arg :: Nil) => + if (arg.typeSymbol.isAbstractType) add(arg) else loop(arg) + case TypeRef(pre, sym, args) => + loop(pre) + args.foreach(add) + case ExistentialType(tparams, underlying) => + tparams.foreach(tp => add(tp.tpe)) + loop(underlying) + case _ => () + } + loop(tp) + res + } + lazy val (uncheckableType, uncheckableCard) = + if (Psym.isAbstractType) (P, 1) + else { + val possibles = typeArgsInTopLevelType(P) // Create a derived type with every possibly uncheckable type replaced // with a WildcardType, except for 'targ'. If !(XR <: derived) then // 'targ' is uncheckable. 
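For reference, the kind of source-level pattern whose type argument ends up as the uncheckableType computed here (an illustrative example, not taken from this changeset's test suite):

    object UncheckedDemo {
      def isStrings(x: Any): Boolean = x match {
        case _: List[String] => true   // warns: the type argument String is unchecked,
        case _               => false  // since it is eliminated by erasure
      }
      def main(args: Array[String]): Unit =
        println(isStrings(List(1, 2, 3)))   // prints true: only the erased class List is tested
    }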
- val derived = P map (tp => if (possibles(tp) && !(tp =:= targ)) WildcardType else tp) - !(XR <:< derived) + def candidate(targ: Type) = { + val derived = P.map(tp => if (possibles(tp) && !(tp =:= targ)) WildcardType else tp) + !(XR <:< derived) + } + val opt = possibles.find(candidate) + opt.map(res => (res, possibles.iterator.map(candidate).take(2).size)).getOrElse((NoType, 0)) } - opt getOrElse NoType - } def neverSubClass = isNeverSubClass(Xsym, Psym) def neverMatches = result == StaticallyFalse def isUncheckable = result == Uncheckable def isCheckable = !isUncheckable - def uncheckableMessage = uncheckableType match { - case NoType => "something" - case tp @ RefinedType(_, _) => "refinement " + tp - case TypeRef(_, sym, _) if sym.isAbstractType => "abstract type " + sym.name - case tp => "non-variable type argument " + tp - } - } - - /** X, P, [P1], etc. are all explained at the top of the file. - */ - private object CheckabilityChecker { - /** Are these symbols classes with no subclass relationship? */ - def areUnrelatedClasses(sym1: Symbol, sym2: Symbol) = ( - sym1.isClass - && sym2.isClass - && !(sym1 isSubClass sym2) - && !(sym2 isSubClass sym1) - ) - /** Are all children of these symbols pairwise irreconcilable? */ - def allChildrenAreIrreconcilable(sym1: Symbol, sym2: Symbol) = { - val sc1 = sym1.sealedChildren - val sc2 = sym2.sealedChildren - sc1.forall(c1 => sc2.forall(c2 => areIrreconcilableAsParents(c1, c2))) - } /** Is it impossible for the given symbols to be parents in the same class? * This means given A and B, can there be an instance of A with B? This is the @@ -242,23 +225,36 @@ trait Checkable { * - neither A nor B is a trait (i.e. both are actual classes, not eligible for mixin) * - both A and B are sealed/final, and every possible pairing of their children is irreconcilable * - * TODO: the last two conditions of the last possibility (that the symbols are not of + * The last two conditions of the last possibility (that the symbols are not of * classes being compiled in the current run) are because this currently runs too early, * and .children returns Nil for sealed classes because their children will not be - * populated until typer. It was too difficult to move things around for the moment, - * so I will consult with moors about the optimal time to be doing this. + * populated until typer. As a workaround, in this case, this check is performed a second + * time at the end of typer. #6537, #12414 */ - def areIrreconcilableAsParents(sym1: Symbol, sym2: Symbol): Boolean = areUnrelatedClasses(sym1, sym2) && ( + def areIrreconcilableAsParents(sym1: Symbol, sym2: Symbol): Boolean = { + // Are these symbols classes with no subclass relationship? + def areUnrelatedClasses(sym1: Symbol, sym2: Symbol) = ( + sym1.isClass + && sym2.isClass + && !sym1.isSubClass(sym2) + && !sym2.isSubClass(sym1) + ) + // Are all children of these symbols pairwise irreconcilable? 
+ def allChildrenAreIrreconcilable(sym1: Symbol, sym2: Symbol) = { + val sc1 = sym1.sealedChildren + val sc2 = sym2.sealedChildren + sc1.forall(c1 => sc2.forall(c2 => areIrreconcilableAsParents(c1, c2))) + } + areUnrelatedClasses(sym1, sym2) && ( isEffectivelyFinal(sym1) // initialization important || isEffectivelyFinal(sym2) || !sym1.isTrait && !sym2.isTrait - || isSealedOrFinal(sym1) && isSealedOrFinal(sym2) && allChildrenAreIrreconcilable(sym1, sym2) && !currentRun.compiles(sym1) && !currentRun.compiles(sym2) - ) + || isSealedOrFinal(sym1) && isSealedOrFinal(sym2) && allChildrenAreIrreconcilable(sym1, sym2) && (isRecheck || !currentRun.compiles(sym1) && !currentRun.compiles(sym2)) + ) + } private def isSealedOrFinal(sym: Symbol) = sym.isSealed || sym.isFinal - private def isEffectivelyFinal(sym: Symbol): Boolean = ( - // initialization important - sym.initialize.isEffectivelyFinalOrNotOverridden - ) + // initialization important + private def isEffectivelyFinal(sym: Symbol): Boolean = sym.initialize.isEffectivelyFinalOrNotOverridden def isNeverSubClass(sym1: Symbol, sym2: Symbol) = areIrreconcilableAsParents(sym1, sym2) @@ -278,7 +274,7 @@ trait Checkable { case _ => false } - // Important to dealias at any entry point (this is the only one at this writing.) + // Important to dealias at any entry point (this is the only one at this writing but cf isNeverSubClass.) def isNeverSubType(tp1: Type, tp2: Type): Boolean = /*logResult(s"isNeverSubType($tp1, $tp2)")*/((tp1.dealias, tp2.dealias) match { case (TypeRef(_, sym1, args1), TypeRef(_, sym2, args2)) => isNeverSubClass(sym1, sym2) || { @@ -296,14 +292,13 @@ trait Checkable { def isUncheckable(P0: Type) = !isCheckable(P0) - def isCheckable(P0: Type): Boolean = ( + def isCheckable(P0: Type): Boolean = uncheckedOk(P0) || (P0.widen match { case TypeRef(_, NothingClass | NullClass | AnyValClass, _) => false case RefinedType(_, decls) if !decls.isEmpty => false - case RefinedType(parents, _) => parents forall isCheckable + case RefinedType(parents, _) => parents.forall(isCheckable) case p => new CheckabilityChecker(AnyTpe, p).isCheckable }) - ) /** TODO: much better error positions. * Kind of stuck right now because they just pass us the one tree. @@ -311,14 +306,13 @@ trait Checkable { * * Instead of the canRemedy flag, annotate uncheckable types that have become checkable because of the availability of a class tag? 
*/ - def checkCheckable(tree: Tree, P0: Type, X0: Type, inPattern: Boolean, canRemedy: Boolean = false): Unit = { - if (uncheckedOk(P0)) return - def where = if (inPattern) "pattern " else "" + def checkCheckable(tree: Tree, P0: Type, X0: Type, inPattern: Boolean, canRemedy: Boolean = false): Unit = if (!uncheckedOk(P0)) { + import Checkability._ - if(P0.typeSymbol == SingletonClass) + if (P0.typeSymbol == SingletonClass) context.warning(tree.pos, s"fruitless type test: every non-null value will be a Singleton dynamically", WarningCategory.Other) else { - // singleton types not considered here, dealias the pattern for SI-XXXX + // singleton types not considered here, dealias the pattern val P = P0.dealiasWiden val X = X0.widen @@ -335,34 +329,52 @@ trait Checkable { case RefinedType(_, decls) if !decls.isEmpty => context.warning(tree.pos, s"a pattern match on a refinement type is unchecked", WarningCategory.Unchecked) case RefinedType(parents, _) => - parents foreach (p => checkCheckable(tree, p, X, inPattern, canRemedy)) + parents.foreach(checkCheckable(tree, _, X, inPattern, canRemedy)) case _ => val checker = new CheckabilityChecker(X, P) if (checker.result == RuntimeCheckable) log(checker.summaryString) - if (checker.neverMatches) { - val addendum = if (checker.neverSubClass) "" else " (but still might match its erasure)" + def neverMatchesWarning(result: CheckabilityChecker) = { + val addendum = if (result.neverSubClass) "" else " (but still might match its erasure)" context.warning(tree.pos, s"fruitless type test: a value of type $X cannot also be a $PString$addendum", WarningCategory.Other) } + if (checker.neverMatches) + neverMatchesWarning(checker) else if (checker.isUncheckable) { - val msg = ( - if (checker.uncheckableType =:= P) s"abstract type $where$PString" - else s"${checker.uncheckableMessage} in type $where$PString" - ) - context.warning(tree.pos, s"$msg is unchecked since it is eliminated by erasure", WarningCategory.Unchecked) + def uncheckableMessage = checker.uncheckableType match { + case NoType => "something" + case tp @ RefinedType(_, _) => "refinement " + tp + case TypeRef(_, sym, _) if sym.isAbstractType => "abstract type " + sym.name + case tp => "non-variable type argument " + tp + } + val msg = { + val where = if (inPattern) "pattern " else "" + if (checker.uncheckableCard == 2) + s"the type test for $where$PString cannot be checked at runtime because it has type parameters eliminated by erasure" + else { + val thing = + if (checker.uncheckableType =:= P) s"abstract type $where$PString" + else s"$uncheckableMessage in type $where$PString" + s"$thing is unchecked since it is eliminated by erasure" + } + } + context.warning(tree.pos, msg, WarningCategory.Unchecked) + } + else if (checker.result == RuntimeCheckable) { + // register deferred checking for sealed types in current run + def recheckFruitless(): Unit = { + val rechecker = new CheckabilityChecker(X, P, isRecheck = true) + if (rechecker.neverMatches) neverMatchesWarning(rechecker) + } + def isSealedOrFinal(sym: Symbol) = sym.isSealed || sym.isFinal + val Xsym = X.typeSymbol + val Psym = P.typeSymbol + if (isSealedOrFinal(Xsym) && isSealedOrFinal(Psym) && (currentRun.compiles(Xsym) || currentRun.compiles(Psym))) + context.unit.toCheck += (() => recheckFruitless()) } } } } } } - -private[typechecker] final class Checkability(val value: Int) extends AnyVal { } -private[typechecker] object Checkability { - val StaticallyTrue = new Checkability(0) - val StaticallyFalse = new Checkability(1) - val 
RuntimeCheckable = new Checkability(2) - val Uncheckable = new Checkability(3) - val CheckabilityError = new Checkability(4) -} diff --git a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala index 04c5258561d6..b105b821ccee 100644 --- a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala +++ b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala @@ -16,6 +16,7 @@ package typechecker import scala.reflect.internal.util.StringOps.{countAsString, countElementsAsString} import java.lang.System.{lineSeparator => EOL} +import scala.PartialFunction.cond import scala.annotation.tailrec import scala.reflect.runtime.ReflectionUtils import scala.reflect.macros.runtime.AbortMacroException @@ -24,7 +25,7 @@ import scala.tools.nsc.util.stackTraceString import scala.reflect.io.NoAbstractFile import scala.reflect.internal.util.NoSourceFile -trait ContextErrors { +trait ContextErrors extends splain.SplainErrors { self: Analyzer => import global._ @@ -107,7 +108,7 @@ trait ContextErrors { def issueTypeError(err: AbsTypeError)(implicit context: Context): Unit = { context.issue(err) } def typeErrorMsg(context: Context, found: Type, req: Type) = - if (context.openImplicits.nonEmpty && !settings.XlogImplicits.value && currentRun.isScala213) + if (context.openImplicits.nonEmpty && !settings.Vimplicits) // OPT: avoid error string creation for errors that won't see the light of day, but predicate // this on -Xsource:2.13 for bug compatibility with https://github.com/scala/scala/pull/7147#issuecomment-418233611 "type mismatch" @@ -151,8 +152,26 @@ trait ContextErrors { def MacroCantExpandIncompatibleMacrosError(internalMessage: String) = MacroIncompatibleEngineError("macro cannot be expanded, because it was compiled by an incompatible macro engine", internalMessage) + /** The implicit not found message from the annotation, and whether it's a supplement message or not. 
*/ + def NoImplicitFoundAnnotation(tree: Tree, param: Symbol): (Boolean, String) = { + param match { + case ImplicitNotFoundMsg(msg) => (false, msg.formatParameterMessage(tree)) + case _ => + val paramTp = param.tpe + paramTp.typeSymbolDirect match { + case ImplicitNotFoundMsg(msg) => (false, msg.formatDefSiteMessage(paramTp)) + case _ => + val supplement = param.baseClasses.collectFirst { + case ImplicitNotFoundMsg(msg) => s" (${msg.formatDefSiteMessage(paramTp)})" + }.getOrElse("") + true -> supplement + } + } + } + def NoImplicitFoundError(tree: Tree, param: Symbol)(implicit context: Context): Unit = { - def errMsg = { + val (isSupplement, annotationMsg) = NoImplicitFoundAnnotation(tree, param) + def defaultErrMsg = { val paramName = param.name val paramTp = param.tpe def evOrParam = @@ -160,21 +179,11 @@ trait ContextErrors { "evidence parameter of type" else s"parameter $paramName:" - - param match { - case ImplicitNotFoundMsg(msg) => msg.formatParameterMessage(tree) - case _ => - paramTp.typeSymbolDirect match { - case ImplicitNotFoundMsg(msg) => msg.formatDefSiteMessage(paramTp) - case _ => - val supplement = param.baseClasses.collectFirst { - case ImplicitNotFoundMsg(msg) => s" (${msg.formatDefSiteMessage(paramTp)})" - }.getOrElse("") - s"could not find implicit value for $evOrParam $paramTp$supplement" - } - } + if (isSupplement) s"could not find implicit value for $evOrParam $paramTp$annotationMsg" + else annotationMsg } - issueNormalTypeError(tree, errMsg) + val errMsg = splainPushOrReportNotFound(tree, param, annotationMsg) + issueNormalTypeError(tree, if (errMsg.isEmpty) defaultErrMsg else errMsg) } trait TyperContextErrors { @@ -286,8 +295,9 @@ trait ContextErrors { def AmbiguousIdentError(tree: Tree, name: Name, msg: String) = NormalTypeError(tree, "reference to " + name + " is ambiguous;\n" + msg) - def SymbolNotFoundError(tree: Tree, name: Name, owner: Symbol, startingIdentCx: Context) = { - NormalTypeError(tree, "not found: "+decodeWithKind(name, owner)) + def SymbolNotFoundError(tree: Tree, name: Name, owner: Symbol, startingIdentCx: Context, inPattern: Boolean) = { + def help = if (inPattern && name.isTermName) s"\nIdentifiers ${if (name.charAt(0).isUpper) "that begin with uppercase" else "enclosed in backticks"} are not pattern variables but match the value in scope." else "" + NormalTypeError(tree, s"not found: ${decodeWithKind(name, owner)}$help") } // typedAppliedTypeTree @@ -1166,9 +1176,15 @@ trait ContextErrors { val proscription = if (tree.symbol.isConstructor) " cannot be invoked with " else " cannot be applied to " + val junkNames = { + val bads = argtpes.collect { + case NamedType(name, _) if !alts.exists(cond(_) { case MethodType(params, _) => params.exists(_.name == name) }) => name.decoded + } + if (bads.isEmpty) "" else bads.mkString(" [which have no such parameter ", ",", "]") + } issueNormalTypeError(tree, - applyErrorMsg(tree, proscription, widenedArgtpes, pt)) + applyErrorMsg(tree, junkNames + proscription, widenedArgtpes, pt)) // since inferMethodAlternative modifies the state of the tree // we have to set the type of tree to ErrorType only in the very last // fallback action that is done in the inference. 
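The messages assembled by NoImplicitFoundAnnotation above come from the standard @implicitNotFound annotation. A small self-contained illustration (JsonEncoder is a hypothetical type class, not part of this changeset):

    import scala.annotation.implicitNotFound

    @implicitNotFound("No JsonEncoder found for ${A}; define or import an implicit JsonEncoder[${A}].")
    trait JsonEncoder[A] { def encode(a: A): String }

    object ImplicitNotFoundDemo {
      def toJson[A](a: A)(implicit enc: JsonEncoder[A]): String = enc.encode(a)

      implicit val intEncoder: JsonEncoder[Int] = (a: Int) => a.toString

      def main(args: Array[String]): Unit = {
        println(toJson(42))   // "42"
        // toJson("hi") would not compile; the reported error is the annotation's
        // message with ${A} replaced by String.
      }
    }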
diff --git a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala index bb4e1fb1ccd8..5c7e3128b8ed 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala @@ -1058,7 +1058,7 @@ trait Contexts { self: Analyzer => ) && !(imported && { val e = scope.lookupEntry(name) - (e ne null) && (e.owner == scope) && (!currentRun.isScala212 || e.sym.exists) + (e ne null) && (e.owner == scope) && e.sym.exists }) /** Do something with the symbols with name `name` imported via the import in `imp`, diff --git a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala index 1ed5bfd55f52..507bf035b924 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala @@ -20,20 +20,19 @@ package tools.nsc package typechecker import scala.annotation.{nowarn, tailrec} -import scala.collection.mutable -import mutable.{LinkedHashMap, ListBuffer} -import scala.util.matching.Regex -import symtab.Flags._ -import scala.reflect.internal.util.{ReusableInstance, Statistics, StatisticsStatics, TriState} -import scala.reflect.internal.TypesStats +import scala.collection.mutable, mutable.{LinkedHashMap, ListBuffer} import scala.language.implicitConversions +import scala.reflect.internal.util.{ReusableInstance, Statistics, TriState} +import scala.reflect.internal.TypesStats import scala.tools.nsc.Reporting.WarningCategory +import scala.util.matching.Regex +import symtab.Flags._ /** This trait provides methods to find various kinds of implicits. * * @author Martin Odersky */ -trait Implicits { +trait Implicits extends splain.SplainData { self: Analyzer => import global._ @@ -99,18 +98,20 @@ trait Implicits { // Note that the isInvalidConversionTarget seems to make a lot more sense right here, before all the // work is performed, than at the point where it presently exists. 
val shouldPrint = printTypings && !context.undetparams.isEmpty - val findMemberStart = if (StatisticsStatics.areSomeColdStatsEnabled) statistics.startCounter(findMemberImpl) else null - val subtypeStart = if (StatisticsStatics.areSomeColdStatsEnabled) statistics.startCounter(subtypeImpl) else null - val start = if (StatisticsStatics.areSomeColdStatsEnabled) statistics.startTimer(implicitNanos) else null + val findMemberStart = if (settings.areStatisticsEnabled) statistics.startCounter(findMemberImpl) else null + val subtypeStart = if (settings.areStatisticsEnabled) statistics.startCounter(subtypeImpl) else null + val start = if (settings.areStatisticsEnabled) statistics.startTimer(implicitNanos) else null if (shouldPrint) typingStack.printTyping(tree, "typing implicit: %s %s".format(tree, context.undetparamsString)) val implicitSearchContext = context.makeImplicit(reportAmbiguous) + ImplicitErrors.startSearch(pt) val dpt = if (isView) pt else dropByName(pt) val isByName = dpt ne pt val search = new ImplicitSearch(tree, dpt, isView, implicitSearchContext, pos, isByName) pluginsNotifyImplicitSearch(search) val result = search.bestImplicit pluginsNotifyImplicitSearchResult(result) + ImplicitErrors.finishSearch(result.isSuccess, pt) if (result.isFailure && saveAmbiguousDivergent && implicitSearchContext.reporter.hasErrors) implicitSearchContext.reporter.propagateImplicitTypeErrorsTo(context.reporter) @@ -121,9 +122,9 @@ trait Implicits { // and then filter out any which *were* inferred and are part of the substitutor in the implicit search result. context.undetparams = ((context.undetparams ++ result.undetparams) filterNot result.subst.from.contains).distinct - if (StatisticsStatics.areSomeColdStatsEnabled) statistics.stopTimer(implicitNanos, start) - if (StatisticsStatics.areSomeColdStatsEnabled) statistics.stopCounter(findMemberImpl, findMemberStart) - if (StatisticsStatics.areSomeColdStatsEnabled) statistics.stopCounter(subtypeImpl, subtypeStart) + if (settings.areStatisticsEnabled) statistics.stopTimer(implicitNanos, start) + if (settings.areStatisticsEnabled) statistics.stopCounter(findMemberImpl, findMemberStart) + if (settings.areStatisticsEnabled) statistics.stopCounter(subtypeImpl, subtypeStart) if (result.isSuccess && settings.lintImplicitRecursion && result.tree.symbol != null) { val s = @@ -146,7 +147,7 @@ trait Implicits { if (result.isFailure && !silent) { val err = context.reporter.firstError val errPos = err.map(_.errPos).getOrElse(pos) - val errMsg = err.map(_.errMsg).getOrElse("implicit search has failed. to find out the reason, turn on -Xlog-implicits") + val errMsg = err.map(_.errMsg).getOrElse("implicit search has failed. to find out the reason, turn on -Vimplicits") onError(errPos, errMsg) } result.tree @@ -420,7 +421,7 @@ trait Implicits { } import infer._ - if (StatisticsStatics.areSomeColdStatsEnabled) statistics.incCounter(implicitSearchCount) + if (settings.areStatisticsEnabled) statistics.incCounter(implicitSearchCount) /** The type parameters to instantiate */ val undetParams = if (isView) Nil else context.outer.undetparams @@ -443,19 +444,19 @@ trait Implicits { def pos = if (pos0 != NoPosition) pos0 else tree.pos @inline final def failure(what: Any, reason: => String, pos: Position = this.pos): SearchResult = { - if (settings.XlogImplicits) + if (settings.debug) reporter.echo(pos, s"$what is not a valid implicit value for $pt because:\n$reason") SearchFailure } /** Is implicit info `info1` better than implicit info `info2`? 
*/ def improves(info1: ImplicitInfo, info2: ImplicitInfo) = { - if (StatisticsStatics.areSomeColdStatsEnabled) statistics.incCounter(improvesCount) + if (settings.areStatisticsEnabled) statistics.incCounter(improvesCount) (info2 == NoImplicitInfo) || (info1 != NoImplicitInfo) && { if (info1.sym.isStatic && info2.sym.isStatic) { improvesCache get ((info1, info2)) match { - case Some(b) => if (StatisticsStatics.areSomeColdStatsEnabled) statistics.incCounter(improvesCachedCount); b + case Some(b) => if (settings.areStatisticsEnabled) statistics.incCounter(improvesCachedCount); b case None => val result = isStrictlyMoreSpecific(info1.tpe, info2.tpe, info1.sym, info2.sym) improvesCache((info1, info2)) = result @@ -648,14 +649,14 @@ trait Implicits { * This method is performance critical: 5-8% of typechecking time. */ private def matchesPt(tp: Type, pt: Type, undet: List[Symbol]): Boolean = { - val start = if (StatisticsStatics.areSomeColdStatsEnabled) statistics.startTimer(matchesPtNanos) else null + val start = if (settings.areStatisticsEnabled) statistics.startTimer(matchesPtNanos) else null val result = normSubType(tp, pt) || isView && { pt match { case Function1(arg1, arg2) => matchesPtView(tp, arg1, arg2, undet) case _ => false } } - if (StatisticsStatics.areSomeColdStatsEnabled) statistics.stopTimer(matchesPtNanos, start) + if (settings.areStatisticsEnabled) statistics.stopTimer(matchesPtNanos, start) result } private def matchesPt(info: ImplicitInfo): Boolean = ( @@ -682,7 +683,7 @@ trait Implicits { } private def matchesPtInst(info: ImplicitInfo): Boolean = { - if (StatisticsStatics.areSomeColdStatsEnabled) statistics.incCounter(matchesPtInstCalls) + if (settings.areStatisticsEnabled) statistics.incCounter(matchesPtInstCalls) info.tpe match { case PolyType(tparams, restpe) => try { @@ -691,7 +692,7 @@ trait Implicits { val tp = ApproximateDependentMap(restpe) val tpInstantiated = tp.instantiateTypeParams(allUndetparams, tvars) if(!matchesPt(tpInstantiated, wildPt, allUndetparams)) { - if (StatisticsStatics.areSomeColdStatsEnabled) statistics.incCounter(matchesPtInstMismatch1) + if (settings.areStatisticsEnabled) statistics.incCounter(matchesPtInstMismatch1) false } else { // we can't usefully prune views any further because we would need to type an application @@ -701,7 +702,7 @@ trait Implicits { val adjusted = adjustTypeArgs(allUndetparams, tvars, targs) val tpSubst = deriveTypeWithWildcards(adjusted.undetParams)(tp.instantiateTypeParams(adjusted.okParams, adjusted.okArgs)) if(!matchesPt(tpSubst, wildPt, adjusted.undetParams)) { - if (StatisticsStatics.areSomeColdStatsEnabled) statistics.incCounter(matchesPtInstMismatch2) + if (settings.areStatisticsEnabled) statistics.incCounter(matchesPtInstMismatch2) false } else true } @@ -799,7 +800,7 @@ trait Implicits { } private def typedImplicit0(info: ImplicitInfo, ptChecked: Boolean, isLocalToCallsite: Boolean): SearchResult = { - if (StatisticsStatics.areSomeColdStatsEnabled) statistics.incCounter(plausiblyCompatibleImplicits) + if (settings.areStatisticsEnabled) statistics.incCounter(plausiblyCompatibleImplicits) val ok = ptChecked || matchesPt(info) && { def word = if (isLocalToCallsite) "local " else "" typingLog("match", s"$word$info") @@ -809,7 +810,7 @@ trait Implicits { } private def typedImplicit1(info: ImplicitInfo, isLocalToCallsite: Boolean): SearchResult = { - if (StatisticsStatics.areSomeColdStatsEnabled) statistics.incCounter(matchingImplicits) + if (settings.areStatisticsEnabled) statistics.incCounter(matchingImplicits) // 
workaround for deficient context provided by ModelFactoryImplicitSupport#makeImplicitConstraints val isScaladoc = context.tree == EmptyTree @@ -865,7 +866,7 @@ trait Implicits { case None => } - if (StatisticsStatics.areSomeColdStatsEnabled) statistics.incCounter(typedImplicits) + if (settings.areStatisticsEnabled) statistics.incCounter(typedImplicits) val itree3 = if (isView) treeInfo.dissectApplied(itree2).callee else adapt(itree2, EXPRmode, wildPt) @@ -906,7 +907,9 @@ trait Implicits { // bounds check on the expandee tree itree3.attachments.get[MacroExpansionAttachment] match { case Some(MacroExpansionAttachment(exp @ TypeApply(fun, targs), _)) => - checkBounds(exp, NoPrefix, NoSymbol, fun.symbol.typeParams, targs.map(_.tpe), "inferred ") + val targTpes = mapList(targs)(_.tpe) + val withinBounds = checkBounds(exp, NoPrefix, NoSymbol, fun.symbol.typeParams, targTpes, "inferred ") + if (!withinBounds) splainPushNonconformantBonds(pt, tree, targTpes, undetParams, None) case _ => () } @@ -953,10 +956,11 @@ trait Implicits { context.reporter.firstError match { case Some(err) => + splainPushImplicitSearchFailure(itree3, pt, err) fail("typing TypeApply reported errors for the implicit tree: " + err.errMsg) case None => val result = new SearchResult(unsuppressMacroExpansion(itree3), subst, context.undetparams) - if (StatisticsStatics.areSomeColdStatsEnabled) statistics.incCounter(foundImplicits) + if (settings.areStatisticsEnabled) statistics.incCounter(foundImplicits) typingLog("success", s"inferred value of type $ptInstantiated is $result") result } @@ -1087,19 +1091,12 @@ trait Implicits { /** Sorted list of eligible implicits. */ - private def eligibleOld = Shadower.using(isLocalToCallsite){ shadower => - val matches = iss flatMap { is => + private def eligibleOld = Shadower.using(isLocalToCallsite) { shadower => + iss flatMap { is => val result = is filter (info => checkValid(info.sym) && survives(info, shadower)) shadower addInfos is result } - - if (currentRun.isScala213) matches - else { - // most frequent one first under Scala 2.12 mode. We've turned this optimization off to avoid - // compilation order variation in whether a search succeeds or diverges. - matches sortBy (x => if (isView) -x.useCountView else -x.useCountArg) - } } /** Sorted list of eligible implicits. @@ -1173,16 +1170,7 @@ trait Implicits { } } - val eligible: List[ImplicitInfo] = { - val matches = if (shadowerUseOldImplementation) eligibleOld else eligibleNew - if (currentRun.isScala213) matches - else { - // most frequent one first under Scala 2.12 mode. We've turned this optimization off to avoid - // compilation order variation in whether a search succeeds or diverges. 
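// ----------------------------------------------------------------------------------------------
// Illustrative aside, not part of the patch: the ImplicitErrors / splainPushImplicitSearchFailure
// hooks above record failed candidates for the -Vimplicits report, the flag this patch now points
// users at instead of -Xlog-implicits. A minimal sketch, assuming it is saved as Demo.scala and
// compiled with `scalac -Vimplicits Demo.scala`:
object VImplicitsDemo {
  trait Codec[A]
  implicit def optionCodec[A](implicit base: Codec[A]): Codec[Option[A]] = new Codec[Option[A]] {}
  // implicitly[Codec[Option[Int]]]  // uncomment: fails (no Codec[Int]); -Vimplicits shows the nested search
}
// ----------------------------------------------------------------------------------------------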
- matches sortBy (x => if (isView) -x.useCountView else -x.useCountArg) - } - } - + val eligible: List[ImplicitInfo] = if (shadowerUseOldImplementation) eligibleOld else eligibleNew if (eligible.nonEmpty) printTyping(tree, "" + eligible.size + s" eligible for pt=$pt at ${fullSiteString(context)}") @@ -1222,7 +1210,7 @@ trait Implicits { foreach2(undetParams, savedInfos){ (up, si) => up.setInfo(si) } } } - if (typedFirstPending.isFailure && currentRun.isScala213) + if (typedFirstPending.isFailure) undoLog.undoTo(mark) // Don't accumulate constraints from typechecking or type error message creation for failed candidates // Pass the errors to `DivergentImplicitRecovery` so that it can note @@ -1291,11 +1279,11 @@ trait Implicits { * @return map from infos to search results */ def applicableInfos(iss: Infoss, isLocalToCallsite: Boolean): mutable.LinkedHashMap[ImplicitInfo, SearchResult] = { - val start = if (StatisticsStatics.areSomeColdStatsEnabled) statistics.startCounter(subtypeAppInfos) else null + val start = if (settings.areStatisticsEnabled) statistics.startCounter(subtypeAppInfos) else null val computation = new ImplicitComputation(iss, isLocalToCallsite) { } val applicable = computation.findAll() - if (StatisticsStatics.areSomeColdStatsEnabled) statistics.stopCounter(subtypeAppInfos, start) + if (settings.areStatisticsEnabled) statistics.stopCounter(subtypeAppInfos, start) applicable } @@ -1439,13 +1427,13 @@ trait Implicits { * such that some part of `tp` has C as one of its superclasses. */ private def implicitsOfExpectedType: Infoss = { - if (StatisticsStatics.areSomeColdStatsEnabled) statistics.incCounter(implicitCacheAccs) + if (settings.areStatisticsEnabled) statistics.incCounter(implicitCacheAccs) implicitsCache get pt match { case Some(implicitInfoss) => - if (StatisticsStatics.areSomeColdStatsEnabled) statistics.incCounter(implicitCacheHits) + if (settings.areStatisticsEnabled) statistics.incCounter(implicitCacheHits) implicitInfoss case None => - val start = if (StatisticsStatics.areSomeColdStatsEnabled) statistics.startTimer(subtypeETNanos) else null + val start = if (settings.areStatisticsEnabled) statistics.startTimer(subtypeETNanos) else null // val implicitInfoss = companionImplicits(pt) val implicitInfoss1 = companionImplicitMap(pt).valuesIterator.toList // val is1 = implicitInfoss.flatten.toSet @@ -1454,7 +1442,7 @@ trait Implicits { // if (!(is2 contains i)) println("!!! implicit infos of "+pt+" differ, new does not contain "+i+",\nold: "+implicitInfoss+",\nnew: "+implicitInfoss1) // for (i <- is2) // if (!(is1 contains i)) println("!!! 
implicit infos of "+pt+" differ, old does not contain "+i+",\nold: "+implicitInfoss+",\nnew: "+implicitInfoss1) - if (StatisticsStatics.areSomeColdStatsEnabled) statistics.stopTimer(subtypeETNanos, start) + if (settings.areStatisticsEnabled) statistics.stopTimer(subtypeETNanos, start) implicitsCache(pt) = implicitInfoss1 if (implicitsCache.size >= sizeLimit) implicitsCache -= implicitsCache.keysIterator.next() @@ -1508,17 +1496,17 @@ trait Implicits { // so that if we find one, we could convert it to whatever universe we need by the means of the `in` method // if no tag is found in scope, we end up here, where we ask someone to materialize the tag for us // however, since the original search was about a tag with no particular prefix, we cannot proceed - // this situation happens very often, so emitting an error message here (even if only for -Xlog-implicits) would be too much + // this situation happens very often, so emitting an error message here (even if only for -Vimplicits) would be too much //return failure(tp, "tag error: unsupported prefix type %s (%s)".format(pre, pre.kind)) return SearchFailure } ) // todo. migrate hardcoded materialization in Implicits to corresponding implicit macros val materializer = atPos(pos.focus)(gen.mkMethodCall(TagMaterializers(tagClass), List(tp), if (prefix != EmptyTree) List(prefix) else List())) - if (settings.XlogImplicits) reporter.echo(pos, "materializing requested %s.%s[%s] using %s".format(pre, tagClass.name, tp, materializer)) + if (settings.debug) reporter.echo(pos, "materializing requested %s.%s[%s] using %s".format(pre, tagClass.name, tp, materializer)) if (context.macrosEnabled) success(materializer) // don't call `failure` here. if macros are disabled, we just fail silently - // otherwise -Xlog-implicits will spam the long with zillions of "macros are disabled" + // otherwise -Vimplicits/-Vdebug will spam the long with zillions of "macros are disabled" // this is ugly but temporary, since all this code will be removed once I fix implicit macros else SearchFailure } @@ -1536,7 +1524,7 @@ trait Implicits { if (args contains EmptyTree) EmptyTree else typedPos(tree.pos.focus) { val mani = gen.mkManifestFactoryCall(full, constructor, tparg, args.toList) - if (settings.debug) println("generated manifest: "+mani) // DEBUG + if (settings.isDebug) println("generated manifest: "+mani) // DEBUG mani } @@ -1700,7 +1688,7 @@ trait Implicits { * If all fails return SearchFailure */ def bestImplicit: SearchResult = { - val stats = StatisticsStatics.areSomeColdStatsEnabled + val stats = settings.areStatisticsEnabled val failstart = if (stats) statistics.startTimer(inscopeFailNanos) else null val succstart = if (stats) statistics.startTimer(inscopeSucceedNanos) else null @@ -1773,7 +1761,7 @@ trait Implicits { } } - if (result.isFailure && settings.debug) // debuglog is not inlined for some reason + if (result.isFailure && settings.isDebug) // debuglog is not inlined for some reason log(s"no implicits found for ${pt} ${pt.typeSymbol.info.baseClasses} ${implicitsOfExpectedType}") result @@ -1841,7 +1829,7 @@ trait Implicits { private def interpolate(text: String, vars: Map[String, String]) = Intersobralator.replaceAllIn(text, (_: Regex.Match) match { - case Regex.Groups(v) => Regex quoteReplacement vars.getOrElse(v, "") + case Regex.Groups(v) => Regex.quoteReplacement(vars.getOrElse(v, "")) // #3915: need to quote replacement string since it may include $'s (such as the interpreter's $iw) case x => throw new MatchError(x) }) @@ -1870,7 +1858,7 @@ trait 
Implicits { formatDefSiteMessage(typeArgsAtSym(paramTp).map(_.toString)) def formatDefSiteMessage(typeArgs: List[String]): String = - interpolate(msg, Map(symTypeParamNames zip typeArgs: _*)) + interpolate(msg, Map(symTypeParamNames.zip(typeArgs): _*)) def formatParameterMessage(fun: Tree): String = { val paramNames = referencedTypeParams @@ -1891,13 +1879,15 @@ trait Implicits { case PolyType(tps, tr@TypeRef(_, _, tprefs)) => if (tps.corresponds(tprefs)((p, r) => p == r.typeSymbol)) tr.typeConstructor.toString else { - val freshTpars = tps.mapConserve { case p if p.name == tpnme.WILDCARD => p.cloneSymbol.setName(newTypeName("?T" + tps.indexOf(p))) case p => p } + val freshTpars = tps.mapConserve { p => + if (p.unexpandedName == tpnme.WILDCARD) p.cloneSymbol.setName(newTypeName("?T" + tps.indexOf(p))) + else p + } freshTpars.map(_.name).mkString("[", ", ", "] -> ") + tr.instantiateTypeParams(tps, freshTpars.map(_.typeConstructor)).toString } - case tp => tp.toString } - interpolate(msg, Map(paramNames zip argTypes: _*)) + interpolate(msg, Map(paramNames.zip(argTypes): _*)) } def validate: Option[String] = { diff --git a/src/compiler/scala/tools/nsc/typechecker/Infer.scala b/src/compiler/scala/tools/nsc/typechecker/Infer.scala index 094dc1032487..49b40e16903b 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Infer.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Infer.scala @@ -239,7 +239,7 @@ trait Infer extends Checkable { // When filtering sym down to the accessible alternatives leaves us empty handed. private def checkAccessibleError(tree: Tree, sym: Symbol, pre: Type, site: Tree): Tree = { - if (settings.debug) { + if (settings.isDebug) { Console.println(context) Console.println(tree) Console.println("" + pre + " " + sym.owner + " " + context.owner + " " + context.outer.enclClass.owner + " " + sym.owner.thisType + (pre =:= sym.owner.thisType)) @@ -1076,7 +1076,11 @@ trait Infer extends Checkable { */ def inferMethodInstance(fn: Tree, undetParams: List[Symbol], args: List[Tree], pt0: Type): List[Symbol] = fn.tpe match { - case mt @ MethodType(params0, _) => + case mt: MethodType => + // If we can't infer the type parameters, we can recover in `tryTypedApply` with an implicit conversion, + // but only when implicit conversions are enabled. In that case we have to infer the type parameters again. 
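// ----------------------------------------------------------------------------------------------
// Illustrative aside, not part of the patch: a minimal sketch of the recovery path described in
// the comment above. Inferring A against the raw argument fails, but the application can still be
// typed after an implicit view is inserted, so the undetermined type parameters are kept and
// inferred again. Box and boxToList are hypothetical names used only for illustration.
import scala.language.implicitConversions

object InferThenConvertDemo {
  final case class Box[A](value: A)
  implicit def boxToList[A](b: Box[A]): List[A] = List(b.value)

  def sizeOf[A](xs: List[A]): Int = xs.size

  val n: Int = sizeOf(Box(1))   // effectively sizeOf(boxToList(Box(1))); A is inferred as Int
}
// ----------------------------------------------------------------------------------------------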
+ def noInstanceResult = if (context.implicitsEnabled) undetParams else Nil + try { val pt = if (pt0.typeSymbol == UnitClass) WildcardType else pt0 val formals = formalTypes(mt.paramTypes, args.length) @@ -1094,17 +1098,17 @@ trait Infer extends Checkable { adjusted.undetParams match { case Nil => Nil case xs => - // #3890 + // scala/bug#3890 val xs1 = treeSubst.typeMap mapOver xs if (xs ne xs1) new TreeSymSubstTraverser(xs, xs1) traverseTrees fn :: args enhanceBounds(adjusted.okParams, adjusted.okArgs, xs1) xs1 } - } else Nil - } - catch ifNoInstance { msg => - NoMethodInstanceError(fn, args, msg); List() + } else noInstanceResult + } catch ifNoInstance { msg => + NoMethodInstanceError(fn, args, msg) + noInstanceResult } case x => throw new MatchError(x) } @@ -1256,7 +1260,7 @@ trait Infer extends Checkable { } } - def inferTypedPattern(tree0: Tree, pattp: Type, pt0: Type, canRemedy: Boolean): Type = { + def inferTypedPattern(tree0: Tree, pattp: Type, pt0: Type, canRemedy: Boolean, isUnapply: Boolean): Type = { val pt = abstractTypesToBounds(pt0) val ptparams = freeTypeParamsOfTerms(pt) val tpparams = freeTypeParamsOfTerms(pattp) @@ -1273,7 +1277,7 @@ trait Infer extends Checkable { return ErrorType } - checkCheckable(tree0, pattp, pt, inPattern = true, canRemedy) + checkCheckable(tree0, if (isUnapply) typer.applyTypeToWildcards(pattp) else pattp, pt, inPattern = true, canRemedy = canRemedy) if (pattp <:< pt) () else { debuglog("free type params (1) = " + tpparams) diff --git a/src/compiler/scala/tools/nsc/typechecker/MacroAnnotationNamers.scala b/src/compiler/scala/tools/nsc/typechecker/MacroAnnotationNamers.scala index 31eeedf2853e..267501f23175 100644 --- a/src/compiler/scala/tools/nsc/typechecker/MacroAnnotationNamers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/MacroAnnotationNamers.scala @@ -166,7 +166,17 @@ trait MacroAnnotationNamers { self: Analyzer => protected def weakEnsureCompanionObject(cdef: ClassDef, creator: ClassDef => Tree = companionModuleDef(_)): Symbol = { val m = patchedCompanionSymbolOf(cdef.symbol, context) if (m != NoSymbol && currentRun.compiles(m)) m - else { val mdef = atPos(cdef.pos.focus)(creator(cdef)); enterSym(mdef); markWeak(mdef.symbol) } + else { + val existsVal = context.tree.children.find { + case ValDef(_, term, _, _) if cdef.getterName == term => true + case _ => false + } + if (existsVal.isDefined) NoSymbol else { + val mdef = atPos(cdef.pos.focus)(creator(cdef)) + enterSym(mdef) + markWeak(mdef.symbol) + } + } } protected def finishSymbol(tree: Tree): Unit = { diff --git a/src/compiler/scala/tools/nsc/typechecker/Macros.scala b/src/compiler/scala/tools/nsc/typechecker/Macros.scala index b7bf7a219dcb..f89b81add55c 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Macros.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Macros.scala @@ -18,7 +18,7 @@ import java.lang.Math.min import symtab.Flags._ import scala.reflect.internal.util.ScalaClassLoader import scala.reflect.runtime.ReflectionUtils -import scala.reflect.internal.util.{Statistics, StatisticsStatics} +import scala.reflect.internal.util.Statistics import scala.reflect.internal.TypesStats import scala.reflect.macros.util._ import scala.util.control.ControlThrowable @@ -562,8 +562,8 @@ trait Macros extends MacroRuntimes with Traces with Helpers { if (macroDebugVerbose) println(s"macroExpand: ${summary()}") linkExpandeeAndDesugared(expandee, desugared) - val start = if (StatisticsStatics.areSomeColdStatsEnabled) statistics.startTimer(statistics.macroExpandNanos) else null - if 
(StatisticsStatics.areSomeColdStatsEnabled) statistics.incCounter(statistics.macroExpandCount) + val start = if (settings.areStatisticsEnabled) statistics.startTimer(statistics.macroExpandNanos) else null + if (settings.areStatisticsEnabled) statistics.incCounter(statistics.macroExpandCount) try { withInfoLevel(nodePrinters.InfoLevel.Quiet) { // verbose printing might cause recursive macro expansions if (expandee.symbol.isErroneous || (expandee exists (_.isErroneous))) { @@ -596,7 +596,7 @@ trait Macros extends MacroRuntimes with Traces with Helpers { } } } finally { - if (StatisticsStatics.areSomeColdStatsEnabled) statistics.stopTimer(statistics.macroExpandNanos, start) + if (settings.areStatisticsEnabled) statistics.stopTimer(statistics.macroExpandNanos, start) } } } @@ -919,7 +919,7 @@ trait Macros extends MacroRuntimes with Traces with Helpers { object Macros { final val macroClassLoadersCache = - new scala.tools.nsc.classpath.FileBasedCache[ScalaClassLoader.URLClassLoader]() + new scala.tools.nsc.classpath.FileBasedCache[Unit, ScalaClassLoader.URLClassLoader]() } trait MacrosStats { diff --git a/src/compiler/scala/tools/nsc/typechecker/Namers.scala b/src/compiler/scala/tools/nsc/typechecker/Namers.scala index 35dbe24f0650..039281214c00 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Namers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Namers.scala @@ -467,17 +467,6 @@ trait Namers extends MethodSynthesis { val existingModule = context.scope lookupModule tree.name if (existingModule.isModule && !existingModule.hasPackageFlag && inCurrentScope(existingModule) && (currentRun.canRedefine(existingModule) || existingModule.isSynthetic)) { - // This code accounts for the way the package objects found in the classpath are opened up - // early by the completer of the package itself. If the `packageobjects` phase then finds - // the same package object in sources, we have to clean the slate and remove package object - // members from the package class. - // - // TODO scala/bug#4695 Pursue the approach in https://github.com/scala/scala/pull/2789 that avoids - // opening up the package object on the classpath at all if one exists in source. 
- if (existingModule.isPackageObject) { - val packageScope = existingModule.enclosingPackageClass.rawInfo.decls - packageScope.foreach(mem => if (mem.owner != existingModule.enclosingPackageClass) packageScope unlink mem) - } updatePosFlags(existingModule, tree.pos, moduleFlags) setPrivateWithin(tree, existingModule) existingModule.moduleClass andAlso (setPrivateWithin(tree, _)) @@ -632,7 +621,7 @@ trait Namers extends MethodSynthesis { def assignParamTypes(copyDef: DefDef, sym: Symbol): Unit = { val clazz = sym.owner val constructorType = clazz.primaryConstructor.tpe - val subst = new SubstSymMap(clazz.typeParams, copyDef.tparams map (_.symbol)) + val subst = SubstSymMap(clazz.typeParams, copyDef.tparams.map(_.symbol)) val classParamss = constructorType.paramss foreach2(copyDef.vparamss, classParamss)((copyParams, classParams) => @@ -1487,7 +1476,13 @@ trait Namers extends MethodSynthesis { } val methSig = deskolemizedPolySig(vparamSymssOrEmptyParamsFromOverride, resTp) - pluginsTypeSig(methSig, typer, ddef, resTpGiven) + val unlink = methOwner.isJava && meth.isSynthetic && meth.isConstructor && methOwner.superClass == JavaRecordClass && + methOwner.info.decl(meth.name).alternatives.exists(c => c != meth && c.tpe.matches(methSig)) + if (unlink) { + methOwner.info.decls.unlink(meth) + ErrorType + } else + pluginsTypeSig(methSig, typer, ddef, resTpGiven) } /** @@ -1724,7 +1719,7 @@ trait Namers extends MethodSynthesis { val valOwner = owner.owner // there's no overriding outside of classes, and we didn't use to do this in 2.11, so provide opt-out - if (!currentRun.isScala212 || !valOwner.isClass) WildcardType + if (!valOwner.isClass) WildcardType else { // normalize to getter so that we correctly consider a val overriding a def // (a val's name ends in a " ", so can't compare to def) diff --git a/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala b/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala index 50117fde232f..0e169c0d80b9 100644 --- a/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala +++ b/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala @@ -302,18 +302,15 @@ trait NamesDefaults { self: Analyzer => case _ => val byName = isByNameParamType(paramTpe) val repeated = isScalaRepeatedParamType(paramTpe) - val argTpe = ( - if (repeated) arg match { + // TODO In 83c9c764b, we tried to a stable type here to fix scala/bug#7234. But the resulting TypeTree over a + // singleton type without an original TypeTree fails to retypecheck after a resetAttrs (scala/bug#7516), + // which is important for (at least) macros. + val argTpe = + arg match { + case _ if !repeated => arg.tpe case WildcardStarArg(expr) => expr.tpe - case _ => seqType(arg.tpe) + case _ => seqType(arg.tpe.widen) // avoid constant type } - else { - // TODO In 83c9c764b, we tried to a stable type here to fix scala/bug#7234. But the resulting TypeTree over a - // singleton type without an original TypeTree fails to retypecheck after a resetAttrs (scala/bug#7516), - // which is important for (at least) macros. 
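// ----------------------------------------------------------------------------------------------
// Illustrative aside, not part of the patch: with the 2.11-era opt-out removed in the Namers hunk
// above, the expected type for an overriding val is always taken from the member it overrides.
// A minimal sketch of the behaviour this keeps (names are illustrative):
object OverrideInferenceDemo {
  trait Dimensions { def size: Long }
  class Cube extends Dimensions {
    val size = 3   // inferred as Long rather than Int: the overridden `size: Long` supplies the expected type
  }
}
// ----------------------------------------------------------------------------------------------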
- arg.tpe - } - ) val s = context.owner.newValue(freshTermName(nme.NAMEDARG_PREFIX)(typer.fresh), arg.pos, newFlags = ARTIFACT) setInfo { val tp = if (byName) functionType(Nil, argTpe) else argTpe uncheckedBounds(tp) @@ -330,10 +327,11 @@ trait NamesDefaults { self: Analyzer => res } else { new ChangeOwnerTraverser(context.owner, sym) traverse arg // fixes #4502 - if (repeated) arg match { + arg match { + case _ if !repeated => arg case WildcardStarArg(expr) => expr - case _ => blockTyper typed gen.mkSeqApply(resetAttrs(arg)) - } else arg + case _ => blockTyper.typed(gen.mkSeqApply(resetAttrs(arg))) + } } Some(atPos(body.pos)(ValDef(sym, body).setType(NoType))) } @@ -478,9 +476,33 @@ trait NamesDefaults { self: Analyzer => def defaultGetter(param: Symbol, context: Context): Symbol = { val i = param.owner.paramss.flatten.indexWhere(p => p.name == param.name) + 1 if (i > 0) { - val defGetterName = nme.defaultGetterName(param.owner.name, i) - if (param.owner.isConstructor) { - val mod = companionSymbolOf(param.owner.owner, context) + + def isScala3SyntheticApply(meth: Symbol): Boolean = { + // According to rules in Scala 3, a synthetic method named `apply` + // should use `` as the prefix of its default getters, + // i.e. reuse the constructor's default getters. + // We add some more precision - also verify that `apply` + // is defined in a module which has a case class companion + + def isModuleWithCaseClassCompanion(owner: Symbol) = ( + owner.isModuleClass + && linkedClassOfClassOf(owner, context).isCaseClass + ) + + (meth.isScala3Defined + && meth.isSynthetic + && meth.name == nme.apply + && isModuleWithCaseClassCompanion(meth.owner)) + } + + val scala3SynthApply = isScala3SyntheticApply(param.owner) + val defGetterName = { + val methodName = if (scala3SynthApply) nme.CONSTRUCTOR else param.owner.name + nme.defaultGetterName(methodName, i) + } + if (scala3SynthApply || param.owner.isConstructor) { + val scope = param.owner.owner + val mod = if (scala3SynthApply) scope else companionSymbolOf(scope, context) mod.info.member(defGetterName) } else { diff --git a/src/compiler/scala/tools/nsc/typechecker/PatternTypers.scala b/src/compiler/scala/tools/nsc/typechecker/PatternTypers.scala index 43e4560772a1..176867663f40 100644 --- a/src/compiler/scala/tools/nsc/typechecker/PatternTypers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/PatternTypers.scala @@ -72,6 +72,14 @@ trait PatternTypers { case tp => tp } + def applyTypeToWildcards(tp: Type) = tp match { + case tr @ TypeRef(pre, sym, args) if args.nonEmpty => + // similar to `typedBind` + def wld = context.owner.newAbstractType(tpnme.WILDCARD, sym.pos) setInfo TypeBounds.empty + copyTypeRef(tr, pre, sym, args.map(_ => wld.tpe)) + case t => t + } + def typedConstructorPattern(fun0: Tree, pt: Type): Tree = { // Do some ad-hoc overloading resolution and update the tree's symbol and type // do not update the symbol if the tree's symbol's type does not define an unapply member @@ -183,7 +191,7 @@ trait PatternTypers { case _ => extractor.nonEmpty } - val ownType = inferTypedPattern(tptTyped, tpe, pt, canRemedy) + val ownType = inferTypedPattern(tptTyped, tpe, pt, canRemedy = canRemedy, isUnapply = false) val treeTyped = treeCopy.Typed(tree, exprTyped, tptTyped) setType ownType extractor match { @@ -316,13 +324,12 @@ trait PatternTypers { case OverloadedType(_, _) => OverloadedUnapplyError(funOverloadResolved); ErrorType case _ => UnapplyWithSingleArgError(funOverloadResolved); ErrorType } - val GenPolyType(freeVars, unappFormal) = 
freshArgType(unapplyType.skolemizeExistential(context.owner, tree)) val unapplyContext = context.makeNewScope(tree, context.owner) - freeVars foreach unapplyContext.scope.enter - val pattp = newTyper(unapplyContext).infer.inferTypedPattern(tree, unappFormal, pt, canRemedy) + freeVars.foreach(unapplyContext.scope.enter) + val pattp = newTyper(unapplyContext).infer.inferTypedPattern(tree, unappFormal, pt, canRemedy = canRemedy, isUnapply = true) // turn any unresolved type variables in freevars into existential skolems - val skolems = freeVars map (fv => unapplyContext.owner.newExistentialSkolem(fv, fv)) + val skolems = freeVars.map(fv => unapplyContext.owner.newExistentialSkolem(fv, fv)) pattp.substSym(freeVars, skolems) } } @@ -390,10 +397,7 @@ trait PatternTypers { } // only look at top-level type, can't (reliably) do anything about unchecked type args (in general) // but at least make a proper type before passing it elsewhere - val pt1 = pt.dealiasWiden match { - case tr @ TypeRef(pre, sym, args) if args.nonEmpty => copyTypeRef(tr, pre, sym, sym.typeParams map (_.tpeHK)) // replace actual type args with dummies - case pt1 => pt1 - } + val pt1 = applyTypeToWildcards(pt.dealiasWiden) if (isCheckable(pt1)) EmptyTree else resolveClassTag(pos, pt1) match { case tree if unapplyMember(tree.tpe).exists => tree diff --git a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala index 3b0ad5ad7084..baef73b1df77 100644 --- a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala +++ b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala @@ -53,8 +53,8 @@ abstract class RefChecks extends Transform { def newTransformer(unit: CompilationUnit): RefCheckTransformer = new RefCheckTransformer(unit) - val toJavaRepeatedParam = new SubstSymMap(RepeatedParamClass -> JavaRepeatedParamClass) - val toScalaRepeatedParam = new SubstSymMap(JavaRepeatedParamClass -> RepeatedParamClass) + val toJavaRepeatedParam = SubstSymMap(RepeatedParamClass -> JavaRepeatedParamClass) + val toScalaRepeatedParam = SubstSymMap(JavaRepeatedParamClass -> RepeatedParamClass) def accessFlagsToString(sym: Symbol) = flagsToString( sym getFlag (PRIVATE | PROTECTED), @@ -148,7 +148,7 @@ abstract class RefChecks extends Transform { } // This has become noisy with implicit classes. 
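// ----------------------------------------------------------------------------------------------
// Illustrative aside, not part of the patch: applyTypeToWildcards above replaces a type's
// arguments with fresh wildcards before the checkability test, matching what erasure can actually
// verify at runtime. The user-level situation it models (names are illustrative):
object UncheckedPatternDemo {
  def firstInt(x: Any): Option[Int] = x match {
    case xs: List[Int] => xs.headOption   // unchecked warning: only List[_] can be tested at runtime
    case _             => None
  }
}
// ----------------------------------------------------------------------------------------------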
- if (settings.warnPolyImplicitOverload && settings.developer) { + if (settings.isDeveloper && settings.warnPolyImplicitOverload) { clazz.info.decls.foreach(sym => if (sym.isImplicit && sym.typeParams.nonEmpty) { // implicit classes leave both a module symbol and a method symbol as residue val alts = clazz.info.decl(sym.name).alternatives filterNot (_.isModule) @@ -303,7 +303,7 @@ abstract class RefChecks extends Transform { def isNeitherInClass = memberClass != clazz && otherClass != clazz val indent = " " - def overriddenWithAddendum(msg: String, foundReq: Boolean = settings.debug.value): String = { + def overriddenWithAddendum(msg: String, foundReq: Boolean = settings.isDebug): String = { val isConcreteOverAbstract = (otherClass isSubClass memberClass) && other.isDeferred && !member.isDeferred val addendum = @@ -383,7 +383,7 @@ abstract class RefChecks extends Transform { def isOverrideAccessOK = member.isPublic || { // member is public, definitely same or relaxed access (!other.isProtected || member.isProtected) && // if o is protected, so is m ((!isRootOrNone(ob) && ob.hasTransOwner(mb)) || // m relaxes o's access boundary - (other.isJavaDefined && other.isProtected)) // overriding a protected java member, see #3946 #12349 + (other.isJavaDefined && other.isProtected)) // overriding a protected java member, see #3946 #12349 } if (!isOverrideAccessOK) { overrideAccessError() @@ -1215,14 +1215,18 @@ abstract class RefChecks extends Transform { finally popLevel() } + private def showCurrentRef: String = { + val refsym = currentLevel.refsym + s"$refsym defined on line ${refsym.pos.line}" + } + def transformStat(tree: Tree, index: Int): Tree = tree match { case t if treeInfo.isSelfConstrCall(t) => assert(index == 0, index) try transform(tree) finally if (currentLevel.maxindex > 0) { - // An implementation restriction to avoid VerifyErrors and lazyvals mishaps; see scala/bug#4717 - debuglog("refsym = " + currentLevel.refsym) - reporter.error(currentLevel.refpos, "forward reference not allowed from self constructor invocation") + // An implementation restriction to avoid VerifyErrors and lazy vals mishaps; see scala/bug#4717 + reporter.error(currentLevel.refpos, s"forward reference to $showCurrentRef not allowed from self constructor invocation") } case ValDef(_, _, _, _) => val tree1 = transform(tree) // important to do before forward reference check @@ -1230,8 +1234,7 @@ abstract class RefChecks extends Transform { else { val sym = tree.symbol if (sym.isLocalToBlock && index <= currentLevel.maxindex) { - debuglog("refsym = " + currentLevel.refsym) - reporter.error(currentLevel.refpos, "forward reference extends over definition of " + sym) + reporter.error(currentLevel.refpos, s"forward reference to $showCurrentRef extends over definition of $sym") } tree1 } @@ -1404,40 +1407,87 @@ abstract class RefChecks extends Transform { false } - private def checkTypeRef(tp: Type, tree: Tree, skipBounds: Boolean) = tp match { - case TypeRef(pre, sym, args) => - tree match { - case tt: TypeTree if tt.original == null => // scala/bug#7783 don't warn about inferred types - // FIXME: reconcile this check with one in resetAttrs - case _ => checkUndesiredProperties(sym, tree.pos) + private object RefCheckTypeMap extends TypeMap { + object UnboundExistential extends TypeMap { + private[this] val bound = mutable.Set.empty[Symbol] + + def toWildcardIn(tpe: Type): Type = + try apply(tpe) finally bound.clear() + + override def apply(tpe: Type): Type = tpe match { + case ExistentialType(quantified, _) => + bound 
++= quantified + tpe.mapOver(this) + case tpe => + val sym = tpe.typeSymbol + if (sym.isExistential && !bound(sym)) WildcardType + else tpe.mapOver(this) } - if(sym.isJavaDefined) - sym.typeParams foreach (_.cookJavaRawInfo()) - if (!tp.isHigherKinded && !skipBounds) - checkBounds(tree, pre, sym.owner, sym.typeParams, args) - case _ => - } + } - private def checkTypeRefBounds(tp: Type, tree: Tree) = { - var skipBounds = false - tp match { - case AnnotatedType(ann :: Nil, underlying) if ann.symbol == UncheckedBoundsClass => + private[this] var inPattern = false + private[this] var skipBounds = false + private[this] var tree: Tree = EmptyTree + + def check(tpe: Type, tree: Tree, inPattern: Boolean = false): Type = { + this.inPattern = inPattern + this.tree = tree + try apply(tpe) finally { + this.inPattern = false + this.skipBounds = false + this.tree = EmptyTree + } + } + + // check all bounds, except those that are existential type parameters + // or those within typed annotated with @uncheckedBounds + override def apply(tpe: Type): Type = tpe match { + case tpe: AnnotatedType if tpe.hasAnnotation(UncheckedBoundsClass) => + // scala/bug#7694 Allow code synthesizers to disable checking of bounds for TypeTrees based on inferred LUBs + // which might not conform to the constraints. + val savedSkipBounds = skipBounds skipBounds = true - underlying + try tpe.mapOver(this).filterAnnotations(_.symbol != UncheckedBoundsClass) + finally skipBounds = savedSkipBounds + case tpe: TypeRef => + if (!inPattern) checkTypeRef(UnboundExistential.toWildcardIn(tpe)) + checkUndesired(tpe.sym) + tpe.mapOver(this) + case tpe => + tpe.mapOver(this) + } + + private def checkTypeRef(tpe: Type): Unit = tpe match { case TypeRef(pre, sym, args) => - if (!tp.isHigherKinded && !skipBounds) + if (sym.isJavaDefined) + sym.typeParams.foreach(_.cookJavaRawInfo()) + if (!tpe.isHigherKinded && !skipBounds) checkBounds(tree, pre, sym.owner, sym.typeParams, args) - tp case _ => - tp + } + + private def checkUndesired(sym: Symbol): Unit = tree match { + // scala/bug#7783 don't warn about inferred types + // FIXME: reconcile this check with one in resetAttrs + case tree: TypeTree if tree.original == null => + case tree => checkUndesiredProperties(sym, tree.pos) } } private def applyRefchecksToAnnotations(tree: Tree): Unit = { + def checkVarArgs(tp: Type, tree: Tree): Unit = tp match { + case TypeRef(_, VarargsClass, _) => + tree match { + case tt: TypeTree if tt.original == null => // same exception as in checkTypeRef + case _: DefDef => + case _ => reporter.error(tree.pos, s"Only methods can be marked @varargs") + } + case _ => + } def applyChecks(annots: List[AnnotationInfo]): List[AnnotationInfo] = if (annots.isEmpty) Nil else { annots.foreach { ann => - checkTypeRef(ann.atp, tree, skipBounds = false) - checkTypeRefBounds(ann.atp, tree) + checkVarArgs(ann.atp, tree) + RefCheckTypeMap.check(ann.atp, tree) if (ann.original != null && ann.original.hasExistingSymbol) checkUndesiredProperties(ann.original.symbol, tree.pos) } @@ -1486,7 +1536,7 @@ abstract class RefChecks extends Transform { reporter.error(sym.pos, s"${sym.name}: Only concrete methods can be marked @elidable.$rest") } } - if (currentRun.isScala213) checkIsElidable(tree.symbol) + checkIsElidable(tree.symbol) def checkMember(sym: Symbol): Unit = { sym.setAnnotations(applyChecks(sym.annotations)) @@ -1742,29 +1792,7 @@ abstract class RefChecks extends Transform { } } - val existentialParams = new ListBuffer[Symbol] - var skipBounds = false - // check all bounds, except 
those that are existential type parameters - // or those within typed annotated with @uncheckedBounds - if (!inPattern) tree.tpe foreach { - case tp @ ExistentialType(tparams, tpe) => - existentialParams ++= tparams - case ann: AnnotatedType if ann.hasAnnotation(UncheckedBoundsClass) => - // scala/bug#7694 Allow code synthesizers to disable checking of bounds for TypeTrees based on inferred LUBs - // which might not conform to the constraints. - skipBounds = true - case tp: TypeRef => - val tpWithWildcards = deriveTypeWithWildcards(existentialParams.toList)(tp) - checkTypeRef(tpWithWildcards, tree, skipBounds) - case _ => - } - if (skipBounds) { - tree.setType(tree.tpe.map { - _.filterAnnotations(_.symbol != UncheckedBoundsClass) - }) - } - - tree + tree.setType(RefCheckTypeMap.check(tree.tpe, tree, inPattern)) case TypeApply(fn, args) => checkBounds(tree, NoPrefix, NoSymbol, fn.tpe.typeParams, args map (_.tpe)) @@ -1799,6 +1827,10 @@ abstract class RefChecks extends Transform { case x @ Select(_, _) => transformSelect(x) + case Literal(Constant(tpe: Type)) => + RefCheckTypeMap.check(tpe, tree) + tree + case UnApply(fun, args) => transform(fun) // just make sure we enterReference for unapply symbols, note that super.transform(tree) would not transform(fun) // transformTrees(args) // TODO: is this necessary? could there be forward references in the args?? @@ -1868,7 +1900,7 @@ abstract class RefChecks extends Transform { result1 } catch { case ex: TypeError => - if (settings.debug) ex.printStackTrace() + if (settings.isDebug) ex.printStackTrace() reporter.error(tree.pos, ex.getMessage()) tree } finally { diff --git a/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala b/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala index ccdbabaff4c4..ef168e5926c9 100644 --- a/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala +++ b/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala @@ -356,6 +356,7 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT && !sym.owner.isTrait && sym.owner.enclosingPackageClass != currentClass.enclosingPackageClass && qual.symbol.info.member(sym.name).exists + && !(currentClass.typeOfThis.typeSymbol.isSubClass(sym.owner)) // scala/bug#11924 && !needsProtectedAccessor(sym, tree.pos) ) if (shouldEnsureAccessor) { diff --git a/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala b/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala index 575324df0f76..f0e89af2ff4e 100644 --- a/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala +++ b/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala @@ -77,10 +77,8 @@ trait SyntheticMethods extends ast.TreeDSL { if (!syntheticsOk) return templ - val synthesizer = new ClassMethodSynthesis( - clazz0, - newTyper( if (reporter.hasErrors) context makeSilent false else context ) - ) + val typer = newTyper(if (reporter.hasErrors) context.makeSilent(false) else context) + val synthesizer = new ClassMethodSynthesis(clazz0, typer) import synthesizer._ if (clazz0 == AnyValClass || isPrimitiveValueClass(clazz0)) return { @@ -126,8 +124,8 @@ trait SyntheticMethods extends ast.TreeDSL { createSwitchMethod(name, accessors.indices, returnType)(idx => caseFn(accessors(idx))) def productElementNameMethod = { - val constrParamAccessors = clazz.constrParamAccessors - createSwitchMethod(nme.productElementName, constrParamAccessors.indices, StringTpe)(idx => LIT(constrParamAccessors(idx).name.dropLocal.decode)) + val elementAccessors = 
clazz.constrParamAccessors.take(arity) + createSwitchMethod(nme.productElementName, elementAccessors.indices, StringTpe)(idx => LIT(elementAccessors(idx).name.dropLocal.decode)) } var syntheticCanEqual = false @@ -154,7 +152,7 @@ trait SyntheticMethods extends ast.TreeDSL { Match( Ident(eqmeth.firstParam), List( - CaseDef(Typed(Ident(nme.WILDCARD), TypeTree(clazz.tpe)), EmptyTree, TRUE), + CaseDef(Typed(Ident(nme.WILDCARD), TypeTree(typer.applyTypeToWildcards(clazz.tpe))), EmptyTree, TRUE), CaseDef(Ident(nme.WILDCARD), EmptyTree, FALSE) ) ) @@ -283,10 +281,7 @@ trait SyntheticMethods extends ast.TreeDSL { case sym => (sym, () => productElementNameMethod) :: Nil } - List( - productMethods, - elementName - ).flatten + productMethods ::: elementName } def hashcodeImplementation(sym: Symbol): Tree = { diff --git a/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala b/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala index bda816b31af0..b4e0d5339c01 100644 --- a/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala @@ -130,7 +130,7 @@ abstract class TreeCheckers extends Analyzer { // new symbols if (newSyms.nonEmpty) { informFn("" + newSyms.size + " new symbols.") - val toPrint = if (settings.debug) sortedNewSyms mkString " " else "" + val toPrint = if (settings.isDebug) sortedNewSyms mkString " " else "" newSyms.clear() if (toPrint != "") @@ -177,7 +177,7 @@ abstract class TreeCheckers extends Analyzer { def errorFn(msg: Any): Unit = errorFn(NoPosition, msg) def informFn(msg: Any): Unit = { - if (settings.verbose || settings.debug) + if (settings.verbose || settings.isDebug) println("[check: %s] %s".format(phase.prev, msg)) } diff --git a/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala b/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala index 2ee0a2efba1f..cef28da57f62 100644 --- a/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala +++ b/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala @@ -40,7 +40,7 @@ import scala.tools.nsc.Reporting.WarningCategory * * @author Paul Phillips */ -trait TypeDiagnostics { +trait TypeDiagnostics extends splain.SplainDiagnostics { self: Analyzer with StdAttachments => import global._ @@ -310,7 +310,7 @@ trait TypeDiagnostics { // when the message will never be seen. I though context.reportErrors // being false would do that, but if I return "" under // that condition, I see it. - def foundReqMsg(found: Type, req: Type): String = { + def builtinFoundReqMsg(found: Type, req: Type): String = { val foundWiden = found.widen val reqWiden = req.widen val sameNamesDifferentPrefixes = @@ -340,6 +340,11 @@ trait TypeDiagnostics { } } + def foundReqMsg(found: Type, req: Type): String = { + val errMsg = splainFoundReqMsg(found, req) + if (errMsg.isEmpty) builtinFoundReqMsg(found, req) else errMsg + } + def typePatternAdvice(sym: Symbol, ptSym: Symbol) = { val clazz = if (sym.isModuleClass) sym.companionClass else sym val caseString = @@ -815,7 +820,7 @@ trait TypeDiagnostics { // but it seems that throwErrors excludes some of the errors that should actually be // buffered, causing TypeErrors to fly around again. This needs some more investigation. 
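// ----------------------------------------------------------------------------------------------
// Illustrative aside, not part of the patch: the productElementNameMethod change above limits the
// generated switch to the first `arity` constructor parameter accessors, keeping it consistent
// with productArity for case classes that declare extra parameter lists. Sketch (names are
// illustrative):
object ProductElementNameDemo {
  case class User(name: String, age: Int)(val note: String)   // the second parameter list is not part of the product
  val u = User("Ada", 36)("imported")
  val names = (0 until u.productArity).map(u.productElementName)   // Vector("name", "age")
}
// ----------------------------------------------------------------------------------------------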
if (!context0.reportErrors) throw ex - if (settings.debug) ex.printStackTrace() + if (settings.isDebug) ex.printStackTrace() ex match { case CyclicReference(sym, info: TypeCompleter) => diff --git a/src/compiler/scala/tools/nsc/typechecker/TypeStrings.scala b/src/compiler/scala/tools/nsc/typechecker/TypeStrings.scala index 1290964fdffd..48b7b7c45bae 100644 --- a/src/compiler/scala/tools/nsc/typechecker/TypeStrings.scala +++ b/src/compiler/scala/tools/nsc/typechecker/TypeStrings.scala @@ -59,7 +59,7 @@ trait StructuredTypeStrings extends DestructureTypes { else block(level, grouping)(name, nodes) } private def shortClass(x: Any) = { - if (settings.debug) { + if (settings.isDebug) { val name = (x.getClass.getName split '.').last val str = if (TypeStrings.isAnonClass(x.getClass)) name else (name split '$').last diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index 54b82ebe4fdf..f0d111d66813 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -17,7 +17,7 @@ package typechecker import scala.annotation.tailrec import scala.collection.mutable import scala.reflect.internal.{Chars, TypesStats} -import scala.reflect.internal.util.{FreshNameCreator, ListOfNil, Statistics, StatisticsStatics} +import scala.reflect.internal.util.{FreshNameCreator, ListOfNil, Statistics} import scala.tools.nsc.Reporting.{MessageFilter, Suppression, WConf, WarningCategory} import scala.util.chaining._ import mutable.ListBuffer @@ -232,7 +232,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper // (it erases in TypeTrees, but not in, e.g., the type a Function node) def phasedAppliedType(sym: Symbol, args: List[Type]) = { val tp = appliedType(sym, args) - if (phase.erasedTypes) erasure.specialScalaErasure(tp) else tp + if (phase.erasedTypes) erasure.specialScalaErasureFor(sym)(tp) else tp } def typedDocDef(docDef: DocDef, mode: Mode, pt: Type): Tree = @@ -672,13 +672,13 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper def silent[T](op: Typer => T, reportAmbiguousErrors: Boolean = context.ambiguousErrors, newtree: Tree = context.tree): SilentResult[T] = { - val findMemberStart = if (StatisticsStatics.areSomeColdStatsEnabled) statistics.startCounter(findMemberFailed) else null - val subtypeStart = if (StatisticsStatics.areSomeColdStatsEnabled) statistics.startCounter(subtypeFailed) else null - val failedSilentStart = if (StatisticsStatics.areSomeColdStatsEnabled) statistics.startTimer(failedSilentNanos) else null + val findMemberStart = if (settings.areStatisticsEnabled) statistics.startCounter(findMemberFailed) else null + val subtypeStart = if (settings.areStatisticsEnabled) statistics.startCounter(subtypeFailed) else null + val failedSilentStart = if (settings.areStatisticsEnabled) statistics.startTimer(failedSilentNanos) else null def stopStats() = { - if (StatisticsStatics.areSomeColdStatsEnabled) statistics.stopCounter(findMemberFailed, findMemberStart) - if (StatisticsStatics.areSomeColdStatsEnabled) statistics.stopCounter(subtypeFailed, subtypeStart) - if (StatisticsStatics.areSomeColdStatsEnabled) statistics.stopTimer(failedSilentNanos, failedSilentStart) + if (settings.areStatisticsEnabled) statistics.stopCounter(findMemberFailed, findMemberStart) + if (settings.areStatisticsEnabled) statistics.stopCounter(subtypeFailed, subtypeStart) + if (settings.areStatisticsEnabled) 
statistics.stopTimer(failedSilentNanos, failedSilentStart) } @inline def wrapResult(reporter: ContextReporter, result: T) = if (reporter.hasErrors) { @@ -1100,7 +1100,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper def adaptExprNotFunMode(): Tree = { def lastTry(err: AbsTypeError = null): Tree = { debuglog("error tree = " + tree) - if (settings.debug && settings.explaintypes) explainTypes(tree.tpe, pt) + if (settings.isDebug && settings.explaintypes) explainTypes(tree.tpe, pt) if (err ne null) context.issue(err) if (tree.tpe.isErroneous || pt.isErroneous) setError(tree) else adaptMismatchedSkolems() @@ -1803,10 +1803,12 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper if (!ps.isEmpty && !superclazz.isSubClass(ps.head.typeSymbol)) pending += ParentSuperSubclassError(parent, superclazz, ps.head.typeSymbol, psym) if (!clazzIsTrait) { + def hasTraitParams(sym: Symbol) = + sym.isScala3Defined && sym.isTrait && sym.hasAttachment[DottyParameterisedTrait] // TODO perhaps there can be a flag to skip this when we know there can be no Scala 3 definitions // or otherwise use an optimised representation for trait parameters (parent.tpe :: ps).collectFirst { - case p if p.typeSymbol.hasAttachment[DottyParameterisedTrait] => + case p if hasTraitParams(p.typeSymbol) => p.typeSymbol.attachments.get[DottyParameterisedTrait].foreach( attach => pending += ParentIsScala3TraitError(parent, p.typeSymbol, attach.params, psym) ) @@ -4205,9 +4207,9 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper def isCapturedExistential(sym: Symbol) = (sym hasAllFlags EXISTENTIAL | CAPTURED) && { - val start = if (StatisticsStatics.areSomeColdStatsEnabled) statistics.startTimer(isReferencedNanos) else null + val start = if (settings.areStatisticsEnabled) statistics.startTimer(isReferencedNanos) else null try !isReferencedFrom(context, sym) - finally if (StatisticsStatics.areSomeColdStatsEnabled) statistics.stopTimer(isReferencedNanos, start) + finally if (settings.areStatisticsEnabled) statistics.stopTimer(isReferencedNanos, start) } def packCaptured(tpe: Type): Type = { @@ -4930,10 +4932,10 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper * insert an implicit conversion. */ def tryTypedApply(fun: Tree, args: List[Tree]): Tree = { - val start = if (StatisticsStatics.areSomeColdStatsEnabled) statistics.startTimer(failedApplyNanos) else null + val start = if (settings.areStatisticsEnabled) statistics.startTimer(failedApplyNanos) else null def onError(typeErrors: Seq[AbsTypeError], warnings: Seq[(Position, String, WarningCategory, Symbol)]): Tree = { - if (StatisticsStatics.areSomeColdStatsEnabled) statistics.stopTimer(failedApplyNanos, start) + if (settings.areStatisticsEnabled) statistics.stopTimer(failedApplyNanos, start) // If the problem is with raw types, convert to existentials and try again. 
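// ----------------------------------------------------------------------------------------------
// Illustrative aside, not part of the patch: the convertToAssignment fallback above is what turns
// a failed `qual.op=(arg)` application into an assignment when `qual` is a variable or getter.
// Minimal sketch (names are illustrative):
object OpAssignDemo {
  class Counter { var total: Int = 0 }
  val c = new Counter
  def bump(): Unit = {
    c.total += 1   // Int defines no `+=`, so after the failed apply this is rewritten to c.total = c.total + 1
  }
}
// ----------------------------------------------------------------------------------------------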
// See #4712 for a case where this situation arises, @@ -5014,8 +5016,8 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper // TODO: replace `fun.symbol.isStable` by `treeInfo.isStableIdentifierPattern(fun)` val stableApplication = (fun.symbol ne null) && fun.symbol.isMethod && fun.symbol.isStable val funpt = if (mode.inPatternMode) pt else WildcardType - val appStart = if (StatisticsStatics.areSomeColdStatsEnabled) statistics.startTimer(failedApplyNanos) else null - val opeqStart = if (StatisticsStatics.areSomeColdStatsEnabled) statistics.startTimer(failedOpEqNanos) else null + val appStart = if (settings.areStatisticsEnabled) statistics.startTimer(failedApplyNanos) else null + val opeqStart = if (settings.areStatisticsEnabled) statistics.startTimer(failedOpEqNanos) else null def isConversionCandidate(qual: Tree, name: Name): Boolean = !mode.inPatternMode && nme.isOpAssignmentName(TermName(name.decode)) && !qual.exists(_.isErroneous) @@ -5045,7 +5047,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper case Select(qual, name) if isConversionCandidate(qual, name) => val qual1 = typedQualifier(qual) if (treeInfo.isVariableOrGetter(qual1)) { - if (StatisticsStatics.areSomeColdStatsEnabled) statistics.stopTimer(failedOpEqNanos, opeqStart) + if (settings.areStatisticsEnabled) statistics.stopTimer(failedOpEqNanos, opeqStart) val erred = qual1.exists(_.isErroneous) || args.exists(_.isErroneous) if (erred) reportError(error) else { val convo = convertToAssignment(fun, qual1, name, args) @@ -5057,7 +5059,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper } } } else { - if (StatisticsStatics.areSomeColdStatsEnabled) statistics.stopTimer(failedApplyNanos, appStart) + if (settings.areStatisticsEnabled) statistics.stopTimer(failedApplyNanos, appStart) val Apply(Select(qual2, _), args2) = tree: @unchecked val erred = qual2.exists(_.isErroneous) || args2.exists(_.isErroneous) reportError { @@ -5065,7 +5067,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper } } case _ => - if (StatisticsStatics.areSomeColdStatsEnabled) statistics.stopTimer(failedApplyNanos, appStart) + if (settings.areStatisticsEnabled) statistics.stopTimer(failedApplyNanos, appStart) reportError(error) } val silentResult = silent( @@ -5076,7 +5078,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper silentResult match { case SilentResultValue(fun1) => val fun2 = if (stableApplication) stabilizeFun(fun1, mode, pt) else fun1 - if (StatisticsStatics.areSomeColdStatsEnabled) statistics.incCounter(typedApplyCount) + if (settings.areStatisticsEnabled) statistics.incCounter(typedApplyCount) val noSecondTry = ( isPastTyper || context.inSecondTry @@ -5404,7 +5406,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper else UnstableTreeError(qualTyped) typedSelect(tree, qualStableOrError, name) } else { - if (StatisticsStatics.areSomeColdStatsEnabled) statistics.incCounter(typedSelectCount) + if (settings.areStatisticsEnabled) statistics.incCounter(typedSelectCount) val qualTyped = checkDead(context, typedQualifier(qual, mode)) val tree1 = typedSelect(tree, qualTyped, name) @@ -5474,7 +5476,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper case LookupInaccessible(sym, msg) => issue(AccessError(tree, sym, context, msg)) case LookupNotFound => asTypeName orElse inEmptyPackage orElse lookupInRoot(name) match { - case 
NoSymbol => issue(SymbolNotFoundError(tree, name, context.owner, startContext)) + case NoSymbol => issue(SymbolNotFoundError(tree, name, context.owner, startContext, mode.in(all = PATTERNmode, none = APPSELmode | TYPEPATmode))) case sym => typed1(tree setSymbol sym, mode, pt) } case LookupSucceeded(qual, sym) => @@ -5511,7 +5513,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper def typedIdentOrWildcard(tree: Ident) = { val name = tree.name - if (StatisticsStatics.areSomeColdStatsEnabled) statistics.incCounter(typedIdentCount) + if (settings.areStatisticsEnabled) statistics.incCounter(typedIdentCount) if (!tree.isBackquoted && ((name == nme.WILDCARD && mode.typingPatternNotConstructor) || (name == tpnme.WILDCARD && mode.inTypeMode))) @@ -5613,7 +5615,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper AppliedTypeNoParametersError(tree, tpt1.tpe) } else { //Console.println("\{tpt1}:\{tpt1.symbol}:\{tpt1.symbol.info}") - if (settings.debug) Console.println(s"$tpt1:${tpt1.symbol}:${tpt1.symbol.info}")//debug + if (settings.isDebug) Console.println(s"$tpt1:${tpt1.symbol}:${tpt1.symbol.info}")//debug AppliedTypeWrongNumberOfArgsError(tree, tpt1, tparams) } } @@ -5841,7 +5843,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper } def isNullaryTerm: Boolean = { val maybe = context.lookupSymbol(TermName(id), _ => true).symbol - maybe != NoSymbol && !maybe.hasPackageFlag && maybe.alternatives.exists(x => requiresNoArgs(x.info)) + maybe != NoSymbol && !maybe.hasPackageFlag && !maybe.isModule && maybe.alternatives.exists(x => requiresNoArgs(x.info)) } id == "this" || isNullaryTerm } @@ -6003,9 +6005,8 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper def typed(tree: Tree, mode: Mode, pt: Type): Tree = { lastTreeToTyper = tree - val statsEnabled = StatisticsStatics.areSomeHotStatsEnabled() && statistics.areHotStatsLocallyEnabled - val startByType = if (statsEnabled) statistics.pushTimer(byTypeStack, byTypeNanos(tree.getClass)) else null - if (statsEnabled) statistics.incCounter(visitsByType, tree.getClass) + val startByType = if (settings.areHotStatisticsEnabled) statistics.pushTimer(byTypeStack, byTypeNanos(tree.getClass)) else null + if (settings.areHotStatisticsEnabled) statistics.incCounter(visitsByType, tree.getClass) val shouldPrintTyping = printTypings && !phase.erasedTypes && !noPrintTyping(tree) val shouldPopTypingStack = shouldPrintTyping && typingStack.beforeNextTyped(tree, mode, pt, context) @@ -6091,7 +6092,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper throw ex } finally { if (shouldPopTypingStack) typingStack.pop(tree) - if (statsEnabled) statistics.popTimer(byTypeStack, startByType) + if (settings.areHotStatisticsEnabled) statistics.popTimer(byTypeStack, startByType) if (shouldInsertStabilizers) context.pendingStabilizers = savedPendingStabilizer } } diff --git a/src/compiler/scala/tools/nsc/typechecker/TypersTracking.scala b/src/compiler/scala/tools/nsc/typechecker/TypersTracking.scala index 8ffa6cbe0b40..95512297b20d 100644 --- a/src/compiler/scala/tools/nsc/typechecker/TypersTracking.scala +++ b/src/compiler/scala/tools/nsc/typechecker/TypersTracking.scala @@ -29,7 +29,7 @@ trait TypersTracking { def fullSiteString(context: Context): String = { def owner_long_s = ( - if (settings.debug.value) { + if (settings.isDebug) { def flags_s = context.owner.debugFlagString match { case "" => "" case s => " with flags " + 
inLightMagenta(s) @@ -70,7 +70,7 @@ trait TypersTracking { private def truncAndOneLine(s: String): String = { val s1 = s.replaceAll("\\s+", " ") - if (s1.length < 60 || settings.debug.value) s1 else s1.take(57) + "..." + if (s1.length < 60 || settings.isDebug) s1 else s1.take(57) + "..." } private class Frame(val tree: Tree) { } @@ -160,7 +160,7 @@ trait TypersTracking { // Some trees which are typed with mind-numbing frequency and // which add nothing by being printed. Did () type to Unit? Let's // gamble on yes. - def printingOk(t: Tree) = printTypings && (settings.debug.value || !noPrint(t)) + def printingOk(t: Tree) = printTypings && (settings.isDebug || !noPrint(t)) def noPrintTyping(t: Tree) = (t.tpe ne null) || !printingOk(t) def noPrintAdapt(tree1: Tree, tree2: Tree) = !printingOk(tree1) || ( (tree1.tpe == tree2.tpe) diff --git a/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala b/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala index b63f8c0e7b55..cb6356103af9 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala @@ -168,7 +168,7 @@ trait Unapplies extends ast.TreeDSL { case _ => nme.unapply } val cparams = List(ValDef(Modifiers(PARAM | SYNTHETIC), unapplyParamName, classType(cdef, tparams), EmptyTree)) - val resultType = if (!currentRun.isScala212) TypeTree() else { // fix for scala/bug#6541 under -Xsource:2.12 + val resultType = { // fix for scala/bug#6541 under -Xsource:2.12 def repeatedToSeq(tp: Tree) = tp match { case AppliedTypeTree(Select(_, tpnme.REPEATED_PARAM_CLASS_NAME), tps) => AppliedTypeTree(gen.rootScalaDot(tpnme.Seq), tps) case _ => tp diff --git a/src/compiler/scala/tools/nsc/typechecker/splain/SplainData.scala b/src/compiler/scala/tools/nsc/typechecker/splain/SplainData.scala new file mode 100644 index 000000000000..7c438a2d202d --- /dev/null +++ b/src/compiler/scala/tools/nsc/typechecker/splain/SplainData.scala @@ -0,0 +1,94 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.tools.nsc +package typechecker +package splain + +import scala.util.matching.Regex + +trait SplainData { + self: Analyzer => + + import global._ + + sealed trait ImplicitErrorSpecifics + + object ImplicitErrorSpecifics { + case class NotFound(param: Symbol) extends ImplicitErrorSpecifics + + case class NonconformantBounds( + targs: List[Type], tparams: List[Symbol], originalError: Option[AbsTypeError], + ) extends ImplicitErrorSpecifics + } + + object ImplicitErrors { + var stack: List[Type] = Nil + var errors: List[ImplicitError] = Nil + + def push(error: ImplicitError): Unit = errors ::= error + def nesting: Int = stack.length - 1 + def nested: Boolean = stack.nonEmpty + def removeErrorsFor(tpe: Type): Unit = errors = errors.dropWhile(_.tpe == tpe) + + def startSearch(expectedType: Type): Unit = { + if (settings.Vimplicits) { + if (!nested) errors = List() + stack = expectedType :: stack + } + } + + def finishSearch(success: Boolean, expectedType: Type): Unit = { + if (settings.Vimplicits) { + if (success) removeErrorsFor(expectedType) + stack = stack.drop(1) + } + } + } + + case class ImplicitError(tpe: Type, candidate: Tree, nesting: Int, specifics: ImplicitErrorSpecifics) { + import ImplicitError._ + + override def equals(other: Any) = other match { + case o: ImplicitError => o.tpe.toString == tpe.toString && candidateName(this) == candidateName(o) + case _ => false + } + + override def hashCode = (tpe.toString.##, ImplicitError.candidateName(this).##).## + override def toString = s"ImplicitError(${shortName(tpe.toString)}, ${shortName(candidate.toString)}), $nesting, $specifics)" + } + + object ImplicitError { + def unapplyCandidate(e: ImplicitError): Tree = + e.candidate match { + case TypeApply(fun, _) => fun + case a => a + } + + def candidateName(e: ImplicitError): String = + unapplyCandidate(e) match { + case Select(_, name) => name.toString + case Ident(name) => name.toString + case a => a.toString + } + + val candidateRegex: Regex = """.*\.this\.(.*)""".r + + def cleanCandidate(e: ImplicitError): String = + unapplyCandidate(e).toString match { + case candidateRegex(suf) => suf + case a => a + } + + def shortName(ident: String): String = ident.substring(ident.lastIndexOf(".") + 1) + } +} diff --git a/src/compiler/scala/tools/nsc/typechecker/splain/SplainDiagnostics.scala b/src/compiler/scala/tools/nsc/typechecker/splain/SplainDiagnostics.scala new file mode 100644 index 000000000000..ca0caa642286 --- /dev/null +++ b/src/compiler/scala/tools/nsc/typechecker/splain/SplainDiagnostics.scala @@ -0,0 +1,26 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.tools.nsc +package typechecker +package splain + +trait SplainDiagnostics extends splain.SplainFormatting { + self: Analyzer => + + import global._ + + def splainFoundReqMsg(found: Type, req: Type): String = { + if (settings.VtypeDiffs) ";\n" + showFormattedL(formatDiff(found, req, top = true), break = true).indent.joinLines + else "" + } +} diff --git a/src/compiler/scala/tools/nsc/typechecker/splain/SplainErrors.scala b/src/compiler/scala/tools/nsc/typechecker/splain/SplainErrors.scala new file mode 100644 index 000000000000..41a96c5403b3 --- /dev/null +++ b/src/compiler/scala/tools/nsc/typechecker/splain/SplainErrors.scala @@ -0,0 +1,64 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.tools.nsc +package typechecker +package splain + +trait SplainErrors { self: Analyzer with SplainFormatting => + import global._ + + def splainPushNotFound(tree: Tree, param: Symbol): Unit = + ImplicitErrors.stack.headOption.foreach { pt => + val specifics = ImplicitErrorSpecifics.NotFound(param) + ImplicitErrors.push(ImplicitError(pt, tree, ImplicitErrors.nesting, specifics)) + } + + def splainPushOrReportNotFound(tree: Tree, param: Symbol, annotationMsg: String): String = + if (settings.Vimplicits) + if (ImplicitErrors.nested) { + splainPushNotFound(tree, param) + "" + } + else pluginsNoImplicitFoundError(param, ImplicitErrors.errors, formatImplicitError(param, ImplicitErrors.errors, annotationMsg)) + else "" + + def splainPushNonconformantBonds( + tpe: Type, + candidate: Tree, + targs: List[Type], + tparams: List[Symbol], + originalError: Option[AbsTypeError], + ): Unit = { + if (settings.Vimplicits) { + val specifics = ImplicitErrorSpecifics.NonconformantBounds(targs, tparams, originalError) + ImplicitErrors.push(ImplicitError(tpe, candidate, ImplicitErrors.nesting, specifics)) + } + } + + def splainPushImplicitSearchFailure(implicitTree: Tree, expectedType: Type, originalError: AbsTypeError): Unit = { + def pushImpFailure(fun: Tree, args: List[Tree]): Unit = { + fun.tpe match { + case PolyType(tparams, restpe) if tparams.nonEmpty && sameLength(tparams, args) => + splainPushNonconformantBonds(expectedType, implicitTree, mapList(args)(_.tpe), tparams, Some(originalError)) + case _ => + } + } + if (settings.Vimplicits) { + implicitTree match { + case TypeApply(fun, args) => pushImpFailure(fun, args) + case Apply(TypeApply(fun, args), _) => pushImpFailure(fun, args) + case _ => + } + } + } +} diff --git a/src/compiler/scala/tools/nsc/typechecker/splain/SplainFormatData.scala b/src/compiler/scala/tools/nsc/typechecker/splain/SplainFormatData.scala new file mode 100644 index 000000000000..4dae6b1b5e31 --- /dev/null +++ b/src/compiler/scala/tools/nsc/typechecker/splain/SplainFormatData.scala @@ -0,0 +1,92 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.tools.nsc +package typechecker +package splain + +import scala.annotation.tailrec + +object Formatted { + @tailrec def comparator(formatted: Formatted): String = formatted match { + case Infix(left, _, _, _) => comparator(left) + case Simple(tpe) => tpe.name + case Qualified(Nil, tpe) => tpe.name + case Qualified(path, tpe) => s"${path.mkString}${tpe.name}" + case UnitForm => "()" + case Applied(cons, _) => comparator(cons) + case TupleForm(Nil) => "()" + case TupleForm(h :: _) => comparator(h) + case FunctionForm(Nil, ret, _) => comparator(ret) + case FunctionForm(h :: _, _, _) => comparator(h) + case RefinedForm(Nil, _) => "()" + case RefinedForm(h :: _, _) => comparator(h) + case Diff(l, _) => comparator(l) + case Decl(sym, _) => comparator(sym) + case DeclDiff(sym, _, _) => comparator(sym) + case ByName(tpe) => comparator(tpe) + } + + implicit val Ord: Ordering[Formatted] = (x, y) => Ordering[String].compare(comparator(x), comparator(y)) +} + +sealed trait Formatted { + def length: Int = this match { + case Infix(infix, left, right, top) => infix.length + left.length + right.length + 2 + case Simple(tpe) => tpe.name.length + case Qualified(path, tpe) => path.map(_.length).sum + path.length + tpe.name.length + case UnitForm => 4 + case Applied(cons, args) => args.map(_.length).sum + ( args.length - 1) * 2 + cons.length + 2 + case TupleForm(elems) => elems.map(_.length).sum + (elems.length - 1) + 2 + case FunctionForm(args, ret, top) => args.map(_.length).sum + ( args.length - 1) + 2 + ret.length + 4 + case RefinedForm(elems, decls) => elems.map(_.length).sum + (elems.length - 1) * 6 + case Diff(lhs, rhs) => lhs.length + rhs.length + 1 + case Decl(sym, rhs) => sym.length + rhs.length + 8 + case DeclDiff(sym, lhs, rhs) => sym.length + lhs.length + rhs.length + 9 + case ByName(tpe) => tpe.length + 5 + } +} + +sealed trait FormattedName { val name: String } +case class SimpleName(name: String) extends FormattedName +case class InfixName(name: String) extends FormattedName + +case class Infix(infix: Formatted, left: Formatted, right: Formatted, top: Boolean) extends Formatted +case class Simple(tpe: FormattedName) extends Formatted +case class Qualified(path: List[String], tpe: FormattedName) extends Formatted +case object UnitForm extends Formatted +case class Applied(cons: Formatted, args: List[Formatted]) extends Formatted +case class TupleForm(elems: List[Formatted]) extends Formatted +case class FunctionForm(args: List[Formatted], ret: Formatted, top: Boolean) extends Formatted +case class RefinedForm(elems: List[Formatted], decls: List[Formatted]) extends Formatted +case class Diff(left: Formatted, right: Formatted) extends Formatted +case class Decl(sym: Formatted, rhs: Formatted) extends Formatted +case class DeclDiff(sym: Formatted, left: Formatted, right: Formatted) extends Formatted +case class ByName(tpe: Formatted) extends Formatted + +sealed trait TypeRepr { + def flat: String + def lines: List[String] + def tokenize: String = lines.mkString(" ") + def joinLines: String = lines.mkString("\n") + def indent: TypeRepr +} + +case class BrokenType(lines: List[String]) extends TypeRepr { + def flat = lines.mkString(" ") + def indent = BrokenType(lines.map(" " + _)) +} + +case class FlatType(flat: String) extends TypeRepr { + def lines = List(flat) + def indent = FlatType(s" $flat") +} diff --git a/src/compiler/scala/tools/nsc/typechecker/splain/SplainFormatting.scala b/src/compiler/scala/tools/nsc/typechecker/splain/SplainFormatting.scala new file mode 100644 
index 000000000000..909a52c8de2b --- /dev/null +++ b/src/compiler/scala/tools/nsc/typechecker/splain/SplainFormatting.scala @@ -0,0 +1,537 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.tools.nsc +package typechecker +package splain + +import scala.collection.mutable +import scala.reflect.internal.TypeDebugging.AnsiColor._ + +class FormatCache[K, V](cache: mutable.Map[K, V]) { + def apply(k: K, orElse: => V): V = cache.getOrElseUpdate(k, orElse) +} + +object FormatCache { + def apply[K, V]() = new FormatCache[K, V](mutable.Map()) +} + +trait SplainFormatters { + self: Analyzer => + + import global._, definitions._ + + def formatType(tpe: Type, top: Boolean): Formatted + + object Refined { + def unapply(tpe: Type): Option[(List[Type], Scope)] = tpe match { + case RefinedType(parents, decls) => Some((parents, decls)) + case t @ SingleType(_, _) => unapply(t.underlying) + case _ => None + } + } + + trait SpecialFormatter { + def apply[A]( + tpe: Type, simple: String, args: List[A], formattedArgs: => List[Formatted], top: Boolean, + )(rec: (A, Boolean) => Formatted): Option[Formatted] + + def diff(left: Type, right: Type, top: Boolean): Option[Formatted] + } + + object FunctionFormatter extends SpecialFormatter { + def apply[A]( + tpe: Type, simple: String, args: List[A], formattedArgs: => List[Formatted], top: Boolean, + )(rec: (A, Boolean) => Formatted) = { + if (simple.startsWith("Function")) { + val fmtArgs = formattedArgs + val (params, returnt) = fmtArgs.splitAt(fmtArgs.length - 1) + Some(FunctionForm(params, returnt.headOption.getOrElse(UnitForm), top)) + } else None + } + + def diff(left: Type, right: Type, top: Boolean) = None + } + + object TupleFormatter extends SpecialFormatter { + def apply[A]( + tpe: Type, simple: String, args: List[A], formattedArgs: => List[Formatted], top: Boolean + )(rec: (A, Boolean) => Formatted) = { + if (simple.startsWith("Tuple")) Some(TupleForm(formattedArgs)) + else None + } + + def diff(left: Type, right: Type, top: Boolean) = None + } + + object RefinedFormatter extends SpecialFormatter { + object DeclSymbol { + def unapply(sym: Symbol): Option[(Formatted, Formatted)] = + if (sym.hasRawInfo) Some((Simple(SimpleName(sym.simpleName.toString)), formatType(sym.rawInfo, true))) + else None + } + + def ignoredTypes: List[Type] = List(typeOf[Object], typeOf[Any], typeOf[AnyRef]) + + def sanitizeParents: List[Type] => List[Type] = { + case List(tpe) => List(tpe) + case tpes => tpes.filter(t => !ignoredTypes.exists(_ =:= t)) + } + + def formatDecl: Symbol => Formatted = { + case DeclSymbol(n, t) => Decl(n, t) + case sym => Simple(SimpleName(sym.toString)) + } + + def apply[A]( + tpe: Type, simple: String, args: List[A], formattedArgs: => List[Formatted], top: Boolean, + )(rec: (A, Boolean) => Formatted): Option[Formatted] = tpe match { + case Refined(parents, decls) => + Some(RefinedForm(sanitizeParents(parents).map(formatType(_, top)), decls.toList.map(formatDecl))) + case _ => None + } + + val none: Formatted = Simple(SimpleName("")) + + def separate[A](left: List[A], right: List[A]): (List[A], List[A], List[A]) = { + val leftS = Set(left: _*) + val rightS = Set(right: _*) + val common = leftS.intersect(rightS) + val uniqueLeft = leftS -- common + val uniqueRight = rightS -- common + 
(common.toList, uniqueLeft.toList, uniqueRight.toList) + } + + def matchTypes(left: List[Type], right: List[Type]): List[Formatted] = { + val (common, uniqueLeft, uniqueRight) = separate(left.map(formatType(_, true)), right.map(formatType(_, true))) + val diffs = uniqueLeft.zipAll(uniqueRight, none, none).map { case (l, r) => Diff(l, r) } + common ::: diffs + } + + def filterDecls(syms: List[Symbol]): List[(Formatted, Formatted)] = + syms.collect { case DeclSymbol(sym, rhs) => (sym, rhs) } + + def matchDecls(left: List[Symbol], right: List[Symbol]): List[Formatted] = { + val (common, uniqueLeft, uniqueRight) = separate(filterDecls(left), filterDecls(right)) + val diffs = uniqueLeft + .map(Some(_)) + .zipAll(uniqueRight.map(Some(_)), None, None) + .collect { + case (Some((sym, l)), Some((_, r))) => DeclDiff(sym, l, r) + case (None, Some((sym, r))) => DeclDiff(sym, none, r) + case (Some((sym, l)), None) => DeclDiff(sym, l, none) + } + common.map { case (sym, rhs) => Decl(sym, rhs) } ++ diffs + } + + def diff(left: Type, right: Type, top: Boolean): Option[Formatted] = + (left, right) match { + case (Refined(leftParents, leftDecls), Refined(rightParents, rightDecls)) => + val parents = matchTypes(sanitizeParents(leftParents), sanitizeParents(rightParents)).sorted + val decls = matchDecls(leftDecls.toList, rightDecls.toList).sorted + Some(RefinedForm(parents, decls)) + case _ => None + } + } + + object ByNameFormatter extends SpecialFormatter { + def apply[A]( + tpe: Type, simple: String, args: List[A], formattedArgs: => List[Formatted], top: Boolean, + )(rec: (A, Boolean) => Formatted): Option[Formatted] = tpe match { + case TypeRef(_, ByNameParamClass, List(a)) => Some(ByName(formatType(a, true))) + case _ => None + } + + def diff(left: Type, right: Type, top: Boolean): Option[Formatted] = None + } +} + +trait SplainFormatting extends SplainFormatters { + self: Analyzer => + + import global._ + + val breakInfixLength: Int = 70 + + def dealias(tpe: Type) = + if (isAux(tpe)) tpe + else (tpe match { + case ExistentialType(_, t) => t + case _ => tpe + }).dealias + + def extractArgs(tpe: Type) = tpe match { + case PolyType(params, result) => result.typeArgs.map { + case t if params.contains(t.typeSymbol) => WildcardType + case a => a + } + case t: AliasTypeRef if !isAux(tpe) => + t.betaReduce.typeArgs.map(a => if (a.typeSymbolDirect.isTypeParameter) WildcardType else a) + case _ => tpe.typeArgs + } + + def isRefined(tpe: Type) = tpe.dealias match { + case RefinedType(_, _) => true + case _ => false + } + + def isSymbolic(tpe: Type) = { + val n = tpe.typeConstructor.typeSymbol.name + !isRefined(tpe) && n.encodedName.toString != n.decodedName.toString + } + + def ctorNames(tpe: Type): List[String] = + scala.util.Try(tpe.typeConstructor.toString) + .map(_.split('.').toList) + .getOrElse(List(tpe.toString)) + + def isAux(tpe: Type) = ctorNames(tpe).lastOption.contains("Aux") + + def formatRefinement(sym: Symbol) = { + if (sym.hasRawInfo) s"$sym = ${showType(sym.rawInfo)}" + else sym.toString + } + + def formatAuxSimple(tpe: Type): (List[String], String) = { + val names = ctorNames(tpe) + (names.dropRight(2), ctorNames(tpe).takeRight(2).mkString(".")) + } + + def symbolPath(sym: Symbol): List[String] = + sym + .ownerChain + .takeWhile(sym => sym.isType && !sym.isPackageClass) + .map(_.name.decodedName.toString) + .reverse + + def sanitizePath(path: List[String]): List[String] = + path.takeWhile(_ != "type").filter(!_.contains("$")) + + def pathPrefix: List[String] => String = { + case Nil => "" + case 
List("") => "" + case a => a.mkString("", ".", ".") + } + + def qualifiedName(path: List[String], name: FormattedName): String = name match { + case SimpleName(name) => s"${pathPrefix(path)}$name" + case InfixName(name) => name + } + + def stripModules(path: List[String], name: FormattedName): String = { + val qName = qualifiedName(path, name) + if (shorthands(qName)) name.name else qName + } + + case class TypeParts(sym: Symbol, tt: Type) { + def modulePath: List[String] = (tt, sym) match { + case (TypeRef(pre, _, _), _) if !pre.toString.isEmpty => sanitizePath(pre.toString.split("\\.").toList) + case (SingleType(_, _), sym) => symbolPath(sym).dropRight(1) + case (_, _) => Nil + } + + def ownerPath: List[String] = { + val parts = sym.ownerChain.reverse.map(_.name.decodedName.toString) + parts.splitAt(Math.max(0, parts.size - 1))._1 + } + + def shortName: String = tt.safeToString.stripPrefix(tt.prefixString.split('.').dropRight(1).mkString(".") + ".") + } + + def stripType(tpe: Type): (List[String], String) = tpe match { + case tt: SingletonType => + val parts = TypeParts(tt.termSymbol, tt) + parts.modulePath -> parts.shortName + + case tt: RefinedType => + val parts = TypeParts(tt.typeSymbol, tt) + parts.modulePath -> parts.shortName + + case _ => + // TODO: should this also use TypeParts ? + val sym = if (tpe.takesTypeArgs) tpe.typeSymbolDirect else tpe.typeSymbol + val symName = sym.name.decodedName.toString + val parts = TypeParts(sym, tpe) + (parts.modulePath, if (sym.isModuleClass) s"$symName.type" else symName) + } + + def formatNormalSimple(tpe: Type): (List[String], String) = tpe match { + case a @ WildcardType => (Nil, a.toString) + case a => stripType(a) + } + + def formatSimpleType(tpe: Type): (List[String], String) = + if (isAux(tpe)) formatAuxSimple(tpe) + else formatNormalSimple(tpe) + + def indentLine(line: String, n: Int = 1, prefix: String = " ") = (prefix * n) + line + def indent(lines: List[String], n: Int = 1, prefix: String = " ") = lines.map(indentLine(_, n, prefix)) + + /** If the args of an applied type constructor are multiline, + * create separate lines for the constructor name and the closing bracket; + * else return a single line. 
*/ + def showTypeApply(cons: String, args: List[TypeRepr], break: Boolean): TypeRepr = { + val flatArgs = bracket(args.map(_.flat)) + val flat = FlatType(s"$cons$flatArgs") + def brokenArgs = args match { + case head :: tail => tail.foldLeft(head.lines)((z, a) => z ::: "," :: a.lines) + case _ => Nil + } + def broken = BrokenType(s"$cons[" :: indent(brokenArgs) ::: List("]")) + if (break) decideBreak(flat, broken) else flat + } + + def showTuple(args: List[String]) = args match { + case head :: Nil => s"Tuple1[$head]" + case _ => args.mkString("(", ",", ")") + } + + def showFuncParams(args: List[String]) = args match { + case head :: Nil => head + case _ => args.mkString("(", ",", ")") + } + + def showRefined(parents: List[String], decls: List[String]) = { + val p = parents.mkString(" with ") + val d = if (decls.isEmpty) "" else decls.mkString(" {", "; ", "}") + s"$p$d" + } + + def bracket[A](params: List[A]) = params.mkString("[", ", ", "]") + + def formatFunction(args: List[String]) = { + val (params, returnt) = args.splitAt(args.length - 1) + s"${showTuple(params)} => ${showTuple(returnt)}" + } + + def decideBreak(flat: FlatType, broken: => BrokenType): TypeRepr = + if (flat.flat.length > breakInfixLength) broken + else flat + + /** Turn a nested infix type structure into a flat list + * {{{ + * ::[A, ::[B, C]]] => List(A, ::, B, ::, C) + * }}} + */ + def flattenInfix(tpe: Infix): List[Formatted] = { + def step(tpe: Formatted): List[Formatted] = tpe match { + case Infix(infix, left, right, _) => left :: infix :: step(right) + case a => List(a) + } + step(tpe) + } + + /** Break a list produced by [[flattenInfix]] into lines by taking two + * elements at a time, then appending the terminal. + * If the expression's length is smaller than the threshold specified via + * plugin parameter, return a single line. 
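// showTypeApply and decideBreak above keep a rendered type on one line while it fits and switch to a
// multi-line layout once it exceeds breakInfixLength. A toy version of that decision over plain
// strings instead of the Formatted tree; only the threshold value mirrors the real code, the rest is
// illustrative.
object BreakDecisionSketch {
  val breakInfixLength = 70

  def render(cons: String, args: List[String]): List[String] = {
    val flat = s"$cons[${args.mkString(", ")}]"
    if (flat.length <= breakInfixLength) List(flat)              // short enough: single line
    else s"$cons[" :: args.map("  " + _ + ",") ::: List("]")     // otherwise one argument per line
  }

  def main(args: Array[String]): Unit = {
    render("Either", List("String", "Int")).foreach(println)
    render("EitherT", List.fill(3)("SomeVeryLongTypeConstructorName[Alpha, Beta]")).foreach(println)
  }
}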
*/ + def breakInfix(types: List[Formatted]): TypeRepr = { + val form = types.map(showFormattedL(_, break = true)) + def broken = form.sliding(2, 2).flatMap { + case FlatType(tpe) :: FlatType(infix) :: Nil => List(s"$tpe $infix") + case left :: right :: Nil => left.lines ++ right.lines + case last :: Nil => last.lines + case _ => Nil + }.toList + decideBreak(FlatType(form.flatMap(_.lines).mkString(" ")), BrokenType(broken)) + } + + val showFormattedLCache = FormatCache[(Formatted, Boolean), TypeRepr]() + val formatTypeCache = FormatCache[(Type, Boolean), Formatted]() + val formatDiffCache = FormatCache[(Type, Type, Boolean), Formatted]() + + val specialFormatters: List[SpecialFormatter] = + List(FunctionFormatter, TupleFormatter, RefinedFormatter, ByNameFormatter) + + def truncateDecls(decls: List[Formatted]): Boolean = settings.VimplicitsMaxRefined.value < decls.map(_.length).sum + + def showFormattedQualified(path: List[String], name: FormattedName): TypeRepr = + FlatType(stripModules(path, name)) + + def formattedDiff(left: Formatted, right: Formatted): String = (left, right) match { + case (Qualified(lpath, lname), Qualified(rpath, rname)) if lname == rname => + val prefix = lpath.reverseIterator.zip(rpath.reverseIterator).takeWhile { case (l, r) => l == r }.size + 1 + s"${qualifiedName(lpath.takeRight(prefix), lname).red}|${qualifiedName(rpath.takeRight(prefix), rname).green}" + case (left, right) => + val l = showFormatted(left) + val r = showFormatted(right) + s"${l.red}|${r.green}" + } + + def showFormattedLImpl(tpe: Formatted, break: Boolean): TypeRepr = tpe match { + case Simple(name) => FlatType(name.name) + case Qualified(path, name) => showFormattedQualified(path, name) + case Applied(cons, args) => showTypeApply(showFormatted(cons), args.map(showFormattedL(_, break)), break) + case tpe @ Infix(_, _, _, top) => wrapParensRepr(if (break) breakInfix(flattenInfix(tpe)) else FlatType(flattenInfix(tpe).map(showFormatted).mkString(" ")), top) + case UnitForm => FlatType("Unit") + case FunctionForm(args, ret, top) => FlatType(wrapParens(s"${showFuncParams(args.map(showFormatted))} => ${showFormatted(ret)}", top)) + case TupleForm(elems) => FlatType(showTuple(elems.map(showFormatted))) + case RefinedForm(elems, decls) => FlatType(showRefined(elems.map(showFormatted), if (truncateDecls(decls)) List("...") else decls.map(showFormatted))) + case Diff(left, right) => FlatType(formattedDiff(left, right)) + case Decl(sym, rhs) => FlatType(s"type ${showFormatted(sym)} = ${showFormatted(rhs)}") + case DeclDiff(sym, left, right) => FlatType(s"type ${showFormatted(sym)} = ${formattedDiff(left, right)}") + case ByName(tpe) => FlatType(s"(=> ${showFormatted(tpe)})") + } + + def showFormattedL(tpe: Formatted, break: Boolean): TypeRepr = showFormattedLCache((tpe, break), showFormattedLImpl(tpe, break)) + def showFormatted(tpe: Formatted): String = showFormattedL(tpe, break = false).tokenize + def showType(tpe: Type): String = showFormattedL(formatType(tpe, top = true), break = false).joinLines + def showTypeBreakL(tpe: Type): List[String] = showFormattedL(formatType(tpe, top = true), break = true).lines + + def wrapParens(expr: String, top: Boolean): String = if (top) expr else s"($expr)" + + def wrapParensRepr(tpe: TypeRepr, top: Boolean): TypeRepr = tpe match { + case FlatType(tpe) => FlatType(wrapParens(tpe, top)) + case BrokenType(lines) => if (top) tpe else BrokenType("(" :: indent(lines) ::: List(")")) + } + + def formatSpecial[A]( + tpe: Type, simple: String, args: List[A], formattedArgs: => 
List[Formatted], top: Boolean, + )(rec: (A, Boolean) => Formatted): Option[Formatted] = + specialFormatters.iterator.map(_.apply(tpe, simple, args, formattedArgs, top)(rec)).collectFirst { case Some(a) => a } + + def formatInfix[A]( + path: List[String], simple: String, left: A, right: A, top: Boolean, + )(rec: (A, Boolean) => Formatted): Formatted = + Infix(Qualified(path, InfixName(simple)), rec(left, false), rec(right, false), top) + + def formatWithInfix[A](tpe: Type, args: List[A], top: Boolean)(rec: (A, Boolean) => Formatted): Formatted = { + val (path, simple) = formatSimpleType(tpe) + lazy val formattedArgs = args.map(rec(_, true)) + formatSpecial(tpe, simple, args, formattedArgs, top)(rec).getOrElse { + args match { + case left :: right :: Nil if isSymbolic(tpe) => formatInfix(path, simple, left, right, top)(rec) + case _ :: _ => Applied(Qualified(path, SimpleName(simple)), formattedArgs) + case _ => Qualified(path, SimpleName(simple)) + } + } + } + + def formatTypeImpl(tpe: Type, top: Boolean): Formatted = { + val dtpe = dealias(tpe) + formatWithInfix(dtpe, extractArgs(dtpe), top)(formatType) + } + + def formatType(tpe: Type, top: Boolean): Formatted = formatTypeCache((tpe, top), formatTypeImpl(tpe, top)) + + def formatDiffInfix(left: Type, right: Type, top: Boolean): Formatted = + formatWithInfix(left, extractArgs(left).zip(extractArgs(right)), top) { case ((l, r), t) => formatDiff(l, r, t) } + + def formatDiffSpecial(left: Type, right: Type, top: Boolean): Option[Formatted] = + specialFormatters.iterator.map(_.diff(left, right, top)).collectFirst { case Some(a) => a } + + def formatDiffSimple(left: Type, right: Type): Formatted = + Diff(formatType(left, true), formatType(right, true)) + + def formatDiffImpl(found: Type, req: Type, top: Boolean): Formatted = { + val (left, right) = dealias(found) -> dealias(req) + if (left =:= right) formatType(left, top) + else if (left.typeSymbol == right.typeSymbol) formatDiffInfix(left, right, top) + else formatDiffSpecial(left, right, top).getOrElse(formatDiffSimple(left, right)) + } + + def formatDiff(left: Type, right: Type, top: Boolean): Formatted = + formatDiffCache((left, right, top), formatDiffImpl(left, right, top)) + + def formatNonConfBounds(err: ImplicitErrorSpecifics.NonconformantBounds): List[String] = { + val params = bracket(err.tparams.map(_.defString)) + val types = bracket(err.targs.map(showType)) + List("nonconformant bounds;", types.red, params.green) + } + + def formatNestedImplicit(err: ImplicitError): (String, List[String], Int) = { + val candidate = ImplicitError.cleanCandidate(err) + val problem = s"${candidate.red} invalid because" + val reason = err.specifics match { + case e: ImplicitErrorSpecifics.NotFound => implicitMessage(e.param, NoImplicitFoundAnnotation(err.candidate, e.param)._2) + case e: ImplicitErrorSpecifics.NonconformantBounds => formatNonConfBounds(e) + } + (problem, reason, err.nesting) + } + + def hideImpError(error: ImplicitError): Boolean = error.specifics match { + case ImplicitErrorSpecifics.NonconformantBounds(_, _, _) => true + case ImplicitErrorSpecifics.NotFound(_) => false + } + + def indentTree(tree: List[(String, List[String], Int)], baseIndent: Int): List[String] = { + val nestings = tree.map(_._3).distinct.sorted + tree.flatMap { case (head, tail, nesting) => + val ind = baseIndent + nestings.indexOf(nesting).abs + indentLine(head, ind, "――") :: indent(tail, ind) + } + } + + def formatIndentTree(chain: List[ImplicitError], baseIndent: Int) = + 
indentTree(chain.map(formatNestedImplicit), baseIndent) + + def deepestLevel(chain: List[ImplicitError]) = + chain.foldLeft(0)((z, a) => if (a.nesting > z) a.nesting else z) + + def formatImplicitChainTreeCompact(chain: List[ImplicitError]): Option[List[String]] = { + chain.headOption.map { head => + val max = deepestLevel(chain) + val leaves = chain.drop(1).dropWhile(_.nesting < max) + val base = if (head.nesting == 0) 0 else 1 + val (fhh, fht, fhn) = formatNestedImplicit(head) + val spacer = if (leaves.nonEmpty && leaves.length < chain.length) List("ā‹®".blue) else Nil + val fh = (fhh, fht ++ spacer, fhn) + val ft = leaves.map(formatNestedImplicit) + indentTree(fh :: ft, base) + } + } + + def formatImplicitChainTreeFull(chain: List[ImplicitError]): List[String] = + formatIndentTree(chain, chain.headOption.map(_.nesting).getOrElse(0)) + + def formatImplicitChainFlat(chain: List[ImplicitError]): List[String] = + chain.map(formatNestedImplicit).flatMap { case (h, t, _) => h :: t } + + def formatImplicitChain(chain: List[ImplicitError]): List[String] = { + val compact = if (settings.VimplicitsVerboseTree) None else formatImplicitChainTreeCompact(chain) + compact.getOrElse(formatImplicitChainTreeFull(chain)) + } + + /** Remove duplicates and special cases that should not be shown. + * In some cases, candidates are reported twice, once as `Foo.f` and once as + * `f`. `ImplicitError.equals` checks the simple names for identity, which + * is suboptimal, but works for 99% of cases. + * Special cases are handled in [[hideImpError]] */ + def formatNestedImplicits(errors: List[ImplicitError]) = { + val visible = errors.filterNot(hideImpError) + val chains = splitChains(visible).map(_.distinct).distinct + chains.map(formatImplicitChain).flatMap("" :: _).drop(1) + } + + def implicitMessage(param: Symbol, annotationMsg: String): List[String] = { + val tpe = param.tpe + val msg = if (annotationMsg.isEmpty) Nil else annotationMsg.split("\n").toList.map(_.blue) :+ "" + val head = s"${"!".red}${"I".blue} ${param.name.toString.yellow}:" + val lines = showTypeBreakL(tpe).map(_.green) match { + case single :: Nil => List(s"$head $single") + case l => head :: indent(l) + } + lines ::: indent(msg) + } + + def splitChains(errors: List[ImplicitError]): List[List[ImplicitError]] = { + errors.foldRight(Nil: List[List[ImplicitError]]) { + case (a, chains @ ((chain @ (prev :: _)) :: tail)) => + if (a.nesting > prev.nesting) List(a) :: chains + else (a :: chain) :: tail + case (a, _) => List(List(a)) + } + } + + def formatImplicitError(param: Symbol, errors: List[ImplicitError], annotationMsg: String) = + ("implicit error;" :: implicitMessage(param, annotationMsg) ::: formatNestedImplicits(errors)).mkString("\n") +} diff --git a/src/compiler/scala/tools/nsc/util/ClassPath.scala b/src/compiler/scala/tools/nsc/util/ClassPath.scala index 21958b77fa44..520d10c73e9e 100644 --- a/src/compiler/scala/tools/nsc/util/ClassPath.scala +++ b/src/compiler/scala/tools/nsc/util/ClassPath.scala @@ -203,17 +203,6 @@ object ClassPath { trait ClassRepresentation { def fileName: String def name: String - /** Low level way to extract the entry name without allocation. 
*/ - final def nameChars(buffer: Array[Char]): Int = { - val ix = fileName.lastIndexOf('.') - val nameLength = if (ix < 0) fileName.length else ix - if (nameLength > buffer.length) - -1 - else { - fileName.getChars(0, fileName.lastIndexOf('.'), buffer, 0) - nameLength - } - } def binary: Option[AbstractFile] def source: Option[AbstractFile] } diff --git a/src/compiler/scala/tools/reflect/FormatInterpolator.scala b/src/compiler/scala/tools/reflect/FormatInterpolator.scala index 1630e44d250d..158ba29c88ad 100644 --- a/src/compiler/scala/tools/reflect/FormatInterpolator.scala +++ b/src/compiler/scala/tools/reflect/FormatInterpolator.scala @@ -69,7 +69,7 @@ abstract class FormatInterpolator { * 5) "...\${smth}%%" => okay, equivalent to "...\${smth}%s%%" * 6) "...\${smth}[%legalJavaConversion]" => okay* * 7) "...\${smth}[%illegalJavaConversion]" => error - * *Legal according to [[https://docs.oracle.com/javase/8/docs/api/java/util/Formatter.html]] + * *Legal according to [[java.util.Formatter]] */ def interpolated(parts: List[Tree], args: List[Tree]) = { val fstring = new StringBuilder diff --git a/src/compiler/scala/tools/reflect/ReflectGlobal.scala b/src/compiler/scala/tools/reflect/ReflectGlobal.scala index 91443b448ee1..4b4de7e96d78 100644 --- a/src/compiler/scala/tools/reflect/ReflectGlobal.scala +++ b/src/compiler/scala/tools/reflect/ReflectGlobal.scala @@ -66,5 +66,7 @@ class ReflectGlobal(currentSettings: Settings, reporter: Reporter, override val override implicit val MirrorTag: ClassTag[Mirror] = ClassTag[Mirror](classOf[Mirror]) override type RuntimeClass = java.lang.Class[_] override implicit val RuntimeClassTag: ClassTag[RuntimeClass] = ClassTag[RuntimeClass](classOf[RuntimeClass]) + + override def openPackageModule(pkgClass: Symbol, force: Boolean): Unit = super.openPackageModule(pkgClass, true) } diff --git a/src/compiler/scala/tools/reflect/ToolBox.scala b/src/compiler/scala/tools/reflect/ToolBox.scala index a8aaf53b9622..fa77e7341c4c 100644 --- a/src/compiler/scala/tools/reflect/ToolBox.scala +++ b/src/compiler/scala/tools/reflect/ToolBox.scala @@ -84,7 +84,7 @@ trait ToolBox[U <: scala.reflect.api.Universe] { * * If `silent` is false, `ToolBoxError` will be thrown in case of an inference error. * If `silent` is true, the typecheck is silent and will return `EmptyTree` if an error occurs. - * Such errors don't vanish and can be inspected by turning on -Xlog-implicits. + * Such errors don't vanish and can be inspected by turning on -Vimplicits. * Unlike in `typecheck`, `silent` is true by default. */ def inferImplicitValue(pt: u.Type, silent: Boolean = true, withMacrosDisabled: Boolean = false, pos: u.Position = u.NoPosition): u.Tree @@ -98,7 +98,7 @@ trait ToolBox[U <: scala.reflect.api.Universe] { * * If `silent` is false, `ToolBoxError` will be thrown in case of an inference error. * If `silent` is true, the typecheck is silent and will return `EmptyTree` if an error occurs. - * Such errors don't vanish and can be inspected by turning on -Xlog-implicits. + * Such errors don't vanish and can be inspected by turning on -Vimplicits. * Unlike in `typecheck`, `silent` is true by default. 
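// The two scaladoc fixes above point users at -Vimplicits instead of -Xlog-implicits. A small usage
// sketch of the API being documented, assuming scala-compiler is on the classpath (it provides
// scala.tools.reflect.ToolBox); the example types are arbitrary.
import scala.reflect.runtime.currentMirror
import scala.reflect.runtime.universe._
import scala.tools.reflect.ToolBox

object InferImplicitSketch extends App {
  val tb = currentMirror.mkToolBox()
  println(tb.inferImplicitValue(typeOf[Ordering[Int]]))            // a tree such as scala.math.Ordering.Int
  println(tb.inferImplicitValue(typeOf[Ordering[Thread]]).isEmpty) // true: silent by default, EmptyTree on failure
}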
*/ def inferImplicitView(tree: u.Tree, from: u.Type, to: u.Type, silent: Boolean = true, withMacrosDisabled: Boolean = false, pos: u.Position = u.NoPosition): u.Tree diff --git a/src/compiler/scala/tools/reflect/ToolBoxFactory.scala b/src/compiler/scala/tools/reflect/ToolBoxFactory.scala index 912c27ee6da5..0af5efeed818 100644 --- a/src/compiler/scala/tools/reflect/ToolBoxFactory.scala +++ b/src/compiler/scala/tools/reflect/ToolBoxFactory.scala @@ -53,7 +53,7 @@ abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf => extends ReflectGlobal(settings, reporter0, toolBoxSelf.classLoader) { import definitions._ - private val trace = scala.tools.nsc.util.trace when settings.debug.value + private val trace = scala.tools.nsc.util.trace when settings.isDebug private var wrapCount = 0 @@ -268,7 +268,7 @@ abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf => val msym = wrapInPackageAndCompile(mdef.name, mdef) val className = msym.fullName - if (settings.debug) println("generated: "+className) + if (settings.isDebug) println("generated: "+className) def moduleFileName(className: String) = className + "$" val jclazz = jClass.forName(moduleFileName(className), true, classLoader) val jmeth = jclazz.getDeclaredMethods.find(_.getName == wrapperMethodName).get diff --git a/src/compiler/scala/tools/reflect/WrappedProperties.scala b/src/compiler/scala/tools/reflect/WrappedProperties.scala index 2ed0e459da0e..76caefb3c597 100644 --- a/src/compiler/scala/tools/reflect/WrappedProperties.scala +++ b/src/compiler/scala/tools/reflect/WrappedProperties.scala @@ -47,6 +47,7 @@ trait WrappedProperties extends PropertiesTrait { object WrappedProperties { object AccessControl extends WrappedProperties { + @annotation.nowarn("cat=deprecation") // AccessControlException is deprecated on JDK 17 def wrap[T](body: => T) = try Some(body) catch { case _: AccessControlException => None } } } diff --git a/src/compiler/scala/tools/tasty/TastyFlags.scala b/src/compiler/scala/tools/tasty/TastyFlags.scala index 0041a3e3f632..f4e66b066c50 100644 --- a/src/compiler/scala/tools/tasty/TastyFlags.scala +++ b/src/compiler/scala/tools/tasty/TastyFlags.scala @@ -47,8 +47,7 @@ object TastyFlags { final val Deferred = Param.next final val Method = Deferred.next final val Erased = Method.next - final val Internal = Erased.next - final val Inline = Internal.next + final val Inline = Erased.next final val InlineProxy = Inline.next final val Opaque = InlineProxy.next final val Extension = Opaque.next @@ -60,8 +59,7 @@ object TastyFlags { final val Open = Enum.next final val ParamAlias = Open.next final val Infix = ParamAlias.next - - private[TastyFlags] final val maxFlag: Long = ParamAlias.shift + final val Invisible = Infix.next def optFlag(cond: Boolean)(flag: TastyFlagSet): TastyFlagSet = if (cond) flag else EmptyTastyFlags @@ -125,7 +123,6 @@ object TastyFlags { if (is(Deferred)) sb += "Deferred" if (is(Method)) sb += "Method" if (is(Erased)) sb += "Erased" - if (is(Internal)) sb += "Internal" if (is(Inline)) sb += "Inline" if (is(InlineProxy)) sb += "InlineProxy" if (is(Opaque)) sb += "Opaque" @@ -138,24 +135,10 @@ object TastyFlags { if (is(Open)) sb += "Open" if (is(ParamAlias)) sb += "ParamAlias" if (is(Infix)) sb += "Infix" + if (is(Invisible)) sb += "Invisible" sb.mkString(" | ") } } } - case class SingletonSets(val toLong: Long) extends AnyVal { - def map[A](f: TastyFlagSet => A): Iterable[A] = { - val buf = Iterable.newBuilder[A] - val orig = TastyFlagSet(toLong) - var flag = EmptyTastyFlags - 
while (flag.shift <= maxFlag) { - flag = flag.next - if (orig.is(flag)) { - buf += f(flag) - } - } - buf.result() - } - } - } diff --git a/src/compiler/scala/tools/tasty/TastyFormat.scala b/src/compiler/scala/tools/tasty/TastyFormat.scala index 7aae96aebc15..d62fdfef6434 100644 --- a/src/compiler/scala/tools/tasty/TastyFormat.scala +++ b/src/compiler/scala/tools/tasty/TastyFormat.scala @@ -25,7 +25,7 @@ object TastyFormat { /**Natural number. Each increment of the `MajorVersion` begins a * new series of backward compatible TASTy versions. * - * A TASTy file in either the preceeding or succeeding series is + * A TASTy file in either the preceding or succeeding series is * incompatible with the current value. */ final val MajorVersion: Int = 28 @@ -33,9 +33,9 @@ object TastyFormat { /**Natural number. Each increment of the `MinorVersion`, within * a series declared by the `MajorVersion`, breaks forward * compatibility, but remains backwards compatible, with all - * preceeding `MinorVersion`. + * preceding `MinorVersion`. */ - final val MinorVersion: Int = 0 + final val MinorVersion: Int = 1 /**Natural Number. The `ExperimentalVersion` allows for * experimentation with changes to TASTy without committing @@ -51,14 +51,31 @@ object TastyFormat { * is able to read final TASTy documents if the file's * `MinorVersion` is strictly less than the current value. */ - final val ExperimentalVersion: Int = 1 + final val ExperimentalVersion: Int = 0 /**This method implements a binary relation (`<:<`) between two TASTy versions. + * * We label the lhs `file` and rhs `compiler`. * if `file <:< compiler` then the TASTy file is valid to be read. * - * TASTy versions have a partial order, - * for example `a <:< b` and `b <:< a` are both false if `a` and `b` have different major versions. + * A TASTy version, e.g. `v := 28.0-3` is composed of three fields: + * - v.major == 28 + * - v.minor == 0 + * - v.experimental == 3 + * + * TASTy versions have a partial order, for example, + * `a <:< b` and `b <:< a` are both false if + * - `a` and `b` have different `major` fields. + * - `a` and `b` have the same `major` & `minor` fields, + * but different `experimental` fields, both non-zero. + * + * A TASTy version with a zero value for its `experimental` field + * is considered to be stable. Files with a stable TASTy version + * can be read by a compiler with an unstable TASTy version, + * (where the compiler's TASTy version has a higher `minor` field). + * + * A compiler with a stable TASTy version can never read a file + * with an unstable TASTy version. * * We follow the given algorithm: * ``` @@ -187,7 +204,6 @@ object TastyFormat { final val TRUEconst = 4 final val NULLconst = 5 final val PRIVATE = 6 - final val INTERNAL = 7 final val PROTECTED = 8 final val ABSTRACT = 9 final val FINAL = 10 @@ -223,8 +239,9 @@ object TastyFormat { final val PARAMalias = 41 final val TRANSPARENT = 42 final val INFIX = 43 - final val EMPTYCLAUSE = 44 - final val SPLITCLAUSE = 45 + final val INVISIBLE = 44 + final val EMPTYCLAUSE = 45 + final val SPLITCLAUSE = 46 // Cat. 
2: tag Nat @@ -351,7 +368,6 @@ object TastyFormat { def isModifierTag(tag: Int): Boolean = tag match { case PRIVATE - | INTERNAL | PROTECTED | ABSTRACT | FINAL @@ -387,6 +403,7 @@ object TastyFormat { | PARAMalias | EXPORTED | OPEN + | INVISIBLE | ANNOTATION | PRIVATEqualified | PROTECTEDqualified => true @@ -414,7 +431,6 @@ object TastyFormat { case TRUEconst => "TRUEconst" case NULLconst => "NULLconst" case PRIVATE => "PRIVATE" - case INTERNAL => "INTERNAL" case PROTECTED => "PROTECTED" case ABSTRACT => "ABSTRACT" case FINAL => "FINAL" @@ -449,6 +465,7 @@ object TastyFormat { case PARAMsetter => "PARAMsetter" case EXPORTED => "EXPORTED" case OPEN => "OPEN" + case INVISIBLE => "INVISIBLE" case PARAMalias => "PARAMalias" case EMPTYCLAUSE => "EMPTYCLAUSE" case SPLITCLAUSE => "SPLITCLAUSE" diff --git a/src/compiler/scala/tools/tasty/TastyHeaderUnpickler.scala b/src/compiler/scala/tools/tasty/TastyHeaderUnpickler.scala index 783fc41bb5c5..546cdc15e23c 100644 --- a/src/compiler/scala/tools/tasty/TastyHeaderUnpickler.scala +++ b/src/compiler/scala/tools/tasty/TastyHeaderUnpickler.scala @@ -69,8 +69,6 @@ class TastyHeaderUnpickler(reader: TastyReader) { } } - def isAtEnd: Boolean = reader.isAtEnd - private def check(cond: Boolean, msg: => String): Unit = { if (!cond) throw new UnpickleException(msg) } diff --git a/src/intellij/scala.ipr.SAMPLE b/src/intellij/scala.ipr.SAMPLE index c6b626692a57..292cce8c2a87 100644 --- a/src/intellij/scala.ipr.SAMPLE +++ b/src/intellij/scala.ipr.SAMPLE @@ -232,7 +232,7 @@ - + @@ -243,7 +243,7 @@ - + @@ -252,7 +252,7 @@ - + @@ -266,7 +266,7 @@ - + @@ -287,7 +287,7 @@ - + @@ -296,14 +296,14 @@ - + - + @@ -312,7 +312,7 @@ - + @@ -325,13 +325,13 @@ - + - + - - + + @@ -352,10 +352,9 @@ - - + @@ -369,50 +368,45 @@ - - + + - - + - - - + - - + - + - + - - + @@ -420,7 +414,6 @@ - @@ -435,18 +428,17 @@ - - + + - - - + + - + @@ -456,7 +448,7 @@ - + @@ -465,7 +457,7 @@ - + @@ -475,7 +467,7 @@ - + @@ -506,7 +498,7 @@ - + @@ -522,7 +514,7 @@ - + @@ -533,7 +525,7 @@ - + diff --git a/src/interactive/scala/tools/nsc/interactive/Global.scala b/src/interactive/scala/tools/nsc/interactive/Global.scala index c99fe6637aff..a72adb3274e9 100644 --- a/src/interactive/scala/tools/nsc/interactive/Global.scala +++ b/src/interactive/scala/tools/nsc/interactive/Global.scala @@ -398,6 +398,15 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "") val platform: Global.this.platform.type = Global.this.platform } with BrowsingLoaders + override def openPackageModule(pkgClass: Symbol, force: Boolean): Unit = { + val isPastNamer = force || currentTyperRun == null || (currentTyperRun.currentUnit match { + case unit: RichCompilationUnit => unit.isParsed + case _ => true + }) + if (isPastNamer) super.openPackageModule(pkgClass, true) + else analyzer.packageObjects.deferredOpen.add(pkgClass) + } + // ----------------- Polling --------------------------------------- case class WorkEvent(atNode: Int, atMillis: Long) @@ -1197,54 +1206,36 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "") override def positionDelta = 0 override def forImport: Boolean = false } - private val CamelRegex = "([A-Z][^A-Z]*)".r - private def camelComponents(s: String, allowSnake: Boolean): List[String] = { - if (allowSnake && s.forall(c => c.isUpper || c == '_')) s.split('_').toList.filterNot(_.isEmpty) - else CamelRegex.findAllIn("X" + s).toList match { case head :: tail => head.drop(1) :: tail; case Nil => Nil } - } - def camelMatch(entered: Name): Name => Boolean = { 
- val enteredS = entered.toString - val enteredLowercaseSet = enteredS.toLowerCase().toSet - val allowSnake = !enteredS.contains('_') - - { - candidate: Name => - def candidateChunks = camelComponents(candidate.dropLocal.toString, allowSnake) - // Loosely based on IntelliJ's autocompletion: the user can just write everything in - // lowercase, as we'll let `isl` match `GenIndexedSeqLike` or `isLovely`. - def lenientMatch(entered: String, candidate: List[String], matchCount: Int): Boolean = { - candidate match { - case Nil => entered.isEmpty && matchCount > 0 - case head :: tail => - val enteredAlternatives = Set(entered, entered.capitalize) - val n = head.toIterable.lazyZip(entered).count {case (c, e) => c == e || (c.isUpper && c == e.toUpper)} - head.take(n).inits.exists(init => - enteredAlternatives.exists(entered => - lenientMatch(entered.stripPrefix(init), tail, matchCount + (if (init.isEmpty) 0 else 1)) - ) - ) - } - } - val containsAllEnteredChars = { - // Trying to rule out some candidates quickly before the more expensive `lenientMatch` - val candidateLowercaseSet = candidate.toString.toLowerCase().toSet - enteredLowercaseSet.diff(candidateLowercaseSet).isEmpty - } - containsAllEnteredChars && lenientMatch(enteredS, candidateChunks, 0) - } - } } final def completionsAt(pos: Position): CompletionResult = { val focus1: Tree = typedTreeAt(pos) def typeCompletions(tree: Tree, qual: Tree, nameStart: Int, name: Name): CompletionResult = { val qualPos = qual.pos - val allTypeMembers = typeMembers(qualPos).last + val saved = tree.tpe + // Force `typeMembers` to complete via the prefix, not the type of the Select itself. + tree.setType(ErrorType) + val allTypeMembers = try { + typeMembers(qualPos).last + } finally { + tree.setType(saved) + } val positionDelta: Int = pos.start - nameStart val subName: Name = name.newName(new String(pos.source.content, nameStart, pos.start - nameStart)).encodedName CompletionResult.TypeMembers(positionDelta, qual, tree, allTypeMembers, subName) } focus1 match { + case Apply(Select(qual, name), _) if qual.hasAttachment[InterpolatedString.type] => + // This special case makes CompletionTest.incompleteStringInterpolation work. + // In incomplete code, the parser treats `foo""` as a nested string interpolation, even + // though it is likely that the user wanted to complete `fooBar` before adding the closing brace. + // val fooBar = 42; s"abc ${foo" + // + // TODO: We could also complete the selection here to expand `ra"..."` to `raw"..."`. 
+ val allMembers = scopeMembers(pos) + val positionDelta: Int = pos.start - focus1.pos.start + val subName = name.subName(0, positionDelta) + CompletionResult.ScopeMembers(positionDelta, allMembers, subName, forImport = false) case imp@Import(i @ Ident(name), head :: Nil) if head.name == nme.ERROR => val allMembers = scopeMembers(pos) val nameStart = i.pos.start @@ -1259,9 +1250,13 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "") } case sel@Select(qual, name) => val qualPos = qual.pos - def fallback = qualPos.end + 2 + val effectiveQualEnd = if (qualPos.isRange) qualPos.end else qualPos.point - 1 + def fallback = { + effectiveQualEnd + 2 + } val source = pos.source - val nameStart: Int = (focus1.pos.end - 1 to qualPos.end by -1).find(p => + + val nameStart: Int = (focus1.pos.end - 1 to effectiveQualEnd by -1).find(p => source.identifier(source.position(p)).exists(_.length == 0) ).map(_ + 1).getOrElse(fallback) typeCompletions(sel, qual, nameStart, name) diff --git a/src/interactive/scala/tools/nsc/interactive/REPL.scala b/src/interactive/scala/tools/nsc/interactive/REPL.scala index 8fb23516e734..300cf38b3ad0 100644 --- a/src/interactive/scala/tools/nsc/interactive/REPL.scala +++ b/src/interactive/scala/tools/nsc/interactive/REPL.scala @@ -57,7 +57,7 @@ object REPL { } } catch { case ex @ FatalError(msg) => - if (true || command.settings.debug) // !!! + if (true || command.settings.isDebug) // !!! ex.printStackTrace() reporter.error(null, "fatal error: " + msg) } diff --git a/src/library-aux/scala/Any.scala b/src/library-aux/scala/Any.scala index a38b563d9bf0..d514aea60a2f 100644 --- a/src/library-aux/scala/Any.scala +++ b/src/library-aux/scala/Any.scala @@ -110,13 +110,29 @@ abstract class Any { */ final def ## : Int = sys.error("##") - /** Test whether the dynamic type of the receiver object is `T0`. - * - * Note that the result of the test is modulo Scala's erasure semantics. - * Therefore the expression `1.isInstanceOf[String]` will return `false`, while the - * expression `List(1).isInstanceOf[List[String]]` will return `true`. - * In the latter example, because the type argument is erased as part of compilation it is - * not possible to check whether the contents of the list are of the specified type. + /** Test whether the dynamic type of the receiver object has the same erasure as `T0`. + * + * Depending on what `T0` is, the test is done in one of the below ways: + * + * - `T0` is a non-parameterized class type, e.g. `BigDecimal`: this method returns `true` if + * the value of the receiver object is a `BigDecimal` or a subtype of `BigDecimal`. + * - `T0` is a parameterized class type, e.g. `List[Int]`: this method returns `true` if + * the value of the receiver object is some `List[X]` for any `X`. + * For example, `List(1, 2, 3).isInstanceOf[List[String]]` will return true. + * - `T0` is some singleton type `x.type` or literal `x`: this method returns `this.eq(x)`. + * For example, `x.isInstanceOf[1]` is equivalent to `x.eq(1)` + * - `T0` is an intersection `X with Y` or `X & Y: this method is equivalent to `x.isInstanceOf[X] && x.isInstanceOf[Y]` + * - `T0` is a union `X | Y`: this method is equivalent to `x.isInstanceOf[X] || x.isInstanceOf[Y]` + * - `T0` is a type parameter or an abstract type member: this method is equivalent + * to `isInstanceOf[U]` where `U` is `T0`'s upper bound, `Any` if `T0` is unbounded. + * For example, `x.isInstanceOf[A]` where `A` is an unbounded type parameter + * will return true for any value of `x`. 
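// The reworked scaladoc above describes isInstanceOf under erasure. A few concrete checks matching
// it; the List[String] test compiles with the unchecked warning the note above mentions.
object IsInstanceOfSketch extends App {
  val xs: Any = List(1, 2, 3)
  println(xs.isInstanceOf[List[String]])             // true: only the List erasure is tested
  println(xs.isInstanceOf[Vector[Int]])              // false: different erased class
  println(("abc": Any).isInstanceOf[Comparable[_]])  // true: String implements Comparable
}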
+ * + * This is exactly equivalent to the type pattern `_: T0` + * + * @note due to the unexpectedness of `List(1, 2, 3).isInstanceOf[List[String]]` returning true and + * `x.isInstanceOf[A]` where `A` is a type parameter or abstract member returning true, + * these forms issue a warning. * * @return `true` if the receiver object is an instance of erasure of type `T0`; `false` otherwise. */ diff --git a/src/library/scala/Enumeration.scala b/src/library/scala/Enumeration.scala index 7b6d77827e72..831ad8682fcc 100644 --- a/src/library/scala/Enumeration.scala +++ b/src/library/scala/Enumeration.scala @@ -310,8 +310,8 @@ abstract class Enumeration (initial: Int) extends Serializable { override protected def fromSpecific(coll: IterableOnce[Value]): ValueSet = ValueSet.fromSpecific(coll) override protected def newSpecificBuilder = ValueSet.newBuilder - def map(f: Value => Value): ValueSet = fromSpecific(new View.Map(toIterable, f)) - def flatMap(f: Value => IterableOnce[Value]): ValueSet = fromSpecific(new View.FlatMap(toIterable, f)) + def map(f: Value => Value): ValueSet = fromSpecific(new View.Map(this, f)) + def flatMap(f: Value => IterableOnce[Value]): ValueSet = fromSpecific(new View.FlatMap(this, f)) // necessary for disambiguation: override def map[B](f: Value => B)(implicit @implicitNotFound(ValueSet.ordMsg) ev: Ordering[B]): immutable.SortedSet[B] = diff --git a/src/library/scala/PartialFunction.scala b/src/library/scala/PartialFunction.scala index 625285fd93fd..d6092990446a 100644 --- a/src/library/scala/PartialFunction.scala +++ b/src/library/scala/PartialFunction.scala @@ -14,17 +14,16 @@ package scala import scala.annotation.nowarn - /** A partial function of type `PartialFunction[A, B]` is a unary function * where the domain does not necessarily include all values of type `A`. - * The function `isDefinedAt` allows to test dynamically if a value is in + * The function [[isDefinedAt]] allows to test dynamically if a value is in * the domain of the function. * * Even if `isDefinedAt` returns true for an `a: A`, calling `apply(a)` may * still throw an exception, so the following code is legal: * * {{{ - * val f: PartialFunction[Int, Any] = { case _ => 1/0 } + * val f: PartialFunction[Int, Any] = { case x => x / 0 } // ArithmeticException: / by zero * }}} * * It is the responsibility of the caller to call `isDefinedAt` before @@ -32,26 +31,52 @@ import scala.annotation.nowarn * `apply` will throw an exception to indicate an error condition. If an * exception is not thrown, evaluation may result in an arbitrary value. * + * The usual way to respect this contract is to call [[applyOrElse]], + * which is expected to be more efficient than calling both `isDefinedAt` + * and `apply`. + * * The main distinction between `PartialFunction` and [[scala.Function1]] is * that the user of a `PartialFunction` may choose to do something different * with input that is declared to be outside its domain. For example: * * {{{ * val sample = 1 to 10 - * val isEven: PartialFunction[Int, String] = { - * case x if x % 2 == 0 => x+" is even" + * def isEven(n: Int) = n % 2 == 0 + * val eveningNews: PartialFunction[Int, String] = { + * case x if isEven(x) => s"\$x is even" + * } + * + * // The method collect is described as "filter + map" + * // because it uses a PartialFunction to select elements + * // to which the function is applied. 
+ * val evenNumbers = sample.collect(eveningNews) + * + * val oddlyEnough: PartialFunction[Int, String] = { + * case x if !isEven(x) => s"\$x is odd" * } * - * // the method collect can use isDefinedAt to select which members to collect - * val evenNumbers = sample collect isEven + * // The method orElse allows chaining another PartialFunction + * // to handle input outside the declared domain. + * val numbers = sample.map(eveningNews orElse oddlyEnough) + * + * // same as + * val numbers = sample.map(n => eveningNews.applyOrElse(n, oddlyEnough)) * - * val isOdd: PartialFunction[Int, String] = { - * case x if x % 2 == 1 => x+" is odd" + * val half: PartialFunction[Int, Int] = { + * case x if isEven(x) => x / 2 * } * - * // the method orElse allows chaining another partial function to handle - * // input outside the declared domain - * val numbers = sample map (isEven orElse isOdd) + * // Calculating the domain of a composition can be expensive. + * val oddByHalf = half.andThen(oddlyEnough) + * + * // Invokes `half.apply` on even elements! + * val oddBalls = sample.filter(oddByHalf.isDefinedAt) + * + * // Better than filter(oddByHalf.isDefinedAt).map(oddByHalf) + * val oddBalls = sample.collect(oddByHalf) + * + * // Providing "default" values. + * val oddsAndEnds = sample.map(n => oddByHalf.applyOrElse(n, (i: Int) => s"[\$i]")) * }}} * * @note Optional [[Function]]s, [[PartialFunction]]s and extractor objects @@ -63,6 +88,10 @@ import scala.annotation.nowarn * | from optional [[Function]] | [[Function1.UnliftOps#unlift]] or [[Function.unlift]] | [[Predef.identity]] | [[Function1.UnliftOps#unlift]] | * | from an extractor | `{ case extractor(x) => x }` | `extractor.unapply _` | [[Predef.identity]] | *   + * + * @define applyOrElseOrElse Note that calling [[isDefinedAt]] on the resulting partial function + * may apply the first partial function and execute its side effect. + * For efficiency, it is recommended to call [[applyOrElse]] instead of [[isDefinedAt]] or [[apply]]. */ trait PartialFunction[-A, +B] extends (A => B) { self => import PartialFunction._ @@ -125,9 +154,7 @@ trait PartialFunction[-A, +B] extends (A => B) { self => * Composes this partial function with another partial function that * gets applied to results of this partial function. * - * Note that calling [[isDefinedAt]] on the resulting partial function may apply the first - * partial function and execute its side effect. It is highly recommended to call [[applyOrElse]] - * instead of [[isDefinedAt]] / [[apply]] for efficiency. + * $applyOrElseOrElse * * @param k the transformation function * @tparam C the result type of the transformation function. @@ -141,9 +168,7 @@ trait PartialFunction[-A, +B] extends (A => B) { self => * Composes another partial function `k` with this partial function so that this * partial function gets applied to results of `k`. * - * Note that calling [[isDefinedAt]] on the resulting partial function may apply the first - * partial function and execute its side effect. It is highly recommended to call [[applyOrElse]] - * instead of [[isDefinedAt]] / [[apply]] for efficiency. + * $applyOrElseOrElse * * @param k the transformation function * @tparam R the parameter type of the transformation function. @@ -353,25 +378,26 @@ object PartialFunction { */ def empty[A, B] : PartialFunction[A, B] = empty_pf - /** Creates a Boolean test based on a value and a partial function. - * It behaves like a 'match' statement with an implied 'case _ => false' - * following the supplied cases. 
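// [Editor's sketch, not part of the patch] The point behind the $applyOrElseOrElse note above,
// reusing the half / oddlyEnough / oddByHalf names from the example: on a composed
// PartialFunction, isDefinedAt followed by apply evaluates `half` (and its side effects) twice,
// while a single applyOrElse evaluates it once.
object ApplyOrElseSketch extends App {
  var halfCalls = 0
  val half: PartialFunction[Int, Int] = { case x if x % 2 == 0 => halfCalls += 1; x / 2 }
  val oddlyEnough: PartialFunction[Int, String] = { case x if x % 2 == 1 => s"$x is odd" }
  val oddByHalf = half.andThen(oddlyEnough)

  if (oddByHalf.isDefinedAt(6)) oddByHalf(6)      // runs `half` for the test and again for the call
  println(halfCalls)                              // 2

  halfCalls = 0
  oddByHalf.applyOrElse(6, (i: Int) => s"[$i]")   // runs `half` once
  println(halfCalls)                              // 1
}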
+ /** A Boolean test that is the result of the given function where defined, + * and false otherwise. + * + * It behaves like a `case _ => false` were added to the partial function. * * @param x the value to test * @param pf the partial function * @return true, iff `x` is in the domain of `pf` and `pf(x) == true`. */ - def cond[T](x: T)(pf: PartialFunction[T, Boolean]): Boolean = pf.applyOrElse(x, constFalse) + def cond[A](x: A)(pf: PartialFunction[A, Boolean]): Boolean = pf.applyOrElse(x, constFalse) - /** Transforms a PartialFunction[T, U] `pf` into Function1[T, Option[U]] `f` - * whose result is `Some(x)` if the argument is in `pf`'s domain and `None` - * otherwise, and applies it to the value `x`. In effect, it is a - * `'''match'''` statement which wraps all case results in `Some(_)` and - * adds `'''case''' _ => None` to the end. + /** Apply the function to the given value if defined, and return the result + * in a `Some`; otherwise, return `None`. * * @param x the value to test * @param pf the PartialFunction[T, U] * @return `Some(pf(x))` if `pf isDefinedAt x`, `None` otherwise. */ - def condOpt[T,U](x: T)(pf: PartialFunction[T, U]): Option[U] = pf.lift(x) + def condOpt[A, B](x: A)(pf: PartialFunction[A, B]): Option[B] = { + val z = pf.applyOrElse(x, checkFallback[B]) + if (!fallbackOccurred(z)) Some(z) else None + } } diff --git a/src/library/scala/Predef.scala b/src/library/scala/Predef.scala index 6577d5d8e403..2dde93553600 100644 --- a/src/library/scala/Predef.scala +++ b/src/library/scala/Predef.scala @@ -137,9 +137,8 @@ object Predef extends LowPriorityImplicits { @inline def valueOf[T](implicit vt: ValueOf[T]): T = vt.value /** The `String` type in Scala has all the methods of the underlying - * `java.lang.String`, of which it is just an alias. - * (See the documentation corresponding to your Java version, - * for example [[https://docs.oracle.com/javase/8/docs/api/java/lang/String.html]].) + * [[java.lang.String]], of which it is just an alias. + * * In addition, extension methods in [[scala.collection.StringOps]] * are added implicitly through the conversion [[augmentString]]. * @group aliases @@ -367,6 +366,7 @@ object Predef extends LowPriorityImplicits { * Format strings are as for `String.format` * (@see java.lang.String.format). */ + @deprecated("Use `formatString.format(value)` instead of `value.formatted(formatString)`,\nor use the `f\"\"` string interpolator. In Java 15 and later, `formatted` resolves to the new method in String which has reversed parameters.", "2.12.16") @inline def formatted(fmtstr: String): String = fmtstr format self } diff --git a/src/library/scala/Product.scala b/src/library/scala/Product.scala index 240a4d43f5c2..96a2277d736e 100644 --- a/src/library/scala/Product.scala +++ b/src/library/scala/Product.scala @@ -59,7 +59,7 @@ trait Product extends Any with Equals { */ def productElementName(n: Int): String = if (n >= 0 && n < productArity) "" - else throw new IndexOutOfBoundsException(s"$n is out of bounds (min 0, max ${productArity-1}") + else throw new IndexOutOfBoundsException(s"$n is out of bounds (min 0, max ${productArity-1})") /** An iterator over the names of all the elements of this product. 
*/ diff --git a/src/library/scala/SerialVersionUID.scala b/src/library/scala/SerialVersionUID.scala index e92e0d9fbd78..7a0b08f6fa23 100644 --- a/src/library/scala/SerialVersionUID.scala +++ b/src/library/scala/SerialVersionUID.scala @@ -20,7 +20,7 @@ package scala * which the JVM's serialization mechanism uses to determine serialization * compatibility between different versions of a class. * - * @see [[https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html `java.io.Serializable`]] + * @see [[java.io.Serializable]] * @see [[Serializable]] */ @deprecatedInheritance("Scheduled for being final in the future", "2.13.0") diff --git a/src/library/scala/Symbol.scala b/src/library/scala/Symbol.scala index 6cf0b0f3b269..c388bde42a98 100644 --- a/src/library/scala/Symbol.scala +++ b/src/library/scala/Symbol.scala @@ -14,17 +14,11 @@ package scala /** This class provides a simple way to get unique objects for equal strings. * Since symbols are interned, they can be compared using reference equality. - * Instances of `Symbol` can be created easily with Scala's built-in quote - * mechanism. - * - * For instance, the Scala term `'mysym` will - * invoke the constructor of the `Symbol` class in the following way: - * `Symbol("mysym")`. */ final class Symbol private (val name: String) extends Serializable { - /** Converts this symbol to a string. + /** A string representation of this symbol. */ - override def toString(): String = "Symbol(" + name + ")" + override def toString(): String = s"Symbol($name)" @throws(classOf[java.io.ObjectStreamException]) private def readResolve(): Any = Symbol.apply(name) @@ -40,8 +34,7 @@ object Symbol extends UniquenessCache[String, Symbol] { /** This is private so it won't appear in the library API, but * abstracted to offer some hope of reusability. */ -private[scala] abstract class UniquenessCache[K, V >: Null] -{ +private[scala] abstract class UniquenessCache[K, V >: Null] { import java.lang.ref.WeakReference import java.util.WeakHashMap import java.util.concurrent.locks.ReentrantReadWriteLock @@ -82,10 +75,10 @@ private[scala] abstract class UniquenessCache[K, V >: Null] } finally wlock.unlock } - - val res = cached() - if (res == null) updateCache() - else res + cached() match { + case null => updateCache() + case res => res + } } def unapply(other: V): Option[K] = keyFromValue(other) } diff --git a/src/library/scala/annotation/ClassfileAnnotation.scala b/src/library/scala/annotation/ClassfileAnnotation.scala index 29acbc52689a..1f6317427b5e 100644 --- a/src/library/scala/annotation/ClassfileAnnotation.scala +++ b/src/library/scala/annotation/ClassfileAnnotation.scala @@ -13,7 +13,7 @@ package scala.annotation /** A base class for classfile annotations. These are stored as - * [[https://docs.oracle.com/javase/8/docs/technotes/guides/language/annotations.html Java annotations]]] + * [[https://docs.oracle.com/javase/8/docs/technotes/guides/language/annotations.html Java annotations]] * in classfiles. 
*/ @deprecated("Annotation classes need to be written in Java in order to be stored in classfiles in a Java-compatible manner", "2.13.0") diff --git a/src/library/scala/collection/ArrayOps.scala b/src/library/scala/collection/ArrayOps.scala index 370acfce2f1a..a4948ac01f2e 100644 --- a/src/library/scala/collection/ArrayOps.scala +++ b/src/library/scala/collection/ArrayOps.scala @@ -123,7 +123,7 @@ object ArrayOps { private[collection] final class ArrayIterator[@specialized(Specializable.Everything) A](xs: Array[A]) extends AbstractIterator[A] with Serializable { private[this] var pos = 0 private[this] val len = xs.length - override def knownSize = len - pos + override def knownSize: Int = len - pos def hasNext: Boolean = pos < len def next(): A = try { val r = xs(pos) @@ -131,7 +131,12 @@ object ArrayOps { r } catch { case _: ArrayIndexOutOfBoundsException => Iterator.empty.next() } override def drop(n: Int): Iterator[A] = { - if (n > 0) pos = Math.min(xs.length, pos + n) + if (n > 0) { + val newPos = pos + n + pos = + if (newPos < 0 /* overflow */) len + else Math.min(len, newPos) + } this } } @@ -1569,18 +1574,18 @@ final class ArrayOps[A](private val xs: Array[A]) extends AnyVal { * ''n'' times in `that`, then the first ''n'' occurrences of `x` will not form * part of the result, but any following occurrences will. */ - def diff[B >: A](that: Seq[B]): Array[A] = mutable.ArraySeq.make(xs).diff(that).array.asInstanceOf[Array[A]] + def diff[B >: A](that: Seq[B]): Array[A] = mutable.ArraySeq.make(xs).diff(that).toArray[A] /** Computes the multiset intersection between this array and another sequence. - * - * @param that the sequence of elements to intersect with. - * @return a new array which contains all elements of this array - * which also appear in `that`. - * If an element value `x` appears - * ''n'' times in `that`, then the first ''n'' occurrences of `x` will be retained - * in the result, but any following occurrences will be omitted. - */ - def intersect[B >: A](that: Seq[B]): Array[A] = mutable.ArraySeq.make(xs).intersect(that).array.asInstanceOf[Array[A]] + * + * @param that the sequence of elements to intersect with. + * @return a new array which contains all elements of this array + * which also appear in `that`. + * If an element value `x` appears + * ''n'' times in `that`, then the first ''n'' occurrences of `x` will be retained + * in the result, but any following occurrences will be omitted. + */ + def intersect[B >: A](that: Seq[B]): Array[A] = mutable.ArraySeq.make(xs).intersect(that).toArray[A] /** Groups elements in fixed size blocks by passing a "sliding window" * over them (as opposed to partitioning them, as is done in grouped.) @@ -1592,7 +1597,7 @@ final class ArrayOps[A](private val xs: Array[A]) extends AnyVal { * last element (which may be the only element) will be truncated * if there are fewer than `size` elements remaining to be grouped. */ - def sliding(size: Int, step: Int = 1): Iterator[Array[A]] = mutable.ArraySeq.make(xs).sliding(size, step).map(_.array.asInstanceOf[Array[A]]) + def sliding(size: Int, step: Int = 1): Iterator[Array[A]] = mutable.ArraySeq.make(xs).sliding(size, step).map(_.toArray[A]) /** Iterates over combinations. A _combination_ of length `n` is a subsequence of * the original array, with the elements taken in order. 
Thus, `Array("x", "y")` and `Array("y", "y")` @@ -1609,7 +1614,7 @@ final class ArrayOps[A](private val xs: Array[A]) extends AnyVal { * Array("a", "b", "b", "b", "c").combinations(2) == Iterator(Array(a, b), Array(a, c), Array(b, b), Array(b, c)) * }}} */ - def combinations(n: Int): Iterator[Array[A]] = mutable.ArraySeq.make(xs).combinations(n).map(_.array.asInstanceOf[Array[A]]) + def combinations(n: Int): Iterator[Array[A]] = mutable.ArraySeq.make(xs).combinations(n).map(_.toArray[A]) /** Iterates over distinct permutations. * @@ -1618,7 +1623,7 @@ final class ArrayOps[A](private val xs: Array[A]) extends AnyVal { * Array("a", "b", "b").permutations == Iterator(Array(a, b, b), Array(b, a, b), Array(b, b, a)) * }}} */ - def permutations: Iterator[Array[A]] = mutable.ArraySeq.make(xs).permutations.map(_.array.asInstanceOf[Array[A]]) + def permutations: Iterator[Array[A]] = mutable.ArraySeq.make(xs).permutations.map(_.toArray[A]) // we have another overload here, so we need to duplicate this method /** Tests whether this array contains the given sequence at a given index. diff --git a/src/library/scala/collection/BitSet.scala b/src/library/scala/collection/BitSet.scala index ccac61a72a9c..e8ca89806455 100644 --- a/src/library/scala/collection/BitSet.scala +++ b/src/library/scala/collection/BitSet.scala @@ -295,11 +295,11 @@ trait BitSetOps[+C <: BitSet with BitSetOps[C]] * @return a new bitset resulting from applying the given function ''f'' to * each element of this bitset and collecting the results */ - def map(f: Int => Int): C = fromSpecific(new View.Map(toIterable, f)) + def map(f: Int => Int): C = fromSpecific(new View.Map(this, f)) - def flatMap(f: Int => IterableOnce[Int]): C = fromSpecific(new View.FlatMap(toIterable, f)) + def flatMap(f: Int => IterableOnce[Int]): C = fromSpecific(new View.FlatMap(this, f)) - def collect(pf: PartialFunction[Int, Int]): C = fromSpecific(super[SortedSetOps].collect(pf).toIterable) + def collect(pf: PartialFunction[Int, Int]): C = fromSpecific(super[SortedSetOps].collect(pf)) override def partition(p: Int => Boolean): (C, C) = { val left = filter(p) diff --git a/src/library/scala/collection/IndexedSeq.scala b/src/library/scala/collection/IndexedSeq.scala index 18b66b710b07..65a30efe4030 100644 --- a/src/library/scala/collection/IndexedSeq.scala +++ b/src/library/scala/collection/IndexedSeq.scala @@ -47,15 +47,7 @@ trait IndexedSeqOps[+A, +CC[_], +C] extends Any with SeqOps[A, CC, C] { self => s.asInstanceOf[S with EfficientSplit] } - override def reverseIterator: Iterator[A] = new AbstractIterator[A] { - private[this] var i = self.length - def hasNext: Boolean = 0 < i - def next(): A = - if (0 < i) { - i -= 1 - self(i) - } else Iterator.empty.next() - } + override def reverseIterator: Iterator[A] = view.reverseIterator override def foldRight[B](z: B)(op: (A, B) => B): B = { val it = reverseIterator diff --git a/src/library/scala/collection/IndexedSeqView.scala b/src/library/scala/collection/IndexedSeqView.scala index a1b3d4d5e32b..737f032d2060 100644 --- a/src/library/scala/collection/IndexedSeqView.scala +++ b/src/library/scala/collection/IndexedSeqView.scala @@ -49,14 +49,15 @@ trait IndexedSeqView[+A] extends IndexedSeqOps[A, View, View[A]] with SeqView[A] object IndexedSeqView { @SerialVersionUID(3L) - private final class IndexedSeqViewIterator[A](self: IndexedSeqView[A]) extends AbstractIterator[A] with Serializable { + private[collection] class IndexedSeqViewIterator[A](self: IndexedSeqView[A]) extends AbstractIterator[A] with Serializable { 
private[this] var current = 0 - private[this] var remainder = self.size + private[this] var remainder = self.length override def knownSize: Int = remainder - def hasNext = remainder > 0 + @inline private[this] def _hasNext: Boolean = remainder > 0 + def hasNext: Boolean = _hasNext def next(): A = - if (hasNext) { - val r = self.apply(current) + if (_hasNext) { + val r = self(current) current += 1 remainder -= 1 r @@ -82,37 +83,38 @@ object IndexedSeqView { } } @SerialVersionUID(3L) - private final class IndexedSeqViewReverseIterator[A](self: IndexedSeqView[A]) extends AbstractIterator[A] with Serializable { - private[this] var pos = self.size - 1 - private[this] var remainder = self.size - def hasNext: Boolean = remainder > 0 + private[collection] class IndexedSeqViewReverseIterator[A](self: IndexedSeqView[A]) extends AbstractIterator[A] with Serializable { + private[this] var remainder = self.length + private[this] var pos = remainder - 1 + @inline private[this] def _hasNext: Boolean = remainder > 0 + def hasNext: Boolean = _hasNext def next(): A = - if (pos < 0) throw new NoSuchElementException - else { + if (_hasNext) { val r = self(pos) pos -= 1 remainder -= 1 r - } + } else Iterator.empty.next() - override def drop(n: Int): Iterator[A] = { - if (n > 0) { - pos -= n - remainder = Math.max(0, remainder - n) + // from < 0 means don't move pos, until < 0 means don't limit remainder + // + override protected def sliceIterator(from: Int, until: Int): Iterator[A] = { + if (_hasNext) { + if (remainder <= from) remainder = 0 // exhausted by big skip + else if (from <= 0) { // no skip, pos is same + if (until >= 0 && until < remainder) remainder = until // ...limited by until + } + else { + pos -= from // skip ahead + if (until >= 0 && until < remainder) { // ...limited by until + if (until <= from) remainder = 0 // ...exhausted if limit is smaller than skip + else remainder = until - from // ...limited by until, less the skip + } + else remainder -= from // ...otherwise just less the skip + } } this } - - - override def sliceIterator(from: Int, until: Int): Iterator[A] = { - val startCutoff = pos - val untilCutoff = startCutoff - remainder + 1 - val nextStartCutoff = if (from < 0) startCutoff else if (startCutoff - from < 0) 0 else startCutoff - from - val nextUntilCutoff = if (until < 0) startCutoff else if (startCutoff - until < untilCutoff) untilCutoff else startCutoff - until + 1 - remainder = Math.max(0, nextStartCutoff - nextUntilCutoff + 1) - pos = nextStartCutoff - this - } } /** An `IndexedSeqOps` whose collection type and collection type constructor are unknown */ diff --git a/src/library/scala/collection/Iterable.scala b/src/library/scala/collection/Iterable.scala index c76b1c9f6f2d..db4f7b919943 100644 --- a/src/library/scala/collection/Iterable.scala +++ b/src/library/scala/collection/Iterable.scala @@ -13,6 +13,7 @@ package scala package collection +import scala.annotation.nowarn import scala.annotation.unchecked.uncheckedVariance import scala.collection.mutable.Builder import scala.collection.View.{LeftPartitionMapped, RightPartitionMapped} @@ -29,6 +30,7 @@ trait Iterable[+A] extends IterableOnce[A] with IterableFactoryDefaults[A, Iterable] { // The collection itself + @deprecated("toIterable is internal and will be made protected; its name is similar to `toList` or `toSeq`, but it doesn't copy non-immutable collections", "2.13.7") final def toIterable: this.type = this final protected def coll: this.type = this @@ -133,13 +135,15 @@ trait IterableOps[+A, +CC[_], +C] extends 
Any with IterableOnce[A] with Iterable /** * @return This collection as an `Iterable[A]`. No new collection will be built if `this` is already an `Iterable[A]`. */ + // Should be `protected def asIterable`, or maybe removed altogether if it's not needed + @deprecated("toIterable is internal and will be made protected; its name is similar to `toList` or `toSeq`, but it doesn't copy non-immutable collections", "2.13.7") def toIterable: Iterable[A] /** Converts this $coll to an unspecified Iterable. Will return * the same collection if this instance is already Iterable. * @return An Iterable containing all elements of this $coll. */ - @deprecated("Use toIterable instead", "2.13.0") + @deprecated("toTraversable is internal and will be made protected; its name is similar to `toList` or `toSeq`, but it doesn't copy non-immutable collections", "2.13.0") final def toTraversable: Traversable[A] = toIterable override def isTraversableAgain: Boolean = true @@ -830,7 +834,10 @@ trait IterableOps[+A, +CC[_], +C] extends Any with IterableOnce[A] with Iterable // A helper for tails and inits. private[this] def iterateUntilEmpty(f: Iterable[A] => Iterable[A]): Iterator[C] = { - val it = Iterator.iterate(toIterable)(f).takeWhile(_.nonEmpty) + // toIterable ties the knot between `this: IterableOnceOps[A, CC, C]` and `this.tail: C` + // `this.tail.tail` doesn't compile as `C` is unbounded + // `Iterable.from(this)` would eagerly copy non-immutable collections + val it = Iterator.iterate(toIterable: @nowarn("cat=deprecation"))(f).takeWhile(_.nonEmpty) (it ++ Iterator.single(Iterable.empty)).map(fromSpecific) } @@ -994,7 +1001,7 @@ trait MapFactoryDefaults[K, +V, override protected def newSpecificBuilder: mutable.Builder[(K, V @uncheckedVariance), CC[K, V @uncheckedVariance]] = mapFactory.newBuilder[K, V] override def empty: CC[K, V @uncheckedVariance] = (this: AnyRef) match { // Implemented here instead of in TreeSeqMap since overriding empty in TreeSeqMap is not forwards compatible (should be moved) - case self: immutable.TreeSeqMap[K, V] => immutable.TreeSeqMap.empty(self.orderedBy).asInstanceOf[CC[K, V]] + case self: immutable.TreeSeqMap[_, _] => immutable.TreeSeqMap.empty(self.orderedBy).asInstanceOf[CC[K, V]] case _ => mapFactory.empty } diff --git a/src/library/scala/collection/IterableOnce.scala b/src/library/scala/collection/IterableOnce.scala index acea80075acc..a9ab03a00117 100644 --- a/src/library/scala/collection/IterableOnce.scala +++ b/src/library/scala/collection/IterableOnce.scala @@ -325,7 +325,7 @@ trait IterableOnceOps[+A, +CC[_], +C] extends Any { this: IterableOnce[A] => /** Selects all elements of this $coll which satisfy a predicate. * * @param p the predicate used to test elements. - * @return a new iterator consisting of all elements of this $coll that satisfy the given + * @return a new $coll consisting of all elements of this $coll that satisfy the given * predicate `p`. The order of the elements is preserved. 
*/ def filter(p: A => Boolean): C diff --git a/src/library/scala/collection/Iterator.scala b/src/library/scala/collection/Iterator.scala index 572dc4078f63..1970d3babb62 100644 --- a/src/library/scala/collection/Iterator.scala +++ b/src/library/scala/collection/Iterator.scala @@ -409,9 +409,9 @@ trait Iterator[+A] extends IterableOnce[A] with IterableOnceOps[A, Iterator, Ite def indexWhere(p: A => Boolean, from: Int = 0): Int = { var i = math.max(from, 0) - drop(from) - while (hasNext) { - if (p(next())) return i + val dropped = drop(from) + while (dropped.hasNext) { + if (p(dropped.next())) return i i += 1 } -1 @@ -498,7 +498,7 @@ trait Iterator[+A] extends IterableOnce[A] with IterableOnceOps[A, Iterator, Ite */ def withFilter(p: A => Boolean): Iterator[A] = filter(p) - def collect[B](pf: PartialFunction[A, B]): Iterator[B] = new AbstractIterator[B] { + def collect[B](pf: PartialFunction[A, B]): Iterator[B] = new AbstractIterator[B] with (A => B) { // Manually buffer to avoid extra layer of wrapping with buffered private[this] var hd: B = _ @@ -508,12 +508,14 @@ trait Iterator[+A] extends IterableOnce[A] with IterableOnceOps[A, Iterator, Ite // BE REALLY CAREFUL TO KEEP COMMENTS AND NUMBERS IN SYNC! private[this] var status = 0/*Seek*/ + def apply(value: A): B = Statics.pfMarker.asInstanceOf[B] + def hasNext = { val marker = Statics.pfMarker while (status == 0/*Seek*/) { if (self.hasNext) { val x = self.next() - val v = pf.applyOrElse(x, ((x: A) => marker).asInstanceOf[A => B]) + val v = pf.applyOrElse(x, this) if (marker ne v.asInstanceOf[AnyRef]) { hd = v status = 1/*Found*/ @@ -633,14 +635,7 @@ trait Iterator[+A] extends IterableOnce[A] with IterableOnceOps[A, Iterator, Ite def next() = if (hasNext) { hdDefined = false; hd } else Iterator.empty.next() } - def drop(n: Int): Iterator[A] = { - var i = 0 - while (i < n && hasNext) { - next() - i += 1 - } - this - } + def drop(n: Int): Iterator[A] = sliceIterator(n, -1) def dropWhile(p: A => Boolean): Iterator[A] = new AbstractIterator[A] { // Magic value: -1 = hasn't dropped, 0 = found first, 1 = defer to parent iterator @@ -905,31 +900,37 @@ trait Iterator[+A] extends IterableOnce[A] with IterableOnceOps[A, Iterator, Ite def patch[B >: A](from: Int, patchElems: Iterator[B], replaced: Int): Iterator[B] = new AbstractIterator[B] { private[this] var origElems = self - private[this] var i = if (from > 0) from else 0 // Counts down, switch to patch on 0, -1 means use patch first - def hasNext: Boolean = { - if (i == 0) { + // > 0 => that many more elems from `origElems` before switching to `patchElems` + // 0 => need to drop elems from `origElems` and start using `patchElems` + // -1 => have dropped elems from `origElems`, will be using `patchElems` until it's empty + // and then using what's left of `origElems` after the drop + private[this] var state = if (from > 0) from else 0 + + // checks state and handles 0 => -1 + @inline private[this] def switchToPatchIfNeeded(): Unit = + if (state == 0) { origElems = origElems drop replaced - i = -1 + state = -1 } + + def hasNext: Boolean = { + switchToPatchIfNeeded() origElems.hasNext || patchElems.hasNext } def next(): B = { - if (i == 0) { - origElems = origElems drop replaced - i = -1 - } - if (i < 0) { + switchToPatchIfNeeded() + if (state < 0 /* == -1 */) { if (patchElems.hasNext) patchElems.next() else origElems.next() } else { if (origElems.hasNext) { - i -= 1 + state -= 1 origElems.next() } else { - i = -1 + state = -1 patchElems.next() } } @@ -964,6 +965,7 @@ object Iterator extends 
IterableFactory[Iterator] { def hasNext = false def next() = throw new NoSuchElementException("next on empty iterator") override def knownSize: Int = 0 + override protected def sliceIterator(from: Int, until: Int) = this } /** Creates a target $coll from an existing source collection @@ -981,6 +983,9 @@ object Iterator extends IterableFactory[Iterator] { private[this] var consumed: Boolean = false def hasNext = !consumed def next() = if (consumed) empty.next() else { consumed = true; a } + override protected def sliceIterator(from: Int, until: Int) = + if (consumed || from > 0 || until == 0) empty + else this } override def apply[A](xs: A*): Iterator[A] = xs.iterator diff --git a/src/library/scala/collection/LinearSeq.scala b/src/library/scala/collection/LinearSeq.scala index 9934f3279a3c..7dc67096fbfd 100644 --- a/src/library/scala/collection/LinearSeq.scala +++ b/src/library/scala/collection/LinearSeq.scala @@ -248,7 +248,7 @@ trait LinearSeqOps[+A, +CC[X] <: LinearSeq[X], +C <: LinearSeq[A] with LinearSeq trait StrictOptimizedLinearSeqOps[+A, +CC[X] <: LinearSeq[X], +C <: LinearSeq[A] with StrictOptimizedLinearSeqOps[A, CC, C]] extends Any with LinearSeqOps[A, CC, C] with StrictOptimizedSeqOps[A, CC, C] { // A more efficient iterator implementation than the default LinearSeqIterator override def iterator: Iterator[A] = new AbstractIterator[A] { - private[this] var current: Iterable[A] = toIterable + private[this] var current = StrictOptimizedLinearSeqOps.this def hasNext = !current.isEmpty def next() = { val r = current.head; current = current.tail; r } } @@ -276,7 +276,12 @@ private[collection] final class LinearSeqIterator[A](coll: LinearSeqOps[A, Linea // A call-by-need cell private[this] final class LazyCell(st: => LinearSeqOps[A, LinearSeq, LinearSeq[A]]) { lazy val v = st } - private[this] var these: LazyCell = new LazyCell(coll) + private[this] var these: LazyCell = { + // Reassign reference to avoid creating a private class field and holding a reference to the head. + // LazyCell would otherwise close over `coll`. + val initialHead = coll + new LazyCell(initialHead) + } def hasNext: Boolean = these.v.nonEmpty diff --git a/src/library/scala/collection/Map.scala b/src/library/scala/collection/Map.scala index 5f929fe82e1f..c9ccfc986f16 100644 --- a/src/library/scala/collection/Map.scala +++ b/src/library/scala/collection/Map.scala @@ -29,16 +29,49 @@ trait Map[K, +V] def canEqual(that: Any): Boolean = true + /** + * Equality of maps is implemented using the lookup method [[get]]. This method returns `true` if + * - the argument `o` is a `Map`, + * - the two maps have the same [[size]], and + * - for every `(key, value)` pair in this map, `other.get(key) == Some(value)`. + * + * The implementation of `equals` checks the [[canEqual]] method, so subclasses of `Map` can narrow down the equality + * to specific map types. The `Map` implementations in the standard library can all be compared, their `canEqual` + * methods return `true`. + * + * Note: The `equals` method only respects the equality laws (symmetry, transitivity) if the two maps use the same + * key equivalence function in their lookup operation. For example, the key equivalence operation in a + * [[scala.collection.immutable.TreeMap]] is defined by its ordering. Comparing a `TreeMap` with a `HashMap` leads + * to unexpected results if `ordering.equiv(k1, k2)` (used for lookup in `TreeMap`) is different from `k1 == k2` + * (used for lookup in `HashMap`). 
+ * + * {{{ + * scala> import scala.collection.immutable._ + * scala> val ord: Ordering[String] = _ compareToIgnoreCase _ + * + * scala> TreeMap("A" -> 1)(ord) == HashMap("a" -> 1) + * val res0: Boolean = false + * + * scala> HashMap("a" -> 1) == TreeMap("A" -> 1)(ord) + * val res1: Boolean = true + * }}} + * + * + * @param o The map to which this map is compared + * @return `true` if the two maps are equal according to the description + */ override def equals(o: Any): Boolean = (this eq o.asInstanceOf[AnyRef]) || (o match { - case map: Map[K, _] if map.canEqual(this) => - (this.size == map.size) && - this.forall(kv => map.getOrElse(kv._1, Map.DefaultSentinelFn()) == kv._2) + case map: Map[K @unchecked, _] if map.canEqual(this) => + (this.size == map.size) && { + try this.forall(kv => map.getOrElse(kv._1, Map.DefaultSentinelFn()) == kv._2) + catch { case _: ClassCastException => false } // PR #9565 / scala/bug#12228 + } case _ => false }) - override def hashCode(): Int = MurmurHash3.mapHash(toIterable) + override def hashCode(): Int = MurmurHash3.mapHash(this) // These two methods are not in MapOps so that MapView is not forced to implement them @deprecated("Use - or removed on an immutable Map", "2.13.0") @@ -263,7 +296,7 @@ trait MapOps[K, +V, +CC[_, _] <: IterableOps[_, AnyConstr, _], +C] * @return a new $coll resulting from applying the given function * `f` to each element of this $coll and collecting the results. */ - def map[K2, V2](f: ((K, V)) => (K2, V2)): CC[K2, V2] = mapFactory.from(new View.Map(toIterable, f)) + def map[K2, V2](f: ((K, V)) => (K2, V2)): CC[K2, V2] = mapFactory.from(new View.Map(this, f)) /** Builds a new collection by applying a partial function to all elements of this $coll * on which the function is defined. @@ -276,7 +309,7 @@ trait MapOps[K, +V, +CC[_, _] <: IterableOps[_, AnyConstr, _], +C] * The order of the elements is preserved. */ def collect[K2, V2](pf: PartialFunction[(K, V), (K2, V2)]): CC[K2, V2] = - mapFactory.from(new View.Collect(toIterable, pf)) + mapFactory.from(new View.Collect(this, pf)) /** Builds a new map by applying a function to all elements of this $coll * and using the elements of the resulting collections. @@ -285,7 +318,7 @@ trait MapOps[K, +V, +CC[_, _] <: IterableOps[_, AnyConstr, _], +C] * @return a new $coll resulting from applying the given collection-valued function * `f` to each element of this $coll and concatenating the results. */ - def flatMap[K2, V2](f: ((K, V)) => IterableOnce[(K2, V2)]): CC[K2, V2] = mapFactory.from(new View.FlatMap(toIterable, f)) + def flatMap[K2, V2](f: ((K, V)) => IterableOnce[(K2, V2)]): CC[K2, V2] = mapFactory.from(new View.FlatMap(this, f)) /** Returns a new $coll containing the elements from the left hand operand followed by the elements from the * right hand operand. The element type of the $coll is the most specific superclass encompassing @@ -296,7 +329,7 @@ trait MapOps[K, +V, +CC[_, _] <: IterableOps[_, AnyConstr, _], +C] * of this $coll followed by all elements of `suffix`. 
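// [Editor's sketch, not part of the patch; value names are ours] What the try/catch added to
// Map.equals above buys (PR #9565 / scala/bug#12228): when the right-hand map's lookup casts
// the key (here a TreeMap keyed by String), comparing against a map with an unrelated key type
// now evaluates to false instead of escaping with a ClassCastException.
object MapEqualsCceSketch extends App {
  import scala.collection.immutable.{HashMap, TreeMap}
  val byName = TreeMap("one" -> 1)
  val byInt  = HashMap(1 -> 1)
  println(byInt == byName)  // false: same size, but the Int key cannot be looked up in byName
}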
*/ def concat[V2 >: V](suffix: collection.IterableOnce[(K, V2)]): CC[K, V2] = mapFactory.from(suffix match { - case it: Iterable[(K, V2)] => new View.Concat(toIterable, it) + case it: Iterable[(K, V2)] => new View.Concat(this, it) case _ => iterator.concat(suffix.iterator) }) @@ -310,11 +343,11 @@ trait MapOps[K, +V, +CC[_, _] <: IterableOps[_, AnyConstr, _], +C] @deprecated("Consider requiring an immutable Map or fall back to Map.concat.", "2.13.0") def + [V1 >: V](kv: (K, V1)): CC[K, V1] = - mapFactory.from(new View.Appended(toIterable, kv)) + mapFactory.from(new View.Appended(this, kv)) @deprecated("Use ++ with an explicit collection argument instead of + with varargs", "2.13.0") def + [V1 >: V](elem1: (K, V1), elem2: (K, V1), elems: (K, V1)*): CC[K, V1] = - mapFactory.from(new View.Concat(new View.Appended(new View.Appended(toIterable, elem1), elem2), elems)) + mapFactory.from(new View.Concat(new View.Appended(new View.Appended(this, elem1), elem2), elems)) @deprecated("Consider requiring an immutable Map.", "2.13.0") @`inline` def -- (keys: IterableOnce[K]): C = { @@ -328,7 +361,7 @@ trait MapOps[K, +V, +CC[_, _] <: IterableOps[_, AnyConstr, _], +C] case that: Iterable[(K, V1)] => that case that => View.from(that) } - mapFactory.from(new View.Concat(thatIterable, toIterable)) + mapFactory.from(new View.Concat(thatIterable, this)) } } diff --git a/src/library/scala/collection/Seq.scala b/src/library/scala/collection/Seq.scala index 3f98f877413e..04b2e911c3cd 100644 --- a/src/library/scala/collection/Seq.scala +++ b/src/library/scala/collection/Seq.scala @@ -34,11 +34,11 @@ trait Seq[+A] override def equals(o: Any): Boolean = (this eq o.asInstanceOf[AnyRef]) || (o match { - case seq: Seq[A] if seq.canEqual(this) => sameElements(seq) + case seq: Seq[A @unchecked] if seq.canEqual(this) => sameElements(seq) case _ => false }) - override def hashCode(): Int = MurmurHash3.seqHash(toIterable) + override def hashCode(): Int = MurmurHash3.seqHash(this) override def toString(): String = super[Iterable].toString() @@ -857,12 +857,16 @@ trait SeqOps[+A, +CC[_], +C] extends Any def diff[B >: A](that: Seq[B]): C = { val occ = occCounts(that) fromSpecific(iterator.filter { x => - val ox = occ(x) // Avoid multiple map lookups - if (ox == 0) true - else { - occ(x) = ox - 1 - false + var include = false + occ.updateWith(x) { + case None => { + include = true + None + } + case Some(1) => None + case Some(n) => Some(n - 1) } + include }) } @@ -878,11 +882,16 @@ trait SeqOps[+A, +CC[_], +C] extends Any def intersect[B >: A](that: Seq[B]): C = { val occ = occCounts(that) fromSpecific(iterator.filter { x => - val ox = occ(x) // Avoid multiple map lookups - if (ox > 0) { - occ(x) = ox - 1 - true - } else false + var include = true + occ.updateWith(x) { + case None => { + include = false + None + } + case Some(1) => None + case Some(n) => Some(n - 1) + } + include }) } @@ -920,8 +929,11 @@ trait SeqOps[+A, +CC[_], +C] extends Any } protected[collection] def occCounts[B](sq: Seq[B]): mutable.Map[B, Int] = { - val occ = new mutable.HashMap[B, Int]().withDefaultValue(0) - for (y <- sq) occ(y) += 1 + val occ = new mutable.HashMap[B, Int]() + for (y <- sq) occ.updateWith(y) { + case None => Some(1) + case Some(n) => Some(n + 1) + } occ } diff --git a/src/library/scala/collection/Set.scala b/src/library/scala/collection/Set.scala index 188a96e78326..151b04ef1a4a 100644 --- a/src/library/scala/collection/Set.scala +++ b/src/library/scala/collection/Set.scala @@ -28,15 +28,49 @@ trait Set[A] def canEqual(that: Any) = 
true + /** + * Equality of sets is implemented using the lookup method [[contains]]. This method returns `true` if + * - the argument `that` is a `Set`, + * - the two sets have the same [[size]], and + * - for every `element` this set, `other.contains(element) == true`. + * + * The implementation of `equals` checks the [[canEqual]] method, so subclasses of `Set` can narrow down the equality + * to specific set types. The `Set` implementations in the standard library can all be compared, their `canEqual` + * methods return `true`. + * + * Note: The `equals` method only respects the equality laws (symmetry, transitivity) if the two sets use the same + * element equivalence function in their lookup operation. For example, the element equivalence operation in a + * [[scala.collection.immutable.TreeSet]] is defined by its ordering. Comparing a `TreeSet` with a `HashSet` leads + * to unexpected results if `ordering.equiv(e1, e2)` (used for lookup in `TreeSet`) is different from `e1 == e2` + * (used for lookup in `HashSet`). + * + * {{{ + * scala> import scala.collection.immutable._ + * scala> val ord: Ordering[String] = _ compareToIgnoreCase _ + * + * scala> TreeSet("A")(ord) == HashSet("a") + * val res0: Boolean = false + * + * scala> HashSet("a") == TreeSet("A")(ord) + * val res1: Boolean = true + * }}} + * + * + * @param that The set to which this set is compared + * @return `true` if the two sets are equal according to the description + */ override def equals(that: Any): Boolean = (this eq that.asInstanceOf[AnyRef]) || (that match { - case set: Set[A] if set.canEqual(this) => - (this.size == set.size) && this.subsetOf(set) + case set: Set[A @unchecked] if set.canEqual(this) => + (this.size == set.size) && { + try this.subsetOf(set) + catch { case _: ClassCastException => false } // PR #9565 / scala/bug#12228 + } case _ => false }) - override def hashCode(): Int = MurmurHash3.setHash(toIterable) + override def hashCode(): Int = MurmurHash3.setHash(this) override def iterableFactory: IterableFactory[Set] = Set @@ -81,7 +115,7 @@ trait SetOps[A, +CC[_], +C <: SetOps[A, CC, C]] */ def subsets(len: Int): Iterator[C] = { if (len < 0 || len > size) Iterator.empty - else new SubsetsItr(toIterable.to(IndexedSeq), len) + else new SubsetsItr(this.to(IndexedSeq), len) } /** An iterator over all subsets of this set. @@ -89,7 +123,7 @@ trait SetOps[A, +CC[_], +C <: SetOps[A, CC, C]] * @return the iterator. */ def subsets(): Iterator[C] = new AbstractIterator[C] { - private[this] val elms = toIterable.to(IndexedSeq) + private[this] val elms = SetOps.this.to(IndexedSeq) private[this] var len = 0 private[this] var itr: Iterator[C] = Iterator.empty @@ -187,15 +221,15 @@ trait SetOps[A, +CC[_], +C <: SetOps[A, CC, C]] * @return a new $coll with the given elements added, omitting duplicates. 
*/ def concat(that: collection.IterableOnce[A]): C = fromSpecific(that match { - case that: collection.Iterable[A] => new View.Concat(toIterable, that) + case that: collection.Iterable[A] => new View.Concat(this, that) case _ => iterator.concat(that.iterator) }) @deprecated("Consider requiring an immutable Set or fall back to Set.union", "2.13.0") - def + (elem: A): C = fromSpecific(new View.Appended(toIterable, elem)) + def + (elem: A): C = fromSpecific(new View.Appended(this, elem)) @deprecated("Use ++ with an explicit collection argument instead of + with varargs", "2.13.0") - def + (elem1: A, elem2: A, elems: A*): C = fromSpecific(new View.Concat(new View.Appended(new View.Appended(toIterable, elem1), elem2), elems)) + def + (elem1: A, elem2: A, elems: A*): C = fromSpecific(new View.Concat(new View.Appended(new View.Appended(this, elem1), elem2), elems)) /** Alias for `concat` */ @`inline` final def ++ (that: collection.IterableOnce[A]): C = concat(that) diff --git a/src/library/scala/collection/SortedMap.scala b/src/library/scala/collection/SortedMap.scala index 570b09a12b3c..03ab0bb0dadc 100644 --- a/src/library/scala/collection/SortedMap.scala +++ b/src/library/scala/collection/SortedMap.scala @@ -30,14 +30,17 @@ trait SortedMap[K, +V] override def equals(that: Any): Boolean = that match { case _ if this eq that.asInstanceOf[AnyRef] => true - case sm: SortedMap[k, v] if sm.ordering == this.ordering => + case sm: SortedMap[K @unchecked, _] if sm.ordering == this.ordering => (sm canEqual this) && (this.size == sm.size) && { val i1 = this.iterator val i2 = sm.iterator var allEqual = true - while (allEqual && i1.hasNext) - allEqual = i1.next() == i2.next() + while (allEqual && i1.hasNext) { + val kv1 = i1.next() + val kv2 = i2.next() + allEqual = ordering.equiv(kv1._1, kv2._1) && kv1._2 == kv2._2 + } allEqual } case _ => super.equals(that) @@ -150,7 +153,7 @@ trait SortedMapOps[K, +V, +CC[X, Y] <: Map[X, Y] with SortedMapOps[X, Y, CC, _], * `f` to each element of this $coll and collecting the results. */ def map[K2, V2](f: ((K, V)) => (K2, V2))(implicit @implicitNotFound(SortedMapOps.ordMsg) ordering: Ordering[K2]): CC[K2, V2] = - sortedMapFactory.from(new View.Map[(K, V), (K2, V2)](toIterable, f)) + sortedMapFactory.from(new View.Map[(K, V), (K2, V2)](this, f)) /** Builds a new sorted map by applying a function to all elements of this $coll * and using the elements of the resulting collections. @@ -160,7 +163,7 @@ trait SortedMapOps[K, +V, +CC[X, Y] <: Map[X, Y] with SortedMapOps[X, Y, CC, _], * `f` to each element of this $coll and concatenating the results. */ def flatMap[K2, V2](f: ((K, V)) => IterableOnce[(K2, V2)])(implicit @implicitNotFound(SortedMapOps.ordMsg) ordering: Ordering[K2]): CC[K2, V2] = - sortedMapFactory.from(new View.FlatMap(toIterable, f)) + sortedMapFactory.from(new View.FlatMap(this, f)) /** Builds a new sorted map by applying a partial function to all elements of this $coll * on which the function is defined. @@ -171,10 +174,10 @@ trait SortedMapOps[K, +V, +CC[X, Y] <: Map[X, Y] with SortedMapOps[X, Y, CC, _], * The order of the elements is preserved. 
*/ def collect[K2, V2](pf: PartialFunction[(K, V), (K2, V2)])(implicit @implicitNotFound(SortedMapOps.ordMsg) ordering: Ordering[K2]): CC[K2, V2] = - sortedMapFactory.from(new View.Collect(toIterable, pf)) + sortedMapFactory.from(new View.Collect(this, pf)) override def concat[V2 >: V](suffix: IterableOnce[(K, V2)]): CC[K, V2] = sortedMapFactory.from(suffix match { - case it: Iterable[(K, V2)] => new View.Concat(toIterable, it) + case it: Iterable[(K, V2)] => new View.Concat(this, it) case _ => iterator.concat(suffix.iterator) })(ordering) @@ -182,10 +185,10 @@ trait SortedMapOps[K, +V, +CC[X, Y] <: Map[X, Y] with SortedMapOps[X, Y, CC, _], @`inline` override final def ++ [V2 >: V](xs: IterableOnce[(K, V2)]): CC[K, V2] = concat(xs) @deprecated("Consider requiring an immutable Map or fall back to Map.concat", "2.13.0") - override def + [V1 >: V](kv: (K, V1)): CC[K, V1] = sortedMapFactory.from(new View.Appended(toIterable, kv))(ordering) + override def + [V1 >: V](kv: (K, V1)): CC[K, V1] = sortedMapFactory.from(new View.Appended(this, kv))(ordering) @deprecated("Use ++ with an explicit collection argument instead of + with varargs", "2.13.0") - override def + [V1 >: V](elem1: (K, V1), elem2: (K, V1), elems: (K, V1)*): CC[K, V1] = sortedMapFactory.from(new View.Concat(new View.Appended(new View.Appended(toIterable, elem1), elem2), elems))(ordering) + override def + [V1 >: V](elem1: (K, V1), elem2: (K, V1), elems: (K, V1)*): CC[K, V1] = sortedMapFactory.from(new View.Concat(new View.Appended(new View.Appended(this, elem1), elem2), elems))(ordering) } object SortedMapOps { diff --git a/src/library/scala/collection/SortedSet.scala b/src/library/scala/collection/SortedSet.scala index 77f62dc15e98..c98ca9ae5523 100644 --- a/src/library/scala/collection/SortedSet.scala +++ b/src/library/scala/collection/SortedSet.scala @@ -29,14 +29,14 @@ trait SortedSet[A] extends Set[A] override def equals(that: Any): Boolean = that match { case _ if this eq that.asInstanceOf[AnyRef] => true - case ss: SortedSet[_] if ss.ordering == this.ordering => + case ss: SortedSet[A @unchecked] if ss.ordering == this.ordering => (ss canEqual this) && (this.size == ss.size) && { val i1 = this.iterator val i2 = ss.iterator var allEqual = true while (allEqual && i1.hasNext) - allEqual = i1.next() == i2.next() + allEqual = ordering.equiv(i1.next(), i2.next()) allEqual } case _ => @@ -118,7 +118,7 @@ trait SortedSetOps[A, +CC[X] <: SortedSet[X], +C <: SortedSetOps[A, CC, C]] * `f` to each element of this $coll and collecting the results. */ def map[B](f: A => B)(implicit @implicitNotFound(SortedSetOps.ordMsg) ev: Ordering[B]): CC[B] = - sortedIterableFactory.from(new View.Map(toIterable, f)) + sortedIterableFactory.from(new View.Map(this, f)) /** Builds a new sorted collection by applying a function to all elements of this $coll * and using the elements of the resulting collections. @@ -129,7 +129,7 @@ trait SortedSetOps[A, +CC[X] <: SortedSet[X], +C <: SortedSetOps[A, CC, C]] * `f` to each element of this $coll and concatenating the results. */ def flatMap[B](f: A => IterableOnce[B])(implicit @implicitNotFound(SortedSetOps.ordMsg) ev: Ordering[B]): CC[B] = - sortedIterableFactory.from(new View.FlatMap(toIterable, f)) + sortedIterableFactory.from(new View.FlatMap(this, f)) /** Returns a $coll formed from this $coll and another iterable collection * by combining corresponding elements in pairs. 
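// [Editor's sketch, not part of the patch; names are ours] The effect of comparing elements
// with ordering.equiv in SortedSet.equals above (SortedMap.equals gets the same treatment for
// keys): two sorted sets that share a case-insensitive ordering now compare equal even though
// the stored elements differ in case.
object SortedEquivSketch extends App {
  import scala.collection.immutable.TreeSet
  val ci: Ordering[String] = _ compareToIgnoreCase _
  println(TreeSet("A")(ci) == TreeSet("a")(ci))  // true with this change (previously false)
}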
@@ -142,7 +142,7 @@ trait SortedSetOps[A, +CC[X] <: SortedSet[X], +C <: SortedSetOps[A, CC, C]] */ def zip[B](that: IterableOnce[B])(implicit @implicitNotFound(SortedSetOps.zipOrdMsg) ev: Ordering[(A @uncheckedVariance, B)]): CC[(A @uncheckedVariance, B)] = // sound bcs of VarianceNote sortedIterableFactory.from(that match { - case that: Iterable[B] => new View.Zip(toIterable, that) + case that: Iterable[B] => new View.Zip(this, that) case _ => iterator.zip(that) }) @@ -156,7 +156,7 @@ trait SortedSetOps[A, +CC[X] <: SortedSet[X], +C <: SortedSetOps[A, CC, C]] * The order of the elements is preserved. */ def collect[B](pf: scala.PartialFunction[A, B])(implicit @implicitNotFound(SortedSetOps.ordMsg) ev: Ordering[B]): CC[B] = - sortedIterableFactory.from(new View.Collect(toIterable, pf)) + sortedIterableFactory.from(new View.Collect(this, pf)) } object SortedSetOps { diff --git a/src/library/scala/collection/StrictOptimizedIterableOps.scala b/src/library/scala/collection/StrictOptimizedIterableOps.scala index 3429c2aa4837..a09766cfa912 100644 --- a/src/library/scala/collection/StrictOptimizedIterableOps.scala +++ b/src/library/scala/collection/StrictOptimizedIterableOps.scala @@ -13,6 +13,7 @@ package scala package collection +import scala.annotation.nowarn import scala.annotation.unchecked.uncheckedVariance import scala.runtime.Statics @@ -203,7 +204,7 @@ trait StrictOptimizedIterableOps[+A, +CC[_], +C] override def scanLeft[B](z: B)(op: (B, A) => B): CC[B] = { val b = iterableFactory.newBuilder[B] - b.sizeHint(toIterable, delta = 0) + b.sizeHint(this, delta = 0) var acc = z b += acc val it = iterator @@ -254,7 +255,7 @@ trait StrictOptimizedIterableOps[+A, +CC[_], +C] */ override def takeRight(n: Int): C = { val b = newSpecificBuilder - b.sizeHintBounded(n, toIterable) + b.sizeHintBounded(n, toIterable: @nowarn("cat=deprecation")) val lead = iterator drop n val it = iterator while (lead.hasNext) { @@ -271,7 +272,7 @@ trait StrictOptimizedIterableOps[+A, +CC[_], +C] */ override def dropRight(n: Int): C = { val b = newSpecificBuilder - if (n >= 0) b.sizeHint(toIterable, delta = -n) + if (n >= 0) b.sizeHint(this, delta = -n) val lead = iterator drop n val it = iterator while (lead.hasNext) { diff --git a/src/library/scala/collection/StrictOptimizedSeqOps.scala b/src/library/scala/collection/StrictOptimizedSeqOps.scala index 73f89fa46897..396e53885081 100644 --- a/src/library/scala/collection/StrictOptimizedSeqOps.scala +++ b/src/library/scala/collection/StrictOptimizedSeqOps.scala @@ -75,27 +75,38 @@ trait StrictOptimizedSeqOps [+A, +CC[_], +C] b.result() } - override def diff[B >: A](that: Seq[B]): C = { - val occ = occCounts(that) - val b = newSpecificBuilder - for (x <- this) { - val ox = occ(x) // Avoid multiple map lookups - if (ox == 0) b += x - else occ(x) = ox - 1 + override def diff[B >: A](that: Seq[B]): C = + if (isEmpty || that.isEmpty) coll + else { + val occ = occCounts(that) + val b = newSpecificBuilder + for (x <- this) { + occ.updateWith(x) { + case None => { + b.addOne(x) + None + } + case Some(1) => None + case Some(n) => Some(n - 1) + } + } + b.result() } - b.result() - } - override def intersect[B >: A](that: Seq[B]): C = { - val occ = occCounts(that) - val b = newSpecificBuilder - for (x <- this) { - val ox = occ(x) // Avoid multiple map lookups - if (ox > 0) { - b += x - occ(x) = ox - 1 + override def intersect[B >: A](that: Seq[B]): C = + if (isEmpty || that.isEmpty) empty + else { + val occ = occCounts(that) + val b = newSpecificBuilder + for (x <- this) { + 
occ.updateWith(x) { + case None => None + case Some(n) => { + b.addOne(x) + if (n == 1) None else Some(n - 1) + } + } } + b.result() } - b.result() - } } diff --git a/src/library/scala/collection/StringOps.scala b/src/library/scala/collection/StringOps.scala index 347282e35717..42a06f6e7ce4 100644 --- a/src/library/scala/collection/StringOps.scala +++ b/src/library/scala/collection/StringOps.scala @@ -158,13 +158,13 @@ object StringOps { } /** Provides extension methods for strings. - * + * * Some of these methods treat strings as a plain collection of [[Char]]s * without any regard for Unicode handling. Unless the user takes Unicode * handling in to account or makes sure the strings don't require such handling, * these methods may result in unpaired or invalidly paired surrogate code * units. - * + * * @define unicodeunaware This method treats a string as a plain sequence of * Char code units and makes no attempt to keep * surrogate pairs or codepoint sequences together. @@ -848,9 +848,13 @@ final class StringOps(private val s: String) extends AnyVal { /** You can follow a string with `.r`, turning it into a `Regex`. E.g. * - * `"""A\w*""".r` is the regular expression for identifiers starting with `A`. + * `"""A\w*""".r` is the regular expression for ASCII-only identifiers starting with `A`. + * + * `"""(?<month>\d\d)-(?<day>\d\d)-(?<year>\d\d\d\d)""".r` matches dates + * and provides its subcomponents through groups named "month", "day" and + * "year". */ - def r: Regex = r() + def r: Regex = new Regex(s) /** You can follow a string with `.r(g1, ... , gn)`, turning it into a `Regex`, * with group names g1 through gn. @@ -861,6 +865,7 @@ final class StringOps(private val s: String) extends AnyVal { * * @param groupNames The names of the groups in the pattern, in the order they appear. */ + @deprecated("use inline group names like (?<year>X) instead", "2.13.7") def r(groupNames: String*): Regex = new Regex(s, groupNames: _*) /** @@ -1430,7 +1435,7 @@ final class StringOps(private val s: String) extends AnyVal { * * @param f the 'split function' mapping the elements of this string to an [[scala.util.Either]] * - * @return a pair of strings: the first one made of those characters returned by `f` that were wrapped in [[scala.util.Left]], + * @return a pair of strings: the first one made of those characters returned by `f` that were wrapped in [[scala.util.Left]], * and the second one made of those wrapped in [[scala.util.Right]].
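// [Editor's sketch, not part of the patch; names are ours] Using the inline group names that
// the new deprecation message above recommends in place of .r(groupNames); lookup by group
// name on a Match is expected to work once the inline names are present in the pattern.
object NamedGroupsSketch extends App {
  val date = """(?<month>\d\d)-(?<day>\d\d)-(?<year>\d\d\d\d)""".r
  "04-01-2004" match {
    case date(month, day, year) => println(s"$year-$month-$day")    // 2004-04-01
  }
  println(date.findFirstMatchIn("04-01-2004").map(_.group("year"))) // Some(2004)
}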
*/ def partitionMap(f: Char => Either[Char,Char]): (String, String) = { diff --git a/src/library/scala/collection/View.scala b/src/library/scala/collection/View.scala index c84c126626f6..441790c3c6e5 100644 --- a/src/library/scala/collection/View.scala +++ b/src/library/scala/collection/View.scala @@ -404,8 +404,14 @@ object View extends IterableFactory[View] { @SerialVersionUID(3L) private[collection] class Patched[A](underlying: SomeIterableOps[A], from: Int, other: IterableOnce[A], replaced: Int) extends AbstractView[A] { - def iterator: Iterator[A] = underlying.iterator.patch(from, other.iterator, replaced) - override def knownSize: Int = if (underlying.knownSize == 0 && other.knownSize == 0) 0 else super.knownSize + // we may be unable to traverse `other` more than once, so we need to cache it if that's the case + private val _other: Iterable[A] = other match { + case other: Iterable[A] => other + case other => LazyList.from(other) + } + + def iterator: Iterator[A] = underlying.iterator.patch(from, _other.iterator, replaced) + override def knownSize: Int = if (underlying.knownSize == 0 && _other.knownSize == 0) 0 else super.knownSize override def isEmpty: Boolean = if (knownSize == 0) true else iterator.isEmpty } diff --git a/src/library/scala/collection/concurrent/MainNode.java b/src/library/scala/collection/concurrent/MainNode.java index 11c09bb2fe5b..f7f022974e9e 100644 --- a/src/library/scala/collection/concurrent/MainNode.java +++ b/src/library/scala/collection/concurrent/MainNode.java @@ -24,6 +24,9 @@ abstract class MainNode<K, V> extends BasicNode { public abstract int cachedSize(Object ct); + // standard contract + public abstract int knownSize(); + public boolean CAS_PREV(MainNode<K, V> oldval, MainNode<K, V> nval) { return updater.compareAndSet(this, oldval, nval); } diff --git a/src/library/scala/collection/concurrent/Map.scala b/src/library/scala/collection/concurrent/Map.scala index ec75b87883f4..897a699a55a4 100644 --- a/src/library/scala/collection/concurrent/Map.scala +++ b/src/library/scala/collection/concurrent/Map.scala @@ -103,6 +103,43 @@ trait Map[K, V] extends scala.collection.mutable.Map[K, V] { } } + /** + * Removes the entry for the specified key if it's currently mapped to the + * specified value. Comparison to the specified value is done using reference + * equality. + * + * Not all map implementations can support removal based on reference + * equality, and for those implementations, object equality is used instead. + * + * $atomicop + * + * @param k key for which the entry should be removed + * @param v value expected to be associated with the specified key if + * the removal is to take place + * @return `true` if the removal took place, `false` otherwise + */ + // TODO: make part of the API in a future version + private[collection] def removeRefEq(k: K, v: V): Boolean = remove(k, v) + + /** + * Replaces the entry for the given key only if it was previously mapped to + * a given value. Comparison to the specified value is done using reference + * equality. + * + * Not all map implementations can support replacement based on reference + * equality, and for those implementations, object equality is used instead.
+ * + * $atomicop + * + * @param k key for which the entry should be replaced + * @param oldValue value expected to be associated with the specified key + * if replacing is to happen + * @param newValue value to be associated with the specified key + * @return `true` if the entry was replaced, `false` otherwise + */ + // TODO: make part of the API in a future version + private[collection] def replaceRefEq(k: K, oldValue: V, newValue: V): Boolean = replace(k, oldValue, newValue) + /** * Update a mapping for the specified key and its current optionally-mapped value * (`Some` if there is current mapping, `None` if not). @@ -121,14 +158,36 @@ trait Map[K, V] extends scala.collection.mutable.Map[K, V] { @tailrec private def updateWithAux(key: K)(remappingFunction: Option[V] => Option[V]): Option[V] = { - val previousValue = this.get(key) + val previousValue = get(key) val nextValue = remappingFunction(previousValue) - (previousValue, nextValue) match { - case (None, None) => None - case (None, Some(next)) if this.putIfAbsent(key, next).isEmpty => nextValue - case (Some(prev), None) if this.remove(key, prev) => None - case (Some(prev), Some(next)) if this.replace(key, prev, next) => nextValue - case _ => this.updateWithAux(key)(remappingFunction) + previousValue match { + case Some(prev) => nextValue match { + case Some(next) => if (replaceRefEq(key, prev, next)) return nextValue + case _ => if (removeRefEq(key, prev)) return None + } + case _ => nextValue match { + case Some(next) => if (putIfAbsent(key, next).isEmpty) return nextValue + case _ => return None + } + } + updateWithAux(key)(remappingFunction) + } + + private[collection] def filterInPlaceImpl(p: (K, V) => Boolean): this.type = { + val it = iterator + while (it.hasNext) { + val (k, v) = it.next() + if (!p(k, v)) removeRefEq(k, v) + } + this + } + + private[collection] def mapValuesInPlaceImpl(f: (K, V) => V): this.type = { + val it = iterator + while (it.hasNext) { + val (k, v) = it.next() + replaceRefEq(k, v, f(k, v)) } + this } } diff --git a/src/library/scala/collection/concurrent/TrieMap.scala b/src/library/scala/collection/concurrent/TrieMap.scala index 83e83f5e31be..cc1b08d91e14 100644 --- a/src/library/scala/collection/concurrent/TrieMap.scala +++ b/src/library/scala/collection/concurrent/TrieMap.scala @@ -15,8 +15,9 @@ package collection package concurrent import java.util.concurrent.atomic._ - +import scala.{unchecked => uc} import scala.annotation.tailrec +import scala.collection.concurrent.TrieMap.RemovalPolicy import scala.collection.generic.DefaultSerializable import scala.collection.immutable.{List, Nil} import scala.collection.mutable.GrowableBuilder @@ -114,13 +115,13 @@ private[collection] final class INode[K, V](bn: MainNode[K, V], g: Gen, equiv: E if ((bmp & flag) != 0) { // 1a) insert below cn.array(pos) match { - case in: INode[K, V] => + case in: INode[K, V] @uc => if (startgen eq in.gen) in.rec_insert(k, v, hc, lev + 5, this, startgen, ct) else { if (GCAS(cn, cn.renewed(startgen, ct), ct)) rec_insert(k, v, hc, lev, parent, startgen, ct) else false } - case sn: SNode[K, V] => + case sn: SNode[K, V] @uc => if (sn.hc == hc && equal(sn.k, k, ct)) GCAS(cn, cn.updatedAt(pos, new SNode(sn.k, v, hc), gen), ct) else { val rn = if (cn.gen eq gen) cn else cn.renewed(gen, ct) @@ -152,11 +153,12 @@ private[collection] final class INode[K, V](bn: MainNode[K, V], g: Gen, equiv: E * KEY_ABSENT - key wasn't there, insert only, do not overwrite * KEY_PRESENT - key was there, overwrite only, do not insert * other value `v` - 
only overwrite if the current value is this - * @param hc the hashcode of `k`` + * @param fullEquals whether to use reference or full equals when comparing `v` to the current value + * @param hc the hashcode of `k` * * @return null if unsuccessful, Option[V] otherwise (indicating previous value bound to the key) */ - @tailrec def rec_insertif(k: K, v: V, hc: Int, cond: AnyRef, lev: Int, parent: INode[K, V], startgen: Gen, ct: TrieMap[K, V]): Option[V] = { + @tailrec def rec_insertif(k: K, v: V, hc: Int, cond: AnyRef, fullEquals: Boolean, lev: Int, parent: INode[K, V], startgen: Gen, ct: TrieMap[K, V]): Option[V] = { val m = GCAS_READ(ct) // use -Yinline! m match { @@ -169,13 +171,13 @@ private[collection] final class INode[K, V](bn: MainNode[K, V], g: Gen, equiv: E if ((bmp & flag) != 0) { // 1a) insert below cn.array(pos) match { - case in: INode[K, V] => - if (startgen eq in.gen) in.rec_insertif(k, v, hc, cond, lev + 5, this, startgen, ct) + case in: INode[K, V] @uc => + if (startgen eq in.gen) in.rec_insertif(k, v, hc, cond, fullEquals, lev + 5, this, startgen, ct) else { - if (GCAS(cn, cn.renewed(startgen, ct), ct)) rec_insertif(k, v, hc, cond, lev, parent, startgen, ct) + if (GCAS(cn, cn.renewed(startgen, ct), ct)) rec_insertif(k, v, hc, cond, fullEquals, lev, parent, startgen, ct) else null } - case sn: SNode[K, V] => cond match { + case sn: SNode[K, V] @uc => cond match { case INode.KEY_PRESENT_OR_ABSENT => if (sn.hc == hc && equal(sn.k, k, ct)) { if (GCAS(cn, cn.updatedAt(pos, new SNode(sn.k, v, hc), gen), ct)) Some(sn.v) else null @@ -198,7 +200,7 @@ private[collection] final class INode[K, V](bn: MainNode[K, V], g: Gen, equiv: E if (GCAS(cn, cn.updatedAt(pos, new SNode(k, v, hc), gen), ct)) Some(sn.v) else null } else None case otherv => - if (sn.hc == hc && equal(sn.k, k, ct) && sn.v == otherv) { + if (sn.hc == hc && equal(sn.k, k, ct) && (if (fullEquals) sn.v == otherv else sn.v.asInstanceOf[AnyRef] eq otherv)) { if (GCAS(cn, cn.updatedAt(pos, new SNode(k, v, hc), gen), ct)) Some(sn.v) else null } else None } @@ -236,7 +238,8 @@ private[collection] final class INode[K, V](bn: MainNode[K, V], g: Gen, equiv: E } case otherv => ln.get(k) match { - case Some(v0) if v0 == otherv => if (insertln()) Some(otherv.asInstanceOf[V]) else null + case Some(v0) if (if (fullEquals) v0 == otherv else v0.asInstanceOf[AnyRef] eq otherv) => + if (insertln()) Some(otherv.asInstanceOf[V]) else null case _ => None } } @@ -264,19 +267,19 @@ private[collection] final class INode[K, V](bn: MainNode[K, V], g: Gen, equiv: E val pos = if (bmp == 0xffffffff) idx else Integer.bitCount(bmp & (flag - 1)) val sub = cn.array(pos) sub match { - case in: INode[K, V] => + case in: INode[K, V] @uc => if (ct.isReadOnly || (startgen eq in.gen)) in.rec_lookup(k, hc, lev + 5, this, startgen, ct) else { if (GCAS(cn, cn.renewed(startgen, ct), ct)) rec_lookup(k, hc, lev, parent, startgen, ct) else RESTART } - case sn: SNode[K, V] => // 2) singleton node + case sn: SNode[K, V] @uc => // 2) singleton node if (sn.hc == hc && equal(sn.k, k, ct)) sn.v.asInstanceOf[AnyRef] else NO_SUCH_ELEMENT_SENTINEL case basicNode => throw new MatchError(basicNode) } } - case tn: TNode[K, V] => // 3) non-live node + case tn: TNode[_, _] => // 3) non-live node def cleanReadOnly(tn: TNode[K, V]) = if (ct.nonReadOnly) { clean(parent, ct, lev - 5) RESTART @@ -295,15 +298,15 @@ private[collection] final class INode[K, V](bn: MainNode[K, V], g: Gen, equiv: E * * @param hc the hashcode of `k` * - * @param removeAlways if true, then the value will be 
removed regardless of the value - * if false, then value will only be removed if it exactly matches v` + * @param removalPolicy policy deciding whether to remove `k` based on `v` and the + * current value associated with `k` (Always, FullEquals, or ReferenceEq) * * @return null if not successful, an Option[V] indicating the previous value otherwise */ def rec_remove( k: K, v: V, - removeAlways: Boolean, + removalPolicy: Int, hc: Int, lev: Int, parent: INode[K, V], @@ -322,14 +325,14 @@ private[collection] final class INode[K, V](bn: MainNode[K, V], g: Gen, equiv: E val pos = Integer.bitCount(bmp & (flag - 1)) val sub = cn.array(pos) val res = sub match { - case in: INode[K, V] => - if (startgen eq in.gen) in.rec_remove(k, v, removeAlways, hc, lev + 5, this, startgen, ct) + case in: INode[K, V] @uc => + if (startgen eq in.gen) in.rec_remove(k, v, removalPolicy, hc, lev + 5, this, startgen, ct) else { - if (GCAS(cn, cn.renewed(startgen, ct), ct)) rec_remove(k, v, removeAlways, hc, lev, parent, startgen, ct) + if (GCAS(cn, cn.renewed(startgen, ct), ct)) rec_remove(k, v, removalPolicy, hc, lev, parent, startgen, ct) else null } - case sn: SNode[K, V] => - if (sn.hc == hc && equal(sn.k, k, ct) && (removeAlways || sn.v == v)) { + case sn: SNode[K, V] @uc => + if (sn.hc == hc && equal(sn.k, k, ct) && RemovalPolicy.shouldRemove(removalPolicy)(sn.v, v)) { val ncn = cn.removedAt(pos, flag, gen).toContracted(lev) if (GCAS(cn, ncn, ct)) Some(sn.v) else null } else None @@ -349,8 +352,8 @@ private[collection] final class INode[K, V](bn: MainNode[K, V], g: Gen, equiv: E else { val pos = Integer.bitCount(bmp & (flag - 1)) val sub = cn.array(pos) - if (sub eq this) (nonlive: @unchecked) match { - case tn: TNode[K, V] => + if (sub eq this) (nonlive: @uc) match { + case tn: TNode[K, V] @uc => val ncn = cn.updatedAt(pos, tn.copyUntombed, gen).toContracted(lev - 5) if (!parent.GCAS(cn, ncn, ct)) if (ct.readRoot().gen == startgen) cleanParent(nonlive) @@ -373,12 +376,12 @@ private[collection] final class INode[K, V](bn: MainNode[K, V], g: Gen, equiv: E clean(parent, ct, lev - 5) null case ln: LNode[K, V] => - if (removeAlways) { + if (removalPolicy == RemovalPolicy.Always) { val optv = ln.get(k) val nn = ln.removed(k, ct) if (GCAS(ln, nn, ct)) optv else null } else ln.get(k) match { - case optv @ Some(v0) if v0 == v => + case optv @ Some(v0) if RemovalPolicy.shouldRemove(removalPolicy)(v, v0) => val nn = ln.removed(k, ct) if (GCAS(ln, nn, ct)) optv else null case _ => None @@ -397,10 +400,11 @@ private[collection] final class INode[K, V](bn: MainNode[K, V], g: Gen, equiv: E def isNullInode(ct: TrieMap[K, V]) = GCAS_READ(ct) eq null - def cachedSize(ct: TrieMap[K, V]): Int = { - val m = GCAS_READ(ct) - m.cachedSize(ct) - } + def cachedSize(ct: TrieMap[K, V]): Int = + GCAS_READ(ct).cachedSize(ct) + + def knownSize(ct: TrieMap[K, V]): Int = + GCAS_READ(ct).knownSize() /* this is a quiescent method! 
*/ def string(lev: Int) = "%sINode -> %s".format(" " * lev, mainnode match { @@ -437,6 +441,8 @@ private[concurrent] final class FailedNode[K, V](p: MainNode[K, V]) extends Main def cachedSize(ct: AnyRef): Int = throw new UnsupportedOperationException + def knownSize: Int = throw new UnsupportedOperationException + override def toString = "FailedNode(%s)".format(p) } @@ -455,7 +461,7 @@ private[collection] final class SNode[K, V](final val k: K, final val v: V, fina def string(lev: Int) = (" " * lev) + "SNode(%s, %s, %x)".format(k, v, hc) } - +// Tomb Node, used to ensure proper ordering during removals private[collection] final class TNode[K, V](final val k: K, final val v: V, final val hc: Int) extends MainNode[K, V] with KVNode[K, V] { def copy = new TNode(k, v, hc) @@ -463,10 +469,11 @@ private[collection] final class TNode[K, V](final val k: K, final val v: V, fina def copyUntombed = new SNode(k, v, hc) def kvPair = (k, v) def cachedSize(ct: AnyRef): Int = 1 + def knownSize: Int = 1 def string(lev: Int) = (" " * lev) + "TNode(%s, %s, %x, !)".format(k, v, hc) } - +// List Node, leaf node that handles hash collisions private[collection] final class LNode[K, V](val entries: List[(K, V)], equiv: Equiv[K]) extends MainNode[K, V] { @@ -491,7 +498,7 @@ private[collection] final class LNode[K, V](val entries: List[(K, V)], equiv: Eq def removed(k: K, ct: TrieMap[K, V]): MainNode[K, V] = { val updmap = entries.filterNot(entry => equiv.equiv(entry._1, k)) - if (updmap.size > 1) new LNode(updmap, equiv) + if (updmap.sizeIs > 1) new LNode(updmap, equiv) else { val (k, v) = updmap.iterator.next() new TNode(k, v, ct.computeHash(k)) // create it tombed so that it gets compressed on subsequent accesses @@ -502,14 +509,16 @@ private[collection] final class LNode[K, V](val entries: List[(K, V)], equiv: Eq def cachedSize(ct: AnyRef): Int = entries.size + def knownSize: Int = -1 // shouldn't ever be empty, and the size of a list is not known + def string(lev: Int) = (" " * lev) + "LNode(%s)".format(entries.mkString(", ")) } - +// Ctrie Node, contains bitmap and array of references to branch nodes private[collection] final class CNode[K, V](val bitmap: Int, val array: Array[BasicNode], val gen: Gen) extends CNodeBase[K, V] { // this should only be called from within read-only snapshots - def cachedSize(ct: AnyRef) = { + def cachedSize(ct: AnyRef): Int = { val currsz = READ_SIZE() if (currsz != -1) currsz else { @@ -519,6 +528,8 @@ private[collection] final class CNode[K, V](val bitmap: Int, val array: Array[Ba } } + def knownSize: Int = READ_SIZE() // this should only ever return -1 if unknown + // lends itself towards being parallelizable by choosing // a random starting offset in the array // => if there are concurrent size computations, they start @@ -535,9 +546,9 @@ private[collection] final class CNode[K, V](val bitmap: Int, val array: Array[Ba while (i < array.length) { val pos = (i + offset) % array.length array(pos) match { - case sn: SNode[_, _] => sz += 1 - case in: INode[K, V] => sz += in.cachedSize(ct) - case basicNode => throw new MatchError(basicNode) + case sn: SNode[_, _] => sz += 1 + case in: INode[K, V] @uc => sz += in.cachedSize(ct) + case basicNode => throw new MatchError(basicNode) } i += 1 } @@ -581,8 +592,8 @@ private[collection] final class CNode[K, V](val bitmap: Int, val array: Array[Ba val narr = new Array[BasicNode](len) while (i < len) { arr(i) match { - case in: INode[K, V] => narr(i) = in.copyToGen(ngen, ct) - case bn: BasicNode => narr(i) = bn + case in: INode[K, V] @uc => 
narr(i) = in.copyToGen(ngen, ct) + case bn: BasicNode => narr(i) = bn } i += 1 } @@ -595,7 +606,7 @@ private[collection] final class CNode[K, V](val bitmap: Int, val array: Array[Ba } def toContracted(lev: Int): MainNode[K, V] = if (array.length == 1 && lev > 0) array(0) match { - case sn: SNode[K, V] => sn.copyTombed + case sn: SNode[K, V] @uc => sn.copyTombed case _ => this } else this @@ -613,11 +624,11 @@ private[collection] final class CNode[K, V](val bitmap: Int, val array: Array[Ba while (i < arr.length) { // construct new bitmap val sub = arr(i) sub match { - case in: INode[K, V] => + case in: INode[K, V] @uc => val inodemain = in.gcasRead(ct) assert(inodemain ne null) tmparray(i) = resurrect(in, inodemain) - case sn: SNode[K, V] => + case sn: SNode[K, V] @uc => tmparray(i) = sn case basicNode => throw new MatchError(basicNode) } @@ -629,19 +640,16 @@ private[collection] final class CNode[K, V](val bitmap: Int, val array: Array[Ba def string(lev: Int): String = "CNode %x\n%s".format(bitmap, array.map(_.string(lev + 1)).mkString("\n")) - private def collectLocalElems: Seq[String] = array.flatMap({ - case sn: SNode[K, V] => Iterable.single(sn.kvPair._2.toString) - case in: INode[K, V] => Iterable.single(scala.Predef.augmentString(in.toString).drop(14) + "(" + in.gen + ")") - case basicNode => throw new MatchError(basicNode) - }) - override def toString = { - val elems = collectLocalElems - "CNode(sz: %d; %s)".format(elems.size, elems.sorted.mkString(", ")) + def elems: Seq[String] = array.flatMap { + case sn: SNode[K, V] @uc => Iterable.single(sn.kvPair._2.toString) + case in: INode[K, V] @uc => Iterable.single(augmentString(in.toString).drop(14) + "(" + in.gen + ")") + case basicNode => throw new MatchError(basicNode) + } + f"CNode(sz: ${elems.size}%d; ${elems.sorted.mkString(", ")})" } } - private[concurrent] object CNode { def dual[K, V](x: SNode[K, V], xhc: Int, y: SNode[K, V], yhc: Int, lev: Int, gen: Gen, equiv: Equiv[K]): MainNode[K, V] = if (lev < 35) { @@ -678,6 +686,7 @@ private[concurrent] case class RDCSS_Descriptor[K, V](old: INode[K, V], expected * * For details, see: [[http://lampwww.epfl.ch/~prokopec/ctries-snapshot.pdf]] */ +@SerialVersionUID(-5212455458703321708L) final class TrieMap[K, V] private (r: AnyRef, rtupd: AtomicReferenceFieldUpdater[TrieMap[K, V], AnyRef], hashf: Hashing[K], ef: Equiv[K]) extends scala.collection.mutable.AbstractMap[K, V] with scala.collection.concurrent.Map[K, V] @@ -745,17 +754,17 @@ final class TrieMap[K, V] private (r: AnyRef, rtupd: AtomicReferenceFieldUpdater private[concurrent] def RDCSS_READ_ROOT(abort: Boolean = false): INode[K, V] = { val r = /*READ*/root r match { - case in: INode[K, V] => in - case desc: RDCSS_Descriptor[K, V] => RDCSS_Complete(abort) - case x => throw new MatchError(x) + case in: INode[K, V] @uc => in + case desc: RDCSS_Descriptor[K, V] @uc => RDCSS_Complete(abort) + case x => throw new MatchError(x) } } @tailrec private def RDCSS_Complete(abort: Boolean): INode[K, V] = { val v = /*READ*/root v match { - case in: INode[K, V] => in - case desc: RDCSS_Descriptor[K, V] => + case in: INode[K, V] @uc => in + case desc: RDCSS_Descriptor[K, V] @uc => val RDCSS_Descriptor(ov, exp, nv) = desc if (abort) { if (CAS_ROOT(desc, ov)) ov @@ -789,11 +798,11 @@ final class TrieMap[K, V] private (r: AnyRef, rtupd: AtomicReferenceFieldUpdater if (!r.rec_insert(k, v, hc, 0, null, r.gen, this)) inserthc(k, hc, v) } - @tailrec private def insertifhc(k: K, hc: Int, v: V, cond: AnyRef): Option[V] = { + @tailrec private def 
insertifhc(k: K, hc: Int, v: V, cond: AnyRef, fullEquals: Boolean): Option[V] = { val r = RDCSS_READ_ROOT() - val ret = r.rec_insertif(k, v, hc, cond, 0, null, r.gen, this) - if (ret eq null) insertifhc(k, hc, v, cond) + val ret = r.rec_insertif(k, v, hc, cond, fullEquals, 0, null, r.gen, this) + if (ret eq null) insertifhc(k, hc, v, cond, fullEquals) else ret } @@ -815,15 +824,15 @@ final class TrieMap[K, V] private (r: AnyRef, rtupd: AtomicReferenceFieldUpdater * * @param k the key to remove * @param v the value compare with the value found associated with the key - * @param removeAlways if true, then `k` will be removed whether or not its value matches `v` - * if false, then `k` will ONLY be removed if its value matches `v` + * @param removalPolicy policy deciding whether to remove `k` based on `v` and the + * current value associated with `k` (Always, FullEquals, or ReferenceEq) * @return an Option[V] indicating the previous value */ - @tailrec private def removehc(k: K, v: V, removeAlways: Boolean, hc: Int): Option[V] = { + @tailrec private def removehc(k: K, v: V, removalPolicy: Int, hc: Int): Option[V] = { val r = RDCSS_READ_ROOT() - val res = r.rec_remove(k, v, removeAlways, hc, 0, null, r.gen, this) + val res = r.rec_remove(k, v, removalPolicy, hc, 0, null, r.gen, this) if (res ne null) res - else removehc(k, v, removeAlways, hc) + else removehc(k, v, removalPolicy, hc) } @@ -900,7 +909,7 @@ final class TrieMap[K, V] private (r: AnyRef, rtupd: AtomicReferenceFieldUpdater override def put(key: K, value: V): Option[V] = { val hc = computeHash(key) - insertifhc(key, hc, value, INode.KEY_PRESENT_OR_ABSENT) + insertifhc(key, hc, value, INode.KEY_PRESENT_OR_ABSENT, fullEquals = false /* unused */) } override def update(k: K, v: V): Unit = { @@ -915,7 +924,7 @@ final class TrieMap[K, V] private (r: AnyRef, rtupd: AtomicReferenceFieldUpdater override def remove(k: K): Option[V] = { val hc = computeHash(k) - removehc(k = k, v = null.asInstanceOf[V], removeAlways = true, hc = hc) + removehc(k = k, v = null.asInstanceOf[V], RemovalPolicy.Always, hc = hc) } def subtractOne(k: K) = { @@ -925,7 +934,7 @@ final class TrieMap[K, V] private (r: AnyRef, rtupd: AtomicReferenceFieldUpdater def putIfAbsent(k: K, v: V): Option[V] = { val hc = computeHash(k) - insertifhc(k, hc, v, INode.KEY_ABSENT) + insertifhc(k, hc, v, INode.KEY_ABSENT, fullEquals = false /* unused */) } // TODO once computeIfAbsent is added to concurrent.Map, @@ -950,7 +959,7 @@ final class TrieMap[K, V] private (r: AnyRef, rtupd: AtomicReferenceFieldUpdater lookuphc(k, hc) match { case INodeBase.NO_SUCH_ELEMENT_SENTINEL => val v = op - insertifhc(k, hc, v, INode.KEY_ABSENT) match { + insertifhc(k, hc, v, INode.KEY_ABSENT, fullEquals = false /* unused */) match { case Some(oldValue) => oldValue case None => v } @@ -960,17 +969,27 @@ final class TrieMap[K, V] private (r: AnyRef, rtupd: AtomicReferenceFieldUpdater def remove(k: K, v: V): Boolean = { val hc = computeHash(k) - removehc(k, v, removeAlways = false, hc).nonEmpty + removehc(k, v, RemovalPolicy.FullEquals, hc).nonEmpty + } + + override private[collection] def removeRefEq(k: K, v: V): Boolean = { + val hc = computeHash(k) + removehc(k, v, RemovalPolicy.ReferenceEq, hc).nonEmpty } def replace(k: K, oldvalue: V, newvalue: V): Boolean = { val hc = computeHash(k) - insertifhc(k, hc, newvalue, oldvalue.asInstanceOf[AnyRef]).nonEmpty + insertifhc(k, hc, newvalue, oldvalue.asInstanceOf[AnyRef], fullEquals = true).nonEmpty + } + + override private[collection] def replaceRefEq(k: K, 
oldValue: V, newValue: V): Boolean = { + val hc = computeHash(k) + insertifhc(k, hc, newValue, oldValue.asInstanceOf[AnyRef], fullEquals = false).nonEmpty } def replace(k: K, v: V): Option[V] = { val hc = computeHash(k) - insertifhc(k, hc, v, INode.KEY_PRESENT) + insertifhc(k, hc, v, INode.KEY_PRESENT, fullEquals = false /* unused */) } def iterator: Iterator[(K, V)] = { @@ -1004,16 +1023,14 @@ final class TrieMap[K, V] private (r: AnyRef, rtupd: AtomicReferenceFieldUpdater // END extra overrides /////////////////////////////////////////////////////////////////// - - private def cachedSize() = { - val r = RDCSS_READ_ROOT() - r.cachedSize(this) - } - override def size: Int = if (nonReadOnly) readOnlySnapshot().size - else cachedSize() - override def isEmpty: Boolean = size == 0 + else RDCSS_READ_ROOT().cachedSize(this) + override def knownSize: Int = + if (nonReadOnly) -1 + else RDCSS_READ_ROOT().knownSize(this) + override def isEmpty: Boolean = + (if (nonReadOnly) readOnlySnapshot() else this).sizeIs == 0 // sizeIs checks knownSize override protected[this] def className = "TrieMap" } @@ -1034,6 +1051,19 @@ object TrieMap extends MapFactory[TrieMap] { class MangledHashing[K] extends Hashing[K] { def hash(k: K): Int = scala.util.hashing.byteswap32(k.##) } + + private[concurrent] object RemovalPolicy { + final val Always = 0 + final val FullEquals = 1 + final val ReferenceEq = 2 + + def shouldRemove[V](removalPolicy: Int)(a: V, b: V): Boolean = + removalPolicy match { + case Always => true + case FullEquals => a == b + case ReferenceEq => a.asInstanceOf[AnyRef] eq b.asInstanceOf[AnyRef] + } + } } // non-final as an extension point for parallel collections @@ -1094,11 +1124,9 @@ private[collection] class TrieMapIterator[K, V](var level: Int, private var ct: if (npos < stack(depth).length) { stackpos(depth) = npos stack(depth)(npos) match { - case sn: SNode[K, V] => - current = sn - case in: INode[K, V] => - readin(in) - case basicNode => throw new MatchError(basicNode) + case sn: SNode[K, V] @uc => current = sn + case in: INode[K, V] @uc => readin(in) + case basicNode => throw new MatchError(basicNode) } } else { depth -= 1 diff --git a/src/library/scala/collection/immutable/ArraySeq.scala b/src/library/scala/collection/immutable/ArraySeq.scala index 943ce9935303..ac246bca6f95 100644 --- a/src/library/scala/collection/immutable/ArraySeq.scala +++ b/src/library/scala/collection/immutable/ArraySeq.scala @@ -68,7 +68,15 @@ sealed abstract class ArraySeq[+A] ArraySeq.unsafeWrapArray(dest).asInstanceOf[ArraySeq[B]] } - override def map[B](f: A => B): ArraySeq[B] = iterableFactory.tabulate(length)(i => f(apply(i))) + override def map[B](f: A => B): ArraySeq[B] = { + val a = new Array[Any](size) + var i = 0 + while (i < a.length){ + a(i) = f(apply(i)) + i += 1 + } + ArraySeq.unsafeWrapArray(a).asInstanceOf[ArraySeq[B]] + } override def prepended[B >: A](elem: B): ArraySeq[B] = ArraySeq.unsafeWrapArray(unsafeArray.prepended[Any](elem)).asInstanceOf[ArraySeq[B]] diff --git a/src/library/scala/collection/immutable/HashMap.scala b/src/library/scala/collection/immutable/HashMap.scala index 4ba5208aad5a..7a9231231d32 100644 --- a/src/library/scala/collection/immutable/HashMap.scala +++ b/src/library/scala/collection/immutable/HashMap.scala @@ -20,7 +20,7 @@ import scala.annotation.unchecked.{uncheckedVariance => uV} import scala.collection.Hashing.improve import scala.collection.Stepper.EfficientSplit import scala.collection.generic.DefaultSerializable -import scala.collection.mutable.ReusableBuilder 
+import scala.collection.mutable, mutable.ReusableBuilder import scala.collection.{Iterator, MapFactory, MapFactoryDefaults, Stepper, StepperShape, mutable} import scala.runtime.AbstractFunction2 import scala.runtime.Statics.releaseFence @@ -169,7 +169,7 @@ final class HashMap[K, +V] private[immutable] (private[immutable] val rootNode: if (newNode eq hm.rootNode) hm else newHashMapOrThis(rootNode.concat(hm.rootNode, 0)) } - case hm: collection.mutable.HashMap[K, V] => + case hm: mutable.HashMap[K @unchecked, V @unchecked] => val iter = hm.nodeIterator var current = rootNode while (iter.hasNext) { @@ -254,7 +254,7 @@ final class HashMap[K, +V] private[immutable] (private[immutable] val rootNode: override def equals(that: Any): Boolean = that match { - case map: HashMap[K, V] => (this eq map) || (this.rootNode == map.rootNode) + case map: HashMap[_, _] => (this eq map) || (this.rootNode == map.rootNode) case _ => super.equals(that) } @@ -628,11 +628,11 @@ private final class BitmapIndexedMapNode[K, +V]( if ((dataMap & bitpos) != 0) { val index = indexFrom(dataMap, mask, bitpos) - if (key == getKey(index)) getValue(index) else throw new NoSuchElementException + if (key == getKey(index)) getValue(index) else throw new NoSuchElementException(s"key not found: $key") } else if ((nodeMap & bitpos) != 0) { getNode(indexFrom(nodeMap, mask, bitpos)).apply(key, originalHash, keyHash, shift + BitPartitionSize) } else { - throw new NoSuchElementException + throw new NoSuchElementException(s"key not found: $key") } } @@ -1171,7 +1171,7 @@ private final class BitmapIndexedMapNode[K, +V]( } override def mergeInto[V1 >: V](that: MapNode[K, V1], builder: HashMapBuilder[K, V1], shift: Int)(mergef: ((K, V), (K, V1)) => (K, V1)): Unit = that match { - case bm: BitmapIndexedMapNode[K, V] => + case bm: BitmapIndexedMapNode[K, V] @unchecked => if (size == 0) { that.buildTo(builder) return @@ -1270,13 +1270,13 @@ private final class BitmapIndexedMapNode[K, +V]( index += 1 } } - case _: HashCollisionMapNode[K, V] => + case _: HashCollisionMapNode[_, _] => throw new Exception("Cannot merge BitmapIndexedMapNode with HashCollisionMapNode") } override def equals(that: Any): Boolean = that match { - case node: BitmapIndexedMapNode[K, V] => + case node: BitmapIndexedMapNode[_, _] => (this eq node) || (this.cachedJavaKeySetHashCode == node.cachedJavaKeySetHashCode) && (this.nodeMap == node.nodeMap) && @@ -1307,7 +1307,7 @@ private final class BitmapIndexedMapNode[K, +V]( throw new UnsupportedOperationException("Trie nodes do not support hashing.") override def concat[V1 >: V](that: MapNode[K, V1], shift: Int): BitmapIndexedMapNode[K, V1] = that match { - case bm: BitmapIndexedMapNode[K, V] => + case bm: BitmapIndexedMapNode[K, V] @unchecked => if (size == 0) return bm else if (bm.size == 0 || (bm eq this)) return this else if (bm.size == 1) { @@ -1821,7 +1821,7 @@ private final class HashCollisionMapNode[K, +V ]( releaseFence() - private[immutable] def indexOf(key: K): Int = { + private[immutable] def indexOf(key: Any): Int = { val iter = content.iterator var i = 0 while (iter.hasNext) { @@ -1944,7 +1944,7 @@ private final class HashCollisionMapNode[K, +V ]( override def equals(that: Any): Boolean = that match { - case node: HashCollisionMapNode[K, V] => + case node: HashCollisionMapNode[_, _] => (this eq node) || (this.hash == node.hash) && (this.content.length == node.content.length) && { diff --git a/src/library/scala/collection/immutable/HashSet.scala b/src/library/scala/collection/immutable/HashSet.scala index 
67bcb2924fda..1785ceb2c0ea 100644 --- a/src/library/scala/collection/immutable/HashSet.scala +++ b/src/library/scala/collection/immutable/HashSet.scala @@ -176,7 +176,7 @@ final class HashSet[A] private[immutable](private[immutable] val rootNode: Bitma override def equals(that: Any): Boolean = that match { - case set: HashSet[A] => (this eq set) || (this.rootNode == set.rootNode) + case set: HashSet[_] => (this eq set) || (this.rootNode == set.rootNode) case _ => super.equals(that) } @@ -1377,7 +1377,7 @@ private final class BitmapIndexedSetNode[A]( override def equals(that: Any): Boolean = that match { - case node: BitmapIndexedSetNode[A] => + case node: BitmapIndexedSetNode[_] => (this eq node) || (this.cachedJavaKeySetHashCode == node.cachedJavaKeySetHashCode) && (this.nodeMap == node.nodeMap) && @@ -1805,7 +1805,7 @@ private final class HashCollisionSetNode[A](val originalHash: Int, val hash: Int override def equals(that: Any): Boolean = that match { - case node: HashCollisionSetNode[A] => + case node: HashCollisionSetNode[_] => (this eq node) || (this.hash == node.hash) && (this.content.size == node.content.size) && diff --git a/src/library/scala/collection/immutable/IntMap.scala b/src/library/scala/collection/immutable/IntMap.scala index f3055deb0812..240821b11460 100644 --- a/src/library/scala/collection/immutable/IntMap.scala +++ b/src/library/scala/collection/immutable/IntMap.scala @@ -323,9 +323,9 @@ sealed abstract class IntMap[+T] extends AbstractMap[Int, T] case IntMap.Nil => IntMap.Tip(key, value) } - def map[V2](f: ((Int, T)) => (Int, V2)): IntMap[V2] = intMapFrom(new View.Map(toIterable, f)) + def map[V2](f: ((Int, T)) => (Int, V2)): IntMap[V2] = intMapFrom(new View.Map(this, f)) - def flatMap[V2](f: ((Int, T)) => IterableOnce[(Int, V2)]): IntMap[V2] = intMapFrom(new View.FlatMap(toIterable, f)) + def flatMap[V2](f: ((Int, T)) => IterableOnce[(Int, V2)]): IntMap[V2] = intMapFrom(new View.FlatMap(this, f)) override def concat[V1 >: T](that: collection.IterableOnce[(Int, V1)]): IntMap[V1] = super.concat(that).asInstanceOf[IntMap[V1]] // Already has correct type but not declared as such diff --git a/src/library/scala/collection/immutable/LazyList.scala b/src/library/scala/collection/immutable/LazyList.scala index 58ff4a8970a2..db0e9d180b22 100644 --- a/src/library/scala/collection/immutable/LazyList.scala +++ b/src/library/scala/collection/immutable/LazyList.scala @@ -68,7 +68,7 @@ import scala.runtime.Statics * val fibs: LazyList[BigInt] = * BigInt(0) #:: BigInt(1) #:: * fibs.zip(fibs.tail).map{ n => - * println(s"Adding ${n._1} and ${n._2}") + * println(s"Adding \${n._1} and \${n._2}") * n._1 + n._2 * } * fibs.take(5).foreach(println) diff --git a/src/library/scala/collection/immutable/List.scala b/src/library/scala/collection/immutable/List.scala index cc39450f3c95..dc117a0bdb72 100644 --- a/src/library/scala/collection/immutable/List.scala +++ b/src/library/scala/collection/immutable/List.scala @@ -614,6 +614,39 @@ sealed abstract class List[+A] } } + // TODO: uncomment once bincompat allows (reference: scala/scala#9365) + /* + // Override for performance: traverse only as much as needed + // and share tail when nothing needs to be filtered out anymore + override def diff[B >: A](that: collection.Seq[B]): AnyRef = { + if (that.isEmpty || this.isEmpty) this + else if (tail.isEmpty) if (that.contains(head)) Nil else this + else { + val occ = occCounts(that) + val b = new ListBuffer[A]() + @tailrec + def rec(remainder: List[A]): List[A] = { + if(occ.isEmpty) 
b.prependToList(remainder) + else remainder match { + case Nil => b.result() + case head :: next => { + occ.updateWith(head){ + case None => { + b.append(head) + None + } + case Some(1) => None + case Some(n) => Some(n - 1) + } + rec(next) + } + } + } + rec(this) + } + } + */ + } // Internal code that mutates `next` _must_ call `Statics.releaseFence()` if either immediately, or diff --git a/src/library/scala/collection/immutable/LongMap.scala b/src/library/scala/collection/immutable/LongMap.scala index aed44f57a966..c418dc7616ac 100644 --- a/src/library/scala/collection/immutable/LongMap.scala +++ b/src/library/scala/collection/immutable/LongMap.scala @@ -63,9 +63,9 @@ object LongMap { private[immutable] case object Nil extends LongMap[Nothing] { // Important, don't remove this! See IntMap for explanation. override def equals(that : Any) = that match { - case (that: AnyRef) if (this eq that) => true - case (that: LongMap[_]) => false // The only empty LongMaps are eq Nil - case that => super.equals(that) + case _: this.type => true + case _: LongMap[_] => false // The only empty LongMaps are eq Nil + case _ => super.equals(that) } } diff --git a/src/library/scala/collection/immutable/Queue.scala b/src/library/scala/collection/immutable/Queue.scala index ae90826cd2bf..eb12f6fd8b14 100644 --- a/src/library/scala/collection/immutable/Queue.scala +++ b/src/library/scala/collection/immutable/Queue.scala @@ -97,6 +97,11 @@ sealed class Queue[+A] protected(protected val in: List[A], protected val out: L else if (in.nonEmpty) new Queue(Nil, in.reverse.tail) else throw new NoSuchElementException("tail on empty queue") + override def last: A = + if (in.nonEmpty) in.head + else if (out.nonEmpty) out.last + else throw new NoSuchElementException("last on empty queue") + /* This is made to avoid inefficient implementation of iterator. */ override def forall(p: A => Boolean): Boolean = in.forall(p) && out.forall(p) @@ -117,7 +122,7 @@ sealed class Queue[+A] protected(protected val in: List[A], protected val out: L override def appendedAll[B >: A](that: scala.collection.IterableOnce[B]): Queue[B] = { val newIn = that match { case that: Queue[B] => that.in ++ (that.out reverse_::: this.in) - case that: List[A] => that reverse_::: this.in + case that: List[B] => that reverse_::: this.in case _ => var result: List[B] = this.in val iter = that.iterator diff --git a/src/library/scala/collection/immutable/SeqMap.scala b/src/library/scala/collection/immutable/SeqMap.scala index 8a899ee535c7..013697d64cce 100644 --- a/src/library/scala/collection/immutable/SeqMap.scala +++ b/src/library/scala/collection/immutable/SeqMap.scala @@ -14,7 +14,7 @@ package scala package collection package immutable -import scala.collection.mutable.Builder +import scala.collection.mutable.{Builder, ReusableBuilder} /** * A generic trait for ordered immutable maps. 
Concrete classes have to provide @@ -48,7 +48,7 @@ object SeqMap extends MapFactory[SeqMap] { case _ => (newBuilder[K, V] ++= it).result() } - def newBuilder[K, V]: Builder[(K, V), SeqMap[K, V]] = VectorMap.newBuilder + def newBuilder[K, V]: Builder[(K, V), SeqMap[K, V]] = new SeqMapBuilderImpl @SerialVersionUID(3L) private object EmptySeqMap extends SeqMap[Any, Nothing] with Serializable { @@ -220,6 +220,55 @@ object SeqMap extends MapFactory[SeqMap] { f(key3, value3) f(key4, value4) } - hashCode + + private[SeqMap] def buildTo[V1 >: V](builder: Builder[(K, V1), SeqMap[K, V1]]): builder.type = + builder.addOne((key1, value1)).addOne((key2, value2)).addOne((key3, value3)).addOne((key4, value4)) + } + + private final class SeqMapBuilderImpl[K, V] extends ReusableBuilder[(K, V), SeqMap[K, V]] { + private[this] var elems: SeqMap[K, V] = SeqMap.empty + private[this] var switchedToVectorMapBuilder: Boolean = false + private[this] var vectorMapBuilder: VectorMapBuilder[K, V] = _ + + override def clear(): Unit = { + elems = SeqMap.empty + if (vectorMapBuilder != null) { + vectorMapBuilder.clear() + } + switchedToVectorMapBuilder = false + } + + override def result(): SeqMap[K, V] = + if (switchedToVectorMapBuilder) vectorMapBuilder.result() else elems + + def addOne(elem: (K, V)) = { + if (switchedToVectorMapBuilder) { + vectorMapBuilder.addOne(elem) + } else if (elems.size < 4) { + elems = elems + elem + } else { + // assert(elems.size == 4) + if (elems.contains(elem._1)) { + elems = elems + elem // will not increase the size of the map + } else { + switchedToVectorMapBuilder = true + if (vectorMapBuilder == null) { + vectorMapBuilder = new VectorMapBuilder + } + elems.asInstanceOf[SeqMap4[K, V]].buildTo(vectorMapBuilder) + vectorMapBuilder.addOne(elem) + } + } + + this + } + + override def addAll(xs: IterableOnce[(K, V)]): this.type = + if (switchedToVectorMapBuilder) { + vectorMapBuilder.addAll(xs) + this + } else { + super.addAll(xs) + } } } diff --git a/src/library/scala/collection/immutable/Set.scala b/src/library/scala/collection/immutable/Set.scala index a8562a878f96..f07eb66991c8 100644 --- a/src/library/scala/collection/immutable/Set.scala +++ b/src/library/scala/collection/immutable/Set.scala @@ -57,7 +57,7 @@ trait SetOps[A, +CC[X], +C <: SetOps[A, CC, C]] @`inline` final override def - (elem: A): C = excl(elem) def diff(that: collection.Set[A]): C = - toIterable.foldLeft(empty)((result, elem) => if (that contains elem) result else result + elem) + foldLeft(empty)((result, elem) => if (that contains elem) result else result + elem) /** Creates a new $coll from this $coll by removing all elements of another * collection. 
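The new private `SeqMapBuilderImpl` in the `SeqMap.scala` hunk above keeps up to four entries in the compact fixed-size `SeqMap` representations (`SeqMap4` is the largest, as shown) and only hands everything over to a `VectorMapBuilder` once a fifth distinct key arrives. A minimal sketch, using only the public `SeqMap` API, of how the builder is meant to behave; the assertions rely on `SeqMap`'s documented insertion-order guarantee:

```scala
import scala.collection.immutable.SeqMap

// Four or fewer distinct keys: the builder just grows the compact SeqMap forms.
val small = SeqMap.newBuilder[String, Int]
  .addOne("a" -> 1)
  .addOne("b" -> 2)
  .result()

// A fifth distinct key makes the builder copy its four entries into a
// VectorMap builder and delegate to it from then on; either way the
// insertion order of keys is preserved.
val large = SeqMap.newBuilder[Int, String]
  .addAll((1 to 5).map(i => i -> i.toString))
  .result()

assert(small.keys.toList == List("a", "b"))
assert(large.keys.toList == List(1, 2, 3, 4, 5))
```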
diff --git a/src/library/scala/collection/immutable/TreeMap.scala b/src/library/scala/collection/immutable/TreeMap.scala index 81165b798580..a0f0e8692f97 100644 --- a/src/library/scala/collection/immutable/TreeMap.scala +++ b/src/library/scala/collection/immutable/TreeMap.scala @@ -140,7 +140,7 @@ final class TreeMap[K, +V] private (private val tree: RB.Tree[K, V])(implicit va override def concat[V1 >: V](that: collection.IterableOnce[(K, V1)]): TreeMap[K, V1] = newMapOrSelf(that match { - case tm: TreeMap[K, V] if ordering == tm.ordering => + case tm: TreeMap[K, V] @unchecked if ordering == tm.ordering => RB.union(tree, tm.tree) case ls: LinearSeq[(K,V1)] => if (ls.isEmpty) tree //to avoid the creation of the adder @@ -283,7 +283,7 @@ final class TreeMap[K, +V] private (private val tree: RB.Tree[K, V])(implicit va } } override def equals(obj: Any): Boolean = obj match { - case that: TreeMap[K, V] if ordering == that.ordering => RB.entriesEqual(tree, that.tree) + case that: TreeMap[K @unchecked, _] if ordering == that.ordering => RB.entriesEqual(tree, that.tree) case _ => super.equals(obj) } diff --git a/src/library/scala/collection/immutable/TreeSet.scala b/src/library/scala/collection/immutable/TreeSet.scala index 51e55782b19f..e51479ae657b 100644 --- a/src/library/scala/collection/immutable/TreeSet.scala +++ b/src/library/scala/collection/immutable/TreeSet.scala @@ -221,7 +221,7 @@ final class TreeSet[A] private[immutable] (private[immutable] val tree: RB.Tree[ } override def equals(obj: Any): Boolean = obj match { - case that: TreeSet[A] if ordering == that.ordering => RB.keysEqual(tree, that.tree) + case that: TreeSet[A @unchecked] if ordering == that.ordering => RB.keysEqual(tree, that.tree) case _ => super.equals(obj) } @@ -278,7 +278,7 @@ object TreeSet extends SortedIterableFactory[TreeSet] { case ts: TreeSet[A] if ts.ordering == ordering => if (tree eq null) tree = ts.tree else tree = RB.union(beforePublish(tree), ts.tree)(ordering) - case ts: TreeMap[A, _] if ts.ordering == ordering => + case ts: TreeMap[A @unchecked, _] if ts.ordering == ordering => if (tree eq null) tree = ts.tree0 else tree = RB.union(beforePublish(tree), ts.tree0)(ordering) case _ => diff --git a/src/library/scala/collection/immutable/Vector.scala b/src/library/scala/collection/immutable/Vector.scala index cc91b68902d7..9f76576f2dba 100644 --- a/src/library/scala/collection/immutable/Vector.scala +++ b/src/library/scala/collection/immutable/Vector.scala @@ -43,7 +43,7 @@ object Vector extends StrictOptimizedSeqFactory[Vector] { if (knownSize == 0) empty[E] else if (knownSize > 0 && knownSize <= WIDTH) { val a1: Arr1 = it match { - case as: ArraySeq.ofRef[_] if as.elemTag == classOf[AnyRef] => + case as: ArraySeq.ofRef[_] if as.elemTag.runtimeClass == classOf[AnyRef] => as.unsafeArray.asInstanceOf[Arr1] case it: Iterable[E] => val a1 = new Arr1(knownSize) diff --git a/src/library/scala/collection/mutable/AnyRefMap.scala b/src/library/scala/collection/mutable/AnyRefMap.scala index 2c65c8c7a5f1..c02a10770696 100644 --- a/src/library/scala/collection/mutable/AnyRefMap.scala +++ b/src/library/scala/collection/mutable/AnyRefMap.scala @@ -393,7 +393,7 @@ class AnyRefMap[K <: AnyRef, V] private[collection] (defaultEntry: K => V, initi } @deprecated("Consider requiring an immutable Map or fall back to Map.concat", "2.13.0") - override def + [V1 >: V](kv: (K, V1)): AnyRefMap[K, V1] = AnyRefMap.from(new View.Appended(toIterable, kv)) + override def + [V1 >: V](kv: (K, V1)): AnyRefMap[K, V1] = AnyRefMap.from(new 
View.Appended(this, kv)) @deprecated("Use ++ with an explicit collection argument instead of + with varargs", "2.13.0") override def + [V1 >: V](elem1: (K, V1), elem2: (K, V1), elems: (K, V1)*): AnyRefMap[K, V1] = { @@ -477,9 +477,9 @@ class AnyRefMap[K <: AnyRef, V] private[collection] (defaultEntry: K => V, initi // The implicit dummy parameter is necessary to distinguish these methods from the base methods they overload (not override) def map[K2 <: AnyRef, V2](f: ((K, V)) => (K2, V2))(implicit dummy: DummyImplicit): AnyRefMap[K2, V2] = - AnyRefMap.from(new View.Map(toIterable, f)) + AnyRefMap.from(new View.Map(this, f)) def flatMap[K2 <: AnyRef, V2](f: ((K, V)) => IterableOnce[(K2, V2)])(implicit dummy: DummyImplicit): AnyRefMap[K2, V2] = - AnyRefMap.from(new View.FlatMap(toIterable, f)) + AnyRefMap.from(new View.FlatMap(this, f)) def collect[K2 <: AnyRef, V2](pf: PartialFunction[(K, V), (K2, V2)])(implicit dummy: DummyImplicit): AnyRefMap[K2, V2] = strictOptimizedCollect(AnyRefMap.newBuilder[K2, V2], pf) diff --git a/src/library/scala/collection/mutable/ArrayBuffer.scala b/src/library/scala/collection/mutable/ArrayBuffer.scala index 269d564c4c37..5fb2357996e1 100644 --- a/src/library/scala/collection/mutable/ArrayBuffer.scala +++ b/src/library/scala/collection/mutable/ArrayBuffer.scala @@ -39,6 +39,7 @@ import scala.util.chaining._ * @define mayNotTerminateInf * @define willNotTerminateInf */ +@SerialVersionUID(-1582447879429021880L) class ArrayBuffer[A] private (initialElements: Array[AnyRef], initialSize: Int) extends AbstractBuffer[A] with IndexedBuffer[A] @@ -51,6 +52,8 @@ class ArrayBuffer[A] private (initialElements: Array[AnyRef], initialSize: Int) def this(initialSize: Int) = this(new Array[AnyRef](initialSize max 1), 0) + @transient private[this] var mutationCount: Int = 0 + protected[collection] var array: Array[AnyRef] = initialElements protected var size0 = initialSize @@ -62,14 +65,16 @@ class ArrayBuffer[A] private (initialElements: Array[AnyRef], initialSize: Int) override def knownSize: Int = super[IndexedSeqOps].knownSize /** Ensure that the internal array has at least `n` cells. */ - protected def ensureSize(n: Int): Unit = + protected def ensureSize(n: Int): Unit = { array = ArrayBuffer.ensureSize(array, size0, n) + } def sizeHint(size: Int): Unit = if(size > length && size >= 1) ensureSize(size) /** Reduce length to `n`, nulling out all dropped elements */ private def reduceToSize(n: Int): Unit = { + mutationCount += 1 Arrays.fill(array, n, size0, null) size0 = n } @@ -79,7 +84,9 @@ class ArrayBuffer[A] private (initialElements: Array[AnyRef], initialSize: Int) * which may replace the array by a shorter one. * This allows releasing some unused memory. */ - def trimToSize(): Unit = resize(length) + def trimToSize(): Unit = { + resize(length) + } /** Trims the `array` buffer size down to either a power of 2 * or Int.MaxValue while keeping first `requiredLength` elements. 
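The `mutationCount` field introduced at the top of this `ArrayBuffer` hunk is incremented by every structural mutation (`update`, `addOne`, `insert`, and so on in the hunks that follow) and is passed to the buffer's view as a `() => Int`, so that iterators created from the view can fail fast instead of silently reading a buffer that changed underneath them. A small sketch of the intended behaviour, assuming the checked iterator raises an exception when the counts diverge; the exact exception type comes from `MutationTracker`, which is not part of this diff:

```scala
import scala.collection.mutable.ArrayBuffer

val buf = ArrayBuffer(1, 2, 3)
val it  = buf.view.iterator // the checked iterator remembers the current mutationCount

buf += 4                    // addOne bumps mutationCount

// With this patch applied, hasNext re-reads the count through the function
// passed to the view and refuses to continue once it no longer matches the
// remembered value (presumably by throwing ConcurrentModificationException).
it.hasNext
```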
@@ -99,12 +106,14 @@ class ArrayBuffer[A] private (initialElements: Array[AnyRef], initialSize: Int) def update(@deprecatedName("n", "2.13.0") index: Int, elem: A): Unit = { checkWithinBounds(index, index + 1) + mutationCount += 1 array(index) = elem.asInstanceOf[AnyRef] } def length = size0 - override def view: ArrayBufferView[A] = new ArrayBufferView(array, size0) + // TODO: return `IndexedSeqView` rather than `ArrayBufferView` + override def view: ArrayBufferView[A] = new ArrayBufferView(this, () => mutationCount) override def iterableFactory: SeqFactory[ArrayBuffer] = ArrayBuffer @@ -125,10 +134,11 @@ class ArrayBuffer[A] private (initialElements: Array[AnyRef], initialSize: Int) } def addOne(elem: A): this.type = { - val i = size0 - ensureSize(size0 + 1) - size0 += 1 - this(i) = elem + mutationCount += 1 + val oldSize = size0 + ensureSize(oldSize + 1) + size0 = oldSize + 1 + this(oldSize) = elem this } @@ -136,9 +146,13 @@ class ArrayBuffer[A] private (initialElements: Array[AnyRef], initialSize: Int) override def addAll(elems: IterableOnce[A]): this.type = { elems match { case elems: ArrayBuffer[_] => - ensureSize(length + elems.length) - Array.copy(elems.array, 0, array, length, elems.length) - size0 = length + elems.length + val elemsLength = elems.size0 + if (elemsLength > 0) { + mutationCount += 1 + ensureSize(length + elemsLength) + Array.copy(elems.array, 0, array, length, elemsLength) + size0 = length + elemsLength + } case _ => super.addAll(elems) } this @@ -146,6 +160,7 @@ class ArrayBuffer[A] private (initialElements: Array[AnyRef], initialSize: Int) def insert(@deprecatedName("n", "2.13.0") index: Int, elem: A): Unit = { checkWithinBounds(index, index) + mutationCount += 1 ensureSize(size0 + 1) Array.copy(array, index, array, index + 1, size0 - index) size0 += 1 @@ -162,22 +177,23 @@ class ArrayBuffer[A] private (initialElements: Array[AnyRef], initialSize: Int) elems match { case elems: collection.Iterable[A] => val elemsLength = elems.size - ensureSize(length + elemsLength) - Array.copy(array, index, array, index + elemsLength, size0 - index) - size0 = size0 + elemsLength - elems match { - case elems: ArrayBuffer[_] => - Array.copy(elems.array, 0, array, index, elemsLength) - case _ => - var i = 0 - val it = elems.iterator - while (i < elemsLength) { - this(index + i) = it.next() - i += 1 - } + if (elemsLength > 0) { + mutationCount += 1 + val len = size0 + val newSize = len + elemsLength + ensureSize(newSize) + Array.copy(array, index, array, index + elemsLength, len - index) + // if `elems eq this`, this copy is safe because + // - `elems.array eq this.array` + // - we didn't overwrite the values being inserted after moving them in + // the previous line + // - `copyElemsToArray` will call `System.arraycopy` + // - `System.arraycopy` will effectively "read" all the values before + // overwriting any of them when two arrays are the same reference + IterableOnce.copyElemsToArray(elems, array.asInstanceOf[Array[Any]], index, elemsLength) + size0 = newSize // update size AFTER the copy, in case we're inserting a proxy } - case _ => - insertAll(index, ArrayBuffer.from(elems)) + case _ => insertAll(index, ArrayBuffer.from(elems)) } } @@ -230,7 +246,10 @@ class ArrayBuffer[A] private (initialElements: Array[AnyRef], initialSize: Int) * @return modified input $coll sorted according to the ordering `ord`.
*/ override def sortInPlace[B >: A]()(implicit ord: Ordering[B]): this.type = { - if (length > 1) scala.util.Sorting.stableSort(array.asInstanceOf[Array[B]], 0, length) + if (length > 1) { + mutationCount += 1 + scala.util.Sorting.stableSort(array.asInstanceOf[Array[B]], 0, length) + } this } } @@ -295,8 +314,48 @@ object ArrayBuffer extends StrictOptimizedSeqFactory[ArrayBuffer] { } } -final class ArrayBufferView[A](val array: Array[AnyRef], val length: Int) extends AbstractIndexedSeqView[A] { - @throws[ArrayIndexOutOfBoundsException] - def apply(n: Int) = if (n < length) array(n).asInstanceOf[A] else throw new IndexOutOfBoundsException(s"$n is out of bounds (min 0, max ${length - 1})") +// TODO: use `CheckedIndexedSeqView.Id` once we can change the return type of `ArrayBuffer#view` +final class ArrayBufferView[A] private[mutable](underlying: ArrayBuffer[A], mutationCount: () => Int) + extends AbstractIndexedSeqView[A] { + @deprecated("never intended to be public; call ArrayBuffer#view instead", since = "2.13.7") + def this(array: Array[AnyRef], length: Int) = { + // this won't actually track mutation, but it would be a pain to have the implementation + // check if we have a method to get the current mutation count or not on every method and + // change what it does based on that. hopefully no one ever calls this. + this({ + val _array = array + val _length = length + new ArrayBuffer[A](0) { + this.array = _array + this.size0 = _length + } + }, () => 0) + } + + @deprecated("never intended to be public", since = "2.13.7") + def array: Array[AnyRef] = underlying.toArray[Any].asInstanceOf[Array[AnyRef]] + + @throws[IndexOutOfBoundsException] + def apply(n: Int): A = underlying(n) + def length: Int = underlying.length override protected[this] def className = "ArrayBufferView" + + // we could inherit all these from `CheckedIndexedSeqView`, except this class is public + override def iterator: Iterator[A] = new CheckedIndexedSeqView.CheckedIterator(this, mutationCount()) + override def reverseIterator: Iterator[A] = new CheckedIndexedSeqView.CheckedReverseIterator(this, mutationCount()) + + override def appended[B >: A](elem: B): IndexedSeqView[B] = new CheckedIndexedSeqView.Appended(this, elem)(mutationCount) + override def prepended[B >: A](elem: B): IndexedSeqView[B] = new CheckedIndexedSeqView.Prepended(elem, this)(mutationCount) + override def take(n: Int): IndexedSeqView[A] = new CheckedIndexedSeqView.Take(this, n)(mutationCount) + override def takeRight(n: Int): IndexedSeqView[A] = new CheckedIndexedSeqView.TakeRight(this, n)(mutationCount) + override def drop(n: Int): IndexedSeqView[A] = new CheckedIndexedSeqView.Drop(this, n)(mutationCount) + override def dropRight(n: Int): IndexedSeqView[A] = new CheckedIndexedSeqView.DropRight(this, n)(mutationCount) + override def map[B](f: A => B): IndexedSeqView[B] = new CheckedIndexedSeqView.Map(this, f)(mutationCount) + override def reverse: IndexedSeqView[A] = new CheckedIndexedSeqView.Reverse(this)(mutationCount) + override def slice(from: Int, until: Int): IndexedSeqView[A] = new CheckedIndexedSeqView.Slice(this, from, until)(mutationCount) + override def tapEach[U](f: A => U): IndexedSeqView[A] = new CheckedIndexedSeqView.Map(this, { (a: A) => f(a); a})(mutationCount) + + override def concat[B >: A](suffix: IndexedSeqView.SomeIndexedSeqOps[B]): IndexedSeqView[B] = new CheckedIndexedSeqView.Concat(this, suffix)(mutationCount) + override def appendedAll[B >: A](suffix: IndexedSeqView.SomeIndexedSeqOps[B]): IndexedSeqView[B] = new 
CheckedIndexedSeqView.Concat(this, suffix)(mutationCount) + override def prependedAll[B >: A](prefix: IndexedSeqView.SomeIndexedSeqOps[B]): IndexedSeqView[B] = new CheckedIndexedSeqView.Concat(prefix, this)(mutationCount) } diff --git a/src/library/scala/collection/mutable/Builder.scala b/src/library/scala/collection/mutable/Builder.scala index 9a4f41df6f7f..0ecc06dff061 100644 --- a/src/library/scala/collection/mutable/Builder.scala +++ b/src/library/scala/collection/mutable/Builder.scala @@ -68,9 +68,11 @@ trait Builder[-A, +To] extends Growable[A] { self => * an IndexedSeqLike, then sizes larger * than collection's size are reduced. */ + // should probably be `boundingColl: IterableOnce[_]`, but binary compatibility final def sizeHintBounded(size: Int, boundingColl: scala.collection.Iterable[_]): Unit = { - if (boundingColl.knownSize != -1) { - sizeHint(scala.math.min(boundingColl.knownSize, size)) + val s = boundingColl.knownSize + if (s != -1) { + sizeHint(scala.math.min(s, size)) } } diff --git a/src/library/scala/collection/mutable/CheckedIndexedSeqView.scala b/src/library/scala/collection/mutable/CheckedIndexedSeqView.scala new file mode 100644 index 000000000000..b9598904375d --- /dev/null +++ b/src/library/scala/collection/mutable/CheckedIndexedSeqView.scala @@ -0,0 +1,117 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package collection +package mutable + +private[mutable] trait CheckedIndexedSeqView[+A] extends IndexedSeqView[A] { + protected val mutationCount: () => Int + + override def iterator: Iterator[A] = new CheckedIndexedSeqView.CheckedIterator(this, mutationCount()) + override def reverseIterator: Iterator[A] = new CheckedIndexedSeqView.CheckedReverseIterator(this, mutationCount()) + + override def appended[B >: A](elem: B): IndexedSeqView[B] = new CheckedIndexedSeqView.Appended(this, elem)(mutationCount) + override def prepended[B >: A](elem: B): IndexedSeqView[B] = new CheckedIndexedSeqView.Prepended(elem, this)(mutationCount) + override def take(n: Int): IndexedSeqView[A] = new CheckedIndexedSeqView.Take(this, n)(mutationCount) + override def takeRight(n: Int): IndexedSeqView[A] = new CheckedIndexedSeqView.TakeRight(this, n)(mutationCount) + override def drop(n: Int): IndexedSeqView[A] = new CheckedIndexedSeqView.Drop(this, n)(mutationCount) + override def dropRight(n: Int): IndexedSeqView[A] = new CheckedIndexedSeqView.DropRight(this, n)(mutationCount) + override def map[B](f: A => B): IndexedSeqView[B] = new CheckedIndexedSeqView.Map(this, f)(mutationCount) + override def reverse: IndexedSeqView[A] = new CheckedIndexedSeqView.Reverse(this)(mutationCount) + override def slice(from: Int, until: Int): IndexedSeqView[A] = new CheckedIndexedSeqView.Slice(this, from, until)(mutationCount) + override def tapEach[U](f: A => U): IndexedSeqView[A] = new CheckedIndexedSeqView.Map(this, { (a: A) => f(a); a})(mutationCount) + + override def concat[B >: A](suffix: IndexedSeqView.SomeIndexedSeqOps[B]): IndexedSeqView[B] = new CheckedIndexedSeqView.Concat(this, suffix)(mutationCount) + override def appendedAll[B >: A](suffix: IndexedSeqView.SomeIndexedSeqOps[B]): IndexedSeqView[B] = new CheckedIndexedSeqView.Concat(this, suffix)(mutationCount) + override def prependedAll[B >: A](prefix: 
IndexedSeqView.SomeIndexedSeqOps[B]): IndexedSeqView[B] = new CheckedIndexedSeqView.Concat(prefix, this)(mutationCount) +} + +private[mutable] object CheckedIndexedSeqView { + import IndexedSeqView.SomeIndexedSeqOps + + @SerialVersionUID(3L) + private[mutable] class CheckedIterator[A](self: IndexedSeqView[A], mutationCount: => Int) + extends IndexedSeqView.IndexedSeqViewIterator[A](self) { + private[this] val expectedCount = mutationCount + override def hasNext: Boolean = { + MutationTracker.checkMutationsForIteration(expectedCount, mutationCount) + super.hasNext + } + } + + @SerialVersionUID(3L) + private[mutable] class CheckedReverseIterator[A](self: IndexedSeqView[A], mutationCount: => Int) + extends IndexedSeqView.IndexedSeqViewReverseIterator[A](self) { + private[this] val expectedCount = mutationCount + override def hasNext: Boolean = { + MutationTracker.checkMutationsForIteration(expectedCount, mutationCount) + super.hasNext + } + } + + @SerialVersionUID(3L) + class Id[+A](underlying: SomeIndexedSeqOps[A])(protected val mutationCount: () => Int) + extends IndexedSeqView.Id(underlying) with CheckedIndexedSeqView[A] + + @SerialVersionUID(3L) + class Appended[+A](underlying: SomeIndexedSeqOps[A], elem: A)(protected val mutationCount: () => Int) + extends IndexedSeqView.Appended(underlying, elem) with CheckedIndexedSeqView[A] + + @SerialVersionUID(3L) + class Prepended[+A](elem: A, underlying: SomeIndexedSeqOps[A])(protected val mutationCount: () => Int) + extends IndexedSeqView.Prepended(elem, underlying) with CheckedIndexedSeqView[A] + + @SerialVersionUID(3L) + class Concat[A](prefix: SomeIndexedSeqOps[A], suffix: SomeIndexedSeqOps[A])(protected val mutationCount: () => Int) + extends IndexedSeqView.Concat[A](prefix, suffix) with CheckedIndexedSeqView[A] + + @SerialVersionUID(3L) + class Take[A](underlying: SomeIndexedSeqOps[A], n: Int)(protected val mutationCount: () => Int) + extends IndexedSeqView.Take(underlying, n) with CheckedIndexedSeqView[A] + + @SerialVersionUID(3L) + class TakeRight[A](underlying: SomeIndexedSeqOps[A], n: Int)(protected val mutationCount: () => Int) + extends IndexedSeqView.TakeRight(underlying, n) with CheckedIndexedSeqView[A] + + @SerialVersionUID(3L) + class Drop[A](underlying: SomeIndexedSeqOps[A], n: Int)(protected val mutationCount: () => Int) + extends IndexedSeqView.Drop[A](underlying, n) with CheckedIndexedSeqView[A] + + @SerialVersionUID(3L) + class DropRight[A](underlying: SomeIndexedSeqOps[A], n: Int)(protected val mutationCount: () => Int) + extends IndexedSeqView.DropRight[A](underlying, n) with CheckedIndexedSeqView[A] + + @SerialVersionUID(3L) + class Map[A, B](underlying: SomeIndexedSeqOps[A], f: A => B)(protected val mutationCount: () => Int) + extends IndexedSeqView.Map(underlying, f) with CheckedIndexedSeqView[B] + + @SerialVersionUID(3L) + class Reverse[A](underlying: SomeIndexedSeqOps[A])(protected val mutationCount: () => Int) + extends IndexedSeqView.Reverse[A](underlying) with CheckedIndexedSeqView[A] { + override def reverse: IndexedSeqView[A] = underlying match { + case x: IndexedSeqView[A] => x + case _ => super.reverse + } + } + + @SerialVersionUID(3L) + class Slice[A](underlying: SomeIndexedSeqOps[A], from: Int, until: Int)(protected val mutationCount: () => Int) + extends AbstractIndexedSeqView[A] with CheckedIndexedSeqView[A] { + protected val lo = from max 0 + protected val hi = (until max 0) min underlying.length + protected val len = (hi - lo) max 0 + @throws[IndexOutOfBoundsException] + def apply(i: Int): A = underlying(lo 
+ i) + def length: Int = len + } +} diff --git a/src/library/scala/collection/mutable/CollisionProofHashMap.scala b/src/library/scala/collection/mutable/CollisionProofHashMap.scala index 639f59c3b190..f7619cd1384f 100644 --- a/src/library/scala/collection/mutable/CollisionProofHashMap.scala +++ b/src/library/scala/collection/mutable/CollisionProofHashMap.scala @@ -13,6 +13,7 @@ package scala.collection package mutable +import scala.{unchecked => uc} import scala.annotation.{implicitNotFound, tailrec, unused} import scala.annotation.unchecked.uncheckedVariance import scala.collection.generic.DefaultSerializationProxy @@ -72,8 +73,8 @@ final class CollisionProofHashMap[K, V](initialCapacity: Int, loadFactor: Double def get(key: K): Option[V] = findNode(key) match { case null => None case nd => Some(nd match { - case nd: LLNode => nd.value - case nd: RBNode => nd.value + case nd: LLNode @uc => nd.value + case nd: RBNode @uc => nd.value }) } @@ -81,15 +82,15 @@ final class CollisionProofHashMap[K, V](initialCapacity: Int, loadFactor: Double override def apply(key: K): V = findNode(key) match { case null => default(key) case nd => nd match { - case nd: LLNode => nd.value - case nd: RBNode => nd.value + case nd: LLNode @uc => nd.value + case nd: RBNode @uc => nd.value } } override def getOrElse[V1 >: V](key: K, default: => V1): V1 = { val nd = findNode(key) if (nd eq null) default else nd match { - case nd: LLNode => nd.value + case nd: LLNode @uc => nd.value case n => n.asInstanceOf[RBNode].value } } @@ -98,7 +99,7 @@ final class CollisionProofHashMap[K, V](initialCapacity: Int, loadFactor: Double val hash = computeHash(elem) table(index(hash)) match { case null => null - case n: LLNode => n.getNode(elem, hash) + case n: LLNode @uc => n.getNode(elem, hash) case n => n.asInstanceOf[RBNode].getNode(elem, hash) } } @@ -129,7 +130,7 @@ final class CollisionProofHashMap[K, V](initialCapacity: Int, loadFactor: Double private[this] def put0(key: K, value: V, getOld: Boolean, hash: Int, idx: Int): Some[V] = { val res = table(idx) match { - case n: RBNode => + case n: RBNode @uc => insert(n, idx, key, hash, value) case _old => val old: LLNode = _old.asInstanceOf[LLNode] @@ -184,16 +185,16 @@ final class CollisionProofHashMap[K, V](initialCapacity: Int, loadFactor: Double val idx = index(hash) table(idx) match { case null => Statics.pfMarker - case t: RBNode => + case t: RBNode @uc => val v = delete(t, idx, elem, hash) if(v.asInstanceOf[AnyRef] ne Statics.pfMarker) contentSize -= 1 v - case nd: LLNode if nd.hash == hash && nd.key == elem => + case nd: LLNode @uc if nd.hash == hash && nd.key == elem => // first element matches table(idx) = nd.next contentSize -= 1 nd.value - case nd: LLNode => + case nd: LLNode @uc => // find an element that matches var prev = nd var next = nd.next @@ -226,10 +227,10 @@ final class CollisionProofHashMap[K, V](initialCapacity: Int, loadFactor: Double i += 1 n match { case null => - case n: RBNode => + case n: RBNode @uc => node = CollisionProofHashMap.minNodeNonNull(n) return true - case n: LLNode => + case n: LLNode @uc => node = n return true } @@ -241,11 +242,11 @@ final class CollisionProofHashMap[K, V](initialCapacity: Int, loadFactor: Double def next(): R = if(!hasNext) Iterator.empty.next() else node match { - case n: RBNode => + case n: RBNode @uc => val r = extract(n) node = CollisionProofHashMap.successor(n ) r - case n: LLNode => + case n: LLNode @uc => val r = extract(n) node = n.next r @@ -289,8 +290,8 @@ final class CollisionProofHashMap[K, 
V](initialCapacity: Int, loadFactor: Double } @`inline` private[this] def splitBucket(tree: Node, lowBucket: Int, highBucket: Int, mask: Int): Unit = tree match { - case t: LLNode => splitBucket(t, lowBucket, highBucket, mask) - case t: RBNode => splitBucket(t, lowBucket, highBucket, mask) + case t: LLNode @uc => splitBucket(t, lowBucket, highBucket, mask) + case t: RBNode @uc => splitBucket(t, lowBucket, highBucket, mask) } private[this] def splitBucket(list: LLNode, lowBucket: Int, highBucket: Int, mask: Int): Unit = { @@ -361,8 +362,8 @@ final class CollisionProofHashMap[K, V](initialCapacity: Int, loadFactor: Double while(i < len) { val n = table(i) if(n ne null) n match { - case n: LLNode => n.foreach(f) - case n: RBNode => n.foreach(f) + case n: LLNode @uc => n.foreach(f) + case n: RBNode @uc => n.foreach(f) } i += 1 } @@ -374,8 +375,8 @@ final class CollisionProofHashMap[K, V](initialCapacity: Int, loadFactor: Double while(i < len) { val n = table(i) if(n ne null) n match { - case n: LLNode => n.foreachEntry(f) - case n: RBNode => n.foreachEntry(f) + case n: LLNode @uc => n.foreachEntry(f) + case n: RBNode @uc => n.foreachEntry(f) } i += 1 } @@ -390,7 +391,7 @@ final class CollisionProofHashMap[K, V](initialCapacity: Int, loadFactor: Double val idx = index(hash) table(idx) match { case null => () - case n: LLNode => + case n: LLNode @uc => val nd = n.getNode(key, hash) if(nd != null) return nd.value case n => @@ -416,7 +417,7 @@ final class CollisionProofHashMap[K, V](initialCapacity: Int, loadFactor: Double */ def map[K2, V2](f: ((K, V)) => (K2, V2)) (implicit @implicitNotFound(CollisionProofHashMap.ordMsg) ordering: Ordering[K2]): CollisionProofHashMap[K2, V2] = - sortedMapFactory.from(new View.Map[(K, V), (K2, V2)](toIterable, f)) + sortedMapFactory.from(new View.Map[(K, V), (K2, V2)](this, f)) /** Builds a new `CollisionProofHashMap` by applying a function to all elements of this $coll * and using the elements of the resulting collections. @@ -427,7 +428,7 @@ final class CollisionProofHashMap[K, V](initialCapacity: Int, loadFactor: Double */ def flatMap[K2, V2](f: ((K, V)) => IterableOnce[(K2, V2)]) (implicit @implicitNotFound(CollisionProofHashMap.ordMsg) ordering: Ordering[K2]): CollisionProofHashMap[K2, V2] = - sortedMapFactory.from(new View.FlatMap(toIterable, f)) + sortedMapFactory.from(new View.FlatMap(this, f)) /** Builds a new sorted map by applying a partial function to all elements of this $coll * on which the function is defined. 
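Aside on the `@uc` annotations used throughout the CollisionProofHashMap hunks: `@uc` is the alias introduced by `import scala.{unchecked => uc}` at the top of the file, and annotating a type pattern with `@unchecked` suppresses the "unchecked" warning the compiler would otherwise emit for a type test that erasure cannot verify at runtime. Below is a minimal, self-contained sketch of the idiom; the `UncheckedPatternExample` object and `firstString` method are hypothetical illustrations, not code from this patch.

object UncheckedPatternExample {
  import scala.{unchecked => uc}

  // Matching on a generic type argument is not checkable at runtime (erasure),
  // so the compiler would normally warn; `@uc` (i.e. `@unchecked`) silences
  // that warning for this one pattern while keeping the deliberate unsafety
  // visible in the source.
  def firstString(xs: Any): Option[String] = xs match {
    case ss: List[String @uc] => ss.headOption
    case _                    => None
  }
}

In the hunks above and below, the same annotation is applied to the whole node-type patterns (`case n: LLNode @uc`, `case n: RBNode @uc`) for the same purpose.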
@@ -439,10 +440,10 @@ final class CollisionProofHashMap[K, V](initialCapacity: Int, loadFactor: Double */ def collect[K2, V2](pf: PartialFunction[(K, V), (K2, V2)]) (implicit @implicitNotFound(CollisionProofHashMap.ordMsg) ordering: Ordering[K2]): CollisionProofHashMap[K2, V2] = - sortedMapFactory.from(new View.Collect(toIterable, pf)) + sortedMapFactory.from(new View.Collect(this, pf)) override def concat[V2 >: V](suffix: IterableOnce[(K, V2)]): CollisionProofHashMap[K, V2] = sortedMapFactory.from(suffix match { - case it: Iterable[(K, V2)] => new View.Concat(toIterable, it) + case it: Iterable[(K, V2)] => new View.Concat(this, it) case _ => iterator.concat(suffix.iterator) }) @@ -451,11 +452,11 @@ final class CollisionProofHashMap[K, V](initialCapacity: Int, loadFactor: Double @deprecated("Consider requiring an immutable Map or fall back to Map.concat", "2.13.0") override def + [V1 >: V](kv: (K, V1)): CollisionProofHashMap[K, V1] = - sortedMapFactory.from(new View.Appended(toIterable, kv)) + sortedMapFactory.from(new View.Appended(this, kv)) @deprecated("Use ++ with an explicit collection argument instead of + with varargs", "2.13.0") override def + [V1 >: V](elem1: (K, V1), elem2: (K, V1), elems: (K, V1)*): CollisionProofHashMap[K, V1] = - sortedMapFactory.from(new View.Concat(new View.Appended(new View.Appended(toIterable, elem1), elem2), elems)) + sortedMapFactory.from(new View.Concat(new View.Appended(new View.Appended(this, elem1), elem2), elems)) ///////////////////// RedBlackTree code derived from mutable.RedBlackTree: @@ -711,8 +712,8 @@ final class CollisionProofHashMap[K, V](initialCapacity: Int, loadFactor: Double case 1 => val nn = xs.next() val (key, hash, value) = nn match { - case nn: LLNode => (nn.key, nn.hash, nn.value) - case nn: RBNode => (nn.key, nn.hash, nn.value) + case nn: LLNode @uc => (nn.key, nn.hash, nn.value) + case nn: RBNode @uc => (nn.key, nn.hash, nn.value) } new RBNode(key, hash, value, level == maxUsedDepth && level != 1, null, null, null) case n => @@ -721,8 +722,8 @@ final class CollisionProofHashMap[K, V](initialCapacity: Int, loadFactor: Double val nn = xs.next() val right = f(level+1, size-1-leftSize) val (key, hash, value) = nn match { - case nn: LLNode => (nn.key, nn.hash, nn.value) - case nn: RBNode => (nn.key, nn.hash, nn.value) + case nn: LLNode @uc => (nn.key, nn.hash, nn.value) + case nn: RBNode @uc => (nn.key, nn.hash, nn.value) } val n = new RBNode(key, hash, value, false, left, right, null) if(left ne null) left.parent = n diff --git a/src/library/scala/collection/mutable/Map.scala b/src/library/scala/collection/mutable/Map.scala index 8312e7647c4a..610dc01029cc 100644 --- a/src/library/scala/collection/mutable/Map.scala +++ b/src/library/scala/collection/mutable/Map.scala @@ -160,7 +160,7 @@ trait MapOps[K, V, +CC[X, Y] <: MapOps[X, Y, CC, _], +C <: MapOps[K, V, CC, C]] def clear(): Unit = { keysIterator foreach -= } - override def clone(): C = empty ++= toIterable + override def clone(): C = empty ++= this @deprecated("Use filterInPlace instead", "2.13.0") @inline final def retain(p: (K, V) => Boolean): this.type = filterInPlace(p) @@ -171,17 +171,19 @@ trait MapOps[K, V, +CC[X, Y] <: MapOps[X, Y, CC, _], +C <: MapOps[K, V, CC, C]] * @param p The test predicate */ def filterInPlace(p: (K, V) => Boolean): this.type = { - if (nonEmpty) { - val array = this.toArray[Any] // scala/bug#7269 toArray avoids ConcurrentModificationException - val arrayLength = array.length - var i = 0 - while (i < arrayLength) { - val (k, v) = 
array(i).asInstanceOf[(K, V)] - if (!p(k, v)) { - this -= k + if (!isEmpty) this match { + case tm: concurrent.Map[_, _] => tm.asInstanceOf[concurrent.Map[K, V]].filterInPlaceImpl(p) + case _ => + val array = this.toArray[Any] // scala/bug#7269 toArray avoids ConcurrentModificationException + val arrayLength = array.length + var i = 0 + while (i < arrayLength) { + val (k, v) = array(i).asInstanceOf[(K, V)] + if (!p(k, v)) { + this -= k + } + i += 1 } - i += 1 - } } this } @@ -197,8 +199,9 @@ trait MapOps[K, V, +CC[X, Y] <: MapOps[X, Y, CC, _], +C <: MapOps[K, V, CC, C]] * @return the map itself. */ def mapValuesInPlace(f: (K, V) => V): this.type = { - if (nonEmpty) this match { + if (!isEmpty) this match { case hm: mutable.HashMap[_, _] => hm.asInstanceOf[mutable.HashMap[K, V]].mapValuesInPlaceImpl(f) + case tm: concurrent.Map[_, _] => tm.asInstanceOf[concurrent.Map[K, V]].mapValuesInPlaceImpl(f) case _ => val array = this.toArray[Any] val arrayLength = array.length diff --git a/src/library/scala/collection/mutable/Seq.scala b/src/library/scala/collection/mutable/Seq.scala index e624acc2200e..e83d79987208 100644 --- a/src/library/scala/collection/mutable/Seq.scala +++ b/src/library/scala/collection/mutable/Seq.scala @@ -41,7 +41,7 @@ trait SeqOps[A, +CC[_], +C <: AnyRef] override def clone(): C = { val b = newSpecificBuilder - b ++= toIterable + b ++= this b.result() } diff --git a/src/library/scala/collection/mutable/Set.scala b/src/library/scala/collection/mutable/Set.scala index 900d25c71938..6530e8fedf05 100644 --- a/src/library/scala/collection/mutable/Set.scala +++ b/src/library/scala/collection/mutable/Set.scala @@ -78,7 +78,7 @@ trait SetOps[A, +CC[X], +C <: SetOps[A, CC, C]] } def diff(that: collection.Set[A]): C = - toIterable.foldLeft(empty)((result, elem) => if (that contains elem) result else result += elem) + foldLeft(empty)((result, elem) => if (that contains elem) result else result += elem) @deprecated("Use filterInPlace instead", "2.13.0") @inline final def retain(p: A => Boolean): Unit = filterInPlace(p) @@ -104,7 +104,7 @@ trait SetOps[A, +CC[X], +C <: SetOps[A, CC, C]] this } - override def clone(): C = empty ++= toIterable + override def clone(): C = empty ++= this override def knownSize: Int = super[IterableOps].knownSize } diff --git a/src/library/scala/collection/mutable/Stack.scala b/src/library/scala/collection/mutable/Stack.scala index 1ef701fa2b61..21e442ac9f92 100644 --- a/src/library/scala/collection/mutable/Stack.scala +++ b/src/library/scala/collection/mutable/Stack.scala @@ -17,17 +17,21 @@ import scala.collection.generic.DefaultSerializable import scala.collection.{IterableFactoryDefaults, IterableOnce, SeqFactory, StrictOptimizedSeqFactory, StrictOptimizedSeqOps} /** A stack implements a data structure which allows to store and retrieve - * objects in a last-in-first-out (LIFO) fashion. - * - * @tparam A type of the elements contained in this stack. - * - * @define Coll `Stack` - * @define coll stack - * @define orderDependent - * @define orderDependentFold - * @define mayNotTerminateInf - * @define willNotTerminateInf - */ + * objects in a last-in-first-out (LIFO) fashion. + * + * Note that operations which consume and produce iterables preserve order, + * rather than reversing it (as would be expected from building a new stack + * by pushing an element at a time). + * + * @tparam A type of the elements contained in this stack. 
+ * + * @define Coll `Stack` + * @define coll stack + * @define orderDependent + * @define orderDependentFold + * @define mayNotTerminateInf + * @define willNotTerminateInf + */ @migration("Stack is now based on an ArrayDeque instead of a linked list", "2.13.0") class Stack[A] protected (array: Array[AnyRef], start: Int, end: Int) extends ArrayDeque[A](array, start, end) @@ -91,7 +95,7 @@ class Stack[A] protected (array: Array[AnyRef], start: Int, end: Int) * * @return The removed elements */ - def popAll(): scala.collection.Seq[A] = removeAllReverse() + def popAll(): scala.collection.Seq[A] = removeAll() /** * Returns and removes all elements from the top of this stack which satisfy the given predicate diff --git a/src/library/scala/concurrent/ExecutionContext.scala b/src/library/scala/concurrent/ExecutionContext.scala index 7f811c97834a..41dfbb609816 100644 --- a/src/library/scala/concurrent/ExecutionContext.scala +++ b/src/library/scala/concurrent/ExecutionContext.scala @@ -106,13 +106,13 @@ trait ExecutionContext { /** * An [[ExecutionContext]] that is also a - * Java [[https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/Executor.html Executor]]. + * Java [[java.util.concurrent.Executor Executor]]. */ trait ExecutionContextExecutor extends ExecutionContext with Executor /** * An [[ExecutionContext]] that is also a - * Java [[https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/ExecutorService.html ExecutorService]]. + * Java [[java.util.concurrent.ExecutorService ExecutorService]]. */ trait ExecutionContextExecutorService extends ExecutionContextExecutor with ExecutorService @@ -287,7 +287,7 @@ object ExecutionContext { */ def fromExecutor(e: Executor): ExecutionContextExecutor = fromExecutor(e, defaultReporter) - /** The default reporter simply prints the stack trace of the `Throwable` to [[https://docs.oracle.com/javase/8/docs/api/java/lang/System.html#err System.err]]. + /** The default reporter simply prints the stack trace of the `Throwable` to [[java.lang.System#err System.err]]. * * @return the function for error reporting */ diff --git a/src/library/scala/concurrent/Future.scala b/src/library/scala/concurrent/Future.scala index 4439b6507f7d..3bcedc53a84a 100644 --- a/src/library/scala/concurrent/Future.scala +++ b/src/library/scala/concurrent/Future.scala @@ -383,10 +383,11 @@ trait Future[+T] extends Awaitable[T] { /** Zips the values of `this` and `that` future, and creates * a new future holding the tuple of their results. * - * If `this` future fails, the resulting future is failed - * with the throwable stored in `this`. - * Otherwise, if `that` future fails, the resulting future is failed - * with the throwable stored in `that`. + * If either input future fails, the resulting future is failed with the same + * throwable, without waiting for the other input future to complete. + * + * If the application of `f` throws a non-fatal throwable, the resulting future + * is failed with that throwable. * * @tparam U the type of the other `Future` * @param that the other `Future` @@ -399,12 +400,11 @@ trait Future[+T] extends Awaitable[T] { /** Zips the values of `this` and `that` future using a function `f`, * and creates a new future holding the result. * - * If `this` future fails, the resulting future is failed - * with the throwable stored in `this`. - * Otherwise, if `that` future fails, the resulting future is failed - * with the throwable stored in `that`. 
- * If the application of `f` throws a throwable, the resulting future - * is failed with that throwable if it is non-fatal. + * If either input future fails, the resulting future is failed with the same + * throwable, without waiting for the other input future to complete. + * + * If the application of `f` throws a non-fatal throwable, the resulting future + * is failed with that throwable. * * @tparam U the type of the other `Future` * @tparam R the type of the resulting `Future` @@ -413,8 +413,14 @@ trait Future[+T] extends Awaitable[T] { * @return a `Future` with the result of the application of `f` to the results of `this` and `that` * @group Transformations */ - def zipWith[U, R](that: Future[U])(f: (T, U) => R)(implicit executor: ExecutionContext): Future[R] = + def zipWith[U, R](that: Future[U])(f: (T, U) => R)(implicit executor: ExecutionContext): Future[R] = { + // This is typically overridden by the implementation in DefaultPromise, which provides + // symmetric fail-fast behavior regardless of which future fails first. + // + // TODO: remove this implementation and make Future#zipWith abstract + // when we're next willing to make a binary incompatible change flatMap(r1 => that.map(r2 => f(r1, r2)))(if (executor.isInstanceOf[BatchingExecutor]) executor else parasitic) + } /** Creates a new future which holds the result of this future if it was completed successfully, or, if not, * the result of the `that` future if `that` is completed successfully. diff --git a/src/library/scala/concurrent/impl/Promise.scala b/src/library/scala/concurrent/impl/Promise.scala index 2ec0ebe9a24e..7024344c1184 100644 --- a/src/library/scala/concurrent/impl/Promise.scala +++ b/src/library/scala/concurrent/impl/Promise.scala @@ -72,10 +72,10 @@ private[concurrent] object Promise { **/ @inline @tailrec private[this] final def compressed(current: DefaultPromise[T], target: DefaultPromise[T], owner: DefaultPromise[T]): DefaultPromise[T] = { val value = target.get() - if (value.isInstanceOf[Callbacks[T]]) { + if (value.isInstanceOf[Callbacks[_]]) { if (compareAndSet(current, target)) target // Link else compressed(current = get(), target = target, owner = owner) // Retry - } else if (value.isInstanceOf[Link[T]]) compressed(current = current, target = value.asInstanceOf[Link[T]].get(), owner = owner) // Compress + } else if (value.isInstanceOf[Link[_]]) compressed(current = current, target = value.asInstanceOf[Link[T]].get(), owner = owner) // Compress else /*if (value.isInstanceOf[Try[T]])*/ { owner.unlink(value.asInstanceOf[Try[T]]) // Discard links owner @@ -130,49 +130,85 @@ private[concurrent] object Promise { override final def transformWith[S](f: Try[T] => Future[S])(implicit executor: ExecutionContext): Future[S] = dispatchOrAddCallbacks(get(), new Transformation[T, S](Xform_transformWith, f, executor)) + override final def zipWith[U, R](that: Future[U])(f: (T, U) => R)(implicit executor: ExecutionContext): Future[R] = { + val state = get() + if (state.isInstanceOf[Try[_]]) { + if (state.asInstanceOf[Try[T]].isFailure) this.asInstanceOf[Future[R]] + else { + val l = state.asInstanceOf[Success[T]].get + that.map(r => f(l, r)) + } + } else { + val buffer = new AtomicReference[Success[Any]]() + val zipped = new DefaultPromise[R]() + + val thisF: Try[T] => Unit = { + case left: Success[_] => + val right = buffer.getAndSet(left).asInstanceOf[Success[U]] + if (right ne null) + zipped.tryComplete(try Success(f(left.get, right.get)) catch { case e if NonFatal(e) => Failure(e) }) + case f => // Can only be 
Failure + zipped.tryComplete(f.asInstanceOf[Failure[R]]) + } + + val thatF: Try[U] => Unit = { + case right: Success[_] => + val left = buffer.getAndSet(right).asInstanceOf[Success[T]] + if (left ne null) + zipped.tryComplete(try Success(f(left.get, right.get)) catch { case e if NonFatal(e) => Failure(e) }) + case f => // Can only be Failure + zipped.tryComplete(f.asInstanceOf[Failure[R]]) + } + // Cheaper than this.onComplete since we already polled the state + this.dispatchOrAddCallbacks(state, new Transformation[T, Unit](Xform_onComplete, thisF, executor)) + that.onComplete(thatF) + zipped.future + } + } + override final def foreach[U](f: T => U)(implicit executor: ExecutionContext): Unit = { val state = get() - if (!state.isInstanceOf[Failure[T]]) dispatchOrAddCallbacks(state, new Transformation[T, Unit](Xform_foreach, f, executor)) + if (!state.isInstanceOf[Failure[_]]) dispatchOrAddCallbacks(state, new Transformation[T, Unit](Xform_foreach, f, executor)) } override final def flatMap[S](f: T => Future[S])(implicit executor: ExecutionContext): Future[S] = { val state = get() - if (!state.isInstanceOf[Failure[T]]) dispatchOrAddCallbacks(state, new Transformation[T, S](Xform_flatMap, f, executor)) + if (!state.isInstanceOf[Failure[_]]) dispatchOrAddCallbacks(state, new Transformation[T, S](Xform_flatMap, f, executor)) else this.asInstanceOf[Future[S]] } override final def map[S](f: T => S)(implicit executor: ExecutionContext): Future[S] = { val state = get() - if (!state.isInstanceOf[Failure[T]]) dispatchOrAddCallbacks(state, new Transformation[T, S](Xform_map, f, executor)) + if (!state.isInstanceOf[Failure[_]]) dispatchOrAddCallbacks(state, new Transformation[T, S](Xform_map, f, executor)) else this.asInstanceOf[Future[S]] } override final def filter(p: T => Boolean)(implicit executor: ExecutionContext): Future[T] = { val state = get() - if (!state.isInstanceOf[Failure[T]]) dispatchOrAddCallbacks(state, new Transformation[T, T](Xform_filter, p, executor)) // Short-circuit if we get a Success + if (!state.isInstanceOf[Failure[_]]) dispatchOrAddCallbacks(state, new Transformation[T, T](Xform_filter, p, executor)) // Short-circuit if we get a Success else this } override final def collect[S](pf: PartialFunction[T, S])(implicit executor: ExecutionContext): Future[S] = { val state = get() - if (!state.isInstanceOf[Failure[T]]) dispatchOrAddCallbacks(state, new Transformation[T, S](Xform_collect, pf, executor)) // Short-circuit if we get a Success + if (!state.isInstanceOf[Failure[_]]) dispatchOrAddCallbacks(state, new Transformation[T, S](Xform_collect, pf, executor)) // Short-circuit if we get a Success else this.asInstanceOf[Future[S]] } override final def recoverWith[U >: T](pf: PartialFunction[Throwable, Future[U]])(implicit executor: ExecutionContext): Future[U] = { val state = get() - if (!state.isInstanceOf[Success[T]]) dispatchOrAddCallbacks(state, new Transformation[T, U](Xform_recoverWith, pf, executor)) // Short-circuit if we get a Failure + if (!state.isInstanceOf[Success[_]]) dispatchOrAddCallbacks(state, new Transformation[T, U](Xform_recoverWith, pf, executor)) // Short-circuit if we get a Failure else this.asInstanceOf[Future[U]] } override final def recover[U >: T](pf: PartialFunction[Throwable, U])(implicit executor: ExecutionContext): Future[U] = { val state = get() - if (!state.isInstanceOf[Success[T]]) dispatchOrAddCallbacks(state, new Transformation[T, U](Xform_recover, pf, executor)) // Short-circuit if we get a Failure + if (!state.isInstanceOf[Success[_]]) 
dispatchOrAddCallbacks(state, new Transformation[T, U](Xform_recover, pf, executor)) // Short-circuit if we get a Failure else this.asInstanceOf[Future[U]] } override final def mapTo[S](implicit tag: scala.reflect.ClassTag[S]): Future[S] = - if (!get().isInstanceOf[Failure[T]]) super[Future].mapTo[S](tag) // Short-circuit if we get a Success + if (!get().isInstanceOf[Failure[_]]) super[Future].mapTo[S](tag) // Short-circuit if we get a Success else this.asInstanceOf[Future[S]] @@ -180,13 +216,13 @@ private[concurrent] object Promise { dispatchOrAddCallbacks(get(), new Transformation[T, Unit](Xform_onComplete, func, executor)) override final def failed: Future[Throwable] = - if (!get().isInstanceOf[Success[T]]) super.failed + if (!get().isInstanceOf[Success[_]]) super.failed else Future.failedFailureFuture // Cached instance in case of already known success @tailrec override final def toString: String = { val state = get() - if (state.isInstanceOf[Try[T]]) "Future("+state+")" - else if (state.isInstanceOf[Link[T]]) state.asInstanceOf[Link[T]].promise(this).toString + if (state.isInstanceOf[Try[_]]) "Future("+state+")" + else if (state.isInstanceOf[Link[_]]) state.asInstanceOf[Link[T]].promise(this).toString else /*if (state.isInstanceOf[Callbacks[T]]) */ "Future()" } @@ -231,25 +267,25 @@ private[concurrent] object Promise { @tailrec // returns null if not completed private final def value0: Try[T] = { val state = get() - if (state.isInstanceOf[Try[T]]) state.asInstanceOf[Try[T]] - else if (state.isInstanceOf[Link[T]]) state.asInstanceOf[Link[T]].promise(this).value0 + if (state.isInstanceOf[Try[_]]) state.asInstanceOf[Try[T]] + else if (state.isInstanceOf[Link[_]]) state.asInstanceOf[Link[T]].promise(this).value0 else /*if (state.isInstanceOf[Callbacks[T]])*/ null } override final def tryComplete(value: Try[T]): Boolean = { val state = get() - if (state.isInstanceOf[Try[T]]) false + if (state.isInstanceOf[Try[_]]) false else tryComplete0(state, resolve(value)) } @tailrec // WARNING: important that the supplied Try really is resolve():d private[Promise] final def tryComplete0(state: AnyRef, resolved: Try[T]): Boolean = - if (state.isInstanceOf[Callbacks[T]]) { + if (state.isInstanceOf[Callbacks[_]]) { if (compareAndSet(state, resolved)) { if (state ne Noop) submitWithValue(state.asInstanceOf[Callbacks[T]], resolved) true } else tryComplete0(get(), resolved) - } else if (state.isInstanceOf[Link[T]]) { + } else if (state.isInstanceOf[Link[_]]) { val p = state.asInstanceOf[Link[T]].promise(this) // If this returns owner/this, we are in a completed link (p ne this) && p.tryComplete0(p.get(), resolved) // Use this to get tailcall optimization and avoid re-resolution } else /* if(state.isInstanceOf[Try[T]]) */ false @@ -257,8 +293,8 @@ private[concurrent] object Promise { override final def completeWith(other: Future[T]): this.type = { if (other ne this) { val state = get() - if (!state.isInstanceOf[Try[T]]) { - val resolved = if (other.isInstanceOf[DefaultPromise[T]]) other.asInstanceOf[DefaultPromise[T]].value0 else other.value.orNull + if (!state.isInstanceOf[Try[_]]) { + val resolved = if (other.isInstanceOf[DefaultPromise[_]]) other.asInstanceOf[DefaultPromise[T]].value0 else other.value.orNull if (resolved ne null) tryComplete0(state, resolved) else other.onComplete(this)(ExecutionContext.parasitic) } @@ -272,10 +308,10 @@ private[concurrent] object Promise { * to the root promise when linking two promises together. 
*/ @tailrec private final def dispatchOrAddCallbacks[C <: Callbacks[T]](state: AnyRef, callbacks: C): C = - if (state.isInstanceOf[Try[T]]) { + if (state.isInstanceOf[Try[_]]) { submitWithValue(callbacks, state.asInstanceOf[Try[T]]) // invariant: callbacks should never be Noop here callbacks - } else if (state.isInstanceOf[Callbacks[T]]) { + } else if (state.isInstanceOf[Callbacks[_]]) { if(compareAndSet(state, if (state ne Noop) concatCallbacks(callbacks, state.asInstanceOf[Callbacks[T]]) else callbacks)) callbacks else dispatchOrAddCallbacks(get(), callbacks) } else /*if (state.isInstanceOf[Link[T]])*/ { @@ -307,10 +343,10 @@ private[concurrent] object Promise { @tailrec private[concurrent] final def linkRootOf(target: DefaultPromise[T], link: Link[T]): Unit = if (this ne target) { val state = get() - if (state.isInstanceOf[Try[T]]) { + if (state.isInstanceOf[Try[_]]) { if(!target.tryComplete0(target.get(), state.asInstanceOf[Try[T]])) throw new IllegalStateException("Cannot link completed promises together") - } else if (state.isInstanceOf[Callbacks[T]]) { + } else if (state.isInstanceOf[Callbacks[_]]) { val l = if (link ne null) link else new Link(target) val p = l.promise(this) if ((this ne p) && compareAndSet(state, l)) { @@ -326,7 +362,7 @@ private[concurrent] object Promise { **/ @tailrec private[concurrent] final def unlink(resolved: Try[T]): Unit = { val state = get() - if (state.isInstanceOf[Link[T]]) { + if (state.isInstanceOf[Link[_]]) { val next = if (compareAndSet(state, resolved)) state.asInstanceOf[Link[T]].get() else this next.unlink(resolved) } else tryComplete0(state, resolved) @@ -432,23 +468,23 @@ private[concurrent] object Promise { case Xform_flatMap => if (v.isInstanceOf[Success[F]]) { val f = fun(v.get) - if (f.isInstanceOf[DefaultPromise[T]]) f.asInstanceOf[DefaultPromise[T]].linkRootOf(this, null) else completeWith(f.asInstanceOf[Future[T]]) + if (f.isInstanceOf[DefaultPromise[_]]) f.asInstanceOf[DefaultPromise[T]].linkRootOf(this, null) else completeWith(f.asInstanceOf[Future[T]]) null } else v case Xform_transform => resolve(fun(v).asInstanceOf[Try[T]]) case Xform_transformWith => val f = fun(v) - if (f.isInstanceOf[DefaultPromise[T]]) f.asInstanceOf[DefaultPromise[T]].linkRootOf(this, null) else completeWith(f.asInstanceOf[Future[T]]) + if (f.isInstanceOf[DefaultPromise[_]]) f.asInstanceOf[DefaultPromise[T]].linkRootOf(this, null) else completeWith(f.asInstanceOf[Future[T]]) null case Xform_foreach => - v foreach fun + v.foreach(fun) null case Xform_onComplete => fun(v) null case Xform_recover => - if (v.isInstanceOf[Failure[F]]) resolve(v.recover(fun.asInstanceOf[PartialFunction[Throwable, F]])) else v //recover F=:=T + if (v.isInstanceOf[Failure[_]]) resolve(v.recover(fun.asInstanceOf[PartialFunction[Throwable, F]])) else v //recover F=:=T case Xform_recoverWith => if (v.isInstanceOf[Failure[F]]) { val f = fun.asInstanceOf[PartialFunction[Throwable, Future[T]]].applyOrElse(v.asInstanceOf[Failure[F]].exception, Future.recoverWithFailed) diff --git a/src/library/scala/jdk/Accumulator.scala b/src/library/scala/jdk/Accumulator.scala index da5f722df4d2..ca1b0215bcd8 100644 --- a/src/library/scala/jdk/Accumulator.scala +++ b/src/library/scala/jdk/Accumulator.scala @@ -54,7 +54,8 @@ import scala.language.implicitConversions * There are two possibilities to process elements of a primitive Accumulator without boxing: * specialized operations of the Accumulator, or the Stepper interface. 
The most common collection * operations are overloaded or overridden in the primitive Accumulator classes, for example - * [[IntAccumulator.map(f: Int => Int)* IntAccumulator.map]] or [[IntAccumulator.exists]]. Thanks to Scala's function specialization, + * [[IntAccumulator.map(f:Int=>Int)* IntAccumulator.map]] or [[IntAccumulator.exists]]. + * Thanks to Scala's function specialization, * `intAcc.exists(x => testOn(x))` does not incur boxing. * * The [[scala.collection.Stepper]] interface provides iterator-like `hasStep` and `nextStep` methods, and is diff --git a/src/library/scala/math/BigInt.scala b/src/library/scala/math/BigInt.scala index 20cec9742ed2..6ea371328d9e 100644 --- a/src/library/scala/math/BigInt.scala +++ b/src/library/scala/math/BigInt.scala @@ -21,9 +21,23 @@ import scala.collection.immutable.NumericRange object BigInt { + private val longMinValueBigInteger = BigInteger.valueOf(Long.MinValue) + private val longMinValue = new BigInt(longMinValueBigInteger, Long.MinValue) + private[this] val minCached = -1024 private[this] val maxCached = 1024 private[this] val cache = new Array[BigInt](maxCached - minCached + 1) + + private[this] def getCached(i: Int): BigInt = { + val offset = i - minCached + var n = cache(offset) + if (n eq null) { + n = new BigInt(null, i.toLong) + cache(offset) = n + } + n + } + private val minusOne = BigInteger.valueOf(-1) /** Constructs a `BigInt` whose value is equal to that of the @@ -33,12 +47,7 @@ object BigInt { * @return the constructed `BigInt` */ def apply(i: Int): BigInt = - if (minCached <= i && i <= maxCached) { - val offset = i - minCached - var n = cache(offset) - if (n eq null) { n = new BigInt(BigInteger.valueOf(i.toLong)); cache(offset) = n } - n - } else new BigInt(BigInteger.valueOf(i.toLong)) + if (minCached <= i && i <= maxCached) getCached(i) else apply(i: Long) /** Constructs a `BigInt` whose value is equal to that of the * specified long value. @@ -47,14 +56,15 @@ object BigInt { * @return the constructed `BigInt` */ def apply(l: Long): BigInt = - if (minCached <= l && l <= maxCached) apply(l.toInt) - else new BigInt(BigInteger.valueOf(l)) + if (minCached <= l && l <= maxCached) getCached(l.toInt) else { + if (l == Long.MinValue) longMinValue else new BigInt(null, l) + } /** Translates a byte array containing the two's-complement binary * representation of a BigInt into a BigInt. */ def apply(x: Array[Byte]): BigInt = - new BigInt(new BigInteger(x)) + apply(new BigInteger(x)) /** Translates the sign-magnitude representation of a BigInt into a BigInt. * @@ -64,40 +74,44 @@ object BigInt { * the number. */ def apply(signum: Int, magnitude: Array[Byte]): BigInt = - new BigInt(new BigInteger(signum, magnitude)) + apply(new BigInteger(signum, magnitude)) /** Constructs a randomly generated positive BigInt that is probably prime, * with the specified bitLength. */ def apply(bitlength: Int, certainty: Int, rnd: scala.util.Random): BigInt = - new BigInt(new BigInteger(bitlength, certainty, rnd.self)) + apply(new BigInteger(bitlength, certainty, rnd.self)) /** Constructs a randomly generated BigInt, uniformly distributed over the * range `0` to `(2 ^ numBits - 1)`, inclusive. */ def apply(numbits: Int, rnd: scala.util.Random): BigInt = - new BigInt(new BigInteger(numbits, rnd.self)) + apply(new BigInteger(numbits, rnd.self)) /** Translates the decimal String representation of a BigInt into a BigInt. 
*/ def apply(x: String): BigInt = - new BigInt(new BigInteger(x)) + apply(new BigInteger(x)) /** Translates the string representation of a `BigInt` in the * specified `radix` into a BigInt. */ def apply(x: String, radix: Int): BigInt = - new BigInt(new BigInteger(x, radix)) + apply(new BigInteger(x, radix)) /** Translates a `java.math.BigInteger` into a BigInt. */ - def apply(x: BigInteger): BigInt = - new BigInt(x) + def apply(x: BigInteger): BigInt = { + if (x.bitLength <= 63) { + val l = x.longValue + if (minCached <= l && l <= maxCached) getCached(l.toInt) else new BigInt(x, l) + } else new BigInt(x, Long.MinValue) + } /** Returns a positive BigInt that is probably prime, with the specified bitLength. */ def probablePrime(bitLength: Int, rnd: scala.util.Random): BigInt = - new BigInt(BigInteger.probablePrime(bitLength, rnd.self)) + apply(BigInteger.probablePrime(bitLength, rnd.self)) /** Implicit conversion from `Int` to `BigInt`. */ @@ -110,14 +124,103 @@ object BigInt { /** Implicit conversion from `java.math.BigInteger` to `scala.BigInt`. */ implicit def javaBigInteger2bigInt(x: BigInteger): BigInt = apply(x) + + // this method is adapted from Google Guava's version at + // https://github.com/google/guava/blob/master/guava/src/com/google/common/math/LongMath.java + // that code carries the following notice: + // * Copyright (C) 2011 The Guava Authors + // * + // * Licensed under the Apache License, Version 2.0 (the "License") + /** + * Returns the greatest common divisor of a and b. Returns 0 if a == 0 && b == 0. + */ + private def longGcd(a: Long, b: Long): Long = { + // both a and b must be >= 0 + if (a == 0) { // 0 % b == 0, so b divides a, but the converse doesn't hold. + // BigInteger.gcd is consistent with this decision. + return b + } + else if (b == 0) return a // similar logic + /* + * Uses the binary GCD algorithm; see http://en.wikipedia.org/wiki/Binary_GCD_algorithm. This is + * >60% faster than the Euclidean algorithm in benchmarks. + */ + val aTwos = java.lang.Long.numberOfTrailingZeros(a) + var a1 = a >> aTwos // divide out all 2s + + val bTwos = java.lang.Long.numberOfTrailingZeros(b) + var b1 = b >> bTwos + while (a1 != b1) { // both a, b are odd + // The key to the binary GCD algorithm is as follows: + // Both a1 and b1 are odd. Assume a1 > b1; then gcd(a1 - b1, b1) = gcd(a1, b1). + // But in gcd(a1 - b1, b1), a1 - b1 is even and b1 is odd, so we can divide out powers of two. + // We bend over backwards to avoid branching, adapting a technique from + // http://graphics.stanford.edu/~seander/bithacks.html#IntegerMinOrMax + val delta = a1 - b1 // can't overflow, since a1 and b1 are nonnegative + val minDeltaOrZero = delta & (delta >> (java.lang.Long.SIZE - 1)) + // equivalent to Math.min(delta, 0) + a1 = delta - minDeltaOrZero - minDeltaOrZero // sets a to Math.abs(a - b) + + // a is now nonnegative and even + b1 += minDeltaOrZero // sets b to min(old a, b) + + a1 >>= java.lang.Long.numberOfTrailingZeros(a1) // divide out all 2s, since 2 doesn't divide b + + } + a1 << scala.math.min(aTwos, bTwos) + } + } -final class BigInt(val bigInteger: BigInteger) +/** A type with efficient encoding of arbitrary integers. + * + * It wraps `java.math.BigInteger`, with optimization for small values that can be encoded in a `Long`. 
+ */ +final class BigInt private (private var _bigInteger: BigInteger, private val _long: Long) extends ScalaNumber with ScalaNumericConversions with Serializable with Ordered[BigInt] { + // The class has a special encoding for integers that fit in a Long *and* are not equal to Long.MinValue. + // + // The Long value Long.MinValue is a tag specifying that the integer is encoded in the BigInteger field. + // + // There are three possible states for the class fields (_bigInteger, _long) + // 1. (null, l) where l != Long.MinValue, encodes the integer "l" + // 2. (b, l) where l != Long.MinValue; then b is a BigInteger with value l, encodes "l" == "b" + // 3a. (b, Long.MinValue) where b == Long.MinValue, encodes Long.MinValue + // 3b. (b, Long.MinValue) where b does not fit in a Long, encodes "b" + // + // There is only one possible transition, 1. -> 2.: when the method .bigInteger is called, the field + // _bigInteger caches the result. + // + // The case 3a. is the only one where the BigInteger could actually fit in a Long, but as its value is used as a + // tag, we'll take the slow path instead. + // + // Additionally, we know that if this.isValidLong is true, then _long is the encoded value. + + /** Public constructor present for compatibility. Use the BigInt.apply companion object method instead. */ + def this(bigInteger: BigInteger) = this( + bigInteger, // even if it is a short BigInteger, we cache the instance + if (bigInteger.bitLength <= 63) + bigInteger.longValue // if _bigInteger is actually equal to Long.MinValue, no big deal, its value acts as a tag + else Long.MinValue + ) + + /** Returns whether the integer is encoded in the Long. Returns true for all values fitting in a Long except + * Long.MinValue. */ + private def longEncoding: Boolean = _long != Long.MinValue + + def bigInteger: BigInteger = { + val read = _bigInteger + if (read ne null) read else { + val write = BigInteger.valueOf(_long) + _bigInteger = write // reference assignment is atomic; this is multi-thread safe (if possibly wasteful) + write + } + } + /** Returns the hash code for this BigInt. */ override def hashCode(): Int = if (isValidLong) unifiedPrimitiveHashcode @@ -132,11 +235,13 @@ final class BigInt(val bigInteger: BigInteger) case that: Float => isValidFloat && toFloat == that case x => isValidLong && unifiedPrimitiveEquals(x) } - override def isValidByte: Boolean = this >= Byte.MinValue && this <= Byte.MaxValue - override def isValidShort: Boolean = this >= Short.MinValue && this <= Short.MaxValue - override def isValidChar: Boolean = this >= Char.MinValue && this <= Char.MaxValue - override def isValidInt: Boolean = this >= Int.MinValue && this <= Int.MaxValue - def isValidLong: Boolean = this >= Long.MinValue && this <= Long.MaxValue + + override def isValidByte: Boolean = _long >= Byte.MinValue && _long <= Byte.MaxValue /* && longEncoding */ + override def isValidShort: Boolean = _long >= Short.MinValue && _long <= Short.MaxValue /* && longEncoding */ + override def isValidChar: Boolean = _long >= Char.MinValue && _long <= Char.MaxValue /* && longEncoding */ + override def isValidInt: Boolean = _long >= Int.MinValue && _long <= Int.MaxValue /* && longEncoding */ + def isValidLong: Boolean = longEncoding || _bigInteger == BigInt.longMinValueBigInteger // rhs of || tests == Long.MinValue + + /** Returns `true` iff this can be represented exactly by [[scala.Float]]; otherwise returns `false`. 
*/ def isValidFloat: Boolean = { @@ -178,151 +283,266 @@ final class BigInt(val bigInteger: BigInteger) /** Compares this BigInt with the specified BigInt for equality. */ - def equals (that: BigInt): Boolean = compare(that) == 0 + def equals(that: BigInt): Boolean = + if (this.longEncoding) + that.longEncoding && (this._long == that._long) + else + !that.longEncoding && (this._bigInteger == that._bigInteger) /** Compares this BigInt with the specified BigInt */ - def compare (that: BigInt): Int = this.bigInteger.compareTo(that.bigInteger) + def compare(that: BigInt): Int = + if (this.longEncoding) { + if (that.longEncoding) java.lang.Long.compare(this._long, that._long) else -that._bigInteger.signum() + } else { + if (that.longEncoding) _bigInteger.signum() else this._bigInteger.compareTo(that._bigInteger) + } /** Addition of BigInts */ - def + (that: BigInt): BigInt = new BigInt(this.bigInteger.add(that.bigInteger)) + def +(that: BigInt): BigInt = { + if (this.longEncoding && that.longEncoding) { // fast path + val x = this._long + val y = that._long + val z = x + y + if ((~(x ^ y) & (x ^ z)) >= 0L) return BigInt(z) + } + BigInt(this.bigInteger.add(that.bigInteger)) + } /** Subtraction of BigInts */ - def - (that: BigInt): BigInt = new BigInt(this.bigInteger.subtract(that.bigInteger)) + def -(that: BigInt): BigInt = { + if (this.longEncoding && that.longEncoding) { // fast path + val x = this._long + val y = that._long + val z = x - y + if (((x ^ y) & (x ^ z)) >= 0L) return BigInt(z) + } + BigInt(this.bigInteger.subtract(that.bigInteger)) + } /** Multiplication of BigInts */ - def * (that: BigInt): BigInt = new BigInt(this.bigInteger.multiply(that.bigInteger)) + def *(that: BigInt): BigInt = { + if (this.longEncoding && that.longEncoding) { // fast path + val x = this._long + val y = that._long + val z = x * y + // original code checks the y != Long.MinValue, but when longEncoding is true, that is never the case + // if (x == 0 || (y == z / x && !(x == -1 && y == Long.MinValue))) return BigInt(z) + if (x == 0 || y == z / x) return BigInt(z) + } + BigInt(this.bigInteger.multiply(that.bigInteger)) + } /** Division of BigInts */ - def / (that: BigInt): BigInt = new BigInt(this.bigInteger.divide(that.bigInteger)) + def /(that: BigInt): BigInt = + // in the fast path, note that the original code avoided storing -Long.MinValue in a long: + // if (this._long != Long.MinValue || that._long != -1) return BigInt(this._long / that._long) + // but we know this._long cannot be Long.MinValue, because Long.MinValue is the tag for bigger integers + if (this.longEncoding && that.longEncoding) BigInt(this._long / that._long) + else BigInt(this.bigInteger.divide(that.bigInteger)) /** Remainder of BigInts */ - def % (that: BigInt): BigInt = new BigInt(this.bigInteger.remainder(that.bigInteger)) + def %(that: BigInt): BigInt = + // see / for the original logic regarding Long.MinValue + if (this.longEncoding && that.longEncoding) BigInt(this._long % that._long) + else BigInt(this.bigInteger.remainder(that.bigInteger)) /** Returns a pair of two BigInts containing (this / that) and (this % that). 
*/ - def /% (that: BigInt): (BigInt, BigInt) = { - val dr = this.bigInteger.divideAndRemainder(that.bigInteger) - (new BigInt(dr(0)), new BigInt(dr(1))) - } + def /%(that: BigInt): (BigInt, BigInt) = + if (this.longEncoding && that.longEncoding) { + val x = this._long + val y = that._long + // original line: if (x != Long.MinValue || y != -1) return (BigInt(x / y), BigInt(x % y)) + (BigInt(x / y), BigInt(x % y)) + } else { + val dr = this.bigInteger.divideAndRemainder(that.bigInteger) + (BigInt(dr(0)), BigInt(dr(1))) + } /** Leftshift of BigInt */ - def << (n: Int): BigInt = new BigInt(this.bigInteger.shiftLeft(n)) + def <<(n: Int): BigInt = + if (longEncoding && n <= 0) (this >> (-n)) else BigInt(this.bigInteger.shiftLeft(n)) /** (Signed) rightshift of BigInt */ - def >> (n: Int): BigInt = new BigInt(this.bigInteger.shiftRight(n)) - + def >>(n: Int): BigInt = + if (longEncoding && n >= 0) { + if (n < 64) BigInt(_long >> n) + else if (_long < 0) BigInt(-1) + else BigInt(0) // for _long >= 0 + } else BigInt(this.bigInteger.shiftRight(n)) + /** Bitwise and of BigInts */ - def & (that: BigInt): BigInt = new BigInt(this.bigInteger.and(that.bigInteger)) + def &(that: BigInt): BigInt = + if (this.longEncoding && that.longEncoding) + BigInt(this._long & that._long) + else BigInt(this.bigInteger.and(that.bigInteger)) /** Bitwise or of BigInts */ - def | (that: BigInt): BigInt = new BigInt(this.bigInteger.or (that.bigInteger)) + def |(that: BigInt): BigInt = + if (this.longEncoding && that.longEncoding) + BigInt(this._long | that._long) + else BigInt(this.bigInteger.or(that.bigInteger)) /** Bitwise exclusive-or of BigInts */ - def ^ (that: BigInt): BigInt = new BigInt(this.bigInteger.xor(that.bigInteger)) + def ^(that: BigInt): BigInt = + if (this.longEncoding && that.longEncoding) + BigInt(this._long ^ that._long) + else BigInt(this.bigInteger.xor(that.bigInteger)) /** Bitwise and-not of BigInts. Returns a BigInt whose value is (this & ~that). */ - def &~ (that: BigInt): BigInt = new BigInt(this.bigInteger.andNot(that.bigInteger)) + def &~(that: BigInt): BigInt = + if (this.longEncoding && that.longEncoding) + BigInt(this._long & ~that._long) + else BigInt(this.bigInteger.andNot(that.bigInteger)) /** Returns the greatest common divisor of abs(this) and abs(that) */ - def gcd (that: BigInt): BigInt = new BigInt(this.bigInteger.gcd(that.bigInteger)) + def gcd(that: BigInt): BigInt = + if (this.longEncoding) { + if (this._long == 0) return that.abs + // if (this._long == Long.MinValue) return (-this) gcd that + // this != 0 && this != Long.MinValue + if (that.longEncoding) { + if (that._long == 0) return this.abs + // if (that._long == Long.MinValue) return this gcd (-that) + BigInt(BigInt.longGcd(this._long.abs, that._long.abs)) + } else that gcd this // force the BigInteger on the left + } else { + // this is not a valid long + if (that.longEncoding) { + if (that._long == 0) return this.abs + // if (that._long == Long.MinValue) return this gcd (-that) + val red = (this._bigInteger mod BigInteger.valueOf(that._long.abs)).longValue() + if (red == 0) return that.abs + BigInt(BigInt.longGcd(that._long.abs, red)) + } else BigInt(this.bigInteger.gcd(that.bigInteger)) + } + /** Returns a BigInt whose value is (this mod that). * This method differs from `%` in that it always returns a non-negative BigInt. 
* @param that A positive number */ - def mod (that: BigInt): BigInt = new BigInt(this.bigInteger.mod(that.bigInteger)) + def mod(that: BigInt): BigInt = + if (this.longEncoding && that.longEncoding) { + val res = this._long % that._long + if (res >= 0) BigInt(res) else BigInt(res + that._long) + } else BigInt(this.bigInteger.mod(that.bigInteger)) /** Returns the minimum of this and that */ - def min (that: BigInt): BigInt = new BigInt(this.bigInteger.min(that.bigInteger)) + def min(that: BigInt): BigInt = + if (this <= that) this else that /** Returns the maximum of this and that */ - def max (that: BigInt): BigInt = new BigInt(this.bigInteger.max(that.bigInteger)) + def max(that: BigInt): BigInt = + if (this >= that) this else that /** Returns a BigInt whose value is (this raised to the power of exp). */ - def pow (exp: Int): BigInt = new BigInt(this.bigInteger.pow(exp)) + def pow(exp: Int): BigInt = BigInt(this.bigInteger.pow(exp)) /** Returns a BigInt whose value is * (this raised to the power of exp modulo m). */ - def modPow (exp: BigInt, m: BigInt): BigInt = - new BigInt(this.bigInteger.modPow(exp.bigInteger, m.bigInteger)) + def modPow(exp: BigInt, m: BigInt): BigInt = BigInt(this.bigInteger.modPow(exp.bigInteger, m.bigInteger)) /** Returns a BigInt whose value is (the inverse of this modulo m). */ - def modInverse (m: BigInt): BigInt = new BigInt(this.bigInteger.modInverse(m.bigInteger)) + def modInverse(m: BigInt): BigInt = BigInt(this.bigInteger.modInverse(m.bigInteger)) /** Returns a BigInt whose value is the negation of this BigInt */ - def unary_- : BigInt = new BigInt(this.bigInteger.negate()) + def unary_- : BigInt = if (longEncoding) BigInt(-_long) else BigInt(this.bigInteger.negate()) /** Returns the absolute value of this BigInt */ - def abs: BigInt = new BigInt(this.bigInteger.abs()) + def abs: BigInt = if (signum < 0) -this else this /** Returns the sign of this BigInt; * -1 if it is less than 0, * +1 if it is greater than 0, * 0 if it is equal to 0. */ - def signum: Int = this.bigInteger.signum() + def signum: Int = if (longEncoding) java.lang.Long.signum(_long) else _bigInteger.signum() /** Returns the sign of this BigInt; * -1 if it is less than 0, * +1 if it is greater than 0, * 0 if it is equal to 0. */ - def sign: BigInt = signum + def sign: BigInt = BigInt(signum) /** Returns the bitwise complement of this BigInt */ - def unary_~ : BigInt = new BigInt(this.bigInteger.not()) + def unary_~ : BigInt = + // it is equal to -(this + 1) + if (longEncoding && _long != Long.MaxValue) BigInt(-(_long + 1)) else BigInt(this.bigInteger.not()) /** Returns true if and only if the designated bit is set. */ - def testBit (n: Int): Boolean = this.bigInteger.testBit(n) + def testBit(n: Int): Boolean = + if (longEncoding) { + if (n <= 63) + (_long & (1L << n)) != 0 + else + _long < 0 // give the sign bit + } else _bigInteger.testBit(n) /** Returns a BigInt whose value is equivalent to this BigInt with the designated bit set. */ - def setBit (n: Int): BigInt = new BigInt(this.bigInteger.setBit(n)) + def setBit(n: Int): BigInt = // note that we do not operate on the Long sign bit #63 + if (longEncoding && n <= 62) BigInt(_long | (1L << n)) else BigInt(this.bigInteger.setBit(n)) /** Returns a BigInt whose value is equivalent to this BigInt with the designated bit cleared. 
*/ - def clearBit(n: Int): BigInt = new BigInt(this.bigInteger.clearBit(n)) + def clearBit(n: Int): BigInt = // note that we do not operate on the Long sign bit #63 + if (longEncoding && n <= 62) BigInt(_long & ~(1L << n)) else BigInt(this.bigInteger.clearBit(n)) /** Returns a BigInt whose value is equivalent to this BigInt with the designated bit flipped. */ - def flipBit (n: Int): BigInt = new BigInt(this.bigInteger.flipBit(n)) + def flipBit(n: Int): BigInt = // note that we do not operate on the Long sign bit #63 + if (longEncoding && n <= 62) BigInt(_long ^ (1L << n)) else BigInt(this.bigInteger.flipBit(n)) /** Returns the index of the rightmost (lowest-order) one bit in this BigInt * (the number of zero bits to the right of the rightmost one bit). */ - def lowestSetBit: Int = this.bigInteger.getLowestSetBit() + def lowestSetBit: Int = + if (longEncoding) { + if (_long == 0) -1 else java.lang.Long.numberOfTrailingZeros(_long) + } else this.bigInteger.getLowestSetBit() /** Returns the number of bits in the minimal two's-complement representation of this BigInt, * excluding a sign bit. */ - def bitLength: Int = this.bigInteger.bitLength() + def bitLength: Int = + // bitLength is defined as ceil(log2(this < 0 ? -this : this + 1))) + // where ceil(log2(x)) = 64 - numberOfLeadingZeros(x - 1) + if (longEncoding) { + if (_long < 0) 64 - java.lang.Long.numberOfLeadingZeros(-(_long + 1)) // takes care of Long.MinValue + else 64 - java.lang.Long.numberOfLeadingZeros(_long) + } else _bigInteger.bitLength() /** Returns the number of bits in the two's complement representation of this BigInt * that differ from its sign bit. */ - def bitCount: Int = this.bigInteger.bitCount() + def bitCount: Int = + if (longEncoding) { + if (_long < 0) java.lang.Long.bitCount(-(_long + 1)) else java.lang.Long.bitCount(_long) + } else this.bigInteger.bitCount() /** Returns true if this BigInt is probably prime, false if it's definitely composite. * @param certainty a measure of the uncertainty that the caller is willing to tolerate: @@ -360,7 +580,7 @@ final class BigInt(val bigInteger: BigInteger) * overall magnitude of the BigInt value as well as return a result with * the opposite sign. */ - def intValue: Int = this.bigInteger.intValue + def intValue: Int = if (longEncoding) _long.toInt else this.bigInteger.intValue /** Converts this BigInt to a long. * If the BigInt is too big to fit in a long, only the low-order 64 bits @@ -368,7 +588,7 @@ final class BigInt(val bigInteger: BigInteger) * overall magnitude of the BigInt value as well as return a result with * the opposite sign. */ - def longValue: Long = this.bigInteger.longValue + def longValue: Long = if (longEncoding) _long else _bigInteger.longValue /** Converts this `BigInt` to a `float`. * If this `BigInt` has too great a magnitude to represent as a float, @@ -382,7 +602,9 @@ final class BigInt(val bigInteger: BigInteger) * it will be converted to `Double.NEGATIVE_INFINITY` or * `Double.POSITIVE_INFINITY` as appropriate. */ - def doubleValue: Double = this.bigInteger.doubleValue + def doubleValue: Double = + if (isValidLong && (-(1L << 53) <= _long && _long <= (1L << 53))) _long.toDouble + else this.bigInteger.doubleValue /** Create a `NumericRange[BigInt]` in range `[start;end)` * with the specified step, where start is the target BigInt. @@ -399,7 +621,7 @@ final class BigInt(val bigInteger: BigInteger) /** Returns the decimal String representation of this BigInt. 
*/ - override def toString(): String = this.bigInteger.toString() + override def toString(): String = if (longEncoding) _long.toString() else _bigInteger.toString() /** Returns the String representation in the specified radix of this BigInt. */ diff --git a/src/library/scala/math/Equiv.scala b/src/library/scala/math/Equiv.scala index efb281ceed3d..f615963f1f04 100644 --- a/src/library/scala/math/Equiv.scala +++ b/src/library/scala/math/Equiv.scala @@ -87,7 +87,7 @@ object Equiv extends LowPriorityEquiv { override def equals(obj: scala.Any): Boolean = obj match { case that: AnyRef if this eq that => true - case that: IterableEquiv[CC, T] => this.eqv == that.eqv + case that: IterableEquiv[_, _] => this.eqv == that.eqv case _ => false } override def hashCode(): Int = eqv.hashCode() * iterableSeed @@ -256,7 +256,7 @@ object Equiv extends LowPriorityEquiv { override def equals(obj: scala.Any): Boolean = obj match { case that: AnyRef if this eq that => true - case that: OptionEquiv[T] => this.eqv == that.eqv + case that: OptionEquiv[_] => this.eqv == that.eqv case _ => false } override def hashCode(): Int = eqv.hashCode() * optionSeed @@ -273,7 +273,7 @@ object Equiv extends LowPriorityEquiv { override def equals(obj: scala.Any): Boolean = obj match { case that: AnyRef if this eq that => true - case that: Tuple2Equiv[T1, T2] => + case that: Tuple2Equiv[_, _] => this.eqv1 == that.eqv1 && this.eqv2 == that.eqv2 case _ => false @@ -294,7 +294,7 @@ object Equiv extends LowPriorityEquiv { override def equals(obj: scala.Any): Boolean = obj match { case that: AnyRef if this eq that => true - case that: Tuple3Equiv[T1, T2, T3] => + case that: Tuple3Equiv[_, _, _] => this.eqv1 == that.eqv1 && this.eqv2 == that.eqv2 && this.eqv3 == that.eqv3 @@ -319,7 +319,7 @@ object Equiv extends LowPriorityEquiv { override def equals(obj: scala.Any): Boolean = obj match { case that: AnyRef if this eq that => true - case that: Tuple4Equiv[T1, T2, T3, T4] => + case that: Tuple4Equiv[_, _, _, _] => this.eqv1 == that.eqv1 && this.eqv2 == that.eqv2 && this.eqv3 == that.eqv3 && @@ -347,7 +347,7 @@ object Equiv extends LowPriorityEquiv { override def equals(obj: scala.Any): Boolean = obj match { case that: AnyRef if this eq that => true - case that: Tuple5Equiv[T1, T2, T3, T4, T5] => + case that: Tuple5Equiv[_, _, _, _, _] => this.eqv1 == that.eqv1 && this.eqv2 == that.eqv2 && this.eqv3 == that.eqv3 && @@ -378,7 +378,7 @@ object Equiv extends LowPriorityEquiv { override def equals(obj: scala.Any): Boolean = obj match { case that: AnyRef if this eq that => true - case that: Tuple6Equiv[T1, T2, T3, T4, T5, T6] => + case that: Tuple6Equiv[_, _, _, _, _, _] => this.eqv1 == that.eqv1 && this.eqv2 == that.eqv2 && this.eqv3 == that.eqv3 && @@ -412,7 +412,7 @@ object Equiv extends LowPriorityEquiv { override def equals(obj: scala.Any): Boolean = obj match { case that: AnyRef if this eq that => true - case that: Tuple7Equiv[T1, T2, T3, T4, T5, T6, T7] => + case that: Tuple7Equiv[_, _, _, _, _, _, _] => this.eqv1 == that.eqv1 && this.eqv2 == that.eqv2 && this.eqv3 == that.eqv3 && @@ -449,7 +449,7 @@ object Equiv extends LowPriorityEquiv { override def equals(obj: scala.Any): Boolean = obj match { case that: AnyRef if this eq that => true - case that: Tuple8Equiv[T1, T2, T3, T4, T5, T6, T7, T8] => + case that: Tuple8Equiv[_, _, _, _, _, _, _, _] => this.eqv1 == that.eqv1 && this.eqv2 == that.eqv2 && this.eqv3 == that.eqv3 && @@ -489,7 +489,7 @@ object Equiv extends LowPriorityEquiv { override def equals(obj: scala.Any): Boolean = obj 
match { case that: AnyRef if this eq that => true - case that: Tuple9Equiv[T1, T2, T3, T4, T5, T6, T7, T8, T9] => + case that: Tuple9Equiv[_, _, _, _, _, _, _, _, _] => this.eqv1 == that.eqv1 && this.eqv2 == that.eqv2 && this.eqv3 == that.eqv3 && diff --git a/src/library/scala/math/Ordering.scala b/src/library/scala/math/Ordering.scala index c1adece01993..8333cc52cf77 100644 --- a/src/library/scala/math/Ordering.scala +++ b/src/library/scala/math/Ordering.scala @@ -22,10 +22,10 @@ import scala.annotation.migration * instances of a type. * * Ordering's companion object defines many implicit objects to deal with - * subtypes of AnyVal (e.g. Int, Double), String, and others. + * subtypes of [[AnyVal]] (e.g. `Int`, `Double`), `String`, and others. * * To sort instances by one or more member variables, you can take advantage - * of these built-in orderings using Ordering.by and Ordering.on: + * of these built-in orderings using [[Ordering.by]] and [[Ordering.on]]: * * {{{ * import scala.util.Sorting @@ -38,9 +38,10 @@ import scala.annotation.migration * Sorting.quickSort(pairs)(Ordering[(Int, String)].on(x => (x._3, x._1))) * }}} * - * An Ordering[T] is implemented by specifying compare(a:T, b:T), which - * decides how to order two instances a and b. Instances of Ordering[T] can be - * used by things like scala.util.Sorting to sort collections like Array[T]. + * An `Ordering[T]` is implemented by specifying the [[compare]] method, + * `compare(a: T, b: T): Int`, which decides how to order two instances + * `a` and `b`. Instances of `Ordering[T]` can be used by things like + * `scala.util.Sorting` to sort collections like `Array[T]`. * * For example: * @@ -52,21 +53,21 @@ import scala.annotation.migration * * // sort by age * object AgeOrdering extends Ordering[Person] { - * def compare(a:Person, b:Person) = a.age compare b.age + * def compare(a:Person, b:Person) = a.age.compare(b.age) * } * Sorting.quickSort(people)(AgeOrdering) * }}} * - * This trait and scala.math.Ordered both provide this same functionality, but - * in different ways. A type T can be given a single way to order itself by - * extending Ordered. Using Ordering, this same type may be sorted in many - * other ways. Ordered and Ordering both provide implicits allowing them to be + * This trait and [[scala.math.Ordered]] both provide this same functionality, but + * in different ways. A type `T` can be given a single way to order itself by + * extending `Ordered`. Using `Ordering`, this same type may be sorted in many + * other ways. `Ordered` and `Ordering` both provide implicits allowing them to be * used interchangeably. * - * You can import scala.math.Ordering.Implicits to gain access to other + * You can `import scala.math.Ordering.Implicits._` to gain access to other * implicit orderings. 
* - * @see [[scala.math.Ordered]], [[scala.util.Sorting]] + * @see [[scala.math.Ordered]], [[scala.util.Sorting]], [[scala.math.Ordering.Implicits]] */ @annotation.implicitNotFound(msg = "No implicit Ordering defined for ${T}.") trait Ordering[T] extends Comparator[T] with PartialOrdering[T] with Serializable { @@ -258,7 +259,7 @@ object Ordering extends LowPriorityOrderingImplicits { override def equals(obj: scala.Any): Boolean = obj match { case that: AnyRef if this eq that => true - case that: Reverse[T] => this.outer == that.outer + case that: Reverse[_] => this.outer == that.outer case _ => false } override def hashCode(): Int = outer.hashCode() * reverseSeed @@ -279,7 +280,7 @@ object Ordering extends LowPriorityOrderingImplicits { override def equals(obj: scala.Any): Boolean = obj match { case that: AnyRef if this eq that => true - case that: IterableOrdering[CC, T] => this.ord == that.ord + case that: IterableOrdering[_, _] => this.ord == that.ord case _ => false } override def hashCode(): Int = ord.hashCode() * iterableSeed @@ -591,7 +592,7 @@ object Ordering extends LowPriorityOrderingImplicits { override def equals(obj: scala.Any): Boolean = obj match { case that: AnyRef if this eq that => true - case that: OptionOrdering[T] => this.optionOrdering == that.optionOrdering + case that: OptionOrdering[_] => this.optionOrdering == that.optionOrdering case _ => false } override def hashCode(): Int = optionOrdering.hashCode() * optionSeed @@ -622,7 +623,7 @@ object Ordering extends LowPriorityOrderingImplicits { override def equals(obj: scala.Any): Boolean = obj match { case that: AnyRef if this eq that => true - case that: Tuple2Ordering[T1, T2] => + case that: Tuple2Ordering[_, _] => this.ord1 == that.ord1 && this.ord2 == that.ord2 case _ => false @@ -646,7 +647,7 @@ object Ordering extends LowPriorityOrderingImplicits { override def equals(obj: scala.Any): Boolean = obj match { case that: AnyRef if this eq that => true - case that: Tuple3Ordering[T1, T2, T3] => + case that: Tuple3Ordering[_, _, _] => this.ord1 == that.ord1 && this.ord2 == that.ord2 && this.ord3 == that.ord3 @@ -675,7 +676,7 @@ object Ordering extends LowPriorityOrderingImplicits { override def equals(obj: scala.Any): Boolean = obj match { case that: AnyRef if this eq that => true - case that: Tuple4Ordering[T1, T2, T3, T4] => + case that: Tuple4Ordering[_, _, _, _] => this.ord1 == that.ord1 && this.ord2 == that.ord2 && this.ord3 == that.ord3 && @@ -708,7 +709,7 @@ object Ordering extends LowPriorityOrderingImplicits { override def equals(obj: scala.Any): Boolean = obj match { case that: AnyRef if this eq that => true - case that: Tuple5Ordering[T1, T2, T3, T4, T5] => + case that: Tuple5Ordering[_, _, _, _, _] => this.ord1 == that.ord1 && this.ord2 == that.ord2 && this.ord3 == that.ord3 && @@ -745,7 +746,7 @@ object Ordering extends LowPriorityOrderingImplicits { override def equals(obj: scala.Any): Boolean = obj match { case that: AnyRef if this eq that => true - case that: Tuple6Ordering[T1, T2, T3, T4, T5, T6] => + case that: Tuple6Ordering[_, _, _, _, _, _] => this.ord1 == that.ord1 && this.ord2 == that.ord2 && this.ord3 == that.ord3 && @@ -786,7 +787,7 @@ object Ordering extends LowPriorityOrderingImplicits { override def equals(obj: scala.Any): Boolean = obj match { case that: AnyRef if this eq that => true - case that: Tuple7Ordering[T1, T2, T3, T4, T5, T6, T7] => + case that: Tuple7Ordering[_, _, _, _, _, _, _] => this.ord1 == that.ord1 && this.ord2 == that.ord2 && this.ord3 == that.ord3 && @@ -831,7 +832,7 
@@ object Ordering extends LowPriorityOrderingImplicits { override def equals(obj: scala.Any): Boolean = obj match { case that: AnyRef if this eq that => true - case that: Tuple8Ordering[T1, T2, T3, T4, T5, T6, T7, T8] => + case that: Tuple8Ordering[_, _, _, _, _, _, _, _] => this.ord1 == that.ord1 && this.ord2 == that.ord2 && this.ord3 == that.ord3 && @@ -880,7 +881,7 @@ object Ordering extends LowPriorityOrderingImplicits { override def equals(obj: scala.Any): Boolean = obj match { case that: AnyRef if this eq that => true - case that: Tuple9Ordering[T1, T2, T3, T4, T5, T6, T7, T8, T9] => + case that: Tuple9Ordering[_, _, _, _, _, _, _, _, _] => this.ord1 == that.ord1 && this.ord2 == that.ord2 && this.ord3 == that.ord3 && diff --git a/src/library/scala/reflect/ClassTag.scala b/src/library/scala/reflect/ClassTag.scala index b3ef8f781a9d..5226bb5577a8 100644 --- a/src/library/scala/reflect/ClassTag.scala +++ b/src/library/scala/reflect/ClassTag.scala @@ -15,8 +15,8 @@ package reflect import java.lang.{Class => jClass} import java.lang.ref.{WeakReference => jWeakReference} - import scala.annotation.{implicitNotFound, nowarn} +import scala.runtime.ClassValueCompat /** * @@ -116,7 +116,7 @@ object ClassTag { val Null : ClassTag[scala.Null] = Manifest.Null private val cacheDisabled = java.lang.Boolean.getBoolean("scala.reflect.classtag.cache.disable") - private[this] object cache extends ClassValue[jWeakReference[ClassTag[_]]] { + private[this] object cache extends ClassValueCompat[jWeakReference[ClassTag[_]]] { override def computeValue(runtimeClass: jClass[_]): jWeakReference[ClassTag[_]] = new jWeakReference(computeTag(runtimeClass)) diff --git a/src/library/scala/reflect/package.scala b/src/library/scala/reflect/package.scala index 33faadc783ad..67551c7f6e80 100644 --- a/src/library/scala/reflect/package.scala +++ b/src/library/scala/reflect/package.scala @@ -12,7 +12,8 @@ package scala -import java.lang.reflect.{ AccessibleObject => jAccessibleObject } +import java.lang.reflect.{AccessibleObject => jAccessibleObject} +import scala.annotation.nowarn package object reflect { @@ -54,7 +55,10 @@ package object reflect { * attempt, it is caught and discarded. */ def ensureAccessible[T <: jAccessibleObject](m: T): T = { - if (!m.isAccessible) { + // This calls `setAccessible` unnecessarily, because `isAccessible` is only `true` if `setAccessible(true)` + // was called before, not if the reflected object is inherently accessible. + // TODO: replace by `canAccess` once we're on JDK 9+ + if (!m.isAccessible: @nowarn("cat=deprecation")) { try m setAccessible true catch { case _: SecurityException => } // does nothing } diff --git a/src/library/scala/runtime/ClassValueCompat.scala b/src/library/scala/runtime/ClassValueCompat.scala new file mode 100644 index 000000000000..908c36c6ef3b --- /dev/null +++ b/src/library/scala/runtime/ClassValueCompat.scala @@ -0,0 +1,53 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.runtime + + +import scala.runtime.ClassValueCompat._ + +private[scala] abstract class ClassValueCompat[T] extends ClassValueInterface[T] { self => + private val instance: ClassValueInterface[T] = + if (classValueAvailable) new JavaClassValue() + else new FallbackClassValue() + + private class JavaClassValue extends ClassValue[T] with ClassValueInterface[T] { + override def computeValue(cls: Class[_]): T = self.computeValue(cls) + } + + private class FallbackClassValue extends ClassValueInterface[T] { + override def get(cls: Class[_]): T = self.computeValue(cls) + + override def remove(cls: Class[_]): Unit = {} + } + + def get(cls: Class[_]): T = instance.get(cls) + + def remove(cls: Class[_]): Unit = instance.remove(cls) + + protected def computeValue(cls: Class[_]): T +} + +private[scala] object ClassValueCompat { + trait ClassValueInterface[T] { + def get(cls: Class[_]): T + + def remove(cls: Class[_]): Unit + } + + private val classValueAvailable: Boolean = try { + Class.forName("java.lang.ClassValue", false, classOf[Object].getClassLoader) + true + } catch { + case _: ClassNotFoundException => false + } +} diff --git a/src/library/scala/runtime/ModuleSerializationProxy.java b/src/library/scala/runtime/ModuleSerializationProxy.java deleted file mode 100644 index 0a587ade415b..000000000000 --- a/src/library/scala/runtime/ModuleSerializationProxy.java +++ /dev/null @@ -1,51 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala.runtime; - -import java.io.Serializable; -import java.security.AccessController; -import java.security.PrivilegedActionException; -import java.security.PrivilegedExceptionAction; -import java.util.HashSet; -import java.util.Set; - -/** A serialization proxy for singleton objects */ -public final class ModuleSerializationProxy implements Serializable { - private static final long serialVersionUID = 1L; - private final Class moduleClass; - private static final ClassValue instances = new ClassValue() { - @Override - protected Object computeValue(Class type) { - try { - return AccessController.doPrivileged((PrivilegedExceptionAction) () -> type.getField("MODULE$").get(null)); - } catch (PrivilegedActionException e) { - return rethrowRuntime(e.getCause()); - } - } - }; - - private static Object rethrowRuntime(Throwable e) { - Throwable cause = e.getCause(); - if (cause instanceof RuntimeException) throw (RuntimeException) cause; - else throw new RuntimeException(cause); - } - - public ModuleSerializationProxy(Class moduleClass) { - this.moduleClass = moduleClass; - } - - @SuppressWarnings("unused") - private Object readResolve() { - return instances.get(moduleClass); - } -} diff --git a/src/library/scala/runtime/ModuleSerializationProxy.scala b/src/library/scala/runtime/ModuleSerializationProxy.scala new file mode 100644 index 000000000000..42b3f992d626 --- /dev/null +++ b/src/library/scala/runtime/ModuleSerializationProxy.scala @@ -0,0 +1,46 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.runtime + +import java.io.Serializable +import java.security.PrivilegedActionException +import java.security.PrivilegedExceptionAction +import scala.annotation.nowarn + +private[runtime] object ModuleSerializationProxy { + private val instances = new ClassValueCompat[Object] { + @nowarn("cat=deprecation") // AccessController is deprecated on JDK 17 + def getModule(cls: Class[_]): Object = + java.security.AccessController.doPrivileged( + (() => cls.getField("MODULE$").get(null)): PrivilegedExceptionAction[Object]) + override protected def computeValue(cls: Class[_]): Object = + try getModule(cls) + catch { + case e: PrivilegedActionException => + rethrowRuntime(e.getCause) + } + } + + private def rethrowRuntime(e: Throwable): Object = { + val cause = e.getCause + cause match { + case exception: RuntimeException => throw exception + case _ => throw new RuntimeException(cause) + } + } +} + +@SerialVersionUID(1L) +final class ModuleSerializationProxy(moduleClass: Class[_]) extends Serializable { + private def readResolve = ModuleSerializationProxy.instances.get(moduleClass) +} diff --git a/src/library/scala/runtime/Statics.java b/src/library/scala/runtime/Statics.java index 09288e09fbfd..886d000592ef 100644 --- a/src/library/scala/runtime/Statics.java +++ b/src/library/scala/runtime/Statics.java @@ -159,7 +159,7 @@ private static MethodHandle mkHandle() { MethodHandles.Lookup lookup = MethodHandles.lookup(); try { return lookup.findStatic(Class.forName("java.lang.invoke.VarHandle"), "releaseFence", MethodType.methodType(Void.TYPE)); - } catch (ClassNotFoundException e) { + } catch (NoSuchMethodException | ClassNotFoundException e) { try { Class unsafeClass = Class.forName("sun.misc.Unsafe"); return lookup.findVirtual(unsafeClass, "storeFence", MethodType.methodType(void.class)).bindTo(findUnsafe(unsafeClass)); @@ -168,7 +168,7 @@ private static MethodHandle mkHandle() { error.addSuppressed(e); throw error; } - } catch (NoSuchMethodException | IllegalAccessException e) { + } catch (IllegalAccessException e) { throw new ExceptionInInitializerError(e); } } diff --git a/src/library/scala/sys/SystemProperties.scala b/src/library/scala/sys/SystemProperties.scala index 4ae753aa8f80..aa2f0bd5d06c 100644 --- a/src/library/scala/sys/SystemProperties.scala +++ b/src/library/scala/sys/SystemProperties.scala @@ -52,6 +52,7 @@ extends mutable.AbstractMap[String, String] { def subtractOne (key: String): this.type = { wrapAccess(System.clearProperty(key)) ; this } def addOne (kv: (String, String)): this.type = { wrapAccess(System.setProperty(kv._1, kv._2)) ; this } + @annotation.nowarn("cat=deprecation") // AccessControlException is deprecated on JDK 17 def wrapAccess[T](body: => T): Option[T] = try Some(body) catch { case _: AccessControlException => None } } diff --git a/src/library/scala/util/Either.scala b/src/library/scala/util/Either.scala index adcc686234a4..60e7cf6991ac 100644 --- a/src/library/scala/util/Either.scala +++ b/src/library/scala/util/Either.scala @@ -690,7 +690,7 @@ object Either { * * @throws NoSuchElementException if the projection is `Left`. 
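For context on the ModuleSerializationProxy rewrite above: the proxy exists so that deserializing a singleton object resolves back to its unique `MODULE$` instance instead of minting a copy. A minimal round-trip sketch, using an illustrative case object rather than anything from the patch:
{{{
import java.io._

case object Config { val retries = 3 }   // case objects are Serializable

def roundTrip[A <: AnyRef](a: A): A = {
  val buf = new ByteArrayOutputStream
  val out = new ObjectOutputStream(buf)
  out.writeObject(a)
  out.close()
  val in = new ObjectInputStream(new ByteArrayInputStream(buf.toByteArray))
  try in.readObject().asInstanceOf[A] finally in.close()
}

// the proxy's readResolve is expected to hand back the same singleton instance
roundTrip(Config) eq Config
}}}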
*/ - @deprecated("Use `Either.getOrElse` instead", "2.13.0") + @deprecated("Use `Either.toOption.get` instead", "2.13.0") def get: B = e match { case Right(b) => b case _ => throw new NoSuchElementException("Either.right.get on Left") diff --git a/src/library/scala/util/Properties.scala b/src/library/scala/util/Properties.scala index ff9634e2cc7e..f10723cb4b1d 100644 --- a/src/library/scala/util/Properties.scala +++ b/src/library/scala/util/Properties.scala @@ -109,7 +109,7 @@ private[scala] trait PropertiesTrait { * or "version (unknown)" if it cannot be determined. */ val versionString = "version " + scalaPropOrElse("version.number", "(unknown)") - val copyrightString = scalaPropOrElse("copyright.string", "Copyright 2002-2020, LAMP/EPFL and Lightbend, Inc.") + val copyrightString = scalaPropOrElse("copyright.string", "Copyright 2002-2021, LAMP/EPFL and Lightbend, Inc.") /** This is the encoding to use reading in source files, overridden with -encoding. * Note that it uses "prop" i.e. looks in the scala jar, not the system properties. diff --git a/src/library/scala/util/matching/Regex.scala b/src/library/scala/util/matching/Regex.scala index 66ead3f03107..eadb9170a192 100644 --- a/src/library/scala/util/matching/Regex.scala +++ b/src/library/scala/util/matching/Regex.scala @@ -33,7 +33,8 @@ import java.util.regex.{ Pattern, Matcher } * and, if it does, to extract or transform the parts that match. * * === Usage === - * This class delegates to the [[java.util.regex]] package of the Java Platform. + + * This class delegates to the [[https://docs.oracle.com/javase/8/docs/api/java/util/regex/package-summary.html java.util.regex]] package of the Java Platform. * See the documentation for [[java.util.regex.Pattern]] for details about * the regular expression syntax for pattern strings. * @@ -218,15 +219,18 @@ class Regex private[matching](val pattern: Pattern, groupNames: String*) extends * val namedYears = for (m <- namedDate findAllMatchIn dates) yield m group "year" * }}} * - * Group names supplied to the constructor are preferred to inline group names - * when retrieving matched groups by name. Not all platforms support inline names. + * Inline group names are preferred over group names supplied to the constructor + * when retrieving matched groups by name. Group names supplied to the constructor + * should be considered deprecated. * * This constructor does not support options as flags, which must be - * supplied as inline flags in the pattern string: `(?idmsux-idmsux)`. + * supplied as inline flags in the pattern string: `(?idmsuxU)`. * * @param regex The regular expression to compile. * @param groupNames Names of capturing groups. */ + // we cannot add the alternative `def this(regex: String)` in a forward binary compatible way: + // @deprecated("use inline group names like (?X) instead", "2.13.7") def this(regex: String, groupNames: String*) = this(Pattern.compile(regex), groupNames: _*) /** Tries to match a [[java.lang.CharSequence]]. 
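A small usage sketch of the inline group names now preferred by the Regex documentation above (the date pattern mirrors the surrounding examples; the input string is illustrative):
{{{
// inline group names only; no names passed to the constructor
val namedDate = raw"(?<year>\d\d\d\d)-(?<month>\d\d)-(?<day>\d\d)".r

"2021-06-01" match {
  case namedDate(year, month, day) => s"$year/$month/$day"   // positional extraction still works
  case _                           => "no match"
}

// retrieving a group by its inline name from a Match
val m = namedDate.findFirstMatchIn("released on 2021-06-01").get
m.group("year")   // "2021"
}}}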
@@ -395,7 +399,7 @@ class Regex private[matching](val pattern: Pattern, groupNames: String*) extends def hasNext = matchIterator.hasNext def next(): Match = { matchIterator.next() - new Match(matchIterator.source, matchIterator.matcher, matchIterator.groupNames).force + new Match(matchIterator.source, matchIterator.matcher, matchIterator._groupNames).force } } } @@ -620,6 +624,7 @@ object Regex { val source: CharSequence /** The names of the groups, or an empty sequence if none defined */ + @deprecated("groupNames does not include inline group names, and should not be used anymore", "2.13.7") val groupNames: Seq[String] /** The number of capturing groups in the pattern. @@ -686,7 +691,11 @@ object Regex { if (end(i) >= 0) source.subSequence(end(i), source.length) else null - private[this] lazy val nameToIndex: Map[String, Int] = Map[String, Int]() ++ ("" :: groupNames.toList).zipWithIndex + @scala.annotation.nowarn("msg=deprecated") + private def groupNamesNowarn: Seq[String] = groupNames + + private[this] lazy val nameToIndex: Map[String, Int] = + Map[String, Int]() ++ ("" :: groupNamesNowarn.toList).zipWithIndex /** Returns the group with the given name. * @@ -699,7 +708,7 @@ object Regex { * @throws IllegalArgumentException if the requested group name is not defined */ def group(id: String): String = ( - if (groupNames.isEmpty) + if (groupNamesNowarn.isEmpty) matcher group id else nameToIndex.get(id) match { @@ -715,7 +724,10 @@ object Regex { /** Provides information about a successful match. */ class Match(val source: CharSequence, protected[matching] val matcher: Matcher, - val groupNames: Seq[String]) extends MatchData { + _groupNames: Seq[String]) extends MatchData { + + @deprecated("groupNames does not include inline group names, and should not be used anymore", "2.13.7") + val groupNames: Seq[String] = _groupNames /** The index of the first matched character. */ val start: Int = matcher.start @@ -790,9 +802,12 @@ object Regex { * * @see [[java.util.regex.Matcher]] */ - class MatchIterator(val source: CharSequence, val regex: Regex, val groupNames: Seq[String]) + class MatchIterator(val source: CharSequence, val regex: Regex, private[Regex] val _groupNames: Seq[String]) extends AbstractIterator[String] with Iterator[String] with MatchData { self => + @deprecated("groupNames does not include inline group names, and should not be used anymore", "2.13.7") + val groupNames: Seq[String] = _groupNames + protected[Regex] val matcher = regex.pattern.matcher(source) // 0 = not yet matched, 1 = matched, 2 = advanced to match, 3 = no more matches @@ -854,14 +869,14 @@ object Regex { /** Convert to an iterator that yields MatchData elements instead of Strings. */ def matchData: Iterator[Match] = new AbstractIterator[Match] { def hasNext = self.hasNext - def next() = { self.next(); new Match(source, matcher, groupNames).force } + def next() = { self.next(); new Match(source, matcher, _groupNames).force } } /** Convert to an iterator that yields MatchData elements instead of Strings and has replacement support. 
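The Match and MatchIterator changes above keep the deprecated `groupNames` member but route internal uses through a non-deprecated alias so the library itself compiles without deprecation warnings. The same shape in miniature, with a hypothetical class:
{{{
class Box(_items: Seq[String]) {

  @deprecated("use size instead", "1.1")
  val items: Seq[String] = _items

  // internal code goes through the non-deprecated alias, so no warning is issued here
  def size: Int = _items.length
}
}}}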
*/ private[matching] def replacementData = new AbstractIterator[Match] with Replacement { def matcher = self.matcher def hasNext = self.hasNext - def next() = { self.next(); new Match(source, matcher, groupNames).force } + def next() = { self.next(); new Match(source, matcher, _groupNames).force } } } diff --git a/src/manual/scala/man1/scalac.scala b/src/manual/scala/man1/scalac.scala index b4a83e3cbf37..88788133debd 100644 --- a/src/manual/scala/man1/scalac.scala +++ b/src/manual/scala/man1/scalac.scala @@ -220,9 +220,6 @@ object scalac extends Command { Definition( CmdOption("Xlog-implicit-conversions"), "Print a message whenever an implicit conversion is inserted."), - Definition( - CmdOption("Xlog-implicits"), - "Show more detail on why some implicits are not applicable."), Definition( CmdOption("Xlog-reflective-calls"), "Print a message when a reflective method call is generated."), diff --git a/src/manual/scala/tools/docutil/EmitManPage.scala b/src/manual/scala/tools/docutil/EmitManPage.scala index 0c748377c987..441883e62395 100644 --- a/src/manual/scala/tools/docutil/EmitManPage.scala +++ b/src/manual/scala/tools/docutil/EmitManPage.scala @@ -93,7 +93,7 @@ object EmitManPage { case BlockQuote(text) => out println ".TP" emitText(text) - out.println + out.println() case CodeSample(text) => out println "\n.nf" @@ -104,7 +104,7 @@ object EmitManPage { for (item <- lst.items) { out println ".IP" emitText(item) - out.println + out.println() } case lst:NumberedList => @@ -114,7 +114,7 @@ object EmitManPage { val item = lst.items(idx) out.println(".IP \" " + (idx+1) + ".\"") emitText(item) - out.println + out.println() } case TitledPara(title, text) => diff --git a/src/partest/scala/tools/partest/ConsoleLog.scala b/src/partest/scala/tools/partest/ConsoleLog.scala index 89feccd1ef78..5064f0fd5bfd 100644 --- a/src/partest/scala/tools/partest/ConsoleLog.scala +++ b/src/partest/scala/tools/partest/ConsoleLog.scala @@ -65,6 +65,7 @@ class ConsoleLog(colorEnabled: Boolean) { def echoWarning(msg: String) = echo(bold(red(msg))) def printDot(): Unit = printProgress(".") + def printS(): Unit = printProgress(_warning + "s" +_default) def printEx(): Unit = printProgress(_failure + "X" + _default) private def printProgress(icon: String): Unit = synchronized { if (dotCount >= DotWidth) { diff --git a/src/partest/scala/tools/partest/DirectTest.scala b/src/partest/scala/tools/partest/DirectTest.scala index 229b2715df84..d923829b8c14 100644 --- a/src/partest/scala/tools/partest/DirectTest.scala +++ b/src/partest/scala/tools/partest/DirectTest.scala @@ -42,8 +42,11 @@ abstract class DirectTest { def testPath = SFile(sys.props("partest.test-path")) def testOutput = Directory(sys.props("partest.output")) + protected def pathOf(locations: String*) = locations.mkString(sys.props("path.separator")) + // override to add additional settings besides -d testOutput.path - def extraSettings: String = "" + // default is -usejavacp + def extraSettings: String = "-usejavacp" // a default Settings object using only extraSettings def settings: Settings = newSettings(CommandLineParser.tokenize(extraSettings)) // settings factory using given args and also debug settings diff --git a/src/partest/scala/tools/partest/ScaladocModelTest.scala b/src/partest/scala/tools/partest/ScaladocModelTest.scala index 487c962a298a..5a73ce9ee0c2 100644 --- a/src/partest/scala/tools/partest/ScaladocModelTest.scala +++ b/src/partest/scala/tools/partest/ScaladocModelTest.scala @@ -72,7 +72,7 @@ abstract class ScaladocModelTest extends DirectTest { 
try { // 1 - compile with scaladoc and get the model out - val universe = model.getOrElse({sys.error("Scaladoc Model Test ERROR: No universe generated!")}) + val universe = model.getOrElse { sys.error("Scaladoc Model Test ERROR: No universe generated!") } // 2 - check the model generated testModel(universe.rootPackage) println("Done.") @@ -85,15 +85,15 @@ abstract class ScaladocModelTest extends DirectTest { System.setErr(prevErr) } - private[this] var settings: doc.Settings = null + private[this] var docSettings: doc.Settings = null // create a new scaladoc compiler def newDocFactory: DocFactory = { - settings = new doc.Settings(_ => ()) - settings.scaladocQuietRun = true // yaay, no more "model contains X documentable templates"! + docSettings = new doc.Settings(_ => ()) + docSettings.scaladocQuietRun = true // yaay, no more "model contains X documentable templates"! val args = extraSettings + " " + scaladocSettings - new ScalaDoc.Command((CommandLineParser tokenize (args)), settings) // side-effecting, I think - val docFact = new DocFactory(new ConsoleReporter(settings), settings) + new ScalaDoc.Command((CommandLineParser tokenize (args)), docSettings) // side-effecting, I think + val docFact = new DocFactory(new ConsoleReporter(docSettings), docSettings) docFact } diff --git a/src/partest/scala/tools/partest/SecurityTest.scala b/src/partest/scala/tools/partest/SecurityTest.scala deleted file mode 100644 index ce76d29e67fa..000000000000 --- a/src/partest/scala/tools/partest/SecurityTest.scala +++ /dev/null @@ -1,26 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala.tools.partest - -import java.security._ -import java.util._ - -abstract class SecurityTest extends App { - def throwIt(x: Any) = throw new AccessControlException("" + x) - def propertyCheck(p: PropertyPermission): Unit = throwIt(p) - - def check(perm: Permission): Unit = perm match { - case p: PropertyPermission => propertyCheck(p) - case _ => () - } -} diff --git a/src/partest/scala/tools/partest/TestState.scala b/src/partest/scala/tools/partest/TestState.scala index 8867ffe72c8d..3b6dc49444ac 100644 --- a/src/partest/scala/tools/partest/TestState.scala +++ b/src/partest/scala/tools/partest/TestState.scala @@ -30,7 +30,7 @@ sealed abstract class TestState { def shortStatus = if (isOk) "ok" else "!!" 
- final def andAlso(next: => TestState): TestState = if (isOk) next else this + final def andAlso(next: => TestState): TestState = if (isOk && !isSkipped) next else this override def toString = status } diff --git a/src/partest/scala/tools/partest/nest/AbstractRunner.scala b/src/partest/scala/tools/partest/nest/AbstractRunner.scala index a38ca75e18ed..7f6dd9a5b794 100644 --- a/src/partest/scala/tools/partest/nest/AbstractRunner.scala +++ b/src/partest/scala/tools/partest/nest/AbstractRunner.scala @@ -99,7 +99,8 @@ class AbstractRunner(val config: RunnerSpec.Config, protected final val testSour diffed ::: logged } if (terse) { - if (state.isOk) { printDot() ; Nil } + if (state.isSkipped) { printS(); Nil } + else if (state.isOk) { printDot() ; Nil } else { printEx() ; statusLine(state, durationMs) :: errInfo } } else { echo(statusLine(state, durationMs)) diff --git a/src/partest/scala/tools/partest/nest/DelegatingSecurityManager.scala b/src/partest/scala/tools/partest/nest/DelegatingSecurityManager.scala index 3ba255ad4dc0..66dff5d273c0 100644 --- a/src/partest/scala/tools/partest/nest/DelegatingSecurityManager.scala +++ b/src/partest/scala/tools/partest/nest/DelegatingSecurityManager.scala @@ -16,6 +16,7 @@ import java.io.FileDescriptor import java.net.InetAddress import java.security.Permission +@deprecated("JDK 17 deprecates SecurityManager", since="2.13.7") class DelegatingSecurityManager(delegate: SecurityManager) extends SecurityManager { override def checkExit(status: Int): Unit = if (delegate ne null) delegate.checkExit(status) override def checkPermission(perm: Permission): Unit = if (delegate ne null) delegate.checkPermission(perm) diff --git a/src/partest/scala/tools/partest/nest/Runner.scala b/src/partest/scala/tools/partest/nest/Runner.scala index 8be3bd69d9af..3004010789ea 100644 --- a/src/partest/scala/tools/partest/nest/Runner.scala +++ b/src/partest/scala/tools/partest/nest/Runner.scala @@ -18,6 +18,7 @@ import java.lang.reflect.InvocationTargetException import java.nio.charset.Charset import java.nio.file.{Files, StandardOpenOption} +import scala.annotation.nowarn import scala.collection.mutable.ListBuffer import scala.concurrent.duration.Duration import scala.reflect.internal.FatalError @@ -130,25 +131,15 @@ class Runner(val testInfo: TestInfo, val suiteRunner: AbstractRunner) { /** Fail the action. */ def nextTestActionFailing(reason: String): TestState = nextTestActionExpectTrue(reason, false) - private def assembleTestCommand(outDir: File, logFile: File): List[String] = { - // check whether there is a ".javaopts" file - val argsFile = testFile changeExtension "javaopts" - val javaopts = readOptionsFile(argsFile) + private def assembleTestCommand(outDir: File, javaopts: List[String]): List[String] = { if (javaopts.nonEmpty) - suiteRunner.verbose(s"Found javaopts file '$argsFile', using options: '${javaopts.mkString(",")}'") - - // Note! As this currently functions, suiteRunner.javaOpts must precede argString - // because when an option is repeated to java only the last one wins. - // That means until now all the .javaopts files were being ignored because - // they all attempt to change options which are also defined in - // partest.java_opts, leading to debug output like: - // - // debug: Found javaopts file 'files/shootout/message.scala-2.javaopts', using options: '-Xss32k' - // debug: java -Xss32k -Xss2m -Xms256M -Xmx1024M -classpath [...] 
+ suiteRunner.verbose(s"Using java options: '${javaopts.mkString(",")}'") + val propertyOpts = propertyOptions(fork = true).map { case (k, v) => s"-D$k=$v" } val classpath = joinPaths(extraClasspath ++ testClassPath) + // `javaopts` last; for repeated arguments, the last one wins javaCmdPath +: ( (suiteRunner.javaOpts.split(' ') ++ extraJavaOptions ++ javaopts).filter(_ != "").toList ++ Seq( "-classpath", @@ -224,8 +215,8 @@ class Runner(val testInfo: TestInfo, val suiteRunner: AbstractRunner) { } } - private def execTest(outDir: File, logFile: File): TestState = { - val cmd = assembleTestCommand(outDir, logFile) + private def execTest(outDir: File, logFile: File, javaopts: List[String]): TestState = { + val cmd = assembleTestCommand(outDir, javaopts) pushTranscript((cmd mkString s" \\$EOL ") + " > " + logFile.getName) nextTestAction(runCommand(cmd, logFile)) { @@ -268,7 +259,10 @@ class Runner(val testInfo: TestInfo, val suiteRunner: AbstractRunner) { pushTranscript(s" > ${logFile.getName}") - TrapExit(() => run()) match { + @nowarn("cat=deprecation") // JDK 17 deprecates SecurityManager, so TrapExit is deprecated too + val trapExit = TrapExit + + trapExit(() => run()) match { case Left((status, throwable)) if status != 0 => genFail("non-zero exit code") case _ => @@ -514,9 +508,35 @@ class Runner(val testInfo: TestInfo, val suiteRunner: AbstractRunner) { def description = mkScalacString() lazy val result = { pushTranscript(description) ; attemptCompile(fs) } } + case class SkipRound(fs: List[File], state: TestState) extends CompileRound { + def description: String = state.status + lazy val result = { pushTranscript(description); state } + } + + def compilationRounds(file: File): List[CompileRound] = { + import scala.util.Properties.javaSpecVersion + val Range = """(\d+)(?:(\+)|(?: *\- *(\d+)))?""".r + lazy val currentJavaVersion = javaSpecVersion.stripPrefix("1.").toInt + val allFiles = sources(file) + val skipStates = toolArgsFor(allFiles)("javaVersion", split = false).flatMap({ + case v @ Range(from, plus, to) => + val ok = + if (plus == null) + if (to == null) currentJavaVersion == from.toInt + else from.toInt <= currentJavaVersion && currentJavaVersion <= to.toInt + else + currentJavaVersion >= from.toInt + if (ok) None + else Some(genSkip(s"skipped on Java $javaSpecVersion, only running on $v")) + case v => + Some(genFail(s"invalid javaVersion range in test comment: $v")) + }) + skipStates.headOption match { + case Some(state) => List(SkipRound(List(file), state)) + case _ => groupedFiles(allFiles).flatMap(mixedCompileGroup) + } + } - def compilationRounds(file: File): List[CompileRound] = - groupedFiles(sources(file)).map(mixedCompileGroup).flatten def mixedCompileGroup(allFiles: List[File]): List[CompileRound] = { val (scalaFiles, javaFiles) = allFiles partition (_.isScala) val round1 = if (scalaFiles.isEmpty) None else Some(ScalaAndJava(allFiles)) @@ -533,17 +553,18 @@ class Runner(val testInfo: TestInfo, val suiteRunner: AbstractRunner) { // pass if it checks and didn't crash the compiler // or, OK, we'll let you crash the compiler with a FatalError if you supply a check file def checked(r: CompileRound) = r.result match { + case s: Skip => s case crash @ Crash(_, t, _) if !checkFile.canRead || !t.isInstanceOf[FatalError] => crash - case dnc @ _ => diffIsOk + case _ => diffIsOk } - compilationRounds(testFile).find(!_.result.isOk).map(checked).getOrElse(genFail("expected compilation failure")) + compilationRounds(testFile).find(r => !r.result.isOk || 
r.result.isSkipped).map(checked).getOrElse(genFail("expected compilation failure")) } // run compilation until failure, evaluate `andAlso` on success def runTestCommon(andAlso: => TestState = genPass()): TestState = runInContext { // DirectCompiler already says compilation failed - val res = compilationRounds(testFile).find(!_.result.isOk).map(_.result).getOrElse(genPass()) + val res = compilationRounds(testFile).find(r => !r.result.isOk || r.result.isSkipped).map(_.result).getOrElse(genPass()) res andAlso andAlso } @@ -639,10 +660,9 @@ class Runner(val testInfo: TestInfo, val suiteRunner: AbstractRunner) { } private def runRunTest(): TestState = { - val argsFile = testFile changeExtension "javaopts" - val javaopts = readOptionsFile(argsFile) + val javaopts = toolArgs("java") val execInProcess = PartestDefaults.execInProcess && javaopts.isEmpty && !Set("specialized", "instrumented").contains(testFile.getParentFile.getName) - def exec() = if (execInProcess) execTestInProcess(outDir, logFile) else execTest(outDir, logFile) + def exec() = if (execInProcess) execTestInProcess(outDir, logFile) else execTest(outDir, logFile, javaopts) def noexec() = genSkip("no-exec: tests compiled but not run") runTestCommon(if (suiteRunner.config.optNoExec) noexec() else exec().andAlso(diffIsOk)) } diff --git a/src/partest/scala/tools/partest/nest/TrapExit.scala b/src/partest/scala/tools/partest/nest/TrapExit.scala index 8e4e1d7cb50b..f5f00dc21859 100644 --- a/src/partest/scala/tools/partest/nest/TrapExit.scala +++ b/src/partest/scala/tools/partest/nest/TrapExit.scala @@ -12,6 +12,7 @@ package scala.tools.partest.nest +@deprecated("JDK 17 deprecates SecurityManager", since="2.13.7") object TrapExit { private class TrapExitThrowable(val status: Int) extends Throwable { diff --git a/src/partest/scala/tools/partest/package.scala b/src/partest/scala/tools/partest/package.scala index b4ba200511e5..5484b5dc8b94 100644 --- a/src/partest/scala/tools/partest/package.scala +++ b/src/partest/scala/tools/partest/package.scala @@ -19,7 +19,6 @@ import scala.concurrent.duration.Duration import scala.io.Codec import scala.jdk.CollectionConverters._ import scala.tools.nsc.util.Exceptional -import scala.util.chaining._ package object partest { type File = java.io.File @@ -129,8 +128,6 @@ package object partest { def fileSeparator = java.io.File.separator def pathSeparator = java.io.File.pathSeparator - def words(s: String): List[String] = (s.trim split "\\s+").toList - def timed[T](body: => T): (T, Long) = { val t1 = System.currentTimeMillis val result = body @@ -143,18 +140,6 @@ package object partest { def basename(name: String): String = Path(name).stripExtension - /** In order to allow for spaces in flags/options, this - * parses .flags, .javaopts, javacopts etc files as follows: - * If it is exactly one line, it is split (naively) on spaces. - * If it contains more than one line, each line is its own - * token, spaces and all. 
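The `compilationRounds` change in Runner.scala above accepts a `javaVersion` tool argument of the form `N`, `N+`, or `N - M` and skips the test when the running JVM falls outside that range. A standalone sketch of the same check, reusing the patch's regex but with simplified surroundings:
{{{
import scala.util.Properties.javaSpecVersion

val Range = """(\d+)(?:(\+)|(?: *\- *(\d+)))?""".r
val currentJavaVersion = javaSpecVersion.stripPrefix("1.").toInt

// Some(true) = run, Some(false) = skip, None = malformed spec
def javaVersionOk(spec: String): Option[Boolean] = spec match {
  case Range(from, plus, to) =>
    Some(
      if (plus != null) currentJavaVersion >= from.toInt
      else if (to != null) from.toInt <= currentJavaVersion && currentJavaVersion <= to.toInt
      else currentJavaVersion == from.toInt
    )
  case _ => None
}

javaVersionOk("8")       // exact version
javaVersionOk("11+")     // lower bound only
javaVersionOk("9 - 17")  // inclusive range
}}}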
- */ - def readOptionsFile(file: File): List[String] = - file.fileLines match { - case x :: Nil => words(x) - case xs => xs - } - def findProgram(name: String): Option[File] = { val pathDirs = sys.env("PATH") match { case null => List("/usr/local/bin", "/usr/bin", "/bin") @@ -194,17 +179,4 @@ package object partest { def isDebug = sys.props.contains("partest.debug") || sys.env.contains("PARTEST_DEBUG") def debugSettings = sys.props.getOrElse("partest.debug.settings", "") def log(msg: => Any): Unit = if (isDebug) Console.err.println(msg) - - private val printable = raw"\p{Print}".r - - def hexdump(s: String): Iterator[String] = { - var offset = 0 - def hex(bytes: Array[Byte]) = bytes.map(b => f"$b%02x").mkString(" ") - def charFor(byte: Byte): Char = byte.toChar match { case c @ printable() => c ; case _ => '.' } - def ascii(bytes: Array[Byte]) = bytes.map(charFor).mkString - def format(bytes: Array[Byte]): String = - f"$offset%08x ${hex(bytes.slice(0, 8))}%-24s ${hex(bytes.slice(8, 16))}%-24s |${ascii(bytes)}|" - .tap(_ => offset += bytes.length) - s.getBytes(codec.charSet).grouped(16).map(format) - } } diff --git a/src/reflect/scala/reflect/api/TypeTags.scala b/src/reflect/scala/reflect/api/TypeTags.scala index 382577ce3cf4..7dba64a079e7 100644 --- a/src/reflect/scala/reflect/api/TypeTags.scala +++ b/src/reflect/scala/reflect/api/TypeTags.scala @@ -15,6 +15,7 @@ package reflect package api import java.io.ObjectStreamException +import scala.annotation.nowarn /** * A `TypeTag[T]` encapsulates the runtime type representation of some type `T`. @@ -290,7 +291,7 @@ trait TypeTags { self: Universe => def apply[T](mirror1: scala.reflect.api.Mirror[self.type], tpec1: TypeCreator): TypeTag[T] = { (mirror1: AnyRef) match { - case m: scala.reflect.runtime.JavaMirrors#MirrorImpl + case m: scala.reflect.runtime.JavaMirrors#JavaMirror @nowarn("cat=deprecation") if cacheMaterializedTypeTags && tpec1.getClass.getName.contains("$typecreator") && tpec1.getClass.getDeclaredFields.length == 0 => // excludes type creators that splice in bound types. diff --git a/src/reflect/scala/reflect/api/Types.scala b/src/reflect/scala/reflect/api/Types.scala index 2fc29f0bb382..d59241927674 100644 --- a/src/reflect/scala/reflect/api/Types.scala +++ b/src/reflect/scala/reflect/api/Types.scala @@ -137,7 +137,7 @@ trait Types { * Unlike `members` this method doesn't returns inherited members. * * Members in the returned scope might appear in arbitrary order. - * Use `declarations.sorted` to get an ordered list of members. + * Use `decls.sorted` to get an ordered list of members. */ def decls: MemberScope @@ -150,7 +150,7 @@ trait Types { * Unlike `declarations` this method also returns inherited members. * * Members in the returned scope might appear in arbitrary order. - * Use `declarations.sorted` to get an ordered list of members. + * Use `members.sorted` to get an ordered list of members. */ def members: MemberScope diff --git a/src/reflect/scala/reflect/internal/BaseTypeSeqs.scala b/src/reflect/scala/reflect/internal/BaseTypeSeqs.scala index b99f40770791..570a94e960ed 100644 --- a/src/reflect/scala/reflect/internal/BaseTypeSeqs.scala +++ b/src/reflect/scala/reflect/internal/BaseTypeSeqs.scala @@ -16,7 +16,7 @@ package internal // todo implement in terms of BitSet import scala.collection.mutable -import util.{Statistics, StatisticsStatics} +import util.Statistics /** A base type sequence (BaseTypeSeq) is an ordered sequence spanning all the base types * of a type. 
It characterized by the following two laws: @@ -50,8 +50,8 @@ trait BaseTypeSeqs { */ class BaseTypeSeq protected[reflect] (private[BaseTypeSeqs] val parents: List[Type], private[BaseTypeSeqs] val elems: Array[Type]) { self => - if (StatisticsStatics.areSomeColdStatsEnabled) statistics.incCounter(baseTypeSeqCount) - if (StatisticsStatics.areSomeColdStatsEnabled) statistics.incCounter(baseTypeSeqLenTotal, elems.length) + if (settings.areStatisticsEnabled) statistics.incCounter(baseTypeSeqCount) + if (settings.areStatisticsEnabled) statistics.incCounter(baseTypeSeqLenTotal, elems.length) private[this] val typeSymbols = { val tmp = new Array[Int](elems.length) var i = 0 diff --git a/src/reflect/scala/reflect/internal/Definitions.scala b/src/reflect/scala/reflect/internal/Definitions.scala index 1727c94fe8d3..35cb296a1bbc 100644 --- a/src/reflect/scala/reflect/internal/Definitions.scala +++ b/src/reflect/scala/reflect/internal/Definitions.scala @@ -411,6 +411,7 @@ trait Definitions extends api.StandardDefinitions { lazy val JavaEnumClass = requiredClass[java.lang.Enum[_]] lazy val JavaUtilMap = requiredClass[java.util.Map[_, _]] lazy val JavaUtilHashMap = requiredClass[java.util.HashMap[_, _]] + lazy val JavaRecordClass = getClassIfDefined("java.lang.Record") lazy val ByNameParamClass = specialPolyClass(tpnme.BYNAME_PARAM_CLASS_NAME, COVARIANT)(_ => AnyTpe) lazy val JavaRepeatedParamClass = specialPolyClass(tpnme.JAVA_REPEATED_PARAM_CLASS_NAME, COVARIANT)(tparam => arrayType(tparam.tpe)) @@ -456,13 +457,13 @@ trait Definitions extends api.StandardDefinitions { else if (isScalaRepeatedParamType(tp)) elementExtract(RepeatedParamClass, tp) orElse tp else tp ) - def repeatedToSingle(tp: Type): Type = elementExtract(RepeatedParamClass, tp) orElse elementExtract(JavaRepeatedParamClass, tp) orElse tp + def repeatedToSingle(tp: Type): Type = elementExtract(RepeatedParamClass, tp) orElse elementExtract(JavaRepeatedParamClass, tp) orElse tp // We don't need to deal with JavaRepeatedParamClass here, as `repeatedToSeq` is only called in the patmat translation for Scala sources. 
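For background on `repeatedToSeq`/`seqToRepeated` in the Definitions.scala hunk above (a usage-level illustration only, not the compiler internals): inside a method a repeated parameter `T*` is already seen as a `Seq[T]`, and an existing sequence can be spliced back into a varargs position:
{{{
def sum(xs: Int*): Int = xs.sum            // xs is typed as a Seq[Int] in the body

sum(1, 2, 3)                               // ordinary varargs call site
sum(List(1, 2, 3): _*)                     // an existing Seq passed as repeated args

// vararg extractors in pattern matching also bind the tail as a Seq
List(1, 2, 3) match { case List(x, rest @ _*) => (x, rest) }
}}}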
- def repeatedToSeq(tp: Type): Type = elementTransform(RepeatedParamClass, tp)(seqType) orElse tp - def seqToRepeated(tp: Type): Type = elementTransform(SeqClass, tp)(scalaRepeatedType) orElse tp - def isReferenceArray(tp: Type) = elementTest(ArrayClass, tp)(elemtp => elemtp <:< AnyRefTpe || (elemtp eq ObjectTpeJava)) - def isArrayOfSymbol(tp: Type, elem: Symbol) = elementTest(ArrayClass, tp)(_.typeSymbol == elem) - def elementType(container: Symbol, tp: Type): Type = elementExtract(container, tp) + def repeatedToSeq(tp: Type): Type = elementTransform(RepeatedParamClass, tp)(seqType) orElse tp + def seqToRepeated(tp: Type): Type = elementTransform(SeqClass, tp)(scalaRepeatedType) orElse tp + def isReferenceArray(tp: Type) = elementTest(ArrayClass, tp)(elemtp => elemtp <:< AnyRefTpe || (elemtp eq ObjectTpeJava)) + def isArrayOfSymbol(tp: Type, elem: Symbol) = elementTest(ArrayClass, tp)(_.typeSymbol == elem) + def elementType(container: Symbol, tp: Type): Type = elementExtract(container, tp) // Classes treated specially with respect to -Ywarn-unused lazy val SubTypeClass = requiredClass[scala.<:<[_,_]] @@ -474,7 +475,7 @@ trait Definitions extends api.StandardDefinitions { lazy val IteratorClass = requiredClass[scala.collection.Iterator[_]] lazy val IterableClass = requiredClass[scala.collection.Iterable[_]] lazy val ListClass = requiredClass[scala.collection.immutable.List[_]] - def List_cons = getMemberMethod(ListClass, nme.CONS) + def List_cons = getMemberMethod(ListClass, nme.CONS) @migration("SeqClass now refers to scala.collection.immutable.Seq", "2.13.0") lazy val SeqClass = requiredClass[scala.collection.immutable.Seq[_]] lazy val SeqFactoryClass = requiredModule[scala.collection.SeqFactory.type] @@ -640,8 +641,7 @@ trait Definitions extends api.StandardDefinitions { case _ => false }) // The given class has a main method. - def hasJavaMainMethod(sym: Symbol): Boolean = - (sym.tpe member nme.main).alternatives exists isJavaMainMethod + def hasJavaMainMethod(sym: Symbol): Boolean = sym.tpe.member(nme.main).alternatives.exists(isJavaMainMethod) class VarArityClass(name: String, maxArity: Int, countFrom: Int = 0, init: Option[ClassSymbol] = None) extends VarArityClassApi { private[this] val offset = countFrom - init.size @@ -995,7 +995,6 @@ trait Definitions extends api.StandardDefinitions { (sym eq PartialFunctionClass) || (sym eq AbstractPartialFunctionClass) } - private[this] val doSam = settings.isScala212 private[this] val samCache = perRunCaches.newAnyRefMap[Symbol, Symbol]() /** The single abstract method declared by type `tp` (or `NoSymbol` if it cannot be found). * @@ -1008,7 +1007,7 @@ trait Definitions extends api.StandardDefinitions { * It's kind of strange that erasure sees deferredMembers that typer does not (see commented out assert below) */ def samOf(tp: Type): Symbol = - if (doSam && isNonRefinementClassType(unwrapToClass(tp))) { // TODO: is this really faster than computing tpSym below? how about just `tp.typeSymbol.isClass` (and !tpSym.isRefinementClass)? + if (isNonRefinementClassType(unwrapToClass(tp))) { // TODO: is this really faster than computing tpSym below? how about just `tp.typeSymbol.isClass` (and !tpSym.isRefinementClass)? 
// look at erased type because we (only) care about what ends up in bytecode // (e.g., an alias type is fine as long as is compiles to a single-abstract-method) val tpSym: Symbol = erasure.javaErasure(tp).typeSymbol diff --git a/src/reflect/scala/reflect/internal/Flags.scala b/src/reflect/scala/reflect/internal/Flags.scala index d366c7fce8fb..3df0c63373c3 100644 --- a/src/reflect/scala/reflect/internal/Flags.scala +++ b/src/reflect/scala/reflect/internal/Flags.scala @@ -80,7 +80,7 @@ package internal // 57: notOVERRIDE // 58: notPRIVATE // 59: -// 60: +// 60: SCALA3X // 61: // 62: // 63: @@ -113,6 +113,7 @@ class ModifierFlags { final val LOCAL = 1L << 19 // symbol is local to current class (i.e. private[this] or protected[this] // pre: PRIVATE or PROTECTED are also set final val JAVA = 1L << 20 // symbol was defined by a Java class + final val SCALA3X = 1L << 60 // class was defined in Scala 3 final val STATIC = 1L << 23 // static field, method or class final val CASEACCESSOR = 1L << 24 // symbol is a case parameter (or its accessor, or a GADT skolem) final val TRAIT = 1L << 25 // symbol is a trait @@ -202,7 +203,7 @@ class Flags extends ModifierFlags { // The flags (1L << 59) to (1L << 63) are currently unused. If added to the InitialFlags mask, // they could be used as normal flags. - final val InitialFlags = 0x0007FFFFFFFFFFFFL // normal flags, enabled from the first phase: 1L to (1L << 50) + final val InitialFlags = 0x1007FFFFFFFFFFFFL // normal flags, enabled from the first phase: 1L to (1L << 50) + (1L << 60) final val LateFlags = 0x00F8000000000000L // flags that override flags in (1L << 4) to (1L << 8): DEFERRED, FINAL, INTERFACE, METHOD, MODULE final val AntiFlags = 0x0700000000000000L // flags that cancel flags in 1L to (1L << 2): PROTECTED, OVERRIDE, PRIVATE final val LateShift = 47 @@ -320,7 +321,7 @@ class Flags extends ModifierFlags { /** These flags are not pickled */ - final val FlagsNotPickled = IS_ERROR | OVERLOADED | LIFTED | TRANS_FLAG | LOCKED | TRIEDCOOKING + final val FlagsNotPickled = IS_ERROR | OVERLOADED | LIFTED | TRANS_FLAG | LOCKED | TRIEDCOOKING | SCALA3X // A precaution against future additions to FlagsNotPickled turning out // to be overloaded flags thus not-pickling more than intended. 
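The Flags change above claims bit 60 for `SCALA3X` and widens `InitialFlags` to include it; the mask arithmetic can be sanity-checked with the values copied from the patch:
{{{
val SCALA3X         = 1L << 60
val oldInitialFlags = 0x0007FFFFFFFFFFFFL
val newInitialFlags = 0x1007FFFFFFFFFFFFL

SCALA3X == 0x1000000000000000L                   // true: bit 60 by itself
newInitialFlags == (oldInitialFlags | SCALA3X)   // true: only bit 60 was added to the mask
(newInitialFlags & SCALA3X) != 0L                // true: SCALA3X is enabled from the first phase
}}}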
@@ -477,8 +478,8 @@ class Flags extends ModifierFlags { case `notPROTECTED` => "" // (1L << 56) case 0x200000000000000L => "" // (1L << 57) case `notPRIVATE` => "" // (1L << 58) - case NEEDS_TREES => "" // (1L << 59) - case 0x1000000000000000L => "" // (1L << 60) + case NEEDS_TREES => "" // (1L << 59) + case SCALA3X => "" // (1L << 60) case 0x2000000000000000L => "" // (1L << 61) case 0x4000000000000000L => "" // (1L << 62) case 0x8000000000000000L => "" // (1L << 63) diff --git a/src/reflect/scala/reflect/internal/Kinds.scala b/src/reflect/scala/reflect/internal/Kinds.scala index 698be6563c5c..d53da5a4ca37 100644 --- a/src/reflect/scala/reflect/internal/Kinds.scala +++ b/src/reflect/scala/reflect/internal/Kinds.scala @@ -152,7 +152,7 @@ trait Kinds { def kindCheck(cond: Boolean, f: KindErrors => KindErrors): Unit = if (!cond) kindErrors = f(kindErrors) - if (settings.debug) { + if (settings.isDebug) { log("checkKindBoundsHK expected: "+ param +" with params "+ hkparams +" by definition in "+ paramOwner) log("checkKindBoundsHK supplied: "+ arg +" with params "+ hkargs +" from "+ argOwner) log("checkKindBoundsHK under params: "+ underHKParams +" with args "+ withHKArgs) @@ -215,7 +215,7 @@ trait Kinds { else NoKindErrors } - if (settings.debug && (tparams.nonEmpty || targs.nonEmpty)) log( + if (settings.isDebug && (tparams.nonEmpty || targs.nonEmpty)) log( "checkKindBounds0(" + tparams + ", " + targs + ", " + pre + ", " + owner + ", " + explainErrors + ")" ) diff --git a/src/reflect/scala/reflect/internal/Mirrors.scala b/src/reflect/scala/reflect/internal/Mirrors.scala index e7d434ca3a95..4099423cbed8 100644 --- a/src/reflect/scala/reflect/internal/Mirrors.scala +++ b/src/reflect/scala/reflect/internal/Mirrors.scala @@ -61,7 +61,7 @@ trait Mirrors extends api.Mirrors { val result = if (name.isTermName) sym.suchThat(_ hasFlag MODULE) else sym if (result != NoSymbol) result else { - if (settings.debug) { log(sym.info); log(sym.info.members) }//debug + if (settings.isDebug) { log(sym.info); log(sym.info.members) }//debug thisMirror.missingHook(owner, name) orElse { MissingRequirementError.notFound((if (name.isTermName) "object " else "class ")+path+" in "+thisMirror) } diff --git a/src/reflect/scala/reflect/internal/Positions.scala b/src/reflect/scala/reflect/internal/Positions.scala index 54183d7f3867..bfc995d96cc9 100644 --- a/src/reflect/scala/reflect/internal/Positions.scala +++ b/src/reflect/scala/reflect/internal/Positions.scala @@ -345,7 +345,8 @@ trait Positions extends api.Positions { self: SymbolTable => if (t.pos includes pos) { if (isEligible(t)) last = t super.traverse(t) - } else t match { + } + t match { case mdef: MemberDef => val annTrees = mdef.mods.annotations match { case Nil if mdef.symbol != null => diff --git a/src/reflect/scala/reflect/internal/Printers.scala b/src/reflect/scala/reflect/internal/Printers.scala index f869bd121981..8d62aea85931 100644 --- a/src/reflect/scala/reflect/internal/Printers.scala +++ b/src/reflect/scala/reflect/internal/Printers.scala @@ -207,7 +207,7 @@ trait Printers extends api.Printers { self: SymbolTable => ) def printFlags(flags: Long, privateWithin: String) = { - val mask: Long = if (settings.debug) -1L else PrintableFlags + val mask: Long = if (settings.isDebug) -1L else PrintableFlags val s = flagsToString(flags & mask, privateWithin) if (s != "") print(s + " ") } @@ -320,7 +320,7 @@ trait Printers extends api.Printers { self: SymbolTable => if (qual.nonEmpty || (checkSymbol && tree.symbol != NoSymbol)) print(resultName + ".") 
print("super") if (mix.nonEmpty) print(s"[$mix]") - else if (settings.debug) tree.tpe match { + else if (settings.isDebug) tree.tpe match { case st: SuperType => print(s"[${st.supertpe}]") case tp: Type => print(s"[$tp]") case _ => @@ -479,7 +479,7 @@ trait Printers extends api.Printers { self: SymbolTable => case th @ This(qual) => printThis(th, symName(tree, qual)) - case Select(qual: New, name) if !settings.debug => + case Select(qual: New, name) if !settings.isDebug => print(qual) case Select(qualifier, name) => @@ -781,26 +781,30 @@ trait Printers extends api.Printers { self: SymbolTable => print("class ", printedName(name)) printTypeParams(tparams) - val build.SyntacticClassDef(_, _, _, ctorMods, vparamss, earlyDefs, parents, selfType, body) = cl: @unchecked - - // constructor's modifier - if (ctorMods.hasFlag(AccessFlags) || ctorMods.hasAccessBoundary) { - print(" ") - printModifiers(ctorMods, primaryCtorParam = false) - } + cl match { + case build.SyntacticClassDef(_, _, _, ctorMods, vparamss, earlyDefs, parents, selfType, body) => + // constructor's modifier + if (ctorMods.hasFlag(AccessFlags) || ctorMods.hasAccessBoundary) { + print(" ") + printModifiers(ctorMods, primaryCtorParam = false) + } - def printConstrParams(ts: List[ValDef]): Unit = { - parenthesize() { - printImplicitInParamsList(ts) - printSeq(ts)(printVParam(_, primaryCtorParam = true))(print(", ")) - } - } - // constructor's params processing (don't print single empty constructor param list) - vparamss match { - case Nil | List(Nil) if !mods.isCase && !ctorMods.hasFlag(AccessFlags) => - case _ => vparamss foreach printConstrParams + def printConstrParams(ts: List[ValDef]): Unit = { + parenthesize() { + printImplicitInParamsList(ts) + printSeq(ts)(printVParam(_, primaryCtorParam = true))(print(", ")) + } + } + // constructor's params processing (don't print single empty constructor param list) + vparamss match { + case Nil | List(Nil) if !mods.isCase && !ctorMods.hasFlag(AccessFlags) => + case _ => vparamss foreach printConstrParams + } + parents + case _ => + // Can get here with erroneous code, like `{@deprecatedName ` + Nil } - parents } // get trees without default classes and traits (when they are last) diff --git a/src/reflect/scala/reflect/internal/Scopes.scala b/src/reflect/scala/reflect/internal/Scopes.scala index e428747db7cb..f0bdf01331a7 100644 --- a/src/reflect/scala/reflect/internal/Scopes.scala +++ b/src/reflect/scala/reflect/internal/Scopes.scala @@ -17,7 +17,7 @@ package internal import scala.annotation.tailrec import scala.collection.{AbstractIterable, AbstractIterator} import scala.collection.mutable.Clearable -import scala.reflect.internal.util.{Statistics, StatisticsStatics} +import scala.reflect.internal.util.Statistics trait Scopes extends api.Scopes { self: SymbolTable => @@ -515,22 +515,22 @@ trait Scopes extends api.Scopes { self: SymbolTable => /** Create a new scope nested in another one with which it shares its elements */ final def newNestedScope(outer: Scope): Scope = { - val startTime = if (StatisticsStatics.areSomeColdStatsEnabled) statistics.startTimer(statistics.scopePopulationTime) else null + val startTime = if (settings.areStatisticsEnabled) statistics.startTimer(statistics.scopePopulationTime) else null val nested = newScope // not `new Scope`, we must allow the runtime reflection universe to mixin SynchronizedScopes! 
nested.elems = outer.elems nested.nestinglevel = outer.nestinglevel + 1 if (outer.hashtable ne null) nested.hashtable = java.util.Arrays.copyOf(outer.hashtable, outer.hashtable.length) - if (StatisticsStatics.areSomeColdStatsEnabled) statistics.stopTimer(statistics.scopePopulationTime, startTime) + if (settings.areStatisticsEnabled) statistics.stopTimer(statistics.scopePopulationTime, startTime) nested } /** Create a new scope with given initial elements */ def newScopeWith(elems: Symbol*): Scope = { - val startTime = if (StatisticsStatics.areSomeColdStatsEnabled) statistics.startTimer(statistics.scopePopulationTime) else null + val startTime = if (settings.areStatisticsEnabled) statistics.startTimer(statistics.scopePopulationTime) else null val scope = newScope elems foreach scope.enter - if (StatisticsStatics.areSomeColdStatsEnabled) statistics.stopTimer(statistics.scopePopulationTime, startTime) + if (settings.areStatisticsEnabled) statistics.stopTimer(statistics.scopePopulationTime, startTime) scope } diff --git a/src/reflect/scala/reflect/internal/StdAttachments.scala b/src/reflect/scala/reflect/internal/StdAttachments.scala index 8f820ae11d0c..0c8af3b7601f 100644 --- a/src/reflect/scala/reflect/internal/StdAttachments.scala +++ b/src/reflect/scala/reflect/internal/StdAttachments.scala @@ -116,7 +116,7 @@ trait StdAttachments { */ case object KnownDirectSubclassesCalled extends PlainAttachment - class DottyEnumSingleton(val name: String) extends PlainAttachment + case object DottyEnumSingleton extends PlainAttachment class DottyParameterisedTrait(val params: List[Symbol]) diff --git a/src/reflect/scala/reflect/internal/StdNames.scala b/src/reflect/scala/reflect/internal/StdNames.scala index 0c550505f360..a37391d8db37 100644 --- a/src/reflect/scala/reflect/internal/StdNames.scala +++ b/src/reflect/scala/reflect/internal/StdNames.scala @@ -236,6 +236,7 @@ trait StdNames { final val keywords = kw.result } with CommonNames { final val javaKeywords = new JavaKeywords() + final val javaRestrictedIdentifiers = new JavaRestrictedIdentifiers() } abstract class TypeNames extends Keywords with TypeNamesApi { @@ -264,6 +265,7 @@ trait StdNames { final val Object: NameType = nameType("Object") final val PrefixType: NameType = nameType("PrefixType") final val Product: NameType = nameType("Product") + final val Record: NameType = nameType("Record") final val Serializable: NameType = nameType("Serializable") final val Singleton: NameType = nameType("Singleton") final val Throwable: NameType = nameType("Throwable") @@ -322,6 +324,9 @@ trait StdNames { final val scala_ : NameType = nameType("scala") + // Scala 3 special type + val AND: NameType = nme.AND.toTypeName + def dropSingletonName(name: Name): TypeName = (name dropRight SINGLETON_SUFFIX.length).toTypeName def singletonName(name: Name): TypeName = (name append SINGLETON_SUFFIX).toTypeName } @@ -419,8 +424,17 @@ trait StdNames { /** Is name a variable name? 
*/ def isVariableName(name: Name): Boolean = { + import Character.{isHighSurrogate, isLowSurrogate, isLetter, isLowerCase, isValidCodePoint, toCodePoint} val first = name.startChar - ( ((first.isLower && first.isLetter) || first == '_') + def isLowerLetterSupplementary: Boolean = + first == '$' && { + val decoded = name.decoded + isHighSurrogate(decoded.charAt(0)) && decoded.length > 1 && isLowSurrogate(decoded.charAt(1)) && { + val codepoint = toCodePoint(decoded.charAt(0), decoded.charAt(1)) + isValidCodePoint(codepoint) && isLetter(codepoint) && isLowerCase(codepoint) + } + } + ( ((first.isLower && first.isLetter) || first == '_' || isLowerLetterSupplementary) && (name != nme.false_) && (name != nme.true_) && (name != nme.null_) @@ -459,6 +473,7 @@ trait StdNames { def unexpandedName(name: Name): Name = name.lastIndexOf("$$") match { case 0 | -1 => name + case 1 if name.charAt(0) == '_' => if (name.isTermName) nme.WILDCARD else tpnme.WILDCARD case idx0 => // Sketchville - We've found $$ but if it's part of $$$ or $$$$ // or something we need to keep the bonus dollars, so e.g. foo$$$outer @@ -658,6 +673,18 @@ trait StdNames { val long2Long: NameType = nameType("long2Long") val boolean2Boolean: NameType = nameType("boolean2Boolean") + // Scala 3 import syntax + val as: NameType = nameType("as") + + // Scala 3 hard keywords + val `enum`: NameType = nameType("enum") + val `export`: NameType = nameType("export") + val `given`: NameType = nameType("given") + + // Scala 3 soft keywords + val infix: NameType = nameType("infix") + val open: NameType = nameType("open") + // Compiler utilized names val AnnotatedType: NameType = nameType("AnnotatedType") @@ -901,6 +928,7 @@ trait StdNames { val state : NameType = nameType("state") val tr : NameType = nameType(s"tr$$async") val t : NameType = nameType(s"throwable$$async") + val trGetResult : NameType = nameType(s"tryGetResult$$async") // quasiquote interpolators: val q: NameType = nameType("q") @@ -960,6 +988,7 @@ trait StdNames { final val PLUS : NameType = nameType("+") final val STAR : NameType = nameType("*") final val TILDE: NameType = nameType("~") + final val QMARK: NameType = nameType("?") final val isUnary: Set[Name] = Set(MINUS, PLUS, TILDE, BANG) } @@ -1247,6 +1276,15 @@ trait StdNames { final val keywords = kw.result } + // "The identifiers var, yield, and record are restricted identifiers because they are not allowed in some contexts" + // A type identifier is an identifier that is not the character sequence var, yield, or record. + // An unqualified method identifier is an identifier that is not the character sequence yield. 
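The `isVariableName` change in StdNames above treats a name whose decoded form starts with a lowercase supplementary letter (encoded as a surrogate pair) as a variable pattern. The core of that check, stated on a plain decoded string rather than the compiler's `Name` type:
{{{
import java.lang.Character._

// does the decoded identifier start with a lowercase letter outside the BMP?
def startsWithLowerSupplementary(decoded: String): Boolean =
  decoded.length > 1 &&
    isHighSurrogate(decoded.charAt(0)) && isLowSurrogate(decoded.charAt(1)) && {
      val cp = toCodePoint(decoded.charAt(0), decoded.charAt(1))
      isValidCodePoint(cp) && isLetter(cp) && isLowerCase(cp)
    }

startsWithLowerSupplementary("\uD835\uDCC8tate")  // true: U+1D4C8 MATHEMATICAL SCRIPT SMALL S
startsWithLowerSupplementary("State")             // false: plain BMP uppercase letter
}}}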
+ class JavaRestrictedIdentifiers { + final val RECORD: TermName = TermName("record") + final val VAR: TermName = TermName("var") + final val YIELD: TermName = TermName("yield") + } + sealed abstract class SymbolNames { protected def nameType(s: String): TypeName = newTypeNameCached(s) diff --git a/src/reflect/scala/reflect/internal/SymbolPairs.scala b/src/reflect/scala/reflect/internal/SymbolPairs.scala index 83a3d8abca22..7d2f1f895550 100644 --- a/src/reflect/scala/reflect/internal/SymbolPairs.scala +++ b/src/reflect/scala/reflect/internal/SymbolPairs.scala @@ -43,12 +43,12 @@ abstract class SymbolPairs { def rootType: Type = self def lowType: Type = self memberType low - def lowErased: Type = erasure.specialErasure(base)(low.tpe) + def lowErased: Type = erasure.specialErasure(low)(low.tpe) def lowClassBound: Type = classBoundAsSeen(low.tpe.typeSymbol) def highType: Type = self memberType high def highInfo: Type = self memberInfo high - def highErased: Type = erasure.specialErasure(base)(high.tpe) + def highErased: Type = erasure.specialErasure(high)(high.tpe) def highClassBound: Type = classBoundAsSeen(high.tpe.typeSymbol) def isErroneous = low.tpe.isErroneous || high.tpe.isErroneous diff --git a/src/reflect/scala/reflect/internal/SymbolTable.scala b/src/reflect/scala/reflect/internal/SymbolTable.scala index 9c7abb1e1524..e7b9466ffa95 100644 --- a/src/reflect/scala/reflect/internal/SymbolTable.scala +++ b/src/reflect/scala/reflect/internal/SymbolTable.scala @@ -87,15 +87,16 @@ abstract class SymbolTable extends macros.Universe def shouldLogAtThisPhase = false def isPastTyper = false - final def isDeveloper: Boolean = settings.debug.value || settings.developer.value - def picklerPhase: Phase + @inline final def isDeveloper: Boolean = settings.isDebug || settings.isDeveloper + + def picklerPhase: Phase def erasurePhase: Phase def settings: MutableSettings /** Override with final implementation for inlining. */ - def debuglog(msg: => String): Unit = if (settings.debug) log(msg) + def debuglog(msg: => String): Unit = if (settings.isDebug) log(msg) /** dev-warns if dev-warning is enabled and `cond` is true; no-op otherwise */ @inline final def devWarningIf(cond: => Boolean)(msg: => String): Unit = @@ -387,7 +388,7 @@ abstract class SymbolTable extends macros.Universe } /** if there's a `package` member object in `pkgClass`, enter its members into it. 
*/ - def openPackageModule(pkgClass: Symbol): Unit = { + def openPackageModule(pkgClass: Symbol, force: Boolean = false): Unit = { val pkgModule = pkgClass.packageObject def fromSource = pkgModule.rawInfo match { diff --git a/src/reflect/scala/reflect/internal/Symbols.scala b/src/reflect/scala/reflect/internal/Symbols.scala index a144fe6e8c63..6594c4dce4f6 100644 --- a/src/reflect/scala/reflect/internal/Symbols.scala +++ b/src/reflect/scala/reflect/internal/Symbols.scala @@ -20,7 +20,7 @@ package internal import scala.collection.immutable import scala.collection.mutable.ListBuffer -import util.{ Statistics, shortClassOfInstance, StatisticsStatics } +import util.{ ReusableInstance, Statistics, shortClassOfInstance } import Flags._ import scala.annotation.tailrec import scala.reflect.io.{AbstractFile, NoAbstractFile} @@ -124,7 +124,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => def isJavaEnum: Boolean = hasJavaEnumFlag def isJavaAnnotation: Boolean = hasJavaAnnotationFlag def isStaticAnnotation: Boolean = - hasJavaAnnotationFlag || isNonBottomSubClass(StaticAnnotationClass) && this != NowarnClass + initialize.hasJavaAnnotationFlag || isNonBottomSubClass(StaticAnnotationClass) && this != NowarnClass def newNestedSymbol(name: Name, pos: Position, newFlags: Long, isClass: Boolean): Symbol = name match { case n: TermName => newTermSymbol(n, pos, newFlags) @@ -292,7 +292,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => def varianceString: String = variance.symbolicString override def flagMask = - if (settings.debug && !isAbstractType) AllFlags + if (settings.isDebug && !isAbstractType) AllFlags else if (owner.isRefinementClass) ExplicitFlags & ~OVERRIDE else ExplicitFlags @@ -838,10 +838,10 @@ trait Symbols extends api.Symbols { self: SymbolTable => isAbstractType && !isExistential && !isTypeParameterOrSkolem && isLocalToBlock /** change name by appending $$ - * Do the same for any accessed symbols or setters/getters. + * Do the same for any accessed symbols to preserve serialization compatibility. * Implementation in TermSymbol. */ - def expandName(base: Symbol): Unit = { } + def expandName(base: Symbol): Unit = () // In java.lang, Predef, or scala package/package object def isInDefaultNamespace = UnqualifiedOwners(effectiveOwner) @@ -970,6 +970,8 @@ trait Symbols extends api.Symbols { self: SymbolTable => final def isModuleVar = hasFlag(MODULEVAR) + final def isScala3Defined = hasFlag(SCALA3X) + /** * Is this symbol static (i.e. with no outer instance)? * Q: When exactly is a sym marked as STATIC? @@ -2724,7 +2726,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => symbolKind.abbreviation final def kindString: String = - if (settings.debug.value) accurateKindString + if (settings.isDebug) accurateKindString else sanitizedKindString /** If the name of the symbol's owner should be used when you care about @@ -2748,7 +2750,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => * If settings.Yshowsymkinds, adds abbreviated symbol kind. */ def nameString: String = { - val name_s = if (settings.debug.value) "" + unexpandedName else unexpandedName.dropLocal.decode + val name_s = if (settings.isDebug) "" + unexpandedName else unexpandedName.dropLocal.decode val kind_s = if (settings.Yshowsymkinds.value) "#" + abbreviatedKindString else "" name_s + idString + kind_s @@ -2775,7 +2777,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => * If hasMeaninglessName is true, uses the owner's name to disambiguate identity. 
*/ override def toString: String = { - val simplifyNames = !settings.debug + val simplifyNames = !settings.isDebug if (isPackageObjectOrClass && simplifyNames) s"package object ${owner.decodedName}" else { val kind = kindString @@ -2811,7 +2813,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => def isStructuralThisType = owner.isInitialized && owner.isStructuralRefinement && tp == owner.tpe // scala/bug#8158 // colon+space, preceded by an extra space if needed to prevent the colon glomming onto a symbolic name def postnominalColon: String = if (!followsParens && name.isOperatorName) " : " else ": " - def parents = if (settings.debug) parentsString(tp.parents) else briefParentsString(tp.parents) + def parents = if (settings.isDebug) parentsString(tp.parents) else briefParentsString(tp.parents) def typeRest = if (isClass) " extends " + parents else if (isAliasType) " = " + tp.resultType @@ -2871,7 +2873,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => /** String representation of existentially bound variable */ def existentialToString = - if (isSingletonExistential && !settings.debug.value) + if (isSingletonExistential && !settings.isDebug) "val " + tpnme.dropSingletonName(name) + ": " + dropSingletonType(info.upperBound) else defString } @@ -2986,18 +2988,12 @@ trait Symbols extends api.Symbols { self: SymbolTable => } /** change name by appending $$ - * Do the same for any accessed symbols or setters/getters + * Do the same for any accessed symbols to preserve serialization compatibility. */ override def expandName(base: Symbol): Unit = { if (!hasFlag(EXPANDEDNAME)) { setFlag(EXPANDEDNAME) - if (hasAccessorFlag && !isDeferred) { - accessed.expandName(base) - } - else if (hasGetter) { - getterIn(owner).expandName(base) - setterIn(owner).expandName(base) - } + if (hasAccessorFlag && !isDeferred) accessed.expandName(base) name = nme.expandedName(name.toTermName, base) } } @@ -3278,7 +3274,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => * info for T in Test1 should be >: Nothing <: Test3[_] */ - if (StatisticsStatics.areSomeColdStatsEnabled) statistics.incCounter(typeSymbolCount) + if (settings.areStatisticsEnabled) statistics.incCounter(typeSymbolCount) } implicit val TypeSymbolTag = ClassTag[TypeSymbol](classOf[TypeSymbol]) @@ -3328,7 +3324,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => owner.newTypeSkolemSymbol(name, origin, pos, newFlags) override def nameString: String = - if (settings.debug.value) (super.nameString + "&" + level) + if ((settings.isDebug)) (super.nameString + "&" + level) else super.nameString } @@ -3498,7 +3494,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => else super.toString ) - if (StatisticsStatics.areSomeColdStatsEnabled) statistics.incCounter(classSymbolCount) + if (settings.areStatisticsEnabled) statistics.incCounter(classSymbolCount) } implicit val ClassSymbolTag = ClassTag[ClassSymbol](classOf[ClassSymbol]) @@ -3508,7 +3504,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => */ class ModuleClassSymbol protected[Symbols] (owner: Symbol, pos: Position, name: TypeName) extends ClassSymbol(owner, pos, name) { - private[this] var module: Symbol = _ + private[this] var moduleSymbol: Symbol = _ private[this] var typeOfThisCache: Type = _ private[this] var typeOfThisPeriod = NoPeriod @@ -3541,8 +3537,8 @@ trait Symbols extends api.Symbols { self: SymbolTable => implicitMembersCacheValue } // The null check seems to be necessary for the reifier. 
- override def sourceModule = if (module ne null) module else companionModule - override def sourceModule_=(module: Symbol): Unit = { this.module = module } + override def sourceModule = if (moduleSymbol ne null) moduleSymbol else companionModule + override def sourceModule_=(module: Symbol): Unit = { this.moduleSymbol = module } } class PackageObjectClassSymbol protected[Symbols] (owner0: Symbol, pos0: Position) @@ -3597,7 +3593,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => // Avoid issuing lots of redundant errors if (!hasFlag(IS_ERROR)) { globalError(pos, missingMessage) - if (settings.debug.value) + if (settings.isDebug) (new Throwable).printStackTrace this setFlag IS_ERROR @@ -3696,7 +3692,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => if (syms.isEmpty) Nil else { val syms1 = mapList(syms)(symFn) - val map = new SubstSymMap(syms, syms1) + val map = SubstSymMap(syms, syms1) syms1.foreach(_.modifyInfo(map)) syms1 } @@ -3760,7 +3756,19 @@ trait Symbols extends api.Symbols { self: SymbolTable => /** Convenience functions which derive symbols by cloning. */ def cloneSymbols(syms: List[Symbol]): List[Symbol] = - deriveSymbols(syms, _.cloneSymbol) + if (syms.isEmpty) Nil + else { + val syms1 = mapList(syms)(_.cloneSymbol) + cloneSymbolsSubstSymMap.using { (msm: SubstSymMap) => + msm.reset(syms, syms1) + syms1.foreach(_.modifyInfo(msm)) + } + syms1 + } + + private[this] val cloneSymbolsSubstSymMap: ReusableInstance[SubstSymMap] = + ReusableInstance[SubstSymMap](SubstSymMap(), enabled = isCompilerUniverse) + def cloneSymbolsAtOwner(syms: List[Symbol], owner: Symbol): List[Symbol] = deriveSymbols(syms, _ cloneSymbol owner) @@ -3814,7 +3822,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => /** An exception for cyclic references of symbol definitions */ case class CyclicReference(sym: Symbol, info: Type) extends TypeError("illegal cyclic reference involving " + sym) { - if (settings.debug) printStackTrace() + if (settings.isDebug) printStackTrace() } /** A class for type histories */ diff --git a/src/reflect/scala/reflect/internal/TreeGen.scala b/src/reflect/scala/reflect/internal/TreeGen.scala index 6ae62eb81581..c3aae72e7788 100644 --- a/src/reflect/scala/reflect/internal/TreeGen.scala +++ b/src/reflect/scala/reflect/internal/TreeGen.scala @@ -149,6 +149,7 @@ abstract class TreeGen { def mkAttributedQualifierIfPossible(prefix: Type): Option[Tree] = prefix match { case NoType | NoPrefix | ErrorType => None case TypeRef(_, sym, _) if sym.isModule || sym.isClass || sym.isType => None + case RefinedType(parents, _) if !parents.exists(_.isStable) => None case pre => Some(mkAttributedQualifier(prefix)) } diff --git a/src/reflect/scala/reflect/internal/Trees.scala b/src/reflect/scala/reflect/internal/Trees.scala index 47945ed0eed2..821aebd7084b 100644 --- a/src/reflect/scala/reflect/internal/Trees.scala +++ b/src/reflect/scala/reflect/internal/Trees.scala @@ -1467,7 +1467,7 @@ trait Trees extends api.Trees { private def requireLegal(value: Any, allowed: Any, what: String) = ( if (value != allowed) { log(s"can't set $what for $self to value other than $allowed") - if (settings.debug && settings.developer) + if (settings.isDebug && settings.isDeveloper) (new Throwable).printStackTrace } ) @@ -1745,7 +1745,7 @@ trait Trees extends api.Trees { lazy val EmptyTreeTypeSubstituter = new TreeTypeSubstituter(List(), List()) - class TreeSymSubstTraverser(val from: List[Symbol], val to: List[Symbol]) extends TypeMapTreeSubstituter(new SubstSymMap(from, to)) { + class 
TreeSymSubstTraverser(val from: List[Symbol], val to: List[Symbol]) extends TypeMapTreeSubstituter(SubstSymMap(from, to)) { override def toString() = "TreeSymSubstTraverser/" + substituterString("Symbol", "Symbol", from, to) } @@ -1759,7 +1759,7 @@ trait Trees extends api.Trees { * a symbol in `from` will have a new type assigned. */ class TreeSymSubstituter(from: List[Symbol], to: List[Symbol]) extends InternalTransformer { - val symSubst = new SubstSymMap(from, to) + val symSubst = SubstSymMap(from, to) private[this] var mutatedSymbols: List[Symbol] = Nil override def transform(tree: Tree): Tree = { @tailrec diff --git a/src/reflect/scala/reflect/internal/TypeDebugging.scala b/src/reflect/scala/reflect/internal/TypeDebugging.scala index 550bd11bb433..6adab6fbe87e 100644 --- a/src/reflect/scala/reflect/internal/TypeDebugging.scala +++ b/src/reflect/scala/reflect/internal/TypeDebugging.scala @@ -63,24 +63,7 @@ trait TypeDebugging { /** Light color wrappers. */ - object typeDebug { - import scala.io.AnsiColor._ - - private[this] val colorsOk = scala.util.Properties.coloredOutputEnabled - private def inColor(s: String, color: String) = if (colorsOk && s != "") color + s + RESET else s - private def inBold(s: String, color: String) = if (colorsOk && s != "") color + BOLD + s + RESET else s - - def inLightRed(s: String) = inColor(s, RED) - def inLightGreen(s: String) = inColor(s, GREEN) - def inLightMagenta(s: String) = inColor(s, MAGENTA) - def inLightCyan(s: String): String = inColor(s, CYAN) - def inGreen(s: String): String = inBold(s, GREEN) - def inRed(s: String): String = inBold(s, RED) - def inBlue(s: String): String = inBold(s, BLUE) - def inCyan(s: String): String = inBold(s, CYAN) - def inMagenta(s: String) = inBold(s, MAGENTA) - def resetColor(s: String): String = if (colorsOk) s + RESET else s - + object typeDebug extends TypeDebugging.AnsiColor { private def to_s(x: Any): String = x match { // otherwise case classes are caught looking like products case _: Tree | _: Type => "" + x @@ -157,6 +140,36 @@ trait TypeDebugging { def debugString(tp: Type) = debug(tp) } def paramString(tp: Type) = typeDebug.str params tp.params - def typeParamsString(tp: Type) = typeDebug.str brackets (tp.typeParams map (_.defString)) + def typeParamsString(tp: Type) = typeDebug.str.brackets(tp.typeParams.map(_.defString)) def debugString(tp: Type) = typeDebug debugString tp } + +object TypeDebugging { + object AnsiColor extends AnsiColor { + implicit class StringColorOps(private val s: String) extends AnyVal { + def red = inLightRed(s) + def green = inLightGreen(s) + def yellow = inLightYellow(s) + def blue = inLightBlue(s) + } + } + + trait AnsiColor extends scala.io.AnsiColor { + private[this] val colorsOk = scala.util.Properties.coloredOutputEnabled + private def inColor(s: String, color: String) = if (colorsOk && s != "") color + s + RESET else s + private def inBold(s: String, color: String) = if (colorsOk && s != "") color + BOLD + s + RESET else s + + def inLightRed(s: String) = inColor(s, RED) + def inLightBlue(s: String) = inColor(s, BLUE) + def inLightGreen(s: String) = inColor(s, GREEN) + def inLightYellow(s: String): String = inColor(s, YELLOW) + def inLightMagenta(s: String) = inColor(s, MAGENTA) + def inLightCyan(s: String): String = inColor(s, CYAN) + def inGreen(s: String): String = inBold(s, GREEN) + def inRed(s: String): String = inBold(s, RED) + def inBlue(s: String): String = inBold(s, BLUE) + def inCyan(s: String): String = inBold(s, CYAN) + def inMagenta(s: String) = inBold(s, 
MAGENTA) + def resetColor(s: String): String = if (colorsOk) s + RESET else s + } +} diff --git a/src/reflect/scala/reflect/internal/Types.scala b/src/reflect/scala/reflect/internal/Types.scala index 29a5177e9675..d1c46db78d16 100644 --- a/src/reflect/scala/reflect/internal/Types.scala +++ b/src/reflect/scala/reflect/internal/Types.scala @@ -16,13 +16,13 @@ package internal import java.util.Objects -import scala.collection.{immutable, mutable} +import scala.collection.mutable import scala.ref.WeakReference import mutable.{ListBuffer, LinkedHashSet} import Flags._ import scala.util.control.ControlThrowable import scala.annotation.{tailrec, unused} -import util.{Statistics, StatisticsStatics} +import util.{ReusableInstance, Statistics} import util.ThreeValues._ import Variance._ import Depth._ @@ -98,7 +98,7 @@ trait Types import statistics._ private[this] var explainSwitch = false - @unused private final val emptySymbolSet = immutable.Set.empty[Symbol] + @unused private final val emptySymbolSet = Set.empty[Symbol] @unused private final val breakCycles = settings.breakCycles.value /** In case anyone wants to turn on type parameter bounds being used @@ -176,19 +176,7 @@ trait Types * forwarded here. Some operations are rewrapped again. */ trait RewrappingTypeProxy extends SimpleTypeProxy { - protected def maybeRewrap(newtp: Type) = ( - if (newtp eq underlying) this - else { - // - BoundedWildcardTypes reach here during erroneous compilation: neg/t6258 - // - Higher-kinded exclusion is because [x]CC[x] compares =:= to CC: pos/t3800 - // - Avoid reusing the existing Wrapped(RefinedType) when we've be asked to wrap an =:= RefinementTypeRef, the - // distinction is important in base type sequences. See TypesTest.testExistentialRefinement - // - Otherwise, if newtp =:= underlying, don't rewrap it. - val hasSpecialMeaningBeyond_=:= = newtp.isWildcard || newtp.isHigherKinded || newtp.isInstanceOf[RefinementTypeRef] - if (!hasSpecialMeaningBeyond_=:= && (newtp =:= underlying)) this - else rewrap(newtp) - } - ) + protected def maybeRewrap(newtp: Type) = if (newtp eq underlying) this else rewrap(newtp) protected def rewrap(newtp: Type): Type // the following are all operations in class Type that are overridden in some subclass @@ -692,7 +680,7 @@ trait Types * = Int */ def asSeenFrom(pre: Type, clazz: Symbol): Type = { - val start = if (StatisticsStatics.areSomeColdStatsEnabled) statistics.pushTimer(typeOpsStack, asSeenFromNanos) else null + val start = if (settings.areStatisticsEnabled) statistics.pushTimer(typeOpsStack, asSeenFromNanos) else null try { val trivial = ( this.isTrivial @@ -708,7 +696,7 @@ trait Types if (m.capturedSkolems.isEmpty) tp1 else deriveType(m.capturedSkolems, _.cloneSymbol setFlag CAPTURED)(tp1) } - } finally if (StatisticsStatics.areSomeColdStatsEnabled) statistics.popTimer(typeOpsStack, start) + } finally if (settings.areStatisticsEnabled) statistics.popTimer(typeOpsStack, start) } /** The info of `sym`, seen as a member of this type. @@ -754,7 +742,7 @@ trait Types */ def substSym(from: List[Symbol], to: List[Symbol]): Type = if ((from eq to) || from.isEmpty) this - else new SubstSymMap(from, to) apply this + else SubstSymMap(from, to).apply(this) /** Substitute all occurrences of `ThisType(from)` in this type by `to`. * @@ -814,7 +802,7 @@ trait Types /** Is this type a subtype of that type? 
*/ def <:<(that: Type): Boolean = { - if (StatisticsStatics.areSomeColdStatsEnabled) stat_<:<(that) + if (settings.areStatisticsEnabled) stat_<:<(that) else { (this eq that) || (if (explainSwitch) explain("<:", isSubType(_: Type, _: Type), this, that) @@ -836,7 +824,7 @@ trait Types case _ => false } case TypeRef(_, sym, args) => - val that1 = existentialAbstraction(args map (_.typeSymbol), that) + val that1 = existentialAbstraction(args.map(_.typeSymbol), that) (that ne that1) && (this <:< that1) && { debuglog(s"$this.matchesPattern($that) depended on discarding args and testing <:< $that1") true @@ -846,26 +834,26 @@ trait Types }) def stat_<:<(that: Type): Boolean = { - if (StatisticsStatics.areSomeColdStatsEnabled) statistics.incCounter(subtypeCount) - val start = if (StatisticsStatics.areSomeColdStatsEnabled) statistics.pushTimer(typeOpsStack, subtypeNanos) else null + if (settings.areStatisticsEnabled) statistics.incCounter(subtypeCount) + val start = if (settings.areStatisticsEnabled) statistics.pushTimer(typeOpsStack, subtypeNanos) else null val result = (this eq that) || (if (explainSwitch) explain("<:", isSubType(_: Type, _: Type), this, that) else isSubType(this, that)) - if (StatisticsStatics.areSomeColdStatsEnabled) statistics.popTimer(typeOpsStack, start) + if (settings.areStatisticsEnabled) statistics.popTimer(typeOpsStack, start) result } /** Is this type a weak subtype of that type? True also for numeric types, i.e. Int weak_<:< Long. */ def weak_<:<(that: Type): Boolean = { - if (StatisticsStatics.areSomeColdStatsEnabled) statistics.incCounter(subtypeCount) - val start = if (StatisticsStatics.areSomeColdStatsEnabled) statistics.pushTimer(typeOpsStack, subtypeNanos) else null + if (settings.areStatisticsEnabled) statistics.incCounter(subtypeCount) + val start = if (settings.areStatisticsEnabled) statistics.pushTimer(typeOpsStack, subtypeNanos) else null val result = ((this eq that) || (if (explainSwitch) explain("weak_<:", isWeakSubType, this, that) else isWeakSubType(this, that))) - if (StatisticsStatics.areSomeColdStatsEnabled) statistics.popTimer(typeOpsStack, start) + if (settings.areStatisticsEnabled) statistics.popTimer(typeOpsStack, start) result } @@ -1411,7 +1399,7 @@ trait Types override def underlying: Type = sym.typeOfThis override def isHigherKinded = sym.isRefinementClass && underlying.isHigherKinded override def prefixString = - if (settings.debug) sym.nameString + ".this." + if (settings.isDebug) sym.nameString + ".this." else if (sym.isAnonOrRefinementClass) "this." else if (sym.isOmittablePrefix) "" else if (sym.isModuleClass) sym.fullNameString + "." @@ -1571,18 +1559,16 @@ trait Types /** Bounds notation used in Scala syntax. * For example +This <: scala.collection.generic.Sorted[K,This]. */ - private[internal] def scalaNotation(typeString: Type => String): String = { + private[internal] def scalaNotation(typeString: Type => String): String = (if (emptyLowerBound) "" else " >: " + typeString(lo)) + (if (emptyUpperBound) "" else " <: " + typeString(hi)) - } /** Bounds notation used in https://adriaanm.github.com/files/higher.pdf. * For example *(scala.collection.generic.Sorted[K,This]). 
*/ - private[internal] def starNotation(typeString: Type => String): String = { + private[internal] def starNotation(typeString: Type => String): String = if (emptyLowerBound && emptyUpperBound) "" else if (emptyLowerBound) s"(${typeString(hi)})" else s"(${typeString(lo)}, ${typeString(hi)})" - } override def kind = "TypeBoundsType" override def mapOver(map: TypeMap): Type = { val lo1 = map match { @@ -1689,7 +1675,7 @@ trait Types override def isStructuralRefinement: Boolean = typeSymbol.isAnonOrRefinementClass && (decls exists symbolIsPossibleInRefinement) - protected def shouldForceScope = settings.debug || parents.isEmpty || !decls.isEmpty + protected def shouldForceScope = settings.isDebug || parents.isEmpty || !decls.isEmpty protected def initDecls = fullyInitializeScope(decls) protected def scopeString = if (shouldForceScope) initDecls.mkString("{", "; ", "}") else "" override def safeToString = parentsString(parents) + scopeString @@ -1760,8 +1746,8 @@ trait Types tpe.baseTypeSeqCache = tpWithoutTypeVars.baseTypeSeq lateMap paramToVar } else { - if (StatisticsStatics.areSomeColdStatsEnabled) statistics.incCounter(compoundBaseTypeSeqCount) - val start = if (StatisticsStatics.areSomeColdStatsEnabled) statistics.pushTimer(typeOpsStack, baseTypeSeqNanos) else null + if (settings.areStatisticsEnabled) statistics.incCounter(compoundBaseTypeSeqCount) + val start = if (settings.areStatisticsEnabled) statistics.pushTimer(typeOpsStack, baseTypeSeqNanos) else null try { tpe.baseTypeSeqCache = undetBaseTypeSeq tpe.baseTypeSeqCache = @@ -1770,7 +1756,7 @@ trait Types else compoundBaseTypeSeq(tpe) } finally { - if (StatisticsStatics.areSomeColdStatsEnabled) statistics.popTimer(typeOpsStack, start) + if (settings.areStatisticsEnabled) statistics.popTimer(typeOpsStack, start) } // [Martin] suppressing memoization solves the problem with "same type after erasure" errors // when compiling with @@ -1793,13 +1779,13 @@ trait Types if (period != currentPeriod) { tpe.baseClassesPeriod = currentPeriod if (!isValidForBaseClasses(period)) { - val start = if (StatisticsStatics.areSomeColdStatsEnabled) statistics.pushTimer(typeOpsStack, baseClassesNanos) else null + val start = if (settings.areStatisticsEnabled) statistics.pushTimer(typeOpsStack, baseClassesNanos) else null try { tpe.baseClassesCache = null tpe.baseClassesCache = tpe.memo(computeBaseClasses(tpe))(tpe.typeSymbol :: _.baseClasses.tail) } finally { - if (StatisticsStatics.areSomeColdStatsEnabled) statistics.popTimer(typeOpsStack, start) + if (settings.areStatisticsEnabled) statistics.popTimer(typeOpsStack, start) } } } @@ -1913,7 +1899,7 @@ trait Types private final val Initializing = 1 private final val Initialized = 2 - private type RefMap = Map[Symbol, immutable.Set[Symbol]] + private type RefMap = Map[Symbol, Set[Symbol]] /** All type parameters reachable from given type parameter * by a path which contains at least one expansive reference. @@ -2056,7 +2042,7 @@ trait Types /** A nicely formatted string with newlines and such. 
*/ def formattedToString = parents.mkString("\n with ") + scopeString - override protected def shouldForceScope = settings.debug || decls.size > 1 + override protected def shouldForceScope = settings.isDebug || decls.size > 1 override protected def scopeString = initDecls.mkString(" {\n ", "\n ", "\n}") override def safeToString = if (shouldForceScope) formattedToString else super.safeToString } @@ -2642,7 +2628,7 @@ trait Types } // ensure that symbol is not a local copy with a name coincidence private def needsPreString = ( - settings.debug + settings.isDebug || !shorthands(sym.fullName) || (sym.ownersIterator exists (s => !s.isClass)) ) @@ -2713,12 +2699,12 @@ trait Types case _ => "" } override def safeToString = { - val custom = if (settings.debug) "" else customToString + val custom = if (settings.isDebug) "" else customToString if (custom != "") custom else finishPrefix(preString + sym.nameString + argsString) } override def prefixString = "" + ( - if (settings.debug) + if (settings.isDebug) super.prefixString else if (sym.isOmittablePrefix) "" @@ -2796,13 +2782,13 @@ trait Types if (period != currentPeriod) { tpe.baseTypeSeqPeriod = currentPeriod if (!isValidForBaseClasses(period)) { - if (StatisticsStatics.areSomeColdStatsEnabled) statistics.incCounter(typerefBaseTypeSeqCount) - val start = if (StatisticsStatics.areSomeColdStatsEnabled) statistics.pushTimer(typeOpsStack, baseTypeSeqNanos) else null + if (settings.areStatisticsEnabled) statistics.incCounter(typerefBaseTypeSeqCount) + val start = if (settings.areStatisticsEnabled) statistics.pushTimer(typeOpsStack, baseTypeSeqNanos) else null try { tpe.baseTypeSeqCache = undetBaseTypeSeq tpe.baseTypeSeqCache = tpe.baseTypeSeqImpl } finally { - if (StatisticsStatics.areSomeColdStatsEnabled) statistics.popTimer(typeOpsStack, start) + if (settings.areStatisticsEnabled) statistics.popTimer(typeOpsStack, start) } } } @@ -3152,7 +3138,7 @@ trait Types } override def nameAndArgsString: String = underlying match { - case TypeRef(_, sym, args) if !settings.debug && isRepresentableWithWildcards => + case TypeRef(_, sym, args) if !settings.isDebug && isRepresentableWithWildcards => sym.name.toString + wildcardArgsString(quantified.toSet, args).mkString("[", ",", "]") case TypeRef(_, sym, args) => sym.name.toString + args.mkString("[", ",", "]") + existentialClauses @@ -3192,7 +3178,7 @@ trait Types } override def safeToString: String = underlying match { - case TypeRef(pre, sym, args) if !settings.debug && isRepresentableWithWildcards => + case TypeRef(pre, sym, args) if !settings.isDebug && isRepresentableWithWildcards => val ref = typeRef(pre, sym, Nil).toString val wildcards = wildcardArgsString(quantified.toSet, args) if (wildcards.isEmpty) ref else ref + wildcards.mkString("[", ", ", "]") @@ -3620,7 +3606,7 @@ trait Types // This is a higher-kinded type var with same arity as tp. // If so (see scala/bug#7517), side effect: adds the type constructor itself as a bound. 
isSubArgs(lhs, rhs, params, AnyDepth) && {addBound(tp.typeConstructor); true} - } else if (settings.isScala213 && numCaptured > 0) { + } else if (numCaptured > 0) { // Simple algorithm as suggested by Paul Chiusano in the comments on scala/bug#2712 // // https://github.com/scala/bug/issues/2712#issuecomment-292374655 @@ -4041,6 +4027,9 @@ trait Types def refinedType(parents: List[Type], owner: Symbol): Type = refinedType(parents, owner, newScope, owner.pos) + private[this] val copyRefinedTypeSSM: ReusableInstance[SubstSymMap] = + ReusableInstance[SubstSymMap](SubstSymMap(), enabled = isCompilerUniverse) + def copyRefinedType(original: RefinedType, parents: List[Type], decls: Scope) = if ((parents eq original.parents) && (decls eq original.decls)) original else { @@ -4055,9 +4044,10 @@ trait Types val syms2 = result.decls.toList val resultThis = result.typeSymbol.thisType val substThisMap = new SubstThisMap(original.typeSymbol, resultThis) - val substMap = new SubstSymMap(syms1, syms2) - for (sym <- syms2) - sym.modifyInfo(info => substMap.apply(substThisMap.apply(info))) + copyRefinedTypeSSM.using { (msm: SubstSymMap) => + msm.reset(syms1, syms2) + syms2.foreach(_.modifyInfo(info => msm.apply(substThisMap.apply(info)))) + } } result } @@ -4294,14 +4284,14 @@ trait Types * - closed: already in closure, and we already searched for new elements. * * Invariant: pending, closed, and border form a partition of `tparams`. - * Each element in tparams goes from pending to border, and from border to closed + * Each element in tparams goes from pending to border, and from border to closed. * We separate border from closed to avoid recomputing `Type.contains` for same elements. */ - val pending = mutable.ListBuffer.empty[Symbol] - var border = mutable.ListBuffer.empty[Symbol] + val pending = ListBuffer.empty[Symbol] + var border = ListBuffer.empty[Symbol] partitionInto(tparams, tpe.contains, border, pending) - val closed = mutable.ListBuffer.empty[Symbol] - var nextBorder = mutable.ListBuffer.empty[Symbol] + val closed = ListBuffer.empty[Symbol] + var nextBorder = ListBuffer.empty[Symbol] while (!border.isEmpty) { nextBorder.clear() pending.filterInPlace { paramTodo => @@ -4318,15 +4308,15 @@ trait Types if (closed.length == tparams.length) tparams else closed.toList } - if (tparams.isEmpty || (tpe0 eq NoType) ) tpe0 + if (tparams.isEmpty || (tpe0 eq NoType)) tpe0 else { - val tpe = normalizeAliases(tpe0) + val tpe = normalizeAliases(tpe0) val extrapolation = new ExistentialExtrapolation(tparams) if (flipVariance) extrapolation.variance = Contravariant - val tpe1 = extrapolation extrapolate tpe + val tpe1 = extrapolation.extrapolate(tpe) newExistentialType(transitiveReferredFrom(tpe1), tpe1) } - } + } // end existentialAbstraction // Hash consing -------------------------------------------------------------- @@ -5039,8 +5029,8 @@ trait Types } if (!needsStripping) (ts, Nil) // fast path for common case else { - val tparams = mutable.ListBuffer[Symbol]() - val stripped = mutable.ListBuffer[Type]() + val tparams = ListBuffer[Symbol]() + val stripped = ListBuffer[Type]() def stripType(tp: Type): Unit = tp match { case rt: RefinedType if isIntersectionTypeForLazyBaseType(rt) => if (expandLazyBaseType) @@ -5192,7 +5182,7 @@ trait Types def this(msg: String) = this(NoPosition, msg) final override def fillInStackTrace() = - if (settings.debug) super.fillInStackTrace() else this + if (settings.isDebug) super.fillInStackTrace() else this } // TODO: RecoverableCyclicReference should be 
separated from TypeError, @@ -5200,7 +5190,7 @@ trait Types /** An exception for cyclic references from which we can recover */ case class RecoverableCyclicReference(sym: Symbol) extends TypeError("illegal cyclic reference involving " + sym) { - if (settings.debug) printStackTrace() + if (settings.isDebug) printStackTrace() } class NoCommonType(tps: List[Type]) extends ControlThrowable( diff --git a/src/reflect/scala/reflect/internal/pickling/UnPickler.scala b/src/reflect/scala/reflect/internal/pickling/UnPickler.scala index d8abf5b30c13..35131dbefba4 100644 --- a/src/reflect/scala/reflect/internal/pickling/UnPickler.scala +++ b/src/reflect/scala/reflect/internal/pickling/UnPickler.scala @@ -58,8 +58,6 @@ abstract class UnPickler { class Scan(_bytes: Array[Byte], offset: Int, classRoot: ClassSymbol, moduleRoot: ModuleSymbol, filename: String) extends PickleBuffer(_bytes, offset, -1) { //println("unpickle " + classRoot + " and " + moduleRoot)//debug - protected def debug = settings.debug.value - checkVersion() private[this] val loadingMirror = mirrorThatLoaded(classRoot) @@ -401,6 +399,12 @@ abstract class UnPickler { ThisType(sym) } + def fixJavaObjectType(typeRef: Type): Type = { + if (classRoot.isJava && typeRef =:= definitions.ObjectTpe) { + definitions.ObjectTpeJava + } else typeRef + } + // We're stuck with the order types are pickled in, but with judicious use // of named parameters we can recapture a declarative flavor in a few cases. // But it's still a rat's nest of ad-hockery. @@ -411,7 +415,7 @@ abstract class UnPickler { case SINGLEtpe => SingleType(readTypeRef(), readSymbolRef().filter(_.isStable)) // scala/bug#7596 account for overloading case SUPERtpe => SuperType(readTypeRef(), readTypeRef()) case CONSTANTtpe => ConstantType(readConstantRef()) - case TYPEREFtpe => TypeRef(readTypeRef(), readSymbolRef(), readTypes()) + case TYPEREFtpe => fixJavaObjectType(TypeRef(readTypeRef(), readSymbolRef(), readTypes())) case TYPEBOUNDStpe => TypeBounds(readTypeRef(), readTypeRef()) case REFINEDtpe | CLASSINFOtpe => CompoundType(readSymbolRef(), readTypes()) case METHODtpe => MethodTypeRef(readTypeRef(), readSymbols()) diff --git a/src/reflect/scala/reflect/internal/settings/MutableSettings.scala b/src/reflect/scala/reflect/internal/settings/MutableSettings.scala index ca8c24d6e8d3..57c880f894c7 100644 --- a/src/reflect/scala/reflect/internal/settings/MutableSettings.scala +++ b/src/reflect/scala/reflect/internal/settings/MutableSettings.scala @@ -16,6 +16,8 @@ package scala package reflect.internal package settings +import scala.reflect.internal.util.StatisticsStatics + /** A mutable Settings object. 
*/ abstract class MutableSettings extends AbsSettings { @@ -63,13 +65,17 @@ abstract class MutableSettings extends AbsSettings { def YstatisticsEnabled: BooleanSetting def Yrecursion: IntSetting - - def isScala212: Boolean - private[scala] def isScala213: Boolean } object MutableSettings { import scala.language.implicitConversions /** Support the common use case, `if (settings.debug) println("Hello, martin.")` */ @inline implicit def reflectSettingToBoolean(s: MutableSettings#BooleanSetting): Boolean = s.value + + implicit class SettingsOps(private val settings: MutableSettings) extends AnyVal { + @inline final def areStatisticsEnabled = (StatisticsStatics.COLD_STATS_GETTER.invokeExact(): Boolean) && settings.YstatisticsEnabled + @inline final def areHotStatisticsEnabled = (StatisticsStatics.HOT_STATS_GETTER.invokeExact(): Boolean) && settings.YhotStatisticsEnabled + @inline final def isDebug: Boolean = (StatisticsStatics.DEBUG_GETTER.invokeExact(): Boolean) && settings.debug + @inline final def isDeveloper: Boolean = (StatisticsStatics.DEVELOPER_GETTER.invokeExact(): Boolean) && settings.developer + } } diff --git a/src/reflect/scala/reflect/internal/tpe/FindMembers.scala b/src/reflect/scala/reflect/internal/tpe/FindMembers.scala index fa2ba469c276..7cc3f799430a 100644 --- a/src/reflect/scala/reflect/internal/tpe/FindMembers.scala +++ b/src/reflect/scala/reflect/internal/tpe/FindMembers.scala @@ -13,7 +13,7 @@ package scala.reflect.internal package tpe -import util.{ReusableInstance, StatisticsStatics} +import util.ReusableInstance import Flags._ import scala.runtime.Statics.releaseFence @@ -51,10 +51,10 @@ trait FindMembers { // Main entry point def apply(): T = { - if (StatisticsStatics.areSomeColdStatsEnabled) statistics.incCounter(findMemberCount) - val start = if (StatisticsStatics.areSomeColdStatsEnabled) statistics.pushTimer(typeOpsStack, findMemberNanos) else null + if (settings.areStatisticsEnabled) statistics.incCounter(findMemberCount) + val start = if (settings.areStatisticsEnabled) statistics.pushTimer(typeOpsStack, findMemberNanos) else null try searchConcreteThenDeferred - finally if (StatisticsStatics.areSomeColdStatsEnabled) statistics.popTimer(typeOpsStack, start) + finally if (settings.areStatisticsEnabled) statistics.popTimer(typeOpsStack, start) } protected def result: T @@ -316,11 +316,11 @@ trait FindMembers { // Assemble the result from the hand-rolled ListBuffer protected def result: Symbol = if (members eq null) { if (member0 == NoSymbol) { - if (StatisticsStatics.areSomeColdStatsEnabled) statistics.incCounter(noMemberCount) + if (settings.areStatisticsEnabled) statistics.incCounter(noMemberCount) NoSymbol } else member0 } else { - if (StatisticsStatics.areSomeColdStatsEnabled) statistics.incCounter(multMemberCount) + if (settings.areStatisticsEnabled) statistics.incCounter(multMemberCount) lastM.next = Nil releaseFence() initBaseClasses.head.newOverloaded(tpe, members) diff --git a/src/reflect/scala/reflect/internal/tpe/GlbLubs.scala b/src/reflect/scala/reflect/internal/tpe/GlbLubs.scala index 2e7b7a058b3e..ffb24459fce0 100644 --- a/src/reflect/scala/reflect/internal/tpe/GlbLubs.scala +++ b/src/reflect/scala/reflect/internal/tpe/GlbLubs.scala @@ -17,7 +17,6 @@ package tpe import scala.collection.mutable import scala.annotation.tailrec -import scala.reflect.internal.util.StatisticsStatics import Variance._ private[internal] trait GlbLubs { @@ -278,8 +277,8 @@ private[internal] trait GlbLubs { case Nil => NothingTpe case t :: Nil => t case _ => - if 
(StatisticsStatics.areSomeColdStatsEnabled) statistics.incCounter(lubCount) - val start = if (StatisticsStatics.areSomeColdStatsEnabled) statistics.pushTimer(typeOpsStack, lubNanos) else null + if (settings.areStatisticsEnabled) statistics.incCounter(lubCount) + val start = if (settings.areStatisticsEnabled) statistics.pushTimer(typeOpsStack, lubNanos) else null try { val res = lub(ts, lubDepth(ts)) // If the number of unapplied type parameters in all incoming @@ -297,7 +296,7 @@ private[internal] trait GlbLubs { finally { lubResults.clear() glbResults.clear() - if (StatisticsStatics.areSomeColdStatsEnabled) statistics.popTimer(typeOpsStack, start) + if (settings.areStatisticsEnabled) statistics.popTimer(typeOpsStack, start) } } @@ -396,7 +395,7 @@ private[internal] trait GlbLubs { // parameters are not handled correctly. val ok = ts forall { t => isSubType(t, lubRefined, depth.decr) || { - if (settings.debug || printLubs) { + if (settings.isDebug || printLubs) { Console.println( "Malformed lub: " + lubRefined + "\n" + "Argument " + t + " does not conform. Falling back to " + lubBase @@ -420,7 +419,7 @@ private[internal] trait GlbLubs { indent = indent + " " assert(indent.length <= 100, "LUB is highly indented") } - if (StatisticsStatics.areSomeColdStatsEnabled) statistics.incCounter(nestedLubCount) + if (settings.areStatisticsEnabled) statistics.incCounter(nestedLubCount) val res = lub0(ts) if (printLubs) { indent = indent stripSuffix " " @@ -445,14 +444,14 @@ private[internal] trait GlbLubs { case List() => AnyTpe case List(t) => t case ts0 => - if (StatisticsStatics.areSomeColdStatsEnabled) statistics.incCounter(lubCount) - val start = if (StatisticsStatics.areSomeColdStatsEnabled) statistics.pushTimer(typeOpsStack, lubNanos) else null + if (settings.areStatisticsEnabled) statistics.incCounter(lubCount) + val start = if (settings.areStatisticsEnabled) statistics.pushTimer(typeOpsStack, lubNanos) else null try { glbNorm(ts0, lubDepth(ts0)) } finally { lubResults.clear() glbResults.clear() - if (StatisticsStatics.areSomeColdStatsEnabled) statistics.popTimer(typeOpsStack, start) + if (settings.areStatisticsEnabled) statistics.popTimer(typeOpsStack, start) } } @@ -575,7 +574,7 @@ private[internal] trait GlbLubs { } } // if (settings.debug.value) { println(indent + "glb of " + ts + " at depth "+depth); indent = indent + " " } //DEBUG - if (StatisticsStatics.areSomeColdStatsEnabled) statistics.incCounter(nestedLubCount) + if (settings.areStatisticsEnabled) statistics.incCounter(nestedLubCount) glb0(ts) // if (settings.debug.value) { indent = indent.substring(0, indent.length() - 2); log(indent + "glb of " + ts + " is " + res) }//DEBUG } diff --git a/src/reflect/scala/reflect/internal/tpe/TypeComparers.scala b/src/reflect/scala/reflect/internal/tpe/TypeComparers.scala index baaa6d4561c7..92357d0e0e19 100644 --- a/src/reflect/scala/reflect/internal/tpe/TypeComparers.scala +++ b/src/reflect/scala/reflect/internal/tpe/TypeComparers.scala @@ -18,7 +18,6 @@ package tpe import scala.collection.mutable import util.TriState import scala.annotation.tailrec -import scala.reflect.internal.util.StatisticsStatics trait TypeComparers { self: SymbolTable => @@ -66,7 +65,7 @@ trait TypeComparers { private def isSubPre(pre1: Type, pre2: Type, sym: Symbol) = if ((pre1 ne pre2) && (pre1 ne NoPrefix) && (pre2 ne NoPrefix) && pre1 <:< pre2) { - if (settings.debug) println(s"new isSubPre $sym: $pre1 <:< $pre2") + if (settings.isDebug) println(s"new isSubPre $sym: $pre1 <:< $pre2") true } else false @@ -104,7 +103,7 
@@ trait TypeComparers { /** Do `tp1` and `tp2` denote equivalent types? */ def isSameType(tp1: Type, tp2: Type): Boolean = try { - if (StatisticsStatics.areSomeColdStatsEnabled) statistics.incCounter(sametypeCount) + if (settings.areStatisticsEnabled) statistics.incCounter(sametypeCount) subsametypeRecursions += 1 //OPT cutdown on Function0 allocation //was: @@ -179,7 +178,7 @@ trait TypeComparers { sameLength(tparams1, tparams2) && { // corresponds does not check length of two sequences before checking the predicate, // but SubstMap assumes it has been checked (scala/bug#2956) - val substMap = new SubstSymMap(tparams2, tparams1) + val substMap = SubstSymMap(tparams2, tparams1) ( (tparams1 corresponds tparams2)((p1, p2) => methodHigherOrderTypeParamsSameVariance(p1, p2) && p1.info =:= substMap(p2.info)) && (res1 =:= substMap(res2)) @@ -358,8 +357,8 @@ trait TypeComparers { //@M for an example of why we need to generate fresh symbols otherwise, see neg/tcpoly_ticket2101.scala val substitutes = if (isMethod) tparams1 else cloneSymbols(tparams1) - val sub1: Type => Type = if (isMethod) (tp => tp) else new SubstSymMap(tparams1, substitutes) - val sub2: Type => Type = new SubstSymMap(tparams2, substitutes) + val sub1: Type => Type = if (isMethod) (tp => tp) else SubstSymMap(tparams1, substitutes) + val sub2: Type => Type = SubstSymMap(tparams2, substitutes) def cmp(p1: Symbol, p2: Symbol) = sub2(p2.info) <:< sub1(p1.info) (tparams1 corresponds tparams2)(cmp) && (sub1(res1) <:< sub2(res2)) @@ -404,7 +403,7 @@ trait TypeComparers { } def isSub(tp1: Type, tp2: Type) = - settings.isScala213 && isSubHKTypeVar(tp1, tp2) || + isSubHKTypeVar(tp1, tp2) || isSub2(tp1.normalize, tp2.normalize) // @M! normalize reduces higher-kinded typeref to PolyType def isSub2(ntp1: Type, ntp2: Type) = (ntp1, ntp2) match { diff --git a/src/reflect/scala/reflect/internal/tpe/TypeConstraints.scala b/src/reflect/scala/reflect/internal/tpe/TypeConstraints.scala index 7af1bb9376a3..9376640a5d17 100644 --- a/src/reflect/scala/reflect/internal/tpe/TypeConstraints.scala +++ b/src/reflect/scala/reflect/internal/tpe/TypeConstraints.scala @@ -62,7 +62,7 @@ private[internal] trait TypeConstraints { } def clear(): Unit = { - if (settings.debug) + if (settings.isDebug) self.log("Clearing " + log.size + " entries from the undoLog.") log = Nil } diff --git a/src/reflect/scala/reflect/internal/tpe/TypeMaps.scala b/src/reflect/scala/reflect/internal/tpe/TypeMaps.scala index 5604e7d88e86..96684ffe9f3e 100644 --- a/src/reflect/scala/reflect/internal/tpe/TypeMaps.scala +++ b/src/reflect/scala/reflect/internal/tpe/TypeMaps.scala @@ -663,21 +663,46 @@ private[internal] trait TypeMaps { override def toString = s"AsSeenFromMap($seenFromPrefix, $seenFromClass)" } - /** A base class to compute all substitutions */ - abstract class SubstMap[T](from: List[Symbol], to: List[T]) extends TypeMap { - // OPT this check was 2-3% of some profiles, demoted to -Xdev - if (isDeveloper) assert(sameLength(from, to), "Unsound substitution from "+ from +" to "+ to) + /** A base class to compute all substitutions. 
*/ + abstract class SubstMap[T >: Null](from0: List[Symbol], to0: List[T]) extends TypeMap { + private[this] var from: List[Symbol] = from0 + private[this] var to: List[T] = to0 private[this] var fromHasTermSymbol = false private[this] var fromMin = Int.MaxValue private[this] var fromMax = Int.MinValue private[this] var fromSize = 0 - from.foreach { - sym => - fromMin = math.min(fromMin, sym.id) - fromMax = math.max(fromMax, sym.id) - fromSize += 1 - if (sym.isTerm) fromHasTermSymbol = true + + // So SubstTypeMap can expose them publicly + // while SubstMap can continue to access them as private fields + protected[this] final def accessFrom: List[Symbol] = from + protected[this] final def accessTo: List[T] = to + + reset(from0, to0) + def reset(from0: List[Symbol], to0: List[T]): this.type = { + // OPT this check was 2-3% of some profiles, demoted to -Xdev + if (isDeveloper) assert(sameLength(from, to), "Unsound substitution from "+ from +" to "+ to) + + from = from0 + to = to0 + + fromHasTermSymbol = false + fromMin = Int.MaxValue + fromMax = Int.MinValue + fromSize = 0 + + def scanFrom(ss: List[Symbol]): Unit = + ss match { + case sym :: rest => + fromMin = math.min(fromMin, sym.id) + fromMax = math.max(fromMax, sym.id) + fromSize += 1 + if (sym.isTerm) fromHasTermSymbol = true + scanFrom(rest) + case _ => () + } + scanFrom(from) + this } /** Are `sym` and `sym1` the same? Can be tuned by subclasses. */ @@ -760,9 +785,12 @@ private[internal] trait TypeMaps { } /** A map to implement the `substSym` method. */ - class SubstSymMap(from: List[Symbol], to: List[Symbol]) extends SubstMap(from, to) { + class SubstSymMap(from0: List[Symbol], to0: List[Symbol]) extends SubstMap[Symbol](from0, to0) { def this(pairs: (Symbol, Symbol)*) = this(pairs.toList.map(_._1), pairs.toList.map(_._2)) + private[this] final def from: List[Symbol] = accessFrom + private[this] final def to: List[Symbol] = accessTo + protected def toType(fromTpe: Type, sym: Symbol) = fromTpe match { case TypeRef(pre, _, args) => copyTypeRef(fromTpe, pre, sym, args) case SingleType(pre, _) => singleType(pre, sym) @@ -821,9 +849,18 @@ private[internal] trait TypeMaps { mapTreeSymbols.transform(tree) } + object SubstSymMap { + def apply(): SubstSymMap = new SubstSymMap() + def apply(from: List[Symbol], to: List[Symbol]): SubstSymMap = new SubstSymMap(from, to) + def apply(fromto: (Symbol, Symbol)): SubstSymMap = new SubstSymMap(fromto) + } + /** A map to implement the `subst` method. 
*/ - class SubstTypeMap(val from: List[Symbol], val to: List[Type]) extends SubstMap(from, to) { - protected def toType(fromtp: Type, tp: Type) = tp + class SubstTypeMap(from0: List[Symbol], to0: List[Type]) extends SubstMap[Type](from0, to0) { + final def from: List[Symbol] = accessFrom + final def to: List[Type] = accessTo + + override protected def toType(fromtp: Type, tp: Type) = tp override def mapOver(tree: Tree, giveup: () => Nothing): Tree = { object trans extends TypeMapTransformer { diff --git a/src/reflect/scala/reflect/internal/tpe/TypeToStrings.scala b/src/reflect/scala/reflect/internal/tpe/TypeToStrings.scala index e9691b9b404f..8a8540df3cea 100644 --- a/src/reflect/scala/reflect/internal/tpe/TypeToStrings.scala +++ b/src/reflect/scala/reflect/internal/tpe/TypeToStrings.scala @@ -39,7 +39,7 @@ private[internal] trait TypeToStrings { // else if (toStringRecursions >= maxToStringRecursions) { devWarning("Exceeded recursion depth attempting to print " + util.shortClassOfInstance(tpe)) - if (settings.debug) + if (settings.isDebug) (new Throwable).printStackTrace "..." diff --git a/src/reflect/scala/reflect/internal/transform/Erasure.scala b/src/reflect/scala/reflect/internal/transform/Erasure.scala index 981a0e3ce140..c42455575db8 100644 --- a/src/reflect/scala/reflect/internal/transform/Erasure.scala +++ b/src/reflect/scala/reflect/internal/transform/Erasure.scala @@ -100,10 +100,11 @@ trait Erasure { assert(!phase.erasedTypes, "Types are erased") val clazz = tref.sym if (valueClassIsParametric(clazz)) { - val underlying = tref.memberType(clazz.derivedValueClassUnbox).resultType - boxingErasure(underlying) + val erasureMap = if (clazz.isScala3Defined) boxing3Erasure else boxingErasure + erasureMap(tref.memberType(clazz.derivedValueClassUnbox).resultType) } else { - scalaErasure(underlyingOfValueClass(clazz)) + val erasureMap = if (clazz.isScala3Defined) scala3Erasure else scalaErasure + erasureMap(underlyingOfValueClass(clazz)) } } @@ -118,6 +119,7 @@ trait Erasure { abstract class ErasureMap extends TypeMap { def mergeParents(parents: List[Type]): Type + def eraseArray(arrayRef: Type, pre: Type, args: List[Type]): Type def eraseNormalClassRef(tref: TypeRef): Type = { val TypeRef(pre, clazz, args) = tref @@ -140,21 +142,15 @@ trait Erasure { apply(st.supertype) case tref @ TypeRef(pre, sym, args) => def isDottyEnumSingleton(sym: Symbol): Boolean = - sym.isModuleClass && sym.sourceModule.hasAttachment[DottyEnumSingleton] - if (sym eq ArrayClass) - if (unboundedGenericArrayLevel(tp) == 1) ObjectTpe - else if (args.head.typeSymbol.isBottomClass) arrayType(ObjectTpe) - else typeRef(apply(pre), sym, args map applyInArray) + sym.isScala3Defined && sym.isModuleClass && sym.sourceModule.hasAttachment[DottyEnumSingleton.type] + if (sym eq ArrayClass) eraseArray(tp, pre, args) else if ((sym eq AnyClass) || (sym eq AnyValClass) || (sym eq SingletonClass)) ObjectTpe else if (sym eq UnitClass) BoxedUnitTpe else if (sym.isRefinementClass) apply(mergeParents(tp.parents)) else if (sym.isDerivedValueClass) eraseDerivedValueClassRef(tref) - else if (isDottyEnumSingleton(sym)) apply(intersectionType(tp.parents)) // TODO [tasty]: dotty enum singletons are not modules. + else if (isDottyEnumSingleton(sym)) apply(mergeParents(tp.parents)) // TODO [tasty]: dotty enum singletons are not modules. 
else if (sym.isClass) eraseNormalClassRef(tref) - else sym.attachments.get[DottyOpaqueTypeAlias] match { - case Some(alias: DottyOpaqueTypeAlias) => apply(alias.tpe) // TODO [tasty]: refactor if we build-in opaque types - case _ => apply(sym.info.asSeenFrom(pre, sym.owner)) // alias type or abstract type - } + else apply(transparentDealias(sym, pre, sym.owner)) // alias type or abstract type (including opaque type) case PolyType(tparams, restpe) => apply(restpe) case ExistentialType(tparams, restpe) => @@ -246,10 +242,16 @@ trait Erasure { * parents |Ps|, but with duplicate references of Object removed. * - for all other types, the type itself (with any sub-components erased) */ - def erasure(sym: Symbol): ErasureMap = - if (sym == NoSymbol || !sym.enclClass.isJavaDefined) scalaErasure - else if (verifyJavaErasure && sym.isMethod) verifiedJavaErasure - else javaErasure + def erasure(sym: Symbol): ErasureMap = { + if (sym == NoSymbol) return scalaErasure + val enclosing = sym.enclClass + if (enclosing.isJavaDefined) { + if (verifyJavaErasure && sym.isMethod) verifiedJavaErasure + else javaErasure + } + else if (enclosing.isScala3Defined) scala3Erasure + else scalaErasure + } /** This is used as the Scala erasure during the erasure phase itself * It differs from normal erasure in that value classes are erased to ErasedValueTypes which @@ -260,8 +262,9 @@ trait Erasure { erasure(sym)(tp) else if (sym.isClassConstructor) specialConstructorErasure(sym.owner, tp) - else - specialScalaErasure(tp) + else { + specialScalaErasureFor(sym)(tp) + } def specialConstructorErasure(clazz: Symbol, tpe: Type): Type = { tpe match { @@ -271,14 +274,14 @@ trait Erasure { specialConstructorErasure(clazz, restpe) case mt @ MethodType(params, restpe) => MethodType( - cloneSymbolsAndModify(params, specialScalaErasure), + cloneSymbolsAndModify(params, specialScalaErasureFor(clazz)), specialConstructorErasure(clazz, restpe)) case TypeRef(pre, `clazz`, args) => typeRef(pre, clazz, List()) case tp => if (!(clazz == ArrayClass || tp.isError)) assert(clazz == ArrayClass || tp.isError, s"!!! unexpected constructor erasure $tp for $clazz") - specialScalaErasure(tp) + specialScalaErasureFor(clazz)(tp) } } @@ -294,7 +297,8 @@ trait Erasure { * For this reason and others (such as distinguishing constructors from other methods) * erasure is now (Symbol, Type) => Type rather than Type => Type. */ - class ScalaErasureMap extends ErasureMap { + abstract class ScalaErasureMap extends ErasureMap with Scala2JavaArrayErasure { + /** In scala, calculate a useful parent. * An intersection such as `Object with Trait` erases to Trait. 
*/ @@ -302,7 +306,213 @@ trait Erasure { intersectionDominator(parents) } - class JavaErasureMap extends ErasureMap { + trait Scala2JavaArrayErasure { self: ErasureMap => + + def eraseArray(arrayRef: Type, pre: Type, args: List[Type]): Type = + if (unboundedGenericArrayLevel(arrayRef) == 1) ObjectTpe + else if (args.head.typeSymbol.isBottomClass) arrayType(ObjectTpe) + else typeRef(self(pre), ArrayClass, args map applyInArray) + + } + + class Scala3ErasureMap extends ErasureMap { self => + + def mergeParents(parents: List[Type]): Type = { + erasedGlb(parents.map(self(_))) + } + + def mergeParentsInArray(parents: List[Type]): Type = { + erasedGlb(parents.map(super.applyInArray(_))) + } + + override def applyInArray(tp: Type): Type = { + tp match { + case RefinedType(parents, _) => + super.applyInArray(mergeParentsInArray(parents)) + case _ => + super.applyInArray(tp) + } + } + + def eraseArray(arrayRef: Type, pre: Type, args: List[Type]): Type = { + if (isGenericArrayElement(args.head)) ObjectTpe + else typeRef(self(pre), ArrayClass, args map applyInArray) + } + + /** Scala 3 implementation of erasure for intersection types. + * @param components the erased component types of the intersection. + */ + private def erasedGlb(components: List[Type]): Type = { + + /** A comparison function that induces a total order on erased types, + * where `A <= B` implies that the erasure of `A & B` should be A. + * + * This order respects the following properties: + * - ErasedValueTypes <= non-ErasedValueTypes + * - arrays <= non-arrays + * - primitives <= non-primitives + * - real classes <= traits + * - subtypes <= supertypes + * + * Since this isn't enough to order to unrelated classes, we use + * lexicographic ordering of the class symbol full name as a tie-breaker. + * This ensure that `A <= B && B <= A` iff `A =:= B`. + */ + def compareErasedGlb(tp1: Type, tp2: Type): Int = { + // this check is purely an optimization. + if (tp1 eq tp2) return 0 + + val isEVT1 = tp1.isInstanceOf[ErasedValueType] + val isEVT2 = tp2.isInstanceOf[ErasedValueType] + if (isEVT1 && isEVT2) { + return compareErasedGlb( + tp1.asInstanceOf[ErasedValueType].valueClazz.tpe_*, + tp2.asInstanceOf[ErasedValueType].valueClazz.tpe_*) + } + else if (isEVT1) + return -1 + else if (isEVT2) + return 1 + + val sym1 = tp1.baseClasses.head + val sym2 = tp2.baseClasses.head + + def compareClasses: Int = { + if (sym1.isSubClass(sym2)) + -1 + else if (sym2.isSubClass(sym1)) + 1 + else + sym1.fullName.compareTo(sym2.fullName) + } + + val isArray1 = tp1.typeArgs.nonEmpty && sym1.isSubClass(definitions.ArrayClass) + val isArray2 = tp2.typeArgs.nonEmpty && sym2.isSubClass(definitions.ArrayClass) + if (isArray1 && isArray2) + return compareErasedGlb(tp1.typeArgs.head, tp2.typeArgs.head) + else if (isArray1) + return -1 + else if (isArray2) + return 1 + + val isPrimitive1 = sym1.isPrimitiveValueClass + val isPrimitive2 = sym2.isPrimitiveValueClass + if (isPrimitive1 && isPrimitive2) + return compareClasses + else if (isPrimitive1) + return -1 + else if (isPrimitive2) + return 1 + + val isRealClass1 = sym1.isClass && !sym1.isTrait + val isRealClass2 = sym2.isClass && !sym2.isTrait + if (isRealClass1 && isRealClass2) + return compareClasses + else if (isRealClass1) + return -1 + else if (isRealClass2) + return 1 + + compareClasses + } + + components.min((t, u) => compareErasedGlb(t, u)) + } + + /** Dotty implementation of Array Erasure: + * + * Is `Array[tp]` a generic Array that needs to be erased to `Object`? 
+ * This is true if among the subtypes of `Array[tp]` there is either: + * - both a reference array type and a primitive array type + * (e.g. `Array[_ <: Int | String]`, `Array[_ <: Any]`) + * - or two different primitive array types (e.g. `Array[_ <: Int | Double]`) + * In both cases the erased lub of those array types on the JVM is `Object`. + */ + private def isGenericArrayElement(tp: Type): Boolean = { + + object DottyTypeProxy { + + def unapply(tp: Type): Option[Type] = { + val superTpe = translucentSuperType(tp) + if (superTpe ne NoType) Some(superTpe) else None + } + + def translucentSuperType(tp: Type): Type = tp match { + case tp: TypeRef => transparentDealias(tp.sym, tp.pre, tp.sym.owner) + case tp: SingleType => tp.underlying + case tp: ThisType => tp.sym.typeOfThis + case tp: ConstantType => tp.value.tpe + case tp: RefinedType if tp.decls.nonEmpty => intersectionType(tp.parents) + case tp: PolyType => tp.resultType + case tp: ExistentialType => tp.underlying + case tp: TypeBounds => tp.hi + case tp: AnnotatedType => tp.underlying + case tp: SuperType => tp.thistpe.baseType(tp.supertpe.typeSymbol) + case tp => NoType + } + + } + + object DottyAndType { + def unapply(tp: RefinedType): Boolean = tp.decls.isEmpty + } + + /** A symbol that represents the sort of JVM array that values of type `t` can be stored in: + * - If we can always store such values in a reference array, return Object + * - If we can always store them in a specific primitive array, return the + * corresponding primitive class + * - Otherwise, return `NoSymbol`. + */ + def arrayUpperBound(tp: Type): Symbol = tp.dealias match { + case tp: TypeRef if tp.sym.isClass => + val cls = tp.sym + // Only a few classes have both primitives and references as subclasses. + if ((cls eq AnyClass) || (cls eq AnyValClass) || (cls eq SingletonClass)) + NoSymbol + // We only need to check for primitives because derived value classes in arrays are always boxed. + else if (cls.isPrimitiveValueClass) + cls + else + ObjectClass + case DottyTypeProxy(unwrapped) => + arrayUpperBound(unwrapped) + case tp @ DottyAndType() => + // Find first `p` in `parents` where `arrayUpperBound(p) ne NoSymbol` + @tailrec def loop(tps: List[Type]): Symbol = tps match { + case tp :: tps1 => + val ub = arrayUpperBound(tp) + if (ub ne NoSymbol) ub + else loop(tps1) + case nil => NoSymbol + } + loop(tp.parents) + case _ => + NoSymbol + } + + /** Can one of the JVM Array type store all possible values of type `t`? */ + def fitsInJVMArray(tp: Type): Boolean = arrayUpperBound(tp) ne NoSymbol + + def isOpaque(sym: Symbol) = sym.isScala3Defined && !sym.isClass && sym.hasAttachment[DottyOpaqueTypeAlias] + + tp.dealias match { + case tp: TypeRef if !isOpaque(tp.sym) => + !tp.sym.isClass && + !tp.sym.isJavaDefined && // In Java code, Array[T] can never erase to Object + !fitsInJVMArray(tp) + case DottyTypeProxy(unwrapped) => + isGenericArrayElement(unwrapped) + case tp @ DottyAndType() => + tp.parents.forall(isGenericArrayElement) + case tp => + false + } + + } + + } + + class JavaErasureMap extends ErasureMap with Scala2JavaArrayErasure { /** In java, always take the first parent. * An intersection such as `Object with Trait` erases to Object. 
*/ @@ -314,14 +524,27 @@ trait Erasure { } object scalaErasure extends ScalaErasureMap + object scala3Erasure extends Scala3ErasureMap + + trait SpecialScalaErasure extends ErasureMap { + override def eraseDerivedValueClassRef(tref: TypeRef): Type = + ErasedValueType(tref.sym, erasedValueClassArg(tref)) + } /** This is used as the Scala erasure during the erasure phase itself * It differs from normal erasure in that value classes are erased to ErasedValueTypes which * are then later unwrapped to the underlying parameter type in phase posterasure. */ - object specialScalaErasure extends ScalaErasureMap { - override def eraseDerivedValueClassRef(tref: TypeRef): Type = - ErasedValueType(tref.sym, erasedValueClassArg(tref)) + object specialScalaErasure extends ScalaErasureMap with SpecialScalaErasure + + /** This is used as the Scala erasure for Scala 3 methods during the erasure phase itself. + * @see specialScalaErasure + */ + object specialScala3Erasure extends Scala3ErasureMap with SpecialScalaErasure + + def specialScalaErasureFor(sym: Symbol): ErasureMap = { + if (sym.isScala3Defined) specialScala3Erasure + else specialScalaErasure } object javaErasure extends JavaErasureMap @@ -336,7 +559,8 @@ trait Erasure { } } - object boxingErasure extends ScalaErasureMap { + trait BoxingErasure extends ErasureMap { + private[this] var boxPrimitives = true override def applyInArray(tp: Type): Type = { @@ -349,10 +573,15 @@ trait Erasure { override def eraseNormalClassRef(tref: TypeRef) = if (boxPrimitives && isPrimitiveValueClass(tref.sym)) boxedClass(tref.sym).tpe else super.eraseNormalClassRef(tref) + override def eraseDerivedValueClassRef(tref: TypeRef) = super.eraseNormalClassRef(tref) + } + object boxingErasure extends ScalaErasureMap with BoxingErasure + object boxing3Erasure extends Scala3ErasureMap with BoxingErasure + /** The intersection dominator (SLS 3.7) of a list of types is computed as follows. * * - If the list contains one or more occurrences of scala.Array with @@ -388,6 +617,21 @@ trait Erasure { } } + /** For a type alias, get its info as seen from + * the current prefix and owner. + * Sees through opaque type aliases. + */ + def transparentDealias(sym: Symbol, pre: Type, owner: Symbol) = { + @inline def visible(tp: Type) = tp.asSeenFrom(pre, owner) + + if (sym.isScala3Defined && !sym.isClass) + sym.attachments.get[DottyOpaqueTypeAlias] + .map(alias => visible(alias.tpe)) + .getOrElse(visible(sym.info)) + else + visible(sym.info) + } + /** The symbol's erased info. 
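// Editorial aside, not part of the patch: what the ErasedValueType indirection used by
// specialScalaErasure above means for a derived value class (invented example; the
// signatures below are intuition, not compiler output). During the erasure phase the
// wrapper is remembered as ErasedValueType(Meters, Double); posterasure then unwraps it
// to the underlying type.
class Meters(val value: Double) extends AnyVal
def twice(m: Meters): Meters = new Meters(m.value * 2)
// during erasure:     twice(m: ErasedValueType(Meters, Double)): ErasedValueType(Meters, Double)
// after posterasure:  twice(m: Double): Double, i.e. the JVM signature is (D)D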
This is the type's erasure, except for the following primitive symbols: * * - $asInstanceOf --> [T]T @@ -414,7 +658,7 @@ trait Erasure { if (sym.isClassConstructor) // TODO: switch on name for all branches -- this one is sym.name == nme.CONSTRUCTOR tp match { case MethodType(params, TypeRef(pre, sym1, args)) => - MethodType(cloneSymbolsAndModify(params, specialErasure(sym)), + MethodType(cloneSymbolsAndModify(params, tp => specialErasure(sym)(tp)), typeRef(specialErasure(sym)(pre), sym1, args)) case x => throw new MatchError(x) } diff --git a/src/reflect/scala/reflect/internal/transform/Transforms.scala b/src/reflect/scala/reflect/internal/transform/Transforms.scala index 8a4bc08c0a45..eecc286f2044 100644 --- a/src/reflect/scala/reflect/internal/transform/Transforms.scala +++ b/src/reflect/scala/reflect/internal/transform/Transforms.scala @@ -49,7 +49,10 @@ trait Transforms { self: SymbolTable => erasure.transformInfo(sym, uncurry.transformInfo(sym, sym.info))) - def transformedType(tpe: Type) = - postErasure.elimErasedValueType(erasure.scalaErasure(uncurry.uncurry(tpe))) + def transformedType(tpe: Type) = { + val symbol = tpe.widen.typeSymbol + val erasureMap = if (symbol.isScala3Defined) erasure.scala3Erasure else erasure.scalaErasure + postErasure.elimErasedValueType(erasureMap(uncurry.uncurry(tpe))) + } } diff --git a/src/reflect/scala/reflect/internal/util/AbstractFileClassLoader.scala b/src/reflect/scala/reflect/internal/util/AbstractFileClassLoader.scala index 04591dc0fa6c..2c50d5cf9443 100644 --- a/src/reflect/scala/reflect/internal/util/AbstractFileClassLoader.scala +++ b/src/reflect/scala/reflect/internal/util/AbstractFileClassLoader.scala @@ -109,8 +109,9 @@ class AbstractFileClassLoader(val root: AbstractFile, parent: ClassLoader) throw new UnsupportedOperationException() } + // TODO: `getPackage` is deprecated in JDK 9+ - what should be overridden instead? override def getPackage(name: String): Package = findAbstractDir(name) match { - case null => super.getPackage(name) + case null => super.getPackage(name): @nowarn("cat=deprecation") case file => packages.getOrElseUpdate(name, { val ctor = classOf[Package].getDeclaredConstructor(classOf[String], classOf[String], classOf[String], classOf[String], classOf[String], classOf[String], classOf[String], classOf[URL], classOf[ClassLoader]) ctor.setAccessible(true) diff --git a/src/reflect/scala/reflect/internal/util/AlmostFinalValue.java b/src/reflect/scala/reflect/internal/util/AlmostFinalValue.java index 415f91f9a8ff..f9bb24f00a85 100644 --- a/src/reflect/scala/reflect/internal/util/AlmostFinalValue.java +++ b/src/reflect/scala/reflect/internal/util/AlmostFinalValue.java @@ -14,93 +14,35 @@ import java.lang.invoke.MethodHandle; import java.lang.invoke.MethodHandles; -import java.lang.invoke.MethodType; import java.lang.invoke.MutableCallSite; -import java.lang.invoke.SwitchPoint; /** * Represents a value that is wrapped with JVM machinery to allow the JVM - * to speculate on its content and effectively optimize it as if it was final. - * - * This file has been drawn from JSR292 cookbook created by RĆ©mi Forax. - * https://code.google.com/archive/p/jsr292-cookbook/. The explanation of the strategy - * can be found in https://community.oracle.com/blogs/forax/2011/12/17/jsr-292-goodness-almost-static-final-field. - * - * Before copying this file to the repository, I tried to adapt the most important - * parts of this implementation and special case it for `Statistics`, but that - * caused an important performance penalty (~10%). 
This performance penalty is - * due to the fact that using `static`s for the method handles and all the other + * to speculate on its content and effectively optimize it as if it was a constant. + * + * Originally from the JSR-292 cookbook created by RĆ©mi Forax: + * https://code.google.com/archive/p/jsr292-cookbook/. + * + * Implemented in Java because using `static`s for the method handles and all the other * fields is extremely important for the JVM to correctly optimize the code, and * we cannot do that if we make `Statistics` an object extending `MutableCallSite` - * in Scala. We instead rely on the Java implementation that uses a boxed representation. + * in Scala. + * + * Subsequently specialised for booleans, to avoid needless Boolean boxing. + * + * Finally reworked to default to false and only allow for the value to be toggled on, + * using RĆ©mi Forax's newer "MostlyConstant" as inspiration, in https://github.com/forax/exotic. */ -public class AlmostFinalValue { - private final AlmostFinalCallSite callsite = - new AlmostFinalCallSite(this); - - protected boolean initialValue() { - return false; - } - - public MethodHandle createGetter() { - return callsite.dynamicInvoker(); - } - - public void setValue(boolean value) { - callsite.setValue(value); - } - - private static class AlmostFinalCallSite extends MutableCallSite { - private Boolean value; - private SwitchPoint switchPoint; - private final AlmostFinalValue volatileFinalValue; - private final MethodHandle fallback; - private final Object lock; - - private static final Boolean NONE = null; - private static final MethodHandle FALLBACK; - static { - try { - FALLBACK = MethodHandles.lookup().findVirtual(AlmostFinalCallSite.class, "fallback", - MethodType.methodType(Boolean.TYPE)); - } catch (NoSuchMethodException|IllegalAccessException e) { - throw new AssertionError(e.getMessage(), e); - } - } - - AlmostFinalCallSite(AlmostFinalValue volatileFinalValue) { - super(MethodType.methodType(Boolean.TYPE)); - Object lock = new Object(); - MethodHandle fallback = FALLBACK.bindTo(this); - synchronized(lock) { - value = null; - switchPoint = new SwitchPoint(); - setTarget(fallback); - } - this.volatileFinalValue = volatileFinalValue; - this.lock = lock; - this.fallback = fallback; - } +final class AlmostFinalValue { + private static final MethodHandle K_FALSE = MethodHandles.constant(boolean.class, false); + private static final MethodHandle K_TRUE = MethodHandles.constant(boolean.class, true); + + private final MutableCallSite callsite = new MutableCallSite(K_FALSE); + final MethodHandle invoker = callsite.dynamicInvoker(); - boolean fallback() { - synchronized(lock) { - Boolean value = this.value; - if (value == NONE) { - value = volatileFinalValue.initialValue(); - } - MethodHandle target = switchPoint.guardWithTest(MethodHandles.constant(Boolean.TYPE, value), fallback); - setTarget(target); - return value; - } - } - - void setValue(boolean value) { - synchronized(lock) { - SwitchPoint switchPoint = this.switchPoint; - this.value = value; - this.switchPoint = new SwitchPoint(); - SwitchPoint.invalidateAll(new SwitchPoint[] {switchPoint}); - } - } + void toggleOnAndDeoptimize() { + if (callsite.getTarget() == K_TRUE) return; + callsite.setTarget(K_TRUE); + MutableCallSite.syncAll(new MutableCallSite[] { callsite }); } -} \ No newline at end of file +} diff --git a/src/reflect/scala/reflect/internal/util/ChromeTrace.scala b/src/reflect/scala/reflect/internal/util/ChromeTrace.scala index ff5e19043ca8..8b3c533648ed 
100644 --- a/src/reflect/scala/reflect/internal/util/ChromeTrace.scala +++ b/src/reflect/scala/reflect/internal/util/ChromeTrace.scala @@ -43,7 +43,7 @@ final class ChromeTrace(f: Path) extends Closeable { private val traceWriter = FileUtils.newAsyncBufferedWriter(f) private val context = mutable.Stack[JsonContext](TopContext) private val tidCache = new ThreadLocal[String]() { - override def initialValue(): String = Thread.currentThread().getId.formatted("%05d") + override def initialValue(): String = f"${Thread.currentThread().getId}%05d" } objStart() fld("traceEvents") diff --git a/src/reflect/scala/reflect/internal/util/ReusableInstance.scala b/src/reflect/scala/reflect/internal/util/ReusableInstance.scala index 5dea888f6d2e..8853e7d72242 100644 --- a/src/reflect/scala/reflect/internal/util/ReusableInstance.scala +++ b/src/reflect/scala/reflect/internal/util/ReusableInstance.scala @@ -19,17 +19,18 @@ import scala.util.chaining._ * The wrapper is recursion-reentrant: several instances are kept, so * at each depth of reentrance we are reusing the instance for that. * - * An instance is created upon creating this object, and more instances - * are allocated dynamically, on demand, when reentrance occurs. + * An instance is created eagerly, then more instances + * are allocated as needed on re-entry. Once allocated, + * cached instances are not reclaimed for the life of this ReusableInstance. * * Not thread safe. */ -final class ReusableInstance[T <: AnyRef] private (make: => T, enabled: Boolean) { - private[this] val cache = if (enabled) new ArrayBuffer[T](ReusableInstance.InitialSize).tap(_.addOne(make)) else null +final class ReusableInstance[T <: AnyRef] private (make: => T, initialSize: Int) { + private[this] val cache = if (initialSize > 0) new ArrayBuffer[T](initialSize).tap(_.addOne(make)) else null private[this] var taken = 0 @inline def using[R](action: T => R): R = - if (!enabled) + if (cache == null) action(make) else { if (taken == cache.size) @@ -42,6 +43,12 @@ final class ReusableInstance[T <: AnyRef] private (make: => T, enabled: Boolean) object ReusableInstance { private final val InitialSize = 4 - def apply[T <: AnyRef](make: => T): ReusableInstance[T] = new ReusableInstance[T](make, enabled = true) - def apply[T <: AnyRef](make: => T, enabled: Boolean): ReusableInstance[T] = new ReusableInstance[T](make, enabled = enabled) + def apply[T <: AnyRef](make: => T, initialSize: Int): ReusableInstance[T] = new ReusableInstance[T](make, initialSize) + + def apply[T <: AnyRef](make: => T): ReusableInstance[T] = + apply(make, InitialSize) + def apply[T <: AnyRef](make: => T, enabled: Boolean): ReusableInstance[T] = + if (enabled) apply(make) else apply(make, -1) + def apply[T <: AnyRef](make: => T, initialSize: Int, enabled: Boolean): ReusableInstance[T] = + if (enabled) apply(make, initialSize) else apply(make, -1) } diff --git a/src/reflect/scala/reflect/internal/util/Statistics.scala b/src/reflect/scala/reflect/internal/util/Statistics.scala index b9ef1220a003..ce12b1c7a159 100644 --- a/src/reflect/scala/reflect/internal/util/Statistics.scala +++ b/src/reflect/scala/reflect/internal/util/Statistics.scala @@ -22,57 +22,49 @@ import scala.annotation.nowarn import scala.runtime.LongRef abstract class Statistics(val symbolTable: SymbolTable, settings: MutableSettings) { - - initFromSettings(settings) - - def initFromSettings(currentSettings: MutableSettings): Unit = { - enabled = currentSettings.YstatisticsEnabled - hotEnabled = currentSettings.YhotStatisticsEnabled - } - type 
TimerSnapshot = (Long, Long) /** If enabled, increment counter by one */ @inline final def incCounter(c: Counter): Unit = { - if (areStatisticsLocallyEnabled && c != null) c.value += 1 + if (enabled && c != null) c.value += 1 } /** If enabled, increment counter by given delta */ @inline final def incCounter(c: Counter, delta: Int): Unit = { - if (areStatisticsLocallyEnabled && c != null) c.value += delta + if (enabled && c != null) c.value += delta } /** If enabled, increment counter in map `ctrs` at index `key` by one */ @inline final def incCounter[K](ctrs: QuantMap[K, Counter], key: K) = - if (areStatisticsLocallyEnabled && ctrs != null) ctrs(key).value += 1 + if (enabled && ctrs != null) ctrs(key).value += 1 /** If enabled, start subcounter. While active it will track all increments of * its base counter. */ @inline final def startCounter(sc: SubCounter): (Int, Int) = - if (areStatisticsLocallyEnabled && sc != null) sc.start() else null + if (enabled && sc != null) sc.start() else null /** If enabled, stop subcounter from tracking its base counter. */ @inline final def stopCounter(sc: SubCounter, start: (Int, Int)): Unit = { - if (areStatisticsLocallyEnabled && sc != null) sc.stop(start) + if (enabled && sc != null) sc.stop(start) } /** If enabled, start timer */ @inline final def startTimer(tm: Timer): TimerSnapshot = - if (areStatisticsLocallyEnabled && tm != null) tm.start() else null + if (enabled && tm != null) tm.start() else null /** If enabled, stop timer */ @inline final def stopTimer(tm: Timer, start: TimerSnapshot): Unit = { - if (areStatisticsLocallyEnabled && tm != null) tm.stop(start) + if (enabled && tm != null) tm.stop(start) } /** If enabled, push and start a new timer in timer stack */ @inline final def pushTimer(timers: TimerStack, timer: => StackableTimer): TimerSnapshot = - if (areStatisticsLocallyEnabled && timers != null) timers.push(timer) else null + if (enabled && timers != null) timers.push(timer) else null /** If enabled, stop and pop timer from timer stack */ @inline final def popTimer(timers: TimerStack, prev: TimerSnapshot): Unit = { - if (areStatisticsLocallyEnabled && timers != null) timers.pop(prev) + if (enabled && timers != null) timers.pop(prev) } /** Create a new counter that shows as `prefix` and is active in given phases */ @@ -294,29 +286,8 @@ quant) } private[this] val qs = new mutable.HashMap[String, Quantity] - private[scala] var areColdStatsLocallyEnabled: Boolean = false - private[scala] var areHotStatsLocallyEnabled: Boolean = false - - /** Represents whether normal statistics can or cannot be enabled. */ - @inline final def enabled: Boolean = areColdStatsLocallyEnabled - def enabled_=(cond: Boolean) = { - if (cond && !enabled) { - StatisticsStatics.enableColdStats() - areColdStatsLocallyEnabled = true - } - } - - /** Represents whether hot statistics can or cannot be enabled. */ - @inline final def hotEnabled: Boolean = enabled && areHotStatsLocallyEnabled - def hotEnabled_=(cond: Boolean) = { - if (cond && enabled && !areHotStatsLocallyEnabled) { - StatisticsStatics.enableHotStats() - areHotStatsLocallyEnabled = true - } - } - /** Tells whether statistics should be definitely reported to the user for this `Global` instance. */ - @inline final def areStatisticsLocallyEnabled: Boolean = areColdStatsLocallyEnabled + @inline final def enabled: Boolean = settings.areStatisticsEnabled import scala.reflect.internal.Reporter /** Reports the overhead of measuring statistics via the nanoseconds variation. 
*/ diff --git a/src/reflect/scala/reflect/internal/util/StatisticsStatics.java b/src/reflect/scala/reflect/internal/util/StatisticsStatics.java index dc9021471d87..76c1644e18bf 100644 --- a/src/reflect/scala/reflect/internal/util/StatisticsStatics.java +++ b/src/reflect/scala/reflect/internal/util/StatisticsStatics.java @@ -12,7 +12,6 @@ package scala.reflect.internal.util; -import scala.reflect.internal.util.AlmostFinalValue; import java.lang.invoke.MethodHandle; /** @@ -22,46 +21,18 @@ * which helps performance (see docs to find out why). */ public final class StatisticsStatics { - private static final AlmostFinalValue COLD_STATS = new AlmostFinalValue() { - @Override - protected boolean initialValue() { - return false; - } - }; - - private static final AlmostFinalValue HOT_STATS = new AlmostFinalValue() { - @Override - protected boolean initialValue() { - return false; - } - }; - - private static final MethodHandle COLD_STATS_GETTER = COLD_STATS.createGetter(); - private static final MethodHandle HOT_STATS_GETTER = HOT_STATS.createGetter(); - - public static boolean areSomeColdStatsEnabled() throws Throwable { - return (boolean) COLD_STATS_GETTER.invokeExact(); - } - - public static boolean areSomeHotStatsEnabled() throws Throwable { - return (boolean) HOT_STATS_GETTER.invokeExact(); - } - - public static void enableColdStats() throws Throwable { - if (!areSomeColdStatsEnabled()) - COLD_STATS.setValue(true); - } - - public static void disableColdStats() { - COLD_STATS.setValue(false); - } - - public static void enableHotStats() throws Throwable { - if (!areSomeHotStatsEnabled()) - HOT_STATS.setValue(true); - } - - public static void disableHotStats() { - HOT_STATS.setValue(false); - } + private static final AlmostFinalValue COLD_STATS = new AlmostFinalValue(); + private static final AlmostFinalValue HOT_STATS = new AlmostFinalValue(); + private static final AlmostFinalValue DEBUG = new AlmostFinalValue(); + private static final AlmostFinalValue DEVELOPER = new AlmostFinalValue(); + + public static final MethodHandle COLD_STATS_GETTER = COLD_STATS.invoker; + public static final MethodHandle HOT_STATS_GETTER = HOT_STATS.invoker; + public static final MethodHandle DEBUG_GETTER = DEBUG.invoker; + public static final MethodHandle DEVELOPER_GETTER = DEVELOPER.invoker; + + public static void enableColdStatsAndDeoptimize() { COLD_STATS.toggleOnAndDeoptimize(); } + public static void enableHotStatsAndDeoptimize() { HOT_STATS.toggleOnAndDeoptimize(); } + public static void enableDebugAndDeoptimize() { DEBUG.toggleOnAndDeoptimize(); } + public static void enableDeveloperAndDeoptimize() { DEVELOPER.toggleOnAndDeoptimize(); } } diff --git a/src/reflect/scala/reflect/io/AbstractFile.scala b/src/reflect/scala/reflect/io/AbstractFile.scala index d97e6d23e5ec..72736bfb2f26 100644 --- a/src/reflect/scala/reflect/io/AbstractFile.scala +++ b/src/reflect/scala/reflect/io/AbstractFile.scala @@ -121,7 +121,7 @@ abstract class AbstractFile extends AbstractIterable[AbstractFile] { /** Does this abstract file denote an existing file? 
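// Editorial aside, not part of the patch: a minimal Scala sketch (invented names) of the
// "mostly constant" pattern behind the reworked AlmostFinalValue/StatisticsStatics above.
// The flag starts out as a constant-folded `false` and can only ever be flipped on; the
// flip swaps the call site target and invalidates compiled code that inlined the old value.
import java.lang.invoke.{MethodHandle, MethodHandles, MutableCallSite}

final class MostlyFalseFlag {
  private val kFalse = MethodHandles.constant(classOf[Boolean], java.lang.Boolean.FALSE)
  private val kTrue  = MethodHandles.constant(classOf[Boolean], java.lang.Boolean.TRUE)
  private val callsite = new MutableCallSite(kFalse)
  val invoker: MethodHandle = callsite.dynamicInvoker()

  def enableAndDeoptimize(): Unit =
    if (callsite.getTarget ne kTrue) {
      callsite.setTarget(kTrue)
      MutableCallSite.syncAll(Array(callsite))   // flush the stale constant out of compiled code
    }

  // Readers go through the invoker; the ascription gives the call the exact ()Z shape.
  def isEnabled: Boolean = (invoker.invokeExact(): Boolean)
}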
*/ def exists: Boolean = { - //if (StatisticsStatics.areSomeColdStatsEnabled) statistics.incCounter(IOStats.fileExistsCount) + //if (settings.areStatisticsEnabled) statistics.incCounter(IOStats.fileExistsCount) (file eq null) || file.exists } diff --git a/src/reflect/scala/reflect/io/Path.scala b/src/reflect/scala/reflect/io/Path.scala index 26bef55f5796..361805ba8955 100644 --- a/src/reflect/scala/reflect/io/Path.scala +++ b/src/reflect/scala/reflect/io/Path.scala @@ -64,12 +64,12 @@ object Path { def apply(path: String): Path = apply(new JFile(path)) def apply(jfile: JFile): Path = try { def isFile = { - //if (StatisticsStatics.areSomeColdStatsEnabled) Statistics.incCounter(IOStats.fileIsFileCount) + //if (settings.areStatisticsEnabled) Statistics.incCounter(IOStats.fileIsFileCount) jfile.isFile } def isDirectory = { - //if (StatisticsStatics.areSomeColdStatsEnabled) Statistics.incCounter(IOStats.fileIsDirectoryCount) + //if (settings.areStatisticsEnabled) Statistics.incCounter(IOStats.fileIsDirectoryCount) jfile.isDirectory } @@ -206,16 +206,16 @@ class Path private[io] (val jfile: JFile) { def canRead = jfile.canRead() def canWrite = jfile.canWrite() def exists = { - //if (StatisticsStatics.areSomeColdStatsEnabled) Statistics.incCounter(IOStats.fileExistsCount) + //if (settings.areStatisticsEnabled) Statistics.incCounter(IOStats.fileExistsCount) try jfile.exists() catch { case ex: SecurityException => false } } def isFile = { - //if (StatisticsStatics.areSomeColdStatsEnabled) Statistics.incCounter(IOStats.fileIsFileCount) + //if (settings.areStatisticsEnabled) Statistics.incCounter(IOStats.fileIsFileCount) try jfile.isFile() catch { case ex: SecurityException => false } } def isDirectory = { - //if (StatisticsStatics.areSomeColdStatsEnabled) Statistics.incCounter(IOStats.fileIsDirectoryCount) + //if (settings.areStatisticsEnabled) Statistics.incCounter(IOStats.fileIsDirectoryCount) try jfile.isDirectory() catch { case ex: SecurityException => jfile.getPath == "." } } def isAbsolute = jfile.isAbsolute() diff --git a/src/reflect/scala/reflect/io/ZipArchive.scala b/src/reflect/scala/reflect/io/ZipArchive.scala index 24452194f190..a101656e3d17 100644 --- a/src/reflect/scala/reflect/io/ZipArchive.scala +++ b/src/reflect/scala/reflect/io/ZipArchive.scala @@ -17,6 +17,7 @@ package io import java.net.URL import java.io.{ByteArrayInputStream, FilterInputStream, IOException, InputStream} import java.io.{File => JFile} +import java.util.concurrent.{ArrayBlockingQueue, TimeUnit} import java.util.zip.{ZipEntry, ZipFile, ZipInputStream} import java.util.jar.Manifest @@ -24,7 +25,6 @@ import scala.annotation.tailrec import scala.collection.mutable import scala.jdk.CollectionConverters._ import scala.reflect.internal.JDK9Reflectors - import ZipArchive._ /** An abstraction for zip files and streams. 
Everything is written the way @@ -157,6 +157,31 @@ abstract class ZipArchive(override val file: JFile, release: Option[String]) ext /** ''Note: This library is considered experimental and should not be used unless you know what you are doing.'' */ final class FileZipArchive(file: JFile, release: Option[String]) extends ZipArchive(file, release) { def this(file: JFile) = this(file, None) + private object zipFilePool { + private[this] val zipFiles = new ArrayBlockingQueue[ZipFile](Runtime.getRuntime.availableProcessors()) + + def acquire: ZipFile = { + val zf = zipFiles.poll(0, TimeUnit.MILLISECONDS) + zf match { + case null => + openZipFile() + case _ => + zf + } + } + + def release(zf: ZipFile): Unit = { + if (!zipFiles.offer(zf, 0, TimeUnit.MILLISECONDS)) + zf.close() + } + + def close(): Unit = { + val zipFilesToClose = new java.util.ArrayList[ZipFile] + zipFiles.drainTo(zipFilesToClose) + zipFilesToClose.iterator().forEachRemaining(_.close()) + } + } + private[this] def openZipFile(): ZipFile = try { release match { case Some(r) if file.getName.endsWith(".jar") => @@ -186,18 +211,28 @@ final class FileZipArchive(file: JFile, release: Option[String]) extends ZipArch override def sizeOption: Option[Int] = Some(size) // could be stale } - // keeps a file handle open to ZipFile, which forbids file mutation - // on Windows, and leaks memory on all OS (typically by stopping - // classloaders from being garbage collected). But is slightly - // faster than LazyEntry. + // keeps file handle(s) open to ZipFile in the pool this.zipFiles, + // which forbids file mutation on Windows, and leaks memory on all OS (typically by stopping + // classloaders from being garbage collected). But is slightly faster than LazyEntry. + // + // Note: scala/scala#7366 / scala/scala#7644, LeakyEntry _does_ close the file when `Global.close` is called, + // or after a short delay specified by FileBasedCache.deferCloseMs if classpath caching is enabled. + // So the file handle "leak" is far less a problem than it used do be. 
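// Editorial aside, not part of the patch: a simplified, generic model of the zipFilePool
// above (invented names). The pool is bounded and non-blocking: acquiring from an empty
// pool opens a fresh handle, and returning a handle to a full pool closes the surplus one.
import java.util.concurrent.ArrayBlockingQueue

final class HandlePool[A <: AutoCloseable](capacity: Int)(open: () => A) {
  private val pool = new ArrayBlockingQueue[A](capacity)

  def acquire(): A = {
    val h = pool.poll()                  // non-blocking; null when the pool is empty
    if (h == null) open() else h
  }

  def release(h: A): Unit =
    if (!pool.offer(h)) h.close()        // pool already full: drop this handle

  def close(): Unit = {
    var h = pool.poll()
    while (h != null) { h.close(); h = pool.poll() }
  }
}

// A caller acquires a handle, reads from it, and releases it when done; LeakyEntry below
// releases only when the returned input stream is closed.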
private[this] class LeakyEntry( - zipFile: ZipFile, - zipEntry: ZipEntry, - name: String + name: String, + time: Long, + size: Int ) extends Entry(name) { - override def lastModified: Long = zipEntry.getTime - override def input: InputStream = zipFile.getInputStream(zipEntry) - override def sizeOption: Option[Int] = Some(zipEntry.getSize.toInt) + override def lastModified: Long = time // could be stale + override def input: InputStream = { + val zipFile = zipFilePool.acquire + val entry = zipFile.getEntry(name) // with `-release`, returns the correct version under META-INF/versions + val delegate = zipFile.getInputStream(entry) + new FilterInputStream(delegate) { + override def close(): Unit = { zipFilePool.release(zipFile) } + } + } + override def sizeOption: Option[Int] = Some(size) } private[this] val dirs = new java.util.HashMap[String, DirEntry]() @@ -205,34 +240,35 @@ final class FileZipArchive(file: JFile, release: Option[String]) extends ZipArch val root = new DirEntry(RootEntry) dirs.put(RootEntry, root) val zipFile = openZipFile() - val enum = zipFile.entries() + val entries = zipFile.entries() try { - while (enum.hasMoreElements) { - val zipEntry = enum.nextElement + while (entries.hasMoreElements) { + val zipEntry = entries.nextElement if (!zipEntry.getName.startsWith("META-INF/versions/")) { - val zipEntryVersioned = if (release.isDefined) { - // JARFile will return the entry for the corresponding release-dependent version here under META-INF/versions - zipFile.getEntry(zipEntry.getName) - } else zipEntry if (!zipEntry.isDirectory) { val dir = getDir(dirs, zipEntry) + val mrEntry = if (release.isDefined) { + zipFile.getEntry(zipEntry.getName) + } else zipEntry val f = if (ZipArchive.closeZipFile) new LazyEntry( zipEntry.getName, - zipEntry.getTime, - zipEntry.getSize.toInt) + mrEntry.getTime, + mrEntry.getSize.toInt) else - new LeakyEntry(zipFile, zipEntryVersioned, zipEntry.getName) + new LeakyEntry(zipEntry.getName, + mrEntry.getTime, + mrEntry.getSize.toInt) dir.entries(f.name) = f } } } } finally { - if (ZipArchive.closeZipFile) zipFile.close() - else closeables ::= zipFile + if (!ZipArchive.closeZipFile) + zipFilePool.release(zipFile) } root } @@ -253,9 +289,8 @@ final class FileZipArchive(file: JFile, release: Option[String]) extends ZipArch case x: FileZipArchive => file.getAbsoluteFile == x.file.getAbsoluteFile case _ => false } - private[this] var closeables: List[java.io.Closeable] = Nil override def close(): Unit = { - closeables.foreach(_.close) + zipFilePool.close() } } /** ''Note: This library is considered experimental and should not be used unless you know what you are doing.'' */ diff --git a/src/reflect/scala/reflect/macros/Attachments.scala b/src/reflect/scala/reflect/macros/Attachments.scala index 688721e410e3..05318a84ba5f 100644 --- a/src/reflect/scala/reflect/macros/Attachments.scala +++ b/src/reflect/scala/reflect/macros/Attachments.scala @@ -15,6 +15,7 @@ package reflect package macros import reflect.internal.util.Position +import scala.runtime.ClassValueCompat /** * EXPERIMENTAL @@ -109,7 +110,7 @@ abstract class Attachments { self => } private object Attachments { - private val matchesTagCache = new ClassValue[Function1[Any, Boolean]] { + private val matchesTagCache = new ClassValueCompat[Function1[Any, Boolean]] { override def computeValue(cls: Class[_]): Function[Any, Boolean] = cls.isInstance(_) } } diff --git a/src/reflect/scala/reflect/macros/Typers.scala b/src/reflect/scala/reflect/macros/Typers.scala index 9a0904a1a47e..e702f21ebbb1 100644 --- 
a/src/reflect/scala/reflect/macros/Typers.scala +++ b/src/reflect/scala/reflect/macros/Typers.scala @@ -91,7 +91,7 @@ trait Typers { * * If `silent` is false, `TypecheckException` will be thrown in case of an inference error. * If `silent` is true, the typecheck is silent and will return `EmptyTree` if an error occurs. - * Such errors don't vanish and can be inspected by turning on -Xlog-implicits. + * Such errors don't vanish and can be inspected by turning on -Vimplicits. * Unlike in `typecheck`, `silent` is true by default. * * @throws scala.reflect.macros.TypecheckException @@ -103,7 +103,7 @@ trait Typers { * * If `silent` is false, `TypecheckException` will be thrown in case of an inference error. * If `silent` is true, the typecheck is silent and will return `EmptyTree` if an error occurs. - * Such errors don't vanish and can be inspected by turning on -Xlog-implicits. + * Such errors don't vanish and can be inspected by turning on -Vimplicits. * Unlike in `typecheck`, `silent` is true by default. * * @throws scala.reflect.macros.TypecheckException diff --git a/src/reflect/scala/reflect/runtime/JavaMirrors.scala b/src/reflect/scala/reflect/runtime/JavaMirrors.scala index 3d7b7bcd8947..69ff6474c8cb 100644 --- a/src/reflect/scala/reflect/runtime/JavaMirrors.scala +++ b/src/reflect/scala/reflect/runtime/JavaMirrors.scala @@ -38,7 +38,7 @@ import internal.Flags._ import ReflectionUtils._ import scala.annotation.nowarn import scala.reflect.api.TypeCreator -import scala.runtime.{ BoxesRunTime, ScalaRunTime } +import scala.runtime.{BoxesRunTime, ClassValueCompat, ScalaRunTime} private[scala] trait JavaMirrors extends internal.SymbolTable with api.JavaUniverse with TwoWayCaches { thisUniverse: SymbolTable => @@ -120,7 +120,7 @@ private[scala] trait JavaMirrors extends internal.SymbolTable with api.JavaUnive private[this] val fieldCache = new TwoWayCache[jField, TermSymbol] private[this] val tparamCache = new TwoWayCache[jTypeVariable[_ <: GenericDeclaration], TypeSymbol] - private[this] object typeTagCache extends ClassValue[jWeakReference[TypeTag[_]]]() { + private[this] object typeTagCache extends ClassValueCompat[jWeakReference[TypeTag[_]]]() { val typeCreator = new ThreadLocal[TypeCreator]() override protected def computeValue(cls: jClass[_]): jWeakReference[TypeTag[_]] = { @@ -195,9 +195,9 @@ private[scala] trait JavaMirrors extends internal.SymbolTable with api.JavaUnive object AnnotationClass { def unapply(x: jClass[_]) = x.isAnnotation } object ConstantArg { - def enumToSymbol(enum: Enum[_]): Symbol = { - val staticPartOfEnum = classToScala(enum.getClass).companionSymbol - staticPartOfEnum.info.declaration(TermName(enum.name)) + def enumToSymbol(`enum`: Enum[_]): Symbol = { + val staticPartOfEnum = classToScala(`enum`.getClass).companionSymbol + staticPartOfEnum.info.declaration(TermName(`enum`.name)) } def unapply(schemaAndValue: (jClass[_], Any)): Option[Any] = schemaAndValue match { @@ -638,7 +638,7 @@ private[scala] trait JavaMirrors extends internal.SymbolTable with api.JavaUnive def markAbsent(tpe: Type) = setAllInfos(clazz, module, tpe) def handleError(ex: Exception) = { markAbsent(ErrorType) - if (settings.debug) ex.printStackTrace() + if (settings.isDebug) ex.printStackTrace() val msg = ex.getMessage() MissingRequirementError.signal( (if (msg eq null) "reflection error while loading " + clazz.name diff --git a/src/reflect/scala/reflect/runtime/JavaUniverseForce.scala b/src/reflect/scala/reflect/runtime/JavaUniverseForce.scala index be33ed5a6651..c093aa14bd23 100644 --- 
a/src/reflect/scala/reflect/runtime/JavaUniverseForce.scala +++ b/src/reflect/scala/reflect/runtime/JavaUniverseForce.scala @@ -67,6 +67,7 @@ trait JavaUniverseForce { self: runtime.JavaUniverse => this.UseInvokeSpecial this.TypeParamVarargsAttachment this.KnownDirectSubclassesCalled + this.DottyEnumSingleton this.ConstructorNeedsFence this.MultiargInfixAttachment this.NullaryOverrideAdapted @@ -204,6 +205,7 @@ trait JavaUniverseForce { self: runtime.JavaUniverse => this.abstractTypesToBounds this.dropIllegalStarTypes this.wildcardExtrapolation + this.SubstSymMap this.IsDependentCollector this.ApproximateDependentMap this.identityTypeMap @@ -290,6 +292,7 @@ trait JavaUniverseForce { self: runtime.JavaUniverse => definitions.JavaEnumClass definitions.JavaUtilMap definitions.JavaUtilHashMap + definitions.JavaRecordClass definitions.ByNameParamClass definitions.JavaRepeatedParamClass definitions.RepeatedParamClass @@ -520,9 +523,12 @@ trait JavaUniverseForce { self: runtime.JavaUniverse => uncurry.DesugaredParameterType erasure.GenericArray erasure.scalaErasure + erasure.scala3Erasure erasure.specialScalaErasure + erasure.specialScala3Erasure erasure.javaErasure erasure.verifiedJavaErasure erasure.boxingErasure + erasure.boxing3Erasure } } diff --git a/src/reflect/scala/reflect/runtime/ReflectionUtils.scala b/src/reflect/scala/reflect/runtime/ReflectionUtils.scala index 671376c2a3a0..820cad5c9b0b 100644 --- a/src/reflect/scala/reflect/runtime/ReflectionUtils.scala +++ b/src/reflect/scala/reflect/runtime/ReflectionUtils.scala @@ -49,7 +49,7 @@ object ReflectionUtils { isAbstractFileClassLoader(clazz.getSuperclass) } def inferClasspath(cl: ClassLoader): String = cl match { - case cl: java.net.URLClassLoader => + case cl: java.net.URLClassLoader if cl.getURLs != null => (cl.getURLs mkString ",") case cl if cl != null && isAbstractFileClassLoader(cl.getClass) => cl.asInstanceOf[{val root: scala.reflect.io.AbstractFile}].root.canonicalPath diff --git a/src/reflect/scala/reflect/runtime/Settings.scala b/src/reflect/scala/reflect/runtime/Settings.scala index 4b8b771f52c5..56786a5581d9 100644 --- a/src/reflect/scala/reflect/runtime/Settings.scala +++ b/src/reflect/scala/reflect/runtime/Settings.scala @@ -15,6 +15,7 @@ package reflect package runtime import scala.reflect.internal.settings.MutableSettings +import scala.reflect.internal.util.StatisticsStatics /** The Settings class for runtime reflection. 
* This should be refined, so that settings are settable via command @@ -50,17 +51,17 @@ private[reflect] class Settings extends MutableSettings { val Yshowsymowners = new BooleanSetting(false) val Yshowsymkinds = new BooleanSetting(false) val breakCycles = new BooleanSetting(false) - val debug = new BooleanSetting(false) - val developer = new BooleanSetting(false) + val debug = new BooleanSetting(false) { override def postSetHook() = if (v) StatisticsStatics.enableDebugAndDeoptimize() } + val developer = new BooleanSetting(false) { override def postSetHook() = if (v) StatisticsStatics.enableDeveloperAndDeoptimize() } val explaintypes = new BooleanSetting(false) val printtypes = new BooleanSetting(false) val uniqid = new BooleanSetting(false) val verbose = new BooleanSetting(false) - val YhotStatisticsEnabled = new BooleanSetting(false) - val YstatisticsEnabled = new BooleanSetting(false) + val YhotStatisticsEnabled = new BooleanSetting(false) { override def postSetHook() = if (v && YstatisticsEnabled) StatisticsStatics.enableHotStatsAndDeoptimize() } + val YstatisticsEnabled = new BooleanSetting(false) { override def postSetHook() = if (v) StatisticsStatics.enableColdStatsAndDeoptimize() } - val Yrecursion = new IntSetting(0) - def isScala212 = true - private[scala] def isScala213 = true + val Yrecursion = new IntSetting(0) + def isScala212 = true + def isScala213 = true } diff --git a/src/reflect/scala/reflect/runtime/SymbolTable.scala b/src/reflect/scala/reflect/runtime/SymbolTable.scala index 3bb674953521..ccb94eb2dec0 100644 --- a/src/reflect/scala/reflect/runtime/SymbolTable.scala +++ b/src/reflect/scala/reflect/runtime/SymbolTable.scala @@ -25,7 +25,7 @@ private[scala] trait SymbolTable extends internal.SymbolTable with JavaMirrors w if (settings.verbose) println("[reflect-compiler] "+msg) def debugInfo(msg: => String) = - if (settings.debug) info(msg) + if (settings.isDebug) info(msg) /** Declares that this is a runtime reflection universe. * diff --git a/src/repl-frontend/scala/tools/nsc/interpreter/jline/Reader.scala b/src/repl-frontend/scala/tools/nsc/interpreter/jline/Reader.scala index d03cb7c83de8..2825764f5a5d 100644 --- a/src/repl-frontend/scala/tools/nsc/interpreter/jline/Reader.scala +++ b/src/repl-frontend/scala/tools/nsc/interpreter/jline/Reader.scala @@ -13,13 +13,21 @@ package scala.tools.nsc.interpreter package jline +import org.jline.builtins.InputRC +import org.jline.keymap.KeyMap import org.jline.reader.Parser.ParseContext -import org.jline.reader.impl.{DefaultParser, LineReaderImpl} import org.jline.reader._ +import org.jline.reader.impl.{CompletionMatcherImpl, DefaultParser, LineReaderImpl} import org.jline.terminal.Terminal +import java.io.{ByteArrayInputStream, File} +import java.net.{MalformedURLException, URL} import java.util.{List => JList} +import scala.io.Source +import scala.reflect.internal.Chars import scala.tools.nsc.interpreter.shell.{Accumulator, ShellConfig} +import scala.util.Using +import scala.util.control.NonFatal /** A Reader that delegates to JLine3. */ @@ -38,7 +46,7 @@ class Reader private ( case _: EndOfFileException | _: UserInterruptException => reader.getBuffer.delete() ; null } } - def redrawLine(): Unit = ??? 
+ def redrawLine(): Unit = () //see https://github.com/scala/bug/issues/12395, SimpleReader#redrawLine also use `()` def reset(): Unit = accumulator.reset() override def close(): Unit = terminal.close() @@ -68,6 +76,31 @@ object Reader { System.setProperty(LineReader.PROP_SUPPORT_PARSEDLINE, java.lang.Boolean.TRUE.toString()) + def inputrcFileUrl(): Option[URL] = { + sys.props + .get("jline.inputrc") + .flatMap { path => + try Some(new URL(path)) + catch { + case _: MalformedURLException => + Some(new File(path).toURI.toURL) + } + }.orElse { + sys.props.get("user.home").map { home => + val f = new File(home).toPath.resolve(".inputrc").toFile + (if (f.isFile) f else new File("/etc/inputrc")).toURI.toURL + } + } + } + + def urlByteArray(url: URL): Array[Byte] = { + Using.resource(Source.fromURL(url).bufferedReader()) { + bufferedReader => + LazyList.continually(bufferedReader.read).takeWhile(_ != -1).map(_.toByte).toArray + } + } + + lazy val inputrcFileContents: Option[Array[Byte]] = inputrcFileUrl().map(in => urlByteArray(in)) val jlineTerminal = TerminalBuilder.builder().jna(true).build() val completer = new Completion(completion) val parser = new ReplParser(repl) @@ -91,14 +124,68 @@ object Reader { .variable(SECONDARY_PROMPT_PATTERN, config.encolor(config.continueText)) // Continue prompt .variable(WORDCHARS, LineReaderImpl.DEFAULT_WORDCHARS.filterNot("*?.[]~=/&;!#%^(){}<>".toSet)) .option(Option.DISABLE_EVENT_EXPANSION, true) // Otherwise `scala> println(raw"\n".toList)` gives `List(n)` !! + .option(Option.COMPLETE_MATCHER_CAMELCASE, true) + .option(Option.COMPLETE_MATCHER_TYPO, true) + } + object customCompletionMatcher extends CompletionMatcherImpl { + override def compile(options: java.util.Map[LineReader.Option, java.lang.Boolean], prefix: Boolean, line: CompletingParsedLine, caseInsensitive: Boolean, errors: Int, originalGroupName: String): Unit = { + val errorsReduced = line.wordCursor() match { + case 0 | 1 | 2 | 3 => 0 // disable JLine's levenshtein-distance based typo matcher for short strings + case 4 | 5 => math.max(errors, 1) + case _ => errors + } + super.compile(options, prefix, line, caseInsensitive, errorsReduced, originalGroupName) + } + + override def matches(candidates: JList[Candidate]): JList[Candidate] = { + val matching = super.matches(candidates) + matching + } } + builder.completionMatcher(customCompletionMatcher) + val reader = builder.build() + try inputrcFileContents.foreach(f => InputRC.configure(reader, new ByteArrayInputStream(f))) catch { + case NonFatal(_) => + } //ignore + + val keyMap = reader.getKeyMaps.get("main") + + object ScalaShowType { + val Name = "scala-show-type" + private var lastInvokeLocation: Option[(String, Int)] = None + def apply(): Boolean = { + val nextInvokeLocation = Some((reader.getBuffer.toString, reader.getBuffer.cursor())) + val cursor = reader.getBuffer.cursor() + val text = reader.getBuffer.toString + val result = completer.complete(text, cursor, filter = true) + if (lastInvokeLocation == nextInvokeLocation) { + show(Naming.unmangle(result.typedTree)) + lastInvokeLocation = None + } else { + show(result.typeAtCursor) + lastInvokeLocation = nextInvokeLocation + } + true + } + def show(text: String): Unit = if (text != "") { + reader.callWidget(LineReader.CLEAR) + reader.getTerminal.writer.println() + reader.getTerminal.writer.println(text) + reader.callWidget(LineReader.REDRAW_LINE) + reader.callWidget(LineReader.REDISPLAY) + reader.getTerminal.flush() + } + } + reader.getWidgets().put(ScalaShowType.Name, 
() => ScalaShowType()) + locally { import LineReader._ // VIINS, VICMD, EMACS val keymap = if (config.viMode) VIINS else EMACS reader.getKeyMaps.put(MAIN, reader.getKeyMaps.get(keymap)); + keyMap.bind(new Reference(ScalaShowType.Name), KeyMap.alt(KeyMap.ctrl('t'))) } def secure(p: java.nio.file.Path): Unit = { try scala.reflect.internal.util.OwnerOnlyChmod.chmodFileOrCreateEmpty(p) @@ -167,6 +254,12 @@ object Reader { val (wordCursor, wordIndex) = current match { case Some(t) if t.isIdentifier => (cursor - t.start, tokens.indexOf(t)) + case Some(t) => + val isIdentifierStartKeyword = (t.start until t.end).forall(i => Chars.isIdentifierPart(line.charAt(i))) + if (isIdentifierStartKeyword) + (cursor - t.start, tokens.indexOf(t)) + else + (0, -1) case _ => (0, -1) } @@ -225,45 +318,51 @@ object Reader { class Completion(delegate: shell.Completion) extends shell.Completion with Completer { require(delegate != null) // REPL Completion - def complete(buffer: String, cursor: Int): shell.CompletionResult = delegate.complete(buffer, cursor) + def complete(buffer: String, cursor: Int, filter: Boolean): shell.CompletionResult = delegate.complete(buffer, cursor, filter) // JLine Completer def complete(lineReader: LineReader, parsedLine: ParsedLine, newCandidates: JList[Candidate]): Unit = { - def candidateForResult(line: String, cc: CompletionCandidate): Candidate = { - val value = if (line.startsWith(":")) ":" + cc.defString else cc.defString - val displayed = cc.defString + (cc.arity match { + def candidateForResult(cc: CompletionCandidate, deprecated: Boolean, universal: Boolean): Candidate = { + val value = cc.name + val displayed = cc.name + (cc.arity match { case CompletionCandidate.Nullary => "" case CompletionCandidate.Nilary => "()" case _ => "(" }) val group = null // results may be grouped val descr = // displayed alongside - if (cc.isDeprecated) "deprecated" - else if (cc.isUniversal) "universal" + if (deprecated) "deprecated" + else if (universal) "universal" else null val suffix = null // such as slash after directory name val key = null // same key implies mergeable result val complete = false // more to complete? new Candidate(value, displayed, group, descr, suffix, key, complete) } - val result = complete(parsedLine.line, parsedLine.cursor) - result.candidates.map(_.defString) match { - // the presence of the empty string here is a signal that the symbol - // is already complete and so instead of completing, we want to show - // the user the method signature. there are various JLine 3 features - // one might use to do this instead; sticking to basics for now - case "" :: defStrings if defStrings.nonEmpty => - // specifics here are cargo-culted from Ammonite - lineReader.getTerminal.writer.println() - for (cc <- result.candidates.tail) - lineReader.getTerminal.writer.println(cc.defString) - lineReader.callWidget(LineReader.REDRAW_LINE) - lineReader.callWidget(LineReader.REDISPLAY) - lineReader.getTerminal.flush() - // normal completion - case _ => - for (cc <- result.candidates) - newCandidates.add(candidateForResult(result.line, cc)) + val result = complete(parsedLine.line, parsedLine.cursor, filter = false) + for (group <- result.candidates.groupBy(_.name)) { + // scala/bug#12238 + // Currently, only when all methods are Deprecated should they be displayed `Deprecated` to users. Only handle result of PresentationCompilation#toCandidates. + // We don't handle result of PresentationCompilation#defStringCandidates, because we need to show the deprecated here. 
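// Editorial aside, not part of the patch: the grouping rule applied just below, shown on
// invented data. An overload set is presented as deprecated (or universal) only when every
// candidate sharing that name is.
final case class Cand(name: String, isDeprecated: Boolean, isUniversal: Boolean)

def presentationFlags(cands: List[Cand]): Map[String, (Boolean, Boolean)] =
  cands.groupBy(_.name).map { case (name, group) =>
    (name, (group.forall(_.isDeprecated), group.forall(_.isUniversal)))
  }

// presentationFlags(List(
//   Cand("map", isDeprecated = true,  isUniversal = false),
//   Cand("map", isDeprecated = false, isUniversal = false)
// )) == Map("map" -> ((false, false)))   // one overload is still live, so the set is not flagged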
+ val allDeprecated = group._2.forall(_.isDeprecated) + val allUniversal = group._2.forall(_.isUniversal) + group._2.foreach(cc => newCandidates.add(candidateForResult(cc, allDeprecated, allUniversal))) + } + + val parsedLineWord = parsedLine.word() + result.candidates.filter(_.name == parsedLineWord) match { + case Nil => + case exacts => + val declStrings = exacts.map(_.declString()).filterNot(_ == "") + if (declStrings.nonEmpty) { + lineReader.callWidget(LineReader.CLEAR) + lineReader.getTerminal.writer.println() + for (declString <- declStrings) + lineReader.getTerminal.writer.println(declString) + lineReader.callWidget(LineReader.REDRAW_LINE) + lineReader.callWidget(LineReader.REDISPLAY) + lineReader.getTerminal.flush() + } } } } diff --git a/src/repl-frontend/scala/tools/nsc/interpreter/shell/Completion.scala b/src/repl-frontend/scala/tools/nsc/interpreter/shell/Completion.scala index 17f8c72eb57e..389dd194e824 100644 --- a/src/repl-frontend/scala/tools/nsc/interpreter/shell/Completion.scala +++ b/src/repl-frontend/scala/tools/nsc/interpreter/shell/Completion.scala @@ -14,22 +14,23 @@ package scala.tools.nsc.interpreter package shell trait Completion { - def complete(buffer: String, cursor: Int): CompletionResult + final def complete(buffer: String, cursor: Int): CompletionResult = complete(buffer, cursor, filter = true) + def complete(buffer: String, cursor: Int, filter: Boolean): CompletionResult } object NoCompletion extends Completion { - def complete(buffer: String, cursor: Int) = NoCompletions + def complete(buffer: String, cursor: Int, filter: Boolean) = NoCompletions } -case class CompletionResult(line: String, cursor: Int, candidates: List[CompletionCandidate]) { +case class CompletionResult(line: String, cursor: Int, candidates: List[CompletionCandidate], typeAtCursor: String = "", typedTree: String = "") { final def orElse(other: => CompletionResult): CompletionResult = if (candidates.nonEmpty) this else other } object CompletionResult { val empty: CompletionResult = NoCompletions } -object NoCompletions extends CompletionResult("", -1, Nil) +object NoCompletions extends CompletionResult("", -1, Nil, "", "") case class MultiCompletion(underlying: Completion*) extends Completion { - override def complete(buffer: String, cursor: Int) = - underlying.foldLeft(CompletionResult.empty)((r, c) => r.orElse(c.complete(buffer, cursor))) + override def complete(buffer: String, cursor: Int, filter: Boolean) = + underlying.foldLeft(CompletionResult.empty)((r,c) => r.orElse(c.complete(buffer, cursor, filter))) } diff --git a/src/repl-frontend/scala/tools/nsc/interpreter/shell/ILoop.scala b/src/repl-frontend/scala/tools/nsc/interpreter/shell/ILoop.scala index aece63c03b50..8f51bc84e691 100644 --- a/src/repl-frontend/scala/tools/nsc/interpreter/shell/ILoop.scala +++ b/src/repl-frontend/scala/tools/nsc/interpreter/shell/ILoop.scala @@ -228,7 +228,7 @@ class ILoop(config: ShellConfig, inOverride: BufferedReader = null, .map(d => CompletionResult(buffer, i, d.toDirectory.list.map(x => CompletionCandidate(x.name)).toList)) .getOrElse(NoCompletions) def listedIn(dir: Directory, name: String) = dir.list.filter(_.name.startsWith(name)).map(_.name).toList - def complete(buffer: String, cursor: Int): CompletionResult = + def complete(buffer: String, cursor: Int, filter: Boolean): CompletionResult = buffer.substring(0, cursor) match { case emptyWord(s) => listed(buffer, cursor, Directory.Current) case directorily(s) => listed(buffer, cursor, Option(Path(s))) @@ -247,13 +247,13 @@ class ILoop(config: 
ShellConfig, inOverride: BufferedReader = null, // complete settings name val settingsCompletion: Completion = new Completion { val trailingWord = """(\S+)$""".r.unanchored - def complete(buffer: String, cursor: Int): CompletionResult = { + def complete(buffer: String, cursor: Int, filter: Boolean): CompletionResult = { buffer.substring(0, cursor) match { case trailingWord(s) => - val maybes = intp.visibleSettings.filter(_.name.startsWith(s)).map(_.name) + val maybes = intp.visibleSettings.filter(x => if (filter) x.name.startsWith(s) else true).map(_.name) .filterNot(cond(_) { case "-"|"-X"|"-Y" => true }).sorted if (maybes.isEmpty) NoCompletions - else CompletionResult(buffer, cursor - s.length, maybes.map(CompletionCandidate(_))) + else CompletionResult(buffer, cursor - s.length, maybes.map(CompletionCandidate(_)), "", "") case _ => NoCompletions } } @@ -541,8 +541,8 @@ class ILoop(config: ShellConfig, inOverride: BufferedReader = null, MultiCompletion(shellCompletion, rc) } val shellCompletion = new Completion { - override def complete(buffer: String, cursor: Int) = - if (buffer.startsWith(":")) colonCompletion(buffer, cursor).complete(buffer, cursor) + override def complete(buffer: String, cursor: Int, filter: Boolean) = + if (buffer.startsWith(":")) colonCompletion(buffer, cursor).complete(buffer, cursor, filter) else NoCompletions } @@ -554,13 +554,13 @@ class ILoop(config: ShellConfig, inOverride: BufferedReader = null, // condition here is a bit weird because of the weird hack we have where // the first candidate having an empty defString means it's not really // completion, but showing the method signature instead - if (candidates.headOption.exists(_.defString.nonEmpty)) { + if (candidates.headOption.exists(_.name.nonEmpty)) { val prefix = if (completions == NoCompletions) "" else what.substring(0, completions.cursor) // hvesalai (emacs sbt-mode maintainer) says it's important to echo only once and not per-line echo( - candidates.map(c => s"[completions] $prefix${c.defString}") + candidates.map(c => s"[completions] $prefix${c.name}") .mkString("\n") ) } diff --git a/src/repl-frontend/scala/tools/nsc/interpreter/shell/LoopCommands.scala b/src/repl-frontend/scala/tools/nsc/interpreter/shell/LoopCommands.scala index 1063971b5f2b..49c985dfdd78 100644 --- a/src/repl-frontend/scala/tools/nsc/interpreter/shell/LoopCommands.scala +++ b/src/repl-frontend/scala/tools/nsc/interpreter/shell/LoopCommands.scala @@ -14,7 +14,6 @@ package scala.tools.nsc.interpreter package shell import java.io.{PrintWriter => JPrintWriter} - import scala.language.implicitConversions import scala.collection.mutable.ListBuffer import scala.tools.nsc.interpreter.ReplStrings.words @@ -60,6 +59,7 @@ trait LoopCommands { // subclasses may provide completions def completion: Completion = NoCompletion + override def toString(): String = name } object LoopCommand { def nullary(name: String, help: String, f: () => Result): LoopCommand = @@ -91,6 +91,10 @@ trait LoopCommands { echo("All commands can be abbreviated, e.g., :he instead of :help.") for (cmd <- commands) echo(formatStr.format(cmd.usageMsg, cmd.help)) + echo("") + echo("Useful default key bindings:") + echo(" TAB code completion") + echo(" CTRL-ALT-T show type at cursor, hit again to show code with types/implicits inferred.") } def ambiguousError(cmd: String): Result = { matchingCommands(cmd) match { @@ -135,15 +139,15 @@ trait LoopCommands { case cmd :: Nil if !cursorAtName => cmd.completion case cmd :: Nil if cmd.name == name => NoCompletion case cmd :: Nil => 
- val completion = if (cmd.isInstanceOf[NullaryCmd] || cursor < line.length) cmd.name else cmd.name + " " + val completion = ":" + cmd.name new Completion { - def complete(buffer: String, cursor: Int) = - CompletionResult(buffer, cursor = 1, List(CompletionCandidate(completion))) + def complete(buffer: String, cursor: Int, filter: Boolean) = + CompletionResult(buffer, cursor = 1, List(CompletionCandidate(completion)), "", "") } case cmd :: rest => new Completion { - def complete(buffer: String, cursor: Int) = - CompletionResult(buffer, cursor = 1, cmds.map(cmd => CompletionCandidate(cmd.name))) + def complete(buffer: String, cursor: Int, filter: Boolean) = + CompletionResult(buffer, cursor = 1, cmds.map(cmd => CompletionCandidate(":" + cmd.name)), "", "") } } case _ => NoCompletion diff --git a/src/repl-frontend/scala/tools/nsc/interpreter/shell/ReplCompletion.scala b/src/repl-frontend/scala/tools/nsc/interpreter/shell/ReplCompletion.scala index afbc38103e4d..6aedd90048dc 100644 --- a/src/repl-frontend/scala/tools/nsc/interpreter/shell/ReplCompletion.scala +++ b/src/repl-frontend/scala/tools/nsc/interpreter/shell/ReplCompletion.scala @@ -19,7 +19,7 @@ import scala.util.control.NonFatal */ class ReplCompletion(intp: Repl, val accumulator: Accumulator = new Accumulator) extends Completion { - def complete(buffer: String, cursor: Int): CompletionResult = { + def complete(buffer: String, cursor: Int, filter: Boolean): CompletionResult = { // special case for: // // scala> 1 @@ -30,13 +30,13 @@ class ReplCompletion(intp: Repl, val accumulator: Accumulator = new Accumulator) val bufferWithMultiLine = accumulator.toString + bufferWithVar val cursor1 = cursor + (bufferWithMultiLine.length - buffer.length) - codeCompletion(bufferWithMultiLine, cursor1) + codeCompletion(bufferWithMultiLine, cursor1, filter) } // A convenience for testing def complete(before: String, after: String = ""): CompletionResult = complete(before + after, before.length) - private def codeCompletion(buf: String, cursor: Int): CompletionResult = { + private def codeCompletion(buf: String, cursor: Int, filter: Boolean): CompletionResult = { require(cursor >= 0 && cursor <= buf.length) // secret handshakes @@ -49,37 +49,24 @@ class ReplCompletion(intp: Repl, val accumulator: Accumulator = new Accumulator) case Right(result) => try { buf match { case slashPrint() if cursor == buf.length => - CompletionResult(buf, cursor, CompletionCandidate.fromStrings("" :: Naming.unmangle(result.print) :: Nil)) + CompletionResult(buf, cursor, CompletionCandidate.fromStrings("" :: Naming.unmangle(result.print) :: Nil), "", "") case slashPrintRaw() if cursor == buf.length => - CompletionResult(buf, cursor, CompletionCandidate.fromStrings("" :: result.print :: Nil)) + CompletionResult(buf, cursor, CompletionCandidate.fromStrings("" :: result.print :: Nil), "", "") case slashTypeAt(start, end) if cursor == buf.length => - CompletionResult(buf, cursor, CompletionCandidate.fromStrings("" :: result.typeAt(start.toInt, end.toInt) :: Nil)) + CompletionResult(buf, cursor, CompletionCandidate.fromStrings("" :: result.typeAt(start.toInt, end.toInt) :: Nil), "", "") case _ => // under JLine 3, we no longer use the tabCount concept, so tabCount is always 1 // which always gives us all completions - val (c, r) = result.completionCandidates(tabCount = 1) - // scala/bug#12238 - // Currently, only when all methods are Deprecated should they be displayed `Deprecated` to users. Only handle result of PresentationCompilation#toCandidates. 
- // We don't handle result of PresentationCompilation#defStringCandidates, because we need to show the deprecated here. - if (r.nonEmpty && r.forall(!_.defString.startsWith("def"))) { - val groupByDef = r.groupBy(_.defString) - val allOverrideIsUniversal = groupByDef.filter(f => f._2.forall(_.isUniversal)).keySet - val allOverrideIsDeprecated = groupByDef.filter(f => f._2.forall(_.isDeprecated)).keySet - def isOverrideMethod(candidate: CompletionCandidate): Boolean = groupByDef(candidate.defString).size > 1 - val rewriteDecr = r.map(candidate => { - // If not all overloaded methods are deprecated, but they are overloaded methods, they (all) should be set to false. - val isUniv = if (!allOverrideIsUniversal.contains(candidate.defString) && isOverrideMethod(candidate)) false else candidate.isUniversal - val isDepr = if (!allOverrideIsDeprecated.contains(candidate.defString) && isOverrideMethod(candidate)) false else candidate.isDeprecated - candidate.copy(isUniversal = isUniv, isDeprecated = isDepr) - }) - CompletionResult(buf, c, rewriteDecr) - } else CompletionResult(buf, c, r) + val (c, r) = result.completionCandidates(filter, tabCount = 1) + val typeAtCursor = result.typeAt(cursor, cursor) + CompletionResult(buf, c, r, typeAtCursor, result.print) } } finally result.cleanup() } } catch { case NonFatal(e) => - // e.printStackTrace() + if (intp.settings.debug) + e.printStackTrace() NoCompletions } } diff --git a/src/repl/scala/tools/nsc/interpreter/IMain.scala b/src/repl/scala/tools/nsc/interpreter/IMain.scala index 70ca0d8e227d..099220d7cf4c 100644 --- a/src/repl/scala/tools/nsc/interpreter/IMain.scala +++ b/src/repl/scala/tools/nsc/interpreter/IMain.scala @@ -775,7 +775,8 @@ class IMain(val settings: Settings, parentClassLoaderOverride: Option[ClassLoade } /** One line of code submitted by the user for interpretation */ - class Request(val line: String, origTrees: List[Tree], firstXmlPos: Position = NoPosition, generousImports: Boolean = false, synthetic: Boolean = false) extends ReplRequest { + class Request(val line: String, origTrees: List[Tree], firstXmlPos: Position = NoPosition, + generousImports: Boolean = false, synthetic: Boolean = false, storeResultInVal: Boolean = true) extends ReplRequest { def defines = defHandlers flatMap (_.definedSymbols) def definesTermNames: List[String] = defines collect { case s: TermSymbol => s.decodedName.toString } def imports = importedSymbols @@ -787,9 +788,12 @@ class IMain(val settings: Settings, parentClassLoaderOverride: Option[ClassLoade // The source file contents only has the code originally input by the user, // with unit's body holding the synthetic trees. // When emitting errors, be careful not to refer to the synthetic code - private val unit = new CompilationUnit(new BatchSourceFile(if (synthetic) "" else label, line)) + // pad with a trailing " " so that the synthetic position for enclosing trees does not exactly coincide with the + // position of the user-written code, these seems to confuse the presentation compiler. 
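// Editorial aside, not part of the patch: a toy model (invented types) of the decision made
// by Request.trees just below: only the final statement is wrapped in a fresh `resN`
// definition, and only when it is a plain expression rather than an assignment or a definition.
sealed trait Stat
final case class Expr(code: String)                    extends Stat
final case class Assignment(lhs: String, rhs: String)  extends Stat
final case class Defn(code: String)                    extends Stat

def wrapLast(stats: List[Stat], freshName: String): List[Stat] = stats match {
  case init :+ Expr(code) => init :+ Defn(s"val $freshName = $code")
  case other              => other              // assignments and definitions are left alone
}

// wrapLast(List(Expr("1 + 1")), "res0")          == List(Defn("val res0 = 1 + 1"))
// wrapLast(List(Assignment("x", "2")), "res0")   == List(Assignment("x", "2"))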
+ private val paddedLine = line + " " + private val unit = new CompilationUnit(new BatchSourceFile(if (synthetic) "" else label, paddedLine)) // a dummy position used for synthetic trees (needed for pres compiler to locate the trees for user input) - private val wholeUnit = Position.range(unit.source, 0, 0, line.length) + private val wholeUnit = Position.range(unit.source, 0, 0, paddedLine.length) private def storeInVal(tree: Tree): Tree = { val resName = newTermName(if (synthetic) freshInternalVarName() else freshUserVarName()) @@ -797,15 +801,18 @@ class IMain(val settings: Settings, parentClassLoaderOverride: Option[ClassLoade } // Wrap last tree in a valdef to give user a nice handle for it (`resN`) - val trees: List[Tree] = origTrees.init :+ { - val tree = origTrees.last - @tailrec def loop(scrut: Tree): Tree = scrut match { - case _: Assign => tree - case _: RefTree | _: TermTree => storeInVal(tree) - case Annotated(_, arg) => loop(arg) - case _ => tree - } - loop(tree) + val trees: List[Tree] = origTrees match { + case xs if !storeResultInVal => xs + case init :+ tree => + @tailrec def loop(scrut: Tree): Tree = scrut match { + case _: Assign => tree + case _: RefTree | _: TermTree => storeInVal(tree) + case Annotated(_, arg) => loop(arg) + case _ => tree + } + init :+ loop(tree) + case xs => + xs // can get here in completion of erroneous code } /** handlers for each tree in this request */ @@ -889,13 +896,13 @@ class IMain(val settings: Settings, parentClassLoaderOverride: Option[ClassLoade else ModuleDef(NoMods, readName, wrapperTempl)) if (isClassBased) - stats += q"""object $readName { val INSTANCE = new ${tq"""${readName.toTypeName}"""} }""" + stats += atPos(wholeUnit.focus)(q"""object $readName { val INSTANCE = new ${tq"""${readName.toTypeName}"""} }""") val unspliced = PackageDef(atPos(wholeUnit.focus)(Ident(lineRep.packageName)), stats.toList) unit.body = spliceUserCode.transform(unspliced) unit.encounteredXml(firstXmlPos) -// settings.Xprintpos.value = true + // settings.Xprintpos.value = true showCode(asCompactString(unit.body)) unit diff --git a/src/repl/scala/tools/nsc/interpreter/Interface.scala b/src/repl/scala/tools/nsc/interpreter/Interface.scala index 73f27ed749e9..790750daf367 100644 --- a/src/repl/scala/tools/nsc/interpreter/Interface.scala +++ b/src/repl/scala/tools/nsc/interpreter/Interface.scala @@ -323,21 +323,24 @@ trait PresentationCompilationResult { def candidates(tabCount: Int): (Int, List[String]) = completionCandidates(tabCount) match { case (cursor, cands) => - (cursor, cands.map(_.defString)) + (cursor, cands.map(_.name)) } - def completionCandidates(tabCount: Int = -1): (Int, List[CompletionCandidate]) + final def completionCandidates(tabCount: Int = -1): (Int, List[CompletionCandidate]) = completionCandidates(filter = true, tabCount) + def completionCandidates(filter: Boolean, tabCount: Int): (Int, List[CompletionCandidate]) } case class CompletionCandidate( - defString: String, + name: String, arity: CompletionCandidate.Arity = CompletionCandidate.Nullary, isDeprecated: Boolean = false, - isUniversal: Boolean = false) + isUniversal: Boolean = false, + declString: () => String = () => "") object CompletionCandidate { sealed trait Arity case object Nullary extends Arity case object Nilary extends Arity + case object Infix extends Arity case object Other extends Arity // purely for convenience def fromStrings(defStrings: List[String]): List[CompletionCandidate] = diff --git a/src/repl/scala/tools/nsc/interpreter/PresentationCompilation.scala 
b/src/repl/scala/tools/nsc/interpreter/PresentationCompilation.scala index 91df89362548..a2128f52cf49 100644 --- a/src/repl/scala/tools/nsc/interpreter/PresentationCompilation.scala +++ b/src/repl/scala/tools/nsc/interpreter/PresentationCompilation.scala @@ -12,7 +12,9 @@ package scala.tools.nsc.interpreter -import scala.reflect.internal.util.{Position, RangePosition, StringOps} +import scala.collection.mutable +import scala.reflect.internal.util.{Position, RangePosition} +import scala.tools.nsc.ast.parser.Tokens import scala.tools.nsc.backend.JavaPlatform import scala.tools.nsc.util.ClassPath import scala.tools.nsc.{Settings, interactive} @@ -22,7 +24,7 @@ import scala.tools.nsc.interpreter.Results.{Error, Result} trait PresentationCompilation { self: IMain => - private final val Cursor = IMain.DummyCursorFragment + " " + private final val Cursor = IMain.DummyCursorFragment /** Typecheck a line of REPL input, suitably wrapped with "interpreter wrapper" objects/classes, with the * presentation compiler. The result of this method gives access to the typechecked tree and to autocompletion @@ -34,11 +36,31 @@ trait PresentationCompilation { self: IMain => if (global == null) Left(Error) else { val pc = newPresentationCompiler() - val line1 = buf.patch(cursor, Cursor, 0) - val trees = pc.newUnitParser(line1).parseStats() + def cursorIsInKeyword(): Boolean = { + val scanner = pc.newUnitParser(buf).newScanner() + scanner.init() + while (scanner.token != Tokens.EOF) { + val token = scanner.token + val o = scanner.offset + scanner.nextToken() + if ((o to scanner.lastOffset).contains(cursor)) { + return (!Tokens.isIdentifier(token) && pc.syntaxAnalyzer.token2name.contains(token)) + } + } + false + } + // Support completion of "def format = 42; for" by replacing the keyword with foo_CURSOR_ before + // typechecking. Only do this when needed, to be able to correctly return the type of `foo.bar` + // where `bar` is the complete name of a member. 
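// (Editorial sketch, not part of this patch.) Roughly, with `|` marking the cursor at the end of the input:
//   "def format = 42; for|"  -- the token under the cursor is a keyword, so the buffer is patched with the
//                               dummy cursor fragment and the keyword then lexes as an ordinary identifier
//   "foo.bar|"               -- `bar` is already a complete member name, so the buffer is left untouched
//                               and the type of `foo.bar` can be reported at the cursor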
+ val line1 = if (!cursorIsInKeyword()) buf else buf.patch(cursor, Cursor, 0) + + val trees = pc.newUnitParser(line1).parseStats() match { + case Nil => List(pc.EmptyTree) + case xs => xs + } val importer = global.mkImporter(pc) //println(s"pc: [[$line1]], <<${trees.size}>>") - val request = new Request(line1, trees map (t => importer.importTree(t)), generousImports = true) + val request = new Request(line1, trees map (t => importer.importTree(t)), generousImports = true, storeResultInVal = false) val origUnit = request.mkUnit val unit = new pc.CompilationUnit(origUnit.source) unit.body = pc.mkImporter(global).importTree(origUnit.body) @@ -89,8 +111,6 @@ trait PresentationCompilation { self: IMain => interactiveGlobal } - private var lastCommonPrefixCompletion: Option[String] = None - abstract class PresentationCompileResult(val compiler: interactive.Global, val inputRange: Position, val cursor: Int, val buf: String) extends PresentationCompilationResult { val unit: compiler.RichCompilationUnit // depmet broken for constructors, can't be ctor arg @@ -120,15 +140,45 @@ trait PresentationCompilation { self: IMain => } } - def typeString(tree: compiler.Tree): String = - compiler.exitingTyper(tree.tpe.toString) + def typeString(tree: compiler.Tree): String = { + tree.tpe match { + case null | compiler.NoType | compiler.ErrorType => "" + case tp if compiler.nme.isReplWrapperName(tp.typeSymbol.name) => "" + case tp => compiler.exitingTyper(tp.toString) + } + } def treeString(tree: compiler.Tree): String = compiler.showCode(tree) override def print = { val tree = treeAt(inputRange) - treeString(tree) + " // : " + tree.tpe.safeToString + import compiler._ + object makeCodePrinterPrintInferredTypes extends Transformer { + private def printableTypeTree(tp: Type): TypeTree = { + val tree = TypeTree(tp) + tree.wasEmpty = false + tree + } + override def transform(tree: Tree): Tree = super.transform(tree) match { + case ValDef(mods, name, tt @ build.SyntacticEmptyTypeTree(), rhs) => + if (tree.symbol != null && tree.symbol != NoSymbol && nme.isReplWrapperName(tree.symbol.owner.name)) { + treeCopy.ValDef(tree, mods &~ (Flag.PRIVATE | Flag.LOCAL), name.dropLocal, printableTypeTree(tt.tpe), rhs) + } else { + treeCopy.ValDef(tree, mods, name, printableTypeTree(tt.tpe), rhs) + } + case DefDef(mods, name, tparams, vparamss, tt @ build.SyntacticEmptyTypeTree(), rhs) => + treeCopy.DefDef(tree, mods, name, tparams, vparamss, printableTypeTree(tt.tpe), rhs) + case t => t + } + + } + val tree1 = makeCodePrinterPrintInferredTypes.transform(tree) + val tpString = typeString(tree1) match { + case "" => "" + case s => " // : " + s + } + treeString(tree1) + tpString } @@ -138,7 +188,7 @@ trait PresentationCompilation { self: IMain => val NoCandidates = (-1, Nil) type Candidates = (Int, List[CompletionCandidate]) - override def completionCandidates(tabCount: Int): Candidates = { + override def completionCandidates(filter: Boolean, tabCount: Int): Candidates = { import compiler._ import CompletionResult.NoResults @@ -161,76 +211,56 @@ trait PresentationCompilation { self: IMain => if (m.sym.paramss.isEmpty) CompletionCandidate.Nullary else if (m.sym.paramss.size == 1 && m.sym.paramss.head.isEmpty) CompletionCandidate.Nilary else CompletionCandidate.Other - def defStringCandidates(matching: List[Member], name: Name, isNew: Boolean): Candidates = { + def defStringCandidates(matching: List[Member], name: Name, isNew: Boolean) = { + val seen = new mutable.HashSet[Symbol]() val ccs = for { member <- matching - if 
member.symNameDropLocal == name + if seen.add(member.sym) sym <- if (member.sym.isClass && isNew) member.sym.info.decl(nme.CONSTRUCTOR).alternatives else member.sym.alternatives sugared = sym.sugaredSymbolOrSelf } yield { - val tp = member.prefix memberType sym - val desc = Seq(if (isMemberDeprecated(member)) "(deprecated)" else "", if (isMemberUniversal(member)) "(universal)" else "") - val methodOtherDesc = if (!desc.exists(_ != "")) "" else " " + desc.filter(_ != "").mkString(" ") CompletionCandidate( - defString = sugared.defStringSeenAs(tp) + methodOtherDesc, + name = member.symNameDropLocal.decoded, arity = memberArity(member), isDeprecated = isMemberDeprecated(member), - isUniversal = isMemberUniversal(member)) + isUniversal = isMemberUniversal(member), + declString = () => { + if (sym.isPackageObjectOrClass) "" + else { + val tp = member.prefix memberType sym + val desc = Seq(if (isMemberDeprecated(member)) "(deprecated)" else "", if (isMemberUniversal(member)) "(universal)" else "") + val methodOtherDesc = if (!desc.exists(_ != "")) "" else " " + desc.filter(_ != "").mkString(" ") + sugared.defStringSeenAs(tp) + methodOtherDesc + } + }) } - (cursor, CompletionCandidate("") :: ccs.distinct) + ccs } - def toCandidates(members: List[Member]): List[CompletionCandidate] = - members - .map(m => CompletionCandidate(m.symNameDropLocal.decoded, memberArity(m), isMemberDeprecated(m), isMemberUniversal(m))) - .sortBy(_.defString) val found = this.completionsAt(cursor) match { case NoResults => NoCandidates case r => def shouldHide(m: Member): Boolean = - tabCount == 0 && (isMemberDeprecated(m) || isMemberUniversal(m)) - val matching = r.matchingResults().filterNot(shouldHide) - val tabAfterCommonPrefixCompletion = lastCommonPrefixCompletion.contains(buf.substring(inputRange.start, cursor)) && matching.exists(_.symNameDropLocal == r.name) - val doubleTab = tabCount > 0 && matching.forall(_.symNameDropLocal == r.name) - if (tabAfterCommonPrefixCompletion || doubleTab) { - val pos1 = positionOf(cursor) - import compiler._ - val locator = new Locator(pos1) - val tree = locator locateIn unit.body - var isNew = false - new TreeStackTraverser { - override def traverse(t: Tree): Unit = { - if (t eq tree) { - isNew = path.dropWhile { case _: Select | _: Annotated => true; case _ => false}.headOption match { - case Some(_: New) => true - case _ => false - } - } else super.traverse(t) - } - }.traverse(unit.body) - defStringCandidates(matching, r.name, isNew) - } else if (matching.isEmpty) { - // Lenient matching based on camel case and on eliding JavaBean "get" / "is" boilerplate - val camelMatches: List[Member] = r.matchingResults(CompletionResult.camelMatch(_)).filterNot(shouldHide) - val memberCompletions: List[CompletionCandidate] = toCandidates(camelMatches) - def allowCompletion = ( - (memberCompletions.size == 1) - || CompletionResult.camelMatch(r.name)(r.name.newName(StringOps.longestCommonPrefix(memberCompletions.map(_.defString)))) - ) - if (memberCompletions.isEmpty) NoCandidates - else if (allowCompletion) (cursor - r.positionDelta, memberCompletions) - else (cursor, CompletionCandidate("") :: memberCompletions) - } else if (matching.nonEmpty && matching.forall(_.symNameDropLocal == r.name)) - NoCandidates // don't offer completion if the only option has been fully typed already - else { - // regular completion - (cursor - r.positionDelta, toCandidates(matching)) - } + filter && tabCount == 0 && (isMemberDeprecated(m) || isMemberUniversal(m)) + val matching = r.matchingResults(nameMatcher = 
if (filter) {entered => candidate => candidate.startsWith(entered)} else _ => _ => true).filterNot(shouldHide) + val pos1 = positionOf(cursor) + import compiler._ + val locator = new Locator(pos1) + val tree = locator locateIn unit.body + var isNew = false + new TreeStackTraverser { + override def traverse(t: Tree): Unit = { + if (t eq tree) { + isNew = path.dropWhile { case _: Select | _: Annotated => true; case _ => false}.headOption match { + case Some(_: New) => true + case _ => false + } + } else super.traverse(t) + } + }.traverse(unit.body) + val candidates = defStringCandidates(matching, r.name, isNew) + val pos = cursor - r.positionDelta + (pos, candidates.sortBy(_.name)) } - lastCommonPrefixCompletion = - if (found != NoCandidates && buf.length >= found._1) - Some(buf.substring(inputRange.start, found._1) + StringOps.longestCommonPrefix(found._2.map(_.defString))) - else - None found } diff --git a/src/scaladoc/scala/tools/nsc/ScalaDoc.scala b/src/scaladoc/scala/tools/nsc/ScalaDoc.scala index 644d0b839ed2..3ddbe03c9b35 100644 --- a/src/scaladoc/scala/tools/nsc/ScalaDoc.scala +++ b/src/scaladoc/scala/tools/nsc/ScalaDoc.scala @@ -48,7 +48,7 @@ class ScalaDoc { try { new DocFactory(reporter, docSettings) document command.files } catch { case ex @ FatalError(msg) => - if (docSettings.debug.value) ex.printStackTrace() + if (docSettings.isDebug) ex.printStackTrace() reporter.error(null, "fatal error: " + msg) } finally reporter.finish() diff --git a/src/scaladoc/scala/tools/nsc/doc/ScaladocAnalyzer.scala b/src/scaladoc/scala/tools/nsc/doc/ScaladocAnalyzer.scala index ab5ebf0f17aa..e361e7299010 100644 --- a/src/scaladoc/scala/tools/nsc/doc/ScaladocAnalyzer.scala +++ b/src/scaladoc/scala/tools/nsc/doc/ScaladocAnalyzer.scala @@ -96,7 +96,7 @@ trait ScaladocAnalyzer extends Analyzer { typedStats(trees, NoSymbol) useCase.defined = context.scope.toList filterNot (useCase.aliases contains _) - if (settings.debug) + if (settings.isDebug) useCase.defined foreach (sym => println("defined use cases: %s:%s".format(sym, sym.tpe))) useCase.defined diff --git a/src/scaladoc/scala/tools/nsc/doc/Uncompilable.scala b/src/scaladoc/scala/tools/nsc/doc/Uncompilable.scala index 00a888b3f65f..bdec5a30f6b6 100644 --- a/src/scaladoc/scala/tools/nsc/doc/Uncompilable.scala +++ b/src/scaladoc/scala/tools/nsc/doc/Uncompilable.scala @@ -12,7 +12,9 @@ package scala.tools.nsc package doc + import scala.language.implicitConversions + import scala.reflect.internal.util.NoPosition import scala.tools.nsc.Reporting.WarningCategory @@ -63,7 +65,7 @@ trait Uncompilable { def symbols = pairs map (_._1) def templates = symbols.filter(x => x.isClass || x.isTrait || x == AnyRefClass/* which is now a type alias */).toSet def comments = { - if (settings.debug || settings.verbose) + if (settings.isDebug || settings.verbose) inform("Found %d uncompilable files: %s".format(files.size, files mkString ", ")) if (pairs.isEmpty) diff --git a/src/scaladoc/scala/tools/nsc/doc/html/page/Entity.scala b/src/scaladoc/scala/tools/nsc/doc/html/page/Entity.scala index 3c82654fb515..32a0cbca5840 100644 --- a/src/scaladoc/scala/tools/nsc/doc/html/page/Entity.scala +++ b/src/scaladoc/scala/tools/nsc/doc/html/page/Entity.scala @@ -341,7 +341,7 @@ trait EntityPage extends HtmlPage { val postamble = List(Div(id = "tooltip"), if (Set("epfl", "EPFL").contains(tpl.universe.settings.docfooter.value)) - Div(id = "footer", elems = Txt("Scala programming documentation. 
Copyright (c) 2002-2020 ") :: A(href = "https://www.epfl.ch", target = "_top", elems = Txt("EPFL")) :: Txt(" and ") :: A(href = "https://www.lightbend.com", target = "_top", elems = Txt("Lightbend")) :: Txt(".")) + Div(id = "footer", elems = Txt("Scala programming documentation. Copyright (c) 2002-2021 ") :: A(href = "https://www.epfl.ch", target = "_top", elems = Txt("EPFL")) :: Txt(" and ") :: A(href = "https://www.lightbend.com", target = "_top", elems = Txt("Lightbend")) :: Txt(".")) else Div(id = "footer", elems = Txt(tpl.universe.settings.docfooter.value))) diff --git a/src/scaladoc/scala/tools/nsc/doc/html/page/IndexScript.scala b/src/scaladoc/scala/tools/nsc/doc/html/page/IndexScript.scala index 63f54b3546c9..ee8c63842166 100644 --- a/src/scaladoc/scala/tools/nsc/doc/html/page/IndexScript.scala +++ b/src/scaladoc/scala/tools/nsc/doc/html/page/IndexScript.scala @@ -29,7 +29,7 @@ class IndexScript(universe: doc.Universe) extends Page { } val packages = { - val pairs = allPackagesWithTemplates.toIterable.map(_ match { + val pairs = allPackagesWithTemplates.map(_ match { case (pack, templates) => { val merged = mergeByQualifiedName(templates) diff --git a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/template.js b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/template.js index d6935dd01ee5..e8b44e9b6744 100644 --- a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/template.js +++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/template.js @@ -61,7 +61,7 @@ $(document).ready(function() { return $(elem).attr("data-hidden") == 'true'; }; - $("#linearization li:gt(0)").filter(function(){ + $("#linearization li").slice(1).filter(function(){ return isHiddenClass($(this).attr("name")); }).removeClass("in").addClass("out"); @@ -440,7 +440,7 @@ function filter() { var orderingAlphabetic = $("#order > ol > li.alpha").hasClass("in"); var orderingInheritance = $("#order > ol > li.inherit").hasClass("in"); var orderingGroups = $("#order > ol > li.group").hasClass("in"); - var hiddenSuperclassElementsLinearization = orderingInheritance ? $("#linearization > li:gt(0)") : $("#linearization > li.out"); + var hiddenSuperclassElementsLinearization = orderingInheritance ? 
$("#linearization > li").slice(1) : $("#linearization > li.out"); var hiddenSuperclassesLinearization = hiddenSuperclassElementsLinearization.map(function() { return $(this).attr("name"); }).get(); diff --git a/src/scaladoc/scala/tools/nsc/doc/model/ModelFactoryTypeSupport.scala b/src/scaladoc/scala/tools/nsc/doc/model/ModelFactoryTypeSupport.scala index 799fbf760c2e..888dde133742 100644 --- a/src/scaladoc/scala/tools/nsc/doc/model/ModelFactoryTypeSupport.scala +++ b/src/scaladoc/scala/tools/nsc/doc/model/ModelFactoryTypeSupport.scala @@ -45,7 +45,7 @@ trait ModelFactoryTypeSupport { appendType0(tp) case tp :: tps => appendType0(tp) - nameBuffer append sep + nameBuffer.append(sep) appendTypes0(tps, sep) } @@ -202,15 +202,16 @@ trait ModelFactoryTypeSupport { /* Polymorphic types */ case PolyType(tparams, result) => assert(tparams.nonEmpty, "polymorphic type must have at least one type parameter") - def typeParamsToString(tps: List[Symbol]): String = if (tps.isEmpty) "" else - tps.map{tparam => - tparam.varianceString + tparam.name + typeParamsToString(tparam.typeParams) - }.mkString("[", ", ", "]") - nameBuffer append typeParamsToString(tparams) + def typeParamsToString(tps: List[Symbol]): String = + if (tps.isEmpty) "" + else + tps.map { tparam => + tparam.varianceString + tparam.unexpandedName + typeParamsToString(tparam.typeParams) + }.mkString("[", ", ", "]") + nameBuffer.append(typeParamsToString(tparams)) appendType0(result) case et@ExistentialType(quantified, underlying) => - def appendInfoStringReduced(sym: Symbol, tp: Type): Unit = { if (sym.isType && !sym.isAliasType && !sym.isClass) { tp match { diff --git a/src/scalap/decoder.properties b/src/scalap/decoder.properties index 3607f029f024..9ac03dd79c51 100644 --- a/src/scalap/decoder.properties +++ b/src/scalap/decoder.properties @@ -1,2 +1,2 @@ version.number=2.0.1 -copyright.string=(c) 2002-2020 LAMP/EPFL +copyright.string=(c) 2002-2021 LAMP/EPFL diff --git a/src/tastytest/scala/tools/tastytest/ClasspathOps.scala b/src/tastytest/scala/tools/tastytest/ClasspathOps.scala new file mode 100644 index 000000000000..257eacf1d781 --- /dev/null +++ b/src/tastytest/scala/tools/tastytest/ClasspathOps.scala @@ -0,0 +1,10 @@ +package scala.tools.tastytest + +import java.net.URL +import java.nio.file.Paths + +object ClasspathOps { + implicit class ClassPathSyntax(private val ls: List[String]) extends AnyVal { + def asURLs: List[URL] = ls.map(Paths.get(_).toUri().toURL()) + } +} diff --git a/src/tastytest/scala/tools/tastytest/Classpaths.scala b/src/tastytest/scala/tools/tastytest/Classpaths.scala new file mode 100644 index 000000000000..5458966fe74d --- /dev/null +++ b/src/tastytest/scala/tools/tastytest/Classpaths.scala @@ -0,0 +1,17 @@ +package scala.tools.tastytest + +import scala.util.Properties +import java.io.File.pathSeparatorChar + +object Classpaths { + + private def classpathProp(name: String) = + Properties.propOrNone(name).map(_.split(pathSeparatorChar).filter(_.nonEmpty).toList).getOrElse(Nil) + + def dottyCompiler: List[String] = classpathProp("tastytest.classpaths.dottyCompiler") + + def scalaReflect: List[String] = classpathProp("tastytest.classpaths.scalaReflect") + + def dottyLibrary: List[String] = classpathProp("tastytest.classpaths.dottyLibrary") + +} diff --git a/src/tastytest/scala/tools/tastytest/Dotc.scala b/src/tastytest/scala/tools/tastytest/Dotc.scala index 6cf7ac10cc3d..8be7725c0810 100644 --- a/src/tastytest/scala/tools/tastytest/Dotc.scala +++ b/src/tastytest/scala/tools/tastytest/Dotc.scala @@ -1,40 +1,94 @@ 
package scala.tools.tastytest -import scala.util.{ Try, Success } +import scala.util.{Try, Success, Failure} +import scala.util.control.NonFatal -import java.lang.reflect.Modifier +import scala.reflect.internal.util.ScalaClassLoader +import scala.reflect.runtime.ReflectionUtils +import java.lang.reflect.{Modifier, Method} + +import ClasspathOps._ object Dotc extends Script.Command { - private[this] lazy val dotcProcess = processMethod("dotty.tools.dotc.Main") - - def processMethod(mainClassName: String): Array[String] => Try[Boolean] = { - // TODO call it directly when we are bootstrapped - val mainClass = Class.forName(mainClassName) - val reporterClass = Class.forName("dotty.tools.dotc.reporting.Reporter") - val Main_process = mainClass.getMethod("process", classOf[Array[String]]) - assert(Modifier.isStatic(Main_process.getModifiers), s"$mainClassName.process is not static!") - val Reporter_hasErrors = reporterClass.getMethod("hasErrors") - args => Try { - val reporter = Main_process.invoke(null, args) - val hasErrors = Reporter_hasErrors.invoke(reporter).asInstanceOf[Boolean] + final case class ClassLoader private (val parent: ScalaClassLoader) + + def initClassloader(): Try[Dotc.ClassLoader] = + Try(Dotc.ClassLoader(ScalaClassLoader.fromURLs(Classpaths.dottyCompiler.asURLs))) + + def processIn(op: Dotc.ClassLoader => Int): Int = { + Dotc.initClassloader() match { + case Success(cl) => op(cl) + case Failure(err) => + println(red(s"could not initialise Scala 3 classpath: $err")) + 1 + } + } + + def loadClass(name: String)(implicit cl: Dotc.ClassLoader) = + Class.forName(name, true, cl.parent) + + def invokeStatic(method: Method, args: Seq[Any])(implicit cl: Dotc.ClassLoader) = { + assert(Modifier.isStatic(method.getModifiers), s"$method is not static!") + invoke(method, null, args) + } + + def invokeStatic( + className: String, + methodName: String, + args: Seq[String] + )(implicit cl: Dotc.ClassLoader): Try[Object] = { + val cls = loadClass(className) + val method = cls.getMethod(methodName, classOf[Array[String]]) + Try { + invokeStatic(method, Seq(args.toArray)) + } + } + + def invoke(method: Method, obj: AnyRef, args: Seq[Any])(implicit cl: Dotc.ClassLoader) = { + try cl.parent.asContext[AnyRef] { + method.invoke(obj, args.toArray:_*) + } + catch { + case NonFatal(ex) => throw ReflectionUtils.unwrapThrowable(ex) + } + } + + private def dotcProcess(args: Seq[String])(implicit cl: Dotc.ClassLoader) = processMethod("dotty.tools.dotc.Main")(args) + + def processMethod(className: String)(args: Seq[String])(implicit cl: Dotc.ClassLoader): Try[Boolean] = { + val reporterCls = loadClass("dotty.tools.dotc.reporting.Reporter") + val Reporter_hasErrors = reporterCls.getMethod("hasErrors") + for (reporter <- invokeStatic(className, "process", args)) yield { + val hasErrors = invoke(Reporter_hasErrors, reporter, Seq.empty).asInstanceOf[Boolean] !hasErrors } } - def dotc(out: String, classpath: String, additionalSettings: Seq[String], sources: String*): Try[Boolean] = { + def mainMethod(className: String)(args: Seq[String])(implicit cl: Dotc.ClassLoader): Try[Unit] = + for (_ <- invokeStatic(className, "main", args)) yield () + + def dotcVersion(implicit cl: Dotc.ClassLoader): String = { + val compilerPropertiesClass = loadClass("dotty.tools.dotc.config.Properties") + val Properties_simpleVersionString = compilerPropertiesClass.getMethod("simpleVersionString") + invokeStatic(Properties_simpleVersionString, Seq.empty).asInstanceOf[String] + } + + def dotc(out: String, classpath: String, 
additionalSettings: Seq[String], sources: String*)(implicit cl: Dotc.ClassLoader): Try[Boolean] = { if (sources.isEmpty) { Success(true) } else { - val args = Array( + val libraryDeps = Classpaths.dottyLibrary ++ Classpaths.scalaReflect + val args = Seq( "-d", out, - "-classpath", classpath, + "-classpath", libraryDeps.mkString(classpath + Files.classpathSep, Files.classpathSep, ""), "-deprecation", - //"-language:experimental.erasedDefinitions", "-Xfatal-warnings", - "-usejavacp" ) ++ additionalSettings ++ sources + if (TastyTest.verbose) { + println(yellow(s"Invoking dotc (version $dotcVersion) with args: $args")) + } dotcProcess(args) } } @@ -43,13 +97,15 @@ object Dotc extends Script.Command { val describe: String = s"$commandName " def process(args: String*): Int = { - if (args.length != 2) { - println(red(s"please provide two arguments in sub-command: $describe")) + if (args.length < 2) { + println(red(s"please provide at least two arguments in sub-command: $describe")) return 1 } - val Seq(out, src) = args: @unchecked - val success = dotc(out, out, Nil, src).get - if (success) 0 else 1 + val Seq(out, src, additional @ _*) = args: @unchecked + Dotc.processIn { implicit scala3classloader => + val success = dotc(out, out, additional, src).get + if (success) 0 else 1 + } } } diff --git a/src/tastytest/scala/tools/tastytest/DotcDecompiler.scala b/src/tastytest/scala/tools/tastytest/DotcDecompiler.scala index ff53ccd782b4..41f842b43f33 100644 --- a/src/tastytest/scala/tools/tastytest/DotcDecompiler.scala +++ b/src/tastytest/scala/tools/tastytest/DotcDecompiler.scala @@ -4,10 +4,11 @@ import scala.util.Try object DotcDecompiler extends Script.Command { - private[this] lazy val dotcProcess = Dotc.processMethod("dotty.tools.dotc.decompiler.Main") + private def dotcProcess(args: Seq[String])(implicit cl: Dotc.ClassLoader) = + Dotc.processMethod("dotty.tools.dotc.decompiler.Main")(args) - def decompile(source: String, additionalSettings: Seq[String]): Try[Boolean] = - dotcProcess(("-usejavacp" +: additionalSettings :+ source).toArray) + def decompile(source: String, additionalSettings: Seq[String])(implicit cl: Dotc.ClassLoader): Try[Boolean] = + dotcProcess(("-usejavacp" +: additionalSettings :+ source)) val commandName: String = "dotcd" val describe: String = s"$commandName " @@ -18,8 +19,10 @@ object DotcDecompiler extends Script.Command { return 1 } val Seq(tasty, additionalSettings @ _*) = args: @unchecked - val success = decompile(tasty, additionalSettings).get - if (success) 0 else 1 + Dotc.processIn { implicit scala3classloader => + val success = decompile(tasty, additionalSettings).get + if (success) 0 else 1 + } } } diff --git a/src/tastytest/scala/tools/tastytest/PrintTasty.scala b/src/tastytest/scala/tools/tastytest/PrintTasty.scala new file mode 100644 index 000000000000..f9fcf655b50a --- /dev/null +++ b/src/tastytest/scala/tools/tastytest/PrintTasty.scala @@ -0,0 +1,24 @@ +package scala.tools.tastytest + +import scala.util.Try + +object PrintTasty extends Script.Command { + + def printTasty(tasty: String)(implicit cl: Dotc.ClassLoader): Try[Unit] = + Dotc.mainMethod("dotty.tools.dotc.core.tasty.TastyPrinter")(Seq(tasty)) + + val commandName: String = "printTasty" + val describe: String = s"$commandName " + + def process(args: String*): Int = { + if (args.length != 1) { + println(red(s"please provide 1 argument in sub-command: $describe")) + return 1 + } + Dotc.processIn { implicit scala3classloader => + val success = printTasty(tasty = args.head).isSuccess + if (success) 0 else 1 
+ } + } + +} diff --git a/src/tastytest/scala/tools/tastytest/TastyTest.scala b/src/tastytest/scala/tools/tastytest/TastyTest.scala index be64ff8ca2f3..d3e9122adbdf 100644 --- a/src/tastytest/scala/tools/tastytest/TastyTest.scala +++ b/src/tastytest/scala/tools/tastytest/TastyTest.scala @@ -14,12 +14,12 @@ import Files._ object TastyTest { - private val verbose = false + private[tastytest] val verbose = false private def log(s: => String): Unit = if (verbose) println(s) - def runSuite(src: String, srcRoot: String, pkgName: String, outDir: Option[String], additionalSettings: Seq[String], additionalDottySettings: Seq[String]): Try[Unit] = for { + def runSuite(src: String, srcRoot: String, pkgName: String, outDir: Option[String], additionalSettings: Seq[String], additionalDottySettings: Seq[String])(implicit cl: Dotc.ClassLoader): Try[Unit] = for { (pre, src2, src3) <- getRunSources(srcRoot/src) out <- outDir.fold(tempDir(pkgName))(dir) _ <- scalacPos(out, sourceRoot=srcRoot/src/"pre", additionalSettings, pre:_*) @@ -29,7 +29,7 @@ object TastyTest { _ <- runMainOn(out, testNames:_*) } yield () - def posSuite(src: String, srcRoot: String, pkgName: String, outDir: Option[String], additionalSettings: Seq[String], additionalDottySettings: Seq[String]): Try[Unit] = for { + def posSuite(src: String, srcRoot: String, pkgName: String, outDir: Option[String], additionalSettings: Seq[String], additionalDottySettings: Seq[String])(implicit cl: Dotc.ClassLoader): Try[Unit] = for { (pre, src2, src3) <- getRunSources(srcRoot/src, preFilters = Set(Scala, Java)) _ = log(s"Sources to compile under test: ${src2.map(cyan).mkString(", ")}") out <- outDir.fold(tempDir(pkgName))(dir) @@ -39,14 +39,14 @@ object TastyTest { _ <- scalacPos(out, sourceRoot=srcRoot/src/"src-2", additionalSettings, src2:_*) } yield () - def negSuite(src: String, srcRoot: String, pkgName: String, outDir: Option[String], additionalSettings: Seq[String], additionalDottySettings: Seq[String]): Try[Unit] = for { + def negSuite(src: String, srcRoot: String, pkgName: String, outDir: Option[String], additionalSettings: Seq[String], additionalDottySettings: Seq[String])(implicit cl: Dotc.ClassLoader): Try[Unit] = for { (src2, src3) <- get2And3Sources(srcRoot/src, src2Filters = Set(Scala, Check, SkipCheck)) out <- outDir.fold(tempDir(pkgName))(dir) _ <- dotcPos(out, sourceRoot=srcRoot/src/"src-3", additionalDottySettings, src3:_*) _ <- scalacNeg(out, additionalSettings, src2:_*) } yield () - def negChangePreSuite(src: String, srcRoot: String, pkgName: String, outDirs: Option[(String, String)], additionalSettings: Seq[String], additionalDottySettings: Seq[String]): Try[Unit] = for { + def negChangePreSuite(src: String, srcRoot: String, pkgName: String, outDirs: Option[(String, String)], additionalSettings: Seq[String], additionalDottySettings: Seq[String])(implicit cl: Dotc.ClassLoader): Try[Unit] = for { (preA, preB, src2, src3) <- getMovePreChangeSources(srcRoot/src, src2Filters = Set(Scala, Check, SkipCheck)) (out1, out2) <- outDirs.fold(tempDir(pkgName) *> tempDir(pkgName))(p => dir(p._1) *> dir(p._2)) _ <- scalacPos(out1, sourceRoot=srcRoot/src/"pre-A", additionalSettings, preA:_*) @@ -55,7 +55,7 @@ object TastyTest { _ <- scalacNeg(out2, additionalSettings, src2:_*) } yield () - def negSuiteIsolated(src: String, srcRoot: String, pkgName: String, outDirs: Option[(String, String)], additionalSettings: Seq[String], additionalDottySettings: Seq[String]): Try[Unit] = for { + def negSuiteIsolated(src: String, srcRoot: String, pkgName: String, 
outDirs: Option[(String, String)], additionalSettings: Seq[String], additionalDottySettings: Seq[String])(implicit cl: Dotc.ClassLoader): Try[Unit] = for { (src2, src3A, src3B) <- getNegIsolatedSources(srcRoot/src, src2Filters = Set(Scala, Check, SkipCheck)) (out1, out2) <- outDirs.fold(tempDir(pkgName) *> tempDir(pkgName))(p => dir(p._1) *> dir(p._2)) _ <- dotcPos(out1, sourceRoot=srcRoot/src/"src-3-A", additionalDottySettings, src3A:_*) @@ -154,11 +154,12 @@ object TastyTest { } } - def dotcPos(out: String, sourceRoot: String, additionalSettings: Seq[String], sources: String*): Try[Unit] = dotcPos(out, out, sourceRoot, additionalSettings, sources:_*) + def dotcPos(out: String, sourceRoot: String, additionalSettings: Seq[String], sources: String*)(implicit cl: Dotc.ClassLoader): Try[Unit] = dotcPos(out, out, sourceRoot, additionalSettings, sources:_*) - def dotcPos(out: String, classpath: String, sourceRoot: String, additionalSettings: Seq[String], sources: String*): Try[Unit] = { + def dotcPos(out: String, classpath: String, sourceRoot: String, additionalSettings: Seq[String], sources: String*)(implicit cl: Dotc.ClassLoader): Try[Unit] = { log(s"compiling sources in ${yellow(sourceRoot)} with dotc.") - successWhen(Dotc.dotc(out, classpath, additionalSettings, sources:_*))("dotc failed to compile sources.") + val process = Dotc.dotc(out, classpath, additionalSettings, sources:_*) + successWhen(process)("dotc failed to compile sources.") } private def getSourceAsName(path: String): String = @@ -273,7 +274,7 @@ object TastyTest { } case Failure(err) => errors += test - printerrln(s"ERROR: $test failed: ${err.getClass.getSimpleName} ${err.getMessage}") + printerrln(s"ERROR: $test failed: ${err.getClass.getSimpleName} ${err.getMessage} in ${err.getStackTrace().mkString("\n ", "\n ", "")}") } } } diff --git a/src/testkit/scala/tools/testkit/AssertUtil.scala b/src/testkit/scala/tools/testkit/AssertUtil.scala index 824adefe107b..e7b10f5206b4 100644 --- a/src/testkit/scala/tools/testkit/AssertUtil.scala +++ b/src/testkit/scala/tools/testkit/AssertUtil.scala @@ -20,7 +20,6 @@ import scala.collection.mutable import scala.concurrent.{Await, Awaitable} import scala.util.chaining._ import scala.util.{Failure, Success, Try} -import scala.util.Properties.isJavaAtLeast import scala.util.control.{ControlThrowable, NonFatal} import java.time.Duration import java.util.concurrent.{CountDownLatch, TimeUnit} @@ -28,6 +27,7 @@ import java.util.concurrent.atomic.AtomicReference import java.lang.ref._ import java.lang.reflect.{Array => _, _} import java.util.IdentityHashMap +import scala.annotation.nowarn /** This module contains additional higher-level assert statements * that are ultimately based on junit.Assert primitives. @@ -40,16 +40,28 @@ import java.util.IdentityHashMap */ object AssertUtil { - /** Assert on Java 8, but on later versions, just print if assert would fail. */ - def assert8(b: => Boolean, msg: => Any) = - if (!isJavaAtLeast(9)) - assert(b, msg) - else if (!b) - println(s"assert not $msg") - // junit fail is Unit def fail(message: String): Nothing = throw new AssertionError(message) + private val printable = raw"\p{Print}".r + + def hexdump(s: String): Iterator[String] = { + import scala.io.Codec + val codec: Codec = Codec.UTF8 + var offset = 0 + def hex(bytes: Array[Byte]) = bytes.map(b => f"$b%02x").mkString(" ") + def charFor(byte: Byte): Char = byte.toChar match { case c @ printable() => c ; case _ => '.' 
} + def ascii(bytes: Array[Byte]) = bytes.map(charFor).mkString + def format(bytes: Array[Byte]): String = + f"$offset%08x ${hex(bytes.slice(0, 8))}%-24s ${hex(bytes.slice(8, 16))}%-24s |${ascii(bytes)}|" + .tap(_ => offset += bytes.length) + s.getBytes(codec.charSet).grouped(16).map(format) + } + + private def dump(s: String) = hexdump(s).mkString("\n") + def assertEqualStrings(expected: String)(actual: String) = + assert(expected == actual, s"Expected:\n${dump(expected)}\nActual:\n${dump(actual)}") + private final val timeout = 60 * 1000L // wait a minute private implicit class `ref helper`[A](val r: Reference[A]) extends AnyVal { @@ -116,6 +128,9 @@ object AssertUtil { throw ae } + def assertCond[A](x: A)(pf: PartialFunction[A, Boolean]): Unit = assertTrue(PartialFunction.cond(x)(pf)) + def assertCondNot[A](x: A)(pf: PartialFunction[A, Boolean]): Unit = assertFalse(PartialFunction.cond(x)(pf)) + def assertFails[U](checkMessage: String => Boolean)(body: => U): Unit = assertThrows[AssertionError](body, checkMessage) /** JUnit-style assertion for `IterableLike.sameElements`. @@ -166,7 +181,7 @@ object AssertUtil { def assertZeroNetThreads(body: => Unit): Unit = { val group = new ThreadGroup("junit") try assertZeroNetThreads(group)(body) - finally group.destroy() + finally group.destroy(): @nowarn("cat=deprecation") // deprecated since JDK 16, will be removed } def assertZeroNetThreads[A](group: ThreadGroup)(body: => A): Try[A] = { val testDone = new CountDownLatch(1) @@ -232,6 +247,12 @@ object AssertUtil { * takes a long time, so long as we can verify progress. */ def waitForIt(terminated: => Boolean, progress: Progress = Fast, label: => String = "test"): Unit = { + def value: Option[Boolean] = if (terminated) Some(true) else None + assertTrue(waitFor(value, progress, label)) + } + /** Wait for a value or eventually throw. + */ + def waitFor[A](value: => Option[A], progress: Progress = Fast, label: => String = "test"): A = { val limit = 5 var n = 1 var (dormancy, factor) = progress match { @@ -239,14 +260,13 @@ object AssertUtil { case Fast => (250L, 4) } var period = 0L + var result: Option[A] = None var done = false - var ended = false while (!done && n < limit) { try { - ended = terminated - if (ended) { - done = true - } else { + result = value + done = result.nonEmpty + if (!done) { //println(s"Wait for test condition: $label") Thread.sleep(dormancy) period += dormancy @@ -257,7 +277,10 @@ object AssertUtil { n += 1 dormancy *= factor } - assertTrue(s"Expired after dormancy period $period waiting for termination condition $label", ended) + result match { + case Some(v) => v + case _ => fail(s"Expired after dormancy period $period waiting for termination condition $label") + } } /** How frequently to check a termination condition. 
*/ @@ -294,7 +317,7 @@ class NoTrace[A](body: => A) extends Runnable { case Success(a) => result = Some(a) case Failure(e) => synchronized { uncaught += ((Thread.currentThread, e)) } } - finally group.destroy() + finally group.destroy(): @nowarn("cat=deprecation") // deprecated since JDK 16, will be removed } private[testkit] lazy val errors: List[(Thread, Throwable)] = synchronized(uncaught.toList) diff --git a/src/testkit/scala/tools/testkit/BytecodeTesting.scala b/src/testkit/scala/tools/testkit/BytecodeTesting.scala index 21ca25c629aa..1f3b370f8be2 100644 --- a/src/testkit/scala/tools/testkit/BytecodeTesting.scala +++ b/src/testkit/scala/tools/testkit/BytecodeTesting.scala @@ -32,7 +32,7 @@ import scala.tools.testkit.ASMConverters._ trait BytecodeTesting extends ClearAfterClass { /** - * Overwrite to set additional compiler flags + * Override to set additional compiler flags. */ def compilerArgs = "" diff --git a/test/async/jvm/lazyval.scala b/test/async/jvm/lazyval.scala index 8ea6313d9559..d52f7e275821 100644 --- a/test/async/jvm/lazyval.scala +++ b/test/async/jvm/lazyval.scala @@ -6,9 +6,11 @@ package scala.async.run.lazyval { import org.junit.Test import org.junit.Assert._ + import scala.concurrent._ import scala.concurrent.duration._ import ExecutionContext.Implicits.global + import scala.collection.mutable.ListBuffer import scala.tools.testkit.async.Async.{async, await} object TestUtil { import language.implicitConversions @@ -34,6 +36,34 @@ package scala.async.run.lazyval { assertEquals(43, result) } + + @Test + def localObject(): Unit = { + val result = block(async { + val log = ListBuffer[String]() + object O { + log += "O" + } + await(1) + O + await(1) + O + var i = 0 + while (i <= 2) { + object W { + log += "W(" + i + ")" + } + await(1) + W + await(1) + W + i += 1 + } + log.mkString(",") + }) + + assertEquals("O,W(0),W(1),W(2)", result) + } } } diff --git a/test/benchmarks/README.md b/test/benchmarks/README.md index 45f8e142be9e..71d0462889d4 100644 --- a/test/benchmarks/README.md +++ b/test/benchmarks/README.md @@ -3,50 +3,62 @@ This directory is used by the `bench` subproject of the Scala sbt build. It makes use of the [sbt plugin](https://github.com/ktoso/sbt-jmh) for [JMH](https://openjdk.java.net/projects/code-tools/jmh/). -## Running a benchmark +## About the benchmarks -Benchmarks are built with the bootstrap compiler ("starr") using the library built from the `library` project ("quick"). -If you want to test compiler changes you need to bootstrap with the new compiler. +Benchmarks are built with the reference compiler ("starr") using the library built from the `library` project ("quick"). +If you want to test compiler changes you need to bootstrap a new compiler. -You'll then need to know the fully-qualified name of the benchmark runner class. -The benchmarking classes are organized under `src/main/scala`, +The benchmarking classes are organized under `test/benchmarks/src/main/scala`, in the same package hierarchy as the classes that they test. -Assuming that we're benchmarking `scala.collection.mutable.OpenHashMap`, -the benchmark runner would likely be named `scala.collection.mutable.OpenHashMapRunner`. -Using this example, one would simply run - bench/jmh:runMain scala.collection.mutable.OpenHashMapRunner +The benchmarking classes use the same package hierarchy as the classes that they test +in order to make it easy to expose members of the class under test in package-private scope, +should that be necessary for benchmarking. 
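For orientation, here is a minimal sketch of a benchmark in this layout (the class name, package, and
parameter values below are illustrative, not part of this change):

```scala
package scala.collection.mutable // same package as the class under test

import java.util.concurrent.TimeUnit

import org.openjdk.jmh.annotations._
import org.openjdk.jmh.infra.Blackhole

@BenchmarkMode(Array(Mode.AverageTime))
@OutputTimeUnit(TimeUnit.NANOSECONDS)
@State(Scope.Benchmark)
class ExampleBenchmark {
  @Param(Array("10", "1000"))
  var size: Int = _

  var buffer: ArrayBuffer[Int] = _

  @Setup(Level.Trial) def init(): Unit =
    buffer = ArrayBuffer.tabulate(size)(identity)

  // measure a single operation on the class under test
  @Benchmark def sum(bh: Blackhole): Unit = bh.consume(buffer.sum)
}
```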
-in the Scala sbt build. +There are two types of classes in the source directory: +those suffixed `Benchmark`, and a few that are suffixed `Runner`. +(The latter are described below, under "Custom runners".) -The JMH results can be found under `../../target/jmh-results/` (i.e. the main Scala build's `target`, -not the one that contains the benchmark class files). `jmh-results` gets deleted on an sbt `bench/clean`, -so you should copy these files out of `target` if you wish to preserve them. +## Running a normal benchmark -## Creating a benchmark and runner +Use `bench/Jmh/run` and provide the fully qualified name of the benchmark +class: -The benchmarking classes use the same package hierarchy as the classes that they test -in order to make it easy to expose, in package scope, members of the class under test, -should that be necessary for benchmarking. + bench/Jmh/run scala.collection.mutable.ListBufferBenchmark -There are two types of classes in the source directory: -those suffixed `Benchmark` and those suffixed `Runner`. -The former are benchmarks that can be run directly using `bench/jmh:run`; -however, they are normally run from a corresponding class of the latter type, -which is run using `bench/jmh:runMain` (as described above). -This …`Runner` class is useful for setting appropriate JMH command options, +Results are printed to standard output. + +## Custom runners + +Some benchmarks have custom runners. A custom runner +can be useful for setting appropriate JMH command options, and for processing the JMH results into files that can be read by other tools, such as Gnuplot. -The `benchmark.JmhRunner` trait should be woven into any runner class, for the standard behavior that it provides. +Assuming that we're benchmarking `scala.collection.mutable.OpenHashMap`, +the custom runner (if there is one) would likely be named +`scala.collection.mutable.OpenHashMapRunner`. +Using this example, one would run + + bench/Jmh/runMain scala.collection.mutable.OpenHashMapRunner + +in the Scala sbt build. + +Custom runner results are written to `../../target/jmh-results/` (i.e. the main Scala build's `target`, +not the one that contains the benchmark class files). `jmh-results` gets deleted on an sbt `bench/clean`, +so you should copy these files out of `target` if you wish to preserve them. + +If you want to make your own custom runner, extend the `benchmark.JmhRunner` trait, for the standard behavior that it provides. This includes creating output files in a subdirectory of `target/jmh-results` derived from the fully-qualified package name of the `Runner` class. ## Some useful HotSpot options -Adding these to the `jmh:run` or `jmh:runMain` command line may help if you're using the HotSpot (Oracle, OpenJDK) compiler. + +Adding these to the `Jmh/run` or `Jmh/runMain` command line may help if you're using the HotSpot (Oracle, OpenJDK) compiler. They require prefixing with `-jvmArgs`. -See [the Java documentation](https://docs.oracle.com/javase/8/docs/technotes/tools/unix/java.html) for more options. +See [the Java documentation](https://docs.oracle.com/javase/8/docs/technotes/tools/unix/java.html) for more options. ### Viewing JIT compilation events + Adding `-XX:+PrintCompilation` shows when Java methods are being compiled or deoptimized. At the most basic level, these messages will tell you whether the code that you're measuring is still being tuned, @@ -54,16 +66,20 @@ so that you know whether you're running enough warm-up iterations. 
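For example (the benchmark name here is illustrative), the flag is passed through JMH like any other JVM argument:

    bench/Jmh/run scala.collection.mutable.ListBufferBenchmark -f1 -jvmArgs -XX:+PrintCompilation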
See [Kris Mok's notes](https://gist.github.com/rednaxelafx/1165804#file-notes-md) to interpret the output in detail. ### Consider GC events + If you're not explicitly performing `System.gc()` calls outside of your benchmarking code, you should add the JVM option `-verbose:gc` to understand the effect that GCs may be having on your tests. ### "Diagnostic" options + These require the `-XX:+UnlockDiagnosticVMOptions` JVM option. #### Viewing inlining events + Add `-XX:+PrintInlining`. #### Viewing the disassembled code + If you're running OpenJDK or Oracle JVM, you may need to install the disassembler library (`hsdis-amd64.so` for the `amd64` architecture). In Debian, this is available in @@ -84,16 +100,16 @@ To show it for _all_ methods, add `-XX:+PrintAssembly`. ### Using JITWatch -[JITWatch](https://github.com/AdoptOpenJDK/jitwatch) is useful to understand how the JVM has JIT compiled +[JITWatch](https://github.com/AdoptOpenJDK/jitwatch) is useful to understand how the JVM has JIT-compiled code. If you install `hsdis`, as described above, machine code disassembly is also created. You can generate the `hotspot.log` file for a benchmark run by adding the [required JVM options](https://github.com/AdoptOpenJDK/jitwatch/wiki/Building-hsdis) -to JMH benchmark execution: +to JMH benchmark execution: ``` -sbt:root> bench/jmh:run scala.collection.mutable.ArrayOpsBenchmark.insertInteger -psize=1000 -f1 -jvmArgs -XX:+UnlockDiagnosticVMOptions -jvmArgs -XX:+TraceClassLoading -jvmArgs -XX:+LogCompilation -jvmArgs -XX:LogFile=target/hotspot.log -jvmArgs -XX:+PrintAssembly +sbt:root> bench/Jmh/run scala.collection.mutable.ArrayOpsBenchmark.insertInteger -psize=1000 -f1 -jvmArgs -XX:+UnlockDiagnosticVMOptions -jvmArgs -XX:+TraceClassLoading -jvmArgs -XX:+LogCompilation -jvmArgs -XX:LogFile=target/hotspot.log -jvmArgs -XX:+PrintAssembly ... [info] Loaded disassembler from /Users/jz/.jabba/jdk/1.8.172/Contents/Home/jre/lib/hsdis-amd64.dylib [info] Decoding compiled method 0x0000000113f60bd0: @@ -114,7 +130,7 @@ sbt:root> bench/jmh:run scala.collection.mutable.ArrayOpsBenchmark.insertInteger JITWatch requires configuration of the class and source path. We generate that with a custom task in our build: ``` -sbt> bench/jmh:jitwatchConfigFile +sbt> bench/Jmh/jitwatchConfigFile ... jmh ... @@ -128,6 +144,7 @@ sbt> ^C Follow instructions in the output above and start gleaning insights! 
## Useful reading + * [OpenJDK advice on microbenchmarks](https://wiki.openjdk.java.net/display/HotSpot/MicroBenchmarks) * Brian Goetz's "Java theory and practice" articles: * "[Dynamic compilation and performance measurement](https://www.ibm.com/developerworks/java/library/j-jtp12214/)" diff --git a/test/benchmarks/project/build.properties b/test/benchmarks/project/build.properties deleted file mode 100644 index e67343ae796c..000000000000 --- a/test/benchmarks/project/build.properties +++ /dev/null @@ -1 +0,0 @@ -sbt.version=1.5.0 diff --git a/test/benchmarks/project/plugins.sbt b/test/benchmarks/project/plugins.sbt deleted file mode 100644 index b57429f738ec..000000000000 --- a/test/benchmarks/project/plugins.sbt +++ /dev/null @@ -1 +0,0 @@ -addSbtPlugin("pl.project13.scala" % "sbt-jmh" % "0.2.27") diff --git a/test/benchmarks/src/main/java/scala/reflect/internal/util/AlmostFinalValueBenchmarkStatics.java b/test/benchmarks/src/main/java/scala/reflect/internal/util/AlmostFinalValueBenchmarkStatics.java new file mode 100644 index 000000000000..966adedb44e1 --- /dev/null +++ b/test/benchmarks/src/main/java/scala/reflect/internal/util/AlmostFinalValueBenchmarkStatics.java @@ -0,0 +1,12 @@ +package scala.reflect.internal.util; + +import java.lang.invoke.MethodHandle; + +final class AlmostFinalValueBenchmarkStatics { + static final boolean STATIC_FINAL_FALSE = false; + + private static final AlmostFinalValue ALMOST_FINAL_FALSE = new AlmostFinalValue(); + private static final MethodHandle ALMOST_FINAL_FALSE_GETTER = ALMOST_FINAL_FALSE.invoker; + + static boolean isTrue() throws Throwable { return (boolean) ALMOST_FINAL_FALSE_GETTER.invokeExact(); } +} diff --git a/test/benchmarks/src/main/scala/scala/BitManipulationBenchmark.scala b/test/benchmarks/src/main/scala/scala/collection/BitManipulationBenchmark.scala similarity index 100% rename from test/benchmarks/src/main/scala/scala/BitManipulationBenchmark.scala rename to test/benchmarks/src/main/scala/scala/collection/BitManipulationBenchmark.scala diff --git a/test/benchmarks/src/main/scala/scala/collection/immutable/ArraySeqBenchmark.scala b/test/benchmarks/src/main/scala/scala/collection/immutable/ArraySeqBenchmark.scala index 0be14aab4ce8..f5d8e6361df5 100644 --- a/test/benchmarks/src/main/scala/scala/collection/immutable/ArraySeqBenchmark.scala +++ b/test/benchmarks/src/main/scala/scala/collection/immutable/ArraySeqBenchmark.scala @@ -1,8 +1,6 @@ package scala.collection.immutable import java.util.concurrent.TimeUnit -import java.util.Arrays - import org.openjdk.jmh.annotations._ import org.openjdk.jmh.infra.Blackhole @@ -21,12 +19,14 @@ class ArraySeqBenchmark { var size: Int = _ var integersS: ArraySeq[Int] = _ var stringsS: ArraySeq[String] = _ + var newS: Array[String] = _ @Setup(Level.Trial) def initNumbers: Unit = { val integers = (1 to size).toList val strings = integers.map(_.toString) integersS = ArraySeq.unsafeWrapArray(integers.toArray) stringsS = ArraySeq.unsafeWrapArray(strings.toArray) + newS = Array("a", "b", "c", "d", "e", "f") } @Benchmark def sortedStringOld(bh: Blackhole): Unit = @@ -50,7 +50,7 @@ class ArraySeqBenchmark { private[this] def oldSorted[A](seq: ArraySeq[A])(implicit ord: Ordering[A], tag: ClassTag[A]): ArraySeq[A] = { val len = seq.length val b = ArraySeq.newBuilder[A](tag) - if (len == 1) b ++= seq.toIterable + if (len == 1) b ++= seq else if (len > 1) { b.sizeHint(len) val arr = new Array[AnyRef](len) @@ -68,4 +68,25 @@ class ArraySeqBenchmark { } b.result() } -} + + // newS is used to avoid allocating 
Strings, while still performing some sort of "mapping". + + @Benchmark def mapSOld(): ArraySeq[AnyRef] = + oldMap(stringsS)(x => newS(x.length)) + + @Benchmark def mapSNew(): ArraySeq[AnyRef] = + stringsS.map(x => newS(x.length)) + + // Mapping an ArraySeq.ofInt results in an ArraySeq.ofRef containing java.lang.Integers. + // Boxing small integers doesn't result in allocations thus the choice of _ & 0xf as the mapping function. + + @Benchmark def mapIOld(): ArraySeq[Int] = + oldMap(integersS)(_ & 0xf) + + @Benchmark def mapINew(): ArraySeq[Int] = + integersS.map(_ & 0xf) + + private def oldMap[A, B](seq: ArraySeq[A])(f: A => B): ArraySeq[B] = + seq.iterableFactory.tabulate(seq.length)(i => f(seq.apply(i))) + +} \ No newline at end of file diff --git a/test/benchmarks/src/main/scala/scala/collection/immutable/ListBenchmark.scala b/test/benchmarks/src/main/scala/scala/collection/immutable/ListBenchmark.scala index 8a44778fae12..c00b2d6be80d 100644 --- a/test/benchmarks/src/main/scala/scala/collection/immutable/ListBenchmark.scala +++ b/test/benchmarks/src/main/scala/scala/collection/immutable/ListBenchmark.scala @@ -24,6 +24,8 @@ class ListBenchmark { var mid: Content = _ var last: Content = _ var replacement: Content = _ + var firstHalf: List[Content] = _ + var lastHalf: List[Content] = _ @Setup(Level.Trial) def initKeys(): Unit = { @@ -31,6 +33,8 @@ class ListBenchmark { mid = Content(size / 2) last = Content(Math.max(0,size -1)) replacement = Content(size * 2 + 1) + firstHalf = values.take(size / 2) + lastHalf = values.drop(size / 2) } @Benchmark def filter_includeAll: Any = { @@ -86,4 +90,28 @@ class ListBenchmark { @Benchmark def partition_exc_last: Any = { values.partition(v => v.value != last.value) } + + @Benchmark def diff_single_mid: Any = { + values.diff(List(mid)) + } + + @Benchmark def diff_single_last: Any = { + values.diff(List(last)) + } + + @Benchmark def diff_notIncluded: Any = { + values.diff(List(Content(-1))) + } + + @Benchmark def diff_identical: Any = { + values.diff(values) + } + + @Benchmark def diff_first_half: Any = { + values.diff(firstHalf) + } + + @Benchmark def diff_last_half: Any = { + values.diff(lastHalf) + } } diff --git a/test/benchmarks/src/main/scala/scala/collection/mutable/ArrayBufferBenchmark.scala b/test/benchmarks/src/main/scala/scala/collection/mutable/ArrayBufferBenchmark.scala new file mode 100644 index 000000000000..7da2ea9f0d37 --- /dev/null +++ b/test/benchmarks/src/main/scala/scala/collection/mutable/ArrayBufferBenchmark.scala @@ -0,0 +1,151 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.collection.mutable + +import java.util.concurrent.TimeUnit + +import org.openjdk.jmh.annotations._ +import org.openjdk.jmh.infra._ + +@BenchmarkMode(Array(Mode.AverageTime)) +@Fork(2) +@Threads(1) +@Warmup(iterations = 15) +@Measurement(iterations = 15) +@OutputTimeUnit(TimeUnit.NANOSECONDS) +@State(Scope.Benchmark) +class ArrayBufferBenchmark { + @Param(Array(/*"0", "1",*/ "10", "100", "1000", "10000")) + var size: Int = _ + + var ref : ArrayBuffer[Int] = _ + var set : scala.collection.immutable.Set[Int] = _ + var list: List[Int] = _ + + @Setup(Level.Trial) def init: Unit = { + ref = new ArrayBuffer + for (i <- 0 until size) ref += i + set = ref.toSet + list = ref.toList + } + + @Benchmark def filterInPlace(bh: Blackhole): Unit = { + val b = ref.clone() + b.filterInPlace(_ % 2 == 0) + bh.consume(b) + } + + @Benchmark def update(bh: Blackhole): Unit = { + val b = ref.clone() + var i = 0 + while (i < size) { + b.update(i, -1) + i += 2 + } + bh.consume(b) + } + + // append `ArrayBuffer` + @Benchmark def addAll1(bh: Blackhole): Unit = { + val b1 = ref.clone() + val b2 = ref.clone() + b1.addAll(b2) + bh.consume(b1) + } + + // append `Iterable` with known size + @Benchmark def addAll2(bh: Blackhole): Unit = { + val b = ref.clone() + b.addAll(set) + bh.consume(b) + } + + // append `Iterable` without known size + @Benchmark def addAll3(bh: Blackhole): Unit = { + val b = ref.clone() + b.addAll(list) + bh.consume(b) + } + + // append `IterableOnce` without known size + @Benchmark def addAll4(bh: Blackhole): Unit = { + val b = ref.clone() + b.addAll(list.iterator) + bh.consume(b) + } + + // insert `ArrayBuffer` + @Benchmark def insertAll1(bh: Blackhole): Unit = { + val b1 = ref.clone() + val b2 = ref.clone() + b1.insertAll(size / 2, b2) + bh.consume(b1) + } + + // insert `Iterable` with known size + @Benchmark def insertAll2(bh: Blackhole): Unit = { + val b = ref.clone() + b.insertAll(size / 2, set) + bh.consume(b) + } + + // insert `Iterable` without known size + @Benchmark def insertAll3(bh: Blackhole): Unit = { + val b = ref.clone() + b.insertAll(size / 2, list) + bh.consume(b) + } + + // insert `IterableOnce` without known size + @Benchmark def insertAll4(bh: Blackhole): Unit = { + val b = ref.clone() + b.insertAll(size / 2, list.iterator) + bh.consume(b) + } + + @Benchmark def flatMapInPlace1(bh: Blackhole): Unit = { + val b = ref.clone() + val seq = scala.Seq(0, 0) + b.flatMapInPlace { _ => seq } + bh.consume(b) + } + + @Benchmark def iteratorA(bh: Blackhole): Unit = { + val b = ref.clone() + var n = 0 + for (x <- b.iterator) n += x + bh.consume(n) + bh.consume(b) + } + + @Benchmark def iteratorB(bh: Blackhole): Unit = { + val b = ref.clone() + bh.consume(b.iterator.toVector) + bh.consume(b) + } + + @Benchmark def reverseIteratorA(bh: Blackhole): Unit = { + val b = ref.clone() + var n = 0 + for (x <- b.reverseIterator) n += x + bh.consume(n) + bh.consume(b) + } + + @Benchmark def reverseIteratorB(bh: Blackhole): Unit = { + val b = ref.clone() + bh.consume(b.reverseIterator.toVector) + bh.consume(b) + } + +} diff --git a/test/benchmarks/src/main/scala/scala/math/BigIntEulerProblem15Benchmark.scala b/test/benchmarks/src/main/scala/scala/math/BigIntEulerProblem15Benchmark.scala new file mode 100644 index 000000000000..690c078ec2f7 --- /dev/null +++ b/test/benchmarks/src/main/scala/scala/math/BigIntEulerProblem15Benchmark.scala @@ -0,0 +1,29 @@ +package scala.math + +import java.util.concurrent.TimeUnit + +import org.openjdk.jmh.annotations._ +import 
org.openjdk.jmh.infra.Blackhole + +@BenchmarkMode(Array(Mode.AverageTime)) +@Fork(2) +@Threads(1) +@Warmup(iterations = 10) +@Measurement(iterations = 10) +@OutputTimeUnit(TimeUnit.NANOSECONDS) +@State(Scope.Benchmark) +class BigIntEulerProblem15Benchmark { + + @Param(Array("5", "10", "15", "20", "25", "30", "35", "40", "45", "50", "55", + "60", "65", "70", "75", "80", "85", "90", "95", "100")) + var size: Int = _ + + @Benchmark + def eulerProblem15(bh: Blackhole): Unit = { + def f(row: Array[BigInt], c: Int): BigInt = + if (c == 0) row.last else f(row.scan(BigInt(0))(_ + _), c - 1) + def computeAnswer(n: Int): BigInt = f(Array.fill(n + 1)(BigInt(1)), n) + bh.consume(computeAnswer(size)) + } + +} diff --git a/test/benchmarks/src/main/scala/scala/math/BigIntFactorialBenchmark.scala b/test/benchmarks/src/main/scala/scala/math/BigIntFactorialBenchmark.scala new file mode 100644 index 000000000000..0aaa18c029e1 --- /dev/null +++ b/test/benchmarks/src/main/scala/scala/math/BigIntFactorialBenchmark.scala @@ -0,0 +1,30 @@ +package scala.math + +import java.util.concurrent.TimeUnit + +import org.openjdk.jmh.annotations._ +import org.openjdk.jmh.infra.Blackhole + +import scala.annotation.tailrec + +@BenchmarkMode(Array(Mode.AverageTime)) +@Fork(2) +@Threads(1) +@Warmup(iterations = 10) +@Measurement(iterations = 10) +@OutputTimeUnit(TimeUnit.NANOSECONDS) +@State(Scope.Benchmark) +class BigIntFactorialBenchmark { + + @Param(Array("5", "10", "15", "20", "25", "30", "35", "40", "45", "50", "55", + "60", "65", "70", "75", "80", "85", "90", "95", "100")) + var size: Int = _ + + @Benchmark + def factorial(bh: Blackhole): Unit = { + @tailrec def fact(i: Int, n: Int, prev: BigInt): BigInt = + if (i > n) prev else fact(i + 1, n, prev * i) + bh.consume(fact(1, size, BigInt(1))) + } + +} diff --git a/test/benchmarks/src/main/scala/scala/math/BigIntRSABenchmark.scala b/test/benchmarks/src/main/scala/scala/math/BigIntRSABenchmark.scala new file mode 100644 index 000000000000..4c93f324e0bd --- /dev/null +++ b/test/benchmarks/src/main/scala/scala/math/BigIntRSABenchmark.scala @@ -0,0 +1,32 @@ +package scala.math + +import java.util.concurrent.TimeUnit + +import org.openjdk.jmh.annotations._ +import org.openjdk.jmh.infra._ + +@BenchmarkMode(Array(Mode.AverageTime)) +@Fork(2) +@Threads(1) +@Warmup(iterations = 10) +@Measurement(iterations = 10) +@OutputTimeUnit(TimeUnit.NANOSECONDS) +@State(Scope.Benchmark) +class BigIntRSABenchmark { + + @Benchmark + def encodeDecode(bh: Blackhole): Unit = { + // private key + val d = BigInt("5617843187844953170308463622230283376298685") + // public key + val n = BigInt("9516311845790656153499716760847001433441357") + val e = 65537 + + // concatenation of "Scala is great" + val plaintext = BigInt("83099097108097032105115032103114101097116") + val ciphertext = plaintext.modPow(e, n) + val recoveredtext = ciphertext.modPow(d, n) + bh.consume(plaintext == recoveredtext) + } + +} diff --git a/test/benchmarks/src/main/scala/reflect/internal/LubBenchmark.scala b/test/benchmarks/src/main/scala/scala/reflect/internal/LubBenchmark.scala similarity index 100% rename from test/benchmarks/src/main/scala/reflect/internal/LubBenchmark.scala rename to test/benchmarks/src/main/scala/scala/reflect/internal/LubBenchmark.scala diff --git a/test/benchmarks/src/main/scala/reflect/internal/SymbolBenchmark.scala b/test/benchmarks/src/main/scala/scala/reflect/internal/SymbolBenchmark.scala similarity index 100% rename from test/benchmarks/src/main/scala/reflect/internal/SymbolBenchmark.scala rename to 
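(Aside on BigIntEulerProblem15Benchmark above: each `scan` is the dynamic-programming step that advances one column of the grid of monotone lattice paths, so the result for a given `size` n is the central binomial coefficient C(2n, n). A cross-check sketch follows; the `EulerProblem15Check` object and its `binomial` helper are hypothetical and not part of this patch.)

object EulerProblem15Check {
  // Multiplicative form of C(n, k); every intermediate division is exact.
  def binomial(n: Int, k: Int): BigInt =
    (1 to k).foldLeft(BigInt(1))((acc, i) => acc * (n - k + i) / i)

  // Same computation as the benchmark body.
  def pathsViaScan(n: Int): BigInt = {
    def f(row: Array[BigInt], c: Int): BigInt =
      if (c == 0) row.last else f(row.scan(BigInt(0))(_ + _), c - 1)
    f(Array.fill(n + 1)(BigInt(1)), n)
  }

  def main(args: Array[String]): Unit =
    for (n <- 1 to 10) assert(pathsViaScan(n) == binomial(2 * n, n), s"mismatch at n=$n")
}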
test/benchmarks/src/main/scala/scala/reflect/internal/SymbolBenchmark.scala diff --git a/test/benchmarks/src/main/scala/scala/reflect/internal/util/AlmostFinalValueBenchmark.scala b/test/benchmarks/src/main/scala/scala/reflect/internal/util/AlmostFinalValueBenchmark.scala new file mode 100644 index 000000000000..70d69178cb19 --- /dev/null +++ b/test/benchmarks/src/main/scala/scala/reflect/internal/util/AlmostFinalValueBenchmark.scala @@ -0,0 +1,56 @@ +package scala.reflect.internal.util + +import java.util.concurrent.TimeUnit + +import org.openjdk.jmh.annotations._ +import org.openjdk.jmh.infra.Blackhole + +class AlmostFinalValueBenchSettings extends scala.reflect.runtime.Settings { + val flag = new BooleanSetting(false) + + @inline final def isTrue2: Boolean = AlmostFinalValueBenchmarkStatics.isTrue && flag +} + +object AlmostFinalValueBenchSettings { + implicit class SettingsOps(private val settings: AlmostFinalValueBenchSettings) extends AnyVal { + @inline final def isTrue3: Boolean = AlmostFinalValueBenchmarkStatics.isTrue && settings.flag + } + + @inline def isTrue4(settings: AlmostFinalValueBenchSettings): Boolean = + AlmostFinalValueBenchmarkStatics.isTrue && settings.flag +} + +@Warmup(iterations = 10, time = 1, timeUnit = TimeUnit.SECONDS) +@Measurement(iterations = 10, time = 1, timeUnit = TimeUnit.SECONDS) +@Fork(3) +@BenchmarkMode(Array(Mode.AverageTime)) +@OutputTimeUnit(TimeUnit.NANOSECONDS) +@State(Scope.Benchmark) +class AlmostFinalValueBenchmark { + import AlmostFinalValueBenchmarkStatics.STATIC_FINAL_FALSE + val settings = new AlmostFinalValueBenchSettings(); import settings._ + + private def pretendToWorkHard() = Blackhole.consumeCPU(3) + + @Benchmark def bench0_unit = () + @Benchmark def bench0_usingStaticFinalFalse = if (STATIC_FINAL_FALSE && flag) pretendToWorkHard() + @Benchmark def bench0_workingHard = pretendToWorkHard() + + @Benchmark def bench1_usingAlmostFinalFalse = if (AlmostFinalValueBenchmarkStatics.isTrue && flag) pretendToWorkHard() + @Benchmark def bench2_usingInlineMethod = if (settings.isTrue2) pretendToWorkHard() + @Benchmark def bench3_usingExtMethod = if (settings.isTrue3) pretendToWorkHard() + @Benchmark def bench4_usingObjectMethod = if (AlmostFinalValueBenchSettings.isTrue4(settings)) pretendToWorkHard() + +/* + This benchmark is measuring two things: + 1. verifying that using AlmostFinalValue in an if block makes the block a no-op + 2. verifying and comparing which ergonomic wrapper around AlmostFinalValue maintains that + + The first point is satisfied. + + For the second: + 1. inline instance methods add a null-check overhead, slowing it down + 2. extension methods perform as quickly, are very ergonomic and so are the best choice + 3. 
object methods also perform as quickly, but can be less ergonomic if it requires an import +*/ +} diff --git a/test/benchmarks/src/main/scala/scala/tools/nsc/transform/patmat/ClassMatchBenchmark.scala b/test/benchmarks/src/main/scala/scala/tools/nsc/transform/patmat/ClassMatchBenchmark.scala new file mode 100644 index 000000000000..fd1f2c681239 --- /dev/null +++ b/test/benchmarks/src/main/scala/scala/tools/nsc/transform/patmat/ClassMatchBenchmark.scala @@ -0,0 +1,1127 @@ +package scala.tools.nsc.transform.patmat + +import java.util.concurrent.TimeUnit +import org.openjdk.jmh.annotations.CompilerControl.Mode.DONT_INLINE +import org.openjdk.jmh.annotations._ +import org.openjdk.jmh.infra.Blackhole + +import scala.annotation.switch +import scala.util.Random + +@Warmup(iterations = 10, time = 1, timeUnit = TimeUnit.SECONDS) +@Measurement(iterations = 10, time = 1, timeUnit = TimeUnit.SECONDS) +@Fork(3) +@BenchmarkMode(Array(Mode.AverageTime)) +@OutputTimeUnit(TimeUnit.NANOSECONDS) +@State(Scope.Benchmark) +class ClassMatchBenchmark { + private final val count = 10000 + @Param(Array("4", "8", "16", "32", "64", "128", "256")) private var numCases = 0 + + private var names: Array[Name] = null + private var classValue: ClassValue[Int] = null + + @Setup def setup(): Unit = { + val r = new Random(12345) + val names = Array[Name]( + Name0(), Name1(), Name2(), Name3(), Name4(), Name5(), Name6(), Name7(), Name8(), Name9(), + Name10(), Name11(), Name12(), Name13(), Name14(), Name15(), Name16(), Name17(), Name18(), Name19(), + Name20(), Name21(), Name22(), Name23(), Name24(), Name25(), Name26(), Name27(), Name28(), Name29(), + Name30(), Name31(), Name32(), Name33(), Name34(), Name35(), Name36(), Name37(), Name38(), Name39(), + Name40(), Name41(), Name42(), Name43(), Name44(), Name45(), Name46(), Name47(), Name48(), Name49(), + Name50(), Name51(), Name52(), Name53(), Name54(), Name55(), Name56(), Name57(), Name58(), Name59(), + Name60(), Name61(), Name62(), Name63(), Name64(), Name65(), Name66(), Name67(), Name68(), Name69(), + Name70(), Name71(), Name72(), Name73(), Name74(), Name75(), Name76(), Name77(), Name78(), Name79(), + Name80(), Name81(), Name82(), Name83(), Name84(), Name85(), Name86(), Name87(), Name88(), Name89(), + Name90(), Name91(), Name92(), Name93(), Name94(), Name95(), Name96(), Name97(), Name98(), Name99(), + Name100(), Name101(), Name102(), Name103(), Name104(), Name105(), Name106(), Name107(), Name108(), Name109(), + Name110(), Name111(), Name112(), Name113(), Name114(), Name115(), Name116(), Name117(), Name118(), Name119(), + Name120(), Name121(), Name122(), Name123(), Name124(), Name125(), Name126(), Name127(), Name128(), Name129(), + Name130(), Name131(), Name132(), Name133(), Name134(), Name135(), Name136(), Name137(), Name138(), Name139(), + Name140(), Name141(), Name142(), Name143(), Name144(), Name145(), Name146(), Name147(), Name148(), Name149(), + Name150(), Name151(), Name152(), Name153(), Name154(), Name155(), Name156(), Name157(), Name158(), Name159(), + Name160(), Name161(), Name162(), Name163(), Name164(), Name165(), Name166(), Name167(), Name168(), Name169(), + Name170(), Name171(), Name172(), Name173(), Name174(), Name175(), Name176(), Name177(), Name178(), Name179(), + Name180(), Name181(), Name182(), Name183(), Name184(), Name185(), Name186(), Name187(), Name188(), Name189(), + Name190(), Name191(), Name192(), Name193(), Name194(), Name195(), Name196(), Name197(), Name198(), Name199(), + Name200(), Name201(), Name202(), Name203(), Name204(), Name205(), Name206(), 
Name207(), Name208(), Name209(), + Name210(), Name211(), Name212(), Name213(), Name214(), Name215(), Name216(), Name217(), Name218(), Name219(), + Name220(), Name221(), Name222(), Name223(), Name224(), Name225(), Name226(), Name227(), Name228(), Name229(), + Name230(), Name231(), Name232(), Name233(), Name234(), Name235(), Name236(), Name237(), Name238(), Name239(), + Name240(), Name241(), Name242(), Name243(), Name244(), Name245(), Name246(), Name247(), Name248(), Name249(), + Name250(), Name251(), Name252(), Name253(), Name254(), Name255(), + ) + this.names = Array.fill(count)(names(r.nextInt(numCases))) + this.classValue = new NameClassValue + } + + @Benchmark @OperationsPerInvocation(count) def patmatShow(bh: Blackhole): Unit = { + val names = this.names + var i = 0 + while (i < names.length) { + val x = names(i) match { + case Name0() => "0" case Name1() => "1" case Name2() => "2" case Name3() => "3" case Name4() => "4" + case Name5() => "5" case Name6() => "6" case Name7() => "7" case Name8() => "8" case Name9() => "9" + case Name10() => "10" case Name11() => "11" case Name12() => "12" case Name13() => "13" case Name14() => "14" + case Name15() => "15" case Name16() => "16" case Name17() => "17" case Name18() => "18" case Name19() => "19" + case Name20() => "20" case Name21() => "21" case Name22() => "22" case Name23() => "23" case Name24() => "24" + case Name25() => "25" case Name26() => "26" case Name27() => "27" case Name28() => "28" case Name29() => "29" + case Name30() => "30" case Name31() => "31" case Name32() => "32" case Name33() => "33" case Name34() => "34" + case Name35() => "35" case Name36() => "36" case Name37() => "37" case Name38() => "38" case Name39() => "39" + case Name40() => "40" case Name41() => "41" case Name42() => "42" case Name43() => "43" case Name44() => "44" + case Name45() => "45" case Name46() => "46" case Name47() => "47" case Name48() => "48" case Name49() => "49" + case Name50() => "50" case Name51() => "51" case Name52() => "52" case Name53() => "53" case Name54() => "54" + case Name55() => "55" case Name56() => "56" case Name57() => "57" case Name58() => "58" case Name59() => "59" + case Name60() => "60" case Name61() => "61" case Name62() => "62" case Name63() => "63" case Name64() => "64" + case Name65() => "65" case Name66() => "66" case Name67() => "67" case Name68() => "68" case Name69() => "69" + case Name70() => "70" case Name71() => "71" case Name72() => "72" case Name73() => "73" case Name74() => "74" + case Name75() => "75" case Name76() => "76" case Name77() => "77" case Name78() => "78" case Name79() => "79" + case Name80() => "80" case Name81() => "81" case Name82() => "82" case Name83() => "83" case Name84() => "84" + case Name85() => "85" case Name86() => "86" case Name87() => "87" case Name88() => "88" case Name89() => "89" + case Name90() => "90" case Name91() => "91" case Name92() => "92" case Name93() => "93" case Name94() => "94" + case Name95() => "95" case Name96() => "96" case Name97() => "97" case Name98() => "98" case Name99() => "99" + case Name100() => "100" case Name101() => "101" case Name102() => "102" case Name103() => "103" case Name104() => "104" + case Name105() => "105" case Name106() => "106" case Name107() => "107" case Name108() => "108" case Name109() => "109" + case Name110() => "110" case Name111() => "111" case Name112() => "112" case Name113() => "113" case Name114() => "114" + case Name115() => "115" case Name116() => "116" case Name117() => "117" case Name118() => "118" case Name119() => "119" + case 
Name120() => "120" case Name121() => "121" case Name122() => "122" case Name123() => "123" case Name124() => "124" + case Name125() => "125" case Name126() => "126" case Name127() => "127" case Name128() => "128" case Name129() => "129" + case Name130() => "130" case Name131() => "131" case Name132() => "132" case Name133() => "133" case Name134() => "134" + case Name135() => "135" case Name136() => "136" case Name137() => "137" case Name138() => "138" case Name139() => "139" + case Name140() => "140" case Name141() => "141" case Name142() => "142" case Name143() => "143" case Name144() => "144" + case Name145() => "145" case Name146() => "146" case Name147() => "147" case Name148() => "148" case Name149() => "149" + case Name150() => "150" case Name151() => "151" case Name152() => "152" case Name153() => "153" case Name154() => "154" + case Name155() => "155" case Name156() => "156" case Name157() => "157" case Name158() => "158" case Name159() => "159" + case Name160() => "160" case Name161() => "161" case Name162() => "162" case Name163() => "163" case Name164() => "164" + case Name165() => "165" case Name166() => "166" case Name167() => "167" case Name168() => "168" case Name169() => "169" + case Name170() => "170" case Name171() => "171" case Name172() => "172" case Name173() => "173" case Name174() => "174" + case Name175() => "175" case Name176() => "176" case Name177() => "177" case Name178() => "178" case Name179() => "179" + case Name180() => "180" case Name181() => "181" case Name182() => "182" case Name183() => "183" case Name184() => "184" + case Name185() => "185" case Name186() => "186" case Name187() => "187" case Name188() => "188" case Name189() => "189" + case Name190() => "190" case Name191() => "191" case Name192() => "192" case Name193() => "193" case Name194() => "194" + case Name195() => "195" case Name196() => "196" case Name197() => "197" case Name198() => "198" case Name199() => "199" + case Name200() => "200" case Name201() => "201" case Name202() => "202" case Name203() => "203" case Name204() => "204" + case Name205() => "205" case Name206() => "206" case Name207() => "207" case Name208() => "208" case Name209() => "209" + case Name210() => "210" case Name211() => "211" case Name212() => "212" case Name213() => "213" case Name214() => "214" + case Name215() => "215" case Name216() => "216" case Name217() => "217" case Name218() => "218" case Name219() => "219" + case Name220() => "220" case Name221() => "221" case Name222() => "222" case Name223() => "223" case Name224() => "224" + case Name225() => "225" case Name226() => "226" case Name227() => "227" case Name228() => "228" case Name229() => "229" + case Name230() => "230" case Name231() => "231" case Name232() => "232" case Name233() => "233" case Name234() => "234" + case Name235() => "235" case Name236() => "236" case Name237() => "237" case Name238() => "238" case Name239() => "239" + case Name240() => "240" case Name241() => "241" case Name242() => "242" case Name243() => "243" case Name244() => "244" + case Name245() => "245" case Name246() => "246" case Name247() => "247" case Name248() => "248" case Name249() => "249" + case Name250() => "250" case Name251() => "251" case Name252() => "252" case Name253() => "253" case Name254() => "254" + case Name255() => "255" + } + bh.consume(x) + i += 1 + } + } + + @Benchmark @OperationsPerInvocation(count) def virtualShow(bh: Blackhole): Unit = { + val names = this.names + var i = 0 + while (i < names.length) { + bh.consume(names(i).virtualShow) + i += 1 + } + 
} + + @Benchmark @OperationsPerInvocation(count) def intSwitchShow(bh: Blackhole): Unit = { + val names = this.names + var i = 0 + while (i < names.length) { + val x = (names(i)._id: @switch) match { + case 0 => "0" case 1 => "1" case 2 => "2" case 3 => "3" case 4 => "4" + case 5 => "5" case 6 => "6" case 7 => "7" case 8 => "8" case 9 => "9" + case 10 => "10" case 11 => "11" case 12 => "12" case 13 => "13" case 14 => "14" + case 15 => "15" case 16 => "16" case 17 => "17" case 18 => "18" case 19 => "19" + case 20 => "20" case 21 => "21" case 22 => "22" case 23 => "23" case 24 => "24" + case 25 => "25" case 26 => "26" case 27 => "27" case 28 => "28" case 29 => "29" + case 30 => "30" case 31 => "31" case 32 => "32" case 33 => "33" case 34 => "34" + case 35 => "35" case 36 => "36" case 37 => "37" case 38 => "38" case 39 => "39" + case 40 => "40" case 41 => "41" case 42 => "42" case 43 => "43" case 44 => "44" + case 45 => "45" case 46 => "46" case 47 => "47" case 48 => "48" case 49 => "49" + case 50 => "50" case 51 => "51" case 52 => "52" case 53 => "53" case 54 => "54" + case 55 => "55" case 56 => "56" case 57 => "57" case 58 => "58" case 59 => "59" + case 60 => "60" case 61 => "61" case 62 => "62" case 63 => "63" case 64 => "64" + case 65 => "65" case 66 => "66" case 67 => "67" case 68 => "68" case 69 => "69" + case 70 => "70" case 71 => "71" case 72 => "72" case 73 => "73" case 74 => "74" + case 75 => "75" case 76 => "76" case 77 => "77" case 78 => "78" case 79 => "79" + case 80 => "80" case 81 => "81" case 82 => "82" case 83 => "83" case 84 => "84" + case 85 => "85" case 86 => "86" case 87 => "87" case 88 => "88" case 89 => "89" + case 90 => "90" case 91 => "91" case 92 => "92" case 93 => "93" case 94 => "94" + case 95 => "95" case 96 => "96" case 97 => "97" case 98 => "98" case 99 => "99" + case 100 => "100" case 101 => "101" case 102 => "102" case 103 => "103" case 104 => "104" + case 105 => "105" case 106 => "106" case 107 => "107" case 108 => "108" case 109 => "109" + case 110 => "110" case 111 => "111" case 112 => "112" case 113 => "113" case 114 => "114" + case 115 => "115" case 116 => "116" case 117 => "117" case 118 => "118" case 119 => "119" + case 120 => "120" case 121 => "121" case 122 => "122" case 123 => "123" case 124 => "124" + case 125 => "125" case 126 => "126" case 127 => "127" case 128 => "128" case 129 => "129" + case 130 => "130" case 131 => "131" case 132 => "132" case 133 => "133" case 134 => "134" + case 135 => "135" case 136 => "136" case 137 => "137" case 138 => "138" case 139 => "139" + case 140 => "140" case 141 => "141" case 142 => "142" case 143 => "143" case 144 => "144" + case 145 => "145" case 146 => "146" case 147 => "147" case 148 => "148" case 149 => "149" + case 150 => "150" case 151 => "151" case 152 => "152" case 153 => "153" case 154 => "154" + case 155 => "155" case 156 => "156" case 157 => "157" case 158 => "158" case 159 => "159" + case 160 => "160" case 161 => "161" case 162 => "162" case 163 => "163" case 164 => "164" + case 165 => "165" case 166 => "166" case 167 => "167" case 168 => "168" case 169 => "169" + case 170 => "170" case 171 => "171" case 172 => "172" case 173 => "173" case 174 => "174" + case 175 => "175" case 176 => "176" case 177 => "177" case 178 => "178" case 179 => "179" + case 180 => "180" case 181 => "181" case 182 => "182" case 183 => "183" case 184 => "184" + case 185 => "185" case 186 => "186" case 187 => "187" case 188 => "188" case 189 => "189" + case 190 => "190" case 191 => "191" case 192 => "192" case 193 => "193" case 
194 => "194" + case 195 => "195" case 196 => "196" case 197 => "197" case 198 => "198" case 199 => "199" + case 200 => "200" case 201 => "201" case 202 => "202" case 203 => "203" case 204 => "204" + case 205 => "205" case 206 => "206" case 207 => "207" case 208 => "208" case 209 => "209" + case 210 => "210" case 211 => "211" case 212 => "212" case 213 => "213" case 214 => "214" + case 215 => "215" case 216 => "216" case 217 => "217" case 218 => "218" case 219 => "219" + case 220 => "220" case 221 => "221" case 222 => "222" case 223 => "223" case 224 => "224" + case 225 => "225" case 226 => "226" case 227 => "227" case 228 => "228" case 229 => "229" + case 230 => "230" case 231 => "231" case 232 => "232" case 233 => "233" case 234 => "234" + case 235 => "235" case 236 => "236" case 237 => "237" case 238 => "238" case 239 => "239" + case 240 => "240" case 241 => "241" case 242 => "242" case 243 => "243" case 244 => "244" + case 245 => "245" case 246 => "246" case 247 => "247" case 248 => "248" case 249 => "249" + case 250 => "250" case 251 => "251" case 252 => "252" case 253 => "253" case 254 => "254" + case 255 => "255" + } + bh.consume(x) + i += 1 + } + } + + @Benchmark @OperationsPerInvocation(count) def justClassValueLookup(bh: Blackhole): Unit = { + val names = this.names + val classValue = this.classValue + var i = 0 + while (i < names.length) { + bh.consume(classValue.get(names(i).getClass)) + i += 1 + } + } + + @Benchmark @OperationsPerInvocation(count) def classValueShow(bh: Blackhole): Unit = { + val names = this.names + val classValue = this.classValue + var i = 0 + while (i < names.length) { + val x = (classValue.get(names(i).getClass): @switch) match { + case 0 => "0" case 1 => "1" case 2 => "2" case 3 => "3" case 4 => "4" + case 5 => "5" case 6 => "6" case 7 => "7" case 8 => "8" case 9 => "9" + case 10 => "10" case 11 => "11" case 12 => "12" case 13 => "13" case 14 => "14" + case 15 => "15" case 16 => "16" case 17 => "17" case 18 => "18" case 19 => "19" + case 20 => "20" case 21 => "21" case 22 => "22" case 23 => "23" case 24 => "24" + case 25 => "25" case 26 => "26" case 27 => "27" case 28 => "28" case 29 => "29" + case 30 => "30" case 31 => "31" case 32 => "32" case 33 => "33" case 34 => "34" + case 35 => "35" case 36 => "36" case 37 => "37" case 38 => "38" case 39 => "39" + case 40 => "40" case 41 => "41" case 42 => "42" case 43 => "43" case 44 => "44" + case 45 => "45" case 46 => "46" case 47 => "47" case 48 => "48" case 49 => "49" + case 50 => "50" case 51 => "51" case 52 => "52" case 53 => "53" case 54 => "54" + case 55 => "55" case 56 => "56" case 57 => "57" case 58 => "58" case 59 => "59" + case 60 => "60" case 61 => "61" case 62 => "62" case 63 => "63" case 64 => "64" + case 65 => "65" case 66 => "66" case 67 => "67" case 68 => "68" case 69 => "69" + case 70 => "70" case 71 => "71" case 72 => "72" case 73 => "73" case 74 => "74" + case 75 => "75" case 76 => "76" case 77 => "77" case 78 => "78" case 79 => "79" + case 80 => "80" case 81 => "81" case 82 => "82" case 83 => "83" case 84 => "84" + case 85 => "85" case 86 => "86" case 87 => "87" case 88 => "88" case 89 => "89" + case 90 => "90" case 91 => "91" case 92 => "92" case 93 => "93" case 94 => "94" + case 95 => "95" case 96 => "96" case 97 => "97" case 98 => "98" case 99 => "99" + case 100 => "100" case 101 => "101" case 102 => "102" case 103 => "103" case 104 => "104" + case 105 => "105" case 106 => "106" case 107 => "107" case 108 => "108" case 109 => "109" + case 110 => "110" case 111 => "111" case 112 => "112" 
case 113 => "113" case 114 => "114" + case 115 => "115" case 116 => "116" case 117 => "117" case 118 => "118" case 119 => "119" + case 120 => "120" case 121 => "121" case 122 => "122" case 123 => "123" case 124 => "124" + case 125 => "125" case 126 => "126" case 127 => "127" case 128 => "128" case 129 => "129" + case 130 => "130" case 131 => "131" case 132 => "132" case 133 => "133" case 134 => "134" + case 135 => "135" case 136 => "136" case 137 => "137" case 138 => "138" case 139 => "139" + case 140 => "140" case 141 => "141" case 142 => "142" case 143 => "143" case 144 => "144" + case 145 => "145" case 146 => "146" case 147 => "147" case 148 => "148" case 149 => "149" + case 150 => "150" case 151 => "151" case 152 => "152" case 153 => "153" case 154 => "154" + case 155 => "155" case 156 => "156" case 157 => "157" case 158 => "158" case 159 => "159" + case 160 => "160" case 161 => "161" case 162 => "162" case 163 => "163" case 164 => "164" + case 165 => "165" case 166 => "166" case 167 => "167" case 168 => "168" case 169 => "169" + case 170 => "170" case 171 => "171" case 172 => "172" case 173 => "173" case 174 => "174" + case 175 => "175" case 176 => "176" case 177 => "177" case 178 => "178" case 179 => "179" + case 180 => "180" case 181 => "181" case 182 => "182" case 183 => "183" case 184 => "184" + case 185 => "185" case 186 => "186" case 187 => "187" case 188 => "188" case 189 => "189" + case 190 => "190" case 191 => "191" case 192 => "192" case 193 => "193" case 194 => "194" + case 195 => "195" case 196 => "196" case 197 => "197" case 198 => "198" case 199 => "199" + case 200 => "200" case 201 => "201" case 202 => "202" case 203 => "203" case 204 => "204" + case 205 => "205" case 206 => "206" case 207 => "207" case 208 => "208" case 209 => "209" + case 210 => "210" case 211 => "211" case 212 => "212" case 213 => "213" case 214 => "214" + case 215 => "215" case 216 => "216" case 217 => "217" case 218 => "218" case 219 => "219" + case 220 => "220" case 221 => "221" case 222 => "222" case 223 => "223" case 224 => "224" + case 225 => "225" case 226 => "226" case 227 => "227" case 228 => "228" case 229 => "229" + case 230 => "230" case 231 => "231" case 232 => "232" case 233 => "233" case 234 => "234" + case 235 => "235" case 236 => "236" case 237 => "237" case 238 => "238" case 239 => "239" + case 240 => "240" case 241 => "241" case 242 => "242" case 243 => "243" case 244 => "244" + case 245 => "245" case 246 => "246" case 247 => "247" case 248 => "248" case 249 => "249" + case 250 => "250" case 251 => "251" case 252 => "252" case 253 => "253" case 254 => "254" + case 255 => "255" + } + bh.consume(x) + i += 1 + } + } + + @Benchmark @OperationsPerInvocation(count) def classNameHashSwitchShow(bh: Blackhole): Unit = { + val names = this.names + var i = 0 + while (i < names.length) { + val name = names(i) + val cls = name.getClass + val x = ((cls.getName.##): @switch) match { + case -1200720095 => "0" + case -1200720094 => "1" + case -1200720093 => "2" + case -1200720092 => "3" + case -1200720091 => "4" + case -1200720090 => "5" + case -1200720089 => "6" + case -1200720088 => "7" + case -1200720087 => "8" + case -1200720086 => "9" + case 1432382798 => "10" + case 1432382799 => "11" + case 1432382800 => "12" + case 1432382801 => "13" + case 1432382802 => "14" + case 1432382803 => "15" + case 1432382804 => "16" + case 1432382805 => "17" + case 1432382806 => "18" + case 1432382807 => "19" + case 1432382829 => "20" + case 1432382830 => "21" + case 1432382831 => "22" + case 1432382832 => "23" + 
case 1432382833 => "24" + case 1432382834 => "25" + case 1432382835 => "26" + case 1432382836 => "27" + case 1432382837 => "28" + case 1432382838 => "29" + case 1432382860 => "30" + case 1432382861 => "31" + case 1432382862 => "32" + case 1432382863 => "33" + case 1432382864 => "34" + case 1432382865 => "35" + case 1432382866 => "36" + case 1432382867 => "37" + case 1432382868 => "38" + case 1432382869 => "39" + case 1432382891 => "40" + case 1432382892 => "41" + case 1432382893 => "42" + case 1432382894 => "43" + case 1432382895 => "44" + case 1432382896 => "45" + case 1432382897 => "46" + case 1432382898 => "47" + case 1432382899 => "48" + case 1432382900 => "49" + case 1432382922 => "50" + case 1432382923 => "51" + case 1432382924 => "52" + case 1432382925 => "53" + case 1432382926 => "54" + case 1432382927 => "55" + case 1432382928 => "56" + case 1432382929 => "57" + case 1432382930 => "58" + case 1432382931 => "59" + case 1432382953 => "60" + case 1432382954 => "61" + case 1432382955 => "62" + case 1432382956 => "63" + case 1432382957 => "64" + case 1432382958 => "65" + case 1432382959 => "66" + case 1432382960 => "67" + case 1432382961 => "68" + case 1432382962 => "69" + case 1432382984 => "70" + case 1432382985 => "71" + case 1432382986 => "72" + case 1432382987 => "73" + case 1432382988 => "74" + case 1432382989 => "75" + case 1432382990 => "76" + case 1432382991 => "77" + case 1432382992 => "78" + case 1432382993 => "79" + case 1432383015 => "80" + case 1432383016 => "81" + case 1432383017 => "82" + case 1432383018 => "83" + case 1432383019 => "84" + case 1432383020 => "85" + case 1432383021 => "86" + case 1432383022 => "87" + case 1432383023 => "88" + case 1432383024 => "89" + case 1432383046 => "90" + case 1432383047 => "91" + case 1432383048 => "92" + case 1432383049 => "93" + case 1432383050 => "94" + case 1432383051 => "95" + case 1432383052 => "96" + case 1432383053 => "97" + case 1432383054 => "98" + case 1432383055 => "99" + case 1454193826 => "100" + case 1454193827 => "101" + case 1454193828 => "102" + case 1454193829 => "103" + case 1454193830 => "104" + case 1454193831 => "105" + case 1454193832 => "106" + case 1454193833 => "107" + case 1454193834 => "108" + case 1454193835 => "109" + case 1454193857 => "110" + case 1454193858 => "111" + case 1454193859 => "112" + case 1454193860 => "113" + case 1454193861 => "114" + case 1454193862 => "115" + case 1454193863 => "116" + case 1454193864 => "117" + case 1454193865 => "118" + case 1454193866 => "119" + case 1454193888 => "120" + case 1454193889 => "121" + case 1454193890 => "122" + case 1454193891 => "123" + case 1454193892 => "124" + case 1454193893 => "125" + case 1454193894 => "126" + case 1454193895 => "127" + case 1454193896 => "128" + case 1454193897 => "129" + case 1454193919 => "130" + case 1454193920 => "131" + case 1454193921 => "132" + case 1454193922 => "133" + case 1454193923 => "134" + case 1454193924 => "135" + case 1454193925 => "136" + case 1454193926 => "137" + case 1454193927 => "138" + case 1454193928 => "139" + case 1454193950 => "140" + case 1454193951 => "141" + case 1454193952 => "142" + case 1454193953 => "143" + case 1454193954 => "144" + case 1454193955 => "145" + case 1454193956 => "146" + case 1454193957 => "147" + case 1454193958 => "148" + case 1454193959 => "149" + case 1454193981 => "150" + case 1454193982 => "151" + case 1454193983 => "152" + case 1454193984 => "153" + case 1454193985 => "154" + case 1454193986 => "155" + case 1454193987 => "156" + case 1454193988 => "157" + case 
1454193989 => "158" + case 1454193990 => "159" + case 1454194012 => "160" + case 1454194013 => "161" + case 1454194014 => "162" + case 1454194015 => "163" + case 1454194016 => "164" + case 1454194017 => "165" + case 1454194018 => "166" + case 1454194019 => "167" + case 1454194020 => "168" + case 1454194021 => "169" + case 1454194043 => "170" + case 1454194044 => "171" + case 1454194045 => "172" + case 1454194046 => "173" + case 1454194047 => "174" + case 1454194048 => "175" + case 1454194049 => "176" + case 1454194050 => "177" + case 1454194051 => "178" + case 1454194052 => "179" + case 1454194074 => "180" + case 1454194075 => "181" + case 1454194076 => "182" + case 1454194077 => "183" + case 1454194078 => "184" + case 1454194079 => "185" + case 1454194080 => "186" + case 1454194081 => "187" + case 1454194082 => "188" + case 1454194083 => "189" + case 1454194105 => "190" + case 1454194106 => "191" + case 1454194107 => "192" + case 1454194108 => "193" + case 1454194109 => "194" + case 1454194110 => "195" + case 1454194111 => "196" + case 1454194112 => "197" + case 1454194113 => "198" + case 1454194114 => "199" + case 1454194787 => "200" + case 1454194788 => "201" + case 1454194789 => "202" + case 1454194790 => "203" + case 1454194791 => "204" + case 1454194792 => "205" + case 1454194793 => "206" + case 1454194794 => "207" + case 1454194795 => "208" + case 1454194796 => "209" + case 1454194818 => "210" + case 1454194819 => "211" + case 1454194820 => "212" + case 1454194821 => "213" + case 1454194822 => "214" + case 1454194823 => "215" + case 1454194824 => "216" + case 1454194825 => "217" + case 1454194826 => "218" + case 1454194827 => "219" + case 1454194849 => "220" + case 1454194850 => "221" + case 1454194851 => "222" + case 1454194852 => "223" + case 1454194853 => "224" + case 1454194854 => "225" + case 1454194855 => "226" + case 1454194856 => "227" + case 1454194857 => "228" + case 1454194858 => "229" + case 1454194880 => "230" + case 1454194881 => "231" + case 1454194882 => "232" + case 1454194883 => "233" + case 1454194884 => "234" + case 1454194885 => "235" + case 1454194886 => "236" + case 1454194887 => "237" + case 1454194888 => "238" + case 1454194889 => "239" + case 1454194911 => "240" + case 1454194912 => "241" + case 1454194913 => "242" + case 1454194914 => "243" + case 1454194915 => "244" + case 1454194916 => "245" + case 1454194917 => "246" + case 1454194918 => "247" + case 1454194919 => "248" + case 1454194920 => "249" + case 1454194942 => "250" + case 1454194943 => "251" + case 1454194944 => "252" + case 1454194945 => "253" + case 1454194946 => "254" + case 1454194947 => "255" + case hashCode => throw new MatchError(s"No case for: $name -> $cls -> $hashCode") + } + bh.consume(x) + i += 1 + } + } + +/* + This benchmark compares pattern matching to alternatives, specifically: + 1. using virtual methods instead (like our Tree#transform/traverse) + 2. doing a tableswitch on int field (like our Promise.Transformation) + 3. using a ClassValue as a more efficient way to store the int (like exotic's TypeSwitch) + 4. using the instance's class's name's hash, which are all memoised, in a jumptable + + The results appear to indicate that: + + 1. < 16 cases, patmat beats virtual method calls + 2. = 16 cases, patmat vs virtual overlap in error margins + 3. > 16 cases, patmat loses to virtual method calls + 4. int switching seems to only out perform virtual at 32+ cases + 5. 
class name hash switching beats class value, up to 32 cases (and matches performance at 64) +*/ +} + +final class NameClassValue extends ClassValue[Int] { + def computeValue(runtimeClass: Class[_]) = runtimeClass match { + case ClsName0 => 0 case ClsName1 => 1 case ClsName2 => 2 case ClsName3 => 3 case ClsName4 => 4 + case ClsName5 => 5 case ClsName6 => 6 case ClsName7 => 7 case ClsName8 => 8 case ClsName9 => 9 + case ClsName10 => 10 case ClsName11 => 11 case ClsName12 => 12 case ClsName13 => 13 case ClsName14 => 14 + case ClsName15 => 15 case ClsName16 => 16 case ClsName17 => 17 case ClsName18 => 18 case ClsName19 => 19 + case ClsName20 => 20 case ClsName21 => 21 case ClsName22 => 22 case ClsName23 => 23 case ClsName24 => 24 + case ClsName25 => 25 case ClsName26 => 26 case ClsName27 => 27 case ClsName28 => 28 case ClsName29 => 29 + case ClsName30 => 30 case ClsName31 => 31 case ClsName32 => 32 case ClsName33 => 33 case ClsName34 => 34 + case ClsName35 => 35 case ClsName36 => 36 case ClsName37 => 37 case ClsName38 => 38 case ClsName39 => 39 + case ClsName40 => 40 case ClsName41 => 41 case ClsName42 => 42 case ClsName43 => 43 case ClsName44 => 44 + case ClsName45 => 45 case ClsName46 => 46 case ClsName47 => 47 case ClsName48 => 48 case ClsName49 => 49 + case ClsName50 => 50 case ClsName51 => 51 case ClsName52 => 52 case ClsName53 => 53 case ClsName54 => 54 + case ClsName55 => 55 case ClsName56 => 56 case ClsName57 => 57 case ClsName58 => 58 case ClsName59 => 59 + case ClsName60 => 60 case ClsName61 => 61 case ClsName62 => 62 case ClsName63 => 63 case ClsName64 => 64 + case ClsName65 => 65 case ClsName66 => 66 case ClsName67 => 67 case ClsName68 => 68 case ClsName69 => 69 + case ClsName70 => 70 case ClsName71 => 71 case ClsName72 => 72 case ClsName73 => 73 case ClsName74 => 74 + case ClsName75 => 75 case ClsName76 => 76 case ClsName77 => 77 case ClsName78 => 78 case ClsName79 => 79 + case ClsName80 => 80 case ClsName81 => 81 case ClsName82 => 82 case ClsName83 => 83 case ClsName84 => 84 + case ClsName85 => 85 case ClsName86 => 86 case ClsName87 => 87 case ClsName88 => 88 case ClsName89 => 89 + case ClsName90 => 90 case ClsName91 => 91 case ClsName92 => 92 case ClsName93 => 93 case ClsName94 => 94 + case ClsName95 => 95 case ClsName96 => 96 case ClsName97 => 97 case ClsName98 => 98 case ClsName99 => 99 + case ClsName100 => 100 case ClsName101 => 101 case ClsName102 => 102 case ClsName103 => 103 case ClsName104 => 104 + case ClsName105 => 105 case ClsName106 => 106 case ClsName107 => 107 case ClsName108 => 108 case ClsName109 => 109 + case ClsName110 => 110 case ClsName111 => 111 case ClsName112 => 112 case ClsName113 => 113 case ClsName114 => 114 + case ClsName115 => 115 case ClsName116 => 116 case ClsName117 => 117 case ClsName118 => 118 case ClsName119 => 119 + case ClsName120 => 120 case ClsName121 => 121 case ClsName122 => 122 case ClsName123 => 123 case ClsName124 => 124 + case ClsName125 => 125 case ClsName126 => 126 case ClsName127 => 127 case ClsName128 => 128 case ClsName129 => 129 + case ClsName130 => 130 case ClsName131 => 131 case ClsName132 => 132 case ClsName133 => 133 case ClsName134 => 134 + case ClsName135 => 135 case ClsName136 => 136 case ClsName137 => 137 case ClsName138 => 138 case ClsName139 => 139 + case ClsName140 => 140 case ClsName141 => 141 case ClsName142 => 142 case ClsName143 => 143 case ClsName144 => 144 + case ClsName145 => 145 case ClsName146 => 146 case ClsName147 => 147 case ClsName148 => 148 case ClsName149 => 149 + case ClsName150 => 150 case ClsName151 => 
151 case ClsName152 => 152 case ClsName153 => 153 case ClsName154 => 154 + case ClsName155 => 155 case ClsName156 => 156 case ClsName157 => 157 case ClsName158 => 158 case ClsName159 => 159 + case ClsName160 => 160 case ClsName161 => 161 case ClsName162 => 162 case ClsName163 => 163 case ClsName164 => 164 + case ClsName165 => 165 case ClsName166 => 166 case ClsName167 => 167 case ClsName168 => 168 case ClsName169 => 169 + case ClsName170 => 170 case ClsName171 => 171 case ClsName172 => 172 case ClsName173 => 173 case ClsName174 => 174 + case ClsName175 => 175 case ClsName176 => 176 case ClsName177 => 177 case ClsName178 => 178 case ClsName179 => 179 + case ClsName180 => 180 case ClsName181 => 181 case ClsName182 => 182 case ClsName183 => 183 case ClsName184 => 184 + case ClsName185 => 185 case ClsName186 => 186 case ClsName187 => 187 case ClsName188 => 188 case ClsName189 => 189 + case ClsName190 => 190 case ClsName191 => 191 case ClsName192 => 192 case ClsName193 => 193 case ClsName194 => 194 + case ClsName195 => 195 case ClsName196 => 196 case ClsName197 => 197 case ClsName198 => 198 case ClsName199 => 199 + case ClsName200 => 200 case ClsName201 => 201 case ClsName202 => 202 case ClsName203 => 203 case ClsName204 => 204 + case ClsName205 => 205 case ClsName206 => 206 case ClsName207 => 207 case ClsName208 => 208 case ClsName209 => 209 + case ClsName210 => 210 case ClsName211 => 211 case ClsName212 => 212 case ClsName213 => 213 case ClsName214 => 214 + case ClsName215 => 215 case ClsName216 => 216 case ClsName217 => 217 case ClsName218 => 218 case ClsName219 => 219 + case ClsName220 => 220 case ClsName221 => 221 case ClsName222 => 222 case ClsName223 => 223 case ClsName224 => 224 + case ClsName225 => 225 case ClsName226 => 226 case ClsName227 => 227 case ClsName228 => 228 case ClsName229 => 229 + case ClsName230 => 230 case ClsName231 => 231 case ClsName232 => 232 case ClsName233 => 233 case ClsName234 => 234 + case ClsName235 => 235 case ClsName236 => 236 case ClsName237 => 237 case ClsName238 => 238 case ClsName239 => 239 + case ClsName240 => 240 case ClsName241 => 241 case ClsName242 => 242 case ClsName243 => 243 case ClsName244 => 244 + case ClsName245 => 245 case ClsName246 => 246 case ClsName247 => 247 case ClsName248 => 248 case ClsName249 => 249 + case ClsName250 => 250 case ClsName251 => 251 case ClsName252 => 252 case ClsName253 => 253 case ClsName254 => 254 + case ClsName255 => 255 + } + + private val ClsName0 = classOf[Name0] + private val ClsName1 = classOf[Name1] + private val ClsName2 = classOf[Name2] + private val ClsName3 = classOf[Name3] + private val ClsName4 = classOf[Name4] + private val ClsName5 = classOf[Name5] + private val ClsName6 = classOf[Name6] + private val ClsName7 = classOf[Name7] + private val ClsName8 = classOf[Name8] + private val ClsName9 = classOf[Name9] + private val ClsName10 = classOf[Name10] + private val ClsName11 = classOf[Name11] + private val ClsName12 = classOf[Name12] + private val ClsName13 = classOf[Name13] + private val ClsName14 = classOf[Name14] + private val ClsName15 = classOf[Name15] + private val ClsName16 = classOf[Name16] + private val ClsName17 = classOf[Name17] + private val ClsName18 = classOf[Name18] + private val ClsName19 = classOf[Name19] + private val ClsName20 = classOf[Name20] + private val ClsName21 = classOf[Name21] + private val ClsName22 = classOf[Name22] + private val ClsName23 = classOf[Name23] + private val ClsName24 = classOf[Name24] + private val ClsName25 = classOf[Name25] + private val ClsName26 = 
classOf[Name26] + private val ClsName27 = classOf[Name27] + private val ClsName28 = classOf[Name28] + private val ClsName29 = classOf[Name29] + private val ClsName30 = classOf[Name30] + private val ClsName31 = classOf[Name31] + private val ClsName32 = classOf[Name32] + private val ClsName33 = classOf[Name33] + private val ClsName34 = classOf[Name34] + private val ClsName35 = classOf[Name35] + private val ClsName36 = classOf[Name36] + private val ClsName37 = classOf[Name37] + private val ClsName38 = classOf[Name38] + private val ClsName39 = classOf[Name39] + private val ClsName40 = classOf[Name40] + private val ClsName41 = classOf[Name41] + private val ClsName42 = classOf[Name42] + private val ClsName43 = classOf[Name43] + private val ClsName44 = classOf[Name44] + private val ClsName45 = classOf[Name45] + private val ClsName46 = classOf[Name46] + private val ClsName47 = classOf[Name47] + private val ClsName48 = classOf[Name48] + private val ClsName49 = classOf[Name49] + private val ClsName50 = classOf[Name50] + private val ClsName51 = classOf[Name51] + private val ClsName52 = classOf[Name52] + private val ClsName53 = classOf[Name53] + private val ClsName54 = classOf[Name54] + private val ClsName55 = classOf[Name55] + private val ClsName56 = classOf[Name56] + private val ClsName57 = classOf[Name57] + private val ClsName58 = classOf[Name58] + private val ClsName59 = classOf[Name59] + private val ClsName60 = classOf[Name60] + private val ClsName61 = classOf[Name61] + private val ClsName62 = classOf[Name62] + private val ClsName63 = classOf[Name63] + private val ClsName64 = classOf[Name64] + private val ClsName65 = classOf[Name65] + private val ClsName66 = classOf[Name66] + private val ClsName67 = classOf[Name67] + private val ClsName68 = classOf[Name68] + private val ClsName69 = classOf[Name69] + private val ClsName70 = classOf[Name70] + private val ClsName71 = classOf[Name71] + private val ClsName72 = classOf[Name72] + private val ClsName73 = classOf[Name73] + private val ClsName74 = classOf[Name74] + private val ClsName75 = classOf[Name75] + private val ClsName76 = classOf[Name76] + private val ClsName77 = classOf[Name77] + private val ClsName78 = classOf[Name78] + private val ClsName79 = classOf[Name79] + private val ClsName80 = classOf[Name80] + private val ClsName81 = classOf[Name81] + private val ClsName82 = classOf[Name82] + private val ClsName83 = classOf[Name83] + private val ClsName84 = classOf[Name84] + private val ClsName85 = classOf[Name85] + private val ClsName86 = classOf[Name86] + private val ClsName87 = classOf[Name87] + private val ClsName88 = classOf[Name88] + private val ClsName89 = classOf[Name89] + private val ClsName90 = classOf[Name90] + private val ClsName91 = classOf[Name91] + private val ClsName92 = classOf[Name92] + private val ClsName93 = classOf[Name93] + private val ClsName94 = classOf[Name94] + private val ClsName95 = classOf[Name95] + private val ClsName96 = classOf[Name96] + private val ClsName97 = classOf[Name97] + private val ClsName98 = classOf[Name98] + private val ClsName99 = classOf[Name99] + private val ClsName100 = classOf[Name100] + private val ClsName101 = classOf[Name101] + private val ClsName102 = classOf[Name102] + private val ClsName103 = classOf[Name103] + private val ClsName104 = classOf[Name104] + private val ClsName105 = classOf[Name105] + private val ClsName106 = classOf[Name106] + private val ClsName107 = classOf[Name107] + private val ClsName108 = classOf[Name108] + private val ClsName109 = classOf[Name109] + private val ClsName110 = 
classOf[Name110] + private val ClsName111 = classOf[Name111] + private val ClsName112 = classOf[Name112] + private val ClsName113 = classOf[Name113] + private val ClsName114 = classOf[Name114] + private val ClsName115 = classOf[Name115] + private val ClsName116 = classOf[Name116] + private val ClsName117 = classOf[Name117] + private val ClsName118 = classOf[Name118] + private val ClsName119 = classOf[Name119] + private val ClsName120 = classOf[Name120] + private val ClsName121 = classOf[Name121] + private val ClsName122 = classOf[Name122] + private val ClsName123 = classOf[Name123] + private val ClsName124 = classOf[Name124] + private val ClsName125 = classOf[Name125] + private val ClsName126 = classOf[Name126] + private val ClsName127 = classOf[Name127] + private val ClsName128 = classOf[Name128] + private val ClsName129 = classOf[Name129] + private val ClsName130 = classOf[Name130] + private val ClsName131 = classOf[Name131] + private val ClsName132 = classOf[Name132] + private val ClsName133 = classOf[Name133] + private val ClsName134 = classOf[Name134] + private val ClsName135 = classOf[Name135] + private val ClsName136 = classOf[Name136] + private val ClsName137 = classOf[Name137] + private val ClsName138 = classOf[Name138] + private val ClsName139 = classOf[Name139] + private val ClsName140 = classOf[Name140] + private val ClsName141 = classOf[Name141] + private val ClsName142 = classOf[Name142] + private val ClsName143 = classOf[Name143] + private val ClsName144 = classOf[Name144] + private val ClsName145 = classOf[Name145] + private val ClsName146 = classOf[Name146] + private val ClsName147 = classOf[Name147] + private val ClsName148 = classOf[Name148] + private val ClsName149 = classOf[Name149] + private val ClsName150 = classOf[Name150] + private val ClsName151 = classOf[Name151] + private val ClsName152 = classOf[Name152] + private val ClsName153 = classOf[Name153] + private val ClsName154 = classOf[Name154] + private val ClsName155 = classOf[Name155] + private val ClsName156 = classOf[Name156] + private val ClsName157 = classOf[Name157] + private val ClsName158 = classOf[Name158] + private val ClsName159 = classOf[Name159] + private val ClsName160 = classOf[Name160] + private val ClsName161 = classOf[Name161] + private val ClsName162 = classOf[Name162] + private val ClsName163 = classOf[Name163] + private val ClsName164 = classOf[Name164] + private val ClsName165 = classOf[Name165] + private val ClsName166 = classOf[Name166] + private val ClsName167 = classOf[Name167] + private val ClsName168 = classOf[Name168] + private val ClsName169 = classOf[Name169] + private val ClsName170 = classOf[Name170] + private val ClsName171 = classOf[Name171] + private val ClsName172 = classOf[Name172] + private val ClsName173 = classOf[Name173] + private val ClsName174 = classOf[Name174] + private val ClsName175 = classOf[Name175] + private val ClsName176 = classOf[Name176] + private val ClsName177 = classOf[Name177] + private val ClsName178 = classOf[Name178] + private val ClsName179 = classOf[Name179] + private val ClsName180 = classOf[Name180] + private val ClsName181 = classOf[Name181] + private val ClsName182 = classOf[Name182] + private val ClsName183 = classOf[Name183] + private val ClsName184 = classOf[Name184] + private val ClsName185 = classOf[Name185] + private val ClsName186 = classOf[Name186] + private val ClsName187 = classOf[Name187] + private val ClsName188 = classOf[Name188] + private val ClsName189 = classOf[Name189] + private val ClsName190 = classOf[Name190] + private val 
ClsName191 = classOf[Name191] + private val ClsName192 = classOf[Name192] + private val ClsName193 = classOf[Name193] + private val ClsName194 = classOf[Name194] + private val ClsName195 = classOf[Name195] + private val ClsName196 = classOf[Name196] + private val ClsName197 = classOf[Name197] + private val ClsName198 = classOf[Name198] + private val ClsName199 = classOf[Name199] + private val ClsName200 = classOf[Name200] + private val ClsName201 = classOf[Name201] + private val ClsName202 = classOf[Name202] + private val ClsName203 = classOf[Name203] + private val ClsName204 = classOf[Name204] + private val ClsName205 = classOf[Name205] + private val ClsName206 = classOf[Name206] + private val ClsName207 = classOf[Name207] + private val ClsName208 = classOf[Name208] + private val ClsName209 = classOf[Name209] + private val ClsName210 = classOf[Name210] + private val ClsName211 = classOf[Name211] + private val ClsName212 = classOf[Name212] + private val ClsName213 = classOf[Name213] + private val ClsName214 = classOf[Name214] + private val ClsName215 = classOf[Name215] + private val ClsName216 = classOf[Name216] + private val ClsName217 = classOf[Name217] + private val ClsName218 = classOf[Name218] + private val ClsName219 = classOf[Name219] + private val ClsName220 = classOf[Name220] + private val ClsName221 = classOf[Name221] + private val ClsName222 = classOf[Name222] + private val ClsName223 = classOf[Name223] + private val ClsName224 = classOf[Name224] + private val ClsName225 = classOf[Name225] + private val ClsName226 = classOf[Name226] + private val ClsName227 = classOf[Name227] + private val ClsName228 = classOf[Name228] + private val ClsName229 = classOf[Name229] + private val ClsName230 = classOf[Name230] + private val ClsName231 = classOf[Name231] + private val ClsName232 = classOf[Name232] + private val ClsName233 = classOf[Name233] + private val ClsName234 = classOf[Name234] + private val ClsName235 = classOf[Name235] + private val ClsName236 = classOf[Name236] + private val ClsName237 = classOf[Name237] + private val ClsName238 = classOf[Name238] + private val ClsName239 = classOf[Name239] + private val ClsName240 = classOf[Name240] + private val ClsName241 = classOf[Name241] + private val ClsName242 = classOf[Name242] + private val ClsName243 = classOf[Name243] + private val ClsName244 = classOf[Name244] + private val ClsName245 = classOf[Name245] + private val ClsName246 = classOf[Name246] + private val ClsName247 = classOf[Name247] + private val ClsName248 = classOf[Name248] + private val ClsName249 = classOf[Name249] + private val ClsName250 = classOf[Name250] + private val ClsName251 = classOf[Name251] + private val ClsName252 = classOf[Name252] + private val ClsName253 = classOf[Name253] + private val ClsName254 = classOf[Name254] + private val ClsName255 = classOf[Name255] +} + +sealed abstract class Name(val _id: Int) { + def virtualShow: String +} + +final case class Name0() extends Name(0) { def virtualShow = "0" } +final case class Name1() extends Name(1) { def virtualShow = "1" } +final case class Name2() extends Name(2) { def virtualShow = "2" } +final case class Name3() extends Name(3) { def virtualShow = "3" } +final case class Name4() extends Name(4) { def virtualShow = "4" } +final case class Name5() extends Name(5) { def virtualShow = "5" } +final case class Name6() extends Name(6) { def virtualShow = "6" } +final case class Name7() extends Name(7) { def virtualShow = "7" } +final case class Name8() extends Name(8) { def virtualShow = "8" } +final case 
class Name9() extends Name(9) { def virtualShow = "9" } +final case class Name10() extends Name(10) { def virtualShow = "10" } +final case class Name11() extends Name(11) { def virtualShow = "11" } +final case class Name12() extends Name(12) { def virtualShow = "12" } +final case class Name13() extends Name(13) { def virtualShow = "13" } +final case class Name14() extends Name(14) { def virtualShow = "14" } +final case class Name15() extends Name(15) { def virtualShow = "15" } +final case class Name16() extends Name(16) { def virtualShow = "16" } +final case class Name17() extends Name(17) { def virtualShow = "17" } +final case class Name18() extends Name(18) { def virtualShow = "18" } +final case class Name19() extends Name(19) { def virtualShow = "19" } +final case class Name20() extends Name(20) { def virtualShow = "20" } +final case class Name21() extends Name(21) { def virtualShow = "21" } +final case class Name22() extends Name(22) { def virtualShow = "22" } +final case class Name23() extends Name(23) { def virtualShow = "23" } +final case class Name24() extends Name(24) { def virtualShow = "24" } +final case class Name25() extends Name(25) { def virtualShow = "25" } +final case class Name26() extends Name(26) { def virtualShow = "26" } +final case class Name27() extends Name(27) { def virtualShow = "27" } +final case class Name28() extends Name(28) { def virtualShow = "28" } +final case class Name29() extends Name(29) { def virtualShow = "29" } +final case class Name30() extends Name(30) { def virtualShow = "30" } +final case class Name31() extends Name(31) { def virtualShow = "31" } +final case class Name32() extends Name(32) { def virtualShow = "32" } +final case class Name33() extends Name(33) { def virtualShow = "33" } +final case class Name34() extends Name(34) { def virtualShow = "34" } +final case class Name35() extends Name(35) { def virtualShow = "35" } +final case class Name36() extends Name(36) { def virtualShow = "36" } +final case class Name37() extends Name(37) { def virtualShow = "37" } +final case class Name38() extends Name(38) { def virtualShow = "38" } +final case class Name39() extends Name(39) { def virtualShow = "39" } +final case class Name40() extends Name(40) { def virtualShow = "40" } +final case class Name41() extends Name(41) { def virtualShow = "41" } +final case class Name42() extends Name(42) { def virtualShow = "42" } +final case class Name43() extends Name(43) { def virtualShow = "43" } +final case class Name44() extends Name(44) { def virtualShow = "44" } +final case class Name45() extends Name(45) { def virtualShow = "45" } +final case class Name46() extends Name(46) { def virtualShow = "46" } +final case class Name47() extends Name(47) { def virtualShow = "47" } +final case class Name48() extends Name(48) { def virtualShow = "48" } +final case class Name49() extends Name(49) { def virtualShow = "49" } +final case class Name50() extends Name(50) { def virtualShow = "50" } +final case class Name51() extends Name(51) { def virtualShow = "51" } +final case class Name52() extends Name(52) { def virtualShow = "52" } +final case class Name53() extends Name(53) { def virtualShow = "53" } +final case class Name54() extends Name(54) { def virtualShow = "54" } +final case class Name55() extends Name(55) { def virtualShow = "55" } +final case class Name56() extends Name(56) { def virtualShow = "56" } +final case class Name57() extends Name(57) { def virtualShow = "57" } +final case class Name58() extends Name(58) { def virtualShow = "58" } +final case class 
Name59() extends Name(59) { def virtualShow = "59" } +final case class Name60() extends Name(60) { def virtualShow = "60" } +final case class Name61() extends Name(61) { def virtualShow = "61" } +final case class Name62() extends Name(62) { def virtualShow = "62" } +final case class Name63() extends Name(63) { def virtualShow = "63" } +final case class Name64() extends Name(64) { def virtualShow = "64" } +final case class Name65() extends Name(65) { def virtualShow = "65" } +final case class Name66() extends Name(66) { def virtualShow = "66" } +final case class Name67() extends Name(67) { def virtualShow = "67" } +final case class Name68() extends Name(68) { def virtualShow = "68" } +final case class Name69() extends Name(69) { def virtualShow = "69" } +final case class Name70() extends Name(70) { def virtualShow = "70" } +final case class Name71() extends Name(71) { def virtualShow = "71" } +final case class Name72() extends Name(72) { def virtualShow = "72" } +final case class Name73() extends Name(73) { def virtualShow = "73" } +final case class Name74() extends Name(74) { def virtualShow = "74" } +final case class Name75() extends Name(75) { def virtualShow = "75" } +final case class Name76() extends Name(76) { def virtualShow = "76" } +final case class Name77() extends Name(77) { def virtualShow = "77" } +final case class Name78() extends Name(78) { def virtualShow = "78" } +final case class Name79() extends Name(79) { def virtualShow = "79" } +final case class Name80() extends Name(80) { def virtualShow = "80" } +final case class Name81() extends Name(81) { def virtualShow = "81" } +final case class Name82() extends Name(82) { def virtualShow = "82" } +final case class Name83() extends Name(83) { def virtualShow = "83" } +final case class Name84() extends Name(84) { def virtualShow = "84" } +final case class Name85() extends Name(85) { def virtualShow = "85" } +final case class Name86() extends Name(86) { def virtualShow = "86" } +final case class Name87() extends Name(87) { def virtualShow = "87" } +final case class Name88() extends Name(88) { def virtualShow = "88" } +final case class Name89() extends Name(89) { def virtualShow = "89" } +final case class Name90() extends Name(90) { def virtualShow = "90" } +final case class Name91() extends Name(91) { def virtualShow = "91" } +final case class Name92() extends Name(92) { def virtualShow = "92" } +final case class Name93() extends Name(93) { def virtualShow = "93" } +final case class Name94() extends Name(94) { def virtualShow = "94" } +final case class Name95() extends Name(95) { def virtualShow = "95" } +final case class Name96() extends Name(96) { def virtualShow = "96" } +final case class Name97() extends Name(97) { def virtualShow = "97" } +final case class Name98() extends Name(98) { def virtualShow = "98" } +final case class Name99() extends Name(99) { def virtualShow = "99" } +final case class Name100() extends Name(100) { def virtualShow = "100" } +final case class Name101() extends Name(101) { def virtualShow = "101" } +final case class Name102() extends Name(102) { def virtualShow = "102" } +final case class Name103() extends Name(103) { def virtualShow = "103" } +final case class Name104() extends Name(104) { def virtualShow = "104" } +final case class Name105() extends Name(105) { def virtualShow = "105" } +final case class Name106() extends Name(106) { def virtualShow = "106" } +final case class Name107() extends Name(107) { def virtualShow = "107" } +final case class Name108() extends Name(108) { def virtualShow = 
"108" } +final case class Name109() extends Name(109) { def virtualShow = "109" } +final case class Name110() extends Name(110) { def virtualShow = "110" } +final case class Name111() extends Name(111) { def virtualShow = "111" } +final case class Name112() extends Name(112) { def virtualShow = "112" } +final case class Name113() extends Name(113) { def virtualShow = "113" } +final case class Name114() extends Name(114) { def virtualShow = "114" } +final case class Name115() extends Name(115) { def virtualShow = "115" } +final case class Name116() extends Name(116) { def virtualShow = "116" } +final case class Name117() extends Name(117) { def virtualShow = "117" } +final case class Name118() extends Name(118) { def virtualShow = "118" } +final case class Name119() extends Name(119) { def virtualShow = "119" } +final case class Name120() extends Name(120) { def virtualShow = "120" } +final case class Name121() extends Name(121) { def virtualShow = "121" } +final case class Name122() extends Name(122) { def virtualShow = "122" } +final case class Name123() extends Name(123) { def virtualShow = "123" } +final case class Name124() extends Name(124) { def virtualShow = "124" } +final case class Name125() extends Name(125) { def virtualShow = "125" } +final case class Name126() extends Name(126) { def virtualShow = "126" } +final case class Name127() extends Name(127) { def virtualShow = "127" } +final case class Name128() extends Name(128) { def virtualShow = "128" } +final case class Name129() extends Name(129) { def virtualShow = "129" } +final case class Name130() extends Name(130) { def virtualShow = "130" } +final case class Name131() extends Name(131) { def virtualShow = "131" } +final case class Name132() extends Name(132) { def virtualShow = "132" } +final case class Name133() extends Name(133) { def virtualShow = "133" } +final case class Name134() extends Name(134) { def virtualShow = "134" } +final case class Name135() extends Name(135) { def virtualShow = "135" } +final case class Name136() extends Name(136) { def virtualShow = "136" } +final case class Name137() extends Name(137) { def virtualShow = "137" } +final case class Name138() extends Name(138) { def virtualShow = "138" } +final case class Name139() extends Name(139) { def virtualShow = "139" } +final case class Name140() extends Name(140) { def virtualShow = "140" } +final case class Name141() extends Name(141) { def virtualShow = "141" } +final case class Name142() extends Name(142) { def virtualShow = "142" } +final case class Name143() extends Name(143) { def virtualShow = "143" } +final case class Name144() extends Name(144) { def virtualShow = "144" } +final case class Name145() extends Name(145) { def virtualShow = "145" } +final case class Name146() extends Name(146) { def virtualShow = "146" } +final case class Name147() extends Name(147) { def virtualShow = "147" } +final case class Name148() extends Name(148) { def virtualShow = "148" } +final case class Name149() extends Name(149) { def virtualShow = "149" } +final case class Name150() extends Name(150) { def virtualShow = "150" } +final case class Name151() extends Name(151) { def virtualShow = "151" } +final case class Name152() extends Name(152) { def virtualShow = "152" } +final case class Name153() extends Name(153) { def virtualShow = "153" } +final case class Name154() extends Name(154) { def virtualShow = "154" } +final case class Name155() extends Name(155) { def virtualShow = "155" } +final case class Name156() extends Name(156) { def virtualShow = 
"156" } +final case class Name157() extends Name(157) { def virtualShow = "157" } +final case class Name158() extends Name(158) { def virtualShow = "158" } +final case class Name159() extends Name(159) { def virtualShow = "159" } +final case class Name160() extends Name(160) { def virtualShow = "160" } +final case class Name161() extends Name(161) { def virtualShow = "161" } +final case class Name162() extends Name(162) { def virtualShow = "162" } +final case class Name163() extends Name(163) { def virtualShow = "163" } +final case class Name164() extends Name(164) { def virtualShow = "164" } +final case class Name165() extends Name(165) { def virtualShow = "165" } +final case class Name166() extends Name(166) { def virtualShow = "166" } +final case class Name167() extends Name(167) { def virtualShow = "167" } +final case class Name168() extends Name(168) { def virtualShow = "168" } +final case class Name169() extends Name(169) { def virtualShow = "169" } +final case class Name170() extends Name(170) { def virtualShow = "170" } +final case class Name171() extends Name(171) { def virtualShow = "171" } +final case class Name172() extends Name(172) { def virtualShow = "172" } +final case class Name173() extends Name(173) { def virtualShow = "173" } +final case class Name174() extends Name(174) { def virtualShow = "174" } +final case class Name175() extends Name(175) { def virtualShow = "175" } +final case class Name176() extends Name(176) { def virtualShow = "176" } +final case class Name177() extends Name(177) { def virtualShow = "177" } +final case class Name178() extends Name(178) { def virtualShow = "178" } +final case class Name179() extends Name(179) { def virtualShow = "179" } +final case class Name180() extends Name(180) { def virtualShow = "180" } +final case class Name181() extends Name(181) { def virtualShow = "181" } +final case class Name182() extends Name(182) { def virtualShow = "182" } +final case class Name183() extends Name(183) { def virtualShow = "183" } +final case class Name184() extends Name(184) { def virtualShow = "184" } +final case class Name185() extends Name(185) { def virtualShow = "185" } +final case class Name186() extends Name(186) { def virtualShow = "186" } +final case class Name187() extends Name(187) { def virtualShow = "187" } +final case class Name188() extends Name(188) { def virtualShow = "188" } +final case class Name189() extends Name(189) { def virtualShow = "189" } +final case class Name190() extends Name(190) { def virtualShow = "190" } +final case class Name191() extends Name(191) { def virtualShow = "191" } +final case class Name192() extends Name(192) { def virtualShow = "192" } +final case class Name193() extends Name(193) { def virtualShow = "193" } +final case class Name194() extends Name(194) { def virtualShow = "194" } +final case class Name195() extends Name(195) { def virtualShow = "195" } +final case class Name196() extends Name(196) { def virtualShow = "196" } +final case class Name197() extends Name(197) { def virtualShow = "197" } +final case class Name198() extends Name(198) { def virtualShow = "198" } +final case class Name199() extends Name(199) { def virtualShow = "199" } +final case class Name200() extends Name(200) { def virtualShow = "200" } +final case class Name201() extends Name(201) { def virtualShow = "201" } +final case class Name202() extends Name(202) { def virtualShow = "202" } +final case class Name203() extends Name(203) { def virtualShow = "203" } +final case class Name204() extends Name(204) { def virtualShow = 
"204" } +final case class Name205() extends Name(205) { def virtualShow = "205" } +final case class Name206() extends Name(206) { def virtualShow = "206" } +final case class Name207() extends Name(207) { def virtualShow = "207" } +final case class Name208() extends Name(208) { def virtualShow = "208" } +final case class Name209() extends Name(209) { def virtualShow = "209" } +final case class Name210() extends Name(210) { def virtualShow = "210" } +final case class Name211() extends Name(211) { def virtualShow = "211" } +final case class Name212() extends Name(212) { def virtualShow = "212" } +final case class Name213() extends Name(213) { def virtualShow = "213" } +final case class Name214() extends Name(214) { def virtualShow = "214" } +final case class Name215() extends Name(215) { def virtualShow = "215" } +final case class Name216() extends Name(216) { def virtualShow = "216" } +final case class Name217() extends Name(217) { def virtualShow = "217" } +final case class Name218() extends Name(218) { def virtualShow = "218" } +final case class Name219() extends Name(219) { def virtualShow = "219" } +final case class Name220() extends Name(220) { def virtualShow = "220" } +final case class Name221() extends Name(221) { def virtualShow = "221" } +final case class Name222() extends Name(222) { def virtualShow = "222" } +final case class Name223() extends Name(223) { def virtualShow = "223" } +final case class Name224() extends Name(224) { def virtualShow = "224" } +final case class Name225() extends Name(225) { def virtualShow = "225" } +final case class Name226() extends Name(226) { def virtualShow = "226" } +final case class Name227() extends Name(227) { def virtualShow = "227" } +final case class Name228() extends Name(228) { def virtualShow = "228" } +final case class Name229() extends Name(229) { def virtualShow = "229" } +final case class Name230() extends Name(230) { def virtualShow = "230" } +final case class Name231() extends Name(231) { def virtualShow = "231" } +final case class Name232() extends Name(232) { def virtualShow = "232" } +final case class Name233() extends Name(233) { def virtualShow = "233" } +final case class Name234() extends Name(234) { def virtualShow = "234" } +final case class Name235() extends Name(235) { def virtualShow = "235" } +final case class Name236() extends Name(236) { def virtualShow = "236" } +final case class Name237() extends Name(237) { def virtualShow = "237" } +final case class Name238() extends Name(238) { def virtualShow = "238" } +final case class Name239() extends Name(239) { def virtualShow = "239" } +final case class Name240() extends Name(240) { def virtualShow = "240" } +final case class Name241() extends Name(241) { def virtualShow = "241" } +final case class Name242() extends Name(242) { def virtualShow = "242" } +final case class Name243() extends Name(243) { def virtualShow = "243" } +final case class Name244() extends Name(244) { def virtualShow = "244" } +final case class Name245() extends Name(245) { def virtualShow = "245" } +final case class Name246() extends Name(246) { def virtualShow = "246" } +final case class Name247() extends Name(247) { def virtualShow = "247" } +final case class Name248() extends Name(248) { def virtualShow = "248" } +final case class Name249() extends Name(249) { def virtualShow = "249" } +final case class Name250() extends Name(250) { def virtualShow = "250" } +final case class Name251() extends Name(251) { def virtualShow = "251" } +final case class Name252() extends Name(252) { def virtualShow = 
"252" } +final case class Name253() extends Name(253) { def virtualShow = "253" } +final case class Name254() extends Name(254) { def virtualShow = "254" } +final case class Name255() extends Name(255) { def virtualShow = "255" } diff --git a/test/files/jvm/future-spec/FutureTests.scala b/test/files/jvm/future-spec/FutureTests.scala index f2c83a64aeeb..7181abd144c7 100644 --- a/test/files/jvm/future-spec/FutureTests.scala +++ b/test/files/jvm/future-spec/FutureTests.scala @@ -147,6 +147,7 @@ class FutureTests extends MinimalScalaTest { assert( ECNotUsed(ec => f.filter(_ => fail("filter should not have been called"))(ec)) eq f) assert( ECNotUsed(ec => f.collect({ case _ => fail("collect should not have been called")})(ec)) eq f) assert( ECNotUsed(ec => f.zipWith(f)({ (_,_) => fail("zipWith should not have been called")})(ec)) eq f) + } } diff --git a/test/files/jvm/iinc.check b/test/files/jvm/iinc.check new file mode 100644 index 000000000000..3538a07f8587 --- /dev/null +++ b/test/files/jvm/iinc.check @@ -0,0 +1,18 @@ +def increment + iinc 1 + iinc 54 + iinc 127 + iinc -1 + iinc -54 + iinc -128 +end increment +def wideIncrement + iinc 128 + iinc 8765 + iinc 32767 + iinc -129 + iinc -8765 + iinc -32768 +end wideIncrement +def tooBigForIinc +end tooBigForIinc diff --git a/test/files/jvm/iinc/Increment_1.scala b/test/files/jvm/iinc/Increment_1.scala new file mode 100644 index 000000000000..03251016bfb3 --- /dev/null +++ b/test/files/jvm/iinc/Increment_1.scala @@ -0,0 +1,37 @@ +class Increment { + + // `iinc` + def increment(x: Int): Int = { + var i = x + i += 1 + i += 54 + i += 127 + i -= 1 + i -= 54 + i -= 128 + i + } + + // `wide iinc` + def wideIncrement(x: Int): Int = { + var i = x + i += 128 + i += 8765 + i += 32767 + i -= 129 + i -= 8765 + i -= 32768 + i + } + + def tooBigForIinc(x: Int): Int = { + var i = x + i += 32768 + i += 56789 + i += 2147483647 + i -= 32769 + i -= 56789 + i -= 2147483647 + i + } +} diff --git a/test/files/jvm/iinc/test.scala b/test/files/jvm/iinc/test.scala new file mode 100644 index 000000000000..4743fb1000af --- /dev/null +++ b/test/files/jvm/iinc/test.scala @@ -0,0 +1,17 @@ +import scala.tools.partest.BytecodeTest + +import scala.tools.asm.tree.IincInsnNode + +object Test extends BytecodeTest { + def show: Unit = { + val classNode = loadClassNode("Increment") + for (name <- List("increment", "wideIncrement", "tooBigForIinc")) { + println(s"def $name") + getMethod(classNode, name).instructions.toArray().collect { + case insn: IincInsnNode => println(s" iinc ${insn.incr}") + } + println(s"end $name") + } + } +} + diff --git a/test/files/jvm/javaReflection/Test.scala b/test/files/jvm/javaReflection/Test.scala index 3fb67536ecaa..4fa8568cfeb0 100644 --- a/test/files/jvm/javaReflection/Test.scala +++ b/test/files/jvm/javaReflection/Test.scala @@ -50,8 +50,6 @@ getSimpleName / getCanonicalName / isAnonymousClass / isLocalClass / isSynthetic will change some day). */ -import scala.tools.testkit.AssertUtil.assert8 - object Test { def tr[T](m: => T): String = try { @@ -60,6 +58,13 @@ object Test { else r.toString } catch { case e: InternalError => e.getMessage } + /** Assert on Java 8, but on later versions, just print if assert would fail. 
*/ + def assert8(b: => Boolean, msg: => Any) = + if (!scala.util.Properties.isJavaAtLeast(9)) + assert(b, msg) + else if (!b) + println(s"assert not $msg") + def assertNotAnonymous(c: Class[_]) = assert8(!isAnonymous(c), s"$c is anonymous") def isAnonymous(c: Class[_]) = try { diff --git a/test/files/jvm/methvsfield.javaopts b/test/files/jvm/methvsfield.javaopts deleted file mode 100644 index 9740f07b079b..000000000000 --- a/test/files/jvm/methvsfield.javaopts +++ /dev/null @@ -1 +0,0 @@ --Dneeds.forked.jvm \ No newline at end of file diff --git a/test/files/jvm/methvsfield/Test_2.scala b/test/files/jvm/methvsfield/Test_2.scala index 5389836be277..b9ad46ac7426 100644 --- a/test/files/jvm/methvsfield/Test_2.scala +++ b/test/files/jvm/methvsfield/Test_2.scala @@ -1,3 +1,4 @@ +// java: -Dneeds.forked.jvm // bug #1062 object Test extends App { println((new MethVsField_1).three) diff --git a/test/files/jvm/natives.javaopts b/test/files/jvm/natives.javaopts deleted file mode 100644 index 57b2283c7fb3..000000000000 --- a/test/files/jvm/natives.javaopts +++ /dev/null @@ -1 +0,0 @@ --Dneeds.to.fork \ No newline at end of file diff --git a/test/files/jvm/natives.scala b/test/files/jvm/natives.scala index 2d19f3cbfda0..15a8b298f343 100644 --- a/test/files/jvm/natives.scala +++ b/test/files/jvm/natives.scala @@ -1,3 +1,5 @@ +// java: -Dneeds.to.fork + object Test { //println("java.library.path=" + System.getProperty("java.library.path")) diff --git a/test/files/jvm/scala-concurrent-tck.scala b/test/files/jvm/scala-concurrent-tck.scala index bcbd977e01cf..323eaa6937d7 100644 --- a/test/files/jvm/scala-concurrent-tck.scala +++ b/test/files/jvm/scala-concurrent-tck.scala @@ -9,25 +9,27 @@ import scala.concurrent.{ Awaitable, blocking } -import scala.util.{ Try, Success, Failure } -import scala.concurrent.duration.Duration -import scala.concurrent.duration._ -import scala.reflect.{ classTag, ClassTag } -import scala.tools.testkit.AssertUtil.assertThrows import scala.annotation.tailrec +import scala.concurrent.duration._ +import scala.reflect.{classTag, ClassTag} +import scala.tools.testkit.AssertUtil.{Fast, Slow, assertThrows, waitFor, waitForIt} +import scala.util.{Try, Success, Failure} +import scala.util.chaining._ +import java.util.concurrent.CountDownLatch +import java.util.concurrent.TimeUnit.{MILLISECONDS => Milliseconds, SECONDS => Seconds} trait TestBase { - import scala.tools.testkit.AssertUtil.{Fast, Slow, waitForIt} + trait Done { def apply(proof: => Boolean): Unit } + def once(body: Done => Unit): Unit = { - import java.util.concurrent.{ LinkedBlockingQueue, TimeUnit } - import TimeUnit.{MILLISECONDS => Milliseconds} + import java.util.concurrent.LinkedBlockingQueue val q = new LinkedBlockingQueue[Try[Boolean]] body(new Done { def apply(proof: => Boolean): Unit = q offer Try(proof) }) var tried: Try[Boolean] = null - def check = { tried = q.poll(5000, Milliseconds) ; tried != null } + def check = { tried = q.poll(5000L, Milliseconds) ; tried != null } waitForIt(check, progress = Slow, label = "concurrent-tck") assert(tried.isSuccess) assert(tried.get) @@ -37,22 +39,17 @@ trait TestBase { def test[T](name: String)(body: => T): T = { println(s"starting $name") - val r = body - println(s"finished $name") - r + body.tap(_ => println(s"finished $name")) } def await[A](value: Awaitable[A]): A = { - var a: A = null.asInstanceOf[A] - def check = { + def check: Option[A] = Try(Await.result(value, Duration(500, "ms"))) match { - case Success(x) => a = x ; true - case Failure(_: TimeoutException) => 
false + case Success(x) => Some(x) + case Failure(_: TimeoutException) => None case Failure(t) => throw t } - } - waitForIt(check, progress = Fast, label = "concurrent-tck test result") - a + waitFor(check, progress = Fast, label = "concurrent-tck test result") } } @@ -989,36 +986,34 @@ class CustomExecutionContext extends TestBase { assert(count >= 1) } - def testUncaughtExceptionReporting(): Unit = once { - done => - import java.util.concurrent.TimeUnit.SECONDS - val example = new InterruptedException() - val latch = new java.util.concurrent.CountDownLatch(1) - @volatile var thread: Thread = null - @volatile var reported: Throwable = null - val ec = ExecutionContext.fromExecutorService(null, t => { - reported = t - latch.countDown() - }) + def testUncaughtExceptionReporting(): Unit = once { done => + val example = new InterruptedException + val latch = new CountDownLatch(1) + @volatile var thread: Thread = null + @volatile var reported: Throwable = null + val ec = ExecutionContext.fromExecutorService(null, t => { + reported = t + latch.countDown() + }) - @tailrec def waitForThreadDeath(turns: Int): Boolean = - if (turns <= 0) false - else if ((thread ne null) && thread.isAlive == false) true - else { - Thread.sleep(10) - waitForThreadDeath(turns - 1) - } + @tailrec def waitForThreadDeath(turns: Int): Boolean = + turns > 0 && (thread != null && !thread.isAlive || { Thread.sleep(10L) ; waitForThreadDeath(turns - 1) }) - try { - ec.execute(() => { - thread = Thread.currentThread - throw example - }) - latch.await(2, SECONDS) - done(waitForThreadDeath(turns = 100) && (reported eq example)) - } finally { - ec.shutdown() - } + def truthfully(b: Boolean): Option[Boolean] = if (b) Some(true) else None + + // jdk17 thread receives pool exception handler, so wait for thread to die slow and painful expired keepalive + def threadIsDead = + waitFor(truthfully(waitForThreadDeath(turns = 100)), progress = Slow, label = "concurrent-tck-thread-death") + + try { + ec.execute(() => { + thread = Thread.currentThread + throw example + }) + latch.await(2, Seconds) + done(threadIsDead && (reported eq example)) + } + finally ec.shutdown() } test("testUncaughtExceptionReporting")(testUncaughtExceptionReporting()) diff --git a/test/files/jvm/t1600.javaopts b/test/files/jvm/t1600.javaopts deleted file mode 100644 index f4038254ba29..000000000000 --- a/test/files/jvm/t1600.javaopts +++ /dev/null @@ -1 +0,0 @@ --Dneeds.forked.jvm.maybe.because.context.classloader \ No newline at end of file diff --git a/test/files/jvm/t1600.scala b/test/files/jvm/t1600.scala index b434862adb1e..da04a5f7c923 100644 --- a/test/files/jvm/t1600.scala +++ b/test/files/jvm/t1600.scala @@ -1,3 +1,4 @@ +// java: -Dneeds.forked.jvm.maybe.because.context.classloader /** * Checks that serialization of hash-based collections works correctly if the hashCode diff --git a/test/files/jvm/t8689.javaopts b/test/files/jvm/t8689.javaopts deleted file mode 100644 index 9740f07b079b..000000000000 --- a/test/files/jvm/t8689.javaopts +++ /dev/null @@ -1 +0,0 @@ --Dneeds.forked.jvm \ No newline at end of file diff --git a/test/files/jvm/t8689.scala b/test/files/jvm/t8689.scala index 3ee20d711a92..2eeb12a12cf1 100644 --- a/test/files/jvm/t8689.scala +++ b/test/files/jvm/t8689.scala @@ -1,3 +1,4 @@ +// java: -Dneeds.forked.jvm object Test { def main(args: Array[String]): Unit = { import scala.concurrent._ diff --git a/test/files/neg/and-future.check b/test/files/neg/and-future.check new file mode 100644 index 000000000000..c7992b38964e --- /dev/null +++ 
b/test/files/neg/and-future.check @@ -0,0 +1,7 @@ +and-future.scala:9: error: Cannot parse infix type combining `&` and `Map`, please use `Map` as the head of a regular type application. + val b: Int Map X & Int Map Y = Map[Int, X & Y]() // error: unsupported + ^ +and-future.scala:13: error: Cannot parse infix type combining `&` and `Map`, please use `Map` as the head of a regular type application. + val c: (Int Map X) & (Int Map Y) = Map[Int, X & Y]() // error: unsupported + ^ +2 errors diff --git a/test/files/neg/and-future.scala b/test/files/neg/and-future.scala new file mode 100644 index 000000000000..1092c013b186 --- /dev/null +++ b/test/files/neg/and-future.scala @@ -0,0 +1,14 @@ +// scalac: -Xsource:3 +// + +trait X +trait Y + +class Test { + val a: Map[Int, X] & Map[Int, Y] = Map[Int, X & Y]() // ok + val b: Int Map X & Int Map Y = Map[Int, X & Y]() // error: unsupported + + // This one is unambiguous but it's hard to check whether parens were present + // from the parser output so we also emit an error there. + val c: (Int Map X) & (Int Map Y) = Map[Int, X & Y]() // error: unsupported +} diff --git a/test/files/neg/annots-constant-neg.check b/test/files/neg/annots-constant-neg.check index 800e06c70489..f531b2a98540 100644 --- a/test/files/neg/annots-constant-neg.check +++ b/test/files/neg/annots-constant-neg.check @@ -79,7 +79,7 @@ Test.scala:71: error: annotation argument needs to be a constant; found: new sca Test.scala:76: error: multiple constructors for Ann1 with alternatives: (s: String)Ann1 (value: Int)Ann1 - cannot be invoked with (x: String) + [which have no such parameter x] cannot be invoked with (x: String) @Ann1(x = "") def v4 = 0 // err ^ Test.scala:78: error: Ann1 does not take parameters diff --git a/test/files/neg/classOfDeprecation.check b/test/files/neg/classOfDeprecation.check new file mode 100644 index 000000000000..e67fc64fc74d --- /dev/null +++ b/test/files/neg/classOfDeprecation.check @@ -0,0 +1,9 @@ +classOfDeprecation.scala:6: warning: class C is deprecated (since like, forever): no no! + val t = classOf[C] + ^ +classOfDeprecation.scala:7: warning: class C is deprecated (since like, forever): no no! + @ann(classOf[C]) def u = 1 + ^ +error: No warnings can be incurred under -Werror. +2 warnings +1 error diff --git a/test/files/neg/classOfDeprecation.scala b/test/files/neg/classOfDeprecation.scala new file mode 100644 index 000000000000..d7557e3f2e90 --- /dev/null +++ b/test/files/neg/classOfDeprecation.scala @@ -0,0 +1,8 @@ +// scalac: -deprecation -Werror + +@deprecated("no no!", "like, forever") class C +class ann(x: Any) extends annotation.Annotation +object T { + val t = classOf[C] + @ann(classOf[C]) def u = 1 +} diff --git a/test/files/neg/for-comprehension-case.check b/test/files/neg/for-comprehension-case.check new file mode 100644 index 000000000000..549e4943e344 --- /dev/null +++ b/test/files/neg/for-comprehension-case.check @@ -0,0 +1,7 @@ +for-comprehension-case.scala:20: error: '<-' expected but '=' found. 
+ case y = x + 1 + ^ +for-comprehension-case.scala:21: error: illegal start of simple expression + } yield x + y + ^ +2 errors diff --git a/test/files/neg/for-comprehension-case.scala b/test/files/neg/for-comprehension-case.scala new file mode 100644 index 000000000000..d6b14eb91a90 --- /dev/null +++ b/test/files/neg/for-comprehension-case.scala @@ -0,0 +1,22 @@ +class A { + // ok + val a = + for { + case Some(x) <- List(Some(1), None) + y = x + 1 + } yield x + y + + // ok + val b = + for { + Some(x) <- List(Some(1), None) + Some(y) <- List(None, Some(2)) + } yield x+y + + // fail + val c = + for { + case Some(x) <- List(Some(1), None) + case y = x + 1 + } yield x + y +} diff --git a/test/files/neg/forgot-interpolator.scala b/test/files/neg/forgot-interpolator.scala index 7ffc7eace4dc..cc00f9170701 100644 --- a/test/files/neg/forgot-interpolator.scala +++ b/test/files/neg/forgot-interpolator.scala @@ -93,3 +93,21 @@ package curry { def f5 = "I draw the line at $palomino" // no warn } } + +package companions { + class X + object X + class C { + def f1 = "$X" // nowarn companion + def f2 = "$Byte" // nowarn companion + def f3 = "$Char" // nowarn companion + def f4 = "$Short" // nowarn companion + def f5 = "$Int" // nowarn companion + def f6 = "$Float" // nowarn companion + def f7 = "$Double" // nowarn companion + def f8 = "$Character" // nowarn companion + def f9 = "$Integer" // nowarn companion + def f0 = "$companions" // nowarn companion + } +} +package object companions diff --git a/test/files/neg/forward.check b/test/files/neg/forward.check index 12051a1c14f7..79630f888fbd 100644 --- a/test/files/neg/forward.check +++ b/test/files/neg/forward.check @@ -1,10 +1,13 @@ -forward.scala:6: error: forward reference extends over definition of value x +forward.scala:8: error: forward reference to value x defined on line 9 extends over definition of value x def f: Int = x; ^ -forward.scala:10: error: forward reference extends over definition of value x +forward.scala:12: error: forward reference to method g defined on line 14 extends over definition of value x def f: Int = g; ^ -forward.scala:15: error: forward reference extends over definition of variable x +forward.scala:17: error: forward reference to method g defined on line 19 extends over definition of variable x def f: Int = g; ^ -3 errors +forward.scala:29: error: forward reference to value ec defined on line 32 extends over definition of value z + a <- fInt + ^ +4 errors diff --git a/test/files/neg/forward.scala b/test/files/neg/forward.scala index d5c0851f09e3..bf1fc7ac8c95 100644 --- a/test/files/neg/forward.scala +++ b/test/files/neg/forward.scala @@ -1,3 +1,5 @@ +import scala.concurrent._ + object Test { def f: Int = x; val x: Int = f; @@ -21,4 +23,13 @@ object Test { Console.println("foo"); def g: Int = f; } + { + val fInt = Future.successful(1) + val z = for { + a <- fInt + } yield a + + implicit val ec: ExecutionContext = ExecutionContext.Implicits.global + z + } } diff --git a/test/files/neg/implicit-any2stringadd.scala b/test/files/neg/implicit-any2stringadd.scala index 80f1ab29bd85..7e86c89cd31f 100644 --- a/test/files/neg/implicit-any2stringadd.scala +++ b/test/files/neg/implicit-any2stringadd.scala @@ -1,4 +1,4 @@ -// scalac: -Xsource:3 -Xlog-implicits +// scalac: -Xsource:3 -Vimplicits // object Test { true + "what" diff --git a/test/files/neg/implicit-log.check b/test/files/neg/implicit-log.check index c0115c6291a9..541aa6251b25 100644 --- a/test/files/neg/implicit-log.check +++ b/test/files/neg/implicit-log.check @@ -1,13 
+1,3 @@ -implicit-log.scala:61: byVal is not a valid implicit value for Int(7) => ?{def unwrap: ?} because: -incompatible: (x: 7): 7 does not match expected type Int(7) => ?{def unwrap: ?} - val res = 7.unwrap() // doesn't work - ^ -implicit-log.scala:70: materializing requested scala.reflect.type.ClassTag[String] using scala.reflect.`package`.materializeClassTag[String]() - val x: java.util.List[String] = List("foo") - ^ -implicit-log.scala:96: materializing requested reflect.runtime.universe.type.TypeTag[Class[_]] using scala.reflect.api.`package`.materializeTypeTag[Class[_]](scala.reflect.runtime.`package`.universe) - println(implicitly[TypeTag[Class[_]]]) - ^ implicit-log.scala:100: error: value baa is not a member of Int 1.baa ^ diff --git a/test/files/neg/implicit-log.scala b/test/files/neg/implicit-log.scala index adfe3acbf0e3..f77085e3c2af 100644 --- a/test/files/neg/implicit-log.scala +++ b/test/files/neg/implicit-log.scala @@ -1,4 +1,4 @@ -/* scalac: -Xlog-implicits -Xsource:3 -Xfatal-warnings */ +/* scalac: -Vimplicits -Xsource:3 -Xfatal-warnings */ package foo diff --git a/test/files/neg/implicit-shadow.check b/test/files/neg/implicit-shadow.check index 423f7c56aa99..d7909b9c3a11 100644 --- a/test/files/neg/implicit-shadow.check +++ b/test/files/neg/implicit-shadow.check @@ -1,10 +1,3 @@ -implicit-shadow.scala:6: is not a valid implicit value for Int(1) => ?{def isEmpty: ?} because: -reference to i2s is ambiguous; -it is imported twice in the same scope by -import C._ -and import B._ - 1.isEmpty - ^ implicit-shadow.scala:6: error: value isEmpty is not a member of Int 1.isEmpty ^ diff --git a/test/files/neg/implicit-shadow.scala b/test/files/neg/implicit-shadow.scala index 7fea7d5d32a0..33725ece13f1 100644 --- a/test/files/neg/implicit-shadow.scala +++ b/test/files/neg/implicit-shadow.scala @@ -1,4 +1,4 @@ -// scalac: -Xlog-implicits +// scalac: -Vimplicits // object Test { import B._, C._ diff --git a/test/files/neg/import-future.check b/test/files/neg/import-future.check new file mode 100644 index 000000000000..000601f45b7d --- /dev/null +++ b/test/files/neg/import-future.check @@ -0,0 +1,4 @@ +import-future.scala:15: error: not found: value unrelated + unrelated(1) // error + ^ +1 error diff --git a/test/files/neg/import-future.scala b/test/files/neg/import-future.scala new file mode 100644 index 000000000000..288fd3d0e240 --- /dev/null +++ b/test/files/neg/import-future.scala @@ -0,0 +1,27 @@ +// scalac: -Xsource:3 +// + +class D { + def *(y: Int): Int = y + def unrelated(y: Int): Int = y +} + +object Test { + val d = new D + + def one: Int = { + import d.`*` + + unrelated(1) // error + + *(1) + } + + def two: Int = { + import d.* + + unrelated(1) + + *(1) + } +} diff --git a/test/files/neg/import-syntax.check b/test/files/neg/import-syntax.check new file mode 100644 index 000000000000..887677e3cfd9 --- /dev/null +++ b/test/files/neg/import-syntax.check @@ -0,0 +1,7 @@ +import-syntax.scala:10: error: Wildcard import cannot be renamed + import d.{* => huh} + ^ +import-syntax.scala:11: error: Wildcard import cannot be renamed + import d.{_ => also_no} + ^ +2 errors diff --git a/test/files/neg/import-syntax.scala b/test/files/neg/import-syntax.scala new file mode 100644 index 000000000000..0e3deb00cce0 --- /dev/null +++ b/test/files/neg/import-syntax.scala @@ -0,0 +1,12 @@ +// scalac: -Xsource:3 + +class D { + def *(y: Int): Int = y + def unrelated(y: Int): Int = y +} + +object nope { + val d = new D + import d.{* => huh} + import d.{_ => also_no} +} diff --git 
a/test/files/neg/leibniz-liskov.check b/test/files/neg/leibniz-liskov.check index c760861dbbf6..ad2ff6f6d4df 100644 --- a/test/files/neg/leibniz-liskov.check +++ b/test/files/neg/leibniz-liskov.check @@ -55,8 +55,8 @@ leibniz-liskov.scala:21: error: type mismatch; required: F[U] def convert1[T, U](l: List[T])(ev: T =:= U): List[U] = ev.substituteContra(l) ^ -leibniz-liskov.scala:21: error: type mismatch; - found : F[T] +leibniz-liskov.scala:21: error: polymorphic expression cannot be instantiated to expected type; + found : [F[_]]F[T] required: List[U] def convert1[T, U](l: List[T])(ev: T =:= U): List[U] = ev.substituteContra(l) ^ @@ -72,8 +72,8 @@ leibniz-liskov.scala:22: error: type mismatch; required: F[T] def convert2[T, U](l: List[U])(ev: T =:= U): List[T] = ev.substituteCo(l) ^ -leibniz-liskov.scala:22: error: type mismatch; - found : F[U] +leibniz-liskov.scala:22: error: polymorphic expression cannot be instantiated to expected type; + found : [F[_]]F[U] required: List[T] def convert2[T, U](l: List[U])(ev: T =:= U): List[T] = ev.substituteCo(l) ^ @@ -111,8 +111,8 @@ leibniz-liskov.scala:35: error: type mismatch; required: F[U] def convertConsume2[U, T](c: Consumes[T])(ev: U <:< T): Consumes[U] = ev.substituteCo(c) ^ -leibniz-liskov.scala:35: error: type mismatch; - found : F[T] +leibniz-liskov.scala:35: error: polymorphic expression cannot be instantiated to expected type; + found : [F[+_]]F[T] required: LeibnizLiskov.this.Consumes[U] (which expands to) U => Unit def convertConsume2[U, T](c: Consumes[T])(ev: U <:< T): Consumes[U] = ev.substituteCo(c) diff --git a/test/files/neg/macro-invalidret.check b/test/files/neg/macro-invalidret.check index ceba1b88c927..11097f429909 100644 --- a/test/files/neg/macro-invalidret.check +++ b/test/files/neg/macro-invalidret.check @@ -39,9 +39,9 @@ type mismatch for return type: reflect.runtime.universe.Literal does not conform def bar2: Int = macro Impls.foo2 ^ Macros_Test_2.scala:33: error: exception during macro expansion: -#partest !java15 +#partest !java15+ java.lang.NullPointerException -#partest java15 +#partest java15+ java.lang.NullPointerException: Cannot throw exception because "null" is null #partest at Impls$.foo3(Impls_1.scala:7) diff --git a/test/files/neg/main1.check b/test/files/neg/main1.check index e1ba37b9ac90..4d9ef2fba906 100644 --- a/test/files/neg/main1.check +++ b/test/files/neg/main1.check @@ -29,7 +29,7 @@ main1.scala:41: warning: Foo has a valid main method (args: Array[String]): Unit object Foo extends Foo { // Overrides main from the class ^ main1.scala:53: warning: not a valid main method for p6.Main, - because main methods must have the exact signature (Array[String])Unit. + because main methods must have the exact signature `(Array[String]): Unit`. To define an entry point, please define the main method as: def main(args: Array[String]): Unit @@ -42,7 +42,7 @@ main1.scala:59: warning: Main has a main method (args: Array[Int]): Unit, object Main { ^ main1.scala:60: warning: not a valid main method for p7.Main, - because main methods must have the exact signature (Array[String])Unit. + because main methods must have the exact signature `(Array[String]): Unit`. To define an entry point, please define the main method as: def main(args: Array[String]): Unit @@ -55,19 +55,26 @@ main1.scala:66: warning: Main has a main method, object Main { ^ main1.scala:68: warning: not a valid main method for p8.Main, - because main methods must have the exact signature (Array[String])Unit. 
+ because main methods must have the exact signature `(Array[String]): Unit`. To define an entry point, please define the main method as: def main(args: Array[String]): Unit def main(args: Array[Double]) = () ^ main1.scala:67: warning: not a valid main method for p8.Main, - because main methods must have the exact signature (Array[String])Unit. + because main methods must have the exact signature `(Array[String]): Unit`. To define an entry point, please define the main method as: def main(args: Array[String]): Unit def main(args: Array[Int]) = () ^ +main1.scala:74: warning: not a valid main method for t7448.Main, + because main methods must have the exact signature `(Array[String]): Unit`, though Scala runners will forgive a non-Unit result. + To define an entry point, please define the main method as: + def main(args: Array[String]): Unit + + def main(args: Array[String]) = ??? + ^ error: No warnings can be incurred under -Werror. -11 warnings +12 warnings 1 error diff --git a/test/files/neg/main1.scala b/test/files/neg/main1.scala index 88a94d85bbb2..295920808350 100644 --- a/test/files/neg/main1.scala +++ b/test/files/neg/main1.scala @@ -68,3 +68,9 @@ package p8 { def main(args: Array[Double]) = () } } + +package t7448 { + object Main { + def main(args: Array[String]) = ??? + } +} diff --git a/test/files/neg/multiLineOps.check b/test/files/neg/multiLineOps.check index c9882d57e1c2..e3d865c984d4 100644 --- a/test/files/neg/multiLineOps.check +++ b/test/files/neg/multiLineOps.check @@ -1,5 +1,5 @@ multiLineOps.scala:6: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses - +3 // error: Expected a toplevel definition + +3 // warning: a pure expression does nothing in statement position ^ error: No warnings can be incurred under -Werror. 1 warning diff --git a/test/files/neg/multiLineOps.scala b/test/files/neg/multiLineOps.scala index 792528620773..4a92fd9f2c0c 100644 --- a/test/files/neg/multiLineOps.scala +++ b/test/files/neg/multiLineOps.scala @@ -1,7 +1,7 @@ -// scalac: -Werror -Xsource:3 +// scalac: -Werror -Xlint -Xsource:3 class Test { val x = 1 + 2 - +3 // error: Expected a toplevel definition + +3 // warning: a pure expression does nothing in statement position } diff --git a/test/files/neg/not-found.check b/test/files/neg/not-found.check new file mode 100644 index 000000000000..da64a6cfe1fe --- /dev/null +++ b/test/files/neg/not-found.check @@ -0,0 +1,30 @@ +not-found.scala:10: error: not found: value Simple +Identifiers that begin with uppercase are not pattern variables but match the value in scope. + case Simple => 2 + ^ +not-found.scala:11: error: not found: value Simple + case Simple.member => 3 + ^ +not-found.scala:12: error: not found: value sample +Identifiers enclosed in backticks are not pattern variables but match the value in scope. + case `sample` => 4 + ^ +not-found.scala:13: error: not found: type Simple + case _: Simple => 5 + ^ +not-found.scala:14: error: not found: value Simple + case Simple(_) => 6 + ^ +not-found.scala:17: error: object Simple is not a member of package p +did you mean Sample? + def g = p.Simple + ^ +not-found.scala:21: error: not found: value X +Identifiers that begin with uppercase are not pattern variables but match the value in scope. + val X :: Nil = List(42) + ^ +not-found.scala:21: warning: Pattern definition introduces Unit-valued member of T; consider wrapping it in `locally { ... }`. 
+ val X :: Nil = List(42) + ^ +1 warning +7 errors diff --git a/test/files/neg/not-found.scala b/test/files/neg/not-found.scala new file mode 100644 index 000000000000..239548e58bbe --- /dev/null +++ b/test/files/neg/not-found.scala @@ -0,0 +1,22 @@ + +package p + +object Sample + +trait T { + def f(x: Any) = + x match { + case Sample => 1 + case Simple => 2 + case Simple.member => 3 + case `sample` => 4 + case _: Simple => 5 + case Simple(_) => 6 + case _ => 7 + } + def g = p.Simple + + val x :: Nil = List(42) + + val X :: Nil = List(42) +} diff --git a/test/files/neg/open-infix-future.check b/test/files/neg/open-infix-future.check new file mode 100644 index 000000000000..15515fc2ef50 --- /dev/null +++ b/test/files/neg/open-infix-future.check @@ -0,0 +1,22 @@ +open-infix-future.scala:4: error: expected class or object definition +open trait A // error +^ +open-infix-future.scala:5: error: expected class or object definition +open object B // error +^ +open-infix-future.scala:8: error: ';' expected but 'val' found. + infix val a: Int = 1 // error + ^ +open-infix-future.scala:9: error: ';' expected but 'var' found. + infix var b: Int = 1 // error + ^ +open-infix-future.scala:11: error: ';' expected but 'type' found. + open type D // error + ^ +open-infix-future.scala:14: error: illegal start of statement + open class E // error + ^ +open-infix-future.scala:15: error: ';' expected but 'def' found. + open def bla(y: Int) = y // error + ^ +7 errors diff --git a/test/files/neg/open-infix-future.scala b/test/files/neg/open-infix-future.scala new file mode 100644 index 000000000000..2a250f3b006e --- /dev/null +++ b/test/files/neg/open-infix-future.scala @@ -0,0 +1,17 @@ +// scalac: -Xsource:3 +// + +open trait A // error +open object B // error + +class C { + infix val a: Int = 1 // error + infix var b: Int = 1 // error + + open type D // error + + def foo: Unit = { + open class E // error + open def bla(y: Int) = y // error + } +} diff --git a/test/files/neg/patmat-exprs-b.check b/test/files/neg/patmat-exprs-b.check new file mode 100644 index 000000000000..c1a39e7f5565 --- /dev/null +++ b/test/files/neg/patmat-exprs-b.check @@ -0,0 +1,21 @@ +patmat-exprs-b.scala:42: warning: parameter value num in class Add is never used + case class Add[T](args: Iterable[Expr[T]])(implicit @nowarn num: NumericOps[T]) extends ManyArg[T] { + ^ +patmat-exprs-b.scala:46: warning: parameter value num in class Add2 is never used + case class Add2[T](left: Expr[T], right: Expr[T])(implicit @nowarn num: NumericOps[T]) extends TwoArg[T] { + ^ +patmat-exprs-b.scala:49: warning: parameter value num in class Add3 is never used + case class Add3[T](a1: Expr[T], a2: Expr[T], a3: Expr[T])(implicit @nowarn num: NumericOps[T]) extends ManyArg[T] { + ^ +patmat-exprs-b.scala:42: warning: @nowarn annotation does not suppress any warnings + case class Add[T](args: Iterable[Expr[T]])(implicit @nowarn num: NumericOps[T]) extends ManyArg[T] { + ^ +patmat-exprs-b.scala:46: warning: @nowarn annotation does not suppress any warnings + case class Add2[T](left: Expr[T], right: Expr[T])(implicit @nowarn num: NumericOps[T]) extends TwoArg[T] { + ^ +patmat-exprs-b.scala:49: warning: @nowarn annotation does not suppress any warnings + case class Add3[T](a1: Expr[T], a2: Expr[T], a3: Expr[T])(implicit @nowarn num: NumericOps[T]) extends ManyArg[T] { + ^ +error: No warnings can be incurred under -Werror. 
+6 warnings +1 error diff --git a/test/files/neg/patmat-exprs-b.scala b/test/files/neg/patmat-exprs-b.scala new file mode 100644 index 000000000000..17b2ed63473b --- /dev/null +++ b/test/files/neg/patmat-exprs-b.scala @@ -0,0 +1,53 @@ +// scalac: -Werror -Xlint +// + +import annotation.nowarn + +trait Pattern { + + trait NumericOps[T] extends Serializable { + + def zero: T + + def add(a: T, b: T): T + def add(a: T, b: T, c: T): T = add(a, add(b, c)) + + def sum(terms: Iterable[T]) = terms.foldLeft(zero)(add) + def sum(terms: Iterator[T]) = terms.foldLeft(zero)(add) + } + + trait Expr[T] { + + /** Returns arguments of this operator */ + def args: Iterable[Expr[_]] + + def + (other: Expr[T])(implicit n: NumericOps[T]) = Add(List(this, other)) + + def specialize(implicit num: NumericOps[T]): Expr[T] = + this match { + case Add(Seq(a, b)) => Add2(a, b) + case Add(Seq(a, b, c)) => Add3(a, b, c) + case x => x + } + } + + trait TwoArg[T] extends Expr[T] { + val left: Expr[T] + val right: Expr[T] + val args = List(left, right) + } + + trait ManyArg[T] extends Expr[T] + + case class Add[T](args: Iterable[Expr[T]])(implicit @nowarn num: NumericOps[T]) extends ManyArg[T] { + override def toString = "(" + args.mkString(" + ") + ")" + } + + case class Add2[T](left: Expr[T], right: Expr[T])(implicit @nowarn num: NumericOps[T]) extends TwoArg[T] { + override def toString = "(" + left + " + " + right + ")" + } + case class Add3[T](a1: Expr[T], a2: Expr[T], a3: Expr[T])(implicit @nowarn num: NumericOps[T]) extends ManyArg[T] { + val args = List(a1, a2, a3) + override def toString = "(" + a1 + " + " + a2 + " + " + a3 + ")" + } +} diff --git a/test/files/neg/qmark-deprecated.check b/test/files/neg/qmark-deprecated.check new file mode 100644 index 000000000000..2d96f788ab9d --- /dev/null +++ b/test/files/neg/qmark-deprecated.check @@ -0,0 +1,41 @@ +qmark-deprecated.scala:4: error: using `?` as a type name requires backticks. +class Foo[?] // error + ^ +qmark-deprecated.scala:6: error: using `?` as a type name requires backticks. +class Bar[M[?] <: List[?]] // errors + ^ +qmark-deprecated.scala:10: error: using `?` as a type name requires backticks. + class ? { val x = 1 } // error + ^ +qmark-deprecated.scala:16: error: using `?` as a type name requires backticks. + trait ? // error + ^ +qmark-deprecated.scala:22: error: using `?` as a type name requires backticks. + type ? = Int // error + ^ +qmark-deprecated.scala:33: error: using `?` as a type name requires backticks. + def bar1[?] = {} // error + ^ +qmark-deprecated.scala:35: error: using `?` as a type name requires backticks. + def bar3[M[?]] = {} // error + ^ +qmark-deprecated.scala:38: error: using `?` as a type name requires backticks. + type A[?] = Int // error + ^ +qmark-deprecated.scala:6: warning: Wrap `?` in backticks to continue to use it as an identifier, or use `-Xsource:3` to use it as a wildcard like in Scala 3. +class Bar[M[?] <: List[?]] // errors + ^ +qmark-deprecated.scala:27: warning: Wrap `?` in backticks to continue to use it as an identifier, or use `-Xsource:3` to use it as a wildcard like in Scala 3. + val x: Array[?] = new Array[?](0) // errors + ^ +qmark-deprecated.scala:27: warning: Wrap `?` in backticks to continue to use it as an identifier, or use `-Xsource:3` to use it as a wildcard like in Scala 3. + val x: Array[?] = new Array[?](0) // errors + ^ +qmark-deprecated.scala:30: warning: Wrap `?` in backticks to continue to use it as an identifier, or use `-Xsource:3` to use it as a wildcard like in Scala 3. 
+ def foo1[T <: Array[?]](x: T): Array[?] = x // errors + ^ +qmark-deprecated.scala:30: warning: Wrap `?` in backticks to continue to use it as an identifier, or use `-Xsource:3` to use it as a wildcard like in Scala 3. + def foo1[T <: Array[?]](x: T): Array[?] = x // errors + ^ +5 warnings +8 errors diff --git a/test/files/neg/qmark-deprecated.scala b/test/files/neg/qmark-deprecated.scala new file mode 100644 index 000000000000..c370cfcb2673 --- /dev/null +++ b/test/files/neg/qmark-deprecated.scala @@ -0,0 +1,40 @@ +// scalac: -deprecation -Xfatal-warnings +// + +class Foo[?] // error +class Foo2[`?`] // ok +class Bar[M[?] <: List[?]] // errors +class Bar2[M[`?`] <: List[`?`]] // ok + +object G { + class ? { val x = 1 } // error +} +object G2 { + class `?` { val x = 1 } // ok +} +object H { + trait ? // error +} +object H2 { + trait `?` // ok +} +object I { + type ? = Int // error +} +object I2 { + type `?` = Int // ok + + val x: Array[?] = new Array[?](0) // errors + val y: Array[`?`] = new Array[`?`](0) // ok + + def foo1[T <: Array[?]](x: T): Array[?] = x // errors + def foo2[T <: Array[`?`]](x: T): Array[`?`] = x // ok + + def bar1[?] = {} // error + def bar2[`?`] = {} // ok + def bar3[M[?]] = {} // error + def bar4[M[`?`]] = {} // error + + type A[?] = Int // error + type B[`?`] = Int // ok +} diff --git a/test/files/neg/ref-checks.check b/test/files/neg/ref-checks.check new file mode 100644 index 000000000000..8ea6d6e02b00 --- /dev/null +++ b/test/files/neg/ref-checks.check @@ -0,0 +1,17 @@ +ref-checks.scala:9: error: type arguments [Int] do not conform to trait Chars's type parameter bounds [A <: CharSequence] + @ann[Chars[Int]] val x = 42 + ^ +ref-checks.scala:10: error: type arguments [Double] do not conform to trait Chars's type parameter bounds [A <: CharSequence] + val y: Two[Chars[Long] @uncheckedBounds, Chars[Double]] = null + ^ +ref-checks.scala:11: error: type arguments [X forSome { type X <: Int }] do not conform to trait Chars's type parameter bounds [A <: CharSequence] + def z: Chars[X forSome { type X <: Int }] = null + ^ +ref-checks.scala:18: warning: type DeprecatedAlias in object Test is deprecated + case _: DeprecatedAlias => + ^ +ref-checks.scala:19: warning: class DeprecatedClass in object Test is deprecated + case _: DeprecatedClass => + ^ +2 warnings +3 errors diff --git a/test/files/neg/ref-checks.scala b/test/files/neg/ref-checks.scala new file mode 100644 index 000000000000..e68f25938810 --- /dev/null +++ b/test/files/neg/ref-checks.scala @@ -0,0 +1,22 @@ +// scalac: -deprecation -Werror +import scala.annotation.{StaticAnnotation, nowarn} +import scala.reflect.internal.annotations.uncheckedBounds + +object Test { + trait Chars[A <: CharSequence] + trait Two[A, B] + class ann[A] extends StaticAnnotation + @ann[Chars[Int]] val x = 42 + val y: Two[Chars[Long] @uncheckedBounds, Chars[Double]] = null + def z: Chars[X forSome { type X <: Int }] = null + + @deprecated type DeprecatedAlias = String + @deprecated class DeprecatedClass + @nowarn("cat=deprecation") type UndeprecatedAlias = DeprecatedClass + + ("": Any) match { + case _: DeprecatedAlias => + case _: DeprecatedClass => + case _: UndeprecatedAlias => // no warning here + } +} diff --git a/test/files/neg/scala3-keywords.check b/test/files/neg/scala3-keywords.check new file mode 100644 index 000000000000..7f3a2155509a --- /dev/null +++ b/test/files/neg/scala3-keywords.check @@ -0,0 +1,21 @@ +scala3-keywords.scala:15: warning: Wrap `enum` in backticks to use it as an identifier, it will become a keyword in 
Scala 3. + val enum: Int = 1 // error + ^ +scala3-keywords.scala:16: warning: Wrap `export` in backticks to use it as an identifier, it will become a keyword in Scala 3. + val export: Int = 1 // error + ^ +scala3-keywords.scala:17: warning: Wrap `given` in backticks to use it as an identifier, it will become a keyword in Scala 3. + val given: Int = 1 // error + ^ +scala3-keywords.scala:18: warning: Wrap `given` in backticks to use it as an identifier, it will become a keyword in Scala 3. + def foo(given: Int) = {} // error + ^ +scala3-keywords.scala:19: warning: Wrap `export` in backticks to use it as an identifier, it will become a keyword in Scala 3. + def bla[export <: Int] = {} // error + ^ +scala3-keywords.scala:21: warning: Wrap `enum` in backticks to use it as an identifier, it will become a keyword in Scala 3. +class enum // error + ^ +error: No warnings can be incurred under -Werror. +6 warnings +1 error diff --git a/test/files/neg/scala3-keywords.scala b/test/files/neg/scala3-keywords.scala new file mode 100644 index 000000000000..23fbce36dc4c --- /dev/null +++ b/test/files/neg/scala3-keywords.scala @@ -0,0 +1,21 @@ +// scalac: -deprecation -Xfatal-warnings +// +class A { + val `enum`: Int = 1 + println(enum) + val `export`: Int = 1 + val `given`: Int = 1 + def foo(`given`: Int) = given + def bla[`export` <: Int] = { + class `enum` + new enum + } +} +class B { + val enum: Int = 1 // error + val export: Int = 1 // error + val given: Int = 1 // error + def foo(given: Int) = {} // error + def bla[export <: Int] = {} // error +} +class enum // error diff --git a/test/files/neg/stmt-expr-discard.check b/test/files/neg/stmt-expr-discard.check index 250de20f98d2..cc22eb1d843b 100644 --- a/test/files/neg/stmt-expr-discard.check +++ b/test/files/neg/stmt-expr-discard.check @@ -1,15 +1,3 @@ -stmt-expr-discard.scala:5: warning: Line starts with an operator that in future -will be taken as an infix expression continued from the previous line. -To force the previous interpretation as a separate statement, -add an explicit `;`, add an empty line, or remove spaces after the operator. - + 2 - ^ -stmt-expr-discard.scala:6: warning: Line starts with an operator that in future -will be taken as an infix expression continued from the previous line. -To force the previous interpretation as a separate statement, -add an explicit `;`, add an empty line, or remove spaces after the operator. - - 4 - ^ stmt-expr-discard.scala:5: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses + 2 ^ @@ -17,5 +5,5 @@ stmt-expr-discard.scala:6: warning: a pure expression does nothing in statement - 4 ^ error: No warnings can be incurred under -Werror. 
-4 warnings +2 warnings 1 error diff --git a/test/files/neg/surrogates.check b/test/files/neg/surrogates.check new file mode 100644 index 000000000000..3521b9b72817 --- /dev/null +++ b/test/files/neg/surrogates.check @@ -0,0 +1,4 @@ +surrogates.scala:3: error: illegal codepoint in Char constant: '\ud801\udc00' + def `too wide for Char` = '𐐀' + ^ +1 error diff --git a/test/files/neg/surrogates.scala b/test/files/neg/surrogates.scala new file mode 100644 index 000000000000..d8e2ef545a18 --- /dev/null +++ b/test/files/neg/surrogates.scala @@ -0,0 +1,4 @@ + +class C { + def `too wide for Char` = '𐐀' +} diff --git a/test/files/neg/symbol-literal-removal.check b/test/files/neg/symbol-literal-removal.check deleted file mode 100644 index 839b635950fd..000000000000 --- a/test/files/neg/symbol-literal-removal.check +++ /dev/null @@ -1,4 +0,0 @@ -symbol-literal-removal.scala:4: error: symbol literal is unsupported; use Symbol("TestSymbol") instead - val foo = 'TestSymbol - ^ -1 error diff --git a/test/files/neg/symbol-literal-removal.scala b/test/files/neg/symbol-literal-removal.scala deleted file mode 100644 index 0d95ded21fd1..000000000000 --- a/test/files/neg/symbol-literal-removal.scala +++ /dev/null @@ -1,5 +0,0 @@ -// scalac: -Xsource:3 -// -abstract class Foo { - val foo = 'TestSymbol -} diff --git a/test/files/neg/t11374b.check b/test/files/neg/t11374b.check index 4867de39c3a2..f7ec70d4c1d8 100644 --- a/test/files/neg/t11374b.check +++ b/test/files/neg/t11374b.check @@ -1,7 +1,9 @@ t11374b.scala:3: error: not found: value _ +Identifiers enclosed in backticks are not pattern variables but match the value in scope. val Some(`_`) = Option(42) // was crashola ^ t11374b.scala:6: error: not found: value _ +Identifiers enclosed in backticks are not pattern variables but match the value in scope. val Some(`_`) = Option(42) // was crashola ^ t11374b.scala:3: warning: Pattern definition introduces Unit-valued member of C; consider wrapping it in `locally { ... }`. diff --git a/test/files/neg/t12071.check b/test/files/neg/t12071.check new file mode 100644 index 000000000000..88198baf3274 --- /dev/null +++ b/test/files/neg/t12071.check @@ -0,0 +1,41 @@ +t12071.scala:15: error: not found: value c c + `c c` i + ^ +t12071.scala:15: error: postfix operator i needs to be enabled +by making the implicit value scala.language.postfixOps visible. +This can be achieved by adding the import clause 'import scala.language.postfixOps' +or by setting the compiler option -language:postfixOps. +See the Scaladoc for value scala.language.postfixOps for a discussion +why the feature needs to be explicitly enabled. +Line starts with an operator that in future +will be taken as an infix expression continued from the previous line. +To force the previous interpretation as a separate statement, +add an explicit `;`, add an empty line, or remove spaces after the operator. + `c c` i + ^ +t12071.scala:20: warning: Line starts with an operator that in future +will be taken as an infix expression continued from the previous line. +To force the previous interpretation as a separate statement, +add an explicit `;`, add an empty line, or remove spaces after the operator. + + 2 + ^ +t12071.scala:25: warning: Line starts with an operator that in future +will be taken as an infix expression continued from the previous line. +To force the previous interpretation as a separate statement, +add an explicit `;`, add an empty line, or remove spaces after the operator. 
+ + 1 + ^ +t12071.scala:28: warning: Line starts with an operator that in future +will be taken as an infix expression continued from the previous line. +To force the previous interpretation as a separate statement, +add an explicit `;`, add an empty line, or remove spaces after the operator. + `test-1` + `test-2` + ^ +t12071.scala:31: warning: Line starts with an operator that in future +will be taken as an infix expression continued from the previous line. +To force the previous interpretation as a separate statement, +add an explicit `;`, add an empty line, or remove spaces after the operator. + `compareTo` (2 - 1) + ^ +4 warnings +2 errors diff --git a/test/files/neg/t12071.scala b/test/files/neg/t12071.scala new file mode 100644 index 000000000000..f3f9529c147b --- /dev/null +++ b/test/files/neg/t12071.scala @@ -0,0 +1,51 @@ +// scalac: -Werror -Xlint -Xmigration:2.13 + +class C { + def `c c`(n: Int): Int = n + 1 +} + +// backticked operator is candidate for multiline infix, +// but backticked value is an innocent bystander. +// +class t12071 { + def c: C = ??? + def i: Int = 42 + def `n n`: Int = 17 + def f = c + `c c` i + def g = i + + `n n` + def basic = + 1 + + 2 +} + +object C { + def x = 42 + + 1 + + def y = 1 + + `test-1` + `test-2` + + def z = 2 + `compareTo` (2 - 1) + + def `test-1`: Int = 23 + def `test-2`: Int = 42 + def compareTo(x: Int) = println("lol") + + def yy = 1 + /* fails in scala 3 + + + `test-1` + + + `test-2` + */ +} + +object Test extends App { + println(C.x) + println(C.y) + println(C.z) + println(C.yy) +} diff --git a/test/files/neg/t12233.check b/test/files/neg/t12233.check new file mode 100644 index 000000000000..ffa267af2701 --- /dev/null +++ b/test/files/neg/t12233.check @@ -0,0 +1,7 @@ +t12233.scala:4: error: ambiguous implicit values: + both value hehe of type TypeClass[T] + and value evidence$2 of type TypeClass[T] + match expected type TypeClass[T] + def this(i: Int)(implicit hehe: TypeClass[T], j: Int) = this(i, j) + ^ +1 error diff --git a/test/files/neg/t12233.scala b/test/files/neg/t12233.scala new file mode 100644 index 000000000000..b2ad76732461 --- /dev/null +++ b/test/files/neg/t12233.scala @@ -0,0 +1,20 @@ + +trait TypeClass[T] +class Hehe[T: TypeClass](i: Int, j: Int) { + def this(i: Int)(implicit hehe: TypeClass[T], j: Int) = this(i, j) +} + +/* was +t12233.scala:4: error: too many arguments (found 3, expected 1) for constructor Hehe: (implicit evidence$1: TypeClass[T]): Hehe[T] + def this(i: Int)(implicit hehe: TypeClass[T], j: Int) = this(i, j) + ^ +1 error + * now +t12233.scala:4: error: ambiguous implicit values: + both value hehe of type TypeClass[T] + and value evidence$2 of type TypeClass[T] + match expected type TypeClass[T] + def this(i: Int)(implicit hehe: TypeClass[T], j: Int) = this(i, j) + ^ +1 error + */ diff --git a/test/files/neg/t12324.check b/test/files/neg/t12324.check index 5a7bbb7119f6..3ade85f310a8 100644 --- a/test/files/neg/t12324.check +++ b/test/files/neg/t12324.check @@ -13,4 +13,7 @@ t12324.scala:12: error: `@throws` only allowed for methods and constructors t12324.scala:14: error: `@throws` only allowed for methods and constructors def g(): Unit = (): @throws[Exception] ^ -5 errors +t12324.scala:16: error: `@throws` only allowed for methods and constructors + def n(i: Int) = i match { case 42 => 27: @throws[Exception] } // not all cruft reaches refchecks + ^ +6 errors diff --git a/test/files/neg/t12347.check b/test/files/neg/t12347.check new file mode 100644 index 000000000000..0476089c1c4e --- /dev/null +++ 
b/test/files/neg/t12347.check @@ -0,0 +1,10 @@ +t12347.scala:14: error: unknown parameter name: x + X.f(n = count, x = text) + ^ +t12347.scala:15: error: overloaded method f with alternatives: + (s: String)String + (n: Int,s: String)String + [which have no such parameter x] cannot be applied to (n: Int, x: String) + Y.f(n = count, x = text) + ^ +2 errors diff --git a/test/files/neg/t12347.scala b/test/files/neg/t12347.scala new file mode 100644 index 000000000000..1795ecfc8320 --- /dev/null +++ b/test/files/neg/t12347.scala @@ -0,0 +1,16 @@ + +object X { + def f(n: Int, s: String) = s * n +} + +object Y { + def f(n: Int, s: String) = s * n + def f(s: String) = s * 3 +} + +object Test extends App { + def count = 2 + def text = "hi" + X.f(n = count, x = text) + Y.f(n = count, x = text) +} diff --git a/test/files/neg/t12349.check b/test/files/neg/t12349.check index 2c7426ad6a95..ed6d1b26451d 100644 --- a/test/files/neg/t12349.check +++ b/test/files/neg/t12349.check @@ -33,6 +33,11 @@ def a8(): Unit (defined in class t12349a) override should be public protected[this] override def a8(): Unit = println("Inner12349b#a8()") // weaker access privileges ^ +t12349b.scala:14: error: weaker access privileges in overriding +def a9(): Unit (defined in class t12349a) + override should not be private + private[this] override def a9(): Unit = println("Inner12349b#a9()") // weaker access privileges + ^ t12349b.scala:18: error: weaker access privileges in overriding protected[package t12349] def b3(): Unit (defined in class t12349a) override should not be private @@ -48,6 +53,11 @@ protected[package t12349] def b7(): Unit (defined in class t12349a) override should at least be protected[t12349] private[t12349] override def b7(): Unit = println("Inner12349b#b7()") // weaker access privileges ^ +t12349b.scala:24: error: weaker access privileges in overriding +protected[package t12349] def b9(): Unit (defined in class t12349a) + override should not be private + private[this] override def b9(): Unit = println("Inner12349b#b9()") // weaker access privileges + ^ t12349b.scala:27: error: weaker access privileges in overriding private[package t12349] def c2(): Unit (defined in class t12349a) override should at least be private[t12349] @@ -73,6 +83,11 @@ private[package t12349] def c8(): Unit (defined in class t12349a) override should at least be private[t12349] protected[this] override def c8(): Unit = println("Inner12349b#c8()") // weaker access privileges ^ +t12349b.scala:34: error: weaker access privileges in overriding +private[package t12349] def c9(): Unit (defined in class t12349a) + override should not be private + private[this] override def c9(): Unit = println("Inner12349b#c9()") // weaker access privileges + ^ t12349b.scala:36: error: method d1 overrides nothing override def d1(): Unit = println("Inner12349b#d1()") // overrides nothing ^ @@ -135,6 +150,11 @@ def a8(): Unit (defined in class t12349a) override should be public protected[this] override def a8(): Unit = println("Inner12349c#a8()") // weaker access privileges ^ +t12349c.scala:18: error: weaker access privileges in overriding +def a9(): Unit (defined in class t12349a) + override should not be private + private[this] override def a9(): Unit = println("Inner12349c#a9()") // weaker access privileges + ^ t12349c.scala:22: error: weaker access privileges in overriding protected[package t12349] def b3(): Unit (defined in class t12349a) override should not be private @@ -150,6 +170,11 @@ protected[package t12349] def b7(): Unit (defined in class 
t12349a) override should at least be protected[t12349] private[pkg] override def b7(): Unit = println("Inner12349c#b7()") // weaker access privileges ^ +t12349c.scala:28: error: weaker access privileges in overriding +protected[package t12349] def b9(): Unit (defined in class t12349a) + override should not be private + private[this] override def b9(): Unit = println("Inner12349c#b9()") // weaker access privileges + ^ t12349c.scala:31: error: weaker access privileges in overriding private[package t12349] def c2(): Unit (defined in class t12349a) override should at least be private[t12349] @@ -185,14 +210,14 @@ private[package t12349] def c8(): Unit (defined in class t12349a) override should at least be private[t12349] protected[this] override def c8(): Unit = println("Inner12349c#c8()") // weaker access privileges ^ +t12349c.scala:38: error: weaker access privileges in overriding +private[package t12349] def c9(): Unit (defined in class t12349a) + override should not be private + private[this] override def c9(): Unit = println("Inner12349c#c9()") // overrides nothing (invisible) + ^ t12349c.scala:30: error: method c1 overrides nothing override def c1(): Unit = println("Inner12349c#c1()") // overrides nothing (invisible) ^ -t12349c.scala:38: error: method c9 overrides nothing. -Note: the super classes of class Inner12349c contain the following, non final members named c9: -private[package t12349] def c9(): Unit - private[this] override def c9(): Unit = println("Inner12349c#c9()") // overrides nothing (invisible) - ^ t12349c.scala:40: error: method d1 overrides nothing override def d1(): Unit = println("Inner12349c#d1()") // overrides nothing ^ @@ -220,4 +245,4 @@ t12349c.scala:47: error: method d8 overrides nothing t12349c.scala:48: error: method d9 overrides nothing private[this] override def d9(): Unit = println("Inner12349c#d9()") // overrides nothing ^ -52 errors +57 errors diff --git a/test/files/neg/t12349/t12349b.scala b/test/files/neg/t12349/t12349b.scala index 19079a3eb003..38b3309779b3 100644 --- a/test/files/neg/t12349/t12349b.scala +++ b/test/files/neg/t12349/t12349b.scala @@ -11,7 +11,7 @@ object t12349b { protected[t12349] override def a6(): Unit = println("Inner12349b#a6()") // weaker access privileges private[t12349] override def a7(): Unit = println("Inner12349b#a7()") // weaker access privileges protected[this] override def a8(): Unit = println("Inner12349b#a8()") // weaker access privileges - private[this] override def a9(): Unit = println("Inner12349b#a9()") // [#9334] + private[this] override def a9(): Unit = println("Inner12349b#a9()") // weaker access privileges override def b1(): Unit = println("Inner12349b#b1()") protected override def b2(): Unit = println("Inner12349b#b2()") @@ -21,7 +21,7 @@ object t12349b { protected[t12349] override def b6(): Unit = println("Inner12349b#b6()") private[t12349] override def b7(): Unit = println("Inner12349b#b7()") // weaker access privileges protected[this] override def b8(): Unit = println("Inner12349b#b8()") // [#12349] - not fixed by PR #9525 - private[this] override def b9(): Unit = println("Inner12349b#b9()") // [#9334] + private[this] override def b9(): Unit = println("Inner12349b#b9()") // weaker access privileges override def c1(): Unit = println("Inner12349b#c1()") protected override def c2(): Unit = println("Inner12349b#c2()") // weaker access privileges @@ -31,7 +31,7 @@ object t12349b { protected[t12349] override def c6(): Unit = println("Inner12349b#c6()") private[t12349] override def c7(): Unit = 
println("Inner12349b#c7()") protected[this] override def c8(): Unit = println("Inner12349b#c8()") // weaker access privileges - private[this] override def c9(): Unit = println("Inner12349b#c9()") // [#9334] + private[this] override def c9(): Unit = println("Inner12349b#c9()") // weaker access privileges override def d1(): Unit = println("Inner12349b#d1()") // overrides nothing protected override def d2(): Unit = println("Inner12349b#d2()") // overrides nothing diff --git a/test/files/neg/t12349/t12349c.scala b/test/files/neg/t12349/t12349c.scala index 3ad062d33472..942991a22430 100644 --- a/test/files/neg/t12349/t12349c.scala +++ b/test/files/neg/t12349/t12349c.scala @@ -15,7 +15,7 @@ package pkg { protected[pkg] override def a6(): Unit = println("Inner12349c#a6()") // weaker access privileges private[pkg] override def a7(): Unit = println("Inner12349c#a7()") // weaker access privileges protected[this] override def a8(): Unit = println("Inner12349c#a8()") // weaker access privileges - private[this] override def a9(): Unit = println("Inner12349c#a9()") // [#9334] + private[this] override def a9(): Unit = println("Inner12349c#a9()") // weaker access privileges override def b1(): Unit = println("Inner12349c#b1()") protected override def b2(): Unit = println("Inner12349c#b2()") @@ -25,7 +25,7 @@ package pkg { protected[pkg] override def b6(): Unit = println("Inner12349c#b6()") private[pkg] override def b7(): Unit = println("Inner12349c#b7()") // weaker access privileges protected[this] override def b8(): Unit = println("Inner12349c#b8()") // [#12349] - not fixed by PR #9525 - private[this] override def b9(): Unit = println("Inner12349c#b9()") // [#9334] + private[this] override def b9(): Unit = println("Inner12349c#b9()") // weaker access privileges override def c1(): Unit = println("Inner12349c#c1()") // overrides nothing (invisible) protected override def c2(): Unit = println("Inner12349c#c2()") // weaker access privileges diff --git a/test/files/neg/t12380.check b/test/files/neg/t12380.check new file mode 100644 index 000000000000..4b9f7ae63a68 --- /dev/null +++ b/test/files/neg/t12380.check @@ -0,0 +1,8 @@ +Test.scala:1: error: incompatible type in overriding +def m(): String (defined in trait I) + with def m(): Object (defined in class C); + found : (): Object + required: (): String +object Test extends p.J.C with p.J.I { + ^ +1 error diff --git a/test/files/neg/t12380/J.java b/test/files/neg/t12380/J.java new file mode 100644 index 000000000000..280cea1286b1 --- /dev/null +++ b/test/files/neg/t12380/J.java @@ -0,0 +1,14 @@ +package p; + +public class J { + public static class C { + public Object m() { return new Object(); } + } + public interface I { + public String m(); + } + + public static class Test extends C implements I { + @Override public String m() { return ""; } + } +} diff --git a/test/files/neg/t12380/Test.scala b/test/files/neg/t12380/Test.scala new file mode 100644 index 000000000000..976b42ffdb93 --- /dev/null +++ b/test/files/neg/t12380/Test.scala @@ -0,0 +1,5 @@ +object Test extends p.J.C with p.J.I { + def main(args: Array[String]): Unit = { + println((this: p.J.I).m.trim) + } +} diff --git a/test/files/neg/t12394.check b/test/files/neg/t12394.check new file mode 100644 index 000000000000..7dbf4d49d9e5 --- /dev/null +++ b/test/files/neg/t12394.check @@ -0,0 +1,11 @@ +Test.scala:2: error: cannot override final member: +final def m(): Int (defined in class C) + with def m(): Int (defined in trait J) +class S2 extends p.A.C with p.A.J + ^ +Test.scala:4: error: cannot 
override final member: +final def m(): Int (defined in class C) + with def m(): Int (defined in trait J) +class S3 extends p.A.C with K + ^ +2 errors diff --git a/test/files/neg/t12394/A.java b/test/files/neg/t12394/A.java new file mode 100644 index 000000000000..cf3188018d93 --- /dev/null +++ b/test/files/neg/t12394/A.java @@ -0,0 +1,17 @@ +package p; + +public class A { + public static interface I { + default int m() { return 1; } + } + + public static interface J extends I { + @Override default int m() { return 2; } + } + + public static class C implements I { + @Override public final int m() { return 3; } + } + + public static class D extends C implements J { } +} diff --git a/test/files/neg/t12394/Test.scala b/test/files/neg/t12394/Test.scala new file mode 100644 index 000000000000..8a272c5127cd --- /dev/null +++ b/test/files/neg/t12394/Test.scala @@ -0,0 +1,4 @@ +class S1 extends p.A.D +class S2 extends p.A.C with p.A.J +trait K extends p.A.J +class S3 extends p.A.C with K diff --git a/test/files/neg/t12408.check b/test/files/neg/t12408.check new file mode 100644 index 000000000000..33be21bb4ecd --- /dev/null +++ b/test/files/neg/t12408.check @@ -0,0 +1,30 @@ +t12408.scala:6: warning: abstract type pattern B is unchecked since it is eliminated by erasure + def f1[B] = a match { case _: B => } // warn + ^ +t12408.scala:7: warning: abstract type B in type pattern t12408.Renderer[B] is unchecked since it is eliminated by erasure + def f2[B] = a match { case _: Renderer[B] => } // warn + ^ +t12408.scala:8: warning: non-variable type argument Int in type pattern List[Int] (the underlying of List[Int]) is unchecked since it is eliminated by erasure + def f3[B](xs: List[A]) = xs match { case _: List[Int] => } // warn + ^ +t12408.scala:9: warning: abstract type A in type pattern t12408.Renderer[A] is unchecked since it is eliminated by erasure + def g = a match { case _: Renderer[A] => } // now also warn + ^ +t12408.scala:14: warning: the type test for pattern (A, B, C, D, E, F, G, H, I, J, K, L, M) cannot be checked at runtime because it has type parameters eliminated by erasure + case _: (A,B,C,D,E,F,G,H,I,J,K,L,M) => + ^ +t12408.scala:17: warning: the type test for pattern (A, B, C, D, E, F, G, H, I, J, K, L, M) cannot be checked at runtime because it has type parameters eliminated by erasure + case _: (A,B,C,D,E,F,G,H,I,J,K,L,M) => + ^ +t12408.scala:22: warning: the type test for pattern (A, B, C, D, E, F, G, H, I, J, K, L, M) cannot be checked at runtime because it has type parameters eliminated by erasure + case _: (A,B,C,D,E,F,G,H,I,J,K,L,M) => + ^ +t12408.scala:47: warning: the type test for pattern t12408c.C[A,B] cannot be checked at runtime because it has type parameters eliminated by erasure + def test(t: T[A]) = t match { case _: C[A, B] => } // warn on B + ^ +t12408.scala:65: warning: the type test for pattern reported.Renderer[Page,Props] cannot be checked at runtime because it has type parameters eliminated by erasure + case r: Renderer[Page, Props] => 1 // warn as above + ^ +error: No warnings can be incurred under -Werror. 
+9 warnings +1 error diff --git a/test/files/neg/t12408.scala b/test/files/neg/t12408.scala new file mode 100644 index 000000000000..ab5879ae5c6d --- /dev/null +++ b/test/files/neg/t12408.scala @@ -0,0 +1,82 @@ +// scalac: -Werror + +package t12408 { + class Renderer[A] + class Test[A](a: Any) { + def f1[B] = a match { case _: B => } // warn + def f2[B] = a match { case _: Renderer[B] => } // warn + def f3[B](xs: List[A]) = xs match { case _: List[Int] => } // warn + def g = a match { case _: Renderer[A] => } // now also warn + } + + trait T[A,B,C,D,E,F,G,H,I,J,K,L,M] { + def f(a: Any) = a match { + case _: (A,B,C,D,E,F,G,H,I,J,K,L,M) => + } + def g[A,B,C,D,E,F,G,H,I,J,K,L,M] = (null: Any) match { + case _: (A,B,C,D,E,F,G,H,I,J,K,L,M) => + } + } + class C[A,B,C,D,E,F,G,H,I,J,K,L,M] { + def f(a: Any) = a match { + case _: (A,B,C,D,E,F,G,H,I,J,K,L,M) => + } + } +} + +package t12408b { + // trait's type params align with class C + sealed trait T[A, B] + final case class C[A, B](a: A, b: B) extends T[A, B] + + class Test[A, B] { + def test(t: T[A, B]) = t match { case _: C[A, B] => } // nowarn + } + object Test extends App { + println { + new Test[String, Int]().test(C("hi", 42)) + } + } +} + +package t12408c { + sealed trait T[A] + final case class C[A, B](a: A, b: B) extends T[A] + + class Test[A, B] { + def test(t: T[A]) = t match { case _: C[A, B] => } // warn on B + } + object Test extends App { + println { + new Test[String, Int]().test(C("hi", 42)) + } + } +} + +package reported { + sealed trait Action[Page] + final case class Renderer[Page, Props]() extends Action[Page] + sealed trait Redirect[Page] extends Action[Page] + + final class RouterLogic[Page, Props] { + + def hmm1(a: Action[Page]): Int = + a match { + case r: Renderer[Page, Props] => 1 // warn as above + case _ => 2 + } + + def hmm2(a: Action[Page]): Int = + a match { + case r: Redirect[Page] => 2 // nowarn + case _ => 1 + } + } +} + +package regression { + object unchecked3 { + /* nowarn */ def tparamLeakage1(x: Any) = x match { case Array() => 1 } + /* nowarn */ def tparamLeakage2(x: Any) = x match { case List() => 1 } + } +} diff --git a/test/files/neg/t12413.check b/test/files/neg/t12413.check new file mode 100644 index 000000000000..fefa9a3e8a80 --- /dev/null +++ b/test/files/neg/t12413.check @@ -0,0 +1,16 @@ +t12413.scala:13: error: inferred type arguments [AnyRef] do not conform to method close's type parameter bounds [Phantom >: AnyRef <: Open] + println(door.close.toString()) + ^ +t12413.scala:14: error: inferred type arguments [AnyRef] do not conform to method close's type parameter bounds [Phantom >: AnyRef <: Open] + println(door.close == 0) + ^ +t12413.scala:15: error: inferred type arguments [AnyRef] do not conform to method open's type parameter bounds [Phantom >: AnyRef <: Open] + println(door.open().toString) + ^ +t12413.scala:16: error: inferred type arguments [AnyRef] do not conform to method open's type parameter bounds [Phantom >: AnyRef <: Open] + println(door.open().toString()) + ^ +t12413.scala:17: error: inferred type arguments [AnyRef] do not conform to method open's type parameter bounds [Phantom >: AnyRef <: Open] + println(door.open() == 0) + ^ +5 errors diff --git a/test/files/neg/t12413.scala b/test/files/neg/t12413.scala new file mode 100644 index 000000000000..505c04f6b33b --- /dev/null +++ b/test/files/neg/t12413.scala @@ -0,0 +1,18 @@ +class Open + +class Door[State] { + def close[Phantom >: State <: Open]: Int = 0 + def open[Phantom >: State <: Open](): Int = 0 +} + +class Test { + val 
door = new Door[AnyRef] + // the error here happens later (at refchecks) + println(door.close.toString) + // the errors below happen when typing implicit conversions + println(door.close.toString()) + println(door.close == 0) + println(door.open().toString) + println(door.open().toString()) + println(door.open() == 0) +} diff --git a/test/files/neg/t12414.check b/test/files/neg/t12414.check new file mode 100644 index 000000000000..e94e68fb179c --- /dev/null +++ b/test/files/neg/t12414.check @@ -0,0 +1,6 @@ +t12414.scala:12: warning: fruitless type test: a value of type Trait1 cannot also be a Trait2 + case y: Trait2 => + ^ +error: No warnings can be incurred under -Werror. +1 warning +1 error diff --git a/test/files/neg/t12414.scala b/test/files/neg/t12414.scala new file mode 100644 index 000000000000..649fbb23e5b4 --- /dev/null +++ b/test/files/neg/t12414.scala @@ -0,0 +1,15 @@ +// scalac: -Werror + +sealed trait Trait1 +sealed trait Trait2 + +class Class1 extends Trait1 +class Class2 extends Trait2 + +object Test extends App { + def test(x: Trait1): Unit = + x match { + case y: Trait2 => + case _ => + } +} diff --git a/test/files/neg/t12414b.check b/test/files/neg/t12414b.check new file mode 100644 index 000000000000..82da8bfc3fe3 --- /dev/null +++ b/test/files/neg/t12414b.check @@ -0,0 +1,6 @@ +b_2.scala:6: warning: fruitless type test: a value of type Trait1 cannot also be a Trait2 + case y: Trait2 => + ^ +error: No warnings can be incurred under -Werror. +1 warning +1 error diff --git a/test/files/neg/t12414b/a_1.scala b/test/files/neg/t12414b/a_1.scala new file mode 100644 index 000000000000..cdb91902eb37 --- /dev/null +++ b/test/files/neg/t12414b/a_1.scala @@ -0,0 +1,6 @@ + +sealed trait Trait1 +sealed trait Trait2 + +class Class1 extends Trait1 +class Class2 extends Trait2 diff --git a/test/files/neg/t12414b/b_2.scala b/test/files/neg/t12414b/b_2.scala new file mode 100644 index 000000000000..87f5694346eb --- /dev/null +++ b/test/files/neg/t12414b/b_2.scala @@ -0,0 +1,9 @@ +// scalac: -Werror + +object Test extends App { + def test(x: Trait1): Unit = + x match { + case y: Trait2 => + case _ => + } +} diff --git a/test/files/neg/t12433.check b/test/files/neg/t12433.check new file mode 100644 index 000000000000..ff7288bf8858 --- /dev/null +++ b/test/files/neg/t12433.check @@ -0,0 +1,4 @@ +t12433.scala:5: error: not found: value / + def t1 = / + ^ +1 error diff --git a/test/files/neg/t12433.scala b/test/files/neg/t12433.scala new file mode 100644 index 000000000000..c1975ca848db --- /dev/null +++ b/test/files/neg/t12433.scala @@ -0,0 +1,7 @@ +// scalac: -Wunused:nowarn +import annotation.nowarn +object T { + @deprecated def f = 1 + def t1 = / + @nowarn def t2 = f +} diff --git a/test/files/neg/t2462c.scala b/test/files/neg/t2462c.scala index 9e62c8eb9ca8..d059a47ceb09 100644 --- a/test/files/neg/t2462c.scala +++ b/test/files/neg/t2462c.scala @@ -1,4 +1,4 @@ -// scalac: -Xfatal-warnings +// scalac: -Werror // import annotation._ diff --git a/test/files/neg/t2488.check b/test/files/neg/t2488.check index f69ca0a939dc..03b6838519d1 100644 --- a/test/files/neg/t2488.check +++ b/test/files/neg/t2488.check @@ -7,19 +7,19 @@ t2488.scala:7: error: overloaded method f with alternatives: t2488.scala:8: error: overloaded method f with alternatives: ()Int (a: Int,b: Int)Int - cannot be applied to (a: Int, c: Int) + [which have no such parameter c] cannot be applied to (a: Int, c: Int) println(c.f(a = 2, c = 2)) ^ t2488.scala:9: error: overloaded method f with alternatives: ()Int (a: Int,b: Int)Int 
- cannot be applied to (Int, c: Int) + [which have no such parameter c] cannot be applied to (Int, c: Int) println(c.f(2, c = 2)) ^ t2488.scala:10: error: overloaded method f with alternatives: ()Int (a: Int,b: Int)Int - cannot be applied to (c: Int, Int) + [which have no such parameter c] cannot be applied to (c: Int, Int) println(c.f(c = 2, 2)) ^ t2488.scala:11: error: overloaded method f with alternatives: diff --git a/test/files/neg/t2910.check b/test/files/neg/t2910.check index cdf36f9eaa14..fd98de338b06 100644 --- a/test/files/neg/t2910.check +++ b/test/files/neg/t2910.check @@ -1,16 +1,16 @@ -t2910.scala:3: error: forward reference extends over definition of value ret +t2910.scala:3: error: forward reference to value MyMatch defined on line 4 extends over definition of value ret val ret = l.collect({ case MyMatch(id) => id }) ^ -t2910.scala:9: error: forward reference extends over definition of value z +t2910.scala:9: error: forward reference to lazy value s defined on line 11 extends over definition of value z println(s.length) ^ -t2910.scala:16: error: forward reference extends over definition of value z +t2910.scala:16: error: forward reference to lazy value x defined on line 18 extends over definition of value z x ^ -t2910.scala:30: error: forward reference extends over definition of value x +t2910.scala:30: error: forward reference to value x defined on line 31 extends over definition of value x lazy val f: Int = x ^ -t2910.scala:35: error: forward reference extends over definition of variable x +t2910.scala:35: error: forward reference to lazy value g defined on line 37 extends over definition of variable x lazy val f: Int = g ^ 5 errors diff --git a/test/files/neg/t3692-new.check b/test/files/neg/t3692-new.check index b1d23eb8d24b..93104d8a1e69 100644 --- a/test/files/neg/t3692-new.check +++ b/test/files/neg/t3692-new.check @@ -1,10 +1,10 @@ t3692-new.scala:17: warning: non-variable type argument Int in type pattern scala.collection.immutable.Map[Int,Int] (the underlying of Map[Int,Int]) is unchecked since it is eliminated by erasure case m0: Map[Int, Int] => new java.util.HashMap[Integer, Integer] ^ -t3692-new.scala:18: warning: non-variable type argument Int in type pattern scala.collection.immutable.Map[Int,V] (the underlying of Map[Int,V]) is unchecked since it is eliminated by erasure +t3692-new.scala:18: warning: the type test for pattern scala.collection.immutable.Map[Int,V] (the underlying of Map[Int,V]) cannot be checked at runtime because it has type parameters eliminated by erasure case m1: Map[Int, V] => new java.util.HashMap[Integer, V] ^ -t3692-new.scala:19: warning: non-variable type argument Int in type pattern scala.collection.immutable.Map[T,Int] (the underlying of Map[T,Int]) is unchecked since it is eliminated by erasure +t3692-new.scala:19: warning: the type test for pattern scala.collection.immutable.Map[T,Int] (the underlying of Map[T,Int]) cannot be checked at runtime because it has type parameters eliminated by erasure case m2: Map[T, Int] => new java.util.HashMap[T, Integer] ^ t3692-new.scala:18: warning: unreachable code diff --git a/test/files/neg/t3692-new.scala b/test/files/neg/t3692-new.scala index 1fe209fe9629..063e141cb4a3 100644 --- a/test/files/neg/t3692-new.scala +++ b/test/files/neg/t3692-new.scala @@ -1,4 +1,4 @@ -// scalac: -Xfatal-warnings +// scalac: -Werror // import scala.reflect.{ClassTag, classTag} import java.lang.Integer diff --git a/test/files/neg/t4098.check b/test/files/neg/t4098.check index 590cee98698d..8e15e90abaa3 100644 
--- a/test/files/neg/t4098.check +++ b/test/files/neg/t4098.check @@ -1,13 +1,13 @@ -t4098.scala:3: error: forward reference not allowed from self constructor invocation +t4098.scala:3: error: forward reference to method b defined on line 4 not allowed from self constructor invocation this(b) ^ -t4098.scala:8: error: forward reference not allowed from self constructor invocation +t4098.scala:8: error: forward reference to lazy value b defined on line 9 not allowed from self constructor invocation this(b) ^ -t4098.scala:13: error: forward reference not allowed from self constructor invocation +t4098.scala:13: error: forward reference to value b defined on line 14 not allowed from self constructor invocation this(b) ^ -t4098.scala:18: error: forward reference not allowed from self constructor invocation +t4098.scala:18: error: forward reference to method b defined on line 20 not allowed from self constructor invocation this(b) ^ 4 errors diff --git a/test/files/neg/t4419.check b/test/files/neg/t4419.check index 7cf623541a9d..cce4223ecf24 100644 --- a/test/files/neg/t4419.check +++ b/test/files/neg/t4419.check @@ -1,4 +1,4 @@ -t4419.scala:2: error: forward reference extends over definition of value b +t4419.scala:2: error: forward reference to value a defined on line 2 extends over definition of value b { val b = a; val a = 1 ; println(a) } ^ 1 error diff --git a/test/files/neg/t4749.check b/test/files/neg/t4749.check index 2799d8ddc0b7..ee5967c2cd0c 100644 --- a/test/files/neg/t4749.check +++ b/test/files/neg/t4749.check @@ -1,5 +1,5 @@ t4749.scala:5: warning: not a valid main method for bippy.Fail1, - because main methods must have the exact signature (Array[String])Unit. + because main methods must have the exact signature `(Array[String]): Unit`, though Scala runners will forgive a non-Unit result. To define an entry point, please define the main method as: def main(args: Array[String]): Unit @@ -38,7 +38,7 @@ t4749.scala:28: warning: Fail6 has a valid main method (args: Array[String]): Un object Fail6 { ^ t4749.scala:44: warning: not a valid main method for bippy.Win3, - because main methods must have the exact signature (Array[String])Unit. + because main methods must have the exact signature `(Array[String]): Unit`, though Scala runners will forgive a non-Unit result. To define an entry point, please define the main method as: def main(args: Array[String]): Unit diff --git a/test/files/neg/t4762.check b/test/files/neg/t4762.check index bd1c9ebff690..aa7bdcec39eb 100644 --- a/test/files/neg/t4762.check +++ b/test/files/neg/t4762.check @@ -4,6 +4,10 @@ t4762.scala:17: warning: private[this] value x in class B shadows mutable x inhe t4762.scala:50: warning: private[this] value x in class Derived shadows mutable x inherited from class Base. Changes to x will not be visible within class Derived - you may want to give them distinct names. class Derived( x : Int ) extends Base( x ) { override def toString = x.toString } ^ -error: No warnings can be incurred under -Werror. 
+t4762.scala:13: error: weaker access privileges in overriding +val y: Int (defined in class A) + override should not be private + private[this] def y: Int = 99 + ^ 2 warnings 1 error diff --git a/test/files/neg/t5390.check b/test/files/neg/t5390.check index ddd56cd611ae..0f5b2a3a4e02 100644 --- a/test/files/neg/t5390.check +++ b/test/files/neg/t5390.check @@ -1,4 +1,4 @@ -t5390.scala:7: error: forward reference extends over definition of value b +t5390.scala:7: error: forward reference to value a defined on line 8 extends over definition of value b val b = a.B("") ^ 1 error diff --git a/test/files/neg/t5390b.check b/test/files/neg/t5390b.check index d54d6110b977..55c13c06d7d5 100644 --- a/test/files/neg/t5390b.check +++ b/test/files/neg/t5390b.check @@ -1,4 +1,4 @@ -t5390b.scala:7: error: forward reference extends over definition of value b +t5390b.scala:7: error: forward reference to value a defined on line 8 extends over definition of value b val b = a.B("") ^ 1 error diff --git a/test/files/neg/t5390c.check b/test/files/neg/t5390c.check index 861d6447b81d..1688bb3f4afb 100644 --- a/test/files/neg/t5390c.check +++ b/test/files/neg/t5390c.check @@ -1,4 +1,4 @@ -t5390c.scala:7: error: forward reference extends over definition of value b +t5390c.scala:7: error: forward reference to value a defined on line 8 extends over definition of value b val b = new a.B("") ^ 1 error diff --git a/test/files/neg/t5390d.check b/test/files/neg/t5390d.check index ed117ea9dac2..c814ddd53cb8 100644 --- a/test/files/neg/t5390d.check +++ b/test/files/neg/t5390d.check @@ -1,4 +1,4 @@ -t5390d.scala:7: error: forward reference extends over definition of value b +t5390d.scala:7: error: forward reference to value a defined on line 8 extends over definition of value b val b = a.B.toString ^ 1 error diff --git a/test/files/neg/t5606.check b/test/files/neg/t5606.check new file mode 100644 index 000000000000..c51564f29abe --- /dev/null +++ b/test/files/neg/t5606.check @@ -0,0 +1,16 @@ +t5606.scala:3: error: identifier expected but '_' found. +case class CaseTest[_](someData: String) + ^ +t5606.scala:5: error: using `?` as a type name requires backticks. +case class CaseTest_?[?](someData: String) + ^ +t5606.scala:8: error: identifier expected but '_' found. +case class CaseTest2[_, _](someData: String) + ^ +t5606.scala:11: error: identifier expected but '_' found. + def f[_](x: Int) = ??? + ^ +t5606.scala:23: error: using `?` as a type name requires backticks. + def regress_?[F[?]] = 2 + ^ +5 errors diff --git a/test/files/neg/t5606.scala b/test/files/neg/t5606.scala new file mode 100644 index 000000000000..c44b1e96e378 --- /dev/null +++ b/test/files/neg/t5606.scala @@ -0,0 +1,26 @@ +// scalac: -Xsource:3 +// was: _ taken as ident of type param, but poor interactions below +case class CaseTest[_](someData: String) + +case class CaseTest_?[?](someData: String) + +// was: _ already defined +case class CaseTest2[_, _](someData: String) + +class C { + def f[_](x: Int) = ??? +} + +object Test extends App { + def f0 = new CaseTest("X") + def f1: CaseTest[Int] = new CaseTest[Int]("X") // OK! 
+ def f2: CaseTest[Int] = CaseTest[Int]("X") // CaseTest[Any] + def f3 = new CaseTest[Int]("X").copy() // CaseTest[Any] + def f4 = new CaseTest[Int]("X").copy[Int]() // CaseTest[Any] + + def regress0[F[_]] = 0 + def regress1[F[_, _]] = 1 + def regress_?[F[?]] = 2 + //def regress0[F[_$$1]] = 0; + //def regress1[F[_$$2, _$$3]] = 1 +} diff --git a/test/files/neg/t5606b.check b/test/files/neg/t5606b.check new file mode 100644 index 000000000000..cdbd20ecb3e9 --- /dev/null +++ b/test/files/neg/t5606b.check @@ -0,0 +1,15 @@ +t5606b.scala:4: warning: Top-level wildcard is not allowed and will error under -Xsource:3 +case class CaseTest[_](someData: String) + ^ +t5606b.scala:7: warning: Top-level wildcard is not allowed and will error under -Xsource:3 +case class CaseTest2[_, _](someData: String) + ^ +t5606b.scala:7: warning: Top-level wildcard is not allowed and will error under -Xsource:3 +case class CaseTest2[_, _](someData: String) + ^ +t5606b.scala:10: warning: Top-level wildcard is not allowed and will error under -Xsource:3 + def f[_](x: Int) = ??? + ^ +error: No warnings can be incurred under -Werror. +4 warnings +1 error diff --git a/test/files/neg/t5606b.scala b/test/files/neg/t5606b.scala new file mode 100644 index 000000000000..3931de26d43b --- /dev/null +++ b/test/files/neg/t5606b.scala @@ -0,0 +1,11 @@ +// scalac: -Xlint -Werror +// +// was: _ taken as ident of type param, now a fresh name +case class CaseTest[_](someData: String) + +// was: _ already defined, now a fresh name +case class CaseTest2[_, _](someData: String) + +class C { + def f[_](x: Int) = ??? +} diff --git a/test/files/neg/t5702-neg-bad-brace.check b/test/files/neg/t5702-neg-bad-brace.check index 25b0d96b6cf4..bdd68e43f892 100644 --- a/test/files/neg/t5702-neg-bad-brace.check +++ b/test/files/neg/t5702-neg-bad-brace.check @@ -1,10 +1,7 @@ -t5702-neg-bad-brace.scala:14: error: Unmatched closing brace '}' ignored here +t5702-neg-bad-brace.scala:7: error: Unmatched closing brace '}' ignored here case List(1, _*} => ^ -t5702-neg-bad-brace.scala:14: error: illegal start of simple pattern - case List(1, _*} => - ^ -t5702-neg-bad-brace.scala:15: error: ')' expected but '}' found. - } - ^ -3 errors +t5702-neg-bad-brace.scala:10: error: eof expected but '}' found. +} +^ +2 errors diff --git a/test/files/neg/t5702-neg-bad-brace.scala b/test/files/neg/t5702-neg-bad-brace.scala index 16a341cf8c17..49f55a37b2b2 100644 --- a/test/files/neg/t5702-neg-bad-brace.scala +++ b/test/files/neg/t5702-neg-bad-brace.scala @@ -1,16 +1,9 @@ object Test { - def main(args: Array[String]) { + def main(args: Array[String]): Unit = { val is = List(1,2,3) - is match { -// the erroneous brace is ignored, so we can't halt on it. -// maybe brace healing can detect overlapping unmatched (...} -// In this case, the fix emits an extra error: -// t5702-neg-bad-brace.scala:10: error: Unmatched closing brace '}' ignored here -// t5702-neg-bad-brace.scala:10: error: illegal start of simple pattern (i.e., =>) -// t5702-neg-bad-brace.scala:11: error: ')' expected but '}' found. case List(1, _*} => } } diff --git a/test/files/neg/t5702-neg-ugly-xbrace.scala b/test/files/neg/t5702-neg-ugly-xbrace.scala index 0ff7bfa09d5a..6e2ddb092454 100644 --- a/test/files/neg/t5702-neg-ugly-xbrace.scala +++ b/test/files/neg/t5702-neg-ugly-xbrace.scala @@ -1,6 +1,6 @@ object Test { - def main(args: Array[String]) { + def main(args: Array[String]): Unit = { val xml = appleboychild // This is the more likely typo, and the uglier parse. 
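The t5606 fixtures above exercise how -Xsource:3 treats underscores in type-parameter position: an underscore is no longer accepted as the name of a type parameter, while underscores that merely mark a higher-kinded parameter keep working. As a quick orientation for readers of this patch, here is a minimal sketch of that distinction; it is not part of the patch, and the names Sketch, Ok, and higherKinded are invented for illustration.

object Sketch {
  // fine in both modes: an ordinary named type parameter
  class Ok[A](val someData: String)

  // fine in both modes: the underscore only marks F as higher-kinded,
  // mirroring `def regress0[F[_]] = 0` in t5606.scala
  def higherKinded[F[_]](x: F[Int]): F[Int] = x

  // rejected under -Xsource:3 with "identifier expected but '_' found.",
  // matching the expectation recorded in t5606.check
  // case class Bad[_](someData: String)
}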
diff --git a/test/files/neg/t6323a.check b/test/files/neg/t6323a.check index 83966449e7aa..399514cb1ce9 100644 --- a/test/files/neg/t6323a.check +++ b/test/files/neg/t6323a.check @@ -1,15 +1,7 @@ -t6323a.scala:12: materializing requested scala.reflect.type.ClassTag[Test] using scala.reflect.`package`.materializeClassTag[Test]() - val lookAtMe = m.reflect(Test("a",List(5))) - ^ -t6323a.scala:13: materializing requested reflect.runtime.universe.type.TypeTag[Test] using scala.reflect.api.`package`.materializeTypeTag[Test](scala.reflect.runtime.`package`.universe) - val value = u.typeOf[Test] - ^ -t6323a.scala:13: scala.reflect.api.`package`.materializeTypeTag[Test](scala.reflect.runtime.`package`.universe) is not a valid implicit value for reflect.runtime.universe.TypeTag[Test] because: -failed to typecheck the materialized tag: -cannot create a TypeTag referring to class Test.Test local to the reifee: use WeakTypeTag instead - val value = u.typeOf[Test] - ^ -t6323a.scala:13: error: No TypeTag available for Test +t6323a.scala:13: error: implicit error; +!I ttag: reflect.runtime.universe.TypeTag[Test] + No TypeTag available for Test + val value = u.typeOf[Test] ^ 1 error diff --git a/test/files/neg/t6323a.scala b/test/files/neg/t6323a.scala index 34305c69028b..182c31c609a1 100644 --- a/test/files/neg/t6323a.scala +++ b/test/files/neg/t6323a.scala @@ -1,4 +1,4 @@ -// scalac: -Xlog-implicits +// scalac: -Vimplicits // import scala.reflect.runtime.universe._ import scala.reflect.runtime.{currentMirror => m} diff --git a/test/files/neg/t7052.check b/test/files/neg/t7052.check new file mode 100644 index 000000000000..6816f79bde81 --- /dev/null +++ b/test/files/neg/t7052.check @@ -0,0 +1,7 @@ +t7052.scala:9: error: name clash between defined and inherited member: +def apply(xs: Int*): Int in class A and +def apply(xs: Seq[Int]): Int at line 9 +have same type after erasure: (xs: Seq): Int + def apply(xs: Seq[Int]) = 27 + ^ +1 error diff --git a/test/files/neg/t7052.scala b/test/files/neg/t7052.scala new file mode 100644 index 000000000000..0cfad0dce678 --- /dev/null +++ b/test/files/neg/t7052.scala @@ -0,0 +1,21 @@ + +class A { + def apply(xs: Int*) = 42 +} + +/* name clash between defined and inherited member: + */ +class B extends A { + def apply(xs: Seq[Int]) = 27 +} + +/* method apply overrides nothing. +class C extends A { + override def apply(xs: Seq[Int]) = 17 +} + */ + +// ok because different return type +class D extends A { + def apply(xs: Seq[Int]) = "42" +} diff --git a/test/files/neg/t7052b.check b/test/files/neg/t7052b.check new file mode 100644 index 000000000000..c45d895b65c0 --- /dev/null +++ b/test/files/neg/t7052b.check @@ -0,0 +1,6 @@ +t7052b.scala:15: error: method apply overrides nothing. +Note: the super classes of class C contain the following, non final members named apply: +def apply(xs: Int*): Int + override def apply(xs: Seq[Int]) = 17 + ^ +1 error diff --git a/test/files/neg/t7052b.scala b/test/files/neg/t7052b.scala new file mode 100644 index 000000000000..8c410e8bf0ef --- /dev/null +++ b/test/files/neg/t7052b.scala @@ -0,0 +1,21 @@ + +class A { + def apply(xs: Int*) = 42 +} + +/* name clash between defined and inherited member: +class B extends A { + def apply(xs: Seq[Int]) = 27 +} + */ + +/* method apply overrides nothing. 
+ */ +class C extends A { + override def apply(xs: Seq[Int]) = 17 +} + +// ok because different return type +class D extends A { + def apply(xs: Seq[Int]) = "42" +} diff --git a/test/files/neg/t7509.check b/test/files/neg/t7509.check index 03ec8ef282fa..30a01e840b3d 100644 --- a/test/files/neg/t7509.check +++ b/test/files/neg/t7509.check @@ -6,7 +6,4 @@ t7509.scala:3: error: type mismatch; required: R crash(42) ^ -t7509.scala:3: error: could not find implicit value for parameter ev: R - crash(42) - ^ -3 errors +2 errors diff --git a/test/files/neg/t7636.check b/test/files/neg/t7636.check index c05996ef4010..59f474ca90fb 100644 --- a/test/files/neg/t7636.check +++ b/test/files/neg/t7636.check @@ -1,10 +1,10 @@ t7636.scala:3: error: illegal inheritance; - self-type Main.C does not conform to Main.ResultTable[_$3]'s selftype Main.ResultTable[_$3] - class C extends ResultTable(Left(5):Either[_,_])(5) - ^ + self-type Main.bar.type does not conform to Main.Foo[T]'s selftype Main.Foo[T] + object bar extends Foo(5: T forSome { type T }) + ^ t7636.scala:3: error: type mismatch; - found : Either[_$2,_$3(in constructor C)] where type _$3(in constructor C), type _$2 - required: Either[_, _$3(in value )] where type _$3(in value ) - class C extends ResultTable(Left(5):Either[_,_])(5) - ^ + found : T(in constructor bar) where type T(in constructor bar) + required: T(in value ) where type T(in value ) + object bar extends Foo(5: T forSome { type T }) + ^ 2 errors diff --git a/test/files/neg/t7636.scala b/test/files/neg/t7636.scala index 5d5d56a5efd3..27d4c060932f 100644 --- a/test/files/neg/t7636.scala +++ b/test/files/neg/t7636.scala @@ -1,7 +1,7 @@ -object Main extends App{ - class ResultTable[E]( query : Either[_,E] )( columns : Int ) - class C extends ResultTable(Left(5):Either[_,_])(5) +object Main extends App { + class Foo[A](x: A) + object bar extends Foo(5: T forSome { type T }) } -// Inference of the existential type for the parent type argument -// E still fails. That looks tricky to fix, see the comments in scala/bug#7636. +// Inference of the existential type for the parent type argument A still fails. +// That looks tricky to fix, see the comments in scala/bug#7636. // But we at least prevent a cascading NPE. diff --git a/test/files/neg/t7721.check b/test/files/neg/t7721.check index 04ef4858356c..2fa50df39c8d 100644 --- a/test/files/neg/t7721.check +++ b/test/files/neg/t7721.check @@ -22,6 +22,24 @@ t7721.scala:49: warning: abstract type pattern B.this.Foo is unchecked since it t7721.scala:49: warning: abstract type pattern B.this.Bar is unchecked since it is eliminated by erasure case x: Foo with Bar with Concrete => x.bippy + x.barry + x.dingo + x.conco + x.bongo ^ +t7721.scala:13: warning: The outer reference in this type test cannot be checked at run time. + case x: Foo with Concrete => x.bippy + x.conco + ^ +t7721.scala:17: warning: The outer reference in this type test cannot be checked at run time. + case x: Concrete with Foo => x.bippy + x.conco + ^ +t7721.scala:21: warning: The outer reference in this type test cannot be checked at run time. + case x: Foo with Bar => x.bippy + x.barry + ^ +t7721.scala:41: warning: The outer reference in this type test cannot be checked at run time. + case x: Foo with Concrete => x.bippy + x.dingo + x.conco + ^ +t7721.scala:45: warning: The outer reference in this type test cannot be checked at run time. 
+ case x: Concrete with Foo => x.bippy + x.dingo + x.conco + ^ +t7721.scala:49: warning: The outer reference in this type test cannot be checked at run time. + case x: Foo with Bar with Concrete => x.bippy + x.barry + x.dingo + x.conco + x.bongo + ^ error: No warnings can be incurred under -Werror. -8 warnings +14 warnings 1 error diff --git a/test/files/neg/t8035-removed.check b/test/files/neg/t8035-removed.check index 1938c010d557..7c444dcd6840 100644 --- a/test/files/neg/t8035-removed.check +++ b/test/files/neg/t8035-removed.check @@ -13,4 +13,11 @@ t8035-removed.scala:11: error: adaptation of an empty argument list by inserting given arguments: sdf.format() ^ +t8035-removed.scala:14: warning: adapted the argument list to the expected 2-tuple: add additional parens instead + signature: List.::[B >: A](elem: B): List[B] + given arguments: 42, 27 + after adaptation: List.::((42, 27): (Int, Int)) + Nil.::(42, 27) // yeswarn + ^ +1 warning 3 errors diff --git a/test/files/neg/t8035-removed.scala b/test/files/neg/t8035-removed.scala index e3bc04d8ea10..bada37b7d2f1 100644 --- a/test/files/neg/t8035-removed.scala +++ b/test/files/neg/t8035-removed.scala @@ -1,4 +1,4 @@ -// scalac: -Xsource:3.0 +// scalac: -Xsource:3.0 -Xlint -Werror // object Foo { List(1,2,3).toSet() @@ -9,4 +9,7 @@ object Foo { import java.text.SimpleDateFormat val sdf = new SimpleDateFormat("yyyyMMdd-HH0000") sdf.format() + + (42, 27) :: Nil // nowarn + Nil.::(42, 27) // yeswarn } diff --git a/test/files/neg/t8127a.check b/test/files/neg/t8127a.check index ce75d28cf265..764ab5310ff8 100644 --- a/test/files/neg/t8127a.check +++ b/test/files/neg/t8127a.check @@ -1,4 +1,4 @@ -t8127a.scala:7: error: The result type of an unapplySeq method must contain a member `get` to be used as an extractor pattern, no such member exists in Seq[_$1] +t8127a.scala:7: error: Seq[Any] is not a valid result type of an unapplySeq method of an extractor. case H(v) => ^ 1 error diff --git a/test/files/neg/t8127a.scala b/test/files/neg/t8127a.scala index c05facdac1c4..e1bd1559667a 100644 --- a/test/files/neg/t8127a.scala +++ b/test/files/neg/t8127a.scala @@ -7,6 +7,9 @@ object Test { case H(v) => case _ => } - // now: too many patterns for object H offering Boolean: expected 0, found 1 - // was: result type Seq[_$2] of unapplySeq defined in method unapplySeq in object H does not conform to Option[_] } + // later: OK + // then: Seq[Any] is not a valid result type of an unapplySeq method of an extractor. 
+ // and: The result type of an unapplySeq method must contain a member `get` to be used as an extractor pattern, no such member exists in Seq[Any] + // now: too many patterns for object H offering Boolean: expected 0, found 1 + // was: result type Seq[_$2] of unapplySeq defined in method unapplySeq in object H does not conform to Option[_] diff --git a/test/files/neg/t8463.check b/test/files/neg/t8463.check index fe3f19aa4606..572a460728ed 100644 --- a/test/files/neg/t8463.check +++ b/test/files/neg/t8463.check @@ -19,9 +19,4 @@ t8463.scala:5: error: type mismatch; required: T[Long] insertCell(Foo(5)) ^ -t8463.scala:5: error: type mismatch; - found : Test.Foo[T] - required: Test.Foo[Test.Cell] - insertCell(Foo(5)) - ^ -4 errors +3 errors diff --git a/test/files/neg/t9014.check b/test/files/neg/t9014.check new file mode 100644 index 000000000000..650093881062 --- /dev/null +++ b/test/files/neg/t9014.check @@ -0,0 +1,4 @@ +t9014.scala:4: error: Inner is already defined as case class Inner + case class Inner(default: T) + ^ +1 error diff --git a/test/files/neg/t9014.scala b/test/files/neg/t9014.scala new file mode 100644 index 000000000000..32465c3c7dcd --- /dev/null +++ b/test/files/neg/t9014.scala @@ -0,0 +1,7 @@ +object Test { + def spec[@specialized(Byte, Short, Int, Long) T : Integral](t: T) = { + // still broken - specialize can't deal with the synthetic companion object + case class Inner(default: T) + t + } +} diff --git a/test/files/neg/t9334.check b/test/files/neg/t9334.check new file mode 100644 index 000000000000..e5fe6ef6d0ed --- /dev/null +++ b/test/files/neg/t9334.check @@ -0,0 +1,6 @@ +t9334.scala:5: error: weaker access privileges in overriding +def aaa: Int (defined in class A) + override should not be private + private[this] def aaa: Int = 42 + ^ +1 error diff --git a/test/files/neg/t9334.scala b/test/files/neg/t9334.scala new file mode 100644 index 000000000000..c8838e855db2 --- /dev/null +++ b/test/files/neg/t9334.scala @@ -0,0 +1,6 @@ +class A { + def aaa: Int = 10 +} +class B extends A { + private[this] def aaa: Int = 42 +} diff --git a/test/files/neg/t9538.check b/test/files/neg/t9538.check new file mode 100644 index 000000000000..17458daf5d32 --- /dev/null +++ b/test/files/neg/t9538.check @@ -0,0 +1,13 @@ +t9538.scala:9: error: Option[String] is not a valid result type of an unapplySeq method of an extractor. + def f(x: Any) = x match { case X(y, z) => } + ^ +t9538.scala:10: error: Option[(Int, Int, Int)] is not a valid result type of an unapplySeq method of an extractor. + def g0(x: Any) = x match { case Y() => } + ^ +t9538.scala:11: error: Option[(Int, Int, Int)] is not a valid result type of an unapplySeq method of an extractor. + def g1(x: Any) = x match { case Y(y) => } + ^ +t9538.scala:12: error: Option[(Int, Int, Int)] is not a valid result type of an unapplySeq method of an extractor. 
+ def g2(x: Any) = x match { case Y(y,z) => } + ^ +4 errors diff --git a/test/files/neg/t9538.scala b/test/files/neg/t9538.scala new file mode 100644 index 000000000000..f64ef9552dd8 --- /dev/null +++ b/test/files/neg/t9538.scala @@ -0,0 +1,13 @@ + + + +object X { def unapplySeq(x: Any): Option[String] = { Some(x.toString.toUpperCase) }} + +object Y { def unapplySeq(v: Any) = Option((1, 2, 3)) } + +object Test extends App { + def f(x: Any) = x match { case X(y, z) => } + def g0(x: Any) = x match { case Y() => } + def g1(x: Any) = x match { case Y(y) => } + def g2(x: Any) = x match { case Y(y,z) => } +} diff --git a/test/files/neg/t9847.check b/test/files/neg/t9847.check index 27899eb467be..d3c6c485f72c 100644 --- a/test/files/neg/t9847.check +++ b/test/files/neg/t9847.check @@ -1,15 +1,3 @@ -t9847.scala:10: warning: Line starts with an operator that in future -will be taken as an infix expression continued from the previous line. -To force the previous interpretation as a separate statement, -add an explicit `;`, add an empty line, or remove spaces after the operator. - + 1 - ^ -t9847.scala:14: warning: Line starts with an operator that in future -will be taken as an infix expression continued from the previous line. -To force the previous interpretation as a separate statement, -add an explicit `;`, add an empty line, or remove spaces after the operator. - + 1 - ^ t9847.scala:6: warning: discarded non-Unit value def f(): Unit = 42 ^ @@ -47,5 +35,5 @@ t9847.scala:24: warning: a pure expression does nothing in statement position; m class D { 42 ; 17 } ^ error: No warnings can be incurred under -Werror. -14 warnings +12 warnings 1 error diff --git a/test/files/neg/text-blocks.check b/test/files/neg/text-blocks.check new file mode 100644 index 000000000000..8a9af6292a04 --- /dev/null +++ b/test/files/neg/text-blocks.check @@ -0,0 +1,13 @@ +text-blocks/Invalid1.java:4: error: illegal text block open delimiter sequence, missing line terminator + public static final String badOpeningDelimiter = """non-whitespace + ^ +text-blocks/Invalid1.java:4: error: expected + public static final String badOpeningDelimiter = """non-whitespace + ^ +text-blocks/Invalid1.java:6: error: illegal text block open delimiter sequence, missing line terminator + """; + ^ +text-blocks/Invalid2.java:6: error: unclosed string literal + foo""""; + ^ +4 errors diff --git a/test/files/neg/text-blocks/Invalid1.java b/test/files/neg/text-blocks/Invalid1.java new file mode 100644 index 000000000000..54c7e98d9219 --- /dev/null +++ b/test/files/neg/text-blocks/Invalid1.java @@ -0,0 +1,7 @@ +// javaVersion: 15+ +class Invalid1 { + + public static final String badOpeningDelimiter = """non-whitespace + foo + """; +} diff --git a/test/files/neg/text-blocks/Invalid2.java b/test/files/neg/text-blocks/Invalid2.java new file mode 100644 index 000000000000..08b0a57548aa --- /dev/null +++ b/test/files/neg/text-blocks/Invalid2.java @@ -0,0 +1,7 @@ +// javaVersion: 15+ +class Invalid2 { + + // Closing delimiter is first three eligible `"""`, not last + public static final String closingDelimiterIsNotScalas = """ + foo""""; +} diff --git a/test/files/neg/trailing-commas.check b/test/files/neg/trailing-commas.check index 17ceb40c09d1..a371d51fe2fb 100644 --- a/test/files/neg/trailing-commas.check +++ b/test/files/neg/trailing-commas.check @@ -61,15 +61,9 @@ trait TypeArgs { def f: C[Int, String, ] } trailing-commas.scala:23: error: identifier expected but ']' found. 
trait TypeParamClause { type C[A, B, ] } ^ -trailing-commas.scala:23: error: ']' expected but '}' found. -trait TypeParamClause { type C[A, B, ] } - ^ trailing-commas.scala:24: error: identifier expected but ']' found. trait FunTypeParamClause { def f[A, B, ] } ^ -trailing-commas.scala:24: error: ']' expected but '}' found. -trait FunTypeParamClause { def f[A, B, ] } - ^ trailing-commas.scala:26: error: identifier expected but ')' found. trait SimpleType { def f: (Int, String, ) } ^ @@ -127,4 +121,4 @@ trait SimpleType2 { def f: (Int, ) } trailing-commas.scala:48: error: ')' expected but '}' found. trait SimpleType2 { def f: (Int, ) } ^ -43 errors +41 errors diff --git a/test/files/neg/unchecked-refinement.check b/test/files/neg/unchecked-refinement.check index 95dcec0c89ea..8ef312f91ca5 100644 --- a/test/files/neg/unchecked-refinement.check +++ b/test/files/neg/unchecked-refinement.check @@ -1,11 +1,11 @@ -unchecked-refinement.scala:19: warning: abstract type U in type pattern Foo[U,U,V] is unchecked since it is eliminated by erasure +unchecked-refinement.scala:19: warning: the type test for pattern Foo[U,U,V] cannot be checked at runtime because it has type parameters eliminated by erasure /* warn */ case _: Foo[U, U, V] if b => () ^ -unchecked-refinement.scala:21: warning: non-variable type argument Any in type pattern Foo[Any,U,V] is unchecked since it is eliminated by erasure +unchecked-refinement.scala:21: warning: the type test for pattern Foo[Any,U,V] cannot be checked at runtime because it has type parameters eliminated by erasure /* warn */ case _: Foo[Any, U, V] if b => () ^ unchecked-refinement.scala:25: warning: a pattern match on a refinement type is unchecked - /* nowarn - todo */ case x: AnyRef { def bippy: Int } if b => x.bippy // this could/should do an instance check and not warn + /* nowarn - todo */ case x: AnyRef { def bippy: Int } if b => x.bippy // this could/should do an instance check and not warn // dotty warns under reflectiveSelectable ^ unchecked-refinement.scala:26: warning: a pattern match on a refinement type is unchecked /* nowarn - todo */ case x: AnyRef { def size: Int } if b => x.size // this could/should do a static conformance test and not warn diff --git a/test/files/neg/unchecked-refinement.scala b/test/files/neg/unchecked-refinement.scala index 5902a442ae13..2d3b27eda23a 100644 --- a/test/files/neg/unchecked-refinement.scala +++ b/test/files/neg/unchecked-refinement.scala @@ -22,7 +22,7 @@ class A { } def f4(xs: List[Int]) = xs match { - /* nowarn - todo */ case x: AnyRef { def bippy: Int } if b => x.bippy // this could/should do an instance check and not warn + /* nowarn - todo */ case x: AnyRef { def bippy: Int } if b => x.bippy // this could/should do an instance check and not warn // dotty warns under reflectiveSelectable /* nowarn - todo */ case x: AnyRef { def size: Int } if b => x.size // this could/should do a static conformance test and not warn /* nowarn */ case x: ((AnyRef { def size: Int }) @unchecked) if b => x.size } diff --git a/test/files/neg/varargs2.check b/test/files/neg/varargs2.check new file mode 100644 index 000000000000..23d13ec6bf09 --- /dev/null +++ b/test/files/neg/varargs2.check @@ -0,0 +1,13 @@ +varargs2.scala:7: error: Only methods can be marked @varargs + @varargs val x = 42 // nok + ^ +varargs2.scala:8: error: Only methods can be marked @varargs + def f(@varargs y: Int) = 42 // nok + ^ +varargs2.scala:9: error: Only methods can be marked @varargs + def g(z: Int @varargs) = 42 // nok + ^ +varargs2.scala:10: error: 
Only methods can be marked @varargs + def h(z: Int) = 42: @varargs // nok + ^ +4 errors diff --git a/test/files/neg/varargs2.scala b/test/files/neg/varargs2.scala new file mode 100644 index 000000000000..82ccf97cb03a --- /dev/null +++ b/test/files/neg/varargs2.scala @@ -0,0 +1,13 @@ +// scalac: -Xsource:3 + +import annotation.* + +trait T { + @varargs def d(n: Int*) = 42 // ok + @varargs val x = 42 // nok + def f(@varargs y: Int) = 42 // nok + def g(z: Int @varargs) = 42 // nok + def h(z: Int) = 42: @varargs // nok + + lazy val VarargsClass = List.empty[varargs] // good one +} diff --git a/test/files/neg/variant-placeholders-future.check b/test/files/neg/variant-placeholders-future.check new file mode 100644 index 000000000000..d166e8d577a9 --- /dev/null +++ b/test/files/neg/variant-placeholders-future.check @@ -0,0 +1,7 @@ +variant-placeholders-future.scala:4: error: `=`, `>:`, or `<:` expected + type -_ = Int // error -_ not allowed as a type def name without backticks + ^ +variant-placeholders-future.scala:5: error: `=`, `>:`, or `<:` expected + type +_ = Int // error +_ not allowed as a type def name without backticks + ^ +2 errors diff --git a/test/files/neg/variant-placeholders-future.scala b/test/files/neg/variant-placeholders-future.scala new file mode 100644 index 000000000000..75296ff945b4 --- /dev/null +++ b/test/files/neg/variant-placeholders-future.scala @@ -0,0 +1,6 @@ +// scalac: -Xsource:3 +// +object Test { + type -_ = Int // error -_ not allowed as a type def name without backticks + type +_ = Int // error +_ not allowed as a type def name without backticks +} diff --git a/test/files/neg/variant-placeholders-nofuture.check b/test/files/neg/variant-placeholders-nofuture.check new file mode 100644 index 000000000000..8cf591d0a32f --- /dev/null +++ b/test/files/neg/variant-placeholders-nofuture.check @@ -0,0 +1,7 @@ +variant-placeholders-nofuture.scala:5: error: ';' expected but '_' found. + val fnMinusPlus1: -_ => +_ = (_: Int).toLong // error -_/+_ won't parse without -Xsource:3 + ^ +variant-placeholders-nofuture.scala:6: error: ')' expected but '_' found. 
+ val fnMinusPlus2: (-_) => +_ = fnMinusPlus1 // error -_/+_ won't parse without -Xsource:3 + ^ +2 errors diff --git a/test/files/neg/variant-placeholders-nofuture.scala b/test/files/neg/variant-placeholders-nofuture.scala new file mode 100644 index 000000000000..5f638f68a84a --- /dev/null +++ b/test/files/neg/variant-placeholders-nofuture.scala @@ -0,0 +1,8 @@ +object Test { + type `-_` = Int + type `+_` = Long + + val fnMinusPlus1: -_ => +_ = (_: Int).toLong // error -_/+_ won't parse without -Xsource:3 + val fnMinusPlus2: (-_) => +_ = fnMinusPlus1 // error -_/+_ won't parse without -Xsource:3 + val fnMinusPlus3: -_ => (+_) = fnMinusPlus2 // error -_/+_ won't parse without -Xsource:3 +} diff --git a/test/files/neg/wildcards-future.check b/test/files/neg/wildcards-future.check new file mode 100644 index 000000000000..31f116c7e547 --- /dev/null +++ b/test/files/neg/wildcards-future.check @@ -0,0 +1,11 @@ +wildcards-future.scala:7: error: type mismatch; + found : scala.collection.immutable.Map[_$1,Any] where type _$1 <: AnyRef + required: Map[String,String] + underscores : Map[String, String] // error wildcard variables starting with `_` + ^ +wildcards-future.scala:9: error: type mismatch; + found : scala.collection.immutable.Map[?$1,Any] where type ?$1 <: AnyRef + required: Map[String,String] + qmarks : Map[String, String] // error – wildcard variables should start with `?` to differentiate from the old syntax + ^ +2 errors diff --git a/test/files/neg/wildcards-future.scala b/test/files/neg/wildcards-future.scala new file mode 100644 index 000000000000..54b7675813e7 --- /dev/null +++ b/test/files/neg/wildcards-future.scala @@ -0,0 +1,11 @@ +// scalac: -Xsource:3 +// +object Test { + val underscores: Map[_ <: AnyRef, _ >: Null] = Map() + val qmarks: Map[? <: AnyRef, ? >: Null] = Map() + + underscores : Map[String, String] // error wildcard variables starting with `_` + + qmarks : Map[String, String] // error – wildcard variables should start with `?` to differentiate from the old syntax + // (and have a mildly more readable error...) +} diff --git a/test/files/pos/and-future.scala b/test/files/pos/and-future.scala new file mode 100644 index 000000000000..f7e15e822ecc --- /dev/null +++ b/test/files/pos/and-future.scala @@ -0,0 +1,17 @@ +// scalac: -Xsource:3 +// + +trait X +trait Y + +class Test[A, B <: A & AnyRef] { + def foo[T >: A & Null <: A & AnyRef & Any](x: T & ""): "" & T = x + + val a: X & Y & AnyRef = new X with Y {} + val b: (X & Y) & AnyRef = new X with Y {} + val c: X & (Y & AnyRef) = new X with Y {} + + val d: X & Y = c match { + case xy: (X & Y) => xy + } +} diff --git a/test/files/pos/i11371.scala b/test/files/pos/i11371.scala new file mode 100644 index 000000000000..74156b777c9f --- /dev/null +++ b/test/files/pos/i11371.scala @@ -0,0 +1,21 @@ +// scalac: -Xsource:3 +// +object HelloWorld { + def whileLoop: Int = { + var i = 0 + var acc = 0 + while (i < 3) { + var `i'` = 0 + while (`i'` < 4) { + acc += (i * `i'`) + `i'` += 1 + } + i += 1 + } + acc + } + + def main(args: Array[String]): Unit = { + println(s"hello world: ${whileLoop}") + } +} diff --git a/test/files/pos/import-future.scala b/test/files/pos/import-future.scala new file mode 100644 index 000000000000..5b215d907a58 --- /dev/null +++ b/test/files/pos/import-future.scala @@ -0,0 +1,33 @@ +// scalac: -Xsource:3 +// + +import java.io as jio +import scala.{collection as c} + +import c.mutable as mut +import mut.ArrayBuffer as Buf + +object O { + val x: jio.IOException = ??? 
+ val y = Buf(1, 2, 3) + + type OString = String + def foo22(x: Int) = x +} + +class C { + import O.{ foo22 as foo, OString as OS } + println(foo(22)) + val s: OS = "" + + import mut.* + val ab = ArrayBuffer(1) +} + +object starring { + + import scala.concurrent.{*, given}, duration.{given, Duration as D, *}, ExecutionContext.Implicits.* + + val f = Future(42) + val r = Await.result(f, D.Inf) +} diff --git a/test/files/pos/leading-infix-op.scala b/test/files/pos/leading-infix-op.scala new file mode 100644 index 000000000000..4b60aa67b8c1 --- /dev/null +++ b/test/files/pos/leading-infix-op.scala @@ -0,0 +1,19 @@ + +// scalac: -Xsource:3 + +trait T { + def f(x: Int): Boolean = + x < 0 + || + x > 0 + && + x != 3 + + def g(x: Option[Int]) = x match { + case Some(err) => + println("hi") + ??? + case None => + ??? + } +} diff --git a/test/files/pos/macro-annot/t12366.check b/test/files/pos/macro-annot/t12366.check new file mode 100644 index 000000000000..de47a31a6b4e --- /dev/null +++ b/test/files/pos/macro-annot/t12366.check @@ -0,0 +1 @@ +warning: 2 deprecations; re-run with -deprecation for details diff --git a/test/files/pos/macro-annot/t12366.scala b/test/files/pos/macro-annot/t12366.scala new file mode 100644 index 000000000000..9b75bb3c6d1f --- /dev/null +++ b/test/files/pos/macro-annot/t12366.scala @@ -0,0 +1,15 @@ +// scalac: -Ymacro-annotations +object Test extends App { + + @deprecated + class Inner() { + } + + lazy val Inner = new Inner() + + @deprecated + class Inner2() { + } + + val Inner2 = new Inner2() +} diff --git a/test/files/pos/open-infix-future.scala b/test/files/pos/open-infix-future.scala new file mode 100644 index 000000000000..8fee778d40cb --- /dev/null +++ b/test/files/pos/open-infix-future.scala @@ -0,0 +1,36 @@ +// scalac: -Xsource:3 +// + +open class A +infix class B[T, S] + +open infix class C[T, S] +open infix case class CC[T, S](x: Int) +infix open class D[T, S] +infix trait DT[T, S] + +open +infix +private +class E + +class F { + open infix class C1[T, S] + infix type X + + infix def foo(x: Int): Int = x +} + +object G { + open infix class C2[T, S] +} + +object Test { + val infix: Int = 1 + infix + 1 + val open: Int => Int = x => x + open(1) + open { + 2 + } +} diff --git a/test/files/pos/patmat-exprs-b.scala b/test/files/pos/patmat-exprs-b.scala new file mode 100644 index 000000000000..426419a0c8ee --- /dev/null +++ b/test/files/pos/patmat-exprs-b.scala @@ -0,0 +1,51 @@ + +import annotation.nowarn + +trait Pattern { + + trait NumericOps[T] extends Serializable { + + def zero: T + + def add(a: T, b: T): T + def add(a: T, b: T, c: T): T = add(a, add(b, c)) + + def sum(terms: Iterable[T]) = terms.foldLeft(zero)(add) + def sum(terms: Iterator[T]) = terms.foldLeft(zero)(add) + } + + trait Expr[T] { + + /** Returns arguments of this operator */ + def args: Iterable[Expr[_]] + + def + (other: Expr[T])(implicit n: NumericOps[T]) = Add(List(this, other)) + + def specialize(implicit num: NumericOps[T]): Expr[T] = + this match { + case Add(Seq(a, b)) => Add2(a, b) + case Add(Seq(a, b, c)) => Add3(a, b, c) + case x => x + } + } + + trait TwoArg[T] extends Expr[T] { + val left: Expr[T] + val right: Expr[T] + val args = List(left, right) + } + + trait ManyArg[T] extends Expr[T] + + case class Add[T](args: Iterable[Expr[T]])(implicit @nowarn num: NumericOps[T]) extends ManyArg[T] { + override def toString = "(" + args.mkString(" + ") + ")" + } + + case class Add2[T](left: Expr[T], right: Expr[T])(implicit @nowarn num: NumericOps[T]) extends TwoArg[T] { + override def toString 
= "(" + left + " + " + right + ")" + } + case class Add3[T](a1: Expr[T], a2: Expr[T], a3: Expr[T])(implicit @nowarn num: NumericOps[T]) extends ManyArg[T] { + val args = List(a1, a2, a3) + override def toString = "(" + a1 + " + " + a2 + " + " + a3 + ")" + } +} diff --git a/test/files/pos/sammy_java8/F.java b/test/files/pos/sammy_java8/F.java new file mode 100644 index 000000000000..5dac57a1e2ae --- /dev/null +++ b/test/files/pos/sammy_java8/F.java @@ -0,0 +1,6 @@ +public interface F<T, U> { + U apply(T t); + default void yadayada() { + throw new UnsupportedOperationException("yadayada"); + } +} diff --git a/test/files/pos/sammy_java8/Test.scala b/test/files/pos/sammy_java8/Test.scala new file mode 100644 index 000000000000..61fcf4f0ce4f --- /dev/null +++ b/test/files/pos/sammy_java8/Test.scala @@ -0,0 +1,4 @@ +class T { + def app[T, U](x: T)(f: F[T, U]): U = f(x) + app(1)(x => List(x)) +} diff --git a/test/files/pos/surrogates.scala b/test/files/pos/surrogates.scala new file mode 100644 index 000000000000..1b710ad901ae --- /dev/null +++ b/test/files/pos/surrogates.scala @@ -0,0 +1,28 @@ + +// allow supplementary chars in identifiers + +class 𐐀 { + def 𐐀 = 42 + + // regression check: anything goes in strings + def x = "𐐀" + def y = s"$𐐀" + def w = s" 𐐀" +} + +case class 𐐀𐐀(n: Int) { + def 𐐀𐐀 = n + def `𐐀𐐀1` = n + n +} + +// uncontroversially, orphan surrogates may be introduced +// via unicode escape. +class Construction { + def hi = '\ud801' + def lo = '\udc00' + def endhi = "abc\ud801" + def startlo = "\udc00xyz" + def reversed = "xyz\udc00\ud801abc" +} + +// was: error: illegal character '\ud801', '\udc00' diff --git a/test/files/pos/t11534.scala b/test/files/pos/t11534.scala new file mode 100644 index 000000000000..bab4bd956d87 --- /dev/null +++ b/test/files/pos/t11534.scala @@ -0,0 +1,8 @@ +// scalac: -Werror +object Test1 { + val g: scala.tools.nsc.Global = ???
+ import g._ + def test(sym: Symbol) = sym.name match { + case _: TermName => + } +} diff --git a/test/files/pos/t11908/C.scala b/test/files/pos/t11908/C.scala new file mode 100644 index 000000000000..615277efc50b --- /dev/null +++ b/test/files/pos/t11908/C.scala @@ -0,0 +1,55 @@ +// javaVersion: 16+ +object C { + + def useR1 = { + // constructor signature + val r1 = new R1(123, "hello") + + // accessors signature + val i: Int = r1.i + val s: String = r1.s + + // method + val s2: String = r1.someMethod() + + // supertype + val isRecord: java.lang.Record = r1 + + () + } + + def useR2 = { + // constructor signature + val r2 = new R2.R(123, "hello") + + // accessors signature + val i: Int = r2.i + val s: String = r2.s + + // method + val i2: Int = r2.getInt + + // supertype + val isIntLike: IntLike = r2 + val isRecord: java.lang.Record = r2 + + () + } + + def useR3 = { + // constructor signature + val r3 = new R3(123, 42L, "hi") + new R3("hi", 123) + + // accessors signature + val i: Int = r3.i + val l: Long = r3.l + val s: String = r3.s + + // method + val l2: Long = r3.l(43L, 44L) + + // supertype + val isRecord: java.lang.Record = r3 + } +} diff --git a/test/files/pos/t11908/IntLike.scala b/test/files/pos/t11908/IntLike.scala new file mode 100644 index 000000000000..9e45fd43bc98 --- /dev/null +++ b/test/files/pos/t11908/IntLike.scala @@ -0,0 +1,4 @@ +// javaVersion: 16+ +trait IntLike { + def getInt: Int +} diff --git a/test/files/pos/t11908/R1.java b/test/files/pos/t11908/R1.java new file mode 100644 index 000000000000..350ac64b987e --- /dev/null +++ b/test/files/pos/t11908/R1.java @@ -0,0 +1,7 @@ +// javaVersion: 16+ +record R1(int i, String s) { + + public String someMethod() { + return s + "!"; + } +} diff --git a/test/files/pos/t11908/R2.java b/test/files/pos/t11908/R2.java new file mode 100644 index 000000000000..62bf5ff6c22c --- /dev/null +++ b/test/files/pos/t11908/R2.java @@ -0,0 +1,14 @@ +// javaVersion: 16+ +public class R2 { + final record R(int i, String s) implements IntLike { + public int getInt() { + return i; + } + + // Canonical constructor + public R(int i, java.lang.String s) { + this.i = i; + this.s = s.intern(); + } + } +} diff --git a/test/files/pos/t11908/R3.java b/test/files/pos/t11908/R3.java new file mode 100644 index 000000000000..03a06dfc6f37 --- /dev/null +++ b/test/files/pos/t11908/R3.java @@ -0,0 +1,23 @@ +// javaVersion: 16+ +public record R3(int i, long l, String s) { + + // User-specified accessor + public int i() { + return i + 1; // evil >:) + } + + // Not an accessor - too many parameters + public long l(long a1, long a2) { + return a1 + a2; + } + + // Secondary constructor + public R3(String s, int i) { + this(i, 42L, s); + } + + // Compact constructor + public R3 { + s = s.intern(); + } +} diff --git a/test/files/pos/t11964.scala b/test/files/pos/t11964.scala new file mode 100644 index 000000000000..4f0bd8f73726 --- /dev/null +++ b/test/files/pos/t11964.scala @@ -0,0 +1,19 @@ +// scalac: -Werror -Xlint + +object Hmm { + def zxc(b: Int*)(implicit x: Int = 3) = "" + b + x + def res = zxc(4) +} + +object Test { + def foo(a: Any, b: Any = null, c: Any = null)(cs: String*) = ??? + def res = foo("", c = "")("X") +} + +object OP { + def f(a: Int, b: String*) = "first" + def res = f(b = "sl19", a = 28) // looks like the issue is only with single arg supplied to varargs. 
+ def or = f(b = ("x"::"y"::Nil):_*, a = 42) // 2.13 syntax only + //def and = f(b = ("x"::"y"::Nil):_*) // broken under 2.13, which disallows default + varargs + def and = List(elems = ("x"::"y"::Nil):_*) +} diff --git a/test/files/pos/t12210.scala b/test/files/pos/t12210.scala new file mode 100644 index 000000000000..35d6cdbf8c87 --- /dev/null +++ b/test/files/pos/t12210.scala @@ -0,0 +1,20 @@ +trait SpecFun[@specialized T] { + type Res + def res: Res +} + +object Test { + def m[@specialized T](op: SpecFun[T]): op.Res = op.res +} + +trait ValuesVisitor[A] { + def visit(a: A): Unit + def visitArray(arr: Array[A]): Unit = ??? +} + +class OpArray[@specialized A] { + def traverse(from: Array[A], fn: ValuesVisitor[A]): fn.type = { + fn.visitArray(from) + fn + } +} diff --git a/test/files/pos/t12225.scala b/test/files/pos/t12225.scala new file mode 100644 index 000000000000..baae67d36bf8 --- /dev/null +++ b/test/files/pos/t12225.scala @@ -0,0 +1,6 @@ +// scalac: -Ydebug +object Test { + def foo(arr: Array[Int]): Unit = { + val Array(x, y) = arr + } +} diff --git a/test/files/pos/t12233.scala b/test/files/pos/t12233.scala new file mode 100644 index 000000000000..481b5258d2d5 --- /dev/null +++ b/test/files/pos/t12233.scala @@ -0,0 +1,12 @@ + +trait TypeClass[T] +class Hehe[T: TypeClass](i: Int, j: Int) { + def this(i: Int)(implicit j: Int) = this(i, j) +} + +/* was +test/files/pos/t12233.scala:4: error: too many arguments (found 2, expected 1) for constructor Hehe: (implicit evidence$1: TypeClass[T]): Hehe[T] + def this(i: Int)(implicit j: Int) = this(i, j) + ^ +1 error + */ diff --git a/test/files/pos/t12312-hmm.scala b/test/files/pos/t12312-hmm.scala new file mode 100644 index 000000000000..16decd4f9325 --- /dev/null +++ b/test/files/pos/t12312-hmm.scala @@ -0,0 +1,45 @@ +package hmm + +// Taken from https://github.com/typelevel/kind-projector/blob/7ad46d6ca995976ae2ff18215dbb32cd7ad0dd7a/src/test/scala/hmm.scala +// As a regression test for the issue spotted in https://github.com/scala/community-build/pull/1400 + +class TC[A] + +object TC { + def apply[A]: Unit = () +} + +object test { + + sealed trait HList extends Product with Serializable + case class ::[+H, +T <: HList](head : H, tail : T) extends HList + sealed trait HNil extends HList + case object HNil extends HNil + + TC[Int :: Int :: Int :: Int :: Int :: Int :: Int :: Int :: + Int :: Int :: Int :: Int :: Int :: Int :: Int :: Int :: + Int :: Int :: Int :: Int :: Int :: Int :: Int :: Int :: HNil] + + TC[Int :: Int :: Int :: Int :: Int :: Int :: Int :: Int :: + Int :: Int :: Int :: Int :: Int :: Int :: Int :: Int :: + Int :: Int :: Int :: Int :: Int :: Int :: Int :: Int :: + Int :: Int :: Int :: Int :: Int :: Int :: Int :: Int :: + Int :: Int :: Int :: Int :: Int :: Int :: Int :: Int :: + Int :: Int :: Int :: Int :: Int :: Int :: Int :: Int :: + Int :: Int :: Int :: Int :: Int :: Int :: Int :: Int :: + Int :: Int :: Int :: Int :: Int :: Int :: Int :: Int :: HNil] + + TC[Int :: Int :: Int :: Int :: Int :: Int :: Int :: Int :: + Int :: Int :: Int :: Int :: Int :: Int :: Int :: Int :: + Int :: Int :: Int :: Int :: Int :: Int :: Int :: Int :: + Int :: Int :: Int :: Int :: Int :: Int :: Int :: Int :: + Int :: Int :: Int :: Int :: Int :: Int :: Int :: Int :: + Int :: Int :: Int :: Int :: Int :: Int :: Int :: Int :: + Int :: Int :: Int :: Int :: Int :: Int :: Int :: Int :: + Int :: Int :: Int :: Int :: Int :: Int :: Int :: Int :: + Int :: Int :: Int :: Int :: Int :: Int :: Int :: Int :: + Int :: Int :: Int 
:: Int :: Int :: Int :: Int :: Int :: + Int :: Int :: Int :: Int :: Int :: Int :: Int :: Int :: + Int :: Int :: Int :: Int :: Int :: Int :: Int :: Int :: + Int :: Int :: Int :: Int :: Int :: Int :: Int :: Int :: HNil] +} diff --git a/test/files/pos/t12349b/A.java b/test/files/pos/t12349b/A.java new file mode 100644 index 000000000000..aab1185d87ac --- /dev/null +++ b/test/files/pos/t12349b/A.java @@ -0,0 +1,7 @@ +package p; + +public class A { + public static class R { } + + /* package-protected */ R foo() { return null; } +} diff --git a/test/files/pos/t12349b/B.java b/test/files/pos/t12349b/B.java new file mode 100644 index 000000000000..735c91372a03 --- /dev/null +++ b/test/files/pos/t12349b/B.java @@ -0,0 +1,7 @@ +package q; + +public class B extends p.A { + public static class RR extends p.A.R { } + + /* package-protected */ RR foo() { return null; } +} diff --git a/test/files/pos/t12349b/Test.scala b/test/files/pos/t12349b/Test.scala new file mode 100644 index 000000000000..3f22fa033e08 --- /dev/null +++ b/test/files/pos/t12349b/Test.scala @@ -0,0 +1 @@ +class Test extends q.B diff --git a/test/files/pos/t12392.scala b/test/files/pos/t12392.scala new file mode 100644 index 000000000000..056fd1ae2d17 --- /dev/null +++ b/test/files/pos/t12392.scala @@ -0,0 +1,15 @@ +// scalac: -Werror +import scala.reflect.api.Universe + +object Test { + type SingletonUniverse = Universe with Singleton + def deepIntersectionTypeMembers[U <: SingletonUniverse](targetType: U#Type): List[U#Type] = { + def go(tpe: U#Type): List[U#Type] = { + tpe match { + case r: U#RefinedTypeApi => r.parents.flatMap(t => deepIntersectionTypeMembers[U]((t.dealias): U#Type)) + case _ => List(tpe) + } + } + go(targetType).distinct + } +} diff --git a/test/files/pos/t12393/R1.java b/test/files/pos/t12393/R1.java new file mode 100644 index 000000000000..08c764ceb4ba --- /dev/null +++ b/test/files/pos/t12393/R1.java @@ -0,0 +1,7 @@ +// javaVersion: 9+ +public interface R1 { + + private void foo() { + return; + } +} diff --git a/test/files/pos/t12398.scala b/test/files/pos/t12398.scala new file mode 100644 index 000000000000..ebd6bda4cf8e --- /dev/null +++ b/test/files/pos/t12398.scala @@ -0,0 +1,11 @@ +// scalac: -Werror +import scala.reflect.api.Universe + +object Test { + type SingletonUniverse = Universe with Singleton + + def foo[U <: SingletonUniverse](u: U)(typ: u.Type): List[u.Annotation] = typ match { + case t: u.AnnotatedTypeApi => t.annotations // was: "The outer reference in this type test cannot be checked at run time." 
+ case _ => Nil + } +} diff --git a/test/files/pos/t12398b.scala b/test/files/pos/t12398b.scala new file mode 100644 index 000000000000..9337a6e8e0fd --- /dev/null +++ b/test/files/pos/t12398b.scala @@ -0,0 +1,11 @@ +// scalac: -Werror +import scala.reflect.api.Universe + +object Test { + type SingletonUniverse = Universe with Singleton + + def foo[U <: SingletonUniverse](u: U)(typ: U#Type): List[U#Annotation] = typ match { + case t: U#AnnotatedTypeApi => t.annotations // as a comparison, this wasn't emitting a warning + case _ => Nil + } +} diff --git a/test/files/pos/t12407/A.java b/test/files/pos/t12407/A.java new file mode 100644 index 000000000000..fd2c83a43298 --- /dev/null +++ b/test/files/pos/t12407/A.java @@ -0,0 +1,10 @@ +public class A { + public interface I { + I[] getArray(); + } + + public interface J extends I { + @Override + J[] getArray(); + } +} diff --git a/test/files/pos/t12407/Test.scala b/test/files/pos/t12407/Test.scala new file mode 100644 index 000000000000..6ef6c534d423 --- /dev/null +++ b/test/files/pos/t12407/Test.scala @@ -0,0 +1 @@ +trait Test extends A.J diff --git a/test/files/pos/t12467.scala b/test/files/pos/t12467.scala new file mode 100644 index 000000000000..a0cb4f79dd4f --- /dev/null +++ b/test/files/pos/t12467.scala @@ -0,0 +1,15 @@ +object PagedResponse { + type Aux[Item0] = PagedResponse { type Item = Item0 } +} + +trait PagedResponse { + type Item + sealed trait NextPage + case class NoMorePages() extends NextPage +} + +object Test { + def foo[A](next: PagedResponse.Aux[A]#NextPage): Unit = next match { + case _: PagedResponse.Aux[A]#NoMorePages => ??? + } +} diff --git a/test/files/pos/t5606.scala b/test/files/pos/t5606.scala index 2545271e32d8..8daffaf1e783 100644 --- a/test/files/pos/t5606.scala +++ b/test/files/pos/t5606.scala @@ -1,9 +1,9 @@ +// was: _ taken as ident of type param, now a fresh name +case class CaseTest[_](someData: String) +// was: _ already defined, now a fresh name +case class CaseTest2[_, _](someData: String) - - - - - - -case class CaseTest[_](someData:String) +class C { + def f[_](x: Int) = ??? 
+} diff --git a/test/files/pos/t7398/Iterator.java b/test/files/pos/t7398/Iterator.java new file mode 100644 index 000000000000..75b5a8b303b7 --- /dev/null +++ b/test/files/pos/t7398/Iterator.java @@ -0,0 +1,10 @@ +public interface Iterator<E> { + boolean hasNext(); + E next(); + default void remove() { + throw new UnsupportedOperationException("remove"); + } + default void forEachRemaining(java.util.function.Consumer<? super E> action) { + throw new UnsupportedOperationException("forEachRemaining"); + } +} diff --git a/test/files/pos/t7398/Test.scala b/test/files/pos/t7398/Test.scala new file mode 100644 index 000000000000..2068acaa6dc7 --- /dev/null +++ b/test/files/pos/t7398/Test.scala @@ -0,0 +1,5 @@ +class Test extends Iterator[String] { + def hasNext = true + def next() = "" + def test = this.remove() +} diff --git a/test/files/pos/t7745.scala b/test/files/pos/t7745.scala new file mode 100644 index 000000000000..d1b0ed9b2c8f --- /dev/null +++ b/test/files/pos/t7745.scala @@ -0,0 +1,38 @@ + +package bug + +import scala.language.implicitConversions + +class Base[T] + +class BaseOps[T] { + type OpT[U] = Op[T, U] // Fails below + //type OpT[U] = List[U] // OK + //trait OpT[U] extends Op[T, U] // OK + + def op(tgt: OpTarget[OpT]) = tgt +} + +object Base { + implicit def baseOps[T](b: Base[T]): BaseOps[T] = new BaseOps[T] +} + +class Op[A, B] + +class OpTarget[TC[_]] + +object OpTarget { + implicit def apply[TC[_]](a: Any): OpTarget[TC] = new OpTarget[TC] +} + +object TestBase { + val baseOps = new BaseOps[String] + baseOps.op(23) // OK in all cases + + val base = new Base[String] + base.op(23) // In the failing case: + // found : Int(23) + // required: shapeless.OpTarget[[U]shapeless.Op[String,U]] + // base.op(23) + // ^ +} diff --git a/test/files/pos/t8493.scala b/test/files/pos/t8493.scala new file mode 100644 index 000000000000..a5e63a1bede4 --- /dev/null +++ b/test/files/pos/t8493.scala @@ -0,0 +1,25 @@ +object Test { + trait Foo { + def foo: this.type + } + + case class Impl() extends Foo { + def foo = ??? + def bar: Unit = () + } + + object Foo { + def foo(f: Foo): f.type = f.foo + } + + def work(f: Impl): Unit = + Foo.foo(f).bar + + def bug(f: Int => Impl): Unit = + Foo.foo(f(1)).bar + + def workaround(f: Int => Impl): Unit = { + val tmp = f(1) + Foo.foo(tmp).bar + } +} diff --git a/test/files/pos/t8852/Interface.java b/test/files/pos/t8852/Interface.java new file mode 100644 index 000000000000..7b35f3b12f1e --- /dev/null +++ b/test/files/pos/t8852/Interface.java @@ -0,0 +1,5 @@ +public interface Interface { + public static int staticMethod() { + return 42; + } +} diff --git a/test/files/pos/t8852/Test.scala b/test/files/pos/t8852/Test.scala new file mode 100644 index 000000000000..acd36ec2a5a0 --- /dev/null +++ b/test/files/pos/t8852/Test.scala @@ -0,0 +1,5 @@ +object Test { + val x: Int = Interface.staticMethod() +} + +class C extends Interface // expect no errors about unimplemented members.
diff --git a/test/files/pos/t9014.scala b/test/files/pos/t9014.scala new file mode 100644 index 000000000000..8af97634c488 --- /dev/null +++ b/test/files/pos/t9014.scala @@ -0,0 +1,6 @@ +object Test { + def spec[@specialized(Byte, Short, Int, Long) T : Integral](t: T) = { + def inner(default: T = t): T = t + inner() + } +} diff --git a/test/files/pos/varargs-future.scala b/test/files/pos/varargs-future.scala new file mode 100644 index 000000000000..8b8c414b47b0 --- /dev/null +++ b/test/files/pos/varargs-future.scala @@ -0,0 +1,43 @@ +// scalac: -Xsource:3 +// + +class Test { + def foo(xs: Int*): Seq[Int] = xs + + val s: Seq[Int] = Seq(1, 2, 3) + foo(s*) + foo((s ++ s)*) + + // not very useful, but supported by Scala 3 (and matches what works with `: _*` syntax) + foo( + s*, + ) + + s match { + case Seq(elems*) => println(elems) + } + + s match { + case Seq(x, rest*) => println(rest) + } + + // regression tests for comparison + s match { + case Seq(elems @ _*) => println(elems) + } + + s match { + case Seq(x, rest @ _*) => println(rest) + } + + // more parens + s match { + case Seq((xs) @ _*) => xs + } + + /* also disallowed in Scala 3 + s match { + case Seq((xs)*) => xs + } + */ +} diff --git a/test/files/pos/variant-placeholders-future.scala b/test/files/pos/variant-placeholders-future.scala new file mode 100644 index 000000000000..383db8420f85 --- /dev/null +++ b/test/files/pos/variant-placeholders-future.scala @@ -0,0 +1,35 @@ +// scalac: -Xsource:3 +// +object Test { + type `-_` = Int + type `+_` = Long + + val fnMinusPlus1: -_ => +_ = (_: Int).toLong + val fnMinusPlus2: (-_) => +_ = fnMinusPlus1 + val fnMinusPlus3: -_ => (+_) = fnMinusPlus2 + + val fnTupMinusPlus2: (=> -_, -_) => +_ = (a, b) => ((a: Int) + (b: Int)).toLong + def defMinusPlus2(byname: => -_, vararg: -_*): +_ = ((vararg.sum: Int) + (byname: -_)).toLong + val infixMinusPlus2: -_ Either +_ = Right[-_, +_](1L) + + val optPlus: Option[+_] = Some[ + _ ](1L) // spaces allowed + optPlus match { + case opt: Option[ + _ ] => + val opt1: + _ = opt.get + val opt2: Long = opt1 + } + + val optMinus: Option[-_] = Some[ - _ ](1) // spaces allowed + optMinus match { + case opt: Option[ - _ ] => + val opt1: `-_` = opt.get + val optErr: - _ = opt.get + val opt2: Int = opt1 + } + + locally { + type `-_`[A] = A + type `+_`[A] = Option[A] + val optOpt: Option[ + _ [+_[-_[Int]]]] = Some(Some(Some(1))) + } +} diff --git a/test/files/pos/wildcards-future.scala b/test/files/pos/wildcards-future.scala new file mode 100644 index 000000000000..c9afdea17bab --- /dev/null +++ b/test/files/pos/wildcards-future.scala @@ -0,0 +1,20 @@ +// scalac: -Xsource:3 +// +object Test { + val xs: List[?] = List(1, 2, 3) + val ys: Map[? <: AnyRef, ? >: Null] = Map() + + def foo(x: Any) = x match { + case x: List[?] 
=> x + case _ => x + } + + type `?` = Int + + val xs2: List[`?`] = List(1) + val xs3: List[Int] = xs2 + + def foo2(x: List[`?`]): List[Int] = x match { + case x: List[`?`] => x + } +} diff --git a/test/files/presentation/infix-completion.check b/test/files/presentation/infix-completion.check index 9d0723e882c4..a6549c83911b 100644 --- a/test/files/presentation/infix-completion.check +++ b/test/files/presentation/infix-completion.check @@ -3,9 +3,9 @@ reload: Snippet.scala askTypeCompletion at Snippet.scala(1,34) ================================================================================ [response] askTypeCompletion at (1,34) -#partest !java15 +#partest !java15+ retrieved 203 members -#partest java15 +#partest java15+ retrieved 205 members #partest [inaccessible] protected def num: Fractional[Double] @@ -123,7 +123,7 @@ def compareTo(x$1: Double): Int def compareTo(x$1: Float): Int def compareTo(x$1: Integer): Int def compareTo(x$1: Long): Int -#partest java15 +#partest java15+ def describeConstable(): java.util.Optional[Double] #partest def doubleValue(): Double @@ -145,7 +145,7 @@ def isNegInfinity: Boolean def isPosInfinity: Boolean def isValidLong: Boolean def longValue(): Long -#partest java15 +#partest java15+ def resolveConstantDesc(x$1: java.lang.invoke.MethodHandles.Lookup): Double #partest def round: Long diff --git a/test/files/presentation/infix-completion2.check b/test/files/presentation/infix-completion2.check index 9d0723e882c4..a6549c83911b 100644 --- a/test/files/presentation/infix-completion2.check +++ b/test/files/presentation/infix-completion2.check @@ -3,9 +3,9 @@ reload: Snippet.scala askTypeCompletion at Snippet.scala(1,34) ================================================================================ [response] askTypeCompletion at (1,34) -#partest !java15 +#partest !java15+ retrieved 203 members -#partest java15 +#partest java15+ retrieved 205 members #partest [inaccessible] protected def num: Fractional[Double] @@ -123,7 +123,7 @@ def compareTo(x$1: Double): Int def compareTo(x$1: Float): Int def compareTo(x$1: Integer): Int def compareTo(x$1: Long): Int -#partest java15 +#partest java15+ def describeConstable(): java.util.Optional[Double] #partest def doubleValue(): Double @@ -145,7 +145,7 @@ def isNegInfinity: Boolean def isPosInfinity: Boolean def isValidLong: Boolean def longValue(): Long -#partest java15 +#partest java15+ def resolveConstantDesc(x$1: java.lang.invoke.MethodHandles.Lookup): Double #partest def round: Long diff --git a/test/files/run/StringConcat.check b/test/files/run/StringConcat.check new file mode 100644 index 000000000000..10eaa9a20d1b Binary files /dev/null and b/test/files/run/StringConcat.check differ diff --git a/test/files/run/StringConcat.scala b/test/files/run/StringConcat.scala new file mode 100644 index 000000000000..568c3e68aa26 --- /dev/null +++ b/test/files/run/StringConcat.scala @@ -0,0 +1,86 @@ +// java: -Xss128M + +import scala.tools.partest.ReplTest + +// ReplTest so that the long concatenation is compiled at test-run-time with the larger `Xss`. +// Tests are always compiled in the partest VM. +object Test extends ReplTest { + def code = + """// This should generally obey 15.18.1. 
of the JLS (String Concatenation Operator +) + |def concatenatingVariousTypes(): String = { + | val str: String = "some string" + | val sb: StringBuffer = new StringBuffer("some stringbuffer") + | val cs: CharSequence = java.nio.CharBuffer.allocate(50).append("charsequence") + | val i: Int = 123456789 + | val s: Short = 345 + | val b: Byte = 12 + | val z: Boolean = true + | val f: Float = 3.14f + | val j: Long = 98762147483647L + | val d: Double = 3.1415d + | + | "String " + str + "\n" + + | "StringBuffer " + sb + "\n" + + | "CharSequence " + cs + "\n" + + | "Int " + i + "\n" + + | "Short " + s + "\n" + + | "Byte " + b + "\n" + + | "Boolean " + z + "\n" + + | "Float " + f + "\n" + + | "Long " + j + "\n" + + | "Double " + d + "\n" + |} + |// The characters `\u0001` and `\u0002` play a special role in `StringConcatFactory` + |def concatenationInvolvingSpecialCharacters(): String = { + | val s1 = "Qux" + | val s2 = "Quux" + | + | s"Foo \u0001 $s1 Bar \u0002 $s2 Baz" + |} + |// Concatenation involving more than 200 elements + |def largeConcatenation(): String = { + | val s00 = "s00" + | val s01 = "s01" + | val s02 = "s02" + | val s03 = "s03" + | val s04 = "s04" + | val s05 = "s05" + | val s06 = "s06" + | val s07 = "s07" + | val s08 = "s08" + | + | // 24 rows follow + | s00 + "," + s01 + "," + s02 + "," + s03 + "," + s04 + "," + s05 + "," + s06 + "," + s07 + "," + s08 + "\n" + + | s00 + "," + s01 + "," + s02 + "," + s03 + "," + s04 + "," + s05 + "," + s06 + "," + s07 + "," + s08 + "\n" + + | s00 + "," + s01 + "," + s02 + "," + s03 + "," + s04 + "," + s05 + "," + s06 + "," + s07 + "," + s08 + "\n" + + | s00 + "," + s01 + "," + s02 + "," + s03 + "," + s04 + "," + s05 + "," + s06 + "," + s07 + "," + s08 + "\n" + + | s00 + "," + s01 + "," + s02 + "," + s03 + "," + s04 + "," + s05 + "," + s06 + "," + s07 + "," + s08 + "\n" + + | s00 + "," + s01 + "," + s02 + "," + s03 + "," + s04 + "," + s05 + "," + s06 + "," + s07 + "," + s08 + "\n" + + | s00 + "," + s01 + "," + s02 + "," + s03 + "," + s04 + "," + s05 + "," + s06 + "," + s07 + "," + s08 + "\n" + + | s00 + "," + s01 + "," + s02 + "," + s03 + "," + s04 + "," + s05 + "," + s06 + "," + s07 + "," + s08 + "\n" + + | s00 + "," + s01 + "," + s02 + "," + s03 + "," + s04 + "," + s05 + "," + s06 + "," + s07 + "," + s08 + "\n" + + | s00 + "," + s01 + "," + s02 + "," + s03 + "," + s04 + "," + s05 + "," + s06 + "," + s07 + "," + s08 + "\n" + + | s00 + "," + s01 + "," + s02 + "," + s03 + "," + s04 + "," + s05 + "," + s06 + "," + s07 + "," + s08 + "\n" + + | s00 + "," + s01 + "," + s02 + "," + s03 + "," + s04 + "," + s05 + "," + s06 + "," + s07 + "," + s08 + "\n" + + | s00 + "," + s01 + "," + s02 + "," + s03 + "," + s04 + "," + s05 + "," + s06 + "," + s07 + "," + s08 + "\n" + + | s00 + "," + s01 + "," + s02 + "," + s03 + "," + s04 + "," + s05 + "," + s06 + "," + s07 + "," + s08 + "\n" + + | s00 + "," + s01 + "," + s02 + "," + s03 + "," + s04 + "," + s05 + "," + s06 + "," + s07 + "," + s08 + "\n" + + | s00 + "," + s01 + "," + s02 + "," + s03 + "," + s04 + "," + s05 + "," + s06 + "," + s07 + "," + s08 + "\n" + + | s00 + "," + s01 + "," + s02 + "," + s03 + "," + s04 + "," + s05 + "," + s06 + "," + s07 + "," + s08 + "\n" + + | s00 + "," + s01 + "," + s02 + "," + s03 + "," + s04 + "," + s05 + "," + s06 + "," + s07 + "," + s08 + "\n" + + | s00 + "," + s01 + "," + s02 + "," + s03 + "," + s04 + "," + s05 + "," + s06 + "," + s07 + "," + s08 + "\n" + + | s00 + "," + s01 + "," + s02 + "," + s03 + "," + s04 + "," + s05 + "," + s06 + "," + s07 + "," + s08 + "\n" + + | s00 + 
"," + s01 + "," + s02 + "," + s03 + "," + s04 + "," + s05 + "," + s06 + "," + s07 + "," + s08 + "\n" + + | s00 + "," + s01 + "," + s02 + "," + s03 + "," + s04 + "," + s05 + "," + s06 + "," + s07 + "," + s08 + "\n" + + | s00 + "," + s01 + "," + s02 + "," + s03 + "," + s04 + "," + s05 + "," + s06 + "," + s07 + "," + s08 + "\n" + + | s00 + "," + s01 + "," + s02 + "," + s03 + "," + s04 + "," + s05 + "," + s06 + "," + s07 + "," + s08 + "\n" + |} + |println("----------") + |println(concatenatingVariousTypes()) + |println("----------") + |println(concatenationInvolvingSpecialCharacters()) + |println("----------") + |println(largeConcatenation()) + |println("----------") + |""".stripMargin +} diff --git a/test/files/run/analyzerPlugins.check b/test/files/run/analyzerPlugins.check index a3c7d4cc4319..2659fd3b3e6e 100644 --- a/test/files/run/analyzerPlugins.check +++ b/test/files/run/analyzerPlugins.check @@ -19,7 +19,7 @@ canAdaptAnnotations(Trees$TypeTree, ?) [7] canAdaptAnnotations(Trees$Typed, ?) [3] canAdaptAnnotations(Trees$Typed, Any) [1] canAdaptAnnotations(Trees$Typed, Int) [1] -lub(List(Int(1) @testAnn, 2)) [1] +lub(List(1 @testAnn, 2)) [1] pluginsPt(?, Trees$Annotated) [6] pluginsPt(?, Trees$Apply) [17] pluginsPt(?, Trees$ApplyImplicitView) [2] diff --git a/test/files/run/analyzerPlugins.scala b/test/files/run/analyzerPlugins.scala index 09c541366922..81b085d74fcb 100644 --- a/test/files/run/analyzerPlugins.scala +++ b/test/files/run/analyzerPlugins.scala @@ -3,8 +3,6 @@ import scala.tools.nsc._ object Test extends DirectTest { - override def extraSettings: String = "-usejavacp" - def code = """ class testAnn extends annotation.TypeConstraint diff --git a/test/files/run/annotatedRetyping.scala b/test/files/run/annotatedRetyping.scala index 556e2ffcedef..865b6aad1c71 100644 --- a/test/files/run/annotatedRetyping.scala +++ b/test/files/run/annotatedRetyping.scala @@ -2,8 +2,6 @@ import scala.tools.partest._ object Test extends DirectTest { - override def extraSettings: String = "-usejavacp" - def code = """ class testAnn extends annotation.Annotation diff --git a/test/files/run/bridges.javaopts b/test/files/run/bridges.javaopts deleted file mode 100644 index 3a63111bf2fd..000000000000 --- a/test/files/run/bridges.javaopts +++ /dev/null @@ -1 +0,0 @@ --Xss128M diff --git a/test/files/run/bridges.scala b/test/files/run/bridges.scala index 53494500a4d5..de641f03f6b5 100644 --- a/test/files/run/bridges.scala +++ b/test/files/run/bridges.scala @@ -1,3 +1,5 @@ +// java: -Xss128M + //############################################################################ // Test bridge methods //############################################################################ diff --git a/test/files/run/colltest1.scala b/test/files/run/colltest1.scala index cc58fbf8230b..ae5e6d8be18e 100644 --- a/test/files/run/colltest1.scala +++ b/test/files/run/colltest1.scala @@ -34,7 +34,7 @@ object Test extends App { val (o, e) = ten.partition(_ % 2 == 0) assert(o.size == e.size) val gs = ten groupBy (x => x / 4) - val vs1 = (for (k <- gs.keysIterator; v <- gs(k).toIterable.iterator) yield v).toList.sorted + val vs1 = (for (k <- gs.keysIterator; v <- gs(k).iterator) yield v).toList.sorted val vs2 = gs.values.toList.flatten.sorted // val vs2 = gs.values.toList flatMap (xs => xs) assert(ten.head == 1) @@ -60,7 +60,6 @@ object Test extends App { assert(buf == ten, buf) assert(ten.toArray.size == 10) assert(ten.toArray.toSeq == ten, ten.toArray.toSeq) - assert(ten.toIterable == ten) assert(ten.toList == ten) assert(ten.toSeq == 
ten) assert(ten.toStream == ten) diff --git a/test/files/run/delambdafy_t6028.scala b/test/files/run/delambdafy_t6028.scala index 2eee66eb3000..5e1ed7d294f8 100644 --- a/test/files/run/delambdafy_t6028.scala +++ b/test/files/run/delambdafy_t6028.scala @@ -12,8 +12,5 @@ object Test extends DirectTest { |} |""".stripMargin.trim - override def show(): Unit = - Console.withErr(System.out) { - compile() - } + override def show(): Unit = compile() } diff --git a/test/files/run/delambdafy_t6555.scala b/test/files/run/delambdafy_t6555.scala index 2ee5955883f6..93839ecf950c 100644 --- a/test/files/run/delambdafy_t6555.scala +++ b/test/files/run/delambdafy_t6555.scala @@ -6,8 +6,5 @@ object Test extends DirectTest { override def code = "class Foo { val f = (param: String) => param } " - override def show(): Unit = - Console.withErr(System.out) { - compile() - } + override def show(): Unit = compile() } diff --git a/test/files/run/delambdafy_uncurry_byname_inline.scala b/test/files/run/delambdafy_uncurry_byname_inline.scala index 6b33c921ea81..6e3507960d39 100644 --- a/test/files/run/delambdafy_uncurry_byname_inline.scala +++ b/test/files/run/delambdafy_uncurry_byname_inline.scala @@ -11,8 +11,5 @@ object Test extends DirectTest { |} |""".stripMargin.trim - override def show(): Unit = - Console.withErr(System.out) { - compile() - } + override def show(): Unit = compile() } diff --git a/test/files/run/delambdafy_uncurry_byname_method.scala b/test/files/run/delambdafy_uncurry_byname_method.scala index d12edfcf6bc4..ccef6d1cd3dc 100644 --- a/test/files/run/delambdafy_uncurry_byname_method.scala +++ b/test/files/run/delambdafy_uncurry_byname_method.scala @@ -11,8 +11,5 @@ object Test extends DirectTest { |} |""".stripMargin.trim - override def show(): Unit = - Console.withErr(System.out) { - compile() - } + override def show(): Unit = compile() } diff --git a/test/files/run/delambdafy_uncurry_inline.scala b/test/files/run/delambdafy_uncurry_inline.scala index 40c91814143f..4187909a1508 100644 --- a/test/files/run/delambdafy_uncurry_inline.scala +++ b/test/files/run/delambdafy_uncurry_inline.scala @@ -11,9 +11,5 @@ object Test extends DirectTest { |} |""".stripMargin.trim - override def show(): Unit = { - Console.withErr(System.out) { - compile() - } - } + override def show(): Unit = compile() } diff --git a/test/files/run/delambdafy_uncurry_method.scala b/test/files/run/delambdafy_uncurry_method.scala index d83446535357..849ed872f9c6 100644 --- a/test/files/run/delambdafy_uncurry_method.scala +++ b/test/files/run/delambdafy_uncurry_method.scala @@ -11,9 +11,5 @@ object Test extends DirectTest { |} |""".stripMargin.trim - override def show(): Unit = { - Console.withErr(System.out) { - compile() - } - } + override def show(): Unit = compile() } diff --git a/test/files/run/dynamic-applyDynamic.check b/test/files/run/dynamic-applyDynamic.check index a496e6259626..0631de014b1c 100644 --- a/test/files/run/dynamic-applyDynamic.check +++ b/test/files/run/dynamic-applyDynamic.check @@ -1,8 +1,8 @@ [[syntax trees at end of typer]] // newSource1.scala [0:67]package [0:0] { - [0:67]object X extends [9:67][67]scala.AnyRef { - [67]def (): [9]X.type = [67]{ - [67][67][67]X.super.(); + [0:67]object X extends [9:67][9]scala.AnyRef { + [9]def (): [9]X.type = [9]{ + [9][9][9]X.super.(); [9]() }; [17:30]private[this] val d: [21]D = [25:30][25:30][25:30]new [29:30]D(); diff --git a/test/files/run/dynamic-applyDynamic.scala b/test/files/run/dynamic-applyDynamic.scala index 17efad94a797..25a7cf1dcfeb 100644 --- 
a/test/files/run/dynamic-applyDynamic.scala +++ b/test/files/run/dynamic-applyDynamic.scala @@ -13,11 +13,7 @@ object Test extends DirectTest { } """.trim - override def show(): Unit = { - Console.withErr(System.out) { - compile() - } - } + override def show(): Unit = compile() } import language.dynamics diff --git a/test/files/run/dynamic-applyDynamicNamed.check b/test/files/run/dynamic-applyDynamicNamed.check index 09ddf2cf7ad5..20fb5e870477 100644 --- a/test/files/run/dynamic-applyDynamicNamed.check +++ b/test/files/run/dynamic-applyDynamicNamed.check @@ -1,8 +1,8 @@ [[syntax trees at end of typer]] // newSource1.scala [0:97]package [0:0] { - [0:97]object X extends [9:97][97]scala.AnyRef { - [97]def (): [9]X.type = [97]{ - [97][97][97]X.super.(); + [0:97]object X extends [9:97][9]scala.AnyRef { + [9]def (): [9]X.type = [9]{ + [9][9][9]X.super.(); [9]() }; [17:30]private[this] val d: [21]D = [25:30][25:30][25:30]new [29:30]D(); diff --git a/test/files/run/dynamic-applyDynamicNamed.scala b/test/files/run/dynamic-applyDynamicNamed.scala index de15a4857d82..d5185476ba1b 100644 --- a/test/files/run/dynamic-applyDynamicNamed.scala +++ b/test/files/run/dynamic-applyDynamicNamed.scala @@ -13,11 +13,7 @@ object Test extends DirectTest { } """.trim - override def show(): Unit = { - Console.withErr(System.out) { - compile() - } - } + override def show(): Unit = compile() } import language.dynamics diff --git a/test/files/run/dynamic-selectDynamic.check b/test/files/run/dynamic-selectDynamic.check index 29a2a1a3e06c..82cd656e6602 100644 --- a/test/files/run/dynamic-selectDynamic.check +++ b/test/files/run/dynamic-selectDynamic.check @@ -1,8 +1,8 @@ [[syntax trees at end of typer]] // newSource1.scala [0:50]package [0:0] { - [0:50]object X extends [9:50][50]scala.AnyRef { - [50]def (): [9]X.type = [50]{ - [50][50][50]X.super.(); + [0:50]object X extends [9:50][9]scala.AnyRef { + [9]def (): [9]X.type = [9]{ + [9][9][9]X.super.(); [9]() }; [17:30]private[this] val d: [21]D = [25:30][25:30][25:30]new [29:30]D(); diff --git a/test/files/run/dynamic-selectDynamic.scala b/test/files/run/dynamic-selectDynamic.scala index 392d7bd53c57..8383c1f45823 100644 --- a/test/files/run/dynamic-selectDynamic.scala +++ b/test/files/run/dynamic-selectDynamic.scala @@ -12,11 +12,7 @@ object Test extends DirectTest { } """.trim - override def show(): Unit = { - Console.withErr(System.out) { - compile() - } - } + override def show(): Unit = compile() } import language.dynamics diff --git a/test/files/run/dynamic-updateDynamic.check b/test/files/run/dynamic-updateDynamic.check index b320ab129312..5180f3e7bfd8 100644 --- a/test/files/run/dynamic-updateDynamic.check +++ b/test/files/run/dynamic-updateDynamic.check @@ -1,8 +1,8 @@ [[syntax trees at end of typer]] // newSource1.scala [0:69]package [0:0] { - [0:69]object X extends [9:69][69]scala.AnyRef { - [69]def (): [9]X.type = [69]{ - [69][69][69]X.super.(); + [0:69]object X extends [9:69][9]scala.AnyRef { + [9]def (): [9]X.type = [9]{ + [9][9][9]X.super.(); [9]() }; [17:30]private[this] val d: [21]D = [25:30][25:30][25:30]new [29:30]D(); diff --git a/test/files/run/dynamic-updateDynamic.scala b/test/files/run/dynamic-updateDynamic.scala index 237c4884884d..0c5914b61604 100644 --- a/test/files/run/dynamic-updateDynamic.scala +++ b/test/files/run/dynamic-updateDynamic.scala @@ -13,11 +13,7 @@ object Test extends DirectTest { } """.trim - override def show(): Unit = { - Console.withErr(System.out) { - compile() - } - } + override def show(): Unit = compile() } import 
language.dynamics diff --git a/test/files/run/existential-rangepos.check b/test/files/run/existential-rangepos.check index 984baeaaf8e3..39efe241688b 100644 --- a/test/files/run/existential-rangepos.check +++ b/test/files/run/existential-rangepos.check @@ -1,8 +1,8 @@ [[syntax trees at end of patmat]] // newSource1.scala [0:76]package [0:0] { - [0:76]abstract class A[[17:18]T[17:18]] extends [20:76][76]scala.AnyRef { - [76]def (): [20]A[T] = [76]{ - [76][76][76]A.super.(); + [0:76]abstract class A[[17:18]T[17:18]] extends [20:76][20]scala.AnyRef { + [20]def (): [20]A[T] = [20]{ + [20][20][20]A.super.(); [20]() }; [24:51]private[this] val foo: [28]Set[_ <: T] = [47:51]null; diff --git a/test/files/run/existential-rangepos.scala b/test/files/run/existential-rangepos.scala index 2f56e8ebed87..d31a5e754f53 100644 --- a/test/files/run/existential-rangepos.scala +++ b/test/files/run/existential-rangepos.scala @@ -9,5 +9,5 @@ abstract class A[T] { val bar: Set[_ <: T] }""".trim - override def show(): Unit = Console.withErr(System.out)(compile()) + override def show(): Unit = compile() } diff --git a/test/files/run/icode-reader-dead-code.scala b/test/files/run/icode-reader-dead-code.scala index 31f5c06f388d..dd3934a0eef6 100644 --- a/test/files/run/icode-reader-dead-code.scala +++ b/test/files/run/icode-reader-dead-code.scala @@ -30,13 +30,13 @@ object Test extends DirectTest { |} """.stripMargin - compileString(newCompiler("-usejavacp", "-cp", testOutput.path))(aCode) + compileString(newCompiler("-cp", testOutput.path))(aCode) addDeadCode() // If inlining fails, the compiler will issue an inliner warning that is not present in the // check file - compileString(newCompiler("-usejavacp", "-cp", testOutput.path, "-opt:l:inline", "-opt-inline-from:**"))(bCode) + compileString(newCompiler("-cp", testOutput.path, "-opt:l:inline", "-opt-inline-from:**"))(bCode) } def readClass(file: String) = { diff --git a/test/files/run/idempotency-case-classes.check b/test/files/run/idempotency-case-classes.check index 78ee0af219a2..7339a68be71b 100644 --- a/test/files/run/idempotency-case-classes.check +++ b/test/files/run/idempotency-case-classes.check @@ -40,7 +40,7 @@ C(2,3) case _ => false }.&&({ val C$1: C = x$1.asInstanceOf[C]; - C.this.x.==(C$1.x).&&(C.this.y.==(C$1.y)).&&(C$1.canEqual(C.this)) + C.this.x.==(C$1.x).&&(C.this.y.==(C$1.y).&&(C$1.canEqual(C.this))) })) }; object C extends scala.runtime.AbstractFunction2[Int,Int,C] with java.io.Serializable { diff --git a/test/files/run/indy-via-macro-class-constant-bsa.check b/test/files/run/indy-via-macro-class-constant-bsa.check new file mode 100644 index 000000000000..ecb48be612a5 --- /dev/null +++ b/test/files/run/indy-via-macro-class-constant-bsa.check @@ -0,0 +1 @@ +Test$C diff --git a/test/files/run/indy-via-macro-class-constant-bsa/Bootstrap.java b/test/files/run/indy-via-macro-class-constant-bsa/Bootstrap.java new file mode 100644 index 000000000000..3457910e8b71 --- /dev/null +++ b/test/files/run/indy-via-macro-class-constant-bsa/Bootstrap.java @@ -0,0 +1,14 @@ +package test; + +import java.lang.invoke.*; + +public final class Bootstrap { + private Bootstrap() { + } + + public static CallSite bootstrap(MethodHandles.Lookup lookup, String invokedName, + MethodType invokedType, + Class cls) throws Throwable { + return new java.lang.invoke.ConstantCallSite(java.lang.invoke.MethodHandles.constant(String.class, cls.getName())); + } +} diff --git a/test/files/run/indy-via-macro-class-constant-bsa/Test_2.scala 
b/test/files/run/indy-via-macro-class-constant-bsa/Test_2.scala new file mode 100644 index 000000000000..e5e8b4c1b32e --- /dev/null +++ b/test/files/run/indy-via-macro-class-constant-bsa/Test_2.scala @@ -0,0 +1,6 @@ +object Test { + def main(args: Array[String]): Unit = { + println(Macro.classNameOf(classOf[C])) + } + class C(val x: Int) extends AnyVal +} diff --git a/test/files/run/indy-via-macro-class-constant-bsa/macro_1.scala b/test/files/run/indy-via-macro-class-constant-bsa/macro_1.scala new file mode 100644 index 000000000000..366dd7ff03f9 --- /dev/null +++ b/test/files/run/indy-via-macro-class-constant-bsa/macro_1.scala @@ -0,0 +1,29 @@ +import java.util.regex._ +import scala.reflect.internal.SymbolTable +import scala.reflect.macros.blackbox._ +import language.experimental.macros +import java.lang.invoke._ + +object Macro { + def classNameOf(expr: Class[_]): String = macro Impl.classNameOf +} + + +class Impl(val c: Context) { + def classNameOf(expr: c.Tree): c.Tree = { + { + val symtab = c.universe.asInstanceOf[SymbolTable] + import symtab._ + val bootstrapMethod = typeOf[test.Bootstrap].companion.member(TermName("bootstrap")) + val paramSym = NoSymbol.newTermSymbol(TermName("x")).setInfo(typeOf[String]) + val dummySymbol = NoSymbol.newTermSymbol(TermName("classNameOf")).setInfo(internal.nullaryMethodType(typeOf[String])) + val bootstrapArgTrees: List[Tree] = List( + Literal(Constant(bootstrapMethod)).setType(NoType), + expr.asInstanceOf[Tree], + ) + val result = ApplyDynamic(Ident(dummySymbol).setType(dummySymbol.info), bootstrapArgTrees) + result.setType(dummySymbol.info.resultType) + result.asInstanceOf[c.Tree] + } + } +} diff --git a/test/files/run/indy-via-macro-method-type-bsa.check b/test/files/run/indy-via-macro-method-type-bsa.check new file mode 100644 index 000000000000..c0297137ee5e --- /dev/null +++ b/test/files/run/indy-via-macro-method-type-bsa.check @@ -0,0 +1,2 @@ +(int)String +()int diff --git a/test/files/run/indy-via-macro-method-type-bsa/Bootstrap.java b/test/files/run/indy-via-macro-method-type-bsa/Bootstrap.java new file mode 100644 index 000000000000..2a788a758dd5 --- /dev/null +++ b/test/files/run/indy-via-macro-method-type-bsa/Bootstrap.java @@ -0,0 +1,14 @@ +package test; + +import java.lang.invoke.*; + +public final class Bootstrap { + private Bootstrap() { + } + + public static CallSite bootstrap(MethodHandles.Lookup lookup, String invokedName, + MethodType invokedType, + MethodType mt) throws Throwable { + return new java.lang.invoke.ConstantCallSite(java.lang.invoke.MethodHandles.constant(MethodType.class, mt)); + } +} diff --git a/test/files/run/indy-via-macro-method-type-bsa/Test_2.scala b/test/files/run/indy-via-macro-method-type-bsa/Test_2.scala new file mode 100644 index 000000000000..b51a72006b29 --- /dev/null +++ b/test/files/run/indy-via-macro-method-type-bsa/Test_2.scala @@ -0,0 +1,7 @@ +object Test { + def main(args: Array[String]): Unit = { + println(Macro.methodTypeOf({def x(a: Int): String = ???})) + println(Macro.methodTypeOf({def x(): C = ???})) + } + class C(val x: Int) extends AnyVal +} diff --git a/test/files/run/indy-via-macro-method-type-bsa/macro_1.scala b/test/files/run/indy-via-macro-method-type-bsa/macro_1.scala new file mode 100644 index 000000000000..f058584587e2 --- /dev/null +++ b/test/files/run/indy-via-macro-method-type-bsa/macro_1.scala @@ -0,0 +1,35 @@ +import java.util.regex._ +import scala.reflect.internal.SymbolTable +import scala.reflect.macros.blackbox._ +import language.experimental.macros +import 
java.lang.invoke._ + +object Macro { + def methodTypeOf(expr: Any): MethodType = macro Impl.methodTypeOf +} + + +class Impl(val c: Context) { + def methodTypeOf(expr: c.Tree): c.Tree = { + { + val symtab = c.universe.asInstanceOf[SymbolTable] + import symtab._ + val tp = transformedType(expr.asInstanceOf[Tree] match { + case Block((dd: DefDef) :: Nil, Literal(Constant(()))) => + dd.symbol.info + case expr => + expr.tpe + }) + val bootstrapMethod = typeOf[test.Bootstrap].companion.member(TermName("bootstrap")) + val paramSym = NoSymbol.newTermSymbol(TermName("x")).setInfo(typeOf[String]) + val dummySymbol = NoSymbol.newTermSymbol(TermName("methodTypeOf")).setInfo(internal.nullaryMethodType(typeOf[java.lang.invoke.MethodType])) + val bootstrapArgTrees: List[Tree] = List( + Literal(Constant(bootstrapMethod)).setType(NoType), + Literal(Constant(tp)).setType(typeOf[java.lang.invoke.MethodType]), + ) + val result = ApplyDynamic(Ident(dummySymbol).setType(dummySymbol.info), bootstrapArgTrees) + result.setType(dummySymbol.info.resultType) + result.asInstanceOf[c.Tree] + } + } +} diff --git a/test/files/run/indy-via-macro-reflector.check b/test/files/run/indy-via-macro-reflector.check new file mode 100644 index 000000000000..e14bfd6b53ae --- /dev/null +++ b/test/files/run/indy-via-macro-reflector.check @@ -0,0 +1,3 @@ +HandleAndStrings{handle=MethodHandle(C,Object,int)String, scalaParamNames=[p1, p2]}, dynamic +HandleAndStrings{handle=MethodHandle(int)C1, scalaParamNames=[a]}, dynamic +HandleAndStrings{handle=MethodHandle(T)int, scalaParamNames=[]}, dynamic diff --git a/test/files/run/indy-via-macro-reflector/Bootstrap.java b/test/files/run/indy-via-macro-reflector/Bootstrap.java new file mode 100644 index 000000000000..468b8e43acc6 --- /dev/null +++ b/test/files/run/indy-via-macro-reflector/Bootstrap.java @@ -0,0 +1,44 @@ +package test; + +import java.lang.invoke.*; + +public final class Bootstrap { + private Bootstrap() { + } + + public static CallSite bootstrap(MethodHandles.Lookup lookup, String invokedName, + MethodType invokedType, + Object... 
args) throws Throwable { + int arity = (int) args[0]; + MethodHandle MH = (MethodHandle) args[1]; + String[] strings = new String[arity]; + for (int i = 0; i < arity; i++) { + strings[i] = (String) args[2 + i]; + } + + Reflection handleAndStrings = new Reflection(MH, strings); + MethodHandle foo = MethodHandles.lookup().findVirtual(Reflection.class, "foo", MethodType.methodType(String.class, String.class)); + return new java.lang.invoke.ConstantCallSite(foo.bindTo(handleAndStrings)); + } + static class Reflection { + private final MethodHandle handle; + private final String[] scalaParamNames; + + public Reflection(MethodHandle handle, String[] scalaParamNames) { + this.handle = handle; + this.scalaParamNames = scalaParamNames; + } + + public String foo(String f) { + return toString() + ", " + f; + } + + @java.lang.Override + public java.lang.String toString() { + return "HandleAndStrings{" + + "handle=" + handle + + ", scalaParamNames=" + java.util.Arrays.toString(scalaParamNames) + + '}'; + } + } +} diff --git a/test/files/run/indy-via-macro-reflector/Test_2.scala b/test/files/run/indy-via-macro-reflector/Test_2.scala new file mode 100644 index 000000000000..d0ffefb2c0ec --- /dev/null +++ b/test/files/run/indy-via-macro-reflector/Test_2.scala @@ -0,0 +1,18 @@ +object Test { + def main(args: Array[String]): Unit = { + println(new C().foo(null, 0)) + println(Macro.reflectorConstructor("dynamic")) + println(Macro.reflectorTrait("dynamic")) + } +} + +class C { + def foo(p1: Object, p2: Int): String = { + Macro.reflector("dynamic") + privateFoo(p1, p2) + } + + private def privateFoo(p1: Object, p2: Int): String = { + Macro.reflector("dynamic") + } +} diff --git a/test/files/run/indy-via-macro-reflector/macro_1.scala b/test/files/run/indy-via-macro-reflector/macro_1.scala new file mode 100644 index 000000000000..46783d8cecaa --- /dev/null +++ b/test/files/run/indy-via-macro-reflector/macro_1.scala @@ -0,0 +1,53 @@ +import java.util.regex._ +import scala.reflect.internal.SymbolTable +import scala.reflect.macros.blackbox._ +import language.experimental.macros +import java.lang.invoke.{MethodHandle, MethodHandles} + +object Macro { + def reflector(dynamic: String): String = macro Impl.reflector + def reflectorConstructor(dynamic: String): String = macro Impl.reflectorConstructor + def reflectorTrait(dynamic: String): String = macro Impl.reflectorTrait +} + +class C1(a: Int) { +} + +trait T { + def foo = 42 +} + +class Impl(val c: Context) { + def reflectorConstructor(dynamic: c.Tree): c.Tree = { + import c.universe._ + impl(dynamic, symbolOf[C1].info.decl(nme.CONSTRUCTOR)) + } + def reflectorTrait(dynamic: c.Tree): c.Tree = { + import c.universe._ + impl(dynamic, symbolOf[T].info.decl(TermName("foo"))) + } + + def reflector(dynamic: c.Tree): c.Tree = { + impl(dynamic, c.internal.enclosingOwner) + } + + private def impl(dynamic: c.Tree, reflectionSubject0: c.Symbol): c.Tree = { + { + val symtab = c.universe.asInstanceOf[SymbolTable] + import symtab._ + val reflectionSubject = reflectionSubject0.asInstanceOf[Symbol] + val bootstrapMethod = typeOf[test.Bootstrap].companion.member(TermName("bootstrap")) + val paramSym = NoSymbol.newTermSymbol(TermName("x")).setInfo(typeOf[String]) + val dummySymbol = NoSymbol.newTermSymbol(TermName("reflector")).setInfo(internal.methodType(paramSym :: Nil, typeOf[String])) + val reflectionSubjectParams = reflectionSubject.info.paramss.flatten + val bootstrapArgTrees: List[Tree] = List( + Literal(Constant(bootstrapMethod)).setType(NoType), + 
Literal(Constant(reflectionSubjectParams.length)).setType(typeOf[Int]), + Literal(Constant(reflectionSubject)).setType(typeOf[MethodHandle]) + ) ::: reflectionSubjectParams.map(s => Literal(Constant(s.name.decoded)).setType(typeOf[String])) + val result = ApplyDynamic(Ident(dummySymbol).setType(dummySymbol.info), bootstrapArgTrees ::: List(dynamic.asInstanceOf[symtab.Tree])) + result.setType(dummySymbol.info.resultType) + result.asInstanceOf[c.Tree] + } + } +} diff --git a/test/files/run/infix-rangepos.scala b/test/files/run/infix-rangepos.scala new file mode 100644 index 000000000000..8d2a16a0b536 --- /dev/null +++ b/test/files/run/infix-rangepos.scala @@ -0,0 +1,21 @@ +import scala.tools.partest._ + +object Test extends CompilerTest { + import global._ + override def extraSettings = super.extraSettings + " -Yrangepos" + override def sources = List( + "class C1 { def t = List(1).map ( x => x ) }", + "class C2 { def t = List(1).map { x => x } }", + "class C3 { def t = List(1).map ({x => x}) }", + "class C4 { def t = List(1) map ( x => x ) }", + "class C5 { def t = List(1) map { x => x } }", + "class C6 { def t = List(1) map ({x => x}) }") + + def check(source: String, unit: CompilationUnit): Unit = unit.body foreach { + case dd: DefDef if dd.name.startsWith("t") => + val pos = dd.rhs.pos + assert(pos.start == 19, pos.start) + assert(pos.end == 41, pos.end) + case _ => + } +} diff --git a/test/files/run/lambda-serialization-gc.javaopts b/test/files/run/lambda-serialization-gc.javaopts deleted file mode 100644 index 9ecdb8a4dafd..000000000000 --- a/test/files/run/lambda-serialization-gc.javaopts +++ /dev/null @@ -1 +0,0 @@ --Xmx512m \ No newline at end of file diff --git a/test/files/run/lambda-serialization-gc.scala b/test/files/run/lambda-serialization-gc.scala index 9a179d4ed5c1..529a32146302 100644 --- a/test/files/run/lambda-serialization-gc.scala +++ b/test/files/run/lambda-serialization-gc.scala @@ -1,3 +1,5 @@ +// java: -Xmx512m + import java.io._ import java.net.URLClassLoader diff --git a/test/files/run/large_class.scala b/test/files/run/large_class.scala index b10462aa5681..e422f653a2da 100644 --- a/test/files/run/large_class.scala +++ b/test/files/run/large_class.scala @@ -2,7 +2,6 @@ import scala.tools.partest._ // a cold run of partest takes about 15s for this test on my laptop object Test extends DirectTest { - override def extraSettings: String = "-usejavacp" def s(n: Int) = "\""+n+"\"" @@ -18,9 +17,5 @@ object Test extends DirectTest { s(n+60000)+")") mkString ";"} |}""".stripMargin.trim - override def show(): Unit = { - Console.withErr(System.out) { - compile() - } - } + override def show(): Unit = compile() } diff --git a/test/files/run/large_code.scala b/test/files/run/large_code.scala index e6104d2c062c..c3b0beac7876 100644 --- a/test/files/run/large_code.scala +++ b/test/files/run/large_code.scala @@ -2,7 +2,6 @@ import scala.tools.partest._ // a cold run of partest takes about 15s for this test on my laptop object Test extends DirectTest { - override def extraSettings: String = "-usejavacp" // test that we hit the code size limit and error out gracefully // 5958 is the magic number (2^16/11 -- each `a(1,2,3,4,5,6)` is 11 bytes of bytecode) @@ -15,9 +14,5 @@ object Test extends DirectTest { | } |}""".stripMargin.trim - override def show(): Unit = { - Console.withErr(System.out) { - compile() - } - } + override def show(): Unit = compile() } diff --git a/test/files/run/literals-parsing.check b/test/files/run/literals-parsing.check index 25a57dd41d27..e1b3cac77718 100644 
--- a/test/files/run/literals-parsing.check +++ b/test/files/run/literals-parsing.check @@ -1,6 +1,6 @@ [[syntax trees at end of parser]] // newSource1.scala [0:161]package [0:0] { - [0:161]abstract trait T extends [8:161][161]scala.AnyRef { + [0:161]abstract trait T extends [8:161][8]scala.AnyRef { [8]def $init$() = [8]{ [8]() }; diff --git a/test/files/run/literals-parsing.scala b/test/files/run/literals-parsing.scala index eb94d5a260df..04a0c5f4d359 100644 --- a/test/files/run/literals-parsing.scala +++ b/test/files/run/literals-parsing.scala @@ -19,7 +19,5 @@ object Test extends DirectTest { } """.trim - override def show(): Unit = Console.withErr(System.out) { - compile() - } + override def show(): Unit = compile() } diff --git a/test/files/run/macroPlugins-namerHooks.scala b/test/files/run/macroPlugins-namerHooks.scala index ca049e78e9e6..89ee7756867a 100644 --- a/test/files/run/macroPlugins-namerHooks.scala +++ b/test/files/run/macroPlugins-namerHooks.scala @@ -2,7 +2,6 @@ import scala.tools.partest._ import scala.tools.nsc._ object Test extends DirectTest { - override def extraSettings: String = "-usejavacp" def code = """ case class C(x: Int, y: Int) diff --git a/test/files/run/maxerrs.scala b/test/files/run/maxerrs.scala index c04b3e5bad1f..fa2768ec6688 100644 --- a/test/files/run/maxerrs.scala +++ b/test/files/run/maxerrs.scala @@ -14,8 +14,6 @@ object Test extends DirectTest { } """.trim - override def extraSettings = "-usejavacp" - // a reporter that ignores all limits lazy val store = new UnfilteredStoreReporter diff --git a/test/files/run/multiLineOps.scala b/test/files/run/multiLineOps.scala index 0bba854027fc..ef319d9210dc 100644 --- a/test/files/run/multiLineOps.scala +++ b/test/files/run/multiLineOps.scala @@ -1,12 +1,15 @@ // scalac: -Xsource:3 // -// without backticks, "not found: value +" +// was: without backticks, "not found: value +" (but parsed here as +a * 6, where backticks fool the lexer) +// now: + is taken as "solo" infix op // object Test extends App { val a = 7 val x = 1 - + // - `a` * 6 + + + `a` + * + 6 - assert(x == 1) + assert(x == 1 + 7 * 6, x) // was: 1, now: successor(42) } diff --git a/test/files/run/names-defaults.check b/test/files/run/names-defaults.check index 8b6d99ec2981..7e38494250da 100644 --- a/test/files/run/names-defaults.check +++ b/test/files/run/names-defaults.check @@ -13,9 +13,6 @@ names-defaults.scala:371: warning: the parameter name x is deprecated: use s ins names-defaults.scala:35: warning: local var var2 in value is never used var var2 = 0 ^ -names-defaults.scala:108: warning: local val x$34 in value is never used - println(t7.f(b = "sl19", a = 28)) // first - ^ names-defaults.scala:279: warning: local val u in method foo is never used class A2489 { def foo(): Unit = { def bar(a: Int = 1) = a; bar(); val u = 0 } } ^ @@ -25,12 +22,6 @@ names-defaults.scala:280: warning: local val v in method foo is never used names-defaults.scala:280: warning: local val u in method foo is never used class A2489x2 { def foo(): Unit = { val v = 10; def bar(a: Int = 1, b: Int = 2) = a; bar(); val u = 0 } } ^ -names-defaults.scala:380: warning: local val x$104 in value is never used - println(t3697.a(3)()) - ^ -names-defaults.scala:385: warning: local val x$112 in value is never used - println(t3697.b(b = 1, a = 2, c = 3)) - ^ names-defaults.scala:269: warning: multiline expressions might require enclosing parentheses; a value can be silently discarded when Unit is expected spawn(b = { val ttt = 1; ttt }, a = 0) ^ diff --git 
a/test/files/run/package-object-stale-decl.scala b/test/files/run/package-object-stale-decl.scala new file mode 100644 index 000000000000..bbf1ba7cda16 --- /dev/null +++ b/test/files/run/package-object-stale-decl.scala @@ -0,0 +1,40 @@ +import scala.reflect.io.Path +import scala.tools.partest._ +import java.io.File + +object Test extends StoreReporterDirectTest { + class V1 { + def pkg = "package object b extends B" + def B = "package b; class B { def stale = 42 }" + def A = "package b; class A { stale }" + } + class V2 extends V1 { + override def B = "package b; class B { }" + } + + override def extraSettings = s"-cp ${sys.props("partest.lib")}${File.pathSeparator}$testOutput" + + def show(): Unit = { + val v1 = new V1 + val v2 = new V2 + compiles(v1.A, v1.B, v1.pkg)() + delete(testOutput / "b" / "A.class") + compiles(v2.B, v2.A)(Some("not found: value stale")) + } + + def compiles(codes: String*)(expectedError: Option[String] = None) = { + val global = newCompiler() + withRun(global)(_ compileSources newSources(codes: _*)) + val reporterOutput = storeReporter.infos.map(x => x.pos.showError(x.msg)).mkString("\n") + expectedError match { + case None => + assert(!global.reporter.hasErrors, reporterOutput) + case Some(text) => + assert(global.reporter.hasErrors, "expected compile failure, got success") + assert(reporterOutput.contains(text), reporterOutput) + } + } + + def delete(paths: Path*) = paths.foreach(p => assert(p.delete(), s"$p didn't delete")) + def code = "" +} diff --git a/test/files/run/package-object-toolbox.scala b/test/files/run/package-object-toolbox.scala new file mode 100644 index 000000000000..d84a7e3c2668 --- /dev/null +++ b/test/files/run/package-object-toolbox.scala @@ -0,0 +1,40 @@ +import java.io.File +import java.net.URLClassLoader + +import scala.reflect.io.Path +import scala.reflect.runtime.{ universe => ru } +import scala.tools.partest._ +import scala.tools.reflect.ToolBox + +import org.junit.Assert._ + +object Test extends StoreReporterDirectTest { + val cp = List(sys.props("partest.lib"), testOutput.path) + override def extraSettings = s"-cp ${cp.mkString(File.pathSeparator)}" + + def show(): Unit = { + compiles("package object pkg { def foo = 1 }") + val loader = new URLClassLoader(cp.map(new File(_).toURI.toURL).toArray) + val mirror = ru.runtimeMirror(loader) + + val toolbox = mirror.mkToolBox() + val result1 = toolbox.eval(toolbox.parse("pkg.foo")) + assertEquals(1, result1) + + val obj = toolbox.eval(toolbox.parse("pkg.`package`")) + val pkg = mirror.staticPackage("pkg") + val sym = pkg.info.decl(ru.TermName("foo")).asMethod + val meth = mirror.reflect(obj).reflectMethod(sym) + val res2 = meth.apply() + assertEquals(1, res2) + } + + def compiles(codes: String*) = { + val global = newCompiler() + withRun(global)(_ compileSources newSources(codes: _*)) + assert(!global.reporter.hasErrors, storeReporter.infos.mkString("\n")) + } + + def delete(paths: Path*) = paths.foreach(p => assert(p.delete(), s"$p didn't delete")) + def code = "" +} diff --git a/test/files/run/package-object-with-inner-class-in-ancestor-simpler-still.scala b/test/files/run/package-object-with-inner-class-in-ancestor-simpler-still.scala new file mode 100644 index 000000000000..9d467f714044 --- /dev/null +++ b/test/files/run/package-object-with-inner-class-in-ancestor-simpler-still.scala @@ -0,0 +1,25 @@ +import scala.reflect.io.Path +import scala.tools.partest._ +import java.io.File + +object Test extends StoreReporterDirectTest { + def A = "package b; class A" + def pkg = "package object 
b extends A" + + override def extraSettings = s"-cp ${sys.props("partest.lib")}${File.pathSeparator}$testOutput" + + def show(): Unit = { + compiles(A, pkg) + delete(testOutput / "b" / "A.class") + compiles(A) + } + + def compiles(codes: String*) = { + val global = newCompiler() + withRun(global)(_ compileSources newSources(codes: _*)) + assert(!global.reporter.hasErrors, storeReporter.infos.mkString("\n")) + } + + def delete(paths: Path*) = paths.foreach(p => assert(p.delete(), s"$p didn't delete")) + def code = "" +} diff --git a/test/files/run/package-object-with-inner-class-in-ancestor-simpler.scala b/test/files/run/package-object-with-inner-class-in-ancestor-simpler.scala new file mode 100644 index 000000000000..123de8d847b1 --- /dev/null +++ b/test/files/run/package-object-with-inner-class-in-ancestor-simpler.scala @@ -0,0 +1,26 @@ +import scala.reflect.io.Path +import scala.tools.partest._ +import java.io.File + +object Test extends StoreReporterDirectTest { + def A = "package b; class A" + def pkg = "package object b extends A" + def M = "package b; class M" + + override def extraSettings = s"-cp ${sys.props("partest.lib")}${File.pathSeparator}$testOutput" + + def show(): Unit = { + compiles(A, pkg, M) + delete(testOutput / "b" / "A.class") + compiles(M, A) + } + + def compiles(codes: String*) = { + val global = newCompiler() + withRun(global)(_ compileSources newSources(codes: _*)) + assert(!global.reporter.hasErrors, storeReporter.infos.mkString("\n")) + } + + def delete(paths: Path*) = paths.foreach(p => assert(p.delete(), s"$p didn't delete")) + def code = "" +} diff --git a/test/files/run/package-object-with-inner-class-in-ancestor.scala b/test/files/run/package-object-with-inner-class-in-ancestor.scala new file mode 100644 index 000000000000..03e1c561de0d --- /dev/null +++ b/test/files/run/package-object-with-inner-class-in-ancestor.scala @@ -0,0 +1,33 @@ +import scala.reflect.io.Path +import scala.tools.partest._ +import java.io.File + +object Test extends StoreReporterDirectTest { + class V1 { + def O = "package b; object O { def o = \"\" }" + def A = "package b; class A { class C { O.o } }" + def pkg = "package object b extends A" + } + class V2 extends V1 { + override def O = "package b; object O { def o = 42 }" + } + + override def extraSettings = s"-cp ${sys.props("partest.lib")}${File.pathSeparator}$testOutput" + + def show(): Unit = { + val v1 = new V1 + compiles(v1.O, v1.A, v1.pkg) + delete(testOutput / "b" / "A.class", testOutput / "b" / "A$C.class") + val v2 = new V2 + compiles(v2.O, v2.A) + } + + def compiles(codes: String*) = { + val global = newCompiler() + withRun(global)(_ compileSources newSources(codes: _*)) + assert(!global.reporter.hasErrors, storeReporter.infos.mkString("\n")) + } + + def delete(paths: Path*) = paths.foreach(p => assert(p.delete(), s"$p didn't delete")) + def code = "" +} diff --git a/test/files/run/patmat-behavior.check b/test/files/run/patmat-behavior.check index e36e3add5503..ec81fbb143e2 100644 --- a/test/files/run/patmat-behavior.check +++ b/test/files/run/patmat-behavior.check @@ -1,91 +1,91 @@ -patmat-behavior.scala:82: warning: fruitless type test: a value of type s.C00[A] cannot also be a s.C10[A] +patmat-behavior.scala:82: warning: fruitless type test: a value of type s.C00[A] cannot also be a s.C10[_] def gd1[A](x: C00[A]) = x match { case G00() => ??? 
; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x } ^ -patmat-behavior.scala:82: warning: fruitless type test: a value of type s.C00[A] cannot also be a s.C20[A] +patmat-behavior.scala:82: warning: fruitless type test: a value of type s.C00[A] cannot also be a s.C20[_] def gd1[A](x: C00[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x } ^ -patmat-behavior.scala:82: warning: fruitless type test: a value of type s.C00[A] cannot also be a s.C01[A] +patmat-behavior.scala:82: warning: fruitless type test: a value of type s.C00[A] cannot also be a s.C01[_] def gd1[A](x: C00[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x } ^ -patmat-behavior.scala:82: warning: fruitless type test: a value of type s.C00[A] cannot also be a s.C11[A] +patmat-behavior.scala:82: warning: fruitless type test: a value of type s.C00[A] cannot also be a s.C11[_] def gd1[A](x: C00[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x } ^ -patmat-behavior.scala:82: warning: fruitless type test: a value of type s.C00[A] cannot also be a s.C21[A] +patmat-behavior.scala:82: warning: fruitless type test: a value of type s.C00[A] cannot also be a s.C21[_] def gd1[A](x: C00[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x } ^ -patmat-behavior.scala:83: warning: fruitless type test: a value of type s.C10[A] cannot also be a s.C00[A] +patmat-behavior.scala:83: warning: fruitless type test: a value of type s.C10[A] cannot also be a s.C00[_] def gd2[A](x: C10[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x } ^ -patmat-behavior.scala:83: warning: fruitless type test: a value of type s.C10[A] cannot also be a s.C20[A] +patmat-behavior.scala:83: warning: fruitless type test: a value of type s.C10[A] cannot also be a s.C20[_] def gd2[A](x: C10[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x } ^ -patmat-behavior.scala:83: warning: fruitless type test: a value of type s.C10[A] cannot also be a s.C01[A] +patmat-behavior.scala:83: warning: fruitless type test: a value of type s.C10[A] cannot also be a s.C01[_] def gd2[A](x: C10[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x } ^ -patmat-behavior.scala:83: warning: fruitless type test: a value of type s.C10[A] cannot also be a s.C11[A] +patmat-behavior.scala:83: warning: fruitless type test: a value of type s.C10[A] cannot also be a s.C11[_] def gd2[A](x: C10[A]) = x match { case G00() => ??? 
; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x } ^ -patmat-behavior.scala:83: warning: fruitless type test: a value of type s.C10[A] cannot also be a s.C21[A] +patmat-behavior.scala:83: warning: fruitless type test: a value of type s.C10[A] cannot also be a s.C21[_] def gd2[A](x: C10[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x } ^ -patmat-behavior.scala:84: warning: fruitless type test: a value of type s.C20[A] cannot also be a s.C00[A] +patmat-behavior.scala:84: warning: fruitless type test: a value of type s.C20[A] cannot also be a s.C00[_] def gd3[A](x: C20[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x } ^ -patmat-behavior.scala:84: warning: fruitless type test: a value of type s.C20[A] cannot also be a s.C10[A] +patmat-behavior.scala:84: warning: fruitless type test: a value of type s.C20[A] cannot also be a s.C10[_] def gd3[A](x: C20[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x } ^ -patmat-behavior.scala:84: warning: fruitless type test: a value of type s.C20[A] cannot also be a s.C01[A] +patmat-behavior.scala:84: warning: fruitless type test: a value of type s.C20[A] cannot also be a s.C01[_] def gd3[A](x: C20[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x } ^ -patmat-behavior.scala:84: warning: fruitless type test: a value of type s.C20[A] cannot also be a s.C11[A] +patmat-behavior.scala:84: warning: fruitless type test: a value of type s.C20[A] cannot also be a s.C11[_] def gd3[A](x: C20[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x } ^ -patmat-behavior.scala:84: warning: fruitless type test: a value of type s.C20[A] cannot also be a s.C21[A] +patmat-behavior.scala:84: warning: fruitless type test: a value of type s.C20[A] cannot also be a s.C21[_] def gd3[A](x: C20[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x } ^ -patmat-behavior.scala:85: warning: fruitless type test: a value of type s.C01[A] cannot also be a s.C00[A] +patmat-behavior.scala:85: warning: fruitless type test: a value of type s.C01[A] cannot also be a s.C00[_] def gd4[A](x: C01[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x } ^ -patmat-behavior.scala:85: warning: fruitless type test: a value of type s.C01[A] cannot also be a s.C10[A] +patmat-behavior.scala:85: warning: fruitless type test: a value of type s.C01[A] cannot also be a s.C10[_] def gd4[A](x: C01[A]) = x match { case G00() => ??? 
; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x } ^ -patmat-behavior.scala:85: warning: fruitless type test: a value of type s.C01[A] cannot also be a s.C20[A] +patmat-behavior.scala:85: warning: fruitless type test: a value of type s.C01[A] cannot also be a s.C20[_] def gd4[A](x: C01[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x } ^ -patmat-behavior.scala:85: warning: fruitless type test: a value of type s.C01[A] cannot also be a s.C11[A] +patmat-behavior.scala:85: warning: fruitless type test: a value of type s.C01[A] cannot also be a s.C11[_] def gd4[A](x: C01[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x } ^ -patmat-behavior.scala:85: warning: fruitless type test: a value of type s.C01[A] cannot also be a s.C21[A] +patmat-behavior.scala:85: warning: fruitless type test: a value of type s.C01[A] cannot also be a s.C21[_] def gd4[A](x: C01[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x } ^ -patmat-behavior.scala:86: warning: fruitless type test: a value of type s.C11[A] cannot also be a s.C00[A] +patmat-behavior.scala:86: warning: fruitless type test: a value of type s.C11[A] cannot also be a s.C00[_] def gd5[A](x: C11[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x } ^ -patmat-behavior.scala:86: warning: fruitless type test: a value of type s.C11[A] cannot also be a s.C10[A] +patmat-behavior.scala:86: warning: fruitless type test: a value of type s.C11[A] cannot also be a s.C10[_] def gd5[A](x: C11[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x } ^ -patmat-behavior.scala:86: warning: fruitless type test: a value of type s.C11[A] cannot also be a s.C20[A] +patmat-behavior.scala:86: warning: fruitless type test: a value of type s.C11[A] cannot also be a s.C20[_] def gd5[A](x: C11[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x } ^ -patmat-behavior.scala:86: warning: fruitless type test: a value of type s.C11[A] cannot also be a s.C01[A] +patmat-behavior.scala:86: warning: fruitless type test: a value of type s.C11[A] cannot also be a s.C01[_] def gd5[A](x: C11[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x } ^ -patmat-behavior.scala:86: warning: fruitless type test: a value of type s.C11[A] cannot also be a s.C21[A] +patmat-behavior.scala:86: warning: fruitless type test: a value of type s.C11[A] cannot also be a s.C21[_] def gd5[A](x: C11[A]) = x match { case G00() => ??? 
; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x } ^ -patmat-behavior.scala:87: warning: fruitless type test: a value of type s.C21[A] cannot also be a s.C00[A] +patmat-behavior.scala:87: warning: fruitless type test: a value of type s.C21[A] cannot also be a s.C00[_] def gd6[A](x: C21[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x } ^ -patmat-behavior.scala:87: warning: fruitless type test: a value of type s.C21[A] cannot also be a s.C10[A] +patmat-behavior.scala:87: warning: fruitless type test: a value of type s.C21[A] cannot also be a s.C10[_] def gd6[A](x: C21[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x } ^ -patmat-behavior.scala:87: warning: fruitless type test: a value of type s.C21[A] cannot also be a s.C20[A] +patmat-behavior.scala:87: warning: fruitless type test: a value of type s.C21[A] cannot also be a s.C20[_] def gd6[A](x: C21[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x } ^ -patmat-behavior.scala:87: warning: fruitless type test: a value of type s.C21[A] cannot also be a s.C01[A] +patmat-behavior.scala:87: warning: fruitless type test: a value of type s.C21[A] cannot also be a s.C01[_] def gd6[A](x: C21[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x } ^ -patmat-behavior.scala:87: warning: fruitless type test: a value of type s.C21[A] cannot also be a s.C11[A] +patmat-behavior.scala:87: warning: fruitless type test: a value of type s.C21[A] cannot also be a s.C11[_] def gd6[A](x: C21[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x } ^ patmat-behavior.scala:43: warning: match may not be exhaustive. 
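Note: the hunks above only change how the pattern type is printed in the "fruitless type test" warning (the type argument now appears as a wildcard, e.g. s.C10[_] instead of s.C10[A], since it cannot be checked at runtime). A minimal sketch of the kind of code that triggers this warning, using invented names rather than the test's own classes:

class C00[A]
class C10[A]

object FruitlessDemo {
  def probe[A](x: C00[A]): Boolean =
    // C00 and C10 are unrelated classes, so no value can be both; scalac
    // warns "fruitless type test: a value of type C00[A] cannot also be a
    // C10[_]" -- with the wildcard, as in the updated expectations above,
    // because the type argument is erased and cannot be tested anyway.
    x.isInstanceOf[C10[_]]
}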
diff --git a/test/files/run/patmat-exprs.check b/test/files/run/patmat-exprs.check deleted file mode 100644 index b6df9385faa0..000000000000 --- a/test/files/run/patmat-exprs.check +++ /dev/null @@ -1 +0,0 @@ -((5 + 10) + 300) diff --git a/test/files/run/patmat-exprs.scala b/test/files/run/patmat-exprs.scala index bece2d04a1ac..0f7b71803a3f 100644 --- a/test/files/run/patmat-exprs.scala +++ b/test/files/run/patmat-exprs.scala @@ -1,4 +1,4 @@ -// scalac: -deprecation +// scalac: -Werror -Xlint // import scala.language.{ implicitConversions } @@ -31,7 +31,7 @@ object Test { } def main(args: Array[String]): Unit = { - println((5: Expr[Int]) + 10 + 15 * 20) + assert("((5 + 10) + 300)" == ((5: Expr[Int]) + 10 + 15 * 20).toString) } } @@ -156,7 +156,7 @@ trait Pattern { if (f.isDefinedAt(this)) (f(this) :: a) else a } - def leaves: List[Leaf[T]] = collect { case l: Leaf[T] => l } + def leaves: List[Leaf[T]] = collect { case l: Leaf[T @unchecked] => l } def + (other: Expr[T])(implicit n: NumericOps[T]) = Add(List(this, other)) def - (other: Expr[T])(implicit n: NumericOps[T]) = Sub(this, other) @@ -301,7 +301,7 @@ trait Pattern { private def optimizeWith(f: Expr[T] => Expr[T]): Expr[T] = { f(mapArgs(EndoFunction[Expr[_]]( - a => a match { case x: Expr[T] => x.optimizeWith(f) } + a => a match { case x: Expr[T @unchecked] => x.optimizeWith(f) } ))) } @@ -512,9 +512,7 @@ trait Pattern { override lazy val hashCode = ScalaRunTime._hashCode(this); } - - abstract class Compare[T](left: Expr[T], right: Expr[T], cmp: (T, T) => Boolean)(implicit num: NumericOps[T]) - extends Expr[Boolean] { + abstract class Compare[T: NumericOps](left: Expr[T], right: Expr[T], cmp: (T, T) => Boolean) extends Expr[Boolean] { def derivative(v: Var[Boolean]) = throw new IllegalStateException("Derivative of Boolean not allowed") def eval(f: Any => Any) = cmp(left.eval(f), right.eval(f)) val args = List(left, right) diff --git a/test/files/run/patmat-no-inline-isEmpty.scala b/test/files/run/patmat-no-inline-isEmpty.scala index 3af510134c70..52fb76d1ccf4 100644 --- a/test/files/run/patmat-no-inline-isEmpty.scala +++ b/test/files/run/patmat-no-inline-isEmpty.scala @@ -24,8 +24,8 @@ object Test extends DirectTest { |} """.stripMargin - def show(): Unit = Console.withErr(System.out) { - compileString(newCompiler("-usejavacp"))(depCode) - compileString(newCompiler("-usejavacp", "-cp", testOutput.path, "-Vprint:patmat"))(code) + def show(): Unit = { + compileString(newCompiler())(depCode) + compileString(newCompiler("-cp", testOutput.path, "-Vprint:patmat"))(code) } } diff --git a/test/files/run/patmat-no-inline-unapply.scala b/test/files/run/patmat-no-inline-unapply.scala index bd6a5541cdd6..1ce9994c30d2 100644 --- a/test/files/run/patmat-no-inline-unapply.scala +++ b/test/files/run/patmat-no-inline-unapply.scala @@ -16,8 +16,8 @@ object Test extends DirectTest { |} """.stripMargin - def show(): Unit = Console.withErr(System.out) { - compileString(newCompiler("-usejavacp"))(depCode) - compileString(newCompiler("-usejavacp", "-cp", testOutput.path, "-Vprint:patmat"))(code) + def show(): Unit = { + compileString(newCompiler())(depCode) + compileString(newCompiler("-cp", testOutput.path, "-Vprint:patmat"))(code) } } diff --git a/test/files/run/patmat-origtp-switch.scala b/test/files/run/patmat-origtp-switch.scala index 8451d31bac83..c890ee13601c 100644 --- a/test/files/run/patmat-origtp-switch.scala +++ b/test/files/run/patmat-origtp-switch.scala @@ -12,9 +12,5 @@ object Test extends DirectTest { } """ - override def show(): Unit = { - 
Console.withErr(System.out) { - compile() - } - } + override def show(): Unit = compile() } diff --git a/test/files/run/patmat-seq.scala b/test/files/run/patmat-seq.scala index c0319298b45b..874656ab6d66 100644 --- a/test/files/run/patmat-seq.scala +++ b/test/files/run/patmat-seq.scala @@ -51,9 +51,5 @@ object Test extends DirectTest { |} """.stripMargin - override def show(): Unit = { - Console.withErr(System.out) { - compile() - } - } + override def show(): Unit = compile() } diff --git a/test/files/run/productElementName-oob.check b/test/files/run/productElementName-oob.check deleted file mode 100644 index 1d73c804feb4..000000000000 --- a/test/files/run/productElementName-oob.check +++ /dev/null @@ -1,11 +0,0 @@ -java.lang.IndexOutOfBoundsException: 99 -scala.runtime.Statics.ioobe -CaseClass.productElementName -Test$.delayedEndpoint$Test$1 -Test$delayedInit$body.apply - -java.lang.IndexOutOfBoundsException: 99 is out of bounds (min 0, max -1 -scala.Product.productElementName -scala.Product.productElementName$ -CaseObject$.productElementName -Test$.delayedEndpoint$Test$1 diff --git a/test/files/run/productElementName-oob.scala b/test/files/run/productElementName-oob.scala deleted file mode 100644 index 52702a4a5165..000000000000 --- a/test/files/run/productElementName-oob.scala +++ /dev/null @@ -1,25 +0,0 @@ -case class CaseClass(a: String, b: Int) -case object CaseObject - -object Test extends App { - - try { - CaseClass("foo", 123).productElementName(99) - } catch { - case e: IndexOutOfBoundsException => - println(e) - e.getStackTrace.take(4).foreach(s => println(s.toString.takeWhile(_ != '('))) - } - - println() - - try { - CaseObject.productElementName(99) - } catch { - case e: IndexOutOfBoundsException => - println(e) - e.getStackTrace.take(4).foreach(s => println(s.toString.takeWhile(_ != '('))) - } - -} - diff --git a/test/files/run/productElementName.scala b/test/files/run/productElementName.scala index ff9a2e4dac33..18dcaad0935a 100644 --- a/test/files/run/productElementName.scala +++ b/test/files/run/productElementName.scala @@ -1,3 +1,7 @@ +// scalac: -Xsource:3 +import scala.tools.testkit.AssertUtil.assertThrown +import scala.util.chaining.* +import org.junit.Assert.assertEquals case class User(name: String, age: Int) @@ -14,15 +18,12 @@ case class Symbols(:: : String, || : Int) case class MultipleParamLists(a: String, b: Int)(c: Boolean) case class AuxiliaryConstructor(a: String, b: Int) { - def this(x: String) = { - this(x, 123) - } + def this(x: String) = this(x, 123) } case class OverloadedApply(a: String, b: Int) object OverloadedApply { - def apply(x: String): OverloadedApply = - new OverloadedApply(x, 123) + def apply(x: String): OverloadedApply = new OverloadedApply(x, 123) } case class DefinesProductElementName(a: String, b: Int) { @@ -46,32 +47,60 @@ case class InheritsProductElementName_Override_SelfType(a: String, b: Int) exten case class PrivateMembers(a: Int, private val b: Int, c: Int, private val d: Int, e: Int, private val f: Int) +case class ImplicitParameter[A: Ordering](a: String, b: Int)(c: A) + +case object CaseObject + object Test extends App { - def pretty(p: Product): String = - p.productElementNames.zip(p.productIterator) - .map { case (name, value) => s"$name=$value" } - .mkString(p.productPrefix + "(", ", ", ")") - - println(pretty(User("Susan", 42))) - println(pretty(ćƒ¦ćƒ¼ć‚¶("Susan", 42))) - println(pretty(U$er("Susan", 42))) - println(pretty(`type`("Susan", 42))) - println(pretty(`contains spaces`("Susan", 42))) - 
println(pretty(Symbols("Susan", 42))) - println(pretty(MultipleParamLists("Susan", 42)(true))) - println(pretty(AuxiliaryConstructor("Susan", 42))) - println(pretty(OverloadedApply("Susan"))) - println(pretty(DefinesProductElementName("Susan", 42))) + def verify(p: Product, checkName: Boolean = true): Unit = { + val iterated = p.productElementNames.zip(p.productIterator) + .map { case (name, value) => s"$name=$value" } + .mkString(p.productPrefix + "(", ", ", ")") + val indexed = (0 until p.productArity) + .map(i => s"${p.productElementName(i)}=${p.productElement(i)}") + .mkString(p.productPrefix + "(", ", ", ")") + assertEquals(iterated, indexed) + if (checkName) assertThrown[IndexOutOfBoundsException](_ => true)(p.productElementName(p.productArity + 1)) + println(iterated) + } + + verify(User("Susan", 42)) + verify(ćƒ¦ćƒ¼ć‚¶("Susan", 42)) + verify(U$er("Susan", 42)) + verify(`type`("Susan", 42)) + verify(`contains spaces`("Susan", 42)) + verify(Symbols("Susan", 42)) + verify(MultipleParamLists("Susan", 42)(true)) + verify(AuxiliaryConstructor("Susan", 42)) + verify(OverloadedApply("Susan")) + verify(DefinesProductElementName("Susan", 42), checkName = false) // uses the synthetic, not the one defined in the trait - println(pretty(InheritsProductElementName("Susan", 42))) + verify(InheritsProductElementName("Susan", 42)) // uses the override defined in the trait - println(pretty(InheritsProductElementName_Override("Susan", 42))) + verify(InheritsProductElementName_Override("Susan", 42), checkName = false) // uses the synthetic, not the one defined in the trait - println(pretty(InheritsProductElementName_Override_SelfType("Susan", 42))) + verify(InheritsProductElementName_Override_SelfType("Susan", 42)) - println(pretty(PrivateMembers(10, 20, 30, 40, 50, 60))) -} + verify(PrivateMembers(10, 20, 30, 40, 50, 60)) + // message check and probe for characteristic stack frames + def check(t: Throwable)(msg: String)(ms: String*): Boolean = + (t.getMessage == msg).tap(if (_) () else println(s"expected [$msg], got [${t.getMessage}]")) + && + ms.forall(m => t.getStackTrace.exists(f => m == s"${f.getClassName}.${f.getMethodName}")) + + //java.lang.IndexOutOfBoundsException: 99 + assertThrown[IndexOutOfBoundsException](check(_)("99")("scala.runtime.Statics.ioobe", "ImplicitParameter.productElementName")) { + ImplicitParameter("foo", 123)(42).productElementName(99) + } + assertThrown[IndexOutOfBoundsException](_ => true) { + ImplicitParameter("foo", 123)(42).productElementName(2) + } + //java.lang.IndexOutOfBoundsException: 99 is out of bounds (min 0, max -1 [sic] + assertThrown[IndexOutOfBoundsException](check(_)(s"99 is out of bounds (min 0, max -1)")("scala.Product.productElementName", "CaseObject$.productElementName")) { + CaseObject.productElementName(99) + } +} diff --git a/test/files/run/reflection-java-crtp/Main_2.scala b/test/files/run/reflection-java-crtp/Main_2.scala index 3199eaf5ffdb..b9361131023c 100644 --- a/test/files/run/reflection-java-crtp/Main_2.scala +++ b/test/files/run/reflection-java-crtp/Main_2.scala @@ -1,8 +1,8 @@ object Test extends App { import scala.reflect.runtime.universe._ - val enum = typeOf[JavaSimpleEnumeration_1].baseClasses(1).asClass + val `enum` = typeOf[JavaSimpleEnumeration_1].baseClasses(1).asClass // make sure that the E's in Enum<E extends Enum<E>> are represented by the same symbol - val e1 = enum.typeParams(0).asType + val e1 = `enum`.typeParams(0).asType val TypeBounds(_, TypeRef(_, _, List(TypeRef(_, e2: TypeSymbol, _)))) = e1.info println(e1, e2, e1 eq e2) } diff --git
a/test/files/run/reflection-magicsymbols-invoke.check b/test/files/run/reflection-magicsymbols-invoke.check index 7300a52e3068..6759edfecff3 100644 --- a/test/files/run/reflection-magicsymbols-invoke.check +++ b/test/files/run/reflection-magicsymbols-invoke.check @@ -64,10 +64,10 @@ testing Object.finalize: () testing Object.getClass: class java.lang.String testing Object.hashCode: 50 testing Object.ne: false -#partest !java15 +#partest !java15+ testing Object.notify: class java.lang.IllegalMonitorStateException: null testing Object.notifyAll: class java.lang.IllegalMonitorStateException: null -#partest java15 +#partest java15+ testing Object.notify: class java.lang.IllegalMonitorStateException: current thread is not owner testing Object.notifyAll: class java.lang.IllegalMonitorStateException: current thread is not owner #partest diff --git a/test/files/run/reflection-mem-glbs.javaopts b/test/files/run/reflection-mem-glbs.javaopts deleted file mode 100644 index 9ecdb8a4dafd..000000000000 --- a/test/files/run/reflection-mem-glbs.javaopts +++ /dev/null @@ -1 +0,0 @@ --Xmx512m \ No newline at end of file diff --git a/test/files/run/reflection-mem-glbs.scala b/test/files/run/reflection-mem-glbs.scala index 2a76f1db86b0..790a445cc6d7 100644 --- a/test/files/run/reflection-mem-glbs.scala +++ b/test/files/run/reflection-mem-glbs.scala @@ -1,3 +1,5 @@ +// java: -Xmx512m + import scala.tools.partest.MemoryTest trait A { type T <: A } diff --git a/test/files/run/reflection-mem-tags.javaopts b/test/files/run/reflection-mem-tags.javaopts deleted file mode 100644 index 9ecdb8a4dafd..000000000000 --- a/test/files/run/reflection-mem-tags.javaopts +++ /dev/null @@ -1 +0,0 @@ --Xmx512m \ No newline at end of file diff --git a/test/files/run/reflection-mem-tags.scala b/test/files/run/reflection-mem-tags.scala index 6ea3c34c86bf..0ae1b9406afb 100644 --- a/test/files/run/reflection-mem-tags.scala +++ b/test/files/run/reflection-mem-tags.scala @@ -1,3 +1,5 @@ +// java: -Xmx512m + import scala.tools.partest.MemoryTest trait A { type T <: A } diff --git a/test/files/run/reflection-package-name-conflict/Test.scala b/test/files/run/reflection-package-name-conflict/Test.scala index fafb27e09106..4f35a011417b 100644 --- a/test/files/run/reflection-package-name-conflict/Test.scala +++ b/test/files/run/reflection-package-name-conflict/Test.scala @@ -1,7 +1,7 @@ import reflect.runtime.universe._ object Test { - def main(args: Array[String]) { + def main(args: Array[String]): Unit = { for (clsName <- List("a.b1.c", "a.b2.c")) { println(rootMirror.classSymbol(Class.forName("a.b1.c"))) println(rootMirror.classSymbol(Class.forName("a.b2.c"))) diff --git a/test/files/run/reify_copypaste1.javaopts b/test/files/run/reify_copypaste1.javaopts deleted file mode 100644 index 9740f07b079b..000000000000 --- a/test/files/run/reify_copypaste1.javaopts +++ /dev/null @@ -1 +0,0 @@ --Dneeds.forked.jvm \ No newline at end of file diff --git a/test/files/run/reify_copypaste1.scala b/test/files/run/reify_copypaste1.scala index 12cc7dfe19d1..16b6ffed21c7 100644 --- a/test/files/run/reify_copypaste1.scala +++ b/test/files/run/reify_copypaste1.scala @@ -1,3 +1,5 @@ +// java: -Dneeds.forked.jvm + import scala.reflect.runtime._ import scala.reflect.runtime.universe._ import scala.reflect.runtime.universe.definitions._ diff --git a/test/files/run/repl-completions.check b/test/files/run/repl-completions.check index 90d463fdf751..224c7b7e3155 100644 --- a/test/files/run/repl-completions.check +++ b/test/files/run/repl-completions.check @@ -9,6 
+9,7 @@ scala> :completions O.x [completions] O.x_y_z scala> :completions O.x_y_x +[completions] O.x_y_x scala> :completions O.x_y_a @@ -27,6 +28,6 @@ scala> :completions object O2 { val x = O. [completions] object O2 { val x = O.x_y_z scala> :completions :completion -[completions] :completions +[completions] ::completions scala> :quit diff --git a/test/files/run/repl-trim-stack-trace.check b/test/files/run/repl-trim-stack-trace.check index 53609d85dcc5..ee27e0c4cec9 100644 --- a/test/files/run/repl-trim-stack-trace.check +++ b/test/files/run/repl-trim-stack-trace.check @@ -24,9 +24,9 @@ java.lang.Exception ... ??? elided scala> null.asInstanceOf -#partest !java15 +#partest !java15+ java.lang.NullPointerException -#partest java15 +#partest java15+ java.lang.NullPointerException: Cannot throw exception because the return value of "res3()" is null #partest at .lzycompute(<console>:8) diff --git a/test/files/run/sammy_java8.scala b/test/files/run/sammy_java8.scala deleted file mode 100644 index 39118486eddd..000000000000 --- a/test/files/run/sammy_java8.scala +++ /dev/null @@ -1,32 +0,0 @@ -import scala.tools.partest._ - -// java8 version of sammy_poly.scala -object Test extends CompilerTest { - import global._ - - override lazy val units: List[CompilationUnit] = { - // This test itself does not depend on JDK8. - javaCompilationUnits(global)(samSource) ++ - compilationUnits(global)(useSamSource) - } - - private def samSource = """ -// trait F[T, U] { def apply(x: T): U } -public interface F<T, U> { - U apply(T t); - default void yadayada() { - throw new UnsupportedOperationException("yadayada"); - } -} - """ - - private def useSamSource = """ -class T { - def app[T, U](x: T)(f: F[T, U]): U = f(x) - app(1)(x => List(x)) -} - """ - - // We're only checking we can compile it.
- def check(source: String, unit: global.CompilationUnit): Unit = () -} diff --git a/test/files/run/sbt-icode-interface.scala b/test/files/run/sbt-icode-interface.scala index f0281ccf63c2..1b7bd5a6acad 100644 --- a/test/files/run/sbt-icode-interface.scala +++ b/test/files/run/sbt-icode-interface.scala @@ -9,7 +9,7 @@ object Test extends DirectTest { """.trim def show(): Unit = { - val global = newCompiler("-usejavacp") + val global = newCompiler() import global._ val r = new Run r.compileSources(newSourceFile(code) :: Nil) diff --git a/test/files/run/sd187.check b/test/files/run/sd187.check index c8fcab58239d..2c97874a2a65 100644 --- a/test/files/run/sd187.check +++ b/test/files/run/sd187.check @@ -1,8 +1,8 @@ [[syntax trees at end of patmat]] // newSource1.scala [1:2302]package [1:1]<empty> { - [1:2302]class C extends [9:2302][2302]scala.AnyRef { - [2302]def <init>(): [9]C = [2302]{ - [2302][2302][2302]C.super.<init>(); + [1:2302]class C extends [9:2302][9]scala.AnyRef { + [9]def <init>(): [9]C = [9]{ + [9][9][9]C.super.<init>(); [9]() }; [103:904]def commonSubPattern([124:130]x: [127:130]): [107]AnyVal = [206:220]{ diff --git a/test/files/run/sd187.scala b/test/files/run/sd187.scala index d8892be7f1f0..be475a15e0c3 100644 --- a/test/files/run/sd187.scala +++ b/test/files/run/sd187.scala @@ -32,10 +32,5 @@ object Test extends DirectTest { |} |""".stripMargin - - override def show(): Unit = { - Console.withErr(System.out) { - compile() - } - } + override def show(): Unit = compile() } diff --git a/test/files/run/sd275.scala b/test/files/run/sd275.scala index 519558d1a552..b150b59afebe 100644 --- a/test/files/run/sd275.scala +++ b/test/files/run/sd275.scala @@ -24,7 +24,7 @@ package p1 { """ override def extraSettings = { - val classpath = List(sys.props("partest.lib"), testOutput.path) mkString sys.props("path.separator") + val classpath = pathOf(sys.props("partest.lib"), testOutput.path) s"-cp $classpath" } diff --git a/test/files/run/shutdownhooks.javaopts b/test/files/run/shutdownhooks.javaopts deleted file mode 100644 index 9740f07b079b..000000000000 --- a/test/files/run/shutdownhooks.javaopts +++ /dev/null @@ -1 +0,0 @@ --Dneeds.forked.jvm \ No newline at end of file diff --git a/test/files/run/shutdownhooks.scala b/test/files/run/shutdownhooks.scala index 518243598f97..1d22ea78380d 100644 --- a/test/files/run/shutdownhooks.scala +++ b/test/files/run/shutdownhooks.scala @@ -1,3 +1,5 @@ +// java: -Dneeds.forked.jvm + object Test { scala.sys.addShutdownHook { // sleep is added here so main#shutdown happens before this hook.
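Note: the recurring change in the test hunks above moves per-test JVM flags out of sibling *.javaopts files and into a // java: directive at the top of the test source, so the flags travel with the test itself. A hypothetical new test following that convention (the flags are illustrative, not taken from any one test) would start like:

// java: -Xmx512m -Dneeds.forked.jvm

object Test extends App {
  // partest is expected to read the JVM options from the header comment above
  // instead of a separate .javaopts file; the body is only a placeholder.
  println(sys.props.get("needs.forked.jvm").isDefined)
}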
diff --git a/test/files/run/splain-tree.check b/test/files/run/splain-tree.check new file mode 100644 index 000000000000..2e3c5b2597db --- /dev/null +++ b/test/files/run/splain-tree.check @@ -0,0 +1,47 @@ +newSource1.scala:28: error: implicit error; +!I e: tpes.I1 +i1a invalid because +!I p: tpes.I2 +――i2 invalid because + !I p: tpes.I3 +――――i3a invalid because + !I p: tpes.I4 +――――――i4 invalid because + !I p: tpes.I5 +――――――――i5 invalid because + !I p: tpes.I6 +――――――――――i6a invalid because + !I p: tpes.I7 +――――――――――――i7 invalid because + !I p: tpes.I8 +――――――――――――――i8 invalid because + !I p: tpes.I9 + +――――――――――i6b invalid because + !I p: tpes.I8 +――――――――――――i8 invalid because + !I p: tpes.I9 + +――――i3b invalid because + !I p: tpes.I4 +――――――i4 invalid because + !I p: tpes.I5 +――――――――i5 invalid because + !I p: tpes.I6 +――――――――――i6a invalid because + !I p: tpes.I7 +――――――――――――i7 invalid because + !I p: tpes.I8 +――――――――――――――i8 invalid because + !I p: tpes.I9 + +i1b invalid because +!I p: tpes.I6 +――i6a invalid because + !I p: tpes.I7 +――――i7 invalid because + !I p: tpes.I8 +――――――i8 invalid because + !I p: tpes.I9 + implicitly[I1] + ^ diff --git a/test/files/run/splain-tree.scala b/test/files/run/splain-tree.scala new file mode 100644 index 000000000000..d660ee85d3f2 --- /dev/null +++ b/test/files/run/splain-tree.scala @@ -0,0 +1,48 @@ +import scala.tools.partest._ + +object Test extends DirectTest { + override def extraSettings: String = "-usejavacp -Vimplicits -Vimplicits-verbose-tree" + + def code: String = "" + + def verboseTree: String = """ +object tpes +{ + trait I1 + trait I2 + trait I3 + trait I4 + trait I5 + trait I6 + trait I7 + trait I8 + trait I9 +} +import tpes._ + +object Tree +{ + implicit def i8(implicit p: I9): I8 = ??? + implicit def i7(implicit p: I8): I7 = ??? + implicit def i6a(implicit p: I7): I6 = ??? + implicit def i6b(implicit p: I8): I6 = ??? + implicit def i5(implicit p: I6): I5 = ??? + implicit def i4(implicit p: I5): I4 = ??? + implicit def i3a(implicit p: I4): I3 = ??? + implicit def i3b(implicit p: I4): I3 = ??? + implicit def i2(implicit p: I3): I2 = ??? + implicit def i1a(implicit p: I2): I1 = ??? + implicit def i1b(implicit p: I6): I1 = ??? + implicitly[I1] +} + """ + + def show(): Unit = { + val global = newCompiler() + + def run(code: String): Unit = + compileString(global)(code.trim) + + run(verboseTree) + } +} diff --git a/test/files/run/splain-truncrefined.check b/test/files/run/splain-truncrefined.check new file mode 100644 index 000000000000..bf112963fd65 --- /dev/null +++ b/test/files/run/splain-truncrefined.check @@ -0,0 +1,4 @@ +newSource1.scala:7: error: type mismatch; + TruncRefined.D|TruncRefined.C {...} + f(new D { type X = C; type Y = D }) + ^ diff --git a/test/files/run/splain-truncrefined.scala b/test/files/run/splain-truncrefined.scala new file mode 100644 index 000000000000..2be99a6350bb --- /dev/null +++ b/test/files/run/splain-truncrefined.scala @@ -0,0 +1,28 @@ +import scala.tools.partest._ + +object Test extends DirectTest { + override def extraSettings: String = "-usejavacp -Vimplicits -Vtype-diffs -Vimplicits-max-refined 5" + + def code: String = "" + + def truncrefined: String = """ +object TruncRefined +{ + class C + trait D + type CAux[A] = C { type X = C; type Y = D } + def f(arg1: CAux[D]) = ??? 
+ f(new D { type X = C; type Y = D }) +} + + """ + + def show(): Unit = { + val global = newCompiler() + + def run(code: String): Unit = + compileString(global)(code.trim) + + run(truncrefined) + } +} diff --git a/test/files/run/splain.check b/test/files/run/splain.check new file mode 100644 index 000000000000..9dbb8db96b7c --- /dev/null +++ b/test/files/run/splain.check @@ -0,0 +1,143 @@ +newSource1.scala:13: error: implicit error; +!I e: ImplicitChain.II +ImplicitChain.g invalid because +!I impPar3: ImplicitChain.I1 +ā‹® +――ImplicitChain.i1 invalid because + !I impPar7: ImplicitChain.I3 + implicitly[II] + ^ +newSource1.scala:6: error: type mismatch; + FoundReq.L|FoundReq.R + f(new L) + ^ +newSource1.scala:7: error: implicit error; +!I e: Bounds.F[Bounds.Arg] + implicitly[F[Arg]] + ^ +newSource1.scala:4: error: implicit error; +!I ec: scala.concurrent.ExecutionContext + Cannot find an implicit ExecutionContext. You might add + an (implicit ec: ExecutionContext) parameter to your method. + + The ExecutionContext is used to configure how and on which + thread pools asynchronous tasks (such as Futures) will run, + so the specific ExecutionContext that is selected is important. + + If your application does not define an ExecutionContext elsewhere, + consider using Scala's global ExecutionContext by defining + the following: + + implicit val ec: scala.concurrent.ExecutionContext = scala.concurrent.ExecutionContext.global + + long + ^ +newSource1.scala:10: error: implicit error; +!I e: java.lang.String +f invalid because +!I impPar4: + List[ + ( + InfixBreak.VeryLongTypeName :::: + InfixBreak.VeryLongTypeName :::: + InfixBreak.VeryLongTypeName :::: + InfixBreak.VeryLongTypeName + ) + :::: + (InfixBreak.Short :::: InfixBreak.Short) :::: + ( + InfixBreak.VeryLongTypeName :::: + InfixBreak.VeryLongTypeName :::: + InfixBreak.VeryLongTypeName :::: + InfixBreak.VeryLongTypeName + ) + :::: + InfixBreak.VeryLongTypeName :::: + InfixBreak.VeryLongTypeName :::: + InfixBreak.VeryLongTypeName :::: + InfixBreak.VeryLongTypeName + ] + (No implicit view available from Int => InfixBreak.T2.) + + implicitly[String] + ^ +newSource1.scala:11: error: implicit error; +!I e: + DeepHole.C1[ + DeepHole.T3[ + DeepHole.T1[List[java.lang.String], ?] + , + DeepHole.T2[DeepHole.Id, DeepHole.C4, ?] + , + ? + ] + ] + implicitly[C1[T3]] + ^ +newSource1.scala:9: error: implicit error; +!I e: Aux.F.Aux[Aux.C, Aux.D] +Aux.f invalid because +!I impPar10: Aux.C + implicitly[F.Aux[C, D]] + ^ +newSource1.scala:11: error: type mismatch; + Refined.A with Refined.B with Refined.E|Refined.C with Refined.F| {type X = scala.Int|java.lang.String; type Y = java.lang.String; type Z = |java.lang.String} + f(x) + ^ +newSource1.scala:25: error: type mismatch; + C.X.Y.T|B.X.Y.T + f(x: C.X.Y.T) + ^ +newSource1.scala:6: error: type mismatch; + scala.Int|(=> Foo.A) => Foo.B + f(1: Int) + ^ +newSource1.scala:3: error: type mismatch; + java.lang.String|Tuple1[java.lang.String] + val a: Tuple1[String] = "Tuple1": String + ^ +newSource1.scala:7: error: implicit error; +!I e: a.type *** b.type + implicitly[a.type *** b.type] + ^ +newSource1.scala:8: error: implicit error; +!I e: a.type *** b.type + implicitly[a.type *** b.type] + ^ +newSource1.scala:6: error: implicit error; +!I e: a.type *** b.type + implicitly[a.type *** b.type] + ^ +newSource1.scala:5: error: implicit error; +!I ev: scala.math.Ordering[java.lang.Object] + No implicit Ordering[Object] found to build a SortedSet[Object]. You may want to upcast to a Set[Int] first by calling `unsorted`. 
+ +Ordering.ordered invalid because +!I asComparable: java.lang.Object => java.lang.Comparable[_$2] + No implicit view available from Object => Comparable[_ >: Object]. + +ā‹® +Ordering.comparatorToOrdering invalid because +!I cmp: java.util.Comparator[java.lang.Object] + ms.map(_ => o) + ^ +newSource1.scala:9: error: implicit error; +!I e: List[a.TypeA] + (No implicit view available from Int => a.TypeA.) + + implicitly[List[TypeA]] + ^ +newSource1.scala:10: error: implicit error; +!I e: Seq[a.b.TypeB] + (No implicit view available from Int => a.b.TypeB.) + + implicitly[Seq[TypeB]] + ^ +newSource1.scala:11: error: implicit error; +!I e: Iterable[a.b.c.TypeC] + implicitly[Traversable[TypeC]] + ^ +newSource1.scala:12: error: implicit error; +!I e: Iterator[a.b.c.d.TypeD] + implicitly[Iterator[TypeD]] + ^ diff --git a/test/files/run/splain.scala b/test/files/run/splain.scala new file mode 100644 index 000000000000..57f3b4ef569e --- /dev/null +++ b/test/files/run/splain.scala @@ -0,0 +1,245 @@ +import scala.tools.partest._ + +object Test +extends DirectTest +{ + override def extraSettings: String = "-usejavacp -Vimplicits -Vtype-diffs" + + def code: String = "" + + def chain: String = """ +object ImplicitChain +{ + trait I1 + trait I2 + trait I3 + trait I4 + trait II + implicit def i1(implicit impPar7: I3): I1 = ??? + implicit def i2a(implicit impPar8: I3): I2 = ??? + implicit def i2b(implicit impPar8: I3): I2 = ??? + implicit def i4(implicit impPar9: I2): I4 = ??? + implicit def g(implicit impPar3: I1, impPar1: I4): II = ??? + implicitly[II] +} + """ + + def foundReq: String = """ +object FoundReq +{ + class L + type R + def f(r: R): Int = ??? + f(new L) +} + """ + + def bounds: String = """ +object Bounds +{ + trait Base + trait Arg + trait F[A] + implicit def g[A <: Base, B]: F[A] = ??? + implicitly[F[Arg]] +} + """ + + def longAnnotationMessage: String = """ +object Long +{ + def long(implicit ec: concurrent.ExecutionContext): Unit = ??? + long +} + """ + + def longInfix: String = """ +object InfixBreak +{ + type ::::[A, B] + trait VeryLongTypeName + trait Short + type T1 = VeryLongTypeName :::: VeryLongTypeName :::: VeryLongTypeName :::: + VeryLongTypeName + type T2 = T1 :::: (Short :::: Short) :::: T1 :::: T1 + implicit def f(implicit impPar4: List[T2]): String = ??? + implicitly[String] +} + """ + + def deeplyNestedHole: String = """ +object DeepHole +{ + trait C1[F[_]] + trait C2[F[_], G[_], A] + trait C3[A, B] + trait C4[A] + type Id[A] = A + type T1[X] = C3[List[String], X] + type T2[Y] = C2[Id, C4, Y] + type T3[Z] = C2[T1, T2, Z] + implicitly[C1[T3]] +} + """ + + def auxType: String = """ +object Aux +{ + trait C + trait D + trait F + object F { type Aux[A, B] = F { type X = A; type Y = B } } + implicit def f[A, B](implicit impPar10: C): F { type X = A; type Y = B } = + ??? + implicitly[F.Aux[C, D]] +} + """ + + def refined: String = """ +object Refined +{ + trait A + trait B + trait C + trait D + trait E + trait F + def f(a: A with B with C { type Y = String; type X = String; type Z = String }): Unit = ??? + val x: B with E with A with F { type X = Int; type Y = String } = ??? + f(x) +} + """ + + def disambiguateQualified: String = """ +object A +{ + object B + { + object X + { + object Y + { + type T + } + } + } + object C + { + object X + { + object Y + { + type T + } + } + } + def f(a: B.X.Y.T): Unit = () + val x: C.X.Y.T = ??? 
+ f(x: C.X.Y.T) +} + """ + + def bynameParam: String = """ +object Foo +{ + type A + type B + def f(g: (=> A) => B): Unit = () + f(1: Int) +} + """ + + def tuple1: String = """ +object Tup1 +{ + val a: Tuple1[String] = "Tuple1": String +} + """ + + def singleType: String = """ +object SingleImp +{ + class ***[A, B] + val a = 1 + val b = 2 + + implicitly[a.type *** b.type] +} + """ + + def singleTypeInFunction: String = """ +object SingleImp +{ + class ***[A, B] + def fn(): Unit = { + val a = 1 + val b = 2 + + implicitly[a.type *** b.type] + } +} + """ + + def singleTypeWithFreeSymbol: String = """ +object SingleImp +{ + class ***[A, B] + def fn[A, B](a: A, b: B) = { + + implicitly[a.type *** b.type] + } +} + """ + + def parameterAnnotation: String = """ + import collection.{mutable => m, immutable => i} + object Test { + val o = new Object + val ms = m.SortedSet(1,2,3) + ms.map(_ => o) + } + """ + + def shorthandTypes: String = """ +object a { + type TypeA + object b { + type TypeB + object c { + type TypeC + object d { + type TypeD + implicitly[List[TypeA]] + implicitly[Seq[TypeB]] + implicitly[Traversable[TypeC]] + implicitly[Iterator[TypeD]] + } + } + } +} +""" + + def show(): Unit = { + val global = newCompiler() + + def run(code: String): Unit = + compileString(global)(code.trim) + + run(chain) + run(foundReq) + run(bounds) + run(longAnnotationMessage) + run(longInfix) + run(deeplyNestedHole) + run(auxType) + run(refined) + run(disambiguateQualified) + run(bynameParam) + run(tuple1) + run(singleType) + run(singleTypeInFunction) + run(singleTypeWithFreeSymbol) + run(parameterAnnotation) + run(shorthandTypes) + } +} diff --git a/test/files/run/stream-gc.check b/test/files/run/stream-gc.check index 1f954e63c641..202f49c8ebab 100644 --- a/test/files/run/stream-gc.check +++ b/test/files/run/stream-gc.check @@ -1 +1 @@ -warning: 5 deprecations (since 2.13.0); re-run with -deprecation for details +warning: 6 deprecations (since 2.13.0); re-run with -deprecation for details diff --git a/test/files/run/stream-gc.javaopts b/test/files/run/stream-gc.javaopts deleted file mode 100644 index 58ba19b41eff..000000000000 --- a/test/files/run/stream-gc.javaopts +++ /dev/null @@ -1 +0,0 @@ --Xmx5M -Xms5M diff --git a/test/files/run/stream-gc.scala b/test/files/run/stream-gc.scala index 699ab621de0b..182ba3244b70 100644 --- a/test/files/run/stream-gc.scala +++ b/test/files/run/stream-gc.scala @@ -1,3 +1,5 @@ +// java: -Xmx5M -Xms5M + import scala.collection.immutable._ object Test extends App { @@ -6,4 +8,5 @@ object Test extends App { Stream.tabulate(100)(_ => new Array[AnyRef](10000)).collectFirst { case x if false => x } Stream.tabulate(100)(_ => new Array[AnyRef](10000)).collectFirst { case x if false => x } Stream.tabulate(100)(_ => new Array[AnyRef](10000)).collectFirst { case x if false => x } + Stream.tabulate(100)(_ => new Array[AnyRef](10000)).iterator.foreach(_ => ()) } diff --git a/test/files/run/string-switch-pos.check b/test/files/run/string-switch-pos.check index 6b292d0a3808..27ea7da767af 100644 --- a/test/files/run/string-switch-pos.check +++ b/test/files/run/string-switch-pos.check @@ -1,11 +1,11 @@ [[syntax trees at end of patmat]] // newSource1.scala -[0:187]package [0:0] { - [0:187]class Switch extends [13:187][187]scala.AnyRef { - [187]def (): [13]Switch = [187]{ - [187][187][187]Switch.super.(); +[0:216]package [0:0] { + [0:216]class Switch extends [13:216][13]scala.AnyRef { + [13]def (): [13]Switch = [13]{ + [13][13][13]Switch.super.(); [13]() }; - [17:185]def switch([28:37]s: 
[31:37], [39:52]cond: [45:52]): [21]Int = [56:57]{ + [17:214]def switch([28:37]s: [31:37], [39:52]cond: [45:52]): [21]Int = [56:57]{ [56:57]case val x1: [56]String = [56:57]s; [56:57][56:57]x1 match { [56:57]case [75:81]"AaAa" => [93:94]1 @@ -14,6 +14,7 @@ [151:152]3 else [180:181]4 + [56:57]case [56:57]([191:197]"CcCc"| [200:205]"Cc2") => [209:210]5 [56:57]case [56:57]_ => [56:57]throw [56:57][56:57][56:57]new [56:57]MatchError([56:57]x1) } } @@ -21,42 +22,44 @@ } [[syntax trees at end of cleanup]] // newSource1.scala -[0:187]package [0:0] { - [0:187]class Switch extends [13:187][13:187]Object { - [17:185]def switch([28:37]s: [31:37], [39:52]cond: [45:52]): [21]Int = [56:57]{ +[0:216]package [0:0] { + [0:216]class Switch extends [13:216][13:216]Object { + [17:214]def switch([28:37]s: [31:37], [39:52]cond: [45:52]): [21]Int = [56:57]{ [56:57]case val x1: [56]String = [56:57]s; [56:57]{ [56:139][56:57]if ([56][56]x1.eq([56]null)) [56]0 else [56][56]x1.hashCode() match { + [56:57]case [56]67506 => [56:57]if ([56][56][56]"Cc2".equals([56]x1)) + [56][56]case4() + else + [56][56]defaultCase1() [75:81]case [56]2031744 => [75:81]if ([75][75][75]"AaAa".equals([75]x1)) - [75][75]case1() + [93:94][75]matchEnd1([93:94]1) else - [56][56]matchEnd2() + [56][56]defaultCase1() [133:139]case [56]2062528 => [133:139]if ([133][133][133]"BbBb".equals([133]x1)) - [133][133]case3() + [143:181][133]matchEnd1([143:181]if ([143:147]cond) + [151:152]3 + else + [180:181]4) + else + [56][56]defaultCase1() + [56:57]case [56]2093312 => [56:57]if ([56][56][56]"CcCc".equals([56]x1)) + [56][56]case4() else - [56][56]matchEnd2() + [56][56]defaultCase1() [104:110]case [56]3003444 => [104:110]if ([104][104][104]"asdf".equals([104]x1)) - [104][104]case2() + [122:123][104]matchEnd1([122:123]2) else - [56][56]matchEnd2() - [56]case [56]_ => [56][56]matchEnd2() - }; - [56]case1(){ - [56][56]matchEnd1([93:94]1) + [56][56]defaultCase1() + [56]case [56]_ => [56][56]defaultCase1() }; - [56]case2(){ - [56][56]matchEnd1([122:123]2) - }; - [56]case3(){ - [56][56]matchEnd1([143:181]if ([143:147]cond) - [151:152]3 - else - [180:181]4) + [56]case4(){ + [56][56]matchEnd1([209:210]5) }; - [56]matchEnd2(){ + [56]defaultCase1(){ [56][56]matchEnd1([56:57]throw [56:57][56:57][56:57]new [56:57]MatchError([56:57]x1)) }; [56]matchEnd1(x$1: [NoPosition]Int){ @@ -64,8 +67,8 @@ } } }; - [187]def (): [13]Switch = [187]{ - [187][187][187]Switch.super.(); + [13]def (): [13]Switch = [13]{ + [13][13][13]Switch.super.(); [13]() } } diff --git a/test/files/run/string-switch-pos.scala b/test/files/run/string-switch-pos.scala index a75208046391..b8d8c7ad1a9f 100644 --- a/test/files/run/string-switch-pos.scala +++ b/test/files/run/string-switch-pos.scala @@ -10,9 +10,10 @@ object Test extends DirectTest { | case "asdf" => 2 | case "BbBb" if cond => 3 | case "BbBb" => 4 + | case "CcCc" | "Cc2" => 5 | } |} """.stripMargin.trim - override def show(): Unit = Console.withErr(Console.out) { super.compile() } -} \ No newline at end of file + override def show(): Unit = compile() +} diff --git a/test/files/run/t10016.check b/test/files/run/t10016.check new file mode 100644 index 000000000000..7457fcc9b22c --- /dev/null +++ b/test/files/run/t10016.check @@ -0,0 +1,8 @@ + +scala> def existWith(x: (List[T] forSome {type T}) with Int {def xxx: Int}) = ??? +def existWith(x: List[_] with Int{def xxx: Int}): Nothing + +scala> def existKeepsAnnot(x: (List[T] forSome {type T})@SerialVersionUID(1L) with Int {def xxx: Int}) = ??? 
+def existKeepsAnnot(x: List[Any] @SerialVersionUID(value = 1L) with Int{def xxx: Int}): Nothing + +scala> :quit diff --git a/test/files/run/t10016.scala b/test/files/run/t10016.scala new file mode 100644 index 000000000000..113046527a04 --- /dev/null +++ b/test/files/run/t10016.scala @@ -0,0 +1,11 @@ +import scala.tools.partest.ReplTest + +// check that we don't lose the annotation on the existential type nested in an intersection type +// it's okay that List[_] is represented as List[Any] -- they are equivalent due to variance (existential extrapolation) +// (The above comment should not be construed as an endorsement of rewrapping as a great way to implement a bunch of different type "proxies") +object Test extends ReplTest { + def code = """ + |def existWith(x: (List[T] forSome {type T}) with Int {def xxx: Int}) = ??? + |def existKeepsAnnot(x: (List[T] forSome {type T})@SerialVersionUID(1L) with Int {def xxx: Int}) = ??? + """.stripMargin +} diff --git a/test/files/run/t10094.check b/test/files/run/t10094.check new file mode 100644 index 000000000000..45b983be36b7 --- /dev/null +++ b/test/files/run/t10094.check @@ -0,0 +1 @@ +hi diff --git a/test/files/run/t10094.scala b/test/files/run/t10094.scala new file mode 100644 index 000000000000..74f507e447d8 --- /dev/null +++ b/test/files/run/t10094.scala @@ -0,0 +1,11 @@ +trait T[@specialized(Int) S] { + def initialValue: S + var value: S = initialValue +} + +final class C[@specialized(Int) S](val initialValue: S) extends T[S] + +object Test { + def main(args: Array[String]): Unit = + println(new C("hi").initialValue) +} diff --git a/test/files/run/t10203.check b/test/files/run/t10203.check index d7fa5ca5de37..c97fe36a70b7 100644 --- a/test/files/run/t10203.check +++ b/test/files/run/t10203.check @@ -1,8 +1,8 @@ [[syntax trees at end of typer]] // newSource1.scala [0:88]package [0:0]<empty> { - [0:88]object X extends [9:88][88]scala.AnyRef { - [88]def <init>(): [9]X.type = [88]{ - [88][88][88]X.super.<init>(); + [0:88]object X extends [9:88][9]scala.AnyRef { + [9]def <init>(): [9]X.type = [9]{ + [9][9][9]X.super.<init>(); [9]() }; [17:24][17:18][17:18]D.selectDynamic[[17]Nothing](<19:24>"aaaaa"); diff --git a/test/files/run/t10203.scala b/test/files/run/t10203.scala index 2ad060399d85..c718ee7995c7 100644 --- a/test/files/run/t10203.scala +++ b/test/files/run/t10203.scala @@ -14,11 +14,7 @@ object Test extends DirectTest { } """.trim - override def show(): Unit = { - Console.withErr(System.out) { - compile() - } - } + override def show(): Unit = compile() } import language.dynamics diff --git a/test/files/run/t10344.scala b/test/files/run/t10344.scala index dfcf1f442451..fbbc8a871c88 100644 --- a/test/files/run/t10344.scala +++ b/test/files/run/t10344.scala @@ -13,9 +13,5 @@ object t10344 { } """ - override def show(): Unit = { - Console.withErr(System.out) { - compile() - } - } + override def show(): Unit = compile() } diff --git a/test/files/run/t10363.scala b/test/files/run/t10363.scala new file mode 100644 index 000000000000..7d1462c9ced8 --- /dev/null +++ b/test/files/run/t10363.scala @@ -0,0 +1,31 @@ +trait Foo[A, B] +object Foo { + type Bar[A] = Foo[A, _] +} + +trait Base[M[_]] { + def method(in: M[_]): Unit +} + +class Concrete extends Base[Foo.Bar] { + def method(in: Foo.Bar[_]): Unit = {} +} + +trait Template[M[_]] { + def toBeImplemented: Base[M] + def mark[A]: M[A] + + def method2(): Unit = { + toBeImplemented.method(mark[Nothing]) + } +} + +class Impl extends Template[Foo.Bar] { + def toBeImplemented: Base[Foo.Bar] = new Concrete + def mark[A]: Foo.Bar[A] =
new Foo[A, Nothing] {} +} + +object Test { + def main(args: Array[String]): Unit = + (new Impl).method2() +} diff --git a/test/files/run/t10751.check b/test/files/run/t10751.check index 41c811ac2537..0142b6896a14 100644 --- a/test/files/run/t10751.check +++ b/test/files/run/t10751.check @@ -1,8 +1,8 @@ [[syntax trees at end of typer]] // newSource1.scala [0:201]package [0:0] { - [0:201]object Test extends [12:201][201]scala.AnyRef { - [201]def (): [12]Test.type = [201]{ - [201][201][201]Test.super.(); + [0:201]object Test extends [12:201][12]scala.AnyRef { + [12]def (): [12]Test.type = [12]{ + [12][12][12]Test.super.(); [12]() }; [20:43]private[this] val n: [38]Int = [42:43]1; diff --git a/test/files/run/t10751.scala b/test/files/run/t10751.scala index dd6fbbd5dcc0..bcef4e169a3f 100644 --- a/test/files/run/t10751.scala +++ b/test/files/run/t10751.scala @@ -23,11 +23,7 @@ object Test extends DirectTest { } """.trim - override def show(): Unit = { - Console.withErr(System.out) { - compile() - } - } + override def show(): Unit = compile() } class C { diff --git a/test/files/run/t11385.scala b/test/files/run/t11385.scala index 5f66a6ddeb5b..a46985706f70 100644 --- a/test/files/run/t11385.scala +++ b/test/files/run/t11385.scala @@ -12,7 +12,7 @@ object Test extends DirectTest { def show() = assert { val tmp = createTempDirectory("t11385") val pkg = createDirectories(tmp.resolve("acme").resolve("C").resolve("sub")) - compile("-usejavacp", "-classpath", tmp.toString) + compile("-classpath", tmp.toString) } } diff --git a/test/files/run/t11534b.scala b/test/files/run/t11534b.scala new file mode 100644 index 000000000000..75e835bed9a3 --- /dev/null +++ b/test/files/run/t11534b.scala @@ -0,0 +1,24 @@ +object Test { + case class O(i: Int) { + class A + class B extends A { + def bOuter = O.this + } + trait C { + def cOuter = O.this + } + class D extends o2.B with C + } + val o1 = new O(1); + val o2 = new O(2); + def pat1(a: Test.o1.C) = a match { + case b: Test.o1.B => + assert(b.bOuter eq Test.o1, + s"expected ${o1} as outer of value conforming to pattern `b: Test.o1.B`, but got ${b.bOuter}") + case _ => + + } + def main(args: Array[String]): Unit = { + pat1(new o1.D) + } +} diff --git a/test/files/run/t11534c.scala b/test/files/run/t11534c.scala new file mode 100644 index 000000000000..a1fbaf0d72e6 --- /dev/null +++ b/test/files/run/t11534c.scala @@ -0,0 +1,135 @@ +// scalac: -unchecked +import scala.util.Try + +object Test { + class O(val i: Int) { + class A { + val aOuter = i + } + + class B1 extends A { + val b1Outer = i + } + } + class M(i: Int) extends O(i) { + class B2 extends m2.A { + val b2Outer = i + } + + def pat1(a: M.this.A) = a match { + case b: M.this.B1 => // can elide outer check, (a : m1.A) && (a : O#B1) implies (a : m1.B1) + assertOuter(m1.i, b.b1Outer) + true + case _ => + false + } + def pat2(a: m2.A) = a match { + case b: M.this.B2 => // needs runtime outer check + assertOuter(m1.i, b.b2Outer) + true + case _ => + false + } + def pat3(a: M.this.B1) = a match { + case b: M.this.A => // can elide outer check, (a : m1.B1) && (a : O#A) implies (a : m1.B1) + assertOuter(m1.i, b.aOuter) + true + case _ => + false + } + def pat4(a: M.this.B2) = a match { + case b: m2.A => // can elide outer check, (a : m1.B2) implies (a : m2.A) + assertOuter(m2.i, b.aOuter) + true + case _ => + false + } + } + + val m1 = new M(1); + val m2 = new M(2); + + def pat1(a: m1.A) = a match { + case b: m1.B1 => // can elide outer check, (a : m1.A) && (a : O#B1) implies (a : m1.B1) + assertOuter(m1.i, 
b.b1Outer) + true + case _ => + false + } + def pat2(a: m2.A) = a match { + case b: m1.B2 => // needs runtime outer check + assertOuter(m1.i, b.b2Outer) + true + case _ => + false + } + def pat3(a: m1.B1) = a match { + case b: m1.A => // can elide outer check, (a : m1.B1) && (a : O#A) implies (a : m1.B1) + assertOuter(m1.i, b.aOuter) + true + case _ => + false + } + def pat4(a: m1.B2) = a match { + case b: m2.A => // can elide outer check, (a : m1.B2) implies (a : m2.A) + assertOuter(m2.i, b.aOuter) + true + case _ => + false + } + + def pat5(a: M#B2) = a match { + case b: m2.A => // can elide outer check, (a : A#B2) implies (a : m2.A) + assertOuter(m2.i, b.aOuter) + true + case _ => + false + } + + trait ScalaProvider { def loader: Int } + type ScalaProvider2 = { def loaderLibraryOnly: Int } + import scala.language.reflectiveCalls + + def cb1400(provider: ScalaProvider) = try { + provider match { + case p: ScalaProvider2 @unchecked => p.loaderLibraryOnly + } + } catch { + case _: NoSuchMethodException => provider.loader + } + + def assertOuter(expected: Int, actual: Int): Unit = { + if (expected != actual) throw WrongOuter(expected, actual) + } + case class WrongOuter(expected: Int, actual: Int) extends RuntimeException(s"expected: $expected, actual: $actual") + + def main(args: Array[String]): Unit = { + assert(pat1(new m1.B1)) + assert(m1.pat1(new m1.B1)) + assert(Try(pat1((new m2.B1).asInstanceOf[m1.B1])).failed.get == WrongOuter(m1.i, m2.i)) + assert(Try(m1.pat1((new m2.B1).asInstanceOf[m1.B1])).failed.get == WrongOuter(m1.i, m2.i)) + + assert(!pat2(new m2.B2)) + assert(!m1.pat2(new m2.B2)) + assert(pat2(new m1.B2)) + assert(m1.pat2(new m1.B2)) + + assert(pat3(new m1.B1)) + assert(m1.pat3(new m1.B1)) + assert(Try(pat3((new m2.B1).asInstanceOf[m1.B1])).failed.get == WrongOuter(m1.i, m2.i)) + assert(Try(m1.pat3((new m2.B1).asInstanceOf[m1.B1])).failed.get == WrongOuter(m1.i, m2.i)) + + assert(pat4(new m1.B2)) + assert(m1.pat4(new m1.B2)) + assert(pat4((new m2.B2).asInstanceOf[m1.B2])) + assert(m1.pat4((new m2.B2).asInstanceOf[m1.B2])) + + assert(pat5(new m1.B2)) + assert(pat5(new m2.B2)) + + class SP1 extends ScalaProvider { def loader = 1 } + class SP2 extends ScalaProvider { def loader = 1; def loaderLibraryOnly = 2 } + assert(cb1400(new SP1()) == 1) + assert(cb1400(new SP2()) == 2) + } +} diff --git a/test/files/run/t11731.scala b/test/files/run/t11731.scala index d52a9dc94ab7..3973c5a8e682 100644 --- a/test/files/run/t11731.scala +++ b/test/files/run/t11731.scala @@ -35,7 +35,7 @@ object Test extends DirectTest { private def fakeSbt = new sbt.FakeSbt override def show() = { - val global = newCompiler("-usejavacp", "-feature") + val global = newCompiler("-feature") def checkMsg(): Unit = assert(global.reporter.asInstanceOf[StoreReporter].infos.head.msg.contains("postfix operator")) diff --git a/test/files/run/t11924.check b/test/files/run/t11924.check new file mode 100644 index 000000000000..edee5862ce53 --- /dev/null +++ b/test/files/run/t11924.check @@ -0,0 +1,12 @@ +B1-a +B1-b +B2 +A +B3-a +B3-b +A +B4-a +B4-b +B5-a +B5-b +A diff --git a/test/files/run/t11924.scala b/test/files/run/t11924.scala new file mode 100644 index 000000000000..56211a1bffb2 --- /dev/null +++ b/test/files/run/t11924.scala @@ -0,0 +1,93 @@ +package pkg { + class A { + protected def f(): Unit = println("A") + } +} + +import pkg.A + +trait B1 { self: A => + private[this] var go = true + override def f(): Unit = if (go) { + go = false + println("B1-a") + self.f() + } else + println("B1-b") +} + +trait B2 extends 
A { + override def f(): Unit = { + println("B2") + super.f() + } +} + +trait B3 extends A { self: A => + private[this] var go = true + override def f(): Unit = if (go) { + go = false + println("B3-a") + self.f() + } else { + println("B3-b") + super.f() + } +} + +class C1 extends A with B1 +class C2 extends A with B2 +class C3 extends A with B3 + +// test case from pull request comment + +package l1 { + class I { + class A { + protected def f(): Unit = println("A") + } + } + object O extends I +} + +package l2 { + class I { + trait B4 { self: l1.O.A => + private[this] var go = true + override def f(): Unit = if (go) { + go = false + println("B4-a") + self.f() + } else { + println("B4-b") + } + } + + trait B5 extends l1.O.A { self: l1.O.A => + private[this] var go = true + override def f(): Unit = if (go) { + go = false + println("B5-a") + self.f() + } else { + println("B5-b") + super.f() + } + } + } + object O extends I +} + +class C4 extends l1.O.A with l2.O.B4 +class C5 extends l1.O.A with l2.O.B5 + + +object Test { + def main(args: Array[String]): Unit = { + new C1().f() + new C2().f() + new C3().f() + new C4().f() + new C5().f() + } +} diff --git a/test/files/run/t12062.check b/test/files/run/t12062.check index c578003008d7..c0456326b804 100644 --- a/test/files/run/t12062.check +++ b/test/files/run/t12062.check @@ -1,7 +1,7 @@ warning: 6 deprecations (since 2.13.0); re-run with -deprecation for details class TestByte -source-newSource1.scala,line-28 TestByte.super.() +source-newSource1.scala,line-2 TestByte.super.() source-newSource1.scala,line-3 1.toByte() source-newSource1.scala,line-6 java.lang.Byte.toString(TestByte.this.value()) source-newSource1.scala,line-6 TestByte.this.value() @@ -51,7 +51,7 @@ source-newSource1.scala,line-27 TestByte.this.value() class TestShort -source-newSource2.scala,line-28 TestShort.super.() +source-newSource2.scala,line-2 TestShort.super.() source-newSource2.scala,line-3 1.toShort() source-newSource2.scala,line-6 java.lang.Short.toString(TestShort.this.value()) source-newSource2.scala,line-6 TestShort.this.value() @@ -101,7 +101,7 @@ source-newSource2.scala,line-27 TestShort.this.value() class TestInt -source-newSource3.scala,line-28 TestInt.super.() +source-newSource3.scala,line-2 TestInt.super.() source-newSource3.scala,line-3 1.toInt() source-newSource3.scala,line-6 java.lang.Integer.toString(TestInt.this.value()) source-newSource3.scala,line-6 TestInt.this.value() @@ -150,7 +150,7 @@ source-newSource3.scala,line-27 TestInt.this.value() class TestLong -source-newSource4.scala,line-28 TestLong.super.() +source-newSource4.scala,line-2 TestLong.super.() source-newSource4.scala,line-3 1.toLong() source-newSource4.scala,line-6 java.lang.Long.toString(TestLong.this.value()) source-newSource4.scala,line-6 TestLong.this.value() @@ -200,7 +200,7 @@ source-newSource4.scala,line-27 TestLong.this.value() class TestBoolean -source-newSource5.scala,line-9 TestBoolean.super.() +source-newSource5.scala,line-2 TestBoolean.super.() source-newSource5.scala,line-6 java.lang.Boolean.toString(TestBoolean.this.value()) source-newSource5.scala,line-6 TestBoolean.this.value() source-newSource5.scala,line-7 java.lang.Boolean.hashCode(TestBoolean.this.value()) @@ -209,7 +209,7 @@ source-newSource5.scala,line-8 TestBoolean.this.value() class TestChar -source-newSource6.scala,line-9 TestChar.super.() +source-newSource6.scala,line-2 TestChar.super.() source-newSource6.scala,line-6 java.lang.Character.toString(TestChar.this.value()) source-newSource6.scala,line-6 
TestChar.this.value() source-newSource6.scala,line-7 java.lang.Character.hashCode(TestChar.this.value()) @@ -219,7 +219,7 @@ source-newSource6.scala,line-8 TestChar.this.value() class TestFloat -source-newSource7.scala,line-39 TestFloat.super.() +source-newSource7.scala,line-2 TestFloat.super.() source-newSource7.scala,line-3 1.toFloat() source-newSource7.scala,line-6 java.lang.Float.toString(TestFloat.this.value()) source-newSource7.scala,line-6 TestFloat.this.value() @@ -296,7 +296,7 @@ source-newSource7.scala,line-38 TestFloat.this.value() class TestDouble -source-newSource8.scala,line-39 TestDouble.super.() +source-newSource8.scala,line-2 TestDouble.super.() source-newSource8.scala,line-3 1.toDouble() source-newSource8.scala,line-6 java.lang.Double.toString(TestDouble.this.value()) source-newSource8.scala,line-6 TestDouble.this.value() diff --git a/test/files/run/t12071.scala b/test/files/run/t12071.scala new file mode 100644 index 000000000000..5950647a1526 --- /dev/null +++ b/test/files/run/t12071.scala @@ -0,0 +1,28 @@ +// scalac: -Werror -Xlint -Xsource:3 + +class C { + def `c c`(n: Int): Int = n + 1 +} + +// backticked operator is candidate for multiline infix, +// but backticked value is an innocent bystander. +// +class t12071 { + def c: C = new C + def i: Int = 42 + def `n n`: Int = 27 + def f = c + `c c` i + def g = i + + `n n` + def basic = + 1 + + 2 +} + +object Test extends App { + val t = new t12071 + assert(t.f == 43) + assert(t.g == 69) + assert(t.basic == 3) +} diff --git a/test/files/run/t12221.check b/test/files/run/t12221.check new file mode 100644 index 000000000000..d3827e75a5ca --- /dev/null +++ b/test/files/run/t12221.check @@ -0,0 +1 @@ +1.0 diff --git a/test/files/run/t12221.scala b/test/files/run/t12221.scala new file mode 100644 index 000000000000..8a52989b84fc --- /dev/null +++ b/test/files/run/t12221.scala @@ -0,0 +1,16 @@ +object Test { + trait GenericBase[T] { + def init: T + val value: T = init + def get: T = value + } + + class SpecializedClass[@specialized(Double)T](x: T) extends GenericBase[T] { + override def init: T = x + } + + def main(args: Array[String]): Unit = { + val x = new SpecializedClass(1.0) + println(x.get) + } +} \ No newline at end of file diff --git a/test/files/run/t12222.check b/test/files/run/t12222.check new file mode 100644 index 000000000000..573541ac9702 --- /dev/null +++ b/test/files/run/t12222.check @@ -0,0 +1 @@ +0 diff --git a/test/files/run/t12222/Buffer_1.scala b/test/files/run/t12222/Buffer_1.scala new file mode 100644 index 000000000000..353ecdd4af27 --- /dev/null +++ b/test/files/run/t12222/Buffer_1.scala @@ -0,0 +1,10 @@ +trait ABuffer[@specialized(Float)T] { + def count: Int +} + +class Buffer[@specialized(Float) T](array_par: Array[T]) extends ABuffer[T] { + var array: Array[T] = array_par + var count: Int = 0 +} + +class Float32Buffer(array_par: Array[Float]) extends Buffer[Float](array_par) \ No newline at end of file diff --git a/test/files/run/t12222/Test_2.scala b/test/files/run/t12222/Test_2.scala new file mode 100644 index 000000000000..a5c975cd349e --- /dev/null +++ b/test/files/run/t12222/Test_2.scala @@ -0,0 +1,7 @@ +object Test { + def main(args: Array[String]): Unit = { + val vertices = Array[Float]() + val attribute = new Float32Buffer(vertices) + println(attribute.count) + } +} \ No newline at end of file diff --git a/test/files/run/t12276.scala b/test/files/run/t12276.scala index 50ef6b0edc5e..36fbbbc6c558 100644 --- a/test/files/run/t12276.scala +++ b/test/files/run/t12276.scala @@ -1,6 +1,7 @@ 
import scala.tools.nsc.Settings import scala.tools.nsc.interpreter.shell.{ILoop, ShellConfig} -import scala.tools.partest.{hexdump, ReplTest} +import scala.tools.partest.ReplTest +import scala.tools.testkit.AssertUtil.hexdump object Test extends ReplTest { def code = s""" diff --git a/test/files/run/t12290.check b/test/files/run/t12290.check new file mode 100644 index 000000000000..00d93b3657dd --- /dev/null +++ b/test/files/run/t12290.check @@ -0,0 +1,61 @@ +==== +A text + +==== + + +

+<html>
+    <body>
+        <p>Hello, world</p>
+    </body>
+</html>
+
+====
+SELECT "EMP_ID", "LAST_NAME" FROM "EMPLOYEE_TB"
+WHERE "CITY" = 'INDIANAPOLIS'
+ORDER BY "EMP_ID", "LAST_NAME";
+
+====
+<html>
+    <body>
+        <p>Hello, world</p>
+    </body>
+</html>
+
+====
+    <html>
+        <body>
+            <p>Hello, world</p>
+        </body>
+    </html>
+
+====
+<html>
+    <body>
+        <p>Hello, world</p>
+    </body>
+</html>
+
+
+====
+<html>
+
+    <body>        <p>Hello ,	world</p>
+    </body>
+</html>
+ + + +==== + this line has 4 tabs before it + this line has 5 spaces before it and space after it + this line has 2 tabs and 3 spaces before it +  this line has 6 spaces before it + +==== +String text = """ + A text block inside a text block +"""; + +==== +foo bar +baz +==== + +==== diff --git a/test/files/run/t12290/Test.scala b/test/files/run/t12290/Test.scala new file mode 100644 index 000000000000..13b01b51478c --- /dev/null +++ b/test/files/run/t12290/Test.scala @@ -0,0 +1,30 @@ +// javaVersion: 15+ +/* Using `valueOf` is a way to check that the Java string literals were properly + * parsed, since the parsed value is what the Scala compiler will use when + * resolving the singleton types + */ +object Test extends App { + println("====") + println(valueOf[TextBlocks.aText.type]) + println("====") + println(valueOf[TextBlocks.html1.type]) + println("====") + println(valueOf[TextBlocks.query.type]) + println("====") + println(valueOf[TextBlocks.html2.type]) + println("====") + println(valueOf[TextBlocks.html3.type]) + println("====") + println(valueOf[TextBlocks.html4.type]) + println("====") + println(valueOf[TextBlocks.html5.type]) + println("====") + println(valueOf[TextBlocks.mixedIndents.type]) + println("====") + println(valueOf[TextBlocks.code.type]) + println("====") + println(valueOf[TextBlocks.simpleString.type]) + println("====") + println(valueOf[TextBlocks.emptyString.type]) + println("====") +} diff --git a/test/files/run/t12290/TextBlocks.java b/test/files/run/t12290/TextBlocks.java new file mode 100644 index 000000000000..e1928e74c971 --- /dev/null +++ b/test/files/run/t12290/TextBlocks.java @@ -0,0 +1,78 @@ +// javaVersion: 15+ +class TextBlocks { + + final static String aText = """ + A text + """; + + final static String html1 = """ + + +

+                <html>
+                    <body>
+                        <p>Hello, world</p>
+                    </body>
+                </html>
+                """;
+
+    // quote characters are unescaped
+    final static String query = """
+                SELECT "EMP_ID", "LAST_NAME" FROM "EMPLOYEE_TB"
+                WHERE "CITY" = 'INDIANAPOLIS'
+                ORDER BY "EMP_ID", "LAST_NAME";
+                """;
+
+    // incidental trailing spaces
+    final static String html2 = """
+                <html>
+                    <body>
+                        <p>Hello, world</p>
+                    </body>
+                </html>
+                """;
+
+    // trailing delimiter influences
+    final static String html3 = """
+                <html>
+                    <body>
+                        <p>Hello, world</p>
+                    </body>
+                </html>
+            """;
+
+    // blank line does not affect
+    final static String html4 = """
+                <html>
+                    <body>
+                        <p>Hello, world</p>
+                    </body>
+                </html>
+
+                """;
+
+    // escape sequences
+    final static String html5 = """
+                <html>\n
+                    <body>\
+                        <p>Hello\s,\tworld</p>
+                    </body>
+                </html>
+ + + """; + + // mixed indentation + final static String mixedIndents = """ + \s this line has 4 tabs before it + this line has 5 spaces before it and space after it \u0020 \u000C\u0020 \u001E + this line has 2 tabs and 3 spaces before it +\u0020 \u000C\u0020 \u001E this line has 6 spaces before it + """; + + final static String code = + """ + String text = \""" + A text block inside a text block + \"""; + """; + + final static String simpleString = "foo\tbar\nbaz"; + + final static String emptyString = ""; +} diff --git a/test/files/run/t12312.scala b/test/files/run/t12312.scala new file mode 100644 index 000000000000..3ad1c4542b75 --- /dev/null +++ b/test/files/run/t12312.scala @@ -0,0 +1,25 @@ +class A { object X } + +class C { + val a, b = new A; import a.X + class D { + def isInstanceOf_aX(z: AnyRef) = z.isInstanceOf[X.type] + class E { + def isInstanceOf_aX(z: AnyRef) = z.isInstanceOf[X.type] + } + } +} + +object Test extends C { + def main(args: Array[String]): Unit = { + val d = new D() + assert(d.isInstanceOf_aX(a.X)) + assert(!d.isInstanceOf_aX(b.X)) + assert(!d.isInstanceOf_aX(new Object)) + + val e = new d.E() + assert(e.isInstanceOf_aX(a.X)) + assert(!e.isInstanceOf_aX(b.X)) + assert(!e.isInstanceOf_aX(new Object)) + } +} diff --git a/test/files/run/t12348.scala b/test/files/run/t12348.scala new file mode 100644 index 000000000000..fdbb4d9465df --- /dev/null +++ b/test/files/run/t12348.scala @@ -0,0 +1,9 @@ +// javaVersion: 11+ + +object Test { + def main(args: Array[String]): Unit = { + val a = new Array[Object](1) + val h = java.lang.invoke.MethodHandles.arrayElementVarHandle(a.getClass) + val r = h.setVolatile(a, 0, "foo") // important: no expected type + } +} diff --git a/test/files/run/t12380/A.java b/test/files/run/t12380/A.java new file mode 100644 index 000000000000..1cdbd7e83bbf --- /dev/null +++ b/test/files/run/t12380/A.java @@ -0,0 +1,28 @@ +// filter: unchecked + +package p; + +public class A { + public static interface I { + public I w(); + } + + public static interface J> extends I { + @Override public R w(); + } + + public static interface K extends I { + @Override public K w(); + + public default String mK() { return "K"; } + } + + /* package-private */ static class B> implements J { + @Override public R w() { return (R) this; } + } + + public static class C> extends B implements J { } + + // OK in Java, also OK in Scala + public static class Test extends C implements K { } +} diff --git a/test/files/run/t12380/Test.scala b/test/files/run/t12380/Test.scala new file mode 100644 index 000000000000..abab74cde7da --- /dev/null +++ b/test/files/run/t12380/Test.scala @@ -0,0 +1,7 @@ +class Test extends p.A.C[Test] with p.A.K +object Test { + def main(args: Array[String]): Unit = { + assert((new Test).w.mK == "K") + assert((new p.A.Test).w.mK == "K") + } +} diff --git a/test/files/run/t12403.scala b/test/files/run/t12403.scala new file mode 100644 index 000000000000..76342193e786 --- /dev/null +++ b/test/files/run/t12403.scala @@ -0,0 +1,9 @@ + +object Test extends App { + val xs = + Array.empty[Double] + val ys = + Array(0.0) + assert(xs.intersect(ys).getClass.getComponentType == classOf[Double]) + assert(Array.empty[Double].intersect(Array(0.0)).getClass.getComponentType == classOf[Double]) +} diff --git a/test/files/run/t12405.check b/test/files/run/t12405.check new file mode 100644 index 000000000000..439f2ccf16b9 --- /dev/null +++ b/test/files/run/t12405.check @@ -0,0 +1,96 @@ +[[syntax trees at end of patmat]] // newSource1.scala +package { + final class C[A] 
extends scala.AnyVal { + private[this] val x: A = _; + def x: A = C.this.x; + def (x: A): C[A] = { + C.super.(); + () + }; + def isEmpty: Boolean = C.isEmpty$extension[A](C.this); + def get: A = C.get$extension[A](C.this); + override def hashCode(): Int = C.hashCode$extension[A](C.this)(); + override def equals(x$1: Any): Boolean = C.equals$extension[A](C.this)(x$1) + }; + object C extends scala.AnyRef { + def (): C.type = { + C.super.(); + () + }; + def unapply[T](c: C[T]): C[T] = c; + final def isEmpty$extension[A]($this: C[A]): Boolean = scala.Predef.???; + final def get$extension[A]($this: C[A]): A = scala.Predef.???; + final def hashCode$extension[A]($this: C[A])(): Int = $this.x.hashCode(); + final def equals$extension[A]($this: C[A])(x$1: Any): Boolean = { + case val x1: Any = x$1; + case5(){ + if (x1.isInstanceOf[C[$this._]]) + matchEnd4(true) + else + case6() + }; + case6(){ + matchEnd4(false) + }; + matchEnd4(x: Boolean){ + x + } +}.&&({ + val C$1: C[A] = x$1.asInstanceOf[C[A]]; + $this.x.==(C$1.x) + }) + }; + class Test extends scala.AnyRef { + def (): Test = { + Test.super.(); + () + }; + def m1(a: Any): Any = { + case val x1: Any = a; + case6(){ + if (x1.isInstanceOf[C[T]]) + { + val x2: C[T] = (x1.asInstanceOf[C[T]]: C[T]); + { + val o8: C[T] = C.unapply[T](x2); + if (o8.isEmpty.unary_!) + { + val x: T = o8.get; + matchEnd5(x) + } + else + case7() + } + } + else + case7() + }; + case7(){ + matchEnd5(null) + }; + matchEnd5(x: Any){ + x + } + }; + def m2(c: C[String]): String = { + case val x1: C[String] = c; + case5(){ + val o7: C[String] = C.unapply[String](x1); + if (o7.isEmpty.unary_!) + { + val x: String = o7.get; + matchEnd4(x) + } + else + case6() + }; + case6(){ + matchEnd4("") + }; + matchEnd4(x: String){ + x + } + } + } +} + diff --git a/test/files/run/t12405.scala b/test/files/run/t12405.scala new file mode 100644 index 000000000000..f506bd062c38 --- /dev/null +++ b/test/files/run/t12405.scala @@ -0,0 +1,28 @@ +import scala.tools.partest._ + +object Test extends DirectTest { + override def extraSettings: String = "-usejavacp -Vprint:patmat -Ystop-after:patmat" + + override val code = + """final class C[A](val x: A) extends AnyVal { + | def isEmpty: Boolean = ??? + | def get: A = ??? 
+ |} + |object C { + | def unapply[T](c: C[T]): C[T] = c + |} + |class Test { + | def m1(a: Any) = a match { + | case C(x) => x + | case _ => null + | } + | + | def m2(c: C[String]) = c match { + | case C(x) => x + | case _ => "" + | } + |} + |""".stripMargin + + override def show(): Unit = compile() +} diff --git a/test/files/run/t12481.check b/test/files/run/t12481.check new file mode 100644 index 000000000000..39d6696135de --- /dev/null +++ b/test/files/run/t12481.check @@ -0,0 +1,2 @@ +Test$Universe[_ <: Any] +Test$Universe[] diff --git a/test/files/run/t12481.scala b/test/files/run/t12481.scala new file mode 100644 index 000000000000..8407c634ef54 --- /dev/null +++ b/test/files/run/t12481.scala @@ -0,0 +1,6 @@ +object Test extends App { + trait Txn[T <: Txn[T]] + trait Universe[T <: Txn[T]] + println(implicitly[Manifest[Universe[_]]]) + println(implicitly[OptManifest[Universe[_]]]) +} diff --git a/test/files/run/t12490.scala b/test/files/run/t12490.scala new file mode 100644 index 000000000000..422ef3fb4222 --- /dev/null +++ b/test/files/run/t12490.scala @@ -0,0 +1,33 @@ +import scala.tools.partest._ +import scala.collection.mutable.LinkedHashMap + +object Test extends CompilerTest { + import global._ + override def extraSettings = super.extraSettings + " -Yrangepos -Ystop-after:parser" + val tests = LinkedHashMap( + "class A { def t = new C() }" -> (24, 31), + "class B { def t = (new C) }" -> (25, 30), + "class C { def t = new C }" -> (24, 29), + "class D { def t = new C().t }" -> (24, 33), + "class E { def t = (new C).t }" -> (24, 33), + "class F { def t(c: C) = c }" -> (24, 25), + "class G { def t(c: C) = (c) }" -> (25, 26), + "class H { def t(c: C) = c.t }" -> (24, 27), + "class I { def t(c: C) = (c).t }" -> (24, 29), + "class J { def t[T]: C = (x.t)[C] }" -> (24, 32), + "class K { def t(f: F) = (f) t c }" -> (24, 31), + "class L { def t(c: C) = (c) t }" -> (24, 29), + // ^ 24 ^ 33 + ) + + override def sources = tests.toList.map(_._1) + + def check(source: String, unit: CompilationUnit): Unit = unit.body foreach { + case dd: DefDef if dd.name.startsWith("t") => + val pos = dd.rhs.pos + val (start, end) = tests(source) + assert(pos.start == start, pos.start) + assert(pos.end == end, pos.end) + case _ => + } +} diff --git a/test/files/run/t1406.scala b/test/files/run/t1406.scala new file mode 100644 index 000000000000..c027771716a8 --- /dev/null +++ b/test/files/run/t1406.scala @@ -0,0 +1,32 @@ + +import scala.tools.partest.DirectTest + +object Test extends DirectTest { + // for reference, UTF-8 of U0 + //val data = Array(0xed, 0xa0, 0x81).map(_.asInstanceOf[Byte]) + def U0 = "\ud801" + def U1 = "\udc00" + // \u10428 isLetter and isLowerCase + def U2 = "\ud801" + def U3 = "\udc28" + def code = + s"""class C { + | def x = "$U0" + | def y = "$U1" + | def `$U0` = x + | def `$U1` = y + | + | def f(x: Any): Boolean = x match { + | case ${U2}${U3}XYZ: String => true + | case $U2$U3 => true + | } + | def g(x: Any) = x match { + | case $U2$U3 @ _ => $U2$U3 + | } + |}""".stripMargin + + def show(): Unit = { + assert(U0.length == 1) + assert(compile()) + } +} diff --git a/test/files/run/t1406b.check b/test/files/run/t1406b.check new file mode 100644 index 000000000000..407e44adf89d --- /dev/null +++ b/test/files/run/t1406b.check @@ -0,0 +1,6 @@ +newSource1.scala:4: error: illegal character '\ud801' missing low surrogate + def ? = x + ^ +newSource1.scala:5: error: illegal character '\udc00' + def ? 
= y + ^ diff --git a/test/files/run/t1406b.scala b/test/files/run/t1406b.scala new file mode 100644 index 000000000000..bd1868a642fb --- /dev/null +++ b/test/files/run/t1406b.scala @@ -0,0 +1,22 @@ + +import scala.tools.partest.DirectTest + +object Test extends DirectTest { + // for reference, UTF-8 of U0 + //val data = Array(0xed, 0xa0, 0x81).map(_.asInstanceOf[Byte]) + def U0 = "\ud801" + def U1 = "\udc00" + def code = + s"""class C { + | def x = "$U0" + | def y = "$U1" + | def $U0 = x + | def $U1 = y + |}""".stripMargin + + def show(): Unit = { + assert(U0.length == 1) + assert(!compile()) + } +} + diff --git a/test/files/run/t2318.javaopts b/test/files/run/t2318.javaopts deleted file mode 100644 index 8bf493ce91e6..000000000000 --- a/test/files/run/t2318.javaopts +++ /dev/null @@ -1 +0,0 @@ --Ddummy=fresh_jvm_needed_to_test_security_manager \ No newline at end of file diff --git a/test/files/run/t2318.scala b/test/files/run/t2318.scala index bce56f6be33f..ac30df0e902e 100644 --- a/test/files/run/t2318.scala +++ b/test/files/run/t2318.scala @@ -1,12 +1,17 @@ +// java: -Ddummy=fresh_jvm_needed_to_test_security_manager // filter: WARNING.* // for now, ignore warnings due to reflective invocation import java.security._ import scala.language.reflectiveCalls +import scala.annotation.nowarn + +// SecurityManager is deprecated on JDK 17, so we sprinkle `@nowarn` around object Test { trait Bar { def bar: Unit } + @nowarn("cat=deprecation") object Mgr extends SecurityManager { def allowedProperty(name: String) = name == "sun.net.inetaddr.ttl" || @@ -28,6 +33,8 @@ object Test { def doDestroy( obj : Destroyable ) : Unit = obj.destroy(); doDestroy( p ); } + + @nowarn("cat=deprecation") def t2() = { System.setSecurityManager(Mgr) diff --git a/test/files/run/t3613.scala b/test/files/run/t3613.scala index 1293f62c0fd4..d8a6a862c925 100644 --- a/test/files/run/t3613.scala +++ b/test/files/run/t3613.scala @@ -1,15 +1,14 @@ class Boopy { - private val s = new Schnuck - def observer : PartialFunction[ Any, Unit ] = s.observer + private val s = new Schnuck + def observer : PartialFunction[ Any, Unit ] = s.observer - private class Schnuck extends javax.swing.AbstractListModel { - model => - val observer : PartialFunction[ Any, Unit ] = { - case "Boopy" => fireIntervalAdded( model, 0, 1 ) - } - def getSize = 0 - def getElementAt( idx: Int ) = ??? + private class Schnuck extends javax.swing.AbstractListModel[AnyRef] { model => + val observer : PartialFunction[ Any, Unit ] = { + case "Boopy" => fireIntervalAdded( model, 0, 1 ) } + def getSize = 0 + def getElementAt(idx: Int): AnyRef = null + } } diff --git a/test/files/run/t4415.scala b/test/files/run/t4415.scala index 5892b0c16def..8a196b516dfd 100644 --- a/test/files/run/t4415.scala +++ b/test/files/run/t4415.scala @@ -3,7 +3,7 @@ * * Exception in thread "main" java.lang.VerifyError: (class: ExtractorIssue$$, method: convert signature: (LTopProperty;)LMyProp;) Accessing value from uninitialized register 5 * at ExtractorIssue.main(ExtractorIssue.scala) - * at com.intellij.rt.execution.application.AppMain.main(AppMain.java:115)] + * at com.intellij.rt.execution.application.AppMain.main(AppMain.java)] * * If lines 15/16 are present, the compiler crashes: * diff --git a/test/files/run/t4511.check b/test/files/run/t4511.check new file mode 100644 index 000000000000..a1e2647d215e --- /dev/null +++ b/test/files/run/t4511.check @@ -0,0 +1 @@ +? 
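An aside on the t2318 change above, which adds @nowarn("cat=deprecation") around the SecurityManager uses because SecurityManager is deprecated on JDK 17: the sketch below is illustrative only, not part of the patch (NowarnSketch, old and useOld are made-up names). It shows how a category filter confines the suppression to a single definition.

import scala.annotation.nowarn

object NowarnSketch {
  @deprecated("use something newer", since = "0.1")
  def old(): Int = 1

  // silences only the deprecation warning triggered inside this method,
  // which would otherwise surface under -deprecation / -Xlint
  @nowarn("cat=deprecation")
  def useOld(): Int = old()
}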
diff --git a/test/files/run/t4511.scala b/test/files/run/t4511.scala new file mode 100644 index 000000000000..e014b16b8fa8 --- /dev/null +++ b/test/files/run/t4511.scala @@ -0,0 +1,9 @@ +class B[@specialized(Int) T](t: T) { + val a = t + val b = "?" +} + +object Test { + def main(args: Array[String]): Unit = + println(new B(42).b) +} diff --git a/test/files/run/t4841-no-plugin.scala b/test/files/run/t4841-no-plugin.scala index 8105278ca3a0..d10cddc60ff1 100644 --- a/test/files/run/t4841-no-plugin.scala +++ b/test/files/run/t4841-no-plugin.scala @@ -7,8 +7,6 @@ import java.io.File object Test extends DirectTest { override def code = "class Code" - override def extraSettings = s"-usejavacp" - override def show() = { val tmp = new File(testOutput.jfile, "plugins.partest").getAbsolutePath compile("-Xdev", s"-Xplugin:$tmp", "-Xpluginsdir", tmp) diff --git a/test/files/run/t4930.scala b/test/files/run/t4930.scala index 46705729a1d2..dbd5dac43eba 100644 --- a/test/files/run/t4930.scala +++ b/test/files/run/t4930.scala @@ -2,7 +2,7 @@ import collection.immutable.SortedMap import scala.math.Ordering.Implicits._ object Test { - implicit val ord: Ordering[Array[Byte]] = Ordering.by(x => x.toIterable: collection.Seq[Byte]) + implicit val ord: Ordering[Array[Byte]] = Ordering.by(x => x: collection.Seq[Byte]) def main(args: Array[String]): Unit = { val m = SortedMap(Array[Byte](1) -> 0) diff --git a/test/files/run/t5045.scala b/test/files/run/t5045.scala index a539e3a4cb19..994469f01303 100644 --- a/test/files/run/t5045.scala +++ b/test/files/run/t5045.scala @@ -4,8 +4,8 @@ object Test extends App { import scala.util.matching.{ Regex, UnanchoredRegex } val dateP1 = """(\d\d\d\d)-(\d\d)-(\d\d)""".r.unanchored - val dateP2 = """(\d\d\d\d)-(\d\d)-(\d\d)""".r("year", "month", "day").unanchored - val dateP3 = new Regex("""(\d\d\d\d)-(\d\d)-(\d\d)""", "year", "month", "day") with UnanchoredRegex + val dateP2 = """(?\d\d\d\d)-(?\d\d)-(?\d\d)""".r.unanchored + val dateP3 = new Regex("""(?\d\d\d\d)-(?\d\d)-(?\d\d)""") with UnanchoredRegex val yearStr = "2011" val dateStr = List(yearStr,"07","15").mkString("-") diff --git a/test/files/run/t5064.check b/test/files/run/t5064.check index 9d2c9a3bdec8..78ab21e65f5c 100644 --- a/test/files/run/t5064.check +++ b/test/files/run/t5064.check @@ -7,9 +7,9 @@ newSource1.scala:5: warning: a pure expression does nothing in statement positio newSource1.scala:6: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses Nil ^ -[53] T5064.super.() -[53] T5064.super. -[53] this +[12] T5064.super.() +[12] T5064.super. 
+[12] this [16:23] scala.`package`.List().apply(scala.runtime.ScalaRunTime.wrapIntArray(Array[Int]{1})) [16:20] scala.`package`.List().apply <16:20> scala.`package`.List() diff --git a/test/files/run/t5463.scala b/test/files/run/t5463.scala index 30b8306156d3..db710beff70d 100644 --- a/test/files/run/t5463.scala +++ b/test/files/run/t5463.scala @@ -12,7 +12,7 @@ object Test extends DirectTest { val classpath = List(sys.props("partest.lib"), jarpath, testOutput.path) mkString sys.props("path.separator") try { - compileString(newCompiler("-cp", classpath, "-d", testOutput.path))(code) + compileString(newCompiler("-cp", classpath))(code) throw new Error("Compilation should have failed"); } catch { case ex: FatalError => // this is expected diff --git a/test/files/run/t5545.scala b/test/files/run/t5545.scala index 3b46bbb6422c..0faf87a943af 100644 --- a/test/files/run/t5545.scala +++ b/test/files/run/t5545.scala @@ -3,9 +3,9 @@ import java.io._ object Test extends DirectTest { - override def extraSettings: String = "-usejavacp -d " + testOutput.path + " -cp " + testOutput.path + override def extraSettings: String = s"-usejavacp -cp ${testOutput.path}" - override def code = """ + override def code = s""" // scala/bug#5545 trait F[@specialized(Int) T1, R] { def f(v1: T1): R @@ -14,12 +14,8 @@ object Test extends DirectTest { """.trim override def show(): Unit = { - // redirect err to out, for logging - val prevErr = System.err - System.setErr(System.out) compile() // the bug manifests at the second compilation, when the bytecode is already there compile() - System.setErr(prevErr) } } diff --git a/test/files/run/t5603.check b/test/files/run/t5603.check index c9ebb69ececb..14ee478343c4 100644 --- a/test/files/run/t5603.check +++ b/test/files/run/t5603.check @@ -1,6 +1,6 @@ [[syntax trees at end of parser]] // newSource1.scala [0:241]package [0:0] { - [0:82]abstract trait Greeting extends [15:82][83]scala.AnyRef { + [0:82]abstract trait Greeting extends [15:82][15]scala.AnyRef { [15]def $init$() = [15]{ [15]() }; diff --git a/test/files/run/t5603.scala b/test/files/run/t5603.scala index c047fe7896b5..2e36639931f8 100644 --- a/test/files/run/t5603.scala +++ b/test/files/run/t5603.scala @@ -7,7 +7,7 @@ import scala.tools.nsc.reporters.ConsoleReporter object Test extends DirectTest { - override def extraSettings: String = "-usejavacp -Vprint:parser -Ystop-after:parser -d " + testOutput.path + override def extraSettings: String = "-usejavacp -Vprint:parser -Ystop-after:parser" override def code = """ trait Greeting { @@ -24,13 +24,7 @@ object Test extends DirectTest { object Test extends App {} """.trim - override def show(): Unit = { - // redirect err to out, for logging - val prevErr = System.err - System.setErr(System.out) - compile() - System.setErr(prevErr) - } + override def show(): Unit = compile() override def newCompiler(args: String*): Global = { diff --git a/test/files/run/t5717.scala b/test/files/run/t5717.scala index 5e3b94656476..805e3f99203c 100644 --- a/test/files/run/t5717.scala +++ b/test/files/run/t5717.scala @@ -2,12 +2,10 @@ import scala.tools.partest._ import java.io.File object Test extends StoreReporterDirectTest { - def code = ??? 
+ def code = "package a { class B }" + + override def extraSettings: String = s"-cp ${pathOf(sys.props("partest.lib"), testOutput.path)}" - def compileCode(code: String) = { - val classpath = List(sys.props("partest.lib"), testOutput.path) mkString sys.props("path.separator") - compileString(newCompiler("-cp", classpath, "-d", testOutput.path))(code) - } // TODO // Don't assume output is on physical disk // Let the compiler tell us output dir @@ -16,7 +14,7 @@ object Test extends StoreReporterDirectTest { def show(): Unit = { // Don't crash when we find a file 'a' where package 'a' should go. scala.reflect.io.File(testOutput.path + "/a").writeAll("a") - compileCode("package a { class B }") + compile() val List(i) = filteredInfos // for some reason, nio doesn't throw the same exception on windows and linux/mac import File.separator diff --git a/test/files/run/t5905-features.scala b/test/files/run/t5905-features.scala index 5d92961931ea..d411f7d92148 100644 --- a/test/files/run/t5905-features.scala +++ b/test/files/run/t5905-features.scala @@ -7,8 +7,6 @@ import tools.partest.DirectTest object Test extends DirectTest { override def code = "class Code { def f = (1 to 10) size }" // exercise a feature to sanity-check coverage of -language options - override def extraSettings = s"-usejavacp -d ${testOutput.path}" - override def show() = { val global = newCompiler("-Ystop-after:typer") compileString(global)("") // warm me up, scotty diff --git a/test/files/run/t5905b-features.scala b/test/files/run/t5905b-features.scala index 627df8334b89..938d26e4f247 100644 --- a/test/files/run/t5905b-features.scala +++ b/test/files/run/t5905b-features.scala @@ -5,8 +5,6 @@ import tools.partest.DirectTest object Test extends DirectTest { override def code = "class Code" - override def extraSettings = s"-usejavacp -d ${testOutput.path}" - override def show() = { //compile("-language", "--") // no error compile(s"-language:noob") diff --git a/test/files/run/t5938.scala b/test/files/run/t5938.scala index 7a3093102a70..58ce964f9d0c 100644 --- a/test/files/run/t5938.scala +++ b/test/files/run/t5938.scala @@ -3,7 +3,7 @@ import scala.tools.partest.DirectTest object Test extends DirectTest { override def extraSettings: String = - s"-usejavacp -d ${testOutput.path} -cp ${testOutput.path} -d ${testOutput.path}" + s"-usejavacp -cp ${testOutput.path}" override def code = """ object O extends C { @@ -15,11 +15,9 @@ object O extends C { override def show(): Unit = { val global = newCompiler() - Console.withErr(System.out) { - compileString(global)(code) - compileString(global)(code) - loadClass // was "duplicate name and signature in class X" - } + compileString(global)(code) + compileString(global)(code) + loadClass // was "duplicate name and signature in class X" } def loadClass: Class[_] = { diff --git a/test/files/run/t5940.scala b/test/files/run/t5940.scala index 0c0e39f603c0..d86585e8720a 100644 --- a/test/files/run/t5940.scala +++ b/test/files/run/t5940.scala @@ -17,8 +17,8 @@ object Test extends DirectTest { } """ def compileMacros() = { - val classpath = List(sys.props("partest.lib"), sys.props("partest.reflect")) mkString sys.props("path.separator") - compileString(newCompiler("-language:experimental.macros", "-cp", classpath, "-d", testOutput.path))(macros_1) + val classpath = pathOf(sys.props("partest.lib"), sys.props("partest.reflect")) + compileString(newCompiler("-language:experimental.macros", "-cp", classpath))(macros_1) } def test_2 = """ @@ -27,7 +27,7 @@ object Test extends DirectTest { } """ def 
compileTest() = { - val classpath = List(sys.props("partest.lib"), testOutput.path) mkString sys.props("path.separator") + val classpath = pathOf(sys.props("partest.lib"), testOutput.path) compileString(newCompiler("-cp", classpath, "-d", testOutput.path))(test_2) } diff --git a/test/files/run/t6028.scala b/test/files/run/t6028.scala index 6e4e179f1dfd..60517d5193e7 100644 --- a/test/files/run/t6028.scala +++ b/test/files/run/t6028.scala @@ -13,9 +13,5 @@ object Test extends DirectTest { |} |""".stripMargin.trim - override def show(): Unit = { - Console.withErr(System.out) { - compile() - } - } + override def show(): Unit = compile() } diff --git a/test/files/run/t6288.check b/test/files/run/t6288.check index eb1ef1105679..a4ad1fd15e49 100644 --- a/test/files/run/t6288.check +++ b/test/files/run/t6288.check @@ -1,8 +1,8 @@ [[syntax trees at end of patmat]] // newSource1.scala [0:553]package [0:0] { - [0:151]object Case3 extends [13:151][152]scala.AnyRef { - [152]def (): [13]Case3.type = [152]{ - [152][152][152]Case3.super.(); + [0:151]object Case3 extends [13:151][13]scala.AnyRef { + [13]def (): [13]Case3.type = [13]{ + [13][13][13]Case3.super.(); [13]() }; [17:60]def unapply([29:35]z: [32:35]): [21]Option[Int] = [52:60][52:56][52:56]new [52:56]Some[Int]([57:59]-1); @@ -28,9 +28,9 @@ } } }; - [152:308]object Case4 extends [165:308][309]scala.AnyRef { - [309]def (): [165]Case4.type = [309]{ - [309][309][309]Case4.super.(); + [152:308]object Case4 extends [165:308][165]scala.AnyRef { + [165]def (): [165]Case4.type = [165]{ + [165][165][165]Case4.super.(); [165]() }; [169:217]def unapplySeq([184:190]z: [187:190]): [173]Option[List[Int]] = [213:217]scala.None; @@ -56,9 +56,9 @@ } } }; - [309:448]object Case5 extends [322:448][449]scala.AnyRef { - [449]def (): [322]Case5.type = [449]{ - [449][449][449]Case5.super.(); + [309:448]object Case5 extends [322:448][322]scala.AnyRef { + [322]def (): [322]Case5.type = [322]{ + [322][322][322]Case5.super.(); [322]() }; [326:361]def unapply([338:344]z: [341:344]): [330]Boolean = [357:361]true; @@ -84,9 +84,9 @@ } } }; - [449:553]object Case6 extends [462:553][553]scala.AnyRef { - [553]def (): [462]Case6.type = [553]{ - [553][553][553]Case6.super.(); + [449:553]object Case6 extends [462:553][462]scala.AnyRef { + [462]def (): [462]Case6.type = [462]{ + [462][462][462]Case6.super.(); [462]() }; [466:509]def unapply([478:484]z: [481:484]): [470]Option[Int] = [501:509][501:505][501:505]new [501:505]Some[Int]([506:508]-1); diff --git a/test/files/run/t6288.scala b/test/files/run/t6288.scala index 0565e848ea40..29ef3567a553 100644 --- a/test/files/run/t6288.scala +++ b/test/files/run/t6288.scala @@ -40,11 +40,7 @@ object Test extends DirectTest { |} |""".stripMargin.trim - override def show(): Unit = { + override def show(): Unit = compile() // Now: [84][84]Case3.unapply([84]x1); // Was: [84][84]Case3.unapply([64]x1); - Console.withErr(System.out) { - compile() - } - } } diff --git a/test/files/run/t6329_repl.check b/test/files/run/t6329_repl.check index e6b94db0f589..7035b4569747 100644 --- a/test/files/run/t6329_repl.check +++ b/test/files/run/t6329_repl.check @@ -4,7 +4,7 @@ import scala.reflect.{ClassManifest, classTag} scala> implicitly[ClassManifest[scala.List[_]]] warning: 1 deprecation (since 2.10.0); for details, enable `:setting -deprecation` or `:replay -deprecation` -val res0: scala.reflect.ClassTag[List[_]] = scala.collection.immutable.List[] +val res0: scala.reflect.ClassTag[List[_]] = scala.collection.immutable.List[Any] scala> 
classTag[scala.List[_]] val res1: scala.reflect.ClassTag[List[_]] = scala.collection.immutable.List diff --git a/test/files/run/t6329_repl_bug.check b/test/files/run/t6329_repl_bug.check index f82398194821..8e89bd2ed33f 100644 --- a/test/files/run/t6329_repl_bug.check +++ b/test/files/run/t6329_repl_bug.check @@ -7,7 +7,7 @@ import scala.reflect.runtime._ scala> implicitly[scala.reflect.ClassManifest[List[_]]] warning: 1 deprecation (since 2.10.0); for details, enable `:setting -deprecation` or `:replay -deprecation` -val res0: scala.reflect.ClassTag[List[_]] = scala.collection.immutable.List[] +val res0: scala.reflect.ClassTag[List[_]] = scala.collection.immutable.List[java.lang.Class] scala> scala.reflect.classTag[List[_]] val res1: scala.reflect.ClassTag[List[_]] = scala.collection.immutable.List diff --git a/test/files/run/t6329_vanilla.check b/test/files/run/t6329_vanilla.check index ad8f4b5c7720..74481cabf49c 100644 --- a/test/files/run/t6329_vanilla.check +++ b/test/files/run/t6329_vanilla.check @@ -1,4 +1,4 @@ -scala.collection.immutable.List[] +scala.collection.immutable.List[Any] scala.collection.immutable.List scala.collection.immutable.List[] scala.collection.immutable.List diff --git a/test/files/run/t6329_vanilla_bug.check b/test/files/run/t6329_vanilla_bug.check index 56b075b2e658..8282afaeba13 100644 --- a/test/files/run/t6329_vanilla_bug.check +++ b/test/files/run/t6329_vanilla_bug.check @@ -1,2 +1,2 @@ -scala.collection.immutable.List[] +scala.collection.immutable.List[Any] scala.collection.immutable.List diff --git a/test/files/run/t6344.check b/test/files/run/t6344.check index d994d81c7dc5..03f2468145d1 100644 --- a/test/files/run/t6344.check +++ b/test/files/run/t6344.check @@ -4,9 +4,9 @@ public int C0.v1(int) public int C0.v3() public int C0.v3() public int C0.v4(int,scala.collection.immutable.List) -#partest !java15 +#partest !java15+ public int C0.v4(int,scala.collection.immutable.List>) -#partest java15 +#partest java15+ public int C0.v4(int,scala.collection.immutable.List>) #partest public scala.collection.immutable.List C0.v2() @@ -18,9 +18,9 @@ public java.lang.Object C1.v1(java.lang.Object) public java.lang.Object C1.v3() public java.lang.Object C1.v3() public java.lang.Object C1.v4(java.lang.Object,scala.collection.immutable.List) -#partest !java15 +#partest !java15+ public java.lang.Object C1.v4(java.lang.Object,scala.collection.immutable.List>) -#partest java15 +#partest java15+ public java.lang.Object C1.v4(java.lang.Object,scala.collection.immutable.List>) #partest public scala.collection.immutable.List C1.v2() @@ -32,9 +32,9 @@ public java.lang.String C2.v1(java.lang.String) public java.lang.String C2.v3() public java.lang.String C2.v3() public java.lang.String C2.v4(java.lang.String,scala.collection.immutable.List) -#partest !java15 +#partest !java15+ public java.lang.String C2.v4(java.lang.String,scala.collection.immutable.List>) -#partest java15 +#partest java15+ public java.lang.String C2.v4(java.lang.String,scala.collection.immutable.List>) #partest public scala.collection.immutable.List C2.v2() diff --git a/test/files/run/t6411a.javaopts b/test/files/run/t6411a.javaopts deleted file mode 100644 index 2e862e5f806e..000000000000 --- a/test/files/run/t6411a.javaopts +++ /dev/null @@ -1 +0,0 @@ --XX:CompileCommand=exclude,scala/runtime/BoxesRunTime.unboxToInt diff --git a/test/files/run/t6411a.scala b/test/files/run/t6411a.scala index f40c42d05965..bd2fdd37be52 100644 --- a/test/files/run/t6411a.scala +++ b/test/files/run/t6411a.scala @@ -1,3 +1,4 @@ 
+// java: -XX:CompileCommand=exclude,scala/runtime/BoxesRunTime.unboxToInt // filter: scala.runtime.BoxesRunTime.{1,2}unboxToInt // // noise from -XX:CompileCommand=exclude,scala/runtime/BoxesRunTime.unboxToInt diff --git a/test/files/run/t6440.scala b/test/files/run/t6440.scala index 94eda3642ea6..f6fcc97d4870 100644 --- a/test/files/run/t6440.scala +++ b/test/files/run/t6440.scala @@ -5,8 +5,8 @@ object Test extends StoreReporterDirectTest { def code = ??? def compileCode(code: String) = { - val classpath = List(sys.props("partest.lib"), testOutput.path) mkString sys.props("path.separator") - compileString(newCompiler("-cp", classpath, "-d", testOutput.path))(code) + val classpath = pathOf(sys.props("partest.lib"), testOutput.path) + compileString(newCompiler("-cp", classpath))(code) } def library1 = """ diff --git a/test/files/run/t6440b.scala b/test/files/run/t6440b.scala index a1ad71716226..65a40edc40b4 100644 --- a/test/files/run/t6440b.scala +++ b/test/files/run/t6440b.scala @@ -5,8 +5,8 @@ object Test extends StoreReporterDirectTest { def code = ??? def compileCode(code: String) = { - val classpath = List(sys.props("partest.lib"), testOutput.path) mkString sys.props("path.separator") - compileString(newCompiler("-cp", classpath, "-d", testOutput.path))(code) + val classpath = pathOf(sys.props("partest.lib"), testOutput.path) + compileString(newCompiler("-cp", classpath))(code) } def library1 = """ diff --git a/test/files/run/t6488.javaopts b/test/files/run/t6488.javaopts deleted file mode 100644 index 0c252573c8f0..000000000000 --- a/test/files/run/t6488.javaopts +++ /dev/null @@ -1 +0,0 @@ --Dforked.test=yes.please diff --git a/test/files/run/t6488.scala b/test/files/run/t6488.scala index 1d99bd85d4cf..90d29b264964 100644 --- a/test/files/run/t6488.scala +++ b/test/files/run/t6488.scala @@ -1,3 +1,5 @@ +// java: -Dforked.test=yes.please + import scala.sys.process._ import scala.util.Try import scala.util.Properties.{javaHome, javaClassPath, userDir} diff --git a/test/files/run/t6502.scala b/test/files/run/t6502.scala index 68c0e8aa4e88..8664f5c96ade 100644 --- a/test/files/run/t6502.scala +++ b/test/files/run/t6502.scala @@ -6,7 +6,7 @@ object Test extends StoreReporterDirectTest { def code = ??? 
private def compileCode(code: String, jarFileName: String) = { - val classpath = List(sys.props("partest.lib"), testOutput.path) mkString sys.props("path.separator") + val classpath = pathOf(sys.props("partest.lib"), testOutput.path) compileString(newCompiler("-cp", classpath, "-d", s"${testOutput.path}/$jarFileName"))(code) } private def runAdded(codeToRun: String): String = { @@ -16,7 +16,6 @@ object Test extends StoreReporterDirectTest { output.mkString("\n") } - def app1 = """ package test diff --git a/test/files/run/t6555.scala b/test/files/run/t6555.scala index e1db058da1d7..f4425d960113 100644 --- a/test/files/run/t6555.scala +++ b/test/files/run/t6555.scala @@ -7,9 +7,5 @@ object Test extends DirectTest { override def code = "class Foo { val f = (param: Int) => param } " - override def show(): Unit = { - Console.withErr(System.out) { - compile() - } - } + override def show(): Unit = compile() } diff --git a/test/files/run/t7271.check b/test/files/run/t7271.check index 6db301c21ddb..ddfc0d560a8e 100644 --- a/test/files/run/t7271.check +++ b/test/files/run/t7271.check @@ -1,6 +1,6 @@ [[syntax trees at end of parser]] // newSource1.scala [0:91]package [0:0] { - [0:91]class C extends [8:91][91]scala.AnyRef { + [0:91]class C extends [8:91][8]scala.AnyRef { [8]def () = [8]{ [NoPosition][NoPosition][NoPosition]super.(); [8]() diff --git a/test/files/run/t7271.scala b/test/files/run/t7271.scala index 6eadb7816c0e..c2801396d37e 100644 --- a/test/files/run/t7271.scala +++ b/test/files/run/t7271.scala @@ -16,13 +16,7 @@ object Test extends DirectTest { } """.trim - override def show(): Unit = { - // redirect err to out, for logging - val prevErr = System.err - System.setErr(System.out) - compile() - System.setErr(prevErr) - } + override def show(): Unit = compile() override def newCompiler(args: String*): Global = { diff --git a/test/files/run/t7398.scala b/test/files/run/t7398.scala deleted file mode 100644 index 4b4685076810..000000000000 --- a/test/files/run/t7398.scala +++ /dev/null @@ -1,26 +0,0 @@ -import scala.tools.partest._ - -object Test extends CompilerTest { - import global._ - - override lazy val units: List[CompilationUnit] = { - // This test itself does not depend on JDK8. - javaCompilationUnits(global)(defaultMethodSource) - } - - private def defaultMethodSource = """ -public interface Iterator { - boolean hasNext(); - E next(); - default void remove() { - throw new UnsupportedOperationException("remove"); - } - default void forEachRemaining(Consumer action) { - throw new UnsupportedOperationException("forEachRemaining"); - } -} - """ - - // We're only checking we can compile it. - def check(source: String, unit: global.CompilationUnit): Unit = () -} diff --git a/test/files/run/t7448.scala b/test/files/run/t7448.scala new file mode 100644 index 000000000000..5bf74ee85a77 --- /dev/null +++ b/test/files/run/t7448.scala @@ -0,0 +1,18 @@ +// scalac: -nowarn +import util.chaining._ + +object Test { + def main(args: Array[String]) = 42.tap(res => assert(res == 42)) +} + +// test that partest is using scala runner to execute this test. +// With warnings enabled: +/* +t7448.scala:7: warning: not a valid main method for Test, + because main methods must have the exact signature `(Array[String]): Unit`, though Scala runners will forgive a non-Unit result. 
+ To define an entry point, please define the main method as: + def main(args: Array[String]): Unit + + def main(args: Array[String]) = 42.tap(res => assert(res == 42)) + ^ + */ diff --git a/test/files/run/t7569.check b/test/files/run/t7569.check index 5153e9d6a4e9..0f6b70f96d9f 100644 --- a/test/files/run/t7569.check +++ b/test/files/run/t7569.check @@ -1,8 +1,8 @@ source-newSource1.scala,line-3,offset=49 A.this.one source-newSource1.scala,line-3,offset=49 A.this -source-newSource1.scala,line-4,offset=67 A.super.() -source-newSource1.scala,line-4,offset=67 A.super. -source-newSource1.scala,line-4,offset=67 this +source-newSource1.scala,line-2,offset=41 A.super.() +source-newSource1.scala,line-2,offset=41 A.super. +source-newSource1.scala,line-2,offset=41 this source-newSource1.scala,line-3,offset=49 A.this.one source-newSource1.scala,line-3,offset=49 A.this RangePosition(newSource1.scala, 55, 57, 65) java.lang.Integer.toString(1) diff --git a/test/files/run/t7634.javaopts b/test/files/run/t7634.javaopts deleted file mode 100644 index b0c90bb1f73a..000000000000 --- a/test/files/run/t7634.javaopts +++ /dev/null @@ -1 +0,0 @@ --Dneeds.forked.jvm.for.windows diff --git a/test/files/run/t7634.scala b/test/files/run/t7634.scala index 345138eb933f..5997b3d48fa5 100644 --- a/test/files/run/t7634.scala +++ b/test/files/run/t7634.scala @@ -1,3 +1,5 @@ +// java: -Dneeds.forked.jvm.for.windows + import java.io.File import scala.tools.partest.ReplTest import scala.util.Properties.propOrElse diff --git a/test/files/run/t7741a.check b/test/files/run/t7741a.check deleted file mode 100644 index e835f0ce738c..000000000000 --- a/test/files/run/t7741a.check +++ /dev/null @@ -1,3 +0,0 @@ -#partest !java8 -Note: t7741a/GroovyInterface$1Dump.java uses or overrides a deprecated API. -Note: Recompile with -Xlint:deprecation for details. 
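A note on the .javaopts files deleted throughout this patch (t2318, t6411a, t6488, t7634, t7805-repl-i, t8928): per-test option files are replaced by header comments in the test source itself, as the `// java:` lines added above show. The sketch below is illustrative only; demo.flag is a made-up property, and the precise semantics are whatever partest gives these headers. From the examples in this diff, `java:` supplies extra JVM options (typically when a forked JVM is needed), `scalac:` supplies extra compiler options, and `filter:` drops matching lines before comparing output against the .check file.

// java: -Ddemo.flag=on
// scalac: -Werror -Xlint
// filter: WARNING.*
object Test {
  def main(args: Array[String]): Unit =
    println(sys.props.getOrElse("demo.flag", "unset"))
}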
diff --git a/test/files/run/t7741a/GroovyInterface$1Dump.java b/test/files/run/t7741a/GroovyInterface$1Dump.java index 0c0eab3f1b6d..cc187f353ed4 100644 --- a/test/files/run/t7741a/GroovyInterface$1Dump.java +++ b/test/files/run/t7741a/GroovyInterface$1Dump.java @@ -175,7 +175,7 @@ public static byte[] dump () throws Exception { { mv = cw.visitMethod(ACC_PRIVATE + ACC_STATIC + ACC_SYNTHETIC, "$createCallSiteArray", "()Lorg/codehaus/groovy/runtime/callsite/CallSiteArray;", null, null); mv.visitCode(); - mv.visitLdcInsn(new Integer(0)); + mv.visitLdcInsn(Integer.valueOf(0)); mv.visitTypeInsn(ANEWARRAY, "java/lang/String"); mv.visitVarInsn(ASTORE, 0); mv.visitTypeInsn(NEW, "org/codehaus/groovy/runtime/callsite/CallSiteArray"); diff --git a/test/files/run/t7805-repl-i.javaopts b/test/files/run/t7805-repl-i.javaopts deleted file mode 100644 index 9740f07b079b..000000000000 --- a/test/files/run/t7805-repl-i.javaopts +++ /dev/null @@ -1 +0,0 @@ --Dneeds.forked.jvm \ No newline at end of file diff --git a/test/files/run/t7805-repl-i.scala b/test/files/run/t7805-repl-i.scala index 2a80ad8bda2d..816926b7c38a 100644 --- a/test/files/run/t7805-repl-i.scala +++ b/test/files/run/t7805-repl-i.scala @@ -1,3 +1,5 @@ +// java: -Dneeds.forked.jvm + import scala.tools.partest.ReplTest import scala.tools.nsc.{ GenericRunnerSettings, Settings } import scala.tools.nsc.settings.MutableSettings diff --git a/test/files/run/t7825.scala b/test/files/run/t7825.scala deleted file mode 100644 index 65ca06fdfc09..000000000000 --- a/test/files/run/t7825.scala +++ /dev/null @@ -1,34 +0,0 @@ -import scala.tools.partest._ - -object Test extends CompilerTest { - import global._ - - override lazy val units: List[CompilationUnit] = { - // We can test this on JDK6. - javaCompilationUnits(global)(defaultMethodSource) ++ compilationUnits(global)(scalaExtendsDefault) - } - - private def defaultMethodSource = """ -public interface Iterator { - boolean hasNext(); - E next(); - default void remove() { - throw new UnsupportedOperationException("remove"); - } -} - """ - - private def scalaExtendsDefault = """ -object Test { - object X extends Iterator[String] { - def hasNext = true - def next = "!" - } -} - """ - - // We're only checking we that the Scala compilation unit passes refchecks - // No further checks are needed here. - def check(source: String, unit: global.CompilationUnit): Unit = { - } -} diff --git a/test/files/run/t7876.scala b/test/files/run/t7876.scala index 8c41e2e3c34a..bc18f16b6dd8 100644 --- a/test/files/run/t7876.scala +++ b/test/files/run/t7876.scala @@ -2,7 +2,6 @@ import scala.tools.partest._ // Type constructors for FunctionN and TupleN should not be considered as function type / tuple types. 
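// Aside (illustrative, not part of t7876.scala): a concrete reading of the comment above.
// An applied type such as `Int => Int` is a function type, while the bare constructor
// Function1 is only a type constructor until it is applied:
//   def call(f: Int => Int): Int = f(1)   // applied function type, fine
//   type TC[F[_, _]] = F[Int, Int]        // Function1 passed here is just a type constructor
//   type Applied = TC[Function1]          // becomes Int => Int only after application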
object Test extends DirectTest { - override def extraSettings: String = "-usejavacp" def code = "" diff --git a/test/files/run/t7970.scala b/test/files/run/t7970.scala new file mode 100644 index 000000000000..5f90186c5b87 --- /dev/null +++ b/test/files/run/t7970.scala @@ -0,0 +1,15 @@ +object Test { + { + val session: Object = null + trait Outer{ + trait Inner{ + assert(session == null) + } + } + val o = new Outer{} + new o.Inner { } + } + + def main(args: Array[String]): Unit = { + } +} diff --git a/test/files/run/t8266-octal-interp.javaopts b/test/files/run/t8266-octal-interp.javaopts deleted file mode 100644 index 9740f07b079b..000000000000 --- a/test/files/run/t8266-octal-interp.javaopts +++ /dev/null @@ -1 +0,0 @@ --Dneeds.forked.jvm \ No newline at end of file diff --git a/test/files/run/t8433.scala b/test/files/run/t8433.scala index 16144ffddb7c..c4757997c897 100644 --- a/test/files/run/t8433.scala +++ b/test/files/run/t8433.scala @@ -42,5 +42,5 @@ object Test extends DirectTest { ScalaClassLoader(getClass.getClassLoader).run("Main", Nil) } - override def extraSettings = s"-usejavacp -d ${testOutput.path} -cp ${testOutput.path}" + override def extraSettings = s"-usejavacp -cp ${testOutput.path}" } diff --git a/test/files/run/t8502.scala b/test/files/run/t8502.scala index a700eb521729..fa10a10d9c3a 100644 --- a/test/files/run/t8502.scala +++ b/test/files/run/t8502.scala @@ -5,8 +5,8 @@ object Test extends StoreReporterDirectTest { def code = ??? def compileCode(code: String) = { - val classpath = List(sys.props("partest.lib"), testOutput.path) mkString sys.props("path.separator") - compileString(newCompiler("-cp", classpath, "-d", testOutput.path))(code) + val classpath = pathOf(sys.props("partest.lib"), testOutput.path) + compileString(newCompiler("-cp", classpath))(code) } def show(): Unit = { diff --git a/test/files/run/t8502b.scala b/test/files/run/t8502b.scala index f1858fd88859..5113179c957e 100644 --- a/test/files/run/t8502b.scala +++ b/test/files/run/t8502b.scala @@ -10,8 +10,8 @@ object Test extends StoreReporterDirectTest { def code = ??? def compileCode(code: String) = { - val classpath = List(sys.props("partest.lib"), testOutput.path) mkString sys.props("path.separator") - compileString(newCompiler("-cp", classpath, "-d", testOutput.path))(code) + val classpath = pathOf(sys.props("partest.lib"), testOutput.path) + compileString(newCompiler("-cp", classpath))(code) } def show(): Unit = { diff --git a/test/files/run/t8852a.scala b/test/files/run/t8852a.scala deleted file mode 100644 index cbff8ab75b91..000000000000 --- a/test/files/run/t8852a.scala +++ /dev/null @@ -1,34 +0,0 @@ -import scala.tools.partest._ - -// Test that static methods in Java interfaces (new in Java 8) -// are callable from jointly compiler Scala code. -object Test extends CompilerTest { - import global._ - - override lazy val units: List[CompilationUnit] = { - // This test itself does not depend on JDK8. - javaCompilationUnits(global)(staticMethodInInterface) ++ - compilationUnits(global)(scalaClient) - } - - private def staticMethodInInterface = """ -public interface Interface { - public static int staticMethod() { - return 42; - } -} - - """ - - private def scalaClient = """ -object Test { - val x: Int = Interface.staticMethod() -} - -class C extends Interface // expect no errors about unimplemented members. - - """ - - // We're only checking we can compile it. 
- def check(source: String, unit: global.CompilationUnit): Unit = () -} diff --git a/test/files/run/t8907.scala b/test/files/run/t8907.scala index e425e93546e8..a20e9c552e7c 100644 --- a/test/files/run/t8907.scala +++ b/test/files/run/t8907.scala @@ -5,7 +5,7 @@ object Test extends StoreReporterDirectTest { def code = ??? def compileCode(code: String) = { - val classpath = List(sys.props("partest.lib"), testOutput.path) mkString sys.props("path.separator") + val classpath = pathOf(sys.props("partest.lib"), testOutput.path) compileString(newCompiler("-cp", classpath, "-d", testOutput.path))(code) } diff --git a/test/files/run/t8928.javaopts b/test/files/run/t8928.javaopts deleted file mode 100644 index a8e6bbca18ae..000000000000 --- a/test/files/run/t8928.javaopts +++ /dev/null @@ -1 +0,0 @@ --Dneeds.forked.jvm diff --git a/test/files/run/t8928/Test_1.scala b/test/files/run/t8928/Test_1.scala index 1cef564ff1be..bcf94ce41e52 100644 --- a/test/files/run/t8928/Test_1.scala +++ b/test/files/run/t8928/Test_1.scala @@ -1,3 +1,4 @@ +// java: -Dneeds.forked.jvm import test._ object Test extends App { diff --git a/test/files/run/t9097.scala b/test/files/run/t9097.scala index a7ddc5cf917e..23c9c31a7a32 100644 --- a/test/files/run/t9097.scala +++ b/test/files/run/t9097.scala @@ -9,7 +9,7 @@ object Test extends StoreReporterDirectTest { "-Ydelambdafy:method", "-Vprint:delambdafy", s"-d ${testOutput.path}" - ) mkString " " + ).mkString(" ") override def code = """package o |package a { diff --git a/test/files/run/t9437b.scala b/test/files/run/t9437b.scala index 9278e02ec8d4..82a83dd093e8 100644 --- a/test/files/run/t9437b.scala +++ b/test/files/run/t9437b.scala @@ -12,7 +12,7 @@ import Opcodes._ // that uses the class with named arguments. // Any failure will be dumped to std out. 
object Test extends DirectTest { - override def extraSettings: String = "-usejavacp -d " + testOutput.path + " -cp " + testOutput.path + override def extraSettings: String = s"-usejavacp -cp ${testOutput.path}" def generateCode(): Unit = { val className = "Foo" @@ -78,15 +78,8 @@ class Driver { """ override def show(): Unit = { - // redirect err to out, for logging - val prevErr = System.err - System.setErr(System.out) - try { - generateCode() - compile() - Class.forName("Driver").getDeclaredConstructor().newInstance() - } - finally - System.setErr(prevErr) + generateCode() + compile() + Class.forName("Driver").getDeclaredConstructor().newInstance() } } diff --git a/test/files/run/t9529.check b/test/files/run/t9529.check index f1c433ddaced..38ad198f56ba 100644 --- a/test/files/run/t9529.check +++ b/test/files/run/t9529.check @@ -32,7 +32,7 @@ u: List(@anns.Ann_0$Container(value={@anns.Ann_0(name="u", value="you"), @anns.A List(@anns.Ann_0$Container(value={@anns.Ann_0(name="", value="constructor"), @anns.Ann_0(name="", value="initializer")})) -#partest java15 +#partest java15+ A: List() B: List(@java.lang.Deprecated(forRemoval=false, since="")) C: List(@anns.Ann_0(name="C", value="see")) diff --git a/test/files/run/t9915/Test_2.scala b/test/files/run/t9915/Test_2.scala index afed667cc6e5..f26f1c1a3d91 100644 --- a/test/files/run/t9915/Test_2.scala +++ b/test/files/run/t9915/Test_2.scala @@ -1,12 +1,14 @@ +import scala.tools.testkit.AssertUtil.assertEqualStrings + object Test extends App { val c = new C_1 - assert(c.nulled == "X\u0000ABC") // "X\000ABC" - assert(c.supped == "š’ˆš’š’‘š’›š’š’˜š’•š’–") + assert(C_1.NULLED.length == "XYABC".length) + assert(C_1.SUPPED.codePointCount(0, C_1.SUPPED.length) == 8) - assert(C_1.NULLED == "X\u0000ABC") // "X\000ABC" - assert(C_1.SUPPED == "š’ˆš’š’‘š’›š’š’˜š’•š’–") + assertEqualStrings(c.nulled)("X\u0000ABC") // "X\000ABC" in java source + assertEqualStrings(c.supped)("š’ˆš’š’‘š’›š’š’˜š’•š’–") - assert(C_1.NULLED.size == "XYABC".size) - assert(C_1.SUPPED.codePointCount(0, C_1.SUPPED.length) == 8) + assertEqualStrings(C_1.NULLED)("X\u0000ABC") // "X\000ABC" in java source + assertEqualStrings(C_1.SUPPED)("š’ˆš’š’‘š’›š’š’˜š’•š’–") } diff --git a/test/files/run/tasty-lambdatype-strawman.check b/test/files/run/tasty-lambdatype-strawman.check new file mode 100644 index 000000000000..8c9ed5c9770a --- /dev/null +++ b/test/files/run/tasty-lambdatype-strawman.check @@ -0,0 +1,3 @@ +PolyType([B => TypeBounds(NothingType, AppliedType(ParamRef(CC), [IntType])), CC => PolyType([_ => TypeBounds(NothingType, AnyType)], AnyType)], AppliedType(NamedRef(Bar), [ParamRef(B), ParamRef(CC)])) + +there was a cycle in creating Delta type constructor diff --git a/test/files/run/tasty-lambdatype-strawman.scala b/test/files/run/tasty-lambdatype-strawman.scala new file mode 100644 index 000000000000..67afd04a06e4 --- /dev/null +++ b/test/files/run/tasty-lambdatype-strawman.scala @@ -0,0 +1,168 @@ +import collection.immutable.ArraySeq + +object Test { + + def main(args: Array[String]): Unit = { + + val BarTypeConstructor = // [B <: CC[Int], CC[_]] => Bar[B, CC] + PolyType.from( + params = List( + "B" -> (hk => TypeBounds.upper(AppliedType(hk.ref(1), IntType :: Nil))), + "CC" -> (hk => PolyType.from(List("_" -> (_ => TypeBounds.upper(AnyType))), hk => AnyType)) + ), + res = hk => AppliedType(NamedRef("Bar"), hk.ref(0) :: hk.ref(1) :: Nil) + ) + + println(BarTypeConstructor.debug) + println() + + try { + val DeltaTypeConstructor = // [B <: 
CC[[I <: B] =>> Any], CC[_[_ <: B]]] =>> Delta[B, CC] + PolyType.from( + params = List( + "B" -> (hk => + TypeBounds.upper( + AppliedType( + tycon = hk.ref(1), + args = PolyType.from(List("I" -> (_ => TypeBounds.upper(hk.ref(0)))), _ => AnyType) :: Nil + ) + ) + ), + "CC" -> (hk => + PolyType.from( + params = List( + "_" -> (_ => + PolyType.from( + params = List( + "_" -> (_ => + // force a cyclic completion - this type is illegal in Dotty + // a completion would be needed here to check the bounds of `CC` + TypeBounds.upper({val ref = hk.ref(0); ref.underlying; ref}) + ) + ), + res = hk => AnyType + ) + ) + ), + res = hk => AnyType + ) + ) + ), + res = hk => AppliedType(NamedRef("Delta"), hk.ref(0) :: hk.ref(1) :: Nil) + ) + } catch { + case err: AssertionError => + assert(err.getMessage.contains("cyclic completion of SyncRef")) + println("there was a cycle in creating Delta type constructor") + } + } +} + +final class SyncRef[A](private var compute: () => A) { + private var out: A = _ + private var entered: Boolean = false + + def apply(): A = { + if (entered) { + assert(out != null, "cyclic completion of SyncRef") + } + else { + entered = true + val result = compute() + compute = null + assert(result != null, "SyncRef is non-nullable") + out = result + } + out + } +} + +sealed abstract class TypeOrCompleter { + def debug: String = this match { + case p: Product => s"${p.productPrefix}${ + def iter(it: Iterator[Any], s: String = "(", e: String = ")"): String = + it.map { + case t: Type => t.debug + case t: Iterable[u] => iter(t.iterator, s = "[", e = "]") + case a => a.toString + }.mkString(s, ", ", e) + val it = p.productIterator + if (!it.hasNext) "" else iter(it) + }" + case _ => toString + } +} + +abstract class Completer extends TypeOrCompleter { + def complete(sym: Symbol): Unit +} + +abstract class Type extends TypeOrCompleter { + def underlying: Type = this +} + +class Symbol(val name: String, private var myInfoOrCompleter: TypeOrCompleter) { self => + + def infoOrCompleter = myInfoOrCompleter + + def info_=(tp: Type): Unit = + myInfoOrCompleter = tp + + def info: Type = myInfoOrCompleter match { + case c: Completer => + c.complete(self) + info + case t: Type => t + } + + override def toString = s"$name => ${infoOrCompleter.debug}" + +} + +case class ParamRef(symbol: Symbol) extends Type { + override def underlying: Type = symbol.info + override def debug: String = s"ParamRef(${symbol.name})" +} + +case class PolyType(params: List[Symbol], resultType: Type) extends Type +case class AppliedType(tycon: Type, args: List[Type]) extends Type +case class TypeBounds(lo: Type, hi: Type) extends Type +object TypeBounds { + def upper(hi: Type) = TypeBounds(NothingType, hi) +} +case object IntType extends Type +case object AnyType extends Type +case object NothingType extends Type +case class NamedRef(fullname: String) extends Type + +object PolyType { + def from(params: List[(String, HKTypeLambda => Type)], res: HKTypeLambda => Type): PolyType = { + val (names, infos0) = params.to(ArraySeq).unzip + val infos = (hk: HKTypeLambda) => () => infos0.map { case op => op(hk) } + new HKTypeLambda(names, infos, res).underlying + } +} + +class HKTypeLambda(paramNames: ArraySeq[String], paramInfosOp: HKTypeLambda => () => ArraySeq[Type], resOp: HKTypeLambda => Type) { thisLambda => + + final val lambdaParams = { + val paramInfoDb = new SyncRef(paramInfosOp(thisLambda)) + paramNames.zipWithIndex.map { case (name, idx) => + new Symbol(name, new Completer { + def complete(sym: Symbol): Unit = { + 
sym.info = paramInfoDb()(idx) + } + }) + } + } + + final val resType = resOp(thisLambda) + + def ref(idx: Int): ParamRef = new ParamRef(lambdaParams(idx)) + + def underlying: PolyType = { + lambdaParams.foreach(_.info) + new PolyType(lambdaParams.toList, resType) + } + +} diff --git a/test/files/run/type-tag-leak.javaopts b/test/files/run/type-tag-leak.javaopts deleted file mode 100644 index 408a4e4cb595..000000000000 --- a/test/files/run/type-tag-leak.javaopts +++ /dev/null @@ -1 +0,0 @@ --Xmx192M -XX:+ExitOnOutOfMemoryError \ No newline at end of file diff --git a/test/files/run/type-tag-leak.scala b/test/files/run/type-tag-leak.scala index 245288802a84..277799f765eb 100644 --- a/test/files/run/type-tag-leak.scala +++ b/test/files/run/type-tag-leak.scala @@ -1,3 +1,5 @@ +// java: -Xmx192M -XX:+ExitOnOutOfMemoryError + import scala.reflect.runtime.universe import scala.reflect.runtime.universe._ import scala.tools.nsc.interpreter._ diff --git a/test/files/run/typetags_without_scala_reflect_manifest_lookup.scala b/test/files/run/typetags_without_scala_reflect_manifest_lookup.scala index 6488c78d164b..820479cfd3be 100644 --- a/test/files/run/typetags_without_scala_reflect_manifest_lookup.scala +++ b/test/files/run/typetags_without_scala_reflect_manifest_lookup.scala @@ -2,7 +2,7 @@ import scala.tools.partest._ import scala.tools.nsc.Settings object Test extends DirectTest { - override def extraSettings = "-cp " + sys.props("partest.lib") + " -d \"" + testOutput.path + "\"" + override def extraSettings = "-cp " + sys.props("partest.lib") def code = """ object Test extends App { diff --git a/test/files/run/typetags_without_scala_reflect_typetag_lookup.scala b/test/files/run/typetags_without_scala_reflect_typetag_lookup.scala index dccb2af8f55e..fd0f2dee3d32 100644 --- a/test/files/run/typetags_without_scala_reflect_typetag_lookup.scala +++ b/test/files/run/typetags_without_scala_reflect_typetag_lookup.scala @@ -3,6 +3,9 @@ import scala.tools.partest._ object Test extends StoreReporterDirectTest { def code = ??? 
+ // differs for two compilations + override def extraSettings: String = "" + def library = """ import scala.reflect.runtime.universe._ @@ -11,8 +14,8 @@ object Test extends StoreReporterDirectTest { } """ def compileLibrary() = { - val classpath = List(sys.props("partest.lib"), sys.props("partest.reflect")) mkString sys.props("path.separator") - compileString(newCompiler("-cp", classpath, "-d", testOutput.path))(library) + val classpath = pathOf(sys.props("partest.lib"), sys.props("partest.reflect")) + compileString(newCompiler("-cp", classpath))(library) } def app = """ @@ -27,15 +30,15 @@ object Test extends StoreReporterDirectTest { } """ def compileApp() = { - val classpath = List(sys.props("partest.lib"), testOutput.path) mkString sys.props("path.separator") - compileString(newCompiler("-cp", classpath, "-d", testOutput.path))(app) + val classpath = pathOf(sys.props("partest.lib"), testOutput.path) + compileString(newCompiler("-cp", classpath))(app) } def show(): Unit = { compileLibrary(); println(filteredInfos.mkString("\n")) storeReporter.infos.clear() - compileApp(); + compileApp() // we should get "missing or invalid dependency detected" errors, because we're trying to use an implicit that can't be unpickled // but we don't know the number of these errors and their order, so I just ignore them all println(filteredInfos.filterNot(_.msg.contains("is missing from the classpath")).mkString("\n")) diff --git a/test/files/run/typetags_without_scala_reflect_typetag_manifest_interop.scala b/test/files/run/typetags_without_scala_reflect_typetag_manifest_interop.scala index c865759588c3..a62c5fe1e6d5 100644 --- a/test/files/run/typetags_without_scala_reflect_typetag_manifest_interop.scala +++ b/test/files/run/typetags_without_scala_reflect_typetag_manifest_interop.scala @@ -4,6 +4,9 @@ import scala.tools.nsc.Settings object Test extends StoreReporterDirectTest { def code = ??? 
+ // differs for two compilations + override def extraSettings: String = "" + def library = """ import scala.reflect.runtime.universe._ @@ -13,8 +16,8 @@ object Test extends StoreReporterDirectTest { } """ def compileLibrary() = { - val classpath = List(sys.props("partest.lib"), sys.props("partest.reflect")) mkString sys.props("path.separator") - compileString(newCompiler("-cp", classpath, "-d", testOutput.path))(library) + val classpath = pathOf(sys.props("partest.lib"), sys.props("partest.reflect")) + compileString(newCompiler("-cp", classpath))(library) } def app = """ @@ -29,19 +32,17 @@ object Test extends StoreReporterDirectTest { } """ def compileApp() = { - val classpath = List(sys.props("partest.lib"), testOutput.path) mkString sys.props("path.separator") - val global = newCompiler("-cp", classpath, "-d", testOutput.path) - compileString(newCompiler("-cp", classpath, "-d", testOutput.path))(app) - //global.reporter.ERROR.foreach(println) + val classpath = pathOf(sys.props("partest.lib"), testOutput.path) + compileString(newCompiler("-cp", classpath))(app) } def show(): Unit = { compileLibrary(); println(filteredInfos.mkString("\n")) storeReporter.infos.clear() - compileApp(); + compileApp() // we should get "missing or invalid dependency detected" errors, because we're trying to use an implicit that can't be unpickled // but we don't know the number of these errors and their order, so I just ignore them all - println(filteredInfos.filterNot (_.msg.contains("is missing from the classpath")).mkString("\n")) + println(filteredInfos.filterNot(_.msg.contains("is missing from the classpath")).mkString("\n")) } } diff --git a/test/jcstress/project/build.properties b/test/jcstress/project/build.properties index e67343ae796c..bb3a9b7dc6d2 100644 --- a/test/jcstress/project/build.properties +++ b/test/jcstress/project/build.properties @@ -1 +1 @@ -sbt.version=1.5.0 +sbt.version=1.5.6 diff --git a/test/junit/scala/PartialFunctionTest.scala b/test/junit/scala/PartialFunctionTest.scala new file mode 100644 index 000000000000..a4cfa5693935 --- /dev/null +++ b/test/junit/scala/PartialFunctionTest.scala @@ -0,0 +1,30 @@ +package scala + +import org.junit.Assert._ +import org.junit.Test + +class PartialFunctionTest { + + import PartialFunction.{cond, condOpt} + + @Test + def `cond evaluates pf`(): Unit = { + assertTrue(cond("x") { case "x" => true }) + } + + @Test + def `cond evaluates default`(): Unit = { + assertFalse(cond("z") { case "x" => true }) + } + + @Test + def `condOpt evaluates pf`(): Unit = { + assertEquals(Some("y"), condOpt("x") { case "x" => "y" }) + assertEquals(Some(null), condOpt("x") { case "x" => null case "z" => "y" }) + } + + @Test + def `condOpt evaluates default`(): Unit = { + assertEquals(None, condOpt("z") { case "x" => "y" }) + } +} diff --git a/test/junit/scala/collection/ArrayOpsTest.scala b/test/junit/scala/collection/ArrayOpsTest.scala index 06a1cc2713f3..d8f9eb4229f3 100644 --- a/test/junit/scala/collection/ArrayOpsTest.scala +++ b/test/junit/scala/collection/ArrayOpsTest.scala @@ -122,4 +122,36 @@ class ArrayOpsTest { val a: Array[Byte] = new Array[Byte](1000).sortWith { _ < _ } assertEquals(0, a(0)) } + + @Test + def `empty intersection has correct component type for array`(): Unit = { + val something = Array(3.14) + val nothing = Array[Double]() + val empty = Array.empty[Double] + + assertEquals(classOf[Double], nothing.intersect(something).getClass.getComponentType) + assertTrue(nothing.intersect(something).isEmpty) + + assertEquals(classOf[Double], 
empty.intersect(something).getClass.getComponentType) + assertTrue(empty.intersect(something).isEmpty) + assertEquals(classOf[Double], empty.intersect(nothing).getClass.getComponentType) + assertTrue(empty.intersect(nothing).isEmpty) + + assertEquals(classOf[Double], something.intersect(nothing).getClass.getComponentType) + assertTrue(something.intersect(nothing).isEmpty) + assertEquals(classOf[Double], something.intersect(empty).getClass.getComponentType) + assertTrue(something.intersect(empty).isEmpty) + } + + // discovered while working on scala/scala#9388 + @Test + def iterator_drop(): Unit = { + val it = Array(1, 2, 3) + .iterator + .drop(Int.MaxValue) + .drop(Int.MaxValue) // potential index overflow to negative + assert(!it.hasNext) // bug had index as negative and this returning true + // even though the index is both out of bounds and should + // always be between `0` and `Array#length`. + } } diff --git a/test/junit/scala/collection/FactoriesTest.scala b/test/junit/scala/collection/FactoriesTest.scala index 34ebc1286870..d08fac0759d7 100644 --- a/test/junit/scala/collection/FactoriesTest.scala +++ b/test/junit/scala/collection/FactoriesTest.scala @@ -215,11 +215,11 @@ class FactoriesTest { im.BitSet(1, 2, 3) ) - object enum extends Enumeration { + object `enum` extends Enumeration { val x, y, z = Value } - val enumValues = enum.values + val enumValues = `enum`.values sortedFactoryFromIterableOnceReturnsSameReference(SortedSet, im.SortedSet)(enumValues) @@ -277,8 +277,9 @@ class FactoriesTest { assert(Iterable().isInstanceOf[List[_]], "Iterable.apply should delegate to List.apply") assert(Iterable(1,2,3).isInstanceOf[List[_]], "Iterable.apply should delegate to List.apply") - assert(im.SeqMap().isInstanceOf[im.VectorMap[_, _]], "immutable.SeqMap.apply should delegate to VectorMap.apply") - assert(im.SeqMap(1 -> 2, 3 -> 4, 5 -> 6).isInstanceOf[im.VectorMap[_, _]], "immutable.SeqMap.apply should delegate to VectorMap.apply") + assert(im.SeqMap().getClass.getSimpleName == "EmptySeqMap$", "immutable.SeqMap.apply should use EmptySeqMap$") + assert(im.SeqMap(1 -> 2, 3 -> 4, 5 -> 6).getClass.getSimpleName == "SeqMap3", "immutable.SeqMap.apply should use SeqMap3") + assert(im.SeqMap(1 -> 2, 3 -> 4, 5 -> 6, 7 -> 8, 9 -> 10).isInstanceOf[im.VectorMap[_, _]], "immutable.SeqMap.apply should delegate to VectorMap.apply") assert(Map().isInstanceOf[im.Map[_, _]], "Map.apply should delegate to immutable.Map.apply") assert(Map(1 -> 2, 3 -> 4, 5 -> 6).isInstanceOf[im.Map[_, _]], "Map.apply should delegate to immutable.Map.apply") diff --git a/test/junit/scala/collection/IndexedSeqViewTest.scala b/test/junit/scala/collection/IndexedSeqViewTest.scala index 01858a17acb1..9f6485482034 100644 --- a/test/junit/scala/collection/IndexedSeqViewTest.scala +++ b/test/junit/scala/collection/IndexedSeqViewTest.scala @@ -18,4 +18,9 @@ class IndexedSeqViewTest { assertEquals(2, IndexedSeq(1, 2, 3, 4, 5).view.iterator.take(2).knownSize) assertEquals(2, IndexedSeq(1, 2, 3, 4, 5).view.iterator.slice(2, 4).knownSize) } + + @Test + def reverseEmptyIterator(): Unit = { + assertEquals(0, Vector.empty[Int].reverseIterator.take(1).toList.size) + } } diff --git a/test/junit/scala/collection/IterableTest.scala b/test/junit/scala/collection/IterableTest.scala index 78f911aace1d..3a3495d2602b 100644 --- a/test/junit/scala/collection/IterableTest.scala +++ b/test/junit/scala/collection/IterableTest.scala @@ -1,7 +1,9 @@ package scala.collection -import org.junit.{Assert, Test}, Assert.{assertEquals, assertTrue} +import 
org.junit.{Assert, Test} +import Assert.{assertEquals, assertTrue} +import scala.annotation.nowarn import scala.collection.immutable.{ArraySeq, List, Range, Vector} import scala.tools.testkit.AssertUtil._ @@ -135,8 +137,7 @@ class IterableTest { check(new Array(10), l.copyToArray(_, 0, -1), 0, 0, 0) } - @deprecated("Uses deprecated toTraversable", since="2.13.0") - @Test + @Test @nowarn("cat=deprecation") def emptyToTraversable(): Unit = { assert(Iterable.empty == Array.empty.toIterable) assert(Iterable.empty == Array.empty.toTraversable) diff --git a/test/junit/scala/collection/MapTest.scala b/test/junit/scala/collection/MapTest.scala index 6bfa66955e54..90900619f472 100644 --- a/test/junit/scala/collection/MapTest.scala +++ b/test/junit/scala/collection/MapTest.scala @@ -123,4 +123,9 @@ class MapTest { check(mutable.CollisionProofHashMap(1 -> 1)) } + @Test + def t12228(): Unit = { + assertFalse(Set("") == immutable.BitSet(1)) + assertFalse(Map("" -> 2) == scala.collection.immutable.LongMap(1L -> 2)) + } } diff --git a/test/junit/scala/collection/SortedSetMapEqualsTest.scala b/test/junit/scala/collection/SortedSetMapEqualsTest.scala index 44653696c74f..804a6989f8e7 100644 --- a/test/junit/scala/collection/SortedSetMapEqualsTest.scala +++ b/test/junit/scala/collection/SortedSetMapEqualsTest.scala @@ -1,6 +1,7 @@ package scala.collection -import org.junit.{Assert, Test}, Assert.assertEquals +import org.junit.{Assert, Test} +import Assert.{assertEquals, assertNotEquals} class SortedSetMapEqualsTest { @Test @@ -68,4 +69,60 @@ class SortedSetMapEqualsTest { } assertEquals(m1, m2) } + + @Test + def compareSortedMapKeysByOrdering(): Unit = { + val ord: Ordering[String] = _ compareToIgnoreCase _ + + val itm1 = scala.collection.immutable.TreeMap("A" -> "2")(ord) + val itm2 = scala.collection.immutable.TreeMap("a" -> "2")(ord) + val mtm1 = scala.collection.mutable.TreeMap("A" -> "2")(ord) + val mtm2 = scala.collection.mutable.TreeMap("a" -> "2")(ord) + + assertEquals(itm1, itm2) + assertEquals(mtm1, mtm2) + + assertEquals(itm1, mtm2) + assertEquals(mtm1, itm2) + + val m1 = Map("A" -> "2") + val m2 = Map("a" -> "2") + + for (m <- List(m1, m2); tm <- List[Map[String, String]](itm1, itm2, mtm1, mtm2)) + assertEquals(m, tm) // uses keys in `m` to look up values in `tm`, which always succeeds + + assertEquals(itm1, m1) + assertEquals(mtm1, m1) + + assertNotEquals(itm2, m1) // uses key in `itm2` ("a") to look up in `m1`, which fails + assertNotEquals(mtm2, m1) + } + + @Test + def compareSortedSetsByOrdering(): Unit = { + val ord: Ordering[String] = _ compareToIgnoreCase _ + + val its1 = scala.collection.immutable.TreeSet("A")(ord) + val its2 = scala.collection.immutable.TreeSet("a")(ord) + val mts1 = scala.collection.mutable.TreeSet("A")(ord) + val mts2 = scala.collection.mutable.TreeSet("a")(ord) + + assertEquals(its1, its2) + assertEquals(mts1, mts2) + + assertEquals(its1, mts2) + assertEquals(mts1, its2) + + val s1 = Set("A") + val s2 = Set("a") + + for (m <- List(s1, s2); tm <- List[Set[String]](its1, its2, mts1, mts2)) + assertEquals(m, tm) // uses keys in `m` to look up values in `tm`, which always succeeds + + assertEquals(its1, s1) + assertEquals(mts1, s1) + + assertNotEquals(its2, s1) // uses key in `its2` ("a") to look up in `s1`, which fails + assertNotEquals(mts2, s1) + } } diff --git a/test/junit/scala/collection/ToConserveTest.scala b/test/junit/scala/collection/ToConserveTest.scala new file mode 100644 index 000000000000..b3acbd5f13b2 --- /dev/null +++ 
b/test/junit/scala/collection/ToConserveTest.scala @@ -0,0 +1,108 @@ +package scala.collection + +import org.junit.Assert.{assertNotSame, assertSame} +import org.junit.Test +import org.junit.runner.RunWith +import org.junit.runners.JUnit4 + +import scala.annotation.nowarn +import scala.collection.{immutable => i, mutable => m} +import scala.language.implicitConversions +import scala.{collection => c} + +@RunWith(classOf[JUnit4]) +@nowarn("cat=deprecation") +class ToConserveTest { + // scala/bug#12188 + implicit def toAnyRefFactory[A, CC[_] <: AnyRef](factory: c.IterableFactory[CC]): c.Factory[A, AnyRef] = + c.IterableFactory.toFactory(factory) + implicit def toFactory[K, V, CC[_, _] <: AnyRef](factory: MapFactory[CC]): Factory[(K, V), AnyRef] = + c.MapFactory.toFactory(factory) + + @Test def toConserveList: Unit = { + val l: c.Iterable[Int] = (1 to 3).toList + + assertSame(l, l.toList) + assertSame(l, l.toSeq) + assertSame(l, l.toIterable) + + assertSame(l, l.to(c.Iterable)) + assertSame(l, l.to(i.Iterable)) + + assertSame(l, l.to(c.Seq)) + assertSame(l, l.to(i.Seq)) + + assertSame(l, l.to(c.LinearSeq)) + assertSame(l, l.to(i.LinearSeq)) + + assertSame(l, l.to(List)) + } + + @Test def toConserveImmutableHashSet: Unit = { + val s: c.Iterable[Int] = (1 to 10).to(i.HashSet) + assertSame(s, s.toSet) + assertSame(s, s.toIterable) + + assertSame(s, s.to(c.Iterable)) + assertSame(s, s.to(i.Iterable)) + + assertSame(s, s.to(c.Set)) + assertSame(s, s.to(i.Set)) + + assertSame(s, s.to(i.HashSet)) + } + + @Test def toConserveImmutableHashMap: Unit = { + val m: c.Iterable[(Int, Int)] = (1 to 10).map(x => (x, x)).to(i.HashMap): i.Map[Int, Int] + + assertSame(m, m.toMap) + assertSame(m, m.toIterable) + + assertSame(m, m.to(c.Iterable)) + assertSame(m, m.to(i.Iterable)) + + assertSame(m, m.to(c.Map)) + assertSame(m, m.to(i.Map)) + + assertSame(m, m.to(i.HashMap)) + } + + @Test def toConserveLazyList: Unit = { + val l: c.Iterable[Int] = LazyList.from(1 to 10) + + assertSame(l, l.toSeq) + assertSame(l, l.toIterable) + + assertSame(l, l.to(c.Iterable)) + assertSame(l, l.to(i.Iterable)) + + assertSame(l, l.to(c.Seq)) + assertSame(l, l.to(i.Seq)) + + assertSame(l, l.to(c.LinearSeq)) + assertSame(l, l.to(i.LinearSeq)) + + assertSame(l, l.to(LazyList)) + } + + @Test def toRebuildMutable: Unit = { + val s: c.Iterable[Int] = (1 to 3).to(m.HashSet) + assertSame(s, s.toIterable) // slightly inconsistent... + assertNotSame(s, s.to(c.Iterable)) + assertNotSame(s, s.to(m.Iterable)) + assertNotSame(s, s.to(c.Set)) + assertNotSame(s, s.to(m.Set)) + assertNotSame(s, s.to(m.HashSet)) + + val b: c.Iterable[Int] = (1 to 6).to(m.ArrayBuffer) + assertSame(b, b.toIterable) // slightly inconsistent... 
+ assertNotSame(b, b.toBuffer) + assertNotSame(b, b.to(c.Iterable)) + assertNotSame(b, b.to(m.Iterable)) + assertNotSame(b, b.to(c.Seq)) + assertNotSame(b, b.to(m.Seq)) + assertNotSame(b, b.to(m.Buffer)) + assertNotSame(b, b.to(m.IndexedBuffer)) + assertNotSame(b, b.to(m.ArrayBuffer)) + } +} diff --git a/test/junit/scala/collection/ViewTest.scala b/test/junit/scala/collection/ViewTest.scala index 89418aa6a024..cb5814654e37 100644 --- a/test/junit/scala/collection/ViewTest.scala +++ b/test/junit/scala/collection/ViewTest.scala @@ -1,10 +1,10 @@ package scala.collection -import scala.collection.immutable.List import org.junit.Assert._ import org.junit.Test import scala.collection.mutable.{ArrayBuffer, ListBuffer} +import scala.tools.testkit.AssertUtil.assertSameElements class ViewTest { @@ -113,4 +113,21 @@ class ViewTest { def _toString(): Unit = { assertEquals("View()", View(1, 2, 3).toString) } + + // see scala/scala#9388 + @Test + def patch(): Unit = { + // test re-iterability + val v1 = List(2).view.patch(1, List(3, 4, 5).iterator, 0) + assertSameElements(Seq(2, 3, 4, 5), v1.toList) + assertSameElements(Seq(2, 3, 4, 5), v1.toList) // check that it works twice + + // https://github.com/scala/scala/pull/9388#discussion_r709392221 + val v2 = List(2).view.patch(1, Nil, 0) + assert(!v2.isEmpty) + + // https://github.com/scala/scala/pull/9388#discussion_r709481748 + val v3 = Nil.view.patch(0, List(1).iterator, 0) + assert(v3.knownSize != 0) + } } diff --git a/test/junit/scala/collection/concurrent/ConcurrentMapTestHelper.scala b/test/junit/scala/collection/concurrent/ConcurrentMapTestHelper.scala new file mode 100644 index 000000000000..717f60a8329b --- /dev/null +++ b/test/junit/scala/collection/concurrent/ConcurrentMapTestHelper.scala @@ -0,0 +1,51 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.collection.concurrent + +import scala.concurrent.duration.SECONDS + +object ConcurrentMapTestHelper { + def genericTest_filterInPlace(newMap: => Map[String, Int]): Unit = { + val tester = new ConcurrentMapTester(newMap += "k1" -> 0 += "k2" -> 0) + + tester.runTasks(5, SECONDS)( + _.filterInPlace((_, v) => { + SECONDS.sleep(2) + v > 0 + }), + map => { + SECONDS.sleep(1) + map("k1") = 1 + }, + ) + + tester.assertContainsEntry("k1", 1) // can get `0` if racy implementation + tester.assertDoesNotContain("k2") + } + + def genericTest_mapValuesInPlace(newMap: => Map[String, Int]): Unit = { + val tester = new ConcurrentMapTester(newMap += "k" -> 0) + tester.runTasks(5, SECONDS)( + _.mapValuesInPlace((_, v) => { + SECONDS.sleep(2) + v + 5 + }), + map => { + SECONDS.sleep(1) + map("k") = 1 + }, + ) + + tester.assertExistsEntry("k", x => x == 1 || x == 6) // can get `5` if racy implementation + } +} diff --git a/test/junit/scala/collection/concurrent/ConcurrentMapTester.scala b/test/junit/scala/collection/concurrent/ConcurrentMapTester.scala new file mode 100644 index 000000000000..f88c51a3a1c0 --- /dev/null +++ b/test/junit/scala/collection/concurrent/ConcurrentMapTester.scala @@ -0,0 +1,33 @@ +package scala.collection.concurrent + +import java.util.concurrent.Executors +import scala.concurrent.duration.TimeUnit + +class ConcurrentMapTester[K, V](map: Map[K, V]) { + def runTasks(executionTimeout: Long, unit: TimeUnit)(tasks: (Map[K, V] => Unit)*): Unit = { + val exec = Executors.newCachedThreadPool() + for (task <- tasks) exec.execute(() => task(map)) + exec.shutdown() + exec.awaitTermination(executionTimeout, unit) + } + + @throws[AssertionError] + def assertContainsEntry(k: K, v: V): Unit = { + val value = map.get(k) + assert(value.isDefined, s"map does not contain key '$k'") + assert(value.contains(v), s"key '$k' is mapped to '${value.get}', not to '$v'") + } + + @throws[AssertionError] + def assertExistsEntry(k: K, p: V => Boolean): Unit = { + val value = map.get(k) + assert(value.isDefined, s"map does not contain key '$k'") + assert(value.exists(p), s"key '$k' is mapped to '${value.get}', which does not match the predicate") + } + + @throws[AssertionError] + def assertDoesNotContain(k: K): Unit = { + val value = map.get(k) + assert(value.isEmpty, s"key '$k' is not empty and is mapped to '${value.get}'") + } +} diff --git a/test/junit/scala/collection/concurrent/TrieMapTest.scala b/test/junit/scala/collection/concurrent/TrieMapTest.scala index 287e914dc6bf..46f5fe0ff763 100644 --- a/test/junit/scala/collection/concurrent/TrieMapTest.scala +++ b/test/junit/scala/collection/concurrent/TrieMapTest.scala @@ -5,6 +5,7 @@ import org.junit.Assert.assertEquals import scala.util.hashing.Hashing import scala.tools.testkit.AssertUtil.assertThrows +import scala.util.chaining._ @deprecated("Tests deprecated API", since="2.13") class TrieMapTest { @@ -57,6 +58,16 @@ class TrieMapTest { check(List(("k", "v")))(_.view.mapValues(x => x)) } + @Test + def filterInPlace(): Unit = { + ConcurrentMapTestHelper.genericTest_filterInPlace(TrieMap.empty) + } + + @Test + def mapValuesInPlace(): Unit = { + ConcurrentMapTestHelper.genericTest_mapValuesInPlace(TrieMap.empty) + } + @Test def customHashingAndEquiv_10481(): Unit = { val h = new Hashing[Int] { def hash(i: Int) = i % 4 } @@ -659,4 +670,39 @@ class TrieMapTest { assertEquals(hashMap4.updateWith(2)(noneAnytime), None) assertEquals(hashMap4, TrieMap(1 -> "a")) } + + @Test + def knownSizeConsistency(): Unit = { + def check(tm: TrieMap[_, _]): 
Unit = { + def msg = s"for ${tm.toString()}" + val snapshot = tm.readOnlySnapshot() + val initialKS = snapshot.knownSize + val size = snapshot.size + assert(initialKS == -1 || initialKS == size, msg) + val laterKS = snapshot.knownSize + assert(laterKS == -1 || laterKS == size, msg) + assert(laterKS >= initialKS, msg) // assert we haven't forgotten the size + } + + check(TrieMap.empty) + check(TrieMap()) + check(TrieMap("k" -> "v")) + check(TrieMap.empty[String, String].tap(_("k") = "v")) + check(TrieMap.empty[String, String].tap(_.put("k", "v"))) + check(TrieMap.from((1 to 5).map(x => x -> x))) + check(TrieMap.from((1 to 10).map(x => x -> x))) + check(TrieMap.from((1 to 100).map(x => x -> x))) + } + + @Test + def isEmptyCorrectness(): Unit = { + assert(TrieMap.empty.isEmpty) + assert(TrieMap().isEmpty) + assert(!TrieMap("k" -> "v").isEmpty) + assert(!TrieMap.empty[String, String].tap(_("k") = "v").isEmpty) + assert(!TrieMap.empty[String, String].tap(_.put("k", "v")).isEmpty) + assert(!TrieMap.from((1 to 5).map(x => x -> x)).isEmpty) + assert(!TrieMap.from((1 to 10).map(x => x -> x)).isEmpty) + assert(!TrieMap.from((1 to 100).map(x => x -> x)).isEmpty) + } } diff --git a/test/junit/scala/collection/convert/JConcurrentMapWrapperTest.scala b/test/junit/scala/collection/convert/JConcurrentMapWrapperTest.scala new file mode 100644 index 000000000000..b4712207ea01 --- /dev/null +++ b/test/junit/scala/collection/convert/JConcurrentMapWrapperTest.scala @@ -0,0 +1,30 @@ +package scala.collection.convert + +import org.junit.Test + +import java.util.concurrent.{ConcurrentHashMap, ConcurrentSkipListMap} + +import scala.collection.concurrent.ConcurrentMapTestHelper +import scala.jdk.CollectionConverters._ + +class JConcurrentMapWrapperTest { + @Test + def CHM_filterInPlace(): Unit = { + ConcurrentMapTestHelper.genericTest_filterInPlace(new ConcurrentHashMap[String, Int].asScala) + } + + @Test + def CHM_mapValuesInPlace(): Unit = { + ConcurrentMapTestHelper.genericTest_mapValuesInPlace(new ConcurrentHashMap[String, Int].asScala) + } + + @Test + def CSLM_filterInPlace(): Unit = { + ConcurrentMapTestHelper.genericTest_filterInPlace(new ConcurrentSkipListMap[String, Int].asScala) + } + + @Test + def CSLM_mapValuesInPlace(): Unit = { + ConcurrentMapTestHelper.genericTest_mapValuesInPlace(new ConcurrentSkipListMap[String, Int].asScala) + } +} diff --git a/test/junit/scala/collection/immutable/HashMapTest.scala b/test/junit/scala/collection/immutable/HashMapTest.scala index d9f1bf53d27a..a73c02d000b3 100644 --- a/test/junit/scala/collection/immutable/HashMapTest.scala +++ b/test/junit/scala/collection/immutable/HashMapTest.scala @@ -8,6 +8,7 @@ import org.junit.runner.RunWith import org.junit.runners.JUnit4 import scala.tools.testkit.AllocationTest +import scala.tools.testkit.AssertUtil.assertThrows @RunWith(classOf[JUnit4]) class HashMapTest extends AllocationTest{ @@ -339,4 +340,10 @@ class HashMapTest extends AllocationTest{ check(cs => TreeMap(cs: _*)) // exercise special case for HashMap/HasForEachEntry check(cs => HashMap(cs: _*).withDefault(_ => ???)) // default cases } + + @Test + def noSuchElement(): Unit = { + assertThrows[NoSuchElementException](HashMap(1->1)(2), _ == "key not found: 2") + assertThrows[NoSuchElementException](HashMap.empty(3), _ == "key not found: 3") + } } diff --git a/test/junit/scala/collection/immutable/ListSetTest.scala b/test/junit/scala/collection/immutable/ListSetTest.scala index 395da88c75b6..4ce4fc5a6206 100644 --- a/test/junit/scala/collection/immutable/ListSetTest.scala 
+++ b/test/junit/scala/collection/immutable/ListSetTest.scala @@ -23,7 +23,9 @@ class ListSetTest { @Test def hasTailRecursiveDelete(): Unit = { val s = ListSet(1 to 50000: _*) - try s - 25000 catch { case e: StackOverflowError => fail("A stack overflow occurred") } + try s - 25000 catch { + case e: StackOverflowError => fail("A stack overflow occurred") + } } @Test @@ -50,4 +52,67 @@ class ListSetTest { val s = ListSet(1, 2, 3, 5, 4) assertEquals(List(1, 2, 3, 5, 4), s.iterator.toList) } + + @Test + def hasCorrectOrderAfterPlusPlus(): Unit = { + val foo = ListSet(1) + var bar = foo ++ ListSet() + assertEquals(List(1), bar.iterator.toList) + + bar = foo ++ ListSet(1) + assertEquals(List(1), bar.iterator.toList) + + bar = foo ++ ListSet(2) + assertEquals(List(1, 2), bar.iterator.toList) + + bar = foo ++ ListSet(1, 2) + assertEquals(List(1, 2), bar.iterator.toList) + + bar = foo ++ ListSet(1, 2, 3) + assertEquals(List(1, 2, 3), bar.iterator.toList) + + bar = foo ++ ListSet(1, 2, 3, 4) + assertEquals(List(1, 2, 3, 4), bar.iterator.toList) + + bar = foo ++ ListSet(1, 2, 3, 4, 5) + assertEquals(List(1, 2, 3, 4, 5), bar.iterator.toList) + + bar = foo ++ ListSet(1, 2, 3, 4, 5, 6) + assertEquals(List(1, 2, 3, 4, 5, 6), bar.iterator.toList) + } + + @Test + def smallPlusPlus1(): Unit = { + def check(l1: ListSet[Int], l2: ListSet[Int]) = { + val expected = l1.iterator.toList ++ l2.iterator.filterNot(l1).toList + val actual = (l1 ++ l2).iterator.toList + assertEquals(expected, actual) + } + + for (start0 <- 0 until 6; + end0 <- start0 until 6; + start1 <- 0 until 6; + end1 <- start1 until 6) { + val ls0 = ListSet((start0 until end0): _*) + val ls1 = ListSet((start1 until end1): _*) + check(ls0, ls1) + } + } + @Test + def smallPlusPlusAfter(): Unit = { + def check(l1: ListSet[Int], l2: ListSet[Int]) = { + val expected = l1.iterator.toList ++ l2.iterator.filterNot(l1).toList + val actual = (l1 ++ l2).iterator.toList + assertEquals(expected, actual) + } + + for (start0 <- 0 until 9; + end0 <- start0 until 9; + start1 <- 10 until 19; + end1 <- start1 until 19) { + val ls0 = ListSet((start0 until end0): _*) + val ls1 = ListSet((start1 until end1): _*) + check(ls0, ls1) + } + } } diff --git a/test/junit/scala/collection/immutable/SeqMapTest.scala b/test/junit/scala/collection/immutable/SeqMapTest.scala new file mode 100644 index 000000000000..3d8cce9a5fb0 --- /dev/null +++ b/test/junit/scala/collection/immutable/SeqMapTest.scala @@ -0,0 +1,42 @@ +package scala.collection.immutable + +import org.junit.Test +import org.junit.Assert.assertEquals + +import scala.collection.mutable + +class SeqMapTest { + private def checkClass(map: SeqMap[_, _], simpleName: String): Unit = { + assertEquals(simpleName, map.getClass.getSimpleName.stripSuffix("$")) + } + + @Test + def applyFromSmallSizeSpecialization(): Unit = { + checkClass(SeqMap(), "EmptySeqMap") + checkClass(SeqMap(1 -> 1), "SeqMap1") + checkClass(SeqMap(1 -> 1, 2 -> 2), "SeqMap2") + checkClass(SeqMap(1 -> 1, 2 -> 2, 3 -> 3), "SeqMap3") + checkClass(SeqMap(1 -> 1, 2 -> 2, 3 -> 3, 4 -> 4), "SeqMap4") + checkClass(SeqMap(1 -> 1, 2 -> 2, 3 -> 3, 4 -> 4, 5 -> 5), "VectorMap") + + // no knownSize + checkClass(SeqMap.from(List(1 -> 1)), "SeqMap1") + } + + @Test + def newBuilderSmallSizeSpecialization(): Unit = { + type Builder = mutable.Builder[(Int, Int), SeqMap[Int, Int]] + def build(op: Builder => Builder): SeqMap[Int, Int] = + op(SeqMap.newBuilder[Int, Int]).result() + + checkClass(build(identity), "EmptySeqMap") + checkClass(build(_ += 1 -> 1), "SeqMap1") + 
checkClass(build(_ += 1 -> 1 += 2 -> 2), "SeqMap2") + checkClass(build(_ += 1 -> 1 += 2 -> 2 += 3 -> 3), "SeqMap3") + checkClass(build(_ += 1 -> 1 += 2 -> 2 += 3 -> 3 += 4 -> 4), "SeqMap4") + checkClass(build(_ += 1 -> 1 += 2 -> 2 += 3 -> 3 += 4 -> 4 += 5 -> 5), "VectorMap") + + // `addAll` + checkClass(build(_ ++= List(1 -> 1)), "SeqMap1") + } +} diff --git a/test/junit/scala/collection/immutable/SmallMapTest.scala b/test/junit/scala/collection/immutable/SmallMapTest.scala index 1c182276b29d..c6c278676f34 100644 --- a/test/junit/scala/collection/immutable/SmallMapTest.scala +++ b/test/junit/scala/collection/immutable/SmallMapTest.scala @@ -6,14 +6,14 @@ import org.junit._ import scala.tools.testkit.AllocationTest class SmallMapTest extends AllocationTest { - def iterator(m:Map[_,_]) = m.iterator - def keysIterator(m:Map[_,_]) = m.keysIterator - def valuesIterator(m:Map[_,_]) = m.valuesIterator + def iterator(m: Map[_,_]) = m.iterator + def keysIterator(m: Map[_,_]) = m.keysIterator + def valuesIterator(m: Map[_,_]) = m.valuesIterator - //we use this side effect to avoid the git optimising away the tuples - //but without affecting the allocations + // we use this side effect to avoid the jit optimising away the tuples + // but without affecting the allocations val nonAllocationResult = new Array[Any](5) - def consume(it:Iterator[_]): Int = { + def consume(it: Iterator[_]): Int = { var size = 0 nonAllocationResult(0) = it while (it.hasNext) { @@ -22,7 +22,7 @@ class SmallMapTest extends AllocationTest { } size } - def consume1(it:Iterator[_]): Int = { + def consume1(it: Iterator[_]): Int = { nonAllocationResult(0) = it nonAllocationResult(1) = it.next() 1 diff --git a/test/junit/scala/collection/immutable/StringLikeTest.scala b/test/junit/scala/collection/immutable/StringLikeTest.scala index 4de7763efcd3..fd0f228162f3 100644 --- a/test/junit/scala/collection/immutable/StringLikeTest.scala +++ b/test/junit/scala/collection/immutable/StringLikeTest.scala @@ -64,10 +64,14 @@ class StringLikeTest { assertEquals("no trim toDouble", 2.0d, sOk.toDouble, 0.1d) assertEquals("no trim toFloat", 2.0f, sOk.toFloat, 0.1f) - assertThrows[java.lang.NumberFormatException](sNull.toInt, {s => s == "null"}) - assertThrows[java.lang.NumberFormatException](sNull.toLong, {s => s == "null"}) - assertThrows[java.lang.NumberFormatException](sNull.toShort, {s => s == "null"}) - assertThrows[java.lang.NumberFormatException](sNull.toByte, {s => s == "null"}) + // JDK 17 gives the nicer message + def isNullStringMessage(s: String) = + s == "null" || s == "Cannot parse null string" + + assertThrows[java.lang.NumberFormatException](sNull.toInt, isNullStringMessage) + assertThrows[java.lang.NumberFormatException](sNull.toLong, isNullStringMessage) + assertThrows[java.lang.NumberFormatException](sNull.toShort, isNullStringMessage) + assertThrows[java.lang.NumberFormatException](sNull.toByte, isNullStringMessage) assertThrows[java.lang.NullPointerException](sNull.toDouble) assertThrows[java.lang.NullPointerException](sNull.toFloat) diff --git a/test/junit/scala/collection/immutable/VectorTest.scala b/test/junit/scala/collection/immutable/VectorTest.scala index f6ae171d70fd..685100f4c821 100644 --- a/test/junit/scala/collection/immutable/VectorTest.scala +++ b/test/junit/scala/collection/immutable/VectorTest.scala @@ -58,6 +58,14 @@ class VectorTest { assertSame(m, Vector.apply(m: _*)) } + @Test def factoryReuseArraySet(): Unit = { + val arraySeq = ArraySeq[AnyRef]("a", "b") + val vectorFromArraySeq = Vector.from(arraySeq) + 
val prefix1Field = classOf[Vector[_]].getDeclaredField("prefix1") + prefix1Field.setAccessible(true) + assertSame(arraySeq.unsafeArray, prefix1Field.get(vectorFromArraySeq)) + } + @Test def checkSearch(): Unit = SeqTests.checkSearch(Vector(0 to 1000: _*), 15, implicitly[Ordering[Int]]) @Test diff --git a/test/junit/scala/collection/mutable/ArrayBufferTest.scala b/test/junit/scala/collection/mutable/ArrayBufferTest.scala index 8f7ae6fe1fb2..1d934a63b967 100644 --- a/test/junit/scala/collection/mutable/ArrayBufferTest.scala +++ b/test/junit/scala/collection/mutable/ArrayBufferTest.scala @@ -4,7 +4,7 @@ import org.junit.Test import org.junit.Assert.{assertEquals, assertTrue} import scala.annotation.nowarn -import scala.tools.testkit.AssertUtil.{assertThrows, fail} +import scala.tools.testkit.AssertUtil.{assertSameElements, assertThrows, fail} import scala.tools.testkit.ReflectUtil.{getMethodAccessible, _} class ArrayBufferTest { @@ -447,4 +447,29 @@ class ArrayBufferTest { assertEquals(32, resizeDown(64, 30)) assertEquals(21, resizeDown(42, 17)) } + + // scala/bug#12121 + @Test + def insertAll_self(): Unit = { + val buf = ArrayBuffer(1, 2, 3) + buf.insertAll(1, buf) + assertSameElements(List(1, 1, 2, 3, 2, 3), buf) + } + + // scala/bug#12284 + @Test + def viewConsistency(): Unit = { + def check[U](op: ArrayBuffer[Int] => U): Unit = { + val buf = ArrayBuffer.from(1 to 50) + val view = buf.view + op(buf) + assertSameElements(buf, view) + } + + check(_.clear()) + check(_.dropRightInPlace(30)) + check(_.dropInPlace(30)) + check(_ ++= (1 to 100)) + check(_.insertAll(1, 1 to 100)) + } } diff --git a/test/junit/scala/collection/mutable/ArraySortingTest.scala b/test/junit/scala/collection/mutable/ArraySortingTest.scala index 2e98fd6ac350..dd97587e981a 100644 --- a/test/junit/scala/collection/mutable/ArraySortingTest.scala +++ b/test/junit/scala/collection/mutable/ArraySortingTest.scala @@ -25,7 +25,7 @@ class ArraySortingTest { java.util.Arrays.sort(test) scala.util.Sorting.quickSort(cant)(CanOrder) assert( test(6) == 1 ) - assert( test.toIterable.lazyZip(cant).forall(_ == _.i) ) + assert( test.lazyZip(cant).forall(_ == _.i) ) } @Test diff --git a/test/junit/scala/collection/mutable/MutationTrackingTest.scala b/test/junit/scala/collection/mutable/MutationTrackingTest.scala index 9ff9511320e3..98ed439ee0bd 100644 --- a/test/junit/scala/collection/mutable/MutationTrackingTest.scala +++ b/test/junit/scala/collection/mutable/MutationTrackingTest.scala @@ -18,34 +18,40 @@ import java.util.ConcurrentModificationException import org.junit.Test import scala.annotation.nowarn +import scala.annotation.unchecked.{uncheckedVariance => uV} import scala.tools.testkit.AssertUtil.assertThrows abstract class MutationTrackingTest[+C <: Iterable[_]](factory: Factory[Int, C]) { - private def runOp(op: C => Any, viewOrIterator: C => IterableOnceOps[_, AnyConstr, _]): Unit = { - val coll = (factory.newBuilder += 1 += 2 += 3 += 4).result() + private[this] type VoI = C => IterableOnceOps[_, AnyConstr, _] + // if you do bad things with this by returning a different builder, it WILL bite you + protected[this] type BuildSequence = Builder[Int, C @uV] => Builder[Int, C @uV] + protected[this] val defaultBuildSequence: BuildSequence = _ += 1 += 2 += 3 += 4 + + private[this] def runOp(op: C => Any, bs: BuildSequence, viewOrIterator: VoI): Unit = { + val coll = bs(factory.newBuilder).result() val it = viewOrIterator(coll) op(coll) it.foreach(_ => ()) } - private def runOpMaybeThrowing(op: C => Any, - throws: Boolean, - 
viewOrIterator: C => IterableOnceOps[_, AnyConstr, _]): Unit = { - if (throws) assertThrows[ConcurrentModificationException](runOp(op, viewOrIterator), _ contains "iteration") - else runOp(op, viewOrIterator) + private[this] def runOpMaybeThrowing(op: C => Any, bs: BuildSequence, throws: Boolean, viewOrIterator: VoI): Unit = { + if (throws) assertThrows[ConcurrentModificationException](runOp(op, bs, viewOrIterator), _ contains "iteration") + else runOp(op, bs, viewOrIterator) } - private def runOpForViewAndIterator(op: C => Any, throws: Boolean): Unit = { - runOp(op, _.view) // never throws - runOpMaybeThrowing(op, throws, _.iterator) - runOpMaybeThrowing(op, throws, _.view.iterator) + private[this] def runOpForViewAndIterator(op: C => Any, bs: BuildSequence, throws: Boolean): Unit = { + runOp(op, bs, _.view) // never throws + runOpMaybeThrowing(op, bs, throws, _.iterator) + runOpMaybeThrowing(op, bs, throws, _.view.iterator) } /** Checks that no exception is thrown by an operation. */ - def checkFine(op: C => Any): Unit = runOpForViewAndIterator(op, throws = false) + protected[this] def checkFine(op: C => Any, buildSequence: BuildSequence = defaultBuildSequence): Unit = + runOpForViewAndIterator(op, buildSequence, throws = false) /** Checks that an exception is thrown by an operation. */ - def checkThrows(op: C => Any): Unit = runOpForViewAndIterator(op, throws = true) + protected[this] def checkThrows(op: C => Any, buildSequence: BuildSequence = defaultBuildSequence): Unit = + runOpForViewAndIterator(op, buildSequence, throws = true) @Test def nop(): Unit = checkFine { _ => () } @@ -94,6 +100,29 @@ object MutationTrackingTest { def transform(): Unit = checkThrows { _.transform(_ + 1) } } + trait IndexedSeqTest { self: MutationTrackingTest[IndexedSeq[Int]] => + @Test + def mapInPlace(): Unit = checkThrows { _.mapInPlace(_ + 1) } + + @Test + def sortInPlace(): Unit = { + checkThrows { _.sortInPlace() } + checkFine (_.sortInPlace(), _ += 1) + } + + @Test + def sortInPlaceWith(): Unit = { + checkThrows { _.sortInPlaceWith(_ > _) } + checkFine (_.sortInPlaceWith(_ > _), _ += 1) + } + + @Test + def sortInPlaceBy(): Unit = { + checkThrows { _.sortInPlaceBy(_ * -1) } + checkFine (_.sortInPlaceBy(_ * -1), _ += 1) + } + } + trait BufferTest extends GrowableTest with ShrinkableTest with SeqTest { self: MutationTrackingTest[Buffer[Int]] => @Test def insert(): Unit = checkThrows { _.insert(0, 5) } @@ -210,4 +239,15 @@ package MutationTrackingTestImpl { @Test def filterInPlace(): Unit = checkThrows { _.filterInPlace(_ => true) } } + + class ArrayBufferTest extends MutationTrackingTest(ArrayBuffer) with BufferTest with IndexedSeqTest { + @Test + def clearAndShrink(): Unit = checkThrows { _ clearAndShrink 2 } + + @Test + def trimToSize(): Unit = checkFine { _.trimToSize() } + + @Test + def sizeHint(): Unit = checkFine { _ sizeHint 16 } + } } diff --git a/test/junit/scala/collection/mutable/StackTest.scala b/test/junit/scala/collection/mutable/StackTest.scala index 5576a569b37d..a5352c85e82e 100644 --- a/test/junit/scala/collection/mutable/StackTest.scala +++ b/test/junit/scala/collection/mutable/StackTest.scala @@ -24,4 +24,18 @@ class StackTest { @Test def sliding(): Unit = ArrayDequeTest.genericSlidingTest(Stack, "Stack") + + @Test def `popAll preserves iteration order`: Unit = { + val stack = Stack.from(1 to 10) + val list = stack.toList + assertEquals(list, stack.popAll()) + assertTrue(stack.isEmpty) + } + + @Test def `popWhile preserves iteration order`: Unit = { + val stack = Stack.tabulate(10)(_ 
* 10) + val list = stack.toList.take(5) + assertEquals(list, stack.popWhile(_ < 50)) + assertEquals(5, stack.size) + } } diff --git a/test/junit/scala/concurrent/FutureTest.scala b/test/junit/scala/concurrent/FutureTest.scala index 45069e274170..8c3e3310f687 100644 --- a/test/junit/scala/concurrent/FutureTest.scala +++ b/test/junit/scala/concurrent/FutureTest.scala @@ -6,8 +6,46 @@ import org.junit.Test import scala.tools.testkit.AssertUtil._ import scala.util.Try +import duration.Duration.Inf class FutureTest { + @Test + def testZipWithFailFastBothWays(): Unit = { + import ExecutionContext.Implicits.global + + val p1 = Promise[Int]() + val p2 = Promise[Int]() + + // Make sure that the combined future fails early, after the earlier failure occurs, and does not + // wait for the later failure regardless of which one is on the left and which is on the right + p1.failure(new Exception("Boom Early")) + val f1 = p1.future + val f2 = p2.future + + val scala.util.Failure(fa) = Try(Await.result(f1.zip(f2), Inf)) + val scala.util.Failure(fb) = Try(Await.result(f2.zip(f1), Inf)) + + val scala.util.Failure(fc) = Try(Await.result(f1.zipWith(f2)((_, _)), Inf)) + val scala.util.Failure(fd) = Try(Await.result(f2.zipWith(f1)((_, _)), Inf)) + + val scala.util.Failure(fe) = Try(Await.result(Future.sequence(Seq(f1, f2)), Inf)) + val scala.util.Failure(ff) = Try(Await.result(Future.sequence(Seq(f2, f1)), Inf)) + + val scala.util.Failure(fg) = Try(Await.result(Future.traverse(Seq(0, 1))(Seq(f1, f2)(_)), Inf)) + val scala.util.Failure(fh) = Try(Await.result(Future.traverse(Seq(0, 1))(Seq(f1, f2)(_)), Inf)) + + // Make sure the early failure is always reported, regardless of whether it's on + // the left or right of the zip/zipWith/sequence/traverse + assert(fa.getMessage == "Boom Early") + assert(fb.getMessage == "Boom Early") + assert(fc.getMessage == "Boom Early") + assert(fd.getMessage == "Boom Early") + assert(fe.getMessage == "Boom Early") + assert(ff.getMessage == "Boom Early") + assert(fg.getMessage == "Boom Early") + assert(fh.getMessage == "Boom Early") + } + @Test def `bug/issues#10513 firstCompletedOf must not leak references`(): Unit = { val unfulfilled = Promise[AnyRef]() diff --git a/test/junit/scala/reflect/internal/SubstMapTest.scala b/test/junit/scala/reflect/internal/SubstMapTest.scala new file mode 100644 index 000000000000..7719e3a9a968 --- /dev/null +++ b/test/junit/scala/reflect/internal/SubstMapTest.scala @@ -0,0 +1,13 @@ +package scala.reflect.internal + +import scala.tools.nsc.symtab.SymbolTableForUnitTesting + +class SubstMapTest { + object symbolTable extends SymbolTableForUnitTesting + import symbolTable._ + + // compile-test for https://github.com/scala/community-build/pull/1413 + new SubstMap[String](Nil, Nil) { + protected def toType(fromtp: Type, tp: String) = fromtp + } +} diff --git a/test/junit/scala/reflect/runtime/ReflectionUtilsShowTest.scala b/test/junit/scala/reflect/runtime/ReflectionUtilsShowTest.scala new file mode 100644 index 000000000000..d7e90be1af42 --- /dev/null +++ b/test/junit/scala/reflect/runtime/ReflectionUtilsShowTest.scala @@ -0,0 +1,19 @@ +package scala.reflect.runtime + +import java.net.{URL, URLClassLoader} + +import org.junit.Test +import org.junit.runner.RunWith +import org.junit.runners.JUnit4 + +@RunWith(classOf[JUnit4]) +class ReflectionUtilsShowTest { + @Test def testGetUrlsCanReturnNull(): Unit = { + val sut = new MyClassLoader(Array.empty[URL]) + assert(ReflectionUtils.show(sut).contains("")) + } +} + +class 
MyClassLoader(urls: Array[URL]) extends URLClassLoader(urls) { + override def getURLs: Array[URL] = null +} diff --git a/test/junit/scala/tools/nsc/DeterminismTest.scala b/test/junit/scala/tools/nsc/DeterminismTest.scala index fa8fd9c9e966..57eda2d5d72b 100644 --- a/test/junit/scala/tools/nsc/DeterminismTest.scala +++ b/test/junit/scala/tools/nsc/DeterminismTest.scala @@ -330,6 +330,14 @@ class DeterminismTest { test(List(code)) } + @Test def testPackageObjectUserLand(): Unit = { + def code = List[SourceFile]( + source("package.scala", "package userland; object `package` { type Throwy = java.lang.Throwable }"), + source("th.scala", "package userland; class th[T <: Throwy](cause: T = null)") + ) + test(code :: Nil) + } + def source(name: String, code: String): SourceFile = new BatchSourceFile(name, code) } diff --git a/test/junit/scala/tools/nsc/PickleWriteTest.scala b/test/junit/scala/tools/nsc/PickleWriteTest.scala index 57dcc7b46d0a..04100950ffd5 100644 --- a/test/junit/scala/tools/nsc/PickleWriteTest.scala +++ b/test/junit/scala/tools/nsc/PickleWriteTest.scala @@ -96,4 +96,61 @@ class PickleWriteTest { new global2.Run().compile(command2.files) assert(!global2.reporter.hasErrors) } + + @Test + def testPickleWriteJava(): Unit = { + val pathFactory = new VirtualFilePathFactory + + val build = new Build(projectsBase, "b1") + val p1 = build.project("p1") + val p1ApiVirtual = VirtualFilePathFactory.path("p1") + p1.scalacOptions ++= List( + "-Ypickle-write", p1ApiVirtual, + "-Ypickle-java", + "-Ystop-after:pickler" + ) + p1.withSource("b1/p1/J.java")( + """ + |package b1.p1; + |public class J { + | public Object foo(Object o) { return o; } + | public T bar(T t) { return t; } + | + | public void ol(scala.Equals o) {} // Equals extends AnyVal + | public void ol(Object o) {} + |} + """.stripMargin) + + val p2 = build.project("p2") + p2.classpath += p1ApiVirtual + p2.withSource("b1/p2/Client.scala")( + """ + |package b1.p2 + |class Client[T] extends b1.p1.J[T] { + | override def foo(o: Object): Object = o + | override def bar(t: T): T = t + | def test(): Unit = { + | // this was incorrectly showing as ambiguous because Unpickler wasn't massaging type refs to Object + | // in Java-defined .sig files. 
+ | ol(Option("")) + | } + |} + """.stripMargin) + + val settings1 = new Settings(Console.println, pathFactory) + settings1.usejavacp.value = true + val argsFile1 = p1.argsFile() + val command1 = new CompilerCommand("@" + argsFile1.toAbsolutePath.toString :: Nil, settings1) + val global1 = new Global(command1.settings) + new global1.Run().compile(command1.files) + assert(!global1.reporter.hasErrors) + + val argsFile2 = p2.argsFile() + val settings2 = new Settings(Console.println, pathFactory) + settings2.usejavacp.value = true + val command2 = new CompilerCommand("@" + argsFile2.toAbsolutePath.toString :: Nil, settings2) + val global2 = new Global(command2.settings) + new global2.Run().compile(command2.files) + assert(!global2.reporter.hasErrors) + } } diff --git a/test/junit/scala/tools/nsc/async/AnnotationDrivenAsyncTest.scala b/test/junit/scala/tools/nsc/async/AnnotationDrivenAsyncTest.scala index 69acde313e20..b179dd2d2da5 100644 --- a/test/junit/scala/tools/nsc/async/AnnotationDrivenAsyncTest.scala +++ b/test/junit/scala/tools/nsc/async/AnnotationDrivenAsyncTest.scala @@ -5,7 +5,6 @@ import java.io.File import java.lang.reflect.InvocationTargetException import java.nio.file.{Files, Paths} import java.util.concurrent.CompletableFuture - import org.junit.Assert.assertEquals import org.junit.{Assert, Ignore, Test} @@ -453,11 +452,9 @@ class AnnotationDrivenAsyncTest { } } catch { case ve: VerifyError => - val asm = out.listFiles().filter(_.getName.contains("stateMachine")).flatMap { file => - import scala.sys.process._ - val javap = List("/usr/local/bin/javap", "-v", file.getAbsolutePath).!! + val asm = out.listFiles().flatMap { file => val asmp = AsmUtils.textify(AsmUtils.readClass(file.getAbsolutePath)) - javap :: asmp :: Nil + asmp :: Nil }.mkString("\n\n") throw new AssertionError(asm, ve) } finally { @@ -495,18 +492,33 @@ abstract class AnnotationDrivenAsyncPlugin extends Plugin { case dd: DefDef if dd.symbol.hasAnnotation(customAsyncSym) => deriveDefDef(dd) { rhs => - val applyMethod = - q"""def apply(tr: _root_.scala.util.Either[_root_.scala.Throwable, _root_.scala.AnyRef]): _root_.scala.Unit = $rhs""" - val applyMethodMarked = global.async.markForAsyncTransform(dd.symbol, applyMethod, awaitSym, Map.empty) + val fsmImplName = currentUnit.freshTermName("fsm$") + val externalFsmMethod = true @nowarn("cat=lint-missing-interpolator") val name = TypeName("stateMachine$async") - val wrapped = + val wrapped = if (!externalFsmMethod) { + val applyMethod = + q"""def apply(tr: _root_.scala.util.Either[_root_.scala.Throwable, _root_.scala.AnyRef]): _root_.scala.Unit = $rhs""" + val applyMethodMarked = global.async.markForAsyncTransform(dd.symbol, applyMethod, awaitSym, Map.empty) q""" class $name extends _root_.scala.tools.nsc.async.CustomFutureStateMachine { - $applyMethodMarked + $applyMethodMarked } new $name().start() """ + } else { + val applyMethod = + q"""def $fsmImplName(self: $name, tr: _root_.scala.util.Either[_root_.scala.Throwable, _root_.scala.AnyRef]): _root_.scala.Unit = $rhs""" + val applyMethodMarked = global.async.markForAsyncTransform(dd.symbol, applyMethod, awaitSym, Map.empty) + q""" + $applyMethodMarked + class $name extends _root_.scala.tools.nsc.async.CustomFutureStateMachine { + def apply(tr: _root_.scala.util.Either[_root_.scala.Throwable, _root_.scala.AnyRef]): _root_.scala.Unit = + $fsmImplName(this, tr) + } + new $name().start() + """ + } val tree = q""" diff --git a/test/junit/scala/tools/nsc/backend/jvm/BTypesTest.scala 
b/test/junit/scala/tools/nsc/backend/jvm/BTypesTest.scala index 27cd78a375ef..f41dce93959b 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/BTypesTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/BTypesTest.scala @@ -8,6 +8,7 @@ import org.junit.runners.JUnit4 import scala.collection.mutable import scala.tools.asm.Opcodes +import scala.tools.testkit.AssertUtil.assertThrows import scala.tools.testkit.BytecodeTesting @RunWith(classOf[JUnit4]) @@ -19,7 +20,8 @@ class BTypesTest extends BytecodeTesting { } import global.genBCode.bTypes._ - def classBTFS(sym: global.Symbol) = global.exitingDelambdafy(classBTypeFromSymbol(sym)) + def duringBackend[T](f: => T) = global.exitingDelambdafy(f) + def classBTFS(sym: global.Symbol) = duringBackend { classBTypeFromSymbol(sym) } def jlo = global.definitions.ObjectClass def jls = global.definitions.StringClass @@ -50,7 +52,7 @@ class BTypesTest extends BytecodeTesting { assert(FLOAT.typedOpcode(Opcodes.IALOAD) == Opcodes.FALOAD) assert(LONG.typedOpcode(Opcodes.IALOAD) == Opcodes.LALOAD) assert(DOUBLE.typedOpcode(Opcodes.IALOAD) == Opcodes.DALOAD) - assert(classBTFS(jls).typedOpcode(Opcodes.IALOAD) == Opcodes.AALOAD) + assert(s.typedOpcode(Opcodes.IALOAD) == Opcodes.AALOAD) assert(UNIT.typedOpcode(Opcodes.IRETURN) == Opcodes.RETURN) assert(BOOL.typedOpcode(Opcodes.IRETURN) == Opcodes.IRETURN) @@ -61,7 +63,7 @@ class BTypesTest extends BytecodeTesting { assert(FLOAT.typedOpcode(Opcodes.IRETURN) == Opcodes.FRETURN) assert(LONG.typedOpcode(Opcodes.IRETURN) == Opcodes.LRETURN) assert(DOUBLE.typedOpcode(Opcodes.IRETURN) == Opcodes.DRETURN) - assert(classBTFS(jls).typedOpcode(Opcodes.IRETURN) == Opcodes.ARETURN) + assert(s.typedOpcode(Opcodes.IRETURN) == Opcodes.ARETURN) } @Test @@ -234,4 +236,29 @@ class BTypesTest extends BytecodeTesting { } assertTrue(ArrayBType(s).conformsTo(ArrayBType(o)).get) } + + @Test + def maxValueTypeATest(): Unit = duringBackend { + assertEquals(LONG, LONG.maxValueType(BYTE)) + assertEquals(LONG, LONG.maxValueType(SHORT)) + assertEquals(LONG, LONG.maxValueType(CHAR)) + assertEquals(LONG, LONG.maxValueType(INT)) + assertEquals(LONG, LONG.maxValueType(LONG)) + assertEquals(FLOAT, LONG.maxValueType(FLOAT)) + assertEquals(DOUBLE, LONG.maxValueType(DOUBLE)) + + assertUncomparable(LONG, UNIT) + assertUncomparable(LONG, BOOL) + assertUncomparable(LONG, o) + assertUncomparable(LONG, s) + assertUncomparable(LONG, oArr) + assertUncomparable(LONG, method) + + def assertUncomparable(t1: PrimitiveBType, t2: BType): Unit = { + assertThrows[AssertionError]( + t1.maxValueType(t2), + _.equals(s"Cannot compute maxValueType: $t1, $t2") + ) + } + } } diff --git a/test/junit/scala/tools/nsc/backend/jvm/BytecodeTest.scala b/test/junit/scala/tools/nsc/backend/jvm/BytecodeTest.scala index b67ee23b13e3..44983abe6524 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/BytecodeTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/BytecodeTest.scala @@ -10,18 +10,19 @@ import scala.tools.testkit.ASMConverters._ import scala.tools.testkit.BytecodeTesting import scala.tools.testkit.BytecodeTesting._ import scala.tools.asm.Opcodes +import scala.tools.asm.tree.MethodNode class BytecodeTest extends BytecodeTesting { import compiler._ @Test def t10812(): Unit = { - val code = - """ A { def f: Object = null } + def code(prefix: String) = + s"""$prefix A { def f: Object = null } |object B extends A { override def f: String = "b" } """.stripMargin for (base <- List("trait", "class")) { - val List(a, bMirror, bModule) = compileClasses(base + code) + val List(a, 
bMirror, bModule) = compileClasses(code(base)) assertEquals(bMirror.name, "B") assertEquals(bMirror.methods.asScala.filter(_.name == "f").map(m => m.name + m.desc).toList, List("f()Ljava/lang/String;")) } @@ -201,6 +202,22 @@ class BytecodeTest extends BytecodeTesting { ) } + @Test def `class constructor has correct line numbers (12470)`: Unit = { + val code = + """class A + |class B + |object D + |class C + """.stripMargin + val lines = Map("A" -> 1, "B" -> 2, "D$" -> 3, "C" -> 4) + compileClasses(code).foreach { c => + c.methods.asScala.foreach(m => convertMethod(m).instructions.foreach { + case LineNumber(n, _) => assertEquals(s"class ${c.name} method ${m.name}", lines(c.name), n) + case _ => + }) + } + } + @Test def sd233(): Unit = { val code = "def f = { println(1); synchronized(println(2)) }" @@ -343,4 +360,55 @@ class BytecodeTest extends BytecodeTesting { val a = A.fields.asScala.find(_.name == "a").get assertEquals(0, a.access & Opcodes.ACC_FINAL) } + + @Test + def t12362(): Unit = { + val code = + """object Test { + | def foo(value: String) = { + | println(value) + | } + | + | def abcde(value1: String, value2: Long, value3: Double, value4: Int, value5: Double): Double = { + | println(value1) + | value5 + | } + |}""".stripMargin + + val List(mirror, _) = compileClasses(code) + assertEquals(mirror.name, "Test") + + val foo = getAsmMethod(mirror, "foo") + val abcde = getAsmMethod(mirror, "abcde") + + def t(m: MethodNode, r: List[(String, String, Int)]) = { + assertTrue((m.access & Opcodes.ACC_STATIC) != 0) + assertEquals(r, m.localVariables.asScala.toList.map(l => (l.desc, l.name, l.index))) + } + + t(foo, List(("Ljava/lang/String;", "value", 0))) + t(abcde, List(("Ljava/lang/String;", "value1", 0), ("J", "value2", 1), ("D", "value3", 3), ("I", "value4", 5), ("D", "value5", 6))) + } + + @Test + def nonSpecializedValFence(): Unit = { + def code(u1: String) = + s"""abstract class Speck[@specialized(Int) T](t: T, sm: String, val sn: String) { + | val a = t + | $u1 + | lazy val u2 = "?" + | var u3 = "?" 
+ | val u4: String + | var u5: String + |} + |""".stripMargin + + for (u1 <- "" :: List("", "private", "private[this]", "protected").map(mod => s"$mod val u1 = \"?\"")) { + for (c <- compileClasses(code(u1)).map(getMethod(_, ""))) + if (u1.isEmpty) + assertDoesNotInvoke(c, "releaseFence") + else + assertInvoke(c, "scala/runtime/Statics", "releaseFence") + } + } } diff --git a/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerTest.scala b/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerTest.scala index d927107df8b7..388660a1bdd7 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerTest.scala @@ -1865,14 +1865,14 @@ class InlinerTest extends BytecodeTesting { ALOAD, ARRAYLENGTH, ISTORE, ICONST_0, ISTORE, // get length, init loop counter -1 /*8*/, ILOAD, ILOAD, IF_ICMPGE /*25*/, // check loop condition ALOAD, ILOAD, IALOAD, ISTORE, ALOAD, ILOAD, "consume", // load element, store into local, call body - ILOAD, ICONST_1, IADD, ISTORE, GOTO /*8*/, // increase loop counter, jump - -1 /*25*/, RETURN)) + IINC, GOTO /*7*/, // increase loop counter, jump + -1 /*26*/, RETURN)) assertSameSummary(getMethod(c, "t2"), List( ALOAD, ARRAYLENGTH, ISTORE, ICONST_0, ISTORE, -1 /*8*/, ILOAD, ILOAD, IF_ICMPGE /*24*/, ALOAD, ILOAD, AALOAD, "trim", POP, - ILOAD, ICONST_1, IADD, ISTORE, GOTO /*8*/, + IINC, GOTO /*8*/, -1 /*24*/, RETURN) ) } @@ -1891,14 +1891,14 @@ class InlinerTest extends BytecodeTesting { -1 /*14*/, ILOAD, ILOAD, IF_ICMPGE /*39*/, // loop condition ALOAD, ILOAD, IALOAD, ICONST_1, IADD, ISTORE, // compute element ALOAD, ILOAD, ILOAD, IASTORE, // store element - ILOAD, ICONST_1, IADD, ISTORE, GOTO /*14*/, // increase counter, jump - -1 /*39*/, ALOAD, ARETURN) + IINC, GOTO /*22*/, // increase counter, jump + -1 /*44*/, ALOAD, ARETURN) ) assertSameSummary(getMethod(c, "t2"), List( ALOAD, ARRAYLENGTH, ISTORE, ILOAD, ANEWARRAY, ASTORE, ILOAD, ICONST_0, IF_ICMPLE /*38*/, ICONST_0, ISTORE, // init new array, loop counter -1 /*15*/, ILOAD, ILOAD, IF_ICMPGE /*38*/, // loop condition ALOAD, ILOAD, AALOAD, "trim", ASTORE, ALOAD, ACONST_NULL, ASTORE, ASTORE, ALOAD, ILOAD, ALOAD, AASTORE, ACONST_NULL, ASTORE, // compute and store element - ILOAD, ICONST_1, IADD, ISTORE, GOTO /*15*/, // increase counter, jump + IINC, GOTO /*15*/, // increase counter, jump -1 /*38*/, ALOAD, ARETURN) ) } diff --git a/test/junit/scala/tools/nsc/classpath/MultiReleaseJarTest.scala b/test/junit/scala/tools/nsc/classpath/MultiReleaseJarTest.scala index c3c3560c30b2..542408f6b1cd 100644 --- a/test/junit/scala/tools/nsc/classpath/MultiReleaseJarTest.scala +++ b/test/junit/scala/tools/nsc/classpath/MultiReleaseJarTest.scala @@ -4,31 +4,27 @@ import java.io.ByteArrayOutputStream import java.nio.file.{FileSystems, Files, Path} import java.util.jar.Attributes import java.util.jar.Attributes.Name - import org.junit.{Assert, Test} -import scala.tools.nsc.{Global, Settings} -import scala.tools.testkit.BytecodeTesting -import scala.util.Properties +import scala.tools.nsc.{CloseableRegistry, Global, Settings} +import scala.tools.testkit.{BytecodeTesting, ForDeletion} +import scala.util.{Properties, Using} class MultiReleaseJarTest extends BytecodeTesting { import compiler._ @Test def mrJar(): Unit = { - if (!Properties.isJavaAtLeast("9")) { println("skipping mrJar() on old JDK"); return} // TODO test that the compiler warns that --release is unsupported. 
- - val temp1 = Files.createTempFile("mr-jar-test-", ".jar") + if (!Properties.isJavaAtLeast("9")) return // TODO test that the compiler warns that --release is unsupported. - // TODO test fails if both Global runs look at the same JAR on disk. Caching problem in our classpath implementation? - // val temp2 = temp1 - val temp2 = Files.createTempFile("mr-jar-test-", ".jar") + // TODO test fails if both Global runs look at the same JAR on disk. Caching problem in our classpath implementation? So use two JARs. + def makeTemp() = Files.createTempFile("mr-jar-test-", ".jar") + Using.resources(ForDeletion(makeTemp()), ForDeletion(makeTemp())) { (temp1, temp2) => - try { def code(newApi: String) = s"package p1; abstract class Versioned { def oldApi: Int; $newApi }" val oldC = compileToBytes(code("")).head._2 val newC = compileToBytes(code("def newApi: Int")).head._2 - List(temp1, temp2).foreach(temp => createZip(temp, List( + List(temp1.path, temp2.path).foreach(temp => createZip(temp, List( "/p1/Versioned.class" -> oldC, "/META-INF/versions/9/p1/Versioned.class" -> newC, "/META-INF/MANIFEST.MF" -> createManifest) @@ -42,33 +38,41 @@ class MultiReleaseJarTest extends BytecodeTesting { settings.release.value = release new g.Run val decls = g.rootMirror.staticClass("p1.Versioned").info.decls.filterNot(_.isConstructor).map(_.name.toString).toList.sorted + g.close() decls } - Assert.assertEquals(List("newApi", "oldApi"), declsOfC(temp1, "9")) - Assert.assertEquals(List("oldApi"), declsOfC(temp2, "8")) - } finally - List(temp1, temp2).foreach(Files.deleteIfExists) + Assert.assertEquals(List("newApi", "oldApi"), declsOfC(temp1.path, "9")) + Assert.assertEquals(List("oldApi"), declsOfC(temp2.path, "8")) + } } @Test def ctSymTest(): Unit = { - if (!Properties.isJavaAtLeast("9")) { println("skipping mrJar() on old JDK"); return} // TODO test that the compiler warns that --release is unsupported. + if (!Properties.isJavaAtLeast("9")) return // TODO test that the compiler warns that --release is unsupported. 
+    val cleanup = new CloseableRegistry
     def lookup(className: String, release: String): Boolean = {
       val settings = new Settings()
       settings.usejavacp.value = true
       val g = new Global(settings)
+      cleanup.registerCloseable(g)
       import g._
       settings.release.value = release
       new Run
       rootMirror.getClassIfDefined(className) != NoSymbol
     }
-    Assert.assertTrue(lookup("java.lang.invoke.LambdaMetafactory", "8"))
-    Assert.assertFalse(lookup("java.lang.invoke.LambdaMetafactory", "7"))
-    Assert.assertTrue(lookup("java.lang.invoke.LambdaMetafactory", "9"))
+    try {
+      Assert.assertTrue(lookup("java.lang.invoke.LambdaMetafactory", "8"))
+      Assert.assertFalse(lookup("java.lang.invoke.LambdaMetafactory", "7"))
+      Assert.assertTrue(lookup("java.lang.invoke.LambdaMetafactory", "9"))
+    } finally {
+      cleanup.close()
+    }
   }
+
+
+
   private def createManifest = {
     val manifest = new java.util.jar.Manifest()
     manifest.getMainAttributes.put(Name.MANIFEST_VERSION, "1.0")
diff --git a/test/junit/scala/tools/nsc/interpreter/CompletionTest.scala b/test/junit/scala/tools/nsc/interpreter/CompletionTest.scala
index 870b9e987bb1..d37fad76419d 100644
--- a/test/junit/scala/tools/nsc/interpreter/CompletionTest.scala
+++ b/test/junit/scala/tools/nsc/interpreter/CompletionTest.scala
@@ -50,7 +50,7 @@ class CompletionTest {
     }
     val acc = new Accumulator
     val shellCompletion = new Completion {
-      override def complete(buffer: String, cursor: Int) =
+      override def complete(buffer: String, cursor: Int, filter: Boolean) =
         if (buffer.startsWith(":")) new CommandMock().colonCompletion(buffer, cursor).complete(buffer, cursor)
         else NoCompletions
     }
@@ -106,7 +106,7 @@ class CompletionTest {
     checkExact(completer, "asInstanceO", "", includeUniversal = true)("asInstanceOf")
 
     // Output is sorted
-    assertEquals(List("prefix_aaa", "prefix_nnn", "prefix_zzz"), completer.complete( """class C { def prefix_nnn = 0; def prefix_zzz = 0; def prefix_aaa = 0; prefix_""").candidates.filter(!_.isUniversal).map(_.defString))
+    assertEquals(List("prefix_aaa", "prefix_nnn", "prefix_zzz"), completer.complete( """class C { def prefix_nnn = 0; def prefix_zzz = 0; def prefix_aaa = 0; prefix_""").candidates.filter(!_.isUniversal).map(_.name))
 
     // Enable implicits to check completion enrichment
     checkExact(completer, """'c'.toU""")("toUpper")
@@ -172,11 +172,9 @@
   def defStringConstructor(): Unit = {
     val intp = newIMain()
     val completer = new ReplCompletion(intp)
-    checkExact(completer, "class Shazam(i: Int); new Shaza")("Shazam")
-    checkExact(completer, "class Shazam(i: Int); new Shazam")(EmptyString, "def <init>(i: Int): Shazam")
-
-    checkExact(completer, "class Shazam(i: Int) { def this(x: String) = this(0) }; new Shaza")("Shazam")
-    checkExact(completer, "class Shazam(i: Int) { def this(x: String) = this(0) }; new Shazam")(EmptyString, "def <init>(i: Int): Shazam", "def <init>(x: String): Shazam")
+    // : String to workaround https://github.com/scala/bug/issues/11964
+    checkExact(completer, "class Shazam(i: Int); new Shazam", result = _.declString())("def <init>(i: Int): Shazam" : String)
+    checkExact(completer, "class Shazam(i: Int) { def this(x: String) = this(0) }; new Shazam", result = _.declString())("def <init>(i: Int): Shazam", "def <init>(x: String): Shazam": String)
   }
 
   @Test
@@ -212,7 +210,7 @@ class CompletionTest {
         | .map(_ + 1) /* then we do reverse */
        | .rev""".stripMargin
     assertTrue(
-      completer.complete(withMultilineCommit).candidates.map(_.defString).contains("reverseMap")
+      completer.complete(withMultilineCommit).candidates.map(_.name).contains("reverseMap")
     )
     val
withInlineCommit = @@ -220,7 +218,7 @@ class CompletionTest { | .map(_ + 1) // then we do reverse | .rev""".stripMargin assertTrue( - completer.complete(withInlineCommit).candidates.map(_.defString).contains("reverseMap") + completer.complete(withInlineCommit).candidates.map(_.name).contains("reverseMap") ) } @@ -245,7 +243,9 @@ class CompletionTest { ) val candidates1 = completer.complete("Stale.ol").candidates assertEquals(2, candidates1.size) - assertEquals(candidates1.head.isDeprecated, false) + // Our JLine Reader is now responsible for only displaying @deprecated if all candidates with the name are + // deprecated. That isn't covered by this test. + assertEquals(candidates1.head.isDeprecated, true) assertEquals(candidates1.last.isDeprecated, false) } @@ -255,8 +255,8 @@ class CompletionTest { """object Stale { def oldie(i: Int) = ???; @deprecated("","") def oldie = ??? }""" ) val candidates1 = completer.complete("Stale.oldie").candidates - assertEquals(3, candidates1.size) - assertEquals(candidates1.filter(_.isDeprecated).map(_.defString.contains("deprecated")).head, true) + assertEquals(2, candidates1.size) + assertEquals(candidates1.filter(_.isDeprecated).map(_.declString().contains("deprecated")).head, true) assertEquals(candidates1.last.isDeprecated, false) } @@ -267,11 +267,11 @@ class CompletionTest { """object Stuff { @deprecated("","") def `this` = ??? ; @deprecated("","") def `that` = ??? }""" ) val candidates1 = completer.complete("Stale.oldie").candidates - assertEquals(2, candidates1.size) // When exactly matched, there is an empty character - assertTrue(candidates1.filter(_.defString.contains("oldie")).head.defString.contains("deprecated")) + assertEquals(1, candidates1.size) // When exactly matched, there is an empty character + assertTrue(candidates1.filter(_.declString().contains("oldie")).head.declString().contains("deprecated")) val candidates2 = completer.complete("Stuff.that").candidates - assertEquals(2, candidates2.size) - assertTrue(candidates2.filter(_.defString.contains("that")).head.defString.contains("deprecated")) + assertEquals(1, candidates2.size) + assertTrue(candidates2.filter(_.declString().contains("that")).head.declString().contains("deprecated")) } @Test @@ -301,9 +301,9 @@ class CompletionTest { """object A { class Type; object Term }""" ) val candidates1 = completer.complete("A.T").candidates - assertEquals("Term", candidates1.map(_.defString).mkString(" ")) + assertEquals("Term", candidates1.map(_.name).mkString(" ")) val candidates2 = completer.complete("import A.T").candidates - assertEquals("Term Type", candidates2.map(_.defString).sorted.mkString(" ")) + assertEquals("Term Type", candidates2.map(_.name).sorted.mkString(" ")) } @Test @@ -348,11 +348,12 @@ object Test2 { checkExact(completer, "test.Test.withoutParens.charA")("charAt") } - def checkExact(completer: Completion, before: String, after: String = "", includeUniversal: Boolean = false)(expected: String*): Unit = { - val actual = - completer.complete(before, after).candidates - .filter(c => includeUniversal || !c.isUniversal) - .map(_.defString) + def checkExact(completer: Completion, before: String, after: String = "", includeUniversal: Boolean = false, + result: CompletionCandidate => String = _.name)(expected: String*): Unit = { + val candidates = completer.complete(before, after).candidates + .filter(c => includeUniversal || !c.isUniversal) + val actual = candidates.map(result) assertEquals(expected.sorted.mkString(" "), actual.toSeq.distinct.sorted.mkString(" ")) } + } diff --git 
a/test/junit/scala/tools/nsc/settings/TargetTest.scala b/test/junit/scala/tools/nsc/settings/TargetTest.scala index 065aa4d5a98f..4925ed6a56fe 100644 --- a/test/junit/scala/tools/nsc/settings/TargetTest.scala +++ b/test/junit/scala/tools/nsc/settings/TargetTest.scala @@ -65,8 +65,11 @@ class TargetTest { check("-target:jvm-17", "17") check("-target:17", "17") + check("-target:jvm-18", "18") + check("-target:18", "18") + checkFail("-target:jvm-6") // no longer - checkFail("-target:jvm-18") // not yet... + checkFail("-target:jvm-19") // not yet... checkFail("-target:jvm-3000") // not in our lifetime checkFail("-target:msil") // really? diff --git a/test/junit/scala/tools/testkit/AssertUtilTest.scala b/test/junit/scala/tools/testkit/AssertUtilTest.scala index 98e2c0308553..90e98e1598e3 100644 --- a/test/junit/scala/tools/testkit/AssertUtilTest.scala +++ b/test/junit/scala/tools/testkit/AssertUtilTest.scala @@ -110,4 +110,10 @@ class AssertUtilTest { assertEquals(1, sut.errors.size) assertEquals(0, sut.errors.head._2.getSuppressed.length) } + + /** TODO + @Test def `hexdump is supplementary-aware`: Unit = { + assertEquals("00000000 f0 90 90 80 |𐐀.|", hexdump("\ud801\udc00").next()) + } + */ } diff --git a/test/junit/scala/util/matching/CharRegexTest.scala b/test/junit/scala/util/matching/CharRegexTest.scala index c2a30830cecc..f78316bd8d53 100644 --- a/test/junit/scala/util/matching/CharRegexTest.scala +++ b/test/junit/scala/util/matching/CharRegexTest.scala @@ -1,54 +1,50 @@ package scala.util.matching -import org.junit.Test +import scala.tools.testkit.AssertUtil.{assertCond, assertCondNot, assertThrows} -import PartialFunction._ +import org.junit.Test /** Regex can match a Char. * If the pattern includes a group, * always return a single char. */ class CharRegexTest { - implicit class Averrable(val b: Boolean) /*extends AnyVal*/ { - def yes(): Unit = assert(b) - def no(): Unit = assert(!b) - } + val c: Char = 'c' // "cat"(0) val d: Char = 'D' // "Dog"(0) - @Test def comparesGroupCorrectly(): Unit = { + @Test def comparesGroupCorrectly: Unit = { val r = """(\p{Lower})""".r - cond(c) { case r(x) => true } .yes() - cond(c) { case r(_) => true } .yes() - cond(c) { case r(_*) => true } .yes() - cond(c) { case r() => true } .no() - - cond(d) { case r(x) => true } .no() - cond(d) { case r(_) => true } .no() - cond(d) { case r(_*) => true } .no() - cond(d) { case r() => true } .no() + assertCond(c) { case r(x) => true } + assertCond(c) { case r(_) => true } + assertCond(c) { case r(_*) => true } + assertCondNot(c) { case r() => true } + + assertCondNot(d) { case r(x) => true } + assertCondNot(d) { case r(_) => true } + assertCondNot(d) { case r(_*) => true } + assertCondNot(d) { case r() => true } } - @Test def comparesNoGroupCorrectly(): Unit = { + @Test def comparesNoGroupCorrectly: Unit = { val rnc = """\p{Lower}""".r - cond(c) { case rnc(x) => true } .no() - cond(c) { case rnc(_) => true } .no() - cond(c) { case rnc(_*) => true } .yes() - cond(c) { case rnc() => true } .yes() - - cond(d) { case rnc(x) => true } .no() - cond(d) { case rnc(_) => true } .no() - cond(d) { case rnc(_*) => true } .no() - cond(d) { case rnc() => true } .no() + assertCondNot(c) { case rnc(x) => true } + assertCondNot(c) { case rnc(_) => true } + assertCond(c) { case rnc(_*) => true } + assertCond(c) { case rnc() => true } + + assertCondNot(d) { case rnc(x) => true } + assertCondNot(d) { case rnc(_) => true } + assertCondNot(d) { case rnc(_*) => true } + assertCondNot(d) { case rnc() => true } } - @Test(expected = 
classOf[MatchError]) - def failCorrectly(): Unit = { + @Test def failCorrectly: Unit = { val headAndTail = """(\p{Lower})([a-z]+)""".r - val n = "cat"(0) match { + def test = "cat"(0) match { case headAndTail(ht @ _*) => ht.size } - assert(false, s"Match size $n") + assertThrows[MatchError](test) } } diff --git a/test/junit/scala/util/matching/RegexTest.scala b/test/junit/scala/util/matching/RegexTest.scala index 61e3af2ef5bf..09ec4ee533be 100644 --- a/test/junit/scala/util/matching/RegexTest.scala +++ b/test/junit/scala/util/matching/RegexTest.scala @@ -1,15 +1,12 @@ package scala.util.matching -import org.junit.Assert.{ assertThrows => _, _ } +import org.junit.Assert.{assertEquals, assertFalse, assertTrue} import org.junit.Test -import org.junit.runner.RunWith -import org.junit.runners.JUnit4 -import scala.tools.testkit.AssertUtil._ +import scala.tools.testkit.AssertUtil.{assertCond, assertThrows} -@RunWith(classOf[JUnit4]) class RegexTest { - @Test def t8022CharSequence(): Unit = { + @Test def t8022CharSequence: Unit = { val full = """.*: (.)$""".r val text = " When I use this operator: *" // Testing 2.10.x compatibility of the return types of unapplySeq @@ -17,7 +14,7 @@ class RegexTest { assertEquals("*", y) } - @Test def t8022Match(): Unit = { + @Test def t8022Match: Unit = { val R = """(\d)""".r val matchh = R.findFirstMatchIn("a1").get // Testing 2.10.x compatibility of the return types of unapplySeq @@ -25,7 +22,7 @@ class RegexTest { assertEquals("1", y) } - @Test def `t9666: use inline group names`(): Unit = { + @Test def `t9666: use inline group names`: Unit = { val r = new Regex("a(?b*)c") val ms = r findAllIn "stuff abbbc more abc and so on" assertTrue(ms.hasNext) @@ -37,7 +34,8 @@ class RegexTest { assertFalse(ms.hasNext) } - @Test def `t9666: use explicit group names`(): Unit = { + @deprecated("Explicit group names are essentially deprecated", since="2.13.7") + @Test def `t9666: use explicit group names`: Unit = { val r = new Regex("a(b*)c", "Bee") val ms = r findAllIn "stuff abbbc more abc and so on" assertTrue(ms.hasNext) @@ -49,7 +47,8 @@ class RegexTest { assertFalse(ms.hasNext) } - @Test def `t9666: fall back to explicit group names`(): Unit = { + @deprecated("Explicit group names are essentially deprecated", since="2.13.7") + @Test def `t9666: fall back to explicit group names`: Unit = { val r = new Regex("a(?b*)c", "Bee") val ms = r findAllIn "stuff abbbc more abc and so on" assertTrue(ms.hasNext) @@ -67,13 +66,16 @@ class RegexTest { type NoMatch = NoSuchElementException type NoData = IllegalStateException - @Test def `t9666: throw on bad name`(): Unit = { + @Test def `t9666: throw on bad name`: Unit = assertThrows[NoGroup] { val r = new Regex("a(?b*)c") val ms = r findAllIn "stuff abbbc more abc and so on" assertTrue(ms.hasNext) ms group "Bee" } + + @deprecated("Explicit group names are essentially deprecated", since="2.13.7") + @Test def `t9666: throw on bad explicit name`: Unit = { assertThrows[NoGroup] { val r = new Regex("a(?b*)c", "Bar") val ms = r findAllIn "stuff abbbc more abc and so on" @@ -88,7 +90,7 @@ class RegexTest { } } - @Test def `t9827 MatchIterator ergonomics`(): Unit = { + @Test def `t9827 MatchIterator ergonomics`: Unit = { val r = "(ab)(cd)".r val s = "xxxabcdyyyabcdzzz" assertEquals(3, r.findAllIn(s).start) @@ -155,7 +157,7 @@ class RegexTest { } } - @Test def `t10827 matches method`(): Unit = { + @Test def `t10827 matches method`: Unit = { val r = """\d+""".r assertTrue(r.matches("500")) assertFalse(r.matches("foo")) @@ -164,7 +166,7 @@ 
class RegexTest { assertFalse(r.matches("2foo")) } - @Test def `t10827 matches method for unanchored Regex`(): Unit = { + @Test def `t10827 matches method for unanchored Regex`: Unit = { val r = """\d+""".r.unanchored assertTrue(r.matches("500")) assertFalse(r.matches("abc")) @@ -173,7 +175,7 @@ class RegexTest { assertTrue(r.matches("2foo")) } - @Test def replacementMatching(): Unit = { + @Test def replacementMatching: Unit = { val regex = """\$\{(.+?)\}""".r val replaced = regex.replaceAllIn("Replacing: ${main}. And another method: ${foo}.", (m: util.matching.Regex.Match) => { @@ -190,7 +192,7 @@ class RegexTest { assertEquals("Replacing: main. And another: ${foo}.", replaced3) } - @Test def groupsMatching(): Unit = { + @Test def groupsMatching: Unit = { val Date = """(\d+)/(\d+)/(\d+)""".r for (Regex.Groups(a, b, c) <- Date findFirstMatchIn "1/1/2001 marks the start of the millennium. 31/12/2000 doesn't.") { assertEquals("1", a) @@ -198,13 +200,13 @@ class RegexTest { assertEquals("2001", c) } for (Regex.Groups(a, b, c) <- Date.findAllIn("1/1/2001 marks the start of the millennium. 31/12/2000 doesn't.").matchData) { - assertTrue(a == "1" || a == "31") - assertTrue(b == "1" || b == "12") - assertTrue(c == "2001" || c == "2000") + assertCond(a) { case "1" | "31" => true } + assertCond(b) { case "1" | "12" => true } + assertCond(c) { case "2001" | "2000" => true } } } - @Test def `t6406 no longer unapply any`(): Unit = { + @Test def `t6406 no longer unapply any`: Unit = { val r = "(\\d+)".r val q = """(\d)""".r val ns = List("1,2","x","3,4") diff --git a/test/macro-annot/run/kase/macro_kase_1.scala b/test/macro-annot/run/kase/macro_kase_1.scala index abd75e2304c6..a1a9b420443d 100644 --- a/test/macro-annot/run/kase/macro_kase_1.scala +++ b/test/macro-annot/run/kase/macro_kase_1.scala @@ -74,6 +74,7 @@ object kaseMacro { val primaryParams = primaryParamss.head val secondaryParamss = primaryParamss.tail val ourPolyType = if (tparams.nonEmpty) AppliedTypeTree(Ident(name), tparams.map(tparam => Ident(tparam.name))) else Ident(name) + val ourWildType = if (tparams.nonEmpty) AppliedTypeTree(Ident(name), tparams.map(_ => Bind(typeNames.WILDCARD, EmptyTree))) else Ident(name) val tparamUnderscores = tparams.zipWithIndex.map{ case (tdef, i) => TypeDef(makeDeferredSynthetic(unmakeParam(tdef.mods)), TypeName("x$" + (i+1)), tdef.tparams, tdef.rhs) } val ourExistentialType = ExistentialTypeTree(AppliedTypeTree(Ident(name), tparamUnderscores.map(tdef => Ident(tdef.name))), tparamUnderscores) @@ -154,7 +155,7 @@ object kaseMacro { Apply(Select(thatC, TermName("canEqual")), List(This(name))) } def sameTypeCheck = { - val ifSameType = CaseDef(Typed(Ident(termNames.WILDCARD), ourPolyType), EmptyTree, Literal(Constant(true))) + val ifSameType = CaseDef(Typed(Ident(termNames.WILDCARD), ourWildType), EmptyTree, Literal(Constant(true))) val otherwise = CaseDef(Ident(termNames.WILDCARD), EmptyTree, Literal(Constant(false))) Match(Ident(equalsParam.name), List(ifSameType, otherwise)) } diff --git a/test/scalacheck/CheckEither.scala b/test/scalacheck/CheckEither.scala index cf6b2e2f8558..c650cee4ade3 100644 --- a/test/scalacheck/CheckEither.scala +++ b/test/scalacheck/CheckEither.scala @@ -4,8 +4,16 @@ import org.scalacheck.Gen.oneOf import org.scalacheck.Prop._ import org.scalacheck.Test.check import Function.tupled +import scala.util.Either.LeftProjection +@annotation.nowarn("cat=deprecation") object CheckEitherTest extends Properties("Either") { + implicit class Failing[A, B](val e: Either[A, B]) { + def orFail 
= e.getOrElse(???) + } + implicit class FailingLeft[A, B](val e: LeftProjection[A, B]) { + def orFail = e.getOrElse(???) + } implicit def arbitraryEither[X, Y](implicit xa: Arbitrary[X], ya: Arbitrary[Y]): Arbitrary[Either[X, Y]] = Arbitrary[Either[X, Y]](oneOf(arbitrary[X].map(Left(_)), arbitrary[Y].map(Right(_)))) @@ -14,14 +22,14 @@ object CheckEitherTest extends Properties("Either") { val prop_either2 = forAll((n: Int) => Right(n).fold(a => sys.error("fail"), x => x) == n) val prop_swap = forAll((e: Either[Int, Int]) => e match { - case Left(a) => e.swap.right.get == a - case Right(b) => e.swap.left.get == b + case Left(a) => e.swap.orFail == a + case Right(b) => e.swap.left.orFail == b }) val prop_isLeftRight = forAll((e: Either[Int, Int]) => e.isLeft != e.isRight) object CheckLeftProjection { - val prop_value = forAll((n: Int) => Left(n).left.get == n) + val prop_value = forAll((n: Int) => Left(n).left.orFail == n) val prop_getOrElse = forAll((e: Either[Int, Int], or: Int) => e.left.getOrElse(or) == (e match { case Left(a) => a @@ -29,10 +37,10 @@ object CheckEitherTest extends Properties("Either") { })) val prop_forall = forAll((e: Either[Int, Int]) => - e.left.forall(_ % 2 == 0) == (e.isRight || e.left.get % 2 == 0)) + e.left.forall(_ % 2 == 0) == (e.isRight || e.left.orFail % 2 == 0)) val prop_exists = forAll((e: Either[Int, Int]) => - e.left.exists(_ % 2 == 0) == (e.isLeft && e.left.get % 2 == 0)) + e.left.exists(_ % 2 == 0) == (e.isLeft && e.left.orFail % 2 == 0)) val prop_flatMapLeftIdentity = forAll((e: Either[Int, Int], n: Int, s: String) => { def f(x: Int) = if(x % 2 == 0) Left(s) else Right(s) @@ -53,7 +61,7 @@ object CheckEitherTest extends Properties("Either") { e.left.map(x => f(g(x))) == e.left.map(x => g(x)).left.map(f(_))}) val prop_filterToOption = forAll((e: Either[Int, Int], x: Int) => e.left.filterToOption(_ % 2 == 0) == - (if(e.isRight || e.left.get % 2 != 0) None else Some(e))) + (if(e.isRight || e.left.orFail % 2 != 0) None else Some(e))) val prop_seq = forAll((e: Either[Int, Int]) => e.left.toSeq == (e match { case Left(a) => Seq(a) @@ -67,46 +75,46 @@ object CheckEitherTest extends Properties("Either") { } object CheckRightProjection { - val prop_value = forAll((n: Int) => Right(n).right.get == n) + val prop_value = forAll((n: Int) => Right(n).orFail == n) - val prop_getOrElse = forAll((e: Either[Int, Int], or: Int) => e.right.getOrElse(or) == (e match { + val prop_getOrElse = forAll((e: Either[Int, Int], or: Int) => e.getOrElse(or) == (e match { case Left(_) => or case Right(b) => b })) val prop_forall = forAll((e: Either[Int, Int]) => - e.right.forall(_ % 2 == 0) == (e.isLeft || e.right.get % 2 == 0)) + e.forall(_ % 2 == 0) == (e.isLeft || e.orFail % 2 == 0)) val prop_exists = forAll((e: Either[Int, Int]) => - e.right.exists(_ % 2 == 0) == (e.isRight && e.right.get % 2 == 0)) + e.exists(_ % 2 == 0) == (e.isRight && e.orFail % 2 == 0)) val prop_flatMapLeftIdentity = forAll((e: Either[Int, Int], n: Int, s: String) => { def f(x: Int) = if(x % 2 == 0) Left(s) else Right(s) - Right(n).right.flatMap(f(_)) == f(n)}) + Right(n).flatMap(f(_)) == f(n)}) - val prop_flatMapRightIdentity = forAll((e: Either[Int, Int]) => e.right.flatMap(Right(_)) == e) + val prop_flatMapRightIdentity = forAll((e: Either[Int, Int]) => e.flatMap(Right(_)) == e) val prop_flatMapComposition = forAll((e: Either[Int, Int]) => { def f(x: Int) = if(x % 2 == 0) Left(x) else Right(x) def g(x: Int) = if(x % 7 == 0) Right(x) else Left(x) - e.right.flatMap(f(_)).right.flatMap(g(_)) == 
e.right.flatMap(f(_).right.flatMap(g(_)))}) + e.flatMap(f(_)).flatMap(g(_)) == e.flatMap(f(_).flatMap(g(_)))}) - val prop_mapIdentity = forAll((e: Either[Int, Int]) => e.right.map(x => x) == e) + val prop_mapIdentity = forAll((e: Either[Int, Int]) => e.map(x => x) == e) val prop_mapComposition = forAll((e: Either[Int, String]) => { def f(s: String) = s.toLowerCase def g(s: String) = s.reverse - e.right.map(x => f(g(x))) == e.right.map(x => g(x)).right.map(f(_))}) + e.map(x => f(g(x))) == e.map(x => g(x)).map(f(_))}) val prop_filterToOption = forAll((e: Either[Int, Int], x: Int) => e.right.filterToOption(_ % 2 == 0) == - (if(e.isLeft || e.right.get % 2 != 0) None else Some(e))) + (if(e.isLeft || e.orFail % 2 != 0) None else Some(e))) - val prop_seq = forAll((e: Either[Int, Int]) => e.right.toSeq == (e match { + val prop_seq = forAll((e: Either[Int, Int]) => e.toSeq == (e match { case Left(_) => Seq.empty case Right(b) => Seq(b) })) - val prop_option = forAll((e: Either[Int, Int]) => e.right.toOption == (e match { + val prop_option = forAll((e: Either[Int, Int]) => e.toOption == (e match { case Left(_) => None case Right(b) => Some(b) })) @@ -114,7 +122,7 @@ object CheckEitherTest extends Properties("Either") { val prop_Either_left = forAll((n: Int) => Left(n).left.get == n) - val prop_Either_right = forAll((n: Int) => Right(n).right.get == n) + val prop_Either_right = forAll((n: Int) => Right(n).orFail == n) val prop_Either_joinLeft = forAll((e: Either[Either[Int, Int], Int]) => e match { case Left(ee) => e.joinLeft == ee diff --git a/test/scalacheck/Ctrie.scala b/test/scalacheck/Ctrie.scala index 6101105f06fc..9c120c552566 100644 --- a/test/scalacheck/Ctrie.scala +++ b/test/scalacheck/Ctrie.scala @@ -3,8 +3,7 @@ import Prop._ import org.scalacheck.Gen._ import collection._ import collection.concurrent.TrieMap - - +import scala.language.reflectiveCalls case class Wrap(i: Int) { override def hashCode = i // * 0x9e3775cd @@ -192,8 +191,8 @@ object CtrieTest extends Properties("concurrent.TrieMap") { idx => (0 until sz) foreach { i => - val v = ct.getOrElseUpdate(Wrap(i), idx + ":" + i) - if (v == idx + ":" + i) totalInserts.incrementAndGet() + val v = ct.getOrElseUpdate(Wrap(i), s"$idx:$i") + if (v == s"$idx:$i") totalInserts.incrementAndGet() } } diff --git a/test/scalacheck/concurrent-map.scala b/test/scalacheck/concurrent-map.scala index 75082e8bd09c..f3c529922269 100644 --- a/test/scalacheck/concurrent-map.scala +++ b/test/scalacheck/concurrent-map.scala @@ -1,6 +1,6 @@ import java.util.concurrent._ import scala.collection._ -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.scalacheck._ import org.scalacheck.Prop._ import org.scalacheck.Gen._ @@ -26,6 +26,7 @@ object ConcurrentMapTest extends Properties("concurrent.TrieMap") { /* helpers */ def inParallel[T](totalThreads: Int)(body: Int => T): Seq[T] = { + import scala.language.reflectiveCalls val threads = for (idx <- 0 until totalThreads) yield new Thread { setName("ParThread-" + idx) private var res: T = _ diff --git a/test/scalacheck/range.scala b/test/scalacheck/range.scala index 3344d3be6315..f06606b59fbc 100644 --- a/test/scalacheck/range.scala +++ b/test/scalacheck/range.scala @@ -43,9 +43,9 @@ abstract class RangeTest(kind: String) extends Properties("Range "+kind) { size <- choose(1, 100) step <- choose(1, 101) } yield { - val signum = if (boundary == 0) 1 else boundary.signum - if (isStart) Range(boundary, boundary - size * boundary.signum, - step * signum) - else Range(boundary - 
size * boundary.signum, boundary, step * signum) + val signum = if (boundary == 0) 1 else boundary.sign + if (isStart) Range(boundary, boundary - size * boundary.sign, - step * signum) + else Range(boundary - size * boundary.sign, boundary, step * signum) } diff --git a/test/scalacheck/redblacktree.scala b/test/scalacheck/redblacktree.scala index 3d4cfdd145a4..02c7597548b3 100644 --- a/test/scalacheck/redblacktree.scala +++ b/test/scalacheck/redblacktree.scala @@ -24,7 +24,7 @@ abstract class RedBlackTreeTest(tname: String) extends Properties(tname) with Re import RB._ def nodeAt[A](tree: Tree[String, A], n: Int): Option[(String, A)] = if (n < iterator(tree).size && n >= 0) - Some(iterator(tree).drop(n).next) + Some(iterator(tree).drop(n).next()) else None diff --git a/test/scalacheck/scala/ArrayTest.scala b/test/scalacheck/scala/ArrayTest.scala index e08c77e3e8af..a51562d5d88e 100644 --- a/test/scalacheck/scala/ArrayTest.scala +++ b/test/scalacheck/scala/ArrayTest.scala @@ -25,9 +25,9 @@ object ArrayTest extends Properties("Array") { property("fill") = forAll( Gen.choose(-10, 100), ) { len => - val xs = Vector.fill(len)(Random.nextInt) + val xs = Vector.fill(len)(Random.nextInt()) val i = xs.iterator - Array.fill(len)(i.next).toVector == xs + Array.fill(len)(i.next()).toVector == xs } property("tabulate") = forAll( diff --git a/test/scalacheck/scala/collection/FloatFormatTest.scala b/test/scalacheck/scala/collection/FloatFormatTest.scala index 7dd3989fab9f..6a70352fde82 100644 --- a/test/scalacheck/scala/collection/FloatFormatTest.scala +++ b/test/scalacheck/scala/collection/FloatFormatTest.scala @@ -81,8 +81,8 @@ object FloatFormatTest extends Properties("FloatFormat") { 10 -> right )) - // type annotation shouldn't be necessary? see typelevel/scalacheck#721 - Gen.sequence[List[String], String](bogoparts).map(_.mkString) + import scala.jdk.CollectionConverters._ + Gen.sequence(bogoparts).map(_.asScala.mkString) } //compare NaN equal diff --git a/test/scalacheck/scala/collection/IndexOfSliceTest.scala b/test/scalacheck/scala/collection/IndexOfSliceTest.scala index 3853139d340f..50c11d5cea25 100644 --- a/test/scalacheck/scala/collection/IndexOfSliceTest.scala +++ b/test/scalacheck/scala/collection/IndexOfSliceTest.scala @@ -8,6 +8,7 @@ object IndexOfSliceTest extends Properties("indexOfSlice") { // The default arbitrary[Seq[Int]] picks only one Seq implementation. // Here we explicitly list all the implementations we want to test + @annotation.nowarn("msg=type WrappedArray") val genDifferentSeqs = Gen.oneOf[Seq[Int]]( Arbitrary.arbitrary[collection.immutable.List[Int]], diff --git a/test/scalacheck/scala/collection/IntegralParseTest.scala b/test/scalacheck/scala/collection/IntegralParseTest.scala index 6fd4e229551c..b49466e9bb15 100644 --- a/test/scalacheck/scala/collection/IntegralParseTest.scala +++ b/test/scalacheck/scala/collection/IntegralParseTest.scala @@ -120,8 +120,11 @@ object NumericStringGenerators { if (n >= 0) Gen.oneOf(digitsByValue(n)) else Gen.const(ch) }) - // type annotation shouldn't be necessary? 
see typelevel/scalacheck#721 - Gen.sequence[List[Char], Char](listOfGens).map(_.mkString) + + import scala.jdk.CollectionConverters._ + + val sequenced = Gen.sequence(listOfGens) + sequenced.map(_.asScala.mkString) } } diff --git a/test/scalacheck/scala/collection/IteratorProperties.scala b/test/scalacheck/scala/collection/IteratorProperties.scala index 820cbaa11748..62481d6a4895 100644 --- a/test/scalacheck/scala/collection/IteratorProperties.scala +++ b/test/scalacheck/scala/collection/IteratorProperties.scala @@ -29,17 +29,22 @@ object IteratorProperties extends Properties("Iterator") { case it: Iterator[Int] => View.dropRightIterator(it, n) case x => throw new MatchError(x) }) + property("patch") = check((it, n) => it match { + case it: Iterable[Int] => it.iterator.patch(1, Iterator.empty, n) + case it: Iterator[Int] => it.patch(1, Iterator.empty, n) + case x => throw new MatchError(x) + }) def check(f: (IterableOnceOps[Int, IterableOnce, IterableOnce[Int]], Int) => IterableOnce[Int]): Prop = forAll(Arbitrary.arbitrary[Seq[Int]], smallInteger) { (s: Seq[Int], n: Int) => val indexed = s.toIndexedSeq // IndexedSeqs and their Iterators have a knownSize val simple = new SimpleIterable(s) // SimpleIterable and its Iterator don't - val stream = LazyList.from(s) // Lazy - val indexed1 = f(indexed, n).toSeq - val indexed2 = f(indexed.iterator, n).toSeq - val simple1 = f(simple, n).toSeq - val simple2 = f(simple.iterator, n).toSeq - val stream1 = f(stream, n).toSeq - val stream2 = f(stream.iterator, n).toSeq + val lazyList = LazyList.from(s) // Lazy + val indexed1 = f(indexed, n).iterator.to(Seq) + val indexed2 = f(indexed.iterator, n).iterator.to(Seq) + val simple1 = f(simple, n).iterator.to(Seq) + val simple2 = f(simple.iterator, n).iterator.to(Seq) + val stream1 = f(lazyList, n).iterator.to(Seq) + val stream2 = f(lazyList.iterator, n).iterator.to(Seq) (indexed1 == indexed2) :| s"indexed: $indexed1 != $indexed2" && (simple1 == simple2) :| s"simple: $simple1 != $simple2" && (stream1 == stream2) :| s"stream: $stream1 != $stream2" && diff --git a/test/scalacheck/scala/collection/StringOpsProps.scala b/test/scalacheck/scala/collection/StringOpsProps.scala index b902512e39e7..bdade1547a72 100644 --- a/test/scalacheck/scala/collection/StringOpsProps.scala +++ b/test/scalacheck/scala/collection/StringOpsProps.scala @@ -6,7 +6,7 @@ import java.io.{BufferedReader, StringReader} import org.scalacheck.{Gen, Properties}, Gen.{oneOf, listOf} import org.scalacheck.Prop._ -import JavaConverters._ +import scala.jdk.CollectionConverters._ object StringOpsTest extends Properties("StringOps") { diff --git a/test/scalacheck/scala/collection/ViewProperties.scala b/test/scalacheck/scala/collection/ViewProperties.scala new file mode 100644 index 000000000000..1814adc1c690 --- /dev/null +++ b/test/scalacheck/scala/collection/ViewProperties.scala @@ -0,0 +1,57 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.collection + +import org.scalacheck._ +import org.scalacheck.Prop._ + +import scala.collection.mutable.ListBuffer + +object ViewProperties extends Properties("View") { + + type Elem = Int + type SomeSeqOps = SeqOps[Elem, Iterable, Iterable[Elem]] + + private def expectedPatch(seq: SomeSeqOps, from: Int, other: Iterable[Elem], replaced: Int): Seq[Elem] = { + val (prefix, suffix) = seq.splitAt(from) + ListBuffer.empty[Elem] ++= prefix ++= other ++= suffix.drop(replaced) + } + + property("`SeqOps#patch(...)` (i.e. `iterableFactory.from(View.Patched(...))`) correctness") = { + // we use `mutable.ArraySeq` because it uses the default `patch` + // implementation, rather than one from `StrictOptimisedSeqOps` + forAll { (seq: mutable.ArraySeq[Elem], from: Int, other: Iterable[Elem], replaced: Int) => + val expected = expectedPatch(seq, from, other, replaced) + val patchedWithIterable = seq.patch(from, other, replaced) + val patchedWithIterableOnce = seq.patch(from, other.iterator, replaced) + + // we don't need to use `sameElements` like below, because + // both `expected` and patched are `Seq` this time + ((expected =? patchedWithIterable) :| "`patch(_, Iterable, _)` is performed correctly") && + ((expected =? patchedWithIterableOnce) :| "`patch(_, IterableOnce, _)` is performed correctly") + } + } + + + property("`SeqOps#view.patch(...)` (i.e. `View.Patched` used directly) correctness and consistency") = + forAll { (seq: Seq[Elem], from: Int, other: Iterable[Elem], replaced: Int) => + val expected = expectedPatch(seq, from, other, replaced) + val patchedWithIterable = seq.view.patch(from, other, replaced) + val patchedWithIterableOnce = seq.view.patch(from, other.iterator, replaced) + + (expected.sameElements(patchedWithIterable) :| "`patch(_, Iterable, _)` is performed correctly") && + (expected.sameElements(patchedWithIterable) :| "`view.patch(_, Iterable, _)` remains the same after multiple iterations") && + (expected.sameElements(patchedWithIterableOnce) :| "`patch(_, IterableOnce, _)` is performed correctly") && + (expected.sameElements(patchedWithIterableOnce) :| "`view.patch(_, IterableOnce, _)` remains the same after multiple iterations") + } +} diff --git a/test/scalacheck/scala/collection/immutable/ImmutableChampHashMapProperties.scala b/test/scalacheck/scala/collection/immutable/ImmutableChampHashMapProperties.scala index fa41faa4b724..2a61d5fe0382 100644 --- a/test/scalacheck/scala/collection/immutable/ImmutableChampHashMapProperties.scala +++ b/test/scalacheck/scala/collection/immutable/ImmutableChampHashMapProperties.scala @@ -33,7 +33,7 @@ object ImmutableChampHashMapProperties extends Properties("HashMap") { val builder = HashMap.newBuilder[K, V] inputMap.foreach(builder.addOne) - val duplicateMap = builder.result + val duplicateMap = builder.result() inputMap == duplicateMap } @@ -72,7 +72,7 @@ object ImmutableChampHashMapProperties extends Properties("HashMap") { property("adding elems twice to builder is the same as adding them once") = forAll { seq: Seq[(K, V)] => val b = HashMap.newBuilder[K, V].addAll(seq) - b.result == b.addAll(seq).result() + b.result() == b.addAll(seq).result() } property("(xs ++ ys).toMap == xs.toMap ++ ys.toMap") = forAll { (xs: Seq[(K, V)],ys: Seq[(K, V)]) => diff --git a/test/scalacheck/scala/collection/immutable/ImmutableChampHashSetProperties.scala b/test/scalacheck/scala/collection/immutable/ImmutableChampHashSetProperties.scala index 62ea4d75257b..257460e5cb37 100644 --- 
a/test/scalacheck/scala/collection/immutable/ImmutableChampHashSetProperties.scala +++ b/test/scalacheck/scala/collection/immutable/ImmutableChampHashSetProperties.scala @@ -38,7 +38,7 @@ object ImmutableChampHashSetProperties extends Properties("immutable.HashSet") { val builder = HashSet.newBuilder[K] inputSet.foreach(builder.addOne) - val duplicateSet = builder.result + val duplicateSet = builder.result() inputSet == duplicateSet } @@ -53,7 +53,7 @@ object ImmutableChampHashSetProperties extends Properties("immutable.HashSet") { } property("notContainedAfterInsertRemove") = forAll { (input: HashSet[K], item: K) => - (input + item - item).contains(item) == false + !(input + item - item).contains(item) } property("intersectIdentityReference") = forAll { (inputShared: HashSet[K]) => @@ -64,7 +64,7 @@ object ImmutableChampHashSetProperties extends Properties("immutable.HashSet") { val builder = HashSet.newBuilder[K] inputShared.foreach(builder.addOne) - val duplicateSet = builder.result + val duplicateSet = builder.result() inputShared == inputShared.intersect(duplicateSet) } @@ -121,7 +121,7 @@ object ImmutableChampHashSetProperties extends Properties("immutable.HashSet") { val builder = HashSet.newBuilder[K] inputShared.foreach(builder.addOne) - val duplicateSet = builder.result + val duplicateSet = builder.result() inputShared == inputShared.union(duplicateSet) } @@ -166,7 +166,7 @@ object ImmutableChampHashSetProperties extends Properties("immutable.HashSet") { val builder = HashSet.newBuilder[K] inputShared.foreach(builder.addOne) - val duplicateSet = builder.result + val duplicateSet = builder.result() HashSet.empty[K] == inputShared.diff(duplicateSet) } @@ -240,7 +240,7 @@ object ImmutableChampHashSetProperties extends Properties("immutable.HashSet") { } property("adding elems twice to builder is the same as adding them once") = forAll { seq: Seq[K] => val b = HashSet.newBuilder[K].addAll(seq) - b.result == b.addAll(seq).result() + b.result() == b.addAll(seq).result() } property("(xs ++ ys).toSet == xs.toSet ++ ys.toSet") = forAll { (xs: Seq[K],ys: Seq[K]) => (xs ++ ys).toSet =? 
xs.toSet ++ ys.toSet diff --git a/test/scalacheck/scala/collection/immutable/ListProperties.scala b/test/scalacheck/scala/collection/immutable/ListProperties.scala index 99e85d4fdb7f..958910e1e136 100644 --- a/test/scalacheck/scala/collection/immutable/ListProperties.scala +++ b/test/scalacheck/scala/collection/immutable/ListProperties.scala @@ -36,7 +36,7 @@ object ListProperties extends Properties("immutable.List") { property("list1 ::: list2 == list1.toVector.prependedAll(list2)") = forAll { (list1: List[Int], list2: List[Int]) => (list1.prependedAll(list2): Seq[Int]) ?= list1.toVector.prependedAll(list2) } - property("list1.prependedAll(iterableOnce) == list1.prependedAll(iterableOnce)") = + property("list1.prependedAll(iterableOnce) == list1.toVector.prependedAll(iterableOnce)") = forAll(arbitrary[List[Int]], iterableOnceGen){ (list1, it) => (list1.prependedAll(it()): Seq[Int]) ?= list1.toVector.prependedAll(it()) } diff --git a/test/scalacheck/scala/collection/immutable/SeqProperties.scala b/test/scalacheck/scala/collection/immutable/SeqProperties.scala index 1086506da5ee..0cd7ecbcbb4f 100644 --- a/test/scalacheck/scala/collection/immutable/SeqProperties.scala +++ b/test/scalacheck/scala/collection/immutable/SeqProperties.scala @@ -11,6 +11,7 @@ import scala.util.{Success, Try} import org.scalacheck.Properties +@annotation.nowarn("cat=deprecation&msg=Stream") object SeqProperties extends Properties("immutable.Seq builder implementations"){ type A = Int diff --git a/test/scalacheck/scala/collection/immutable/SetProperties.scala b/test/scalacheck/scala/collection/immutable/SetProperties.scala index f100b7292f48..f34a303cc164 100644 --- a/test/scalacheck/scala/collection/immutable/SetProperties.scala +++ b/test/scalacheck/scala/collection/immutable/SetProperties.scala @@ -7,8 +7,8 @@ import org.scalacheck.commands.Commands import scala.collection.mutable import scala.util.{Success, Try} - -object SetProperties extends Properties("immutable.Set builder implementations"){ +@annotation.nowarn("cat=deprecation&msg=Stream") +object SetProperties extends Properties("immutable.Set builder implementations") { type A = Int @@ -60,6 +60,7 @@ class SetBuilderStateProperties[A, To <: Set[A]](newBuilder: => mutable.Builder[ override def genCommand(state: State): Gen[Command] = _genCommand + @annotation.nowarn("cat=deprecation&msg=Stream") override def shrinkState = Shrink.apply[State]( set => set.to(Stream).map(set - _) ) case object Clear extends UnitCommand { diff --git a/test/scalacheck/scala/collection/immutable/VectorMapProperties.scala b/test/scalacheck/scala/collection/immutable/VectorMapProperties.scala index f8949c9ee4fa..1253c6804a95 100644 --- a/test/scalacheck/scala/collection/immutable/VectorMapProperties.scala +++ b/test/scalacheck/scala/collection/immutable/VectorMapProperties.scala @@ -31,15 +31,17 @@ object VectorMapProperties extends Properties("immutable.VectorMap") { property("internal underlying and index are consistent after removal") = forAll { (m: Map[K, V]) => m.size >= 3 ==> { - val v = Vector.from(m) - val random = v(new scala.util.Random().nextInt(v.size)) - val vm = VectorMap.from(v) + val v = Vector.from(m) + val random = v(new scala.util.Random().nextInt(v.size)) + val vm = VectorMap.from(v) val removed = vm - random._1 - removed.underlying.forall { case (k, (s, v)) => removed.fields(s) == k } - removed.fields.zipWithIndex.forall { - case (k: K, s) => removed.underlying(k)._1 == s - case _ => true - } + ("all map keys are located at the specified indices in the vector" 
|: + removed.underlying.forall { case (k, (s, v)) => removed.fields(s) == k }) && + ("all elements in the vector are in the map with the correct associated indices" |: + removed.fields.zipWithIndex.forall { + case (k: K, s) => removed.underlying(k)._1 == s + case _ => true + }) } } diff --git a/test/scalacheck/scala/collection/mutable/ArrayBufferProperties.scala b/test/scalacheck/scala/collection/mutable/ArrayBufferProperties.scala new file mode 100644 index 000000000000..193c49d47f9b --- /dev/null +++ b/test/scalacheck/scala/collection/mutable/ArrayBufferProperties.scala @@ -0,0 +1,39 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection.mutable + +import org.scalacheck._ +import org.scalacheck.Prop._ + +object ArrayBufferProperties extends Properties("mutable.ArrayBuffer") { + + type Elem = Int + + property("view consistency after modifications") = forAll { (buf: ArrayBuffer[Elem]) => + def check[U](op: ArrayBuffer[Elem] => U): Prop = { + val b = buf.clone() + val view = b.view + op(b) // modifies the buffer + b.sameElements(view) + } + + val spaceForMoreElems = buf.sizeIs <= (Int.MaxValue / 2 - 101) + + (check(_.clear()) :| "_.clear()") && + (check(_.dropRightInPlace(1)) :| "_.dropRightInPlace(1)") && + (check(_.dropInPlace(1)) :| "_.dropInPlace(1)") && + (spaceForMoreElems ==> (check(_ ++= (1 to 100)) :| "_ ++= (1 to 100)")) && + (spaceForMoreElems ==> (check(_.prependAll(1 to 100)) :| "_.prependAll(1 to 100)")) && + ((!buf.isEmpty && spaceForMoreElems) ==> (check(_.insertAll(1, 1 to 100)) :| "_.insertAll(1, 1 to 100)")) + } +} diff --git a/test/scalacheck/scala/collection/mutable/MapProperties.scala b/test/scalacheck/scala/collection/mutable/MapProperties.scala index a77365d5a244..22394a1931c5 100644 --- a/test/scalacheck/scala/collection/mutable/MapProperties.scala +++ b/test/scalacheck/scala/collection/mutable/MapProperties.scala @@ -33,6 +33,7 @@ object MapProperties extends Properties("mutable.Map") { override def addOne(elem: (K, V)): this.type = { _elems += elem; this } } + @annotation.nowarn("cat=deprecation&msg=ListMap") implicit val arbMap: Arbitrary[Map[K, V]] = Arbitrary { for { @@ -52,4 +53,4 @@ object MapProperties extends Properties("mutable.Map") { map.filterInPlace(p) (map: collection.Map[K, V]) ?= expected } -} \ No newline at end of file +} diff --git a/test/scalacheck/scala/collection/mutable/RedBlackTree.scala b/test/scalacheck/scala/collection/mutable/RedBlackTree.scala index a6613309bc7a..c643a3d4c104 100644 --- a/test/scalacheck/scala/collection/mutable/RedBlackTree.scala +++ b/test/scalacheck/scala/collection/mutable/RedBlackTree.scala @@ -24,7 +24,7 @@ abstract class RedBlackTreeTest(tname: String) extends Properties(tname) with Re import RB._ def nodeAt[A](tree: Tree[String, A], n: Int): Option[(String, A)] = if (n < iterator(tree).size && n >= 0) - Some(iterator(tree).drop(n).next) + Some(iterator(tree).drop(n).next()) else None diff --git a/test/scalacheck/scala/math/BigIntProperties.scala b/test/scalacheck/scala/math/BigIntProperties.scala index c4c0295dc50a..d036719b368f 100644 --- a/test/scalacheck/scala/math/BigIntProperties.scala +++ b/test/scalacheck/scala/math/BigIntProperties.scala @@ -61,6 +61,7 @@ object BigIntProperties extends Properties("BigInt") { property("longValue") = 
forAll { (l: Long) => BigInt(l).longValue ?= l } property("toLong") = forAll { (l: Long) => BigInt(l).toLong ?= l } + property("new BigInt(bigInteger = BigInteger.ZERO)") = (new BigInt(bigInteger = BigInteger.ZERO)) == 0 property("BigInt.apply(i: Int)") = forAll { (i: Int) => BigInt(i) ?= BigInt(BigInteger.valueOf(i)) } property("BigInt.apply(l: Long)") = forAll { (l: Long) => BigInt(l) ?= BigInt(BigInteger.valueOf(l)) } property("BigInt.apply(x: Array[Byte])") = forAll(bigInteger) { bi => BigInt(bi) ?= BigInt(bi.toByteArray) } diff --git a/test/scalacheck/scala/reflect/quasiquotes/ArbitraryTreesAndNames.scala b/test/scalacheck/scala/reflect/quasiquotes/ArbitraryTreesAndNames.scala index 0541405f1c7d..19032a2d0fb7 100644 --- a/test/scalacheck/scala/reflect/quasiquotes/ArbitraryTreesAndNames.scala +++ b/test/scalacheck/scala/reflect/quasiquotes/ArbitraryTreesAndNames.scala @@ -1,6 +1,7 @@ package scala.reflect.quasiquotes import org.scalacheck._, Prop._, Gen._, Arbitrary._ +import scala.language.implicitConversions import scala.reflect.runtime.universe._, internal._, Flag._ trait ArbitraryTreesAndNames { diff --git a/test/scalacheck/scala/reflect/quasiquotes/DefinitionConstructionProps.scala b/test/scalacheck/scala/reflect/quasiquotes/DefinitionConstructionProps.scala index 01cdea398f55..2356f272038c 100644 --- a/test/scalacheck/scala/reflect/quasiquotes/DefinitionConstructionProps.scala +++ b/test/scalacheck/scala/reflect/quasiquotes/DefinitionConstructionProps.scala @@ -1,6 +1,7 @@ package scala.reflect.quasiquotes import org.scalacheck._, Prop._, Gen._, Arbitrary._ +import scala.language.reflectiveCalls import scala.reflect.runtime.universe._, Flag._, internal.reificationSupport.ScalaDot object DefinitionConstructionProps @@ -34,6 +35,7 @@ trait ClassConstruction { self: QuasiquoteProperties => val emptyConstructor = DefDef(Modifiers(), termNames.CONSTRUCTOR, List(), List(List()), TypeTree(), Block(List(pendingSuperCall), Literal(Constant(())))) + @annotation.nowarn("cat=deprecation&msg=emptyValDef") def classWith(name: TypeName, parents: List[Tree] = List(anyRef), body: List[DefDef] = Nil) = ClassDef( Modifiers(), name, List(), diff --git a/test/scalacheck/scala/reflect/quasiquotes/DeprecationProps.scala b/test/scalacheck/scala/reflect/quasiquotes/DeprecationProps.scala index 9439a5a2c69f..cccb06144ce5 100644 --- a/test/scalacheck/scala/reflect/quasiquotes/DeprecationProps.scala +++ b/test/scalacheck/scala/reflect/quasiquotes/DeprecationProps.scala @@ -3,6 +3,7 @@ package scala.reflect.quasiquotes import org.scalacheck._, Prop._, Gen._, Arbitrary._ import scala.reflect.runtime.universe._ +@annotation.nowarn("cat=deprecation") object DeprecationProps extends QuasiquoteProperties("deprecation") { val tname = TypeName("Foo") val tpt = tq"Foo" diff --git a/test/scalacheck/scala/reflect/quasiquotes/UnliftableProps.scala b/test/scalacheck/scala/reflect/quasiquotes/UnliftableProps.scala index 77e5b2de3d88..ae2d9aaf0b7f 100644 --- a/test/scalacheck/scala/reflect/quasiquotes/UnliftableProps.scala +++ b/test/scalacheck/scala/reflect/quasiquotes/UnliftableProps.scala @@ -1,8 +1,10 @@ package scala.reflect.quasiquotes +import org.junit.Assert.{assertEquals, assertTrue} import org.scalacheck._, Prop._, Gen._, Arbitrary._ import scala.reflect.runtime.universe._, Flag._ +@annotation.nowarn("msg=deprecated adaptation") object UnliftableProps extends QuasiquoteProperties("unliftable") { property("unlift name") = test { val termname0 = TermName("foo") @@ -74,7 +76,9 @@ object UnliftableProps extends 
QuasiquoteProperties("unliftable") { property("unlift scala.symbol") = test { val q"${s: scala.Symbol}" = q"'foo" - assert(s.isInstanceOf[scala.Symbol] && s == 'foo) + //assert(s.isInstanceOf[scala.Symbol] && s == Symbol("foo")) + assertTrue(s.isInstanceOf[scala.Symbol]) + assertEquals(Symbol("foo"), s) } implicit def unliftList[T: Unliftable]: Unliftable[List[T]] = Unliftable { diff --git a/test/scalacheck/t2460.scala b/test/scalacheck/t2460.scala index 42ff3ecfe6ab..40c8fb87cd61 100644 --- a/test/scalacheck/t2460.scala +++ b/test/scalacheck/t2460.scala @@ -8,15 +8,15 @@ object SI2460Test extends Properties("Regex : Ticket 2460") { val vowel = Gen.oneOf("a", "z") val numberOfMatch = forAll(vowel) { - (s: String) => "\\s*([a-z])\\s*".r("data").findAllMatchIn((1 to 20).map(_ => s).mkString).size == 20 + (s: String) => "\\s*([a-z])\\s*".r.findAllMatchIn((1 to 20).map(_ => s).mkString).size == 20 } val numberOfGroup = forAll(vowel) { - (s: String) => "\\s*([a-z])\\s*([a-z])\\s*".r("data").findAllMatchIn((1 to 20).map(_ => s).mkString).next.groupCount == 2 + (s: String) => "\\s*([a-z])\\s*([a-z])\\s*".r.findAllMatchIn((1 to 20).map(_ => s).mkString).next().groupCount == 2 } val nameOfGroup = forAll(vowel) { - (s: String) => "([a-z])".r("data").findAllMatchIn(s).next.group("data") == s + (s: String) => "(?[a-z])".r.findAllMatchIn(s).next().group("data") == s } val tests = List( diff --git a/test/scalacheck/treemap.scala b/test/scalacheck/treemap.scala index f21dacaef7fa..83fb586b5192 100644 --- a/test/scalacheck/treemap.scala +++ b/test/scalacheck/treemap.scala @@ -71,21 +71,21 @@ object TreeMapTest extends Properties("TreeMap") { property("minAfter") = forAll { (elements: List[Int]) => elements.nonEmpty ==> { val half = elements.take(elements.size / 2) val subject = TreeMap((half zip half): _*) - elements.forall{e => { - val temp = subject.from(e) + elements.forall { e => + val temp = subject.rangeFrom(e) if (temp.isEmpty) subject.minAfter(e).isEmpty else subject.minAfter(e).get == temp.min - }} + } }} property("maxBefore") = forAll { (elements: List[Int]) => elements.nonEmpty ==> { val half = elements.take(elements.size / 2) val subject = TreeMap((half zip half): _*) - elements.forall{e => { - val temp = subject.until(e) + elements.forall { e => + val temp = subject.rangeUntil(e) if (temp.isEmpty) subject.maxBefore(e).isEmpty else subject.maxBefore(e).get == temp.max - }} + } }} property("head/tail identity") = forAll { (subject: TreeMap[Int, String]) => subject.nonEmpty ==> { @@ -146,7 +146,7 @@ object TreeMapTest extends Properties("TreeMap") { property("from is inclusive") = forAll { (subject: TreeMap[Int, String]) => subject.nonEmpty ==> { val n = choose(0, subject.size - 1).sample.get val from = subject.drop(n).firstKey - subject.from(from).firstKey == from && subject.from(from).forall(_._1 >= from) + subject.rangeFrom(from).firstKey == from && subject.rangeFrom(from).forall(_._1 >= from) }} property("to is inclusive") = forAll { (subject: TreeMap[Int, String]) => subject.nonEmpty ==> { @@ -158,7 +158,7 @@ object TreeMapTest extends Properties("TreeMap") { property("until is exclusive") = forAll { (subject: TreeMap[Int, String]) => subject.size > 1 ==> { val n = choose(1, subject.size - 1).sample.get val until = subject.drop(n).firstKey - subject.until(until).lastKey == subject.take(n).lastKey && subject.until(until).forall(_._1 <= until) + subject.rangeUntil(until).lastKey == subject.take(n).lastKey && subject.rangeUntil(until).forall(_._1 <= until) }} property("remove single") = forAll 
{ (subject: TreeMap[Int, String]) => subject.nonEmpty ==> { diff --git a/test/scalacheck/treeset.scala b/test/scalacheck/treeset.scala index 286fb1bc6919..e4ba91f54727 100644 --- a/test/scalacheck/treeset.scala +++ b/test/scalacheck/treeset.scala @@ -70,7 +70,7 @@ object TreeSetTest extends Properties("TreeSet") { val half = elements.take(elements.size / 2) val subject = TreeSet(half: _*) elements.forall{e => { - val temp = subject.from(e) + val temp = subject.rangeFrom(e) if (temp.isEmpty) subject.minAfter(e).isEmpty else subject.minAfter(e).get == temp.min }} @@ -80,7 +80,7 @@ object TreeSetTest extends Properties("TreeSet") { val half = elements.take(elements.size / 2) val subject = TreeSet(half: _*) elements.forall{e => { - val temp = subject.from(e) + val temp = subject.rangeFrom(e) if (temp.isEmpty) subject.minAfter(e).isEmpty else subject.minAfter(e).get == temp.min }} @@ -144,7 +144,7 @@ object TreeSetTest extends Properties("TreeSet") { property("from is inclusive") = forAll { (subject: TreeSet[Int]) => subject.nonEmpty ==> { val n = choose(0, subject.size - 1).sample.get val from = subject.drop(n).firstKey - subject.from(from).firstKey == from && subject.from(from).forall(_ >= from) + subject.rangeFrom(from).firstKey == from && subject.rangeFrom(from).forall(_ >= from) }} property("to is inclusive") = forAll { (subject: TreeSet[Int]) => subject.nonEmpty ==> { @@ -156,7 +156,7 @@ object TreeSetTest extends Properties("TreeSet") { property("until is exclusive") = forAll { (subject: TreeSet[Int]) => subject.size > 1 ==> { val n = choose(1, subject.size - 1).sample.get val until = subject.drop(n).firstKey - subject.until(until).lastKey == subject.take(n).lastKey && subject.until(until).forall(_ <= until) + subject.rangeUntil(until).lastKey == subject.take(n).lastKey && subject.rangeUntil(until).forall(_ <= until) }} property("remove single") = forAll { (subject: TreeSet[Int]) => subject.nonEmpty ==> { diff --git a/test/scaladoc/run/t5527.scala b/test/scaladoc/run/t5527.scala index b4fdc70339a3..ec4ee4eeaa04 100644 --- a/test/scaladoc/run/t5527.scala +++ b/test/scaladoc/run/t5527.scala @@ -137,11 +137,8 @@ object Test extends DirectTest { } """.trim - // redirect err to out, for logging - override def show(): Unit = StreamCapture.savingSystem { - System.setErr(System.out) - compile() - } + override def show(): Unit = compile() + // doc.Settings override def newSettings(args: List[String]) = new doc.Settings(_ => ()).tap(_.processArguments(args, true)) // ScaladocGlobal yielded by DocFactory#compiler, requires doc.Settings diff --git a/test/tasty/neg-move-macros/src-2/TestMacroCompat.check b/test/tasty/neg-move-macros/src-2/TestMacroCompat.check index f69ad0abe7b5..07deaf926e69 100644 --- a/test/tasty/neg-move-macros/src-2/TestMacroCompat.check +++ b/test/tasty/neg-move-macros/src-2/TestMacroCompat.check @@ -1,4 +1,4 @@ -TestMacroCompat_fail.scala:7: error: can't find term required by object tastytest.MacroCompat: tastytest.`package`.Macros.posImpl; perhaps it is missing from the classpath. +TestMacroCompat_fail.scala:7: error: can't find term required by object tastytest.MacroCompat: tastytest.package.Macros.posImpl; perhaps it is missing from the classpath. 
val result = MacroCompat.testCase("foo")(pos) ^ 1 error diff --git a/test/tasty/neg-move-macros/src-3/MacroCompat.scala b/test/tasty/neg-move-macros/src-3/MacroCompat.scala index d3c3374b17c3..8a0021a42660 100644 --- a/test/tasty/neg-move-macros/src-3/MacroCompat.scala +++ b/test/tasty/neg-move-macros/src-3/MacroCompat.scala @@ -2,9 +2,14 @@ package tastytest import scala.language.experimental.macros +import scala.annotation.experimental + object MacroCompat { + @experimental implicit def pos: Position = macro Macros.posImpl // implemented in test/tasty/run/pre/tastytest/package.scala + + @experimental implicit inline def pos: Position = ${ Macros3.posImpl } def testCase(test: => Any)(using Position): String = @@ -16,7 +21,7 @@ object MacroCompat { def posImpl(using quotes: Quotes): Expr[Position] = { import quotes.reflect.given val pos = quotes.reflect.Position.ofMacroExpansion - val name = pos.sourceFile.jpath.getFileName.toString + val name = pos.sourceFile.getJPath.map(_.getFileName.toString).getOrElse("?.scala") val line = pos.startLine + 1 '{ Position(${Expr(name)}, ${Expr(line)}) } } diff --git a/test/tasty/neg/src-2/TestCompiletimeQuoteType.check b/test/tasty/neg/src-2/TestCompiletimeQuoteType.check index c041a36c4d04..5c2f3c01b8df 100644 --- a/test/tasty/neg/src-2/TestCompiletimeQuoteType.check +++ b/test/tasty/neg/src-2/TestCompiletimeQuoteType.check @@ -1,4 +1,4 @@ -TestCompiletimeQuoteType_fail.scala:4: error: Unsupported Scala 3 context function type in result: scala.quoted.Quotes ?=> scala.quoted.Type[T]; found in method of in object scala.quoted.Type. +TestCompiletimeQuoteType_fail.scala:4: error: could not find implicit value for evidence parameter of type scala.quoted.Type[Int] def test = CompiletimeQuoteType.f[Int] ^ 1 error diff --git a/test/tasty/neg/src-2/TestDelayedPrivate.check b/test/tasty/neg/src-2/TestDelayedPrivate.check new file mode 100644 index 000000000000..dbf046b62d43 --- /dev/null +++ b/test/tasty/neg/src-2/TestDelayedPrivate.check @@ -0,0 +1,4 @@ +TestDelayedPrivate_fail.scala:7: error: value Deeper is not a member of object tastytest.DelayedPrivate.Nested + DelayedPrivate.Nested.Deeper + ^ +1 error diff --git a/test/tasty/neg/src-2/TestDelayedPrivateInverse.check b/test/tasty/neg/src-2/TestDelayedPrivateInverse.check new file mode 100644 index 000000000000..9742e9453372 --- /dev/null +++ b/test/tasty/neg/src-2/TestDelayedPrivateInverse.check @@ -0,0 +1,4 @@ +TestDelayedPrivateInverse_fail.scala:6: error: value Internal is not a member of object tastytest.DelayedPrivateInverse + val _ = DelayedPrivateInverse.Internal + ^ +1 error diff --git a/test/tasty/neg/src-2/TestDelayedPrivateInverse_fail.scala b/test/tasty/neg/src-2/TestDelayedPrivateInverse_fail.scala new file mode 100644 index 000000000000..002fa21936c9 --- /dev/null +++ b/test/tasty/neg/src-2/TestDelayedPrivateInverse_fail.scala @@ -0,0 +1,8 @@ +package tastytest + +object TestDelayedPrivateInverse { + def test: DelayedPrivateInverse.Parent[Nothing] = ??? 
// force sealed children of parent + locally { + val _ = DelayedPrivateInverse.Internal + } +} diff --git a/test/tasty/neg/src-2/TestDelayedPrivate_fail.scala b/test/tasty/neg/src-2/TestDelayedPrivate_fail.scala new file mode 100644 index 000000000000..50c7728d8e91 --- /dev/null +++ b/test/tasty/neg/src-2/TestDelayedPrivate_fail.scala @@ -0,0 +1,9 @@ +package tastytest + +object TestDelayedPrivate { + + locally { + val _ = Nil: List[DelayedPrivate.Root] // force Root to be seen first + DelayedPrivate.Nested.Deeper + } +} diff --git a/test/tasty/neg/src-2/TestFooMatch.check b/test/tasty/neg/src-2/TestFooMatch.check new file mode 100644 index 000000000000..5bcfabce3eca --- /dev/null +++ b/test/tasty/neg/src-2/TestFooMatch.check @@ -0,0 +1,7 @@ +TestFooMatch_fail.scala:5: warning: match may not be exhaustive. +It would fail on the following input: Foo() + def foo(f: Foo): Unit = f match { + ^ +error: No warnings can be incurred under -Werror. +1 warning +1 error diff --git a/test/tasty/neg/src-2/TestFooMatch_fail.scala b/test/tasty/neg/src-2/TestFooMatch_fail.scala new file mode 100644 index 000000000000..d6c459deefca --- /dev/null +++ b/test/tasty/neg/src-2/TestFooMatch_fail.scala @@ -0,0 +1,8 @@ +// here we test unpickling a sealed child in another tasty file +package dottyi3149 + +object TestFooMatch { + def foo(f: Foo): Unit = f match { + case f: Foo.Bar => () + } +} diff --git a/test/tasty/neg/src-2/TestInvisibleDefs.check b/test/tasty/neg/src-2/TestInvisibleDefs.check new file mode 100644 index 000000000000..9ce3bf4804cd --- /dev/null +++ b/test/tasty/neg/src-2/TestInvisibleDefs.check @@ -0,0 +1,13 @@ +TestInvisibleDefs_fail.scala:5: error: type argIsHello is not a member of package tastytest + def foo: tastytest.argIsHello = ??? // has invisible flag so should not be seen + ^ +TestInvisibleDefs_fail.scala:6: error: type argIsHello is not a member of package tastytest + def bar: tastytest.argIsHello = ??? // second try on same type + ^ +TestInvisibleDefs_fail.scala:11: error: value getStatus is not a member of tastytest.InvisibleDefs.MyBean + mybean.getStatus() // error + ^ +TestInvisibleDefs_fail.scala:12: error: value setStatus is not a member of tastytest.InvisibleDefs.MyBean + mybean.setStatus("closed") // error + ^ +4 errors diff --git a/test/tasty/neg/src-2/TestInvisibleDefs_fail.scala b/test/tasty/neg/src-2/TestInvisibleDefs_fail.scala new file mode 100644 index 000000000000..d8e681206150 --- /dev/null +++ b/test/tasty/neg/src-2/TestInvisibleDefs_fail.scala @@ -0,0 +1,15 @@ +package tastytest + +object TestInvisibleDefs { + + def foo: tastytest.argIsHello = ??? // has invisible flag so should not be seen + def bar: tastytest.argIsHello = ??? 
// second try on same type + + def testBean = { + val mybean = new InvisibleDefs.MyBean + mybean.status = "open" + mybean.getStatus() // error + mybean.setStatus("closed") // error + } + +} diff --git a/test/tasty/neg/src-2/TestThisTypes.check b/test/tasty/neg/src-2/TestThisTypes.check new file mode 100644 index 000000000000..a9025c7b3fa3 --- /dev/null +++ b/test/tasty/neg/src-2/TestThisTypes.check @@ -0,0 +1,6 @@ +TestThisTypes_fail.scala:12: error: type mismatch; + found : b.Base + required: a.Base + aBase = b.doTest.get // error + ^ +1 error diff --git a/test/tasty/neg/src-2/TestThisTypes_fail.scala b/test/tasty/neg/src-2/TestThisTypes_fail.scala new file mode 100644 index 000000000000..b0c82d808096 --- /dev/null +++ b/test/tasty/neg/src-2/TestThisTypes_fail.scala @@ -0,0 +1,15 @@ +package tastytest + +import ThisTypes._ + +object TestThisTypes { + + def test = { + val a = new Sub3() + val b = new Sub3() + + var aBase = a.doTest.get + aBase = b.doTest.get // error + } + +} diff --git a/test/tasty/neg/src-3/DelayedPrivate.scala b/test/tasty/neg/src-3/DelayedPrivate.scala new file mode 100644 index 000000000000..76c2fc949d20 --- /dev/null +++ b/test/tasty/neg/src-3/DelayedPrivate.scala @@ -0,0 +1,15 @@ +package tastytest + +object DelayedPrivate { + + sealed trait Root + + object Nested { + + private object Deeper { + final class Leaf extends Root + } + + } + +} diff --git a/test/tasty/neg/src-3/DelayedPrivateInverse.scala b/test/tasty/neg/src-3/DelayedPrivateInverse.scala new file mode 100644 index 000000000000..3d03e90fb361 --- /dev/null +++ b/test/tasty/neg/src-3/DelayedPrivateInverse.scala @@ -0,0 +1,8 @@ +package tastytest + +object DelayedPrivateInverse { + private object Internal { + final class Impl extends DelayedPrivateInverse.Parent[Nothing] + } + sealed trait Parent[T] +} diff --git a/test/tasty/neg/src-3/ErasedTypes.scala b/test/tasty/neg/src-3/ErasedTypes.scala index bafb95891012..432dcc306093 100644 --- a/test/tasty/neg/src-3/ErasedTypes.scala +++ b/test/tasty/neg/src-3/ErasedTypes.scala @@ -1,16 +1,25 @@ package tastytest +import language.experimental.erasedDefinitions + +import scala.annotation.experimental + object ErasedTypes { + @experimental trait Foo { def foo1(erased x: String): Int def foo2(using erased x: String): Int } + @experimental class Bar[F <: Foo { def foo1(erased x: String): 0 }] + + @experimental class Baz[F <: Foo { def foo2(using erased x: String): 0 }] object ErasedCompileTimeOps { + @experimental erased def theNothing: Nothing = ??? 
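// Illustrative sketch, not from the patch: the TestThisTypes_fail.check / TestThisTypes_fail.scala
// hunks above reject `aBase = b.doTest.get` because `Base` is a path-dependent type, so a `b.Base`
// is not an `a.Base`. The same effect with plain Scala 2 classes; the names below (`Wrap`,
// `PathDependentSketch`) are hypothetical and not defined anywhere in the patch.
object PathDependentSketch {
  class Wrap {
    class Base
    def make: Option[Base] = Some(new Base) // result type depends on the instance: Option[this.Base]
  }

  def main(args: Array[String]): Unit = {
    val a = new Wrap
    val b = new Wrap
    var aBase: Option[a.Base] = a.make
    // aBase = b.make  // does not compile: Option[b.Base] does not conform to Option[a.Base]
    aBase = a.make     // same-path assignment is fine
    assert(aBase.nonEmpty)
  }
}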
} diff --git a/test/tasty/neg/src-3/InvisibleDefs.scala b/test/tasty/neg/src-3/InvisibleDefs.scala new file mode 100644 index 000000000000..5bd0190c28e1 --- /dev/null +++ b/test/tasty/neg/src-3/InvisibleDefs.scala @@ -0,0 +1,16 @@ +package tastytest + +import scala.beans.BeanProperty + +object InvisibleDefs { + + @main def argIsHello(arg: String): Unit = assert(arg == "Hello") + + class MyBean { + + @BeanProperty + var status = "" + + } + +} diff --git a/test/tasty/neg/src-3/ThisTypes.scala b/test/tasty/neg/src-3/ThisTypes.scala new file mode 100644 index 000000000000..bf958993c0d8 --- /dev/null +++ b/test/tasty/neg/src-3/ThisTypes.scala @@ -0,0 +1,15 @@ +package tastytest + +object ThisTypes { + + abstract class Wrap3 { + class Base + final type Res = Option[Base] + def doTest: Res + } + + class Sub3 extends Wrap3 { + def doTest: Res = Some(new Base()) + } + +} diff --git a/test/tasty/neg/src-3/dottyi3149/foo.scala b/test/tasty/neg/src-3/dottyi3149/foo.scala new file mode 100644 index 000000000000..e7a2797ab0bd --- /dev/null +++ b/test/tasty/neg/src-3/dottyi3149/foo.scala @@ -0,0 +1,19 @@ +// here we test unpickling a sealed child in another tasty file +package dottyi3149 + +sealed class Foo +object Foo { + final class Bar extends Foo +} + +class Test { + def f = { + class Bar extends Foo + } + class C { + class Bar extends Foo + } + object O { + class Bar extends Foo + } +} diff --git a/test/tasty/pos/pre/tastytest/package.scala b/test/tasty/pos/pre/tastytest/package.scala index cc823305a7cc..179fc8aefa9d 100644 --- a/test/tasty/pos/pre/tastytest/package.scala +++ b/test/tasty/pos/pre/tastytest/package.scala @@ -5,31 +5,52 @@ package object tastytest { import scala.util.Random import scala.reflect.macros.blackbox.Context + import scala.collection.mutable + implicit final class SafeEq[T](private val t: T) extends AnyVal { final def ===[U](u: U)(implicit ev: T =:= U): Boolean = ??? 
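// Illustrative sketch, not from the patch: the `findChildren` rewrite in the hunk just below
// replaces nested recursion over `knownDirectSubclasses` with an explicit work list
// (`mutable.ArrayDeque`) plus a `seen` set, so deeply nested sealed hierarchies are walked
// iteratively. The same traversal over an arbitrary `children` function; every name here is a
// hypothetical stand-in for the macro's symbols.
object TransitiveChildrenSketch {
  import scala.collection.mutable

  def transitiveChildren[A](roots: Set[A])(children: A => Set[A]): Set[A] = {
    val queue = mutable.ArrayDeque.from(roots) // nodes still to expand
    var seen  = roots
    while (queue.nonEmpty) {
      val next   = queue.removeHead()
      val unseen = children(next) -- seen      // only enqueue nodes not expanded yet
      seen ++= unseen
      queue ++= unseen
    }
    seen // the roots plus everything transitively reachable from them
  }

  def main(args: Array[String]): Unit = {
    val graph = Map("Root" -> Set("A", "B"), "A" -> Set("C")).withDefaultValue(Set.empty[String])
    assert(transitiveChildren(Set("Root"))(graph) == Set("Root", "A", "B", "C"))
  }
}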
} def compiletimeHasChild[T](child: String): Unit = macro Macros.hasChildImpl[T] - def compiletimeHasNestedChildren[T](children: String*): Unit = macro Macros.hasChildrenImpl[T] + def compiletimeHasNestedChildren[T](expected: String*): Unit = macro Macros.hasChildrenImpl[T] object Macros { - def hasChildrenImpl[T](c: Context)(children: c.Expr[String]*)(implicit T: c.WeakTypeTag[T]): c.Expr[Unit] = { + def hasChildrenImpl[T](c: Context)(expected: c.Expr[String]*)(implicit T: c.WeakTypeTag[T]): c.Expr[Unit] = { import c.universe._ - def findChildren(sym: Symbol): Set[Symbol] = - sym.asClass.knownDirectSubclasses.flatMap(s => findChildren(s) + s) + def findChildren(sym: Symbol): Set[Symbol] = { + def findLvlN(explore: mutable.ArrayDeque[Symbol], seen: Set[Symbol]): Set[Symbol] = { + if (explore.nonEmpty) { + val (s, rest) = (explore.head, explore.dropInPlace(1)) + val lvlN = s.asClass.knownDirectSubclasses + val unseen = lvlN -- seen + if (unseen.nonEmpty) { + findLvlN(rest ++= unseen, seen ++ unseen) + } else { + findLvlN(rest, seen) + } + } + else { + seen + } + } + + val lvl1 = sym.asClass.knownDirectSubclasses + if (lvl1.isEmpty) lvl1 + else findLvlN(mutable.ArrayDeque.from(lvl1 - sym), lvl1) + } val sym = T.tpe.typeSymbol + lazy val children = findChildren(sym) if (!sym.isClass) { c.error(c.enclosingPosition, s"${T.tpe} is not a class type; cannot inspect sealed children") } else { - children.foreach { child => + expected.foreach { child => child.tree match { case Literal(Constant(nmeString: String)) => - val children = findChildren(sym) - val contains = children.toList.map(_.fullName).exists(_ == nmeString) + val contains = children.exists(_.fullName == nmeString) if (!contains) { c.error(child.tree.pos, s"$sym does not have a child symbol $nmeString") } diff --git a/test/tasty/pos/src-2/dottyi3149/TestFooChildren.scala b/test/tasty/pos/src-2/dottyi3149/TestFooChildren.scala new file mode 100644 index 000000000000..4121d1d869da --- /dev/null +++ b/test/tasty/pos/src-2/dottyi3149/TestFooChildren.scala @@ -0,0 +1,13 @@ +// here we test unpickling a sealed child in another tasty file +package dottyi3149 + +import tastytest._ + +object TestFooChildren { + compiletimeHasNestedChildren[Foo]( + "dottyi3149.Foo.Bar", + "dottyi3149.Foo.dottyi3149$Foo$$localSealedChildProxy", // workaround to represent "dottyi3149.Test.Bar$1",k + "dottyi3149.Test.O.Bar", + "dottyi3149.Test.C.Bar" + ) +} diff --git a/test/tasty/pos/src-2/tastytest/TestTCGivens.scala b/test/tasty/pos/src-2/tastytest/TestTCGivens.scala new file mode 100644 index 000000000000..4569dc47fc4e --- /dev/null +++ b/test/tasty/pos/src-2/tastytest/TestTCGivens.scala @@ -0,0 +1,8 @@ +package tastytest + +import givens._ + +object TestTCGivens { + def exported = TCModule.TC.mkTCFromInt[1] + def original: TCInstances.TC.mkTCFromInt[1] = TCInstances.TC.mkTCFromInt[1] +} diff --git a/test/tasty/pos/src-3/dottyi3149/foo.scala b/test/tasty/pos/src-3/dottyi3149/foo.scala new file mode 100644 index 000000000000..e7a2797ab0bd --- /dev/null +++ b/test/tasty/pos/src-3/dottyi3149/foo.scala @@ -0,0 +1,19 @@ +// here we test unpickling a sealed child in another tasty file +package dottyi3149 + +sealed class Foo +object Foo { + final class Bar extends Foo +} + +class Test { + def f = { + class Bar extends Foo + } + class C { + class Bar extends Foo + } + object O { + class Bar extends Foo + } +} diff --git a/test/tasty/pos/src-3/tastytest/Annotated.scala b/test/tasty/pos/src-3/tastytest/Annotated.scala index 6a5a8d43fe75..a9fe6ed2bfde 100644 --- 
a/test/tasty/pos/src-3/tastytest/Annotated.scala +++ b/test/tasty/pos/src-3/tastytest/Annotated.scala @@ -6,6 +6,12 @@ trait Annotated @rootAnnot(1) trait RootAnnotated +@overloadedAnnot(123) +trait OverloadedAnnotated1 + +@overloadedAnnot(false, "hello") +trait OverloadedAnnotated2 + trait OuterClassAnnotated extends OuterClass { @basicAnnot(xyz) def foo = 1 diff --git a/test/tasty/pos/src-3/tastytest/AnythingIsPossible.scala b/test/tasty/pos/src-3/tastytest/AnythingIsPossible.scala index 31fa2f8da1e2..148b2d9caa21 100644 --- a/test/tasty/pos/src-3/tastytest/AnythingIsPossible.scala +++ b/test/tasty/pos/src-3/tastytest/AnythingIsPossible.scala @@ -2,7 +2,8 @@ package tastytest object AnythingIsPossible { - class Box[A](val a: A) + class Box[A](accept: => A): + val a: A = accept class Class extends Box({ class X { final val x = Map(("", 3)) } ; val foo = new X(); foo.x: foo.x.type }) diff --git a/test/tasty/pos/src-3/tastytest/givens/TC.scala b/test/tasty/pos/src-3/tastytest/givens/TC.scala new file mode 100644 index 000000000000..279f34136045 --- /dev/null +++ b/test/tasty/pos/src-3/tastytest/givens/TC.scala @@ -0,0 +1,12 @@ +package tastytest.givens + +object TCModule: + trait TC[V] + object TC: + export TCInstances.TC.given + +object TCInstances: + object TC: + import TCModule.TC + given mkTCFromInt[V <: Int]: TC[V] with + type Out = Int diff --git a/test/tasty/pos/src-3/tastytest/overloadedAnnot.scala b/test/tasty/pos/src-3/tastytest/overloadedAnnot.scala new file mode 100644 index 000000000000..05179494cbc6 --- /dev/null +++ b/test/tasty/pos/src-3/tastytest/overloadedAnnot.scala @@ -0,0 +1,6 @@ +package tastytest + +final class overloadedAnnot(str: String, int: Int, boolean: Boolean) extends scala.annotation.StaticAnnotation { + def this(int: Int) = this("abc", int, false) + def this(boolean: Boolean, str: String) = this(str, 123, boolean) +} diff --git a/test/tasty/run/pre/tastytest/package.scala b/test/tasty/run/pre/tastytest/package.scala index ccfd109a5f3a..fca544cff4fb 100644 --- a/test/tasty/run/pre/tastytest/package.scala +++ b/test/tasty/run/pre/tastytest/package.scala @@ -4,6 +4,29 @@ import scala.reflect.macros.blackbox.Context package object tastytest { + def anyObj[T]: T = null.asInstanceOf[T] + + trait Aspect { + def applyTo(op: => Unit): Unit + } + + implicit class AspectOps(op: => Unit) { + def @@(aspect: Aspect): Unit = aspect.applyTo(op) + } + + object ExpectCastOrNull extends Aspect { + def applyTo(op: => Unit): Unit = { + try { + op + throw new AssertionError("expected a failure") + } + catch { + case npe: NullPointerException => // swallow + case cce: ClassCastException => // swallow + } + } + } + implicit final class SafeEq[T](private val t: T) extends AnyVal { final def ===[U](u: U)(implicit ev: T =:= U): Boolean = t == u } diff --git a/test/tasty/run/pre/tastytest/reflectshims/Context.scala b/test/tasty/run/pre/tastytest/reflectshims/Context.scala new file mode 100644 index 000000000000..55c883114a9a --- /dev/null +++ b/test/tasty/run/pre/tastytest/reflectshims/Context.scala @@ -0,0 +1,9 @@ +package tastytest.reflectshims + +trait Context { + + type TreeShim = universe.TreeShim + + val universe: Universe + +} diff --git a/test/tasty/run/pre/tastytest/reflectshims/Universe.scala b/test/tasty/run/pre/tastytest/reflectshims/Universe.scala new file mode 100644 index 000000000000..722a4b5a70e6 --- /dev/null +++ b/test/tasty/run/pre/tastytest/reflectshims/Universe.scala @@ -0,0 +1,8 @@ +package tastytest.reflectshims + +abstract class Universe { + type TreeShim >: 
Null <: AnyRef with TreeShimApi + trait TreeShimApi extends Product { this: TreeShim => } + + val EmptyTree: TreeShim +} diff --git a/test/tasty/run/pre/tastytest/reflectshims/impl/Context.scala b/test/tasty/run/pre/tastytest/reflectshims/impl/Context.scala new file mode 100644 index 000000000000..1ed77e3e3be0 --- /dev/null +++ b/test/tasty/run/pre/tastytest/reflectshims/impl/Context.scala @@ -0,0 +1,17 @@ +package tastytest.reflectshims.impl + +import tastytest.reflectshims + +object Context extends reflectshims.Context { + + object universe extends reflectshims.Universe { + + abstract class TreeShimImpl extends TreeShimApi with Product + + type TreeShim = TreeShimImpl + + case object EmptyTree extends TreeShimImpl + + } + +} diff --git a/test/tasty/run/pre/tastytest/scala2Erasure/api.scala b/test/tasty/run/pre/tastytest/scala2Erasure/api.scala new file mode 100644 index 000000000000..f7999bd8fbd4 --- /dev/null +++ b/test/tasty/run/pre/tastytest/scala2Erasure/api.scala @@ -0,0 +1,263 @@ +package tastytest + +// Keep synchronized with src-3/tastytest/dottyErasureApi/api.scala +package scala2Erasure + +class foo extends scala.annotation.StaticAnnotation + +trait A +trait B +trait SubB extends B +trait C +trait Cov[+T] +trait Univ extends Any + +class D + +class VC(val self: A) extends AnyVal +class VC2(val self: A) extends AnyVal + +class Outer { + class E + trait F extends E +} + +object OpaqueHolder { + type Q[T] = Cov[T] + type Y[T] = Cov[T] +} +import OpaqueHolder._ + +sealed abstract class Enumerated +object Enumerated { + final val C1: Enumerated with A = new Enumerated with A {} + final val C2: Enumerated with B = new Enumerated with B {} +} + +// The parameter type of `a_XX` should erase to A, `b_XX` to `B`, etc. +// This is enforced by dottyApp/Main.scala +class Z { self => + def a_01(a: A with B): Unit = {} + def b_02X(b: B with A): Unit = {} + def a_02(a: A with B with A): Unit = {} + def a_03(a: A with (B with A)): Unit = {} + def b_04(b: A with (B with A) @foo): Unit = {} + def b_04X(b: A with (B with C) @foo): Unit = {} + def b_05(b: A with (B with A) @foo with (C with B with A) @foo): Unit = {} + + type T1 <: A with B + def a_06(a: T1): Unit = {} + + type S <: B with T1 + def a_07(a: S): Unit = {} + + type T2 <: B with A + type U <: T2 with S + def b_08(b: U): Unit = {} + + val singB: B = new B {} + def a_09(a: A with singB.type): Unit = {} + def b_10(b: singB.type with A): Unit = {} + + type V >: SubB <: B + def b_11(b: V): Unit = {} + def b_12(b: V with SubB): Unit = {} + + def d_13(d: D with A): Unit = {} + def d_14(d: A with D): Unit = {} + + val singD: D = new D {} + def d_13x(d: singD.type with A): Unit = {} + def d_14x(d: A with singD.type): Unit = {} + + type DEq = D + def d_15(d: A with DEq): Unit = {} + def d_16(d: A with (DEq @foo)): Unit = {} + def d_17(d: DEq with A): Unit = {} + def d_18(d: (DEq @foo) with A): Unit = {} + + val singDEq: DEq @foo = new D {} + def d_15b(d: A with singDEq.type): Unit = {} + def d_16b(d: A with (singDEq.type @foo)): Unit = {} + + type DSub <: D + def a_19(a: A with DSub): Unit = {} + def d_19x(d: DSub with A): Unit = {} + def z_20(z: DSub with Z): Unit = {} + + type W1 <: A with Cov[Any] + type X1 <: Cov[Int] with W1 + def a_21(a: X1): Unit = {} + + type W2 <: A with Cov[Any] + type X2 <: Cov[Int] with W2 + def a_22(a: X2): Unit = {} + + def z_23(z: A with this.type): Unit = {} + def z_24(z: this.type with A): Unit = {} + + def b_25(b: A with (B { type T })): Unit = {} + def a_26(a: (A { type T }) with ((B with A) { type T })): Unit 
= {} + + def a_27(a: VC with B): Unit = {} + def a_28(a: B with VC): Unit = {} + + val o1: Outer = new Outer + val o2: Outer = new Outer + def f_29(f: o1.E with o1.F): Unit = {} + def f_30(f: o1.F with o1.E): Unit = {} + def f_31(f: o1.E with o2.F): Unit = {} + def f_32(f: o2.F with o1.E): Unit = {} + def f_33(f: Outer#E with Outer#F): Unit = {} + def f_34(f: Outer#F with Outer#E): Unit = {} + + val structural1: { type DSub <: D } = new { type DSub <: D } + def a_35(a: A with structural1.DSub): Unit = {} + def d_36(a: structural1.DSub with A): Unit = {} + def z_37(z: Z with structural1.DSub): Unit = {} + def z_38(z: structural1.DSub with Z): Unit = {} + + val structural2: { type SubCB <: C with B } = new { type SubCB <: C with B } + def c_39(c: structural2.SubCB with B): Unit = {} + def c_40(c: B with structural2.SubCB): Unit = {} + + val structural3a: { type SubB <: B; type SubCB <: C with SubB } = new { type SubB <: B; type SubCB <: C with SubB } + val structural3b: { type SubB <: B; type SubCB <: C with SubB } = new { type SubB <: B; type SubCB <: C with SubB } + def c_41(c: structural3a.SubB with structural3a.SubCB): Unit = {} + def c_42(c: structural3a.SubCB with structural3a.SubB): Unit = {} + def b_43(b: structural3a.SubB with structural3b.SubCB): Unit = {} + def c_44(c: structural3b.SubCB with structural3a.SubB): Unit = {} + + type SubStructural <: C with structural3a.SubB + def c_45(x: structural3a.SubB with SubStructural): Unit = {} + def b_46(x: structural3b.SubB with SubStructural): Unit = {} + + type Rec1 <: A with B + type Rec2 <: C with Rec1 + def c_47(a: A with B with Rec2): Unit = {} + def a_48(a: (A with B) @foo with Rec2): Unit = {} + + type F1 = A with B + type F2 = A with B + type Rec3 <: F1 + type Rec4 <: C with Rec3 + def c_49(a: F1 @foo with Rec4): Unit = {} + def c_50(a: F1 with Rec4): Unit = {} + def a_51(a: F2 @foo with Rec4): Unit = {} + def c_52(a: F2 with Rec4): Unit = {} + + type AA = A + type F3 = AA with B + type Rec5 <: F3 + type Rec6 <: C with Rec5 + def a_53(a: F3 @foo with Rec6): Unit = {} + def c_54(a: F3 with Rec6): Unit = {} + + val structural4a: { type M[X] <: A } = new { type M[X] <: A } + val structural4b: { type N <: B with structural4a.M[Int] } = new { type N <: B with structural4a.M[Int] } + def b_55(x: structural4a.M[Any] with structural4b.N): Unit = {} + + type Bla = A { type M[X] <: A } + def b_56(x: Bla#M[Any] with ({ type N <: B with Bla#M[Int] })#N): Unit = {} + type AEq = A + type Bla2 = AEq { type M[X] <: A } + def a_57(x: Bla2#M[Any] with ({ type N <: B with Bla2#M[Int] })#N): Unit = {} + + def int_58(x: Int with Singleton): Unit = {} + def int_59(x: Singleton with Int): Unit = {} + def int_60(x: Int with Any): Unit = {} + def int_61(x: Any with Int): Unit = {} + def int_62(x: Int with AnyVal): Unit = {} + def int_63(x: AnyVal with Int): Unit = {} + + def intARRAY_64(x: Array[Int with Singleton]): Unit = {} + def object_65(x: Array[_ <: Int]): Unit = {} + def object_66(x: Array[_ <: Int with Singleton]): Unit = {} + def object_67(x: Array[_ <: Singleton with Int]): Unit = {} + def object_68(x: Array[_ <: Int with Any]): Unit = {} + def object_69(x: Array[_ <: Any with Int]): Unit = {} + def object_70(x: Array[_ <: Int with AnyVal]): Unit = {} + def object_71(x: Array[_ <: AnyVal with Int]): Unit = {} + + def stringARRAY_72(x: Array[String with Singleton]): Unit = {} + def stringARRAY_73(x: Array[_ <: String]): Unit = {} + def stringARRAY_74(x: Array[_ <: String with Singleton]): Unit = {} + def stringARRAY_75(x: Array[_ <: Singleton 
with String]): Unit = {} + def stringARRAY_76(x: Array[_ <: String with Any]): Unit = {} + def stringARRAY_77(x: Array[_ <: Any with String]): Unit = {} + def stringARRAY_78(x: Array[_ <: String with AnyRef]): Unit = {} + def stringARRAY_79(x: Array[_ <: AnyRef with String]): Unit = {} + + def object_80(x: Array[_ <: Singleton]): Unit = {} + def object_81(x: Array[_ <: AnyVal]): Unit = {} + def objectARRAY_82(x: Array[_ <: AnyRef]): Unit = {} + def object_83(x: Array[_ <: Any]): Unit = {} + + def object_84(x: Array[_ <: Serializable]): Unit = {} + def object_85(x: Array[_ <: Univ]): Unit = {} + def aARRAY_86(x: Array[_ <: A]): Unit = {} + def aARRAY_87(x: Array[_ <: A with B]): Unit = {} + + def objectARRAY_88(x: Array[Any]): Unit = {} + def objectARRAY_89(x: Array[AnyRef]): Unit = {} + def objectARRAY_90(x: Array[AnyVal]): Unit = {} + + def stringARRAY_91(x: Array[_ <: ({ type Foo <: String with Singleton })#Foo]): Unit = {} + def stringARRAY_92(x: Array[({ type Foo <: String with Singleton })#Foo]): Unit = {} + def stringARRAY_93(x: Array[({ type Id[T] = T })#Id[String with Singleton]]): Unit = {} + + def covARRAY_94(x: Array[Q[String]]): Unit = {} // cant define opaque type in scala 2, so it is ordinary type + + def aARRAY_95(x: Array[(A with B { type L <: String }) with C]): Unit = {} + def aARRAY_96(x: Array[A { type L <: String }]): Unit = {} + def zARRAY_97(x: Array[self.type]): Unit = {} + def aARRAY_98(x: Array[(A { type L <: String }) with B]): Unit = {} + def stringARRAY_99[Arg <: String](x: Array[Arg]): Unit = {} + def aARRAY_100(x: Array[Bla2#M[Any] with ({ type N <: B with Bla2#M[Int] })#N]): Unit = {} + def zARRAY_101(x: Array[structural1.DSub with Z]): Unit = {} + def aARRAY_102(x: Array[F3 @foo with Rec6]): Unit = {} + def aARRAY_103(x: Array[A @foo]): Unit = {} + def dARRAY_104(x: Array[singD.type]): Unit = {} + def intARRAY_105(x: Array[3]): Unit = {} + def vcARRAY_106(x: Array[VC]): Unit = {} + def listARRAY_107(x: Array[List[_]]): Unit = {} + def intARRAY_108(x: Array[Int with String]): Unit = {} + def stringARRAY_109(x: Array[String with Int]): Unit = {} + + def a_110(x: VC with VC2): Unit = {} + def a_111(x: VC2 with VC): Unit = {} + def aARRAY_112(x: Array[VC2 with VC]): Unit = {} // this should not erase to Array[A]??? + def aARRAY_113(x: Array[VC with VC2]): Unit = {} // this should not erase to Array[A]??? 
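// Illustrative sketch, not from the patch: the `a_XX`/`b_XX`/`intARRAY_XX` method names in this
// file encode the class each parameter is expected to erase to, and the `test("erasure matches
// name")` block later in this diff (TestErasure.scala) enforces that convention with Java
// reflection. A minimal standalone version of the same check, assuming `tastytest.scala2Erasure.Z`
// from this file is on the classpath; `ErasureNameSketch` itself is a hypothetical name.
object ErasureNameSketch {
  def main(args: Array[String]): Unit = {
    classOf[tastytest.scala2Erasure.Z].getDeclaredMethods.foreach { m =>
      m.getName match {
        case s"${prefix}_${suffix}" if m.getParameterCount == 1 =>
          // e.g. a_01(a: A with B) must take a parameter that erases to class A
          val erased = m.getParameterTypes()(0).getSimpleName.toLowerCase.replaceAll("""\[\]""", "ARRAY")
          assert(prefix == erased, s"$m erased to $erased, expected prefix $prefix")
        case _ => // accessors and other members that don't follow the naming convention are ignored
      }
    }
  }
}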
+ def a_114(x: VC with D): Unit = {} + def d_115(x: D with VC): Unit = {} + def d_116(x: D with B with VC): Unit = {} + def d_117(x: B with D with VC): Unit = {} + def a_118(x: VC with B with D): Unit = {} + def a_119(x: VC with Int): Unit = {} + def int_120(x: Int with VC): Unit = {} + + def object_121[T](x: Array[T]): Unit = {} + def object_122(x: Array[_ <: AnyVal with Singleton]): Unit = {} + def objectARRAY_123(x: Array[AnyVal with Singleton]): Unit = {} + def objectARRAY_124[T, U](x: Array[T with U]): Unit = {} + def objectARRAY_125(x: Array[({ type W <: String }) with ({ type X <: Int })]): Unit = {} + def covARRAY_126(x: Array[Q[B] with Y[SubB]]): Unit = {} + def covARRAY_127(x: Array[Q[B] with Y[SubB] { type X <: Cov[String] }]): Unit = {} + + type SubAny <: Any + type SubAnyVal <: AnyVal + + def objectARRAY_128(x: Array[SubAny with SubAnyVal]): Unit = {} + def intARRAYARRAY_129(x: Array[Array[Int]]): Unit = {} + def intARRAYARRAY_130(x: Array[_ <: Array[Int]]): Unit = {} + def objectARRAY_130(x: Array[_ <: Array[_ <: AnyVal]]): Unit = {} + def stringARRAY_131(x: Array[String] with Array[Int]): Unit = {} + + def enumerated_132(x: Enumerated.C1.type with Enumerated.C2.type): Unit = {} + def enumerated_133(x: Enumerated.C2.type with Enumerated.C1.type): Unit = {} + def enumerated_134(x: Enumerated.C1.type): Unit = {} + def enumeratedARRAY_135(x: Array[Enumerated.C1.type]): Unit = {} + def enumeratedARRAY_136(x: Array[Enumerated.C2.type with Enumerated.C1.type]): Unit = {} + def enumeratedARRAY_137(x: Array[Enumerated.C1.type with Enumerated.C2.type]): Unit = {} + +} diff --git a/test/tasty/run/src-2/a/Main.scala b/test/tasty/run/src-2/a/Main.scala new file mode 100644 index 000000000000..d86cf6564d57 --- /dev/null +++ b/test/tasty/run/src-2/a/Main.scala @@ -0,0 +1,11 @@ +package a + +import tastytest.opaques.Offset + +final case class A(off: Offset) + +object Main { + def foo(): Unit = { + assert(A(Offset(10)).off == Offset(10)) + } +} diff --git a/test/tasty/run/src-2/tastytest/TestAsyncSuite.scala b/test/tasty/run/src-2/tastytest/TestAsyncSuite.scala new file mode 100644 index 000000000000..ea101a9f3ec8 --- /dev/null +++ b/test/tasty/run/src-2/tastytest/TestAsyncSuite.scala @@ -0,0 +1,7 @@ +package tastytest + +object TestAsyncSuite extends Suite("TestAsyncSuite") { + + class MySuite extends testsuite.AsyncSuite + +} diff --git a/test/tasty/run/src-2/tastytest/TestCaseClassDefault.scala b/test/tasty/run/src-2/tastytest/TestCaseClassDefault.scala new file mode 100644 index 000000000000..21a924142eae --- /dev/null +++ b/test/tasty/run/src-2/tastytest/TestCaseClassDefault.scala @@ -0,0 +1,14 @@ +package tastytest + +object TestCaseClassDefault extends Suite("TestCaseClassDefault") { + + test(assert(CaseClassDefault.apply().value === 23)) + + test { + val i = new CaseClassDefault.Inner() + assert(i.Local.apply().value === 47) + } + + test(assert(CaseClassDefault.FakeCaseClass.apply().value === 97)) + +} diff --git a/test/tasty/run/src-2/tastytest/TestErasure.scala b/test/tasty/run/src-2/tastytest/TestErasure.scala new file mode 100644 index 000000000000..1ba9cc7ae3d2 --- /dev/null +++ b/test/tasty/run/src-2/tastytest/TestErasure.scala @@ -0,0 +1,178 @@ +package tastytest + +import tastytest.{dottyErasure => dotc, scala2Erasure => nsc} + +object TestErasure extends Suite("TestErasure") { + + val z = new dotc.Z + + test("erasure of scala 3 from scala 2") { + z.a_01(anyObj) + z.a_02(anyObj) + z.a_02X(anyObj) + z.a_03(anyObj) + z.a_04(anyObj) + z.a_04X(anyObj) + z.a_05(anyObj) + 
z.a_06(anyObj) + z.a_07(anyObj) + z.a_08(anyObj) + z.a_09(anyObj) + z.a_10(anyObj) + z.b_11(anyObj) + z.subb_12(anyObj) + z.d_13(anyObj) + z.d_13x(anyObj) + z.d_14(anyObj) + z.d_14x(anyObj) + z.d_15(anyObj) + z.d_15b(anyObj) + z.d_16(anyObj) + z.d_16b(anyObj) + z.d_17(anyObj) + z.d_18(anyObj) + z.d_19(anyObj) + z.d_19x(anyObj) + z.d_20(anyObj) + z.a_21(anyObj) + z.a_22(anyObj) + z.z_23(anyObj) + z.z_24(anyObj) + z.a_25(anyObj) + z.a_26(anyObj) + z.a_27(anyObj) @@ ExpectCastOrNull + z.a_28(anyObj) @@ ExpectCastOrNull + z.e_29(anyObj) + z.e_30(anyObj) + z.e_31(anyObj) + z.e_32(anyObj) + z.e_33(anyObj) + z.e_34(anyObj) + z.d_35(anyObj) + z.d_36(anyObj) + z.d_37(anyObj) + z.d_38(anyObj) + z.b_39(anyObj) + z.b_40(anyObj) + z.b_41(anyObj) + z.b_42(anyObj) + z.b_43(anyObj) + z.b_44(anyObj) + z.b_45(anyObj) + z.b_46(anyObj) + z.a_47(anyObj) + z.a_48(anyObj) + z.a_49(anyObj) + z.a_50(anyObj) + z.a_51(anyObj) + z.a_52(anyObj) + z.a_53(anyObj) + z.a_54(anyObj) + z.a_55(anyObj) + z.a_56(anyObj) + z.a_57(anyObj) + z.int_58(1) + z.int_59(1) + z.int_60(1) + z.int_61(1) + z.int_62(1) + z.int_63(1) + z.intARRAY_64(anyObj) + z.intARRAY_65(anyObj) + z.intARRAY_66(anyObj) + z.intARRAY_67(anyObj) + z.intARRAY_68(anyObj) + z.intARRAY_69(anyObj) + z.intARRAY_70(anyObj) + z.intARRAY_71(anyObj) + // z.intARRAY_71a(anyObj) // illegal union type + // z.intARRAY_71b(anyObj) // illegal union type + z.stringARRAY_72(anyObj) + z.stringARRAY_73(anyObj) + z.stringARRAY_74(anyObj) + z.stringARRAY_75(anyObj) + z.stringARRAY_76(anyObj) + z.stringARRAY_77(anyObj) + z.stringARRAY_78(anyObj) + z.stringARRAY_79(anyObj) + // z.stringARRAY_79a(anyObj) // illegal union type + // z.stringARRAY_79b(anyObj) // illegal union type + z.object_80(anyObj) + z.object_81(anyObj) + z.objectARRAY_82(anyObj) + z.object_83(anyObj) + z.object_83a(anyObj) + // z.object_83b(anyObj) // illegal union type + // z.object_83c(anyObj) // illegal union type + // z.object_83d(anyObj) // illegal union type + // z.object_83e(anyObj) // illegal union type + z.serializableARRAY_84(anyObj) + z.univARRAY_85(anyObj) + z.aARRAY_86(anyObj) + z.aARRAY_87(anyObj) + z.objectARRAY_88(anyObj) + z.objectARRAY_89(anyObj) + z.objectARRAY_90(anyObj) + z.stringARRAY_91(anyObj) + z.stringARRAY_92(anyObj) + z.stringARRAY_93(anyObj) + z.covARRAY_94(anyObj) + z.aARRAY_95(anyObj) + z.aARRAY_96(anyObj) + z.zARRAY_97(anyObj) + z.aARRAY_98(anyObj) + z.stringARRAY_99(anyObj) + z.aARRAY_100(anyObj) + z.dARRAY_101(anyObj) + z.aARRAY_102(anyObj) + z.aARRAY_103(anyObj) + z.dARRAY_104(anyObj) + z.intARRAY_105(anyObj) + z.vcARRAY_106(anyObj) + z.listARRAY_107(anyObj) + z.intARRAY_108(anyObj) + z.intARRAY_109(anyObj) + z.a_110(anyObj) @@ ExpectCastOrNull + z.a_111(anyObj) @@ ExpectCastOrNull + z.vcARRAY_112(anyObj) + z.vcARRAY_113(anyObj) + z.a_114(anyObj) @@ ExpectCastOrNull + z.a_115(anyObj) @@ ExpectCastOrNull + z.a_116(anyObj) @@ ExpectCastOrNull + z.a_117(anyObj) @@ ExpectCastOrNull + z.a_118(anyObj) @@ ExpectCastOrNull + z.a_119(anyObj) @@ ExpectCastOrNull + z.a_120(anyObj) @@ ExpectCastOrNull + z.object_121(anyObj) + z.object_122(anyObj) + z.objectARRAY_123(anyObj) + z.object_124(anyObj) + z.objectARRAY_125(anyObj) + z.covARRAY_126(anyObj) + z.covARRAY_127(anyObj) + z.object_128(anyObj) + z.intARRAYARRAY_129(anyObj) + z.intARRAYARRAY_130(anyObj) + z.objectARRAY_130(anyObj) + z.intARRAY_131(anyObj) + z.enumerated_132(anyObj) + z.enumerated_133(anyObj) + z.enumerated_134(anyObj) + z.enumeratedARRAY_135(anyObj) + z.enumeratedARRAY_136(anyObj) + z.enumeratedARRAY_137(anyObj) + } + + 
test("erasure matches name") { + val methods = classOf[nsc.Z].getDeclaredMethods.toList ++ classOf[dotc.Z].getDeclaredMethods.toList + methods.foreach { m => + m.getName match { + case s"${prefix}_${suffix}" => + val paramClass = m.getParameterTypes()(0).getSimpleName.toLowerCase.replaceAll("""\[\]""", "ARRAY") + assert(prefix == paramClass, s"Method `$m` erased to `$paramClass` which does not match its prefix `$prefix`") + case _ => + } + } + } + +} diff --git a/test/tasty/run/src-2/tastytest/TestImports.scala b/test/tasty/run/src-2/tastytest/TestImports.scala new file mode 100644 index 000000000000..f9da4fac2e12 --- /dev/null +++ b/test/tasty/run/src-2/tastytest/TestImports.scala @@ -0,0 +1,5 @@ +package tastytest + +import test.Imports + +object TestImports extends App diff --git a/test/tasty/run/src-2/tastytest/TestIntersectionErasure.scala b/test/tasty/run/src-2/tastytest/TestIntersectionErasure.scala new file mode 100644 index 000000000000..3137a9500034 --- /dev/null +++ b/test/tasty/run/src-2/tastytest/TestIntersectionErasure.scala @@ -0,0 +1,40 @@ +package tastytest + +import IntersectionErasure._ + +object TestIntersectionErasure extends Suite("TestIntersectionErasure") { + + def boxedId[T](t: T): T = t + + val bWithA: B with A = new B with A {} // dotc erases to A, scalac to B + + test("SAM bridges") { + val sam: IntersectionSAM = x => x + assert(sam(bWithA) === bWithA) + } + + test("VC param")( + assert(boxedId(new IntersectionVC(bWithA)).unwrapped == bWithA) + ) + + test("VC method unboxed")( + assert(boxedId(new IntersectionVC(bWithA)).matchesInternal(bWithA)) + ) + + test("VC method boxed")( + assert(boxedId(new IntersectionVC(bWithA)).matches(new IntersectionVC(bWithA))) + ) + + test("VC parametric param")( + assert(boxedId(new IntersectionVCParametric(bWithA)).unwrapped == bWithA) + ) + + test("VC parametric method unboxed")( + assert(boxedId(new IntersectionVCParametric(bWithA)).matchesInternal(bWithA)) + ) + + test("VC parametric method boxed")( + assert(boxedId(new IntersectionVCParametric(bWithA)).matches(new IntersectionVCParametric(bWithA))) + ) + +} diff --git a/test/tasty/run/src-2/tastytest/TestInvisibleDefs.scala b/test/tasty/run/src-2/tastytest/TestInvisibleDefs.scala new file mode 100644 index 000000000000..4962af12bbe4 --- /dev/null +++ b/test/tasty/run/src-2/tastytest/TestInvisibleDefs.scala @@ -0,0 +1,15 @@ +package tastytest + +object TestInvisibleDefs extends Suite("TestInvisibleDefs") { + + test("invoke '@main def argIsHello'") { + InvisibleDefs.argIsHello("Hello") + } + + test("update bean.status") { + val mybean = new InvisibleDefs.MyBean + mybean.status = "open" + assert(mybean.status === "open") + } + +} diff --git a/test/tasty/run/src-2/tastytest/TestIssue12420.scala b/test/tasty/run/src-2/tastytest/TestIssue12420.scala new file mode 100644 index 000000000000..d420527b6148 --- /dev/null +++ b/test/tasty/run/src-2/tastytest/TestIssue12420.scala @@ -0,0 +1,19 @@ +package tastytest + +import issue12420._ +import issue12420.{ShareLambda => sl} + +object TestIssue12420 extends Suite("TestIssue12420") { + + def foo = new Foo + def eta = new Eta + + test(assert(foo.bar.id.id == "Foo")) + + test(foo.bar match { case User(UserId(id: String)) => assert(id == "Foo") }) + + test(assert(eta.inner == Boxxy.default)) + + test(assert(new sl.Foo[sl.Bar].foo(new sl.Bar[List]) == "Bar")) + +} diff --git a/test/tasty/run/src-2/tastytest/TestNestedEnum.scala b/test/tasty/run/src-2/tastytest/TestNestedEnum.scala new file mode 100644 index 000000000000..fc9ab156eb40 --- 
/dev/null +++ b/test/tasty/run/src-2/tastytest/TestNestedEnum.scala @@ -0,0 +1,17 @@ +package tastytest + +object TestNestedEnum extends Suite("TestNestedEnum") { + + test("call toString on enum of inner class") { + val n = new NestedEnum() + assert(n.Mode.On.toString == "On") + } + + test("nested enums do not have same type") { + val n1 = new NestedEnum() + val n2 = new NestedEnum() + implicitly[scala.util.NotGiven[n1.Mode.Off.type =:= n2.Mode.Off.type]] + assert(n1.Mode.Off != n2.Mode.Off) + } + +} diff --git a/test/tasty/run/src-2/tastytest/TestOpaquesPackage.scala b/test/tasty/run/src-2/tastytest/TestOpaquesPackage.scala new file mode 100644 index 000000000000..f5a7d10c58a7 --- /dev/null +++ b/test/tasty/run/src-2/tastytest/TestOpaquesPackage.scala @@ -0,0 +1,24 @@ +package tastytest.opaques { + import tastytest.opaques.Offset + import a.A + + class Test1 { + import tastytest._ + + def test(): Unit = { + assert(A(Offset(10)) === A(Offset(10))) + } + + } +} + + +package tastytest { + + object TestOpaquesPackage extends Suite("TestOpaquesPackage") { + + test(new opaques.Test1().test()) + test(a.Main.foo()) + + } +} diff --git a/test/tasty/run/src-2/tastytest/TestOperatorToken.scala b/test/tasty/run/src-2/tastytest/TestOperatorToken.scala new file mode 100644 index 000000000000..ed7fc22cf327 --- /dev/null +++ b/test/tasty/run/src-2/tastytest/TestOperatorToken.scala @@ -0,0 +1,7 @@ +package tastytest + +object TestOperatorToken extends Suite("TestOperatorToken") { + test(assert(OperatorToken.<:< != null)) + test(assert(OperatorToken.=:= != null)) + test(assert(OperatorToken.<*> != null)) +} diff --git a/test/tasty/run/src-2/tastytest/TestReflection.scala b/test/tasty/run/src-2/tastytest/TestReflection.scala new file mode 100644 index 000000000000..d292b8b7e737 --- /dev/null +++ b/test/tasty/run/src-2/tastytest/TestReflection.scala @@ -0,0 +1,18 @@ +package tastytest + +import tastytest.reflectshims.impl.Context +import Context.universe.EmptyTree +import Context.TreeShim + +object TestReflection extends Suite("TestReflection") { + + test(assert(Reflection.reflectionInvokerIdentity(Context)(EmptyTree) === (EmptyTree: TreeShim))) + test(assert(new Reflection.Invoker(Context)(EmptyTree).tree === (EmptyTree: TreeShim))) + + // bridge method not generated (AbstractMethodError) [same if Reflection.InvokerSAM is compiled by Scala 2] + // test { + // val invoker = new Reflection.InvokerSAM(Context) + // val id: invoker.TreeFn = x => x + // assert(id(EmptyTree) === (EmptyTree: TreeShim)) + // } +} diff --git a/test/tasty/run/src-2/tastytest/TestSAMErasure.scala b/test/tasty/run/src-2/tastytest/TestSAMErasure.scala new file mode 100644 index 000000000000..4aa5e88b1535 --- /dev/null +++ b/test/tasty/run/src-2/tastytest/TestSAMErasure.scala @@ -0,0 +1,23 @@ +package tastytest + +import SAMErasure._ + +object TestSAMErasure extends Suite("TestSAMErasure") { + + def f = ((x: TreeShimSAM) => x): FunTreeShimSAM + + def g = ((xs: Array[TreeShimSAM]) => xs): FunTreeShimSAM2 + + case object EmptyTree extends TreeShimSAMApi + val tree = EmptyTree.asInstanceOf[TreeShimSAM] + + test { + assert(f(tree) == tree) + } + + test { + val trees = Array(tree) + assert(g(trees) == trees) + } + +} diff --git a/test/tasty/run/src-2/tastytest/TestSuperTypes.scala b/test/tasty/run/src-2/tastytest/TestSuperTypes.scala index b675a0ec87d0..ed552f561ac6 100644 --- a/test/tasty/run/src-2/tastytest/TestSuperTypes.scala +++ b/test/tasty/run/src-2/tastytest/TestSuperTypes.scala @@ -11,11 +11,10 @@ object TestSuperTypes extends 
Suite("TestSuperTypes") { assert(("" match { case bar.A(x) => x: "Foo.foo" }) === "Foo.foo") } - // TODO [tasty]: what is happening here - // test("SUPERtype in type, version 2") { - // val BarA = (new SuperTypes.Bar()).A - // assert(("" match { case BarA(x) => x: "Foo.foo" }) === "Foo.foo") - // } + test("SUPERtype in type, version 2") { + val bar = new SuperTypes.Bar() + assert(("" match { case bar.A(x) => x : bar.foo.type }) === "Foo.foo") + } test("SUPER qualified in type tree") { assert((new SuperTypes.Baz().baz: "Foo.foo") === "Foo.foo") diff --git a/test/tasty/run/src-2/tastytest/TestThisTypes.scala b/test/tasty/run/src-2/tastytest/TestThisTypes.scala new file mode 100644 index 000000000000..ae7f12fbaa54 --- /dev/null +++ b/test/tasty/run/src-2/tastytest/TestThisTypes.scala @@ -0,0 +1,10 @@ +package tastytest + +import ThisTypes._ + +object TestThisTypes extends Suite("TestThisTypes") { + + test(assert(new Sub().doTest.get.x === 23)) + test(assert(new Sub2().doTest.get.x === 23)) + +} diff --git a/test/tasty/run/src-3/tastytest/AnythingIsPossible.scala b/test/tasty/run/src-3/tastytest/AnythingIsPossible.scala index 31fa2f8da1e2..148b2d9caa21 100644 --- a/test/tasty/run/src-3/tastytest/AnythingIsPossible.scala +++ b/test/tasty/run/src-3/tastytest/AnythingIsPossible.scala @@ -2,7 +2,8 @@ package tastytest object AnythingIsPossible { - class Box[A](val a: A) + class Box[A](accept: => A): + val a: A = accept class Class extends Box({ class X { final val x = Map(("", 3)) } ; val foo = new X(); foo.x: foo.x.type }) diff --git a/test/tasty/run/src-3/tastytest/CaseClassDefault.scala b/test/tasty/run/src-3/tastytest/CaseClassDefault.scala new file mode 100644 index 000000000000..4e08c03851f1 --- /dev/null +++ b/test/tasty/run/src-3/tastytest/CaseClassDefault.scala @@ -0,0 +1,16 @@ +package tastytest + +case class CaseClassDefault(value: Int = 23) + +object CaseClassDefault { + + class Inner { + case class Local(value: Int = 47) + } + + class FakeCaseClass(val value: Int = 47) + object FakeCaseClass { + def apply(value: Int = 97): FakeCaseClass = new FakeCaseClass(value) + } + +} diff --git a/test/tasty/run/src-3/tastytest/Imports.scala b/test/tasty/run/src-3/tastytest/Imports.scala new file mode 100644 index 000000000000..f153b584a254 --- /dev/null +++ b/test/tasty/run/src-3/tastytest/Imports.scala @@ -0,0 +1,9 @@ +package tastytest.test + +sealed trait Imports + +object Imports { + sealed trait Mixin + case object First extends Imports with Mixin + case object Second extends Imports with Mixin +} diff --git a/test/tasty/run/src-3/tastytest/InlineCompat.scala b/test/tasty/run/src-3/tastytest/InlineCompat.scala index 286a30dd0f46..80600b8ed85f 100644 --- a/test/tasty/run/src-3/tastytest/InlineCompat.scala +++ b/test/tasty/run/src-3/tastytest/InlineCompat.scala @@ -4,9 +4,14 @@ import scala.language.experimental.macros import scala.reflect.macros.blackbox.Context +import scala.annotation.experimental + object InlineCompat { + @experimental def foo(code: String): String = macro InlineCompatScala2Macro.foo + + @experimental inline def foo(inline code: String): String = code // inline method, not macro } diff --git a/test/tasty/run/src-3/tastytest/InlineCompat2.scala b/test/tasty/run/src-3/tastytest/InlineCompat2.scala index c6fcbd6090fa..15e67f43e3b8 100644 --- a/test/tasty/run/src-3/tastytest/InlineCompat2.scala +++ b/test/tasty/run/src-3/tastytest/InlineCompat2.scala @@ -4,9 +4,14 @@ import scala.language.experimental.macros import scala.reflect.macros.blackbox.Context +import 
scala.annotation.experimental
+
 object InlineCompat2 {
 
+  @experimental
   def foo(code: String): String = macro InnerScala2MacroImpl.fooImpl
+
+  @experimental
   inline def foo(inline code: String): String = code // inline method, not macro
 
   object InnerScala2MacroImpl {
diff --git a/test/tasty/run/src-3/tastytest/IntersectionErasure.scala b/test/tasty/run/src-3/tastytest/IntersectionErasure.scala
new file mode 100644
index 000000000000..8a75f53056ed
--- /dev/null
+++ b/test/tasty/run/src-3/tastytest/IntersectionErasure.scala
@@ -0,0 +1,23 @@
+package tastytest
+
+object IntersectionErasure {
+
+  trait A
+  trait B
+
+  @FunctionalInterface
+  abstract class IntersectionSAM {
+    def apply(arg: B with A): B with A
+  }
+
+  final class IntersectionVC(val unwrapped: B with A) extends AnyVal {
+    def matchesInternal(that: B with A): Boolean = that == unwrapped
+    def matches(that: IntersectionVC): Boolean = this == that
+  }
+
+  final class IntersectionVCParametric[T <: B with A](val unwrapped: T) extends AnyVal {
+    def matchesInternal(that: T): Boolean = that == unwrapped
+    def matches(that: IntersectionVCParametric[T]): Boolean = this == that
+  }
+
+}
diff --git a/test/tasty/run/src-3/tastytest/InvisibleDefs.scala b/test/tasty/run/src-3/tastytest/InvisibleDefs.scala
new file mode 100644
index 000000000000..5bd0190c28e1
--- /dev/null
+++ b/test/tasty/run/src-3/tastytest/InvisibleDefs.scala
@@ -0,0 +1,16 @@
+package tastytest
+
+import scala.beans.BeanProperty
+
+object InvisibleDefs {
+
+  @main def argIsHello(arg: String): Unit = assert(arg == "Hello")
+
+  class MyBean {
+
+    @BeanProperty
+    var status = ""
+
+  }
+
+}
diff --git a/test/tasty/run/src-3/tastytest/MacroCompat.scala b/test/tasty/run/src-3/tastytest/MacroCompat.scala
index 71ba8c03f063..8b2ca3e5fcb3 100644
--- a/test/tasty/run/src-3/tastytest/MacroCompat.scala
+++ b/test/tasty/run/src-3/tastytest/MacroCompat.scala
@@ -2,19 +2,33 @@ package tastytest
 
 import scala.language.experimental.macros
 
+import scala.annotation.experimental
+
 object MacroCompat {
 
+  @experimental
   implicit def pos: Position = macro Position.posImpl
+
+  @experimental
   implicit inline def pos: Position = ${ Macros3.posImpl }
 
+  @experimental
   def constInt[T](x: T): Int = macro Macros.constIntImpl[T]
+
+  @experimental
   inline def constInt[T](x: T): Int = ${ Macros3.constIntImpl[T]('x) }
 
   object Bundles {
+    @experimental
     def mono: Int = macro MacroImpl.mono
+
+    @experimental
     inline def mono: Int = ${ Macros3.monoImpl }
 
+    @experimental
     def poly[T]: String = macro MacroImpl.poly[T]
+
+    @experimental
     inline def poly[T]: String = ${ Macros3.polyImpl[T] }
   }
 
@@ -30,7 +44,7 @@ object MacroCompat {
     def posImpl(using quotes: Quotes): Expr[Position] = {
       import quotes.reflect.given
       val pos = quotes.reflect.Position.ofMacroExpansion
-      val name = pos.sourceFile.jpath.getFileName.toString
+      val name = pos.sourceFile.getJPath.map(_.getFileName.toString).getOrElse("?.scala")
       val line = pos.startLine + 1
       '{ Position(${Expr(name)}, ${Expr(line)}) }
     }
diff --git a/test/tasty/run/src-3/tastytest/NestedEnum.scala b/test/tasty/run/src-3/tastytest/NestedEnum.scala
new file mode 100644
index 000000000000..5a89c90fb64e
--- /dev/null
+++ b/test/tasty/run/src-3/tastytest/NestedEnum.scala
@@ -0,0 +1,6 @@
+package tastytest
+
+class NestedEnum:
+
+  enum Mode:
+    case On, Off
diff --git a/test/tasty/run/src-3/tastytest/OperatorToken.scala b/test/tasty/run/src-3/tastytest/OperatorToken.scala
new file mode 100644
index 000000000000..8ac355db1dd7
--- /dev/null
+++ b/test/tasty/run/src-3/tastytest/OperatorToken.scala
@@ -0,0 +1,7 @@
+package tastytest
+
+enum OperatorToken {
+  case <:<
+  case =:=
+  case <*>
+}
diff --git a/test/tasty/run/src-3/tastytest/Reflection.scala b/test/tasty/run/src-3/tastytest/Reflection.scala
new file mode 100644
index 000000000000..434cc62ee39c
--- /dev/null
+++ b/test/tasty/run/src-3/tastytest/Reflection.scala
@@ -0,0 +1,22 @@
+package tastytest
+
+import tastytest.reflectshims
+
+object Reflection {
+
+  def reflectionInvokerIdentity(ctx: reflectshims.Context)(tree: ctx.TreeShim): ctx.TreeShim = tree
+
+  class Invoker[C <: reflectshims.Context with Singleton](val ctx: C)(root: ctx.TreeShim) {
+    def tree: ctx.TreeShim = root
+  }
+
+  class InvokerSAM[C <: reflectshims.Context with Singleton](val ctx: C) {
+
+    @FunctionalInterface
+    trait TreeFn {
+      def apply(tree: ctx.TreeShim): ctx.TreeShim
+    }
+
+  }
+
+}
diff --git a/test/tasty/run/src-3/tastytest/SAMErasure.scala b/test/tasty/run/src-3/tastytest/SAMErasure.scala
new file mode 100644
index 000000000000..00a471cd95e0
--- /dev/null
+++ b/test/tasty/run/src-3/tastytest/SAMErasure.scala
@@ -0,0 +1,18 @@
+package tastytest
+
+object SAMErasure {
+
+  trait TreeShimSAMApi extends Product
+
+  type TreeShimSAM >: Null <: AnyRef with TreeShimSAMApi
+
+  implicit val TreeShimSAMTag: reflect.ClassTag[TreeShimSAM] =
+    reflect.classTag[TreeShimSAMApi].asInstanceOf[reflect.ClassTag[TreeShimSAM]]
+
+  @FunctionalInterface
+  trait FunTreeShimSAM { def apply(a: TreeShimSAM): TreeShimSAM }
+
+  @FunctionalInterface
+  trait FunTreeShimSAM2 { def apply(a: Array[TreeShimSAM]): Array[TreeShimSAM] }
+
+}
diff --git a/test/tasty/run/src-3/tastytest/SuperTypes.scala b/test/tasty/run/src-3/tastytest/SuperTypes.scala
index 90c3cb331777..3e89f38b4fa9 100644
--- a/test/tasty/run/src-3/tastytest/SuperTypes.scala
+++ b/test/tasty/run/src-3/tastytest/SuperTypes.scala
@@ -3,7 +3,7 @@ package tastytest
 object SuperTypes {
 
   class Foo {
-    final val foo = "Foo.foo"
+    final val foo: "Foo.foo" = "Foo.foo"
   }
 
   class Bar extends Foo {
diff --git a/test/tasty/run/src-3/tastytest/ThisTypes.scala b/test/tasty/run/src-3/tastytest/ThisTypes.scala
new file mode 100644
index 000000000000..43936831ebe3
--- /dev/null
+++ b/test/tasty/run/src-3/tastytest/ThisTypes.scala
@@ -0,0 +1,33 @@
+package tastytest
+
+object ThisTypes {
+
+  abstract class Wrap[T] {
+    type Base[A] <: { // if not resolved to Sub.this.Base then reflective calls will be needed
+      def x: A
+    }
+    final type Res = Option[Base[T]]
+    def doTest: Res
+  }
+
+  class Sub extends Wrap[Int] {
+    class BaseImpl[A](a: A) {
+      def x: A = a
+    }
+    override type Base[A] = BaseImpl[A]
+    def doTest: Res = Some(new BaseImpl(23))
+  }
+
+  abstract class Wrap2[T] {
+    class Base[A](a: A) {
+      def x: A = a
+    }
+    final type Res = Option[Base[T]]
+    def doTest: Res
+  }
+
+  class Sub2 extends Wrap2[Int] {
+    def doTest: Res = Some(new Base(23))
+  }
+
+}
diff --git a/test/tasty/run/src-3/tastytest/dottyErasure/api.scala b/test/tasty/run/src-3/tastytest/dottyErasure/api.scala
new file mode 100644
index 000000000000..5d563fb8eaf8
--- /dev/null
+++ b/test/tasty/run/src-3/tastytest/dottyErasure/api.scala
@@ -0,0 +1,271 @@
+package tastytest
+
+// Keep synchronized with pre/tastytest/scala2ErasureApi/api.scala
+package dottyErasure
+
+class foo extends scala.annotation.StaticAnnotation
+
+trait A
+trait B
+trait SubB extends B
+trait C
+trait Cov[+T]
+trait Univ extends Any
+
+class D
+
+class VC(val self: A) extends AnyVal
+class VC2(val self: A) extends AnyVal
+
+class Outer {
+  class E
+  trait F extends E
+}
+
+object OpaqueHolder {
+  opaque type Q[T] <: Any = Cov[T]
+  opaque type Y[T] <: Any = Cov[T]
+}
+import OpaqueHolder._
+
+enum Enumerated {
+  case C1 extends Enumerated with A
+  case C2 extends Enumerated with B
+}
+
+// The parameter type of `a_XX` should erase to A, `b_XX` to `B`, etc.
+// This is enforced by dottyApp/Main.scala
+class Z { self =>
+  def a_01(a: A with B): Unit = {}
+  def a_02X(b: B with A): Unit = {}
+  def a_02(a: A with B with A): Unit = {}
+  def a_03(a: A with (B with A)): Unit = {}
+  def a_04(b: A with (B with A) @foo): Unit = {}
+  def a_04X(b: A with (B with C) @foo): Unit = {}
+  def a_05(b: A with (B with A) @foo with (C with B with A) @foo): Unit = {}
+
+  type T1 <: A with B
+  def a_06(a: T1): Unit = {}
+
+  type S <: B with T1
+  def a_07(a: S): Unit = {}
+
+  type T2 <: B with A
+  type U <: T2 with S
+  def a_08(b: U): Unit = {}
+
+  val singB: B = new B {}
+  def a_09(a: A with singB.type): Unit = {}
+  def a_10(b: singB.type with A): Unit = {}
+
+  type V >: SubB <: B
+  def b_11(b: V): Unit = {}
+  def subb_12(b: V with SubB): Unit = {}
+
+  def d_13(d: D with A): Unit = {}
+  def d_14(d: A with D): Unit = {}
+
+  val singD: D = new D {}
+  def d_13x(d: singD.type with A): Unit = {}
+  def d_14x(d: A with singD.type): Unit = {}
+
+  type DEq = D
+  def d_15(d: A with DEq): Unit = {}
+  def d_16(d: A with (DEq @foo)): Unit = {}
+  def d_17(d: DEq with A): Unit = {}
+  def d_18(d: (DEq @foo) with A): Unit = {}
+
+  val singDEq: DEq @foo = new D {}
+  def d_15b(d: A with singDEq.type): Unit = {}
+  def d_16b(d: A with (singDEq.type @foo)): Unit = {}
+
+  type DSub <: D
+  def d_19(a: A with DSub): Unit = {}
+  def d_19x(d: DSub with A): Unit = {}
+  def d_20(z: DSub with Z): Unit = {}
+
+  type W1 <: A with Cov[Any]
+  type X1 <: Cov[Int] with W1
+  def a_21(a: X1): Unit = {}
+
+  type W2 <: A with Cov[Any]
+  type X2 <: Cov[Int] with W2
+  def a_22(a: X2): Unit = {}
+
+  def z_23(z: A with this.type): Unit = {}
+  def z_24(z: this.type with A): Unit = {}
+
+  def a_25(b: A with (B { type T })): Unit = {}
+  def a_26(a: (A { type T }) with ((B with A) { type T })): Unit = {}
+
+  def a_27(a: VC with B): Unit = {}
+  def a_28(a: B with VC): Unit = {}
+
+  val o1: Outer = new Outer
+  val o2: Outer = new Outer
+  def e_29(f: o1.E with o1.F): Unit = {}
+  def e_30(f: o1.F with o1.E): Unit = {}
+  def e_31(f: o1.E with o2.F): Unit = {}
+  def e_32(f: o2.F with o1.E): Unit = {}
+  def e_33(f: Outer#E with Outer#F): Unit = {}
+  def e_34(f: Outer#F with Outer#E): Unit = {}
+
+  val structural1: { type DSub <: D } = new { type DSub <: D }
+  def d_35(a: A with structural1.DSub): Unit = {}
+  def d_36(a: structural1.DSub with A): Unit = {}
+  def d_37(z: Z with structural1.DSub): Unit = {}
+  def d_38(z: structural1.DSub with Z): Unit = {}
+
+  val structural2: { type SubCB <: C with B } = new { type SubCB <: C with B }
+  def b_39(c: structural2.SubCB with B): Unit = {}
+  def b_40(c: B with structural2.SubCB): Unit = {}
+
+  val structural3a: { type SubB <: B; type SubCB <: C with SubB } = new { type SubB <: B; type SubCB <: C with SubB }
+  val structural3b: { type SubB <: B; type SubCB <: C with SubB } = new { type SubB <: B; type SubCB <: C with SubB }
+  def b_41(c: structural3a.SubB with structural3a.SubCB): Unit = {}
+  def b_42(c: structural3a.SubCB with structural3a.SubB): Unit = {}
+  def b_43(b: structural3a.SubB with structural3b.SubCB): Unit = {}
+  def b_44(c: structural3b.SubCB with structural3a.SubB): Unit = {}
+
+  type SubStructural <: C with structural3a.SubB
+  def b_45(x: structural3a.SubB with SubStructural): Unit = {}
+  def b_46(x: structural3b.SubB with SubStructural): Unit = {}
+
+  type Rec1 <: A with B
+  type Rec2 <: C with Rec1
+  def a_47(a: A with B with Rec2): Unit = {}
+  def a_48(a: (A with B) @foo with Rec2): Unit = {}
+
+  type F1 = A with B
+  type F2 = A with B
+  type Rec3 <: F1
+  type Rec4 <: C with Rec3
+  def a_49(a: F1 @foo with Rec4): Unit = {}
+  def a_50(a: F1 with Rec4): Unit = {}
+  def a_51(a: F2 @foo with Rec4): Unit = {}
+  def a_52(a: F2 with Rec4): Unit = {}
+
+  type AA = A
+  type F3 = AA with B
+  type Rec5 <: F3
+  type Rec6 <: C with Rec5
+  def a_53(a: F3 @foo with Rec6): Unit = {}
+  def a_54(a: F3 with Rec6): Unit = {}
+
+  val structural4a: { type M[X] <: A } = new { type M[X] <: A }
+  val structural4b: { type N <: B with structural4a.M[Int] } = new { type N <: B with structural4a.M[Int] }
+  def a_55(x: structural4a.M[Any] with structural4b.N): Unit = {}
+
+  type Bla = A { type M[X] <: A }
+  def a_56(x: Bla#M[Any] with ({ type N <: B with Bla#M[Int] })#N): Unit = {}
+  type AEq = A
+  type Bla2 = AEq { type M[X] <: A }
+  def a_57(x: Bla2#M[Any] with ({ type N <: B with Bla2#M[Int] })#N): Unit = {}
+
+  def int_58(x: Int with Singleton): Unit = {}
+  def int_59(x: Singleton with Int): Unit = {}
+  def int_60(x: Int with Any): Unit = {}
+  def int_61(x: Any with Int): Unit = {}
+  def int_62(x: Int with AnyVal): Unit = {}
+  def int_63(x: AnyVal with Int): Unit = {}
+
+  def intARRAY_64(x: Array[Int with Singleton]): Unit = {}
+  def intARRAY_65(x: Array[_ <: Int]): Unit = {}
+  def intARRAY_66(x: Array[_ <: Int with Singleton]): Unit = {}
+  def intARRAY_67(x: Array[_ <: Singleton with Int]): Unit = {}
+  def intARRAY_68(x: Array[_ <: Int with Any]): Unit = {}
+  def intARRAY_69(x: Array[_ <: Any with Int]): Unit = {}
+  def intARRAY_70(x: Array[_ <: Int with AnyVal]): Unit = {}
+  def intARRAY_71(x: Array[_ <: AnyVal with Int]): Unit = {}
+  def intARRAY_71a(x: Array[_ <: Int | Int]): Unit = {}
+  def intARRAY_71b(x: Array[_ <: 1 | 2]): Unit = {}
+
+  def stringARRAY_72(x: Array[String with Singleton]): Unit = {}
+  def stringARRAY_73(x: Array[_ <: String]): Unit = {}
+  def stringARRAY_74(x: Array[_ <: String with Singleton]): Unit = {}
+  def stringARRAY_75(x: Array[_ <: Singleton with String]): Unit = {}
+  def stringARRAY_76(x: Array[_ <: String with Any]): Unit = {}
+  def stringARRAY_77(x: Array[_ <: Any with String]): Unit = {}
+  def stringARRAY_78(x: Array[_ <: String with AnyRef]): Unit = {}
+  def stringARRAY_79(x: Array[_ <: AnyRef with String]): Unit = {}
+  def stringARRAY_79a(x: Array[_ <: String | String]): Unit = {}
+  def stringARRAY_79b(x: Array[_ <: "a" | "b"]): Unit = {}
+
+  def object_80(x: Array[_ <: Singleton]): Unit = {}
+  def object_81(x: Array[_ <: AnyVal]): Unit = {}
+  def objectARRAY_82(x: Array[_ <: AnyRef]): Unit = {}
+  def object_83(x: Array[_ <: Any]): Unit = {}
+  def object_83a(x: Array[_ <: Matchable]): Unit = {}
+  def object_83b(x: Array[_ <: Int | Double]): Unit = {}
+  def object_83c(x: Array[_ <: String | Int]): Unit = {}
+  def object_83d(x: Array[_ <: Int | Matchable]): Unit = {}
+  def object_83e(x: Array[_ <: AnyRef | AnyVal]): Unit = {}
+
+  def serializableARRAY_84(x: Array[_ <: Serializable]): Unit = {}
+  def univARRAY_85(x: Array[_ <: Univ]): Unit = {}
+  def aARRAY_86(x: Array[_ <: A]): Unit = {}
+  def aARRAY_87(x: Array[_ <: A with B]): Unit = {}
+
+  def objectARRAY_88(x: Array[Any]): Unit = {}
+  def objectARRAY_89(x: Array[AnyRef]): Unit = {}
+  def objectARRAY_90(x: Array[AnyVal]): Unit = {}
+
+  def stringARRAY_91(x: Array[_ <: ({ type Foo <: String with Singleton })#Foo]): Unit = {}
+  def stringARRAY_92(x: Array[({ type Foo <: String with Singleton })#Foo]): Unit = {}
+  def stringARRAY_93(x: Array[({ type Id[T] = T })#Id[String with Singleton]]): Unit = {}
+
+  def covARRAY_94(x: Array[Q[String]]): Unit = {}
+
+  def aARRAY_95(x: Array[(A with B { type L <: String }) with C]): Unit = {}
+  def aARRAY_96(x: Array[A { type L <: String }]): Unit = {}
+  def zARRAY_97(x: Array[self.type]): Unit = {}
+  def aARRAY_98(x: Array[(A { type L <: String }) with B]): Unit = {}
+  def stringARRAY_99[Arg <: String](x: Array[Arg]): Unit = {}
+  def aARRAY_100(x: Array[Bla2#M[Any] with ({ type N <: B with Bla2#M[Int] })#N]): Unit = {}
+  def dARRAY_101(x: Array[structural1.DSub with Z]): Unit = {}
+  def aARRAY_102(x: Array[F3 @foo with Rec6]): Unit = {}
+  def aARRAY_103(x: Array[A @foo]): Unit = {}
+  def dARRAY_104(x: Array[singD.type]): Unit = {}
+  def intARRAY_105(x: Array[3]): Unit = {}
+  def vcARRAY_106(x: Array[VC]): Unit = {}
+  def listARRAY_107(x: Array[List[_]]): Unit = {}
+  def intARRAY_108(x: Array[Int with String]): Unit = {}
+  def intARRAY_109(x: Array[String with Int]): Unit = {}
+
+  def a_110(x: VC with VC2): Unit = {}
+  def a_111(x: VC2 with VC): Unit = {}
+  def vcARRAY_112(x: Array[VC2 with VC]): Unit = {}
+  def vcARRAY_113(x: Array[VC with VC2]): Unit = {}
+  def a_114(x: VC with D): Unit = {}
+  def a_115(x: D with VC): Unit = {}
+  def a_116(x: D with B with VC): Unit = {}
+  def a_117(x: B with D with VC): Unit = {}
+  def a_118(x: VC with B with D): Unit = {}
+  def a_119(x: VC with Int): Unit = {}
+  def a_120(x: Int with VC): Unit = {}
+
+  def object_121[T](x: Array[T]): Unit = {}
+  def object_122(x: Array[_ <: AnyVal with Singleton]): Unit = {}
+  def objectARRAY_123(x: Array[AnyVal with Singleton]): Unit = {}
+  def object_124[T, U](x: Array[T with U]): Unit = {}
+  def objectARRAY_125(x: Array[({ type W <: String }) with ({ type X <: Int })]): Unit = {}
+  def covARRAY_126(x: Array[Q[B] with Y[SubB]]): Unit = {}
+  def covARRAY_127(x: Array[Q[B] with Y[SubB] { type X <: Cov[String] }]): Unit = {}
+
+  type SubAny <: Any
+  type SubAnyVal <: AnyVal
+
+  def object_128(x: Array[SubAny with SubAnyVal]): Unit = {}
+  def intARRAYARRAY_129(x: Array[Array[Int]]): Unit = {}
+  def intARRAYARRAY_130(x: Array[_ <: Array[Int]]): Unit = {}
+  def objectARRAY_130(x: Array[_ <: Array[_ <: AnyVal]]): Unit = {}
+  def intARRAY_131(x: Array[String] with Array[Int]): Unit = {}
+
+  def enumerated_132(x: Enumerated.C1.type with Enumerated.C2.type): Unit = {}
+  def enumerated_133(x: Enumerated.C2.type with Enumerated.C1.type): Unit = {}
+  def enumerated_134(x: Enumerated.C1.type): Unit = {}
+  def enumeratedARRAY_135(x: Array[Enumerated.C1.type]): Unit = {}
+  def enumeratedARRAY_136(x: Array[Enumerated.C2.type with Enumerated.C1.type]): Unit = {}
+  def enumeratedARRAY_137(x: Array[Enumerated.C1.type with Enumerated.C2.type]): Unit = {}
+
+}
diff --git a/test/tasty/run/src-3/tastytest/issue12420/ShareLambda.scala b/test/tasty/run/src-3/tastytest/issue12420/ShareLambda.scala
new file mode 100644
index 000000000000..dc64b3889de7
--- /dev/null
+++ b/test/tasty/run/src-3/tastytest/issue12420/ShareLambda.scala
@@ -0,0 +1,14 @@
+package tastytest.issue12420
+
+object ShareLambda {
+
+  class Foo[K[F[X] <: List[X]]] {
+    def foo[F[X] <: List[X]](x: K[F]): String = x.toString()
+  }
+
+  // `F[X] <: List[X]` is structurally shared in TASTy and defined in `Foo.K`
+  class Bar[F[X] <: List[X]] {
+    override def toString(): String = "Bar"
+  }
+
+}
diff --git a/test/tasty/run/src-3/tastytest/issue12420/absurd.scala b/test/tasty/run/src-3/tastytest/issue12420/absurd.scala
new file mode 100644
index 000000000000..5bc01826c22f
--- /dev/null
+++ b/test/tasty/run/src-3/tastytest/issue12420/absurd.scala
@@ -0,0 +1,10 @@
+package tastytest.issue12420
+
+class Boxxy[I <: Int, B <: Boxxy[I, B]]
+
+object Boxxy {
+  object default extends Boxxy[0, default.type]
+}
+
+class Qux[I <: Int, B <: Boxxy[I, B]](val inner: B)
+class Eta extends Qux(Boxxy.default)
diff --git a/test/tasty/run/src-3/tastytest/issue12420/hasId.scala b/test/tasty/run/src-3/tastytest/issue12420/hasId.scala
new file mode 100644
index 000000000000..4a883ec7ede2
--- /dev/null
+++ b/test/tasty/run/src-3/tastytest/issue12420/hasId.scala
@@ -0,0 +1,15 @@
+package tastytest.issue12420
+
+trait HasId[+K] {
+  def id: K
+}
+
+trait Id[+T, K] {
+  def id: K
+}
+
+case class UserId(id: String) extends Id[User, String]
+case class User(id: UserId) extends HasId[UserId]
+
+class Bar[A <: HasId[Id[A, String]]](val bar: A)
+class Foo extends Bar(User(UserId("Foo")))
diff --git a/test/tasty/run/src-3/tastytest/opaques/package.scala b/test/tasty/run/src-3/tastytest/opaques/package.scala
new file mode 100644
index 000000000000..9a2866892e1b
--- /dev/null
+++ b/test/tasty/run/src-3/tastytest/opaques/package.scala
@@ -0,0 +1,7 @@
+package tastytest
+
+package object opaques {
+  opaque type Offset = Long
+  object Offset:
+    def apply(o: Long): Offset = o
+}
diff --git a/test/tasty/run/src-3/tastytest/testsuite/testsuites.scala b/test/tasty/run/src-3/tastytest/testsuite/testsuites.scala
new file mode 100644
index 000000000000..b1d6330fe215
--- /dev/null
+++ b/test/tasty/run/src-3/tastytest/testsuite/testsuites.scala
@@ -0,0 +1,22 @@
+package tastytest.testsuite
+
+import scala.concurrent.Future
+
+class AsyncSuite extends TestSuite {
+  final type TestBody = Future[Any]
+
+  def testsuiteTests(): Seq[Test] = ???
+}
+
+abstract class TestSuite {
+
+  type TestBody
+  final type Test = TestImpl[TestBody]
+
+  def testsuiteTests(): Seq[Test]
+
+}
+
+class TestImpl[T]
+
+class MySuite extends AsyncSuite
diff --git a/test/tasty/test/scala/tools/tastytest/Scripted.scala b/test/tasty/test/scala/tools/tastytest/Scripted.scala
index ae102fa68e7b..04433e0c00da 100644
--- a/test/tasty/test/scala/tools/tastytest/Scripted.scala
+++ b/test/tasty/test/scala/tools/tastytest/Scripted.scala
@@ -2,7 +2,7 @@ package scala.tools.tastytest
 
 object Scripted extends Script {
 
-  val subcommands = List(Dotc, DotcDecompiler, Scalac, Runner, Javac)
+  val subcommands = List(Dotc, DotcDecompiler, PrintTasty, Scalac, Runner, Javac)
 
   val commandName = "Scripted"
 }
diff --git a/test/tasty/test/scala/tools/tastytest/TastyTestJUnit.scala b/test/tasty/test/scala/tools/tastytest/TastyTestJUnit.scala
index a267db9b6cc7..71b901161da1 100644
--- a/test/tasty/test/scala/tools/tastytest/TastyTestJUnit.scala
+++ b/test/tasty/test/scala/tools/tastytest/TastyTestJUnit.scala
@@ -1,6 +1,6 @@
 package scala.tools.tastytest
 
-import org.junit.{Test => test}
+import org.junit.{Test => test, BeforeClass => setup, AfterClass => teardown}
 import org.junit.Assert._
 
 import scala.util.{Try, Failure, Properties}
@@ -63,17 +63,32 @@ class TastyTestJUnit {
     additionalDottySettings = Nil
   ).eval
 
-  val propSrc = "tastytest.src"
-  val propPkgName = "tastytest.packageName"
+  val propSrc     = "tastytest.src"
+  val propPkgName = "tastytest.packageName"
 
   def assertPropIsSet(prop: String): String = {
-    Properties.propOrNull(prop).ensuring(_ != null, s"-D$prop is not set")
+    Properties.propOrElse(prop, {
+      fail(s"-D$prop is not set")
+      "(unknown)"
+    })
   }
 }
 
-import scala.reflect.runtime.ReflectionUtils
-
 object TastyTestJUnit {
+
+  private[this] var _dottyClassLoader: Dotc.ClassLoader = _
+  implicit def dottyClassLoader: Dotc.ClassLoader = _dottyClassLoader
+
+  @setup
+  def init(): Unit = {
+    _dottyClassLoader = Dotc.initClassloader().get
+  }
+
+  @teardown
+  def finish(): Unit = {
+    _dottyClassLoader = null
+  }
+
   final implicit class TryOps(val op: Try[Unit]) extends AnyVal {
     def eval: Unit = op match {
       case Failure(err) => fail(err.toString)
diff --git a/versions.properties b/versions.properties
index e9902399194f..97b4bc3c3634 100644
--- a/versions.properties
+++ b/versions.properties
@@ -1,13 +1,13 @@
 # Scala version used for bootstrapping (see README.md)
-starr.version=2.13.5
+starr.version=2.13.7
 
 # These are the versions of the modules that go with this release.
 # Artifact dependencies:
 # - scala-compiler: jline (% "optional")
 # Other usages:
 # - scala-asm: jar content included in scala-compiler
-scala-asm.version=9.1.0-scala-1
+scala-asm.version=9.2.0-scala-1
 
 # jna.version must be updated together with jline-terminal-jna
-jline.version=3.19.0
-jna.version=5.3.1
+jline.version=3.21.0
+jna.version=5.9.0
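
Note on the new dottyErasure/api.scala: as its comment says, a parameter of a method named a_NN must erase to A, b_NN to B, and so on, and the actual enforcement lives in dottyApp/Main.scala, which is not part of this patch. Purely for orientation, here is a hedged sketch of how such a check can be phrased with Java reflection on the Scala 2 side; the object name ErasureCheckSketch and its simplified prefix table are invented and cover only the plain one-letter prefixes.

package tastytest.dottyErasure

import java.lang.reflect.Modifier

// Hypothetical sketch, not the repository's dottyApp/Main.scala: it handles only
// the one-letter prefixes of class Z; the real test also distinguishes the
// ARRAY, VC, int, object and enumerated variants.
object ErasureCheckSketch {
  private val expected: Map[String, Class[_]] =
    Map("a" -> classOf[A], "b" -> classOf[B], "c" -> classOf[C], "d" -> classOf[D])

  def checkErasure(z: Z): Unit =
    for (m <- z.getClass.getDeclaredMethods if !Modifier.isStatic(m.getModifiers)) {
      val prefix = m.getName.takeWhile(_ != '_')
      expected.get(prefix).foreach { want =>
        // the erased signature is what Java reflection reports
        val got = m.getParameterTypes.headOption
        assert(got.contains(want), s"${m.getName}: expected erasure ${want.getName}, got $got")
      }
    }
}

Running checkErasure(new Z) against the Scala 3-compiled class would flag any method whose first parameter erased differently from what its prefix promises.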
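
Note on the TastyTestJUnit change: the suite now builds one shared Dotc.ClassLoader for all tests by hanging JUnit 4 @BeforeClass/@AfterClass hooks (imported under the names setup and teardown) off the companion object. A minimal, self-contained sketch of that lifecycle pattern, with an invented ResourceSuite standing in for the real test class:

import org.junit.{Test => test, BeforeClass => setup, AfterClass => teardown}
import org.junit.Assert._

class ResourceSuite {
  // each test reads the resource that the companion object prepared once
  @test def resourceIsAvailable(): Unit = assertNotNull(ResourceSuite.resource)
}

object ResourceSuite {
  private var _resource: String = _
  def resource: String = _resource

  @setup def init(): Unit = _resource = "expensive handle" // runs once, before all tests
  @teardown def finish(): Unit = _resource = null          // runs once, after all tests
}

This mirrors how init() obtains the classloader via Dotc.initClassloader().get before any test runs and finish() drops the reference afterwards.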