
Commit 972825c

Resolve signer issue by moving DotnetRunner to different package name. (#186)
1 parent b483013 commit 972825c
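Background (an inference from the commit title, hedged): the JVM refuses to load unsigned classes into a package that already contains classes from a signed jar, failing with a SecurityException about mismatched signer information. Keeping DotnetRunner directly in org.apache.spark.deploy could therefore clash with signed Spark distribution jars; giving it its own subpackage, org.apache.spark.deploy.dotnet, avoids loading third-party code into a package Spark's jars own.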

13 files changed (+30 additions, -28 deletions)

benchmark/csharp/Tpch/Program.cs

Lines changed: 1 addition & 1 deletion
@@ -16,7 +16,7 @@ private static void Main(string[] args)
         {
             Console.WriteLine("Usage:");
             Console.WriteLine("\t<spark-submit> --master local");
-            Console.WriteLine("\t\t--class org.apache.spark.deploy.DotnetRunner <path-to-microsoft-spark-jar>");
+            Console.WriteLine("\t\t--class org.apache.spark.deploy.dotnet.DotnetRunner <path-to-microsoft-spark-jar>");
             Console.WriteLine("\t\tTpch.exe <tpch_data_root_path> <query_number> <num_iterations> <true for SQL | false for functional>");

             return;

benchmark/run_csharp_benchmark.sh

Lines changed: 1 addition & 1 deletion
@@ -16,6 +16,6 @@ for i in {1..22}
 do
     for j in $(seq 1 $COLD_ITERATION)
     do
-        $SPARK_HOME/bin/spark-submit --master yarn --num-executors $NUM_EXECUTORS --driver-memory $DRIVER_MEMORY --executor-memory $EXECUTOR_MEMORY --executor-cores $EXECUTOR_CORES --files $CSHARP_DLL --class org.apache.spark.deploy.DotnetRunner $JAR_PATH $CSHARP_EXECUTABLE $DATA_PATH $i $NUM_ITERATION $IS_SQL
+        $SPARK_HOME/bin/spark-submit --master yarn --num-executors $NUM_EXECUTORS --driver-memory $DRIVER_MEMORY --executor-memory $EXECUTOR_MEMORY --executor-cores $EXECUTOR_CORES --files $CSHARP_DLL --class org.apache.spark.deploy.dotnet.DotnetRunner $JAR_PATH $CSHARP_EXECUTABLE $DATA_PATH $i $NUM_ITERATION $IS_SQL
     done
 done

deployment/README.md

Lines changed: 6 additions & 6 deletions
@@ -88,7 +88,7 @@ The following captures the setting for a HDInsight Script Action:
 ```shell
 foo@bar:~$ $SPARK_HOME/bin/spark-submit \
 --master yarn \
---class org.apache.spark.deploy.DotnetRunner \
+--class org.apache.spark.deploy.dotnet.DotnetRunner \
 --files <comma-separated list of assemblies that contain UDF definitions, if any> \
 adl://<cluster name>.azuredatalakestore.net/<some dir>/microsoft-spark-<spark_majorversion.spark_minorversion.x>-<spark_dotnet_version>.jar \
 adl://<cluster name>.azuredatalakestore.net/<some dir>/<your app>.zip <your app> <app arg 1> <app arg 2> ... <app arg n>
@@ -105,7 +105,7 @@ foo@bar:~$ curl -k -v -X POST "https://<your spark cluster>.azurehdinsight.net/l
 -d @- << EOF
 {
     "file":"adl://<cluster name>.azuredatalakestore.net/<some dir>/microsoft-spark-<spark_majorversion.spark_minorversion.x>-<spark_dotnet_version>.jar",
-    "className":"org.apache.spark.deploy.DotnetRunner",
+    "className":"org.apache.spark.deploy.dotnet.DotnetRunner",
     "files":["adl://<cluster name>.azuredatalakestore.net/<some dir>/<udf assembly>", "adl://<cluster name>.azuredatalakestore.net/<some dir>/<file>"],
     "args":["adl://<cluster name>.azuredatalakestore.net/<some dir>/<your app>.zip","<your app>","<app arg 1>","<app arg 2>,"...","<app arg n>"]
 }
@@ -142,7 +142,7 @@ foo@bar:~$ aws emr create-cluster \
 ```shell
 foo@bar:~$ spark-submit \
 --master yarn \
---class org.apache.spark.deploy.DotnetRunner \
+--class org.apache.spark.deploy.dotnet.DotnetRunner \
 --files <comma-separated list of assemblies that contain UDF definitions, if any> \
 s3://mybucket/<some dir>/microsoft-spark-<spark_majorversion.spark_minorversion.x>-<spark_dotnet_version>.jar \
 s3://mybucket/<some dir>/<your app>.zip <your app> <app args>
@@ -154,7 +154,7 @@ Amazon EMR Steps can be used to submit jobs to the Spark framework installed on
 # For example, you can run the following on Linux using `aws` cli.
 foo@bar:~$ aws emr add-steps \
 --cluster-id j-xxxxxxxxxxxxx \
---steps Type=spark,Name="Spark Program",Args=[--master,yarn,--files,s3://mybucket/<some dir>/<udf assembly>,--class,org.apache.spark.deploy.DotnetRunner,s3://mybucket/<some dir>/microsoft-spark-<spark_majorversion.spark_minorversion.x>-<spark_dotnet_version>.jar,s3://mybucket/<some dir>/<your app>.zip,<your app>,<app arg 1>,<app arg 2>,...,<app arg n>],ActionOnFailure=CONTINUE
+--steps Type=spark,Name="Spark Program",Args=[--master,yarn,--files,s3://mybucket/<some dir>/<udf assembly>,--class,org.apache.spark.deploy.dotnet.DotnetRunner,s3://mybucket/<some dir>/microsoft-spark-<spark_majorversion.spark_minorversion.x>-<spark_dotnet_version>.jar,s3://mybucket/<some dir>/<your app>.zip,<your app>,<app arg 1>,<app arg 2>,...,<app arg n>],ActionOnFailure=CONTINUE
 ```

 ## Databricks
@@ -195,7 +195,7 @@ One-time Setup:
 2. Upload the appropriate `microsoft-spark-<spark-version>-<spark-dotnet-version>.jar`
 3. Set the params appropriately:
     ```
-    Main Class: org.apache.spark.deploy.DotnetRunner
+    Main Class: org.apache.spark.deploy.dotnet.DotnetRunner
     Arguments /dbfs/apps/<your-app-name>.zip <your-app-name>
     ```
 > **Note:** `<your-app-name>` is a name for the executable that you [published](#preparing-your-spark-net-app).
@@ -231,5 +231,5 @@ Publishing your App & Running:
 1. [Create a Job](https://docs.databricks.com/user-guide/jobs.html) and select *Configure spark-submit*.
 2. Configure `spark-submit` with the following parameters:
     ```shell
-    ["--files","/dbfs/<path-to>/<app assembly/file to deploy to worker>","--class","org.apache.spark.deploy.DotnetRunner","/dbfs/<path-to>/microsoft-spark-<spark_majorversion.spark_minorversion.x>-<spark_dotnet_version>.jar","/dbfs/<path-to>/<app name>.zip","<app bin name>","app arg1","app arg2"]
+    ["--files","/dbfs/<path-to>/<app assembly/file to deploy to worker>","--class","org.apache.spark.deploy.dotnet.DotnetRunner","/dbfs/<path-to>/microsoft-spark-<spark_majorversion.spark_minorversion.x>-<spark_dotnet_version>.jar","/dbfs/<path-to>/<app name>.zip","<app bin name>","app arg1","app arg2"]
     ```
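Note: once a cluster picks up a microsoft-spark jar built from this commit, submissions that still pass the old --class org.apache.spark.deploy.DotnetRunner will fail at launch, because the class no longer exists at that location; saved submit scripts, Livy payloads, EMR steps, and Databricks job definitions all need the new fully qualified name shown above.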

docs/building/ubuntu-instructions.md

Lines changed: 5 additions & 5 deletions
@@ -172,7 +172,7 @@ Once you build the samples, you can use `spark-submit` to submit your .NET Core
 ```bash
 spark-submit \
 [--jars <any-jars-your-app-is-dependent-on>] \
---class org.apache.spark.deploy.DotnetRunner \
+--class org.apache.spark.deploy.dotnet.DotnetRunner \
 --master local \
 <path-to-microsoft-spark-jar> \
 <path-to-your-app-binary> <argument(s)-to-your-app>
@@ -182,15 +182,15 @@ Once you build the samples, you can use `spark-submit` to submit your .NET Core
 - **[Microsoft.Spark.Examples.Sql.Basic](../../examples/Microsoft.Spark.CSharp.Examples/Sql/Basic.cs)**
   ```bash
   spark-submit \
-  --class org.apache.spark.deploy.DotnetRunner \
+  --class org.apache.spark.deploy.dotnet.DotnetRunner \
   --master local \
   ~/dotnet.spark/src/scala/microsoft-spark-2.3.x/target/microsoft-spark-2.3.x-1.0.0-alpha.jar \
   Microsoft.Spark.CSharp.Examples Sql.Basic $SPARK_HOME/examples/src/main/resources/people.json
   ```
 - **[Microsoft.Spark.Examples.Sql.Streaming.StructuredNetworkWordCount](../../examples/Microsoft.Spark.CSharp.Examples/Sql/Streaming/StructuredNetworkWordCount.cs)**
   ```bash
   spark-submit \
-  --class org.apache.spark.deploy.DotnetRunner \
+  --class org.apache.spark.deploy.dotnet.DotnetRunner \
   --master local \
   ~/dotnet.spark/src/scala/microsoft-spark-2.3.x/target/microsoft-spark-2.3.x-1.0.0-alpha.jar \
   Microsoft.Spark.CSharp.Examples Sql.Streaming.StructuredNetworkWordCount localhost 9999
@@ -199,7 +199,7 @@ Once you build the samples, you can use `spark-submit` to submit your .NET Core
   ```bash
   spark-submit \
   --packages org.apache.spark:spark-sql-kafka-0-10_2.11:2.3.2 \
-  --class org.apache.spark.deploy.DotnetRunner \
+  --class org.apache.spark.deploy.dotnet.DotnetRunner \
   --master local \
   ~/dotnet.spark/src/scala/microsoft-spark-2.3.x/target/microsoft-spark-2.3.x-1.0.0-alpha.jar \
   Microsoft.Spark.CSharp.Examples Sql.Streaming.StructuredKafkaWordCount localhost:9092 subscribe test
@@ -208,7 +208,7 @@ Once you build the samples, you can use `spark-submit` to submit your .NET Core
   ```bash
   spark-submit \
   --jars path/to/net.jpountz.lz4/lz4-1.3.0.jar,path/to/org.apache.kafka/kafka-clients-0.10.0.1.jar,path/to/org.apache.spark/spark-sql-kafka-0-10_2.11-2.3.2.jar,`path/to/org.slf4j/slf4j-api-1.7.6.jar,path/to/org.spark-project.spark/unused-1.0.0.jar,path/to/org.xerial.snappy/snappy-java-1.1.2.6.jar \
-  --class org.apache.spark.deploy.DotnetRunner \
+  --class org.apache.spark.deploy.dotnet.DotnetRunner \
   --master local \
   ~/dotnet.spark/src/scala/microsoft-spark-2.3.x/target/microsoft-spark-2.3.x-1.0.0-alpha.jar \
   Microsoft.Spark.CSharp.Examples Sql.Streaming.StructuredKafkaWordCount localhost:9092 subscribe test

docs/building/windows-instructions.md

Lines changed: 5 additions & 5 deletions
@@ -204,7 +204,7 @@ Once you build the samples, running them will be through `spark-submit` regardle
 ```powershell
 spark-submit.cmd `
 [--jars <any-jars-your-app-is-dependent-on>] `
---class org.apache.spark.deploy.DotnetRunner `
+--class org.apache.spark.deploy.dotnet.DotnetRunner `
 --master local `
 <path-to-microsoft-spark-jar> `
 <path-to-your-app-exe> <argument(s)-to-your-app>
@@ -214,15 +214,15 @@ Once you build the samples, running them will be through `spark-submit` regardle
 - **[Microsoft.Spark.Examples.Sql.Basic](../../examples/Microsoft.Spark.CSharp.Examples/Sql/Basic.cs)**
   ```powershell
   spark-submit.cmd `
-  --class org.apache.spark.deploy.DotnetRunner `
+  --class org.apache.spark.deploy.dotnet.DotnetRunner `
   --master local `
   C:\github\dotnet-spark\src\scala\microsoft-spark-2.3.x\target\microsoft-spark-2.3.x-1.0.0-alpha.jar `
   Microsoft.Spark.CSharp.Examples.exe Sql.Basic %SPARK_HOME%\examples\src\main\resources\people.json
   ```
 - **[Microsoft.Spark.Examples.Sql.Streaming.StructuredNetworkWordCount](../../examples/Microsoft.Spark.CSharp.Examples/Sql/Streaming/StructuredNetworkWordCount.cs)**
   ```powershell
   spark-submit.cmd `
-  --class org.apache.spark.deploy.DotnetRunner `
+  --class org.apache.spark.deploy.dotnet.DotnetRunner `
   --master local `
   C:\github\dotnet-spark\src\scala\microsoft-spark-2.3.x\target\microsoft-spark-2.3.x-1.0.0-alpha.jar `
   Microsoft.Spark.CSharp.Examples.exe Sql.Streaming.StructuredNetworkWordCount localhost 9999
@@ -231,7 +231,7 @@ Once you build the samples, running them will be through `spark-submit` regardle
   ```powershell
   spark-submit.cmd `
   --packages org.apache.spark:spark-sql-kafka-0-10_2.11:2.3.2 `
-  --class org.apache.spark.deploy.DotnetRunner `
+  --class org.apache.spark.deploy.dotnet.DotnetRunner `
   --master local `
   C:\github\dotnet-spark\src\scala\microsoft-spark-2.3.x\target\microsoft-spark-2.3.x-1.0.0-alpha.jar `
   Microsoft.Spark.CSharp.Examples.exe Sql.Streaming.StructuredKafkaWordCount localhost:9092 subscribe test
@@ -240,7 +240,7 @@ Once you build the samples, running them will be through `spark-submit` regardle
   ```powershell
   spark-submit.cmd
   --jars path\to\net.jpountz.lz4\lz4-1.3.0.jar,path\to\org.apache.kafka\kafka-clients-0.10.0.1.jar,path\to\org.apache.spark\spark-sql-kafka-0-10_2.11-2.3.2.jar,`path\to\org.slf4j\slf4j-api-1.7.6.jar,path\to\org.spark-project.spark\unused-1.0.0.jar,path\to\org.xerial.snappy\snappy-java-1.1.2.6.jar `
-  --class org.apache.spark.deploy.DotnetRunner `
+  --class org.apache.spark.deploy.dotnet.DotnetRunner `
  --master local `
   C:\github\dotnet-spark\src\scala\microsoft-spark-2.3.x\target\microsoft-spark-2.3.x-1.0.0-alpha.jar `
   Microsoft.Spark.CSharp.Examples.exe Sql.Streaming.StructuredKafkaWordCount localhost:9092 subscribe test

docs/developer-guide.md

Lines changed: 3 additions & 3 deletions
@@ -11,7 +11,7 @@
 Open a new command prompt window, run the following:
 ```shell
 spark-submit \
---class org.apache.spark.deploy.DotnetRunner \
+--class org.apache.spark.deploy.dotnet.DotnetRunner \
 --master local \
 <path-to-microsoft-spark-jar> \
 debug
@@ -33,7 +33,7 @@ If you need to debug the Scala side code (`DotnetRunner`, `DotnetBackendHandler`
 ```shell
 spark-submit \
 --driver-java-options -agentlib:jdwp=transport=dt_socket,server=y,suspend=n,address=5005 \
---class org.apache.spark.deploy.DotnetRunner \
+--class org.apache.spark.deploy.dotnet.DotnetRunner \
 --master local \
 <path-to-microsoft-spark-jar> \
 <path-to-your-app-exe> <argument(s)-to-your-app>
@@ -51,7 +51,7 @@ Since Apache Spark's [MAINTENANCE] releases involve only internal changes (e.g.,
 1. In the corresponding `pom.xml`, update the `spark.version` value to the newly released version.
    * For example, if a new patch release is 2.4.3, you will update [src/scala/microsoft-spark-2.4.x/pom.xml](/src/scala/microsoft-spark-2.4.x/pom.xml) to have `<spark.version>2.4.3</spark.version>`.
 2. Update `DotnetRunner.supportedSparkVersions` to include the newly released version.
-   * For example, if a new patch release is 2.4.3, you will update [src/scala/microsoft-spark-2.4.x/src/main/scala/org/apache/spark/deploy/DotnetRunner.scala](/src/scala/microsoft-spark-2.4.x/src/main/scala/org/apache/spark/deploy/DotnetRunner.scala).
+   * For example, if a new patch release is 2.4.3, you will update [src/scala/microsoft-spark-2.4.x/src/main/scala/org/apache/spark/deploy/dotnet/DotnetRunner.scala](/src/scala/microsoft-spark-2.4.x/src/main/scala/org/apache/spark/deploy/dotnet/DotnetRunner.scala).
 3. Update the [azure-pipelines.yml](/azure-pipelines.yml) to include E2E testing for the newly released version.

 Refer to [this commit](https://github.com/dotnet/spark/commit/eb26baa46200bfcbe3e1080e650f335853d9990e) for an example.
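The guide's step 2 edits a version allow-list inside DotnetRunner.scala. A minimal sketch of that kind of update, assuming a plain Set[String] field (the field name comes from the guide; the surrounding object is illustrative, not the real file's contents):

object DotnetRunnerVersions {
  // Sketch: the runner validates the running Spark version against an
  // explicit allow-list; a new 2.4.3 patch release is supported by adding it.
  val supportedSparkVersions: Set[String] = Set("2.4.0", "2.4.1", "2.4.2", "2.4.3")
}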

docs/getting-started/ubuntu-instructions.md

Lines changed: 1 addition & 1 deletion
@@ -59,7 +59,7 @@ For detailed instructions, you can see [Building .NET for Apache Spark from Sour
 - Run your app.
   ```shell
   spark-submit \
-  --class org.apache.spark.deploy.DotnetRunner \
+  --class org.apache.spark.deploy.dotnet.DotnetRunner \
   --master local \
   microsoft-spark-2.4.x-<version>.jar \
   dotnet HelloSpark.dll

docs/getting-started/windows-instructions.md

Lines changed: 1 addition & 1 deletion
@@ -47,7 +47,7 @@ For detailed instructions, you can see [Building .NET for Apache Spark from Sour
 - Run your app
   ```
   spark-submit `
-  --class org.apache.spark.deploy.DotnetRunner `
+  --class org.apache.spark.deploy.dotnet.DotnetRunner `
   --master local `
   microsoft-spark-2.4.x-<version>.jar `
   dotnet HelloSpark.dll

src/csharp/Microsoft.Spark.E2ETest/SparkFixture.cs

Lines changed: 1 addition & 1 deletion
@@ -113,7 +113,7 @@ private void BuildSparkCmd(out string filename, out string args)
            }

            // Build the arguments for the spark-submit.
-           string classArg = "--class org.apache.spark.deploy.DotnetRunner";
+           string classArg = "--class org.apache.spark.deploy.dotnet.DotnetRunner";
            string curDir = AppDomain.CurrentDomain.BaseDirectory;
            string jarPrefix = GetJarPrefix(sparkHome);
            string scalaDir = $"{curDir}{sep}..{sep}..{sep}..{sep}..{sep}..{sep}src{sep}scala";
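This fixture assembles the spark-submit command line for the E2E tests, so its hard-coded class name has to track the Scala package rename; otherwise every E2E run would fail to launch the backend.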

src/scala/microsoft-spark-2.3.x/src/main/scala/org/apache/spark/deploy/DotnetRunner.scala renamed to src/scala/microsoft-spark-2.3.x/src/main/scala/org/apache/spark/deploy/dotnet/DotnetRunner.scala

Lines changed: 2 additions & 1 deletion
@@ -4,7 +4,7 @@
  * See the LICENSE file in the project root for more information.
  */

-package org.apache.spark.deploy
+package org.apache.spark.deploy.dotnet

 import java.io.File
 import java.net.URI
@@ -17,6 +17,7 @@ import org.apache.commons.io.FilenameUtils
 import org.apache.hadoop.fs.Path
 import org.apache.spark
 import org.apache.spark.api.dotnet.DotnetBackend
+import org.apache.spark.deploy.{PythonRunner, SparkHadoopUtil}
 import org.apache.spark.internal.Logging
 import org.apache.spark.util.dotnet.{Utils => DotnetUtils}
 import org.apache.spark.util.{RedirectThread, Utils}
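The package move above is the substantive change of the commit; every other file merely tracks the new name. The second hunk also shows a subtlety: PythonRunner and SparkHadoopUtil live in org.apache.spark.deploy, which was DotnetRunner's own package before the move, so they were in scope without an import; from the new subpackage they must be imported explicitly. A minimal sketch of the relocated file's skeleton (the package and import lines are from the commit; the object body is elided):

package org.apache.spark.deploy.dotnet

// While DotnetRunner lived in org.apache.spark.deploy, these types were in
// scope automatically as members of the same package; after the move to a
// subpackage they are not, hence the explicit import added by this commit.
import org.apache.spark.deploy.{PythonRunner, SparkHadoopUtil}

object DotnetRunner {
  // ... launch logic unchanged by this commit ...
}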
