diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml
index e8ac65262..caf68b81c 100644
--- a/.github/workflows/build.yml
+++ b/.github/workflows/build.yml
@@ -13,7 +13,7 @@ jobs:
     strategy:
       fail-fast: false
       matrix:
-        profile: [ '-Pspark-3.1', '-Pspark-3.2', '-Pspark-3.3' ]
+        profile: []
     steps:
     - uses: actions/checkout@v3
    - name: Set up JDK 11
diff --git a/Dockerfile b/Dockerfile
index 28f8eabe1..9787725d7 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -1,5 +1,5 @@
 #FROM docker.io/bitnami/spark:3.1.2
-FROM apache/spark-py:v3.4.0
+FROM apache/spark-py:v3.5.0
 USER 0
 RUN apt-get update && \
     apt install -y curl vim
diff --git a/docs/create-docker-multiplatform-steps.md b/docs/create-docker-multiplatform-steps.md
deleted file mode 100644
index e0feeb2ec..000000000
--- a/docs/create-docker-multiplatform-steps.md
+++ /dev/null
@@ -1,23 +0,0 @@
-docker multi platform image:
-
-on mac:
-
-    docker build -t <user>/<image>:arm64v8 --build-arg ARCH=arm64v8/ .
-
-    docker push <user>/<image>:arm64v8
-
-
-on ubuntu:
-    sudo su
-    docker login => give username and password
-
-    docker build -t <user>/<image>:amd64 --build-arg ARCH=amd64/ .
-
-    docker push <user>/<image>:amd64
-
-    docker manifest create \
-        <user>/<image>:latest \
-        --amend <user>/<image>:amd64 \
-        --amend <user>/<image>:arm64v8
-
-    docker manifest push <user>/<image>:latest
diff --git a/docs/settingUpZingg.md b/docs/settingUpZingg.md
index b1fec956e..9f1f3982a 100644
--- a/docs/settingUpZingg.md
+++ b/docs/settingUpZingg.md
@@ -53,17 +53,17 @@ _**Step 3 : Install Apache Spark -**_
 * Download Apache Spark - from the [Apache Spark Official Website](https://spark.apache.org/downloads.html).
 * Install downloaded Apache Spark - on your Ubuntu by following [this tutorial](https://computingforgeeks.com/how-to-install-apache-spark-on-ubuntu-debian/).
-* For example for 3.3.2:
+* For example, for 3.5.0:
 
 ```
-wget https://dlcdn.apache.org/spark/spark-3.3.2/spark-3.3.2-bin-hadoop3.tgz
-tar -xvf spark-3.3.2-bin-hadoop3.tgz
-rm -rf spark-3.3.2-bin-hadoop3.tgz
-sudo mv spark-3.3.2-bin-hadoop3 /opt/spark
+wget https://dlcdn.apache.org/spark/spark-3.5.0/spark-3.5.0-bin-hadoop3.tgz
+tar -xvf spark-3.5.0-bin-hadoop3.tgz
+rm -rf spark-3.5.0-bin-hadoop3.tgz
+sudo mv spark-3.5.0-bin-hadoop3 /opt/spark
 ```
 
 Make sure that spark version you have installed is compatible with java you have installed, and Zingg is supporting those versions.
 
-_**Note :-**_ Zingg currently supports only up to spark 3.3 and the corresponding Java version.
+_**Note :-**_ Zingg supports Spark 3.5 and the corresponding Java version.
 
 ****
@@ -122,7 +122,7 @@ mvn initialize
 * **mvn clean compile package -Dspark=sparkVer**
 ```
 
-_**Note :-**_ Replace the **sparkVer** with the version of spark you installed, For example, **-Dspark=3.2** and if still facing error, include **-Dmaven.test.skip=true** with the above command.
+_**Note :-**_ Replace **sparkVer** with the Spark version you installed, for example **-Dspark=3.5**. If you still face an error, include **-Dmaven.test.skip=true** in the above command.
 
 _**Note :-**_ substitute 3.3 with profile of the spark version you have installed. 
 This is based on profiles specified in pom.xml
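As a cross-check for the two settingUpZingg.md hunks above, here is a minimal sketch of the documented flow end to end. It only chains commands already shown in the diff; the SPARK_HOME/PATH exports are an assumption about how you expose the /opt/spark install, not part of the docs.

```bash
# Sketch: install Spark 3.5.0 and build Zingg, per docs/settingUpZingg.md.
wget https://dlcdn.apache.org/spark/spark-3.5.0/spark-3.5.0-bin-hadoop3.tgz
tar -xvf spark-3.5.0-bin-hadoop3.tgz
sudo mv spark-3.5.0-bin-hadoop3 /opt/spark

export SPARK_HOME=/opt/spark          # assumption: how the install is exposed
export PATH="$SPARK_HOME/bin:$PATH"

mvn initialize
mvn clean compile package -Dspark=3.5
# Fallback from the note above if tests make the build fail:
# mvn clean compile package -Dspark=3.5 -Dmaven.test.skip=true
```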
diff --git a/docs/stepbystep/installation/compiling-from-source.md b/docs/stepbystep/installation/compiling-from-source.md
index 0b623033f..1de4f32fa 100644
--- a/docs/stepbystep/installation/compiling-from-source.md
+++ b/docs/stepbystep/installation/compiling-from-source.md
@@ -9,6 +9,6 @@ If you need to compile the latest code or build for a different Spark version, y
 * Install maven
 * Install JDK 1.8
 * Set JAVA\_HOME to JDK base directory
-* Run the following: `mvn initialize` and then `mvn clean compile package -Dspark=sparkVer`
-where sparkVer is one of 3.4 or 3.5
+* Run the following: `mvn initialize` and then `mvn clean compile package`
+
diff --git a/docs/stepbystep/installation/installing-from-release/README.md b/docs/stepbystep/installation/installing-from-release/README.md
index 53bb03fbc..66180845a 100644
--- a/docs/stepbystep/installation/installing-from-release/README.md
+++ b/docs/stepbystep/installation/installing-from-release/README.md
@@ -10,7 +10,7 @@ Zingg is prebuilt for common Spark versions so that you can use those directly.
 
 A) Java JDK - version "1.8.0\_131"
 
-B) Apache Spark - version spark-3.1.2-bin-hadoop3.2
+B) Apache Spark - version spark-3.5.0-bin-hadoop3
 
 ****
diff --git a/releaseActivities.txt b/releaseActivities.txt
index c0f61cd60..2866c11e3 100644
--- a/releaseActivities.txt
+++ b/releaseActivities.txt
@@ -12,4 +12,28 @@ merge changes back to main
 maven jar
 Databricks
 Wheel
-- update location of py package and maven loc of jar
\ No newline at end of file
+- update location of py package and maven loc of jar
+
+docker multi platform image:
+
+on mac:
+
+    docker build -t <user>/<image>:arm64v8 --build-arg ARCH=arm64v8/ .
+
+    docker push <user>/<image>:arm64v8
+
+
+on ubuntu:
+    sudo su
+    docker login => give username and password
+
+    docker build -t <user>/<image>:amd64 --build-arg ARCH=amd64/ .
+
+    docker push <user>/<image>:amd64
+
+    docker manifest create \
+        <user>/<image>:latest \
+        --amend <user>/<image>:amd64 \
+        --amend <user>/<image>:arm64v8
+
+    docker manifest push <user>/<image>:latest
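To make the placeholder-heavy release steps above concrete, here is the same sequence with a hypothetical Docker Hub repo `example/zingg` substituted for `<user>/<image>`; the repo name is illustrative only, not the actual publishing location.

```bash
# On the arm64 builder (mac):
docker build -t example/zingg:arm64v8 --build-arg ARCH=arm64v8/ .
docker push example/zingg:arm64v8

# On the amd64 builder (ubuntu, after docker login):
docker build -t example/zingg:amd64 --build-arg ARCH=amd64/ .
docker push example/zingg:amd64

# Stitch both pushed images into a single multi-arch tag:
docker manifest create example/zingg:latest \
    --amend example/zingg:amd64 \
    --amend example/zingg:arm64v8
docker manifest push example/zingg:latest
```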