Merge branch 'apache:master' into skipSnapshotAtBatch
eason-yuchen-liu authored Jul 2, 2024
2 parents 9dbe295 + ee0d306 commit 8fa9ef5
Showing 578 changed files with 14,790 additions and 4,081 deletions.
11 changes: 4 additions & 7 deletions .github/workflows/build_and_test.yml
@@ -213,6 +213,8 @@ jobs:
       INCLUDED_TAGS: ${{ matrix.included-tags }}
       HADOOP_PROFILE: ${{ matrix.hadoop }}
       HIVE_PROFILE: ${{ matrix.hive }}
+      # GitHub Actions' default miniconda to use in pip packaging test.
+      CONDA_PREFIX: /usr/share/miniconda
       GITHUB_PREV_SHA: ${{ github.event.before }}
       SPARK_LOCAL_IP: localhost
       NOLINT_ON_COMPILE: true
@@ -400,6 +402,8 @@ jobs:
       PYTHON_TO_TEST: 'python3.11'
       HADOOP_PROFILE: ${{ inputs.hadoop }}
       HIVE_PROFILE: hive2.3
+      # GitHub Actions' default miniconda to use in pip packaging test.
+      CONDA_PREFIX: /usr/share/miniconda
       GITHUB_PREV_SHA: ${{ github.event.before }}
       SPARK_LOCAL_IP: localhost
       SKIP_UNIDOC: true
@@ -463,19 +467,12 @@ jobs:
           echo $py
           $py -m pip list
         done
-    - name: Install Conda for pip packaging test
-      if: contains(matrix.modules, 'pyspark-errors')
-      run: |
-        curl -s https://repo.anaconda.com/miniconda/Miniconda3-latest-Linux-x86_64.sh > miniconda.sh
-        bash miniconda.sh -b -p $HOME/miniconda
-        rm miniconda.sh
     # Run the tests.
     - name: Run tests
       env: ${{ fromJSON(inputs.envs) }}
       shell: 'script -q -e -c "bash {0}"'
       run: |
         if [[ "$MODULES_TO_TEST" == *"pyspark-errors"* ]]; then
-          export PATH=$PATH:$HOME/miniconda/bin
           export SKIP_PACKAGING=false
           echo "Python Packaging Tests Enabled!"
         fi
4 changes: 2 additions & 2 deletions .github/workflows/build_python_connect.yml
@@ -71,7 +71,7 @@ jobs:
           python packaging/connect/setup.py sdist
           cd dist
           pip install pyspark*connect-*.tar.gz
-          pip install 'six==1.16.0' 'pandas<=2.2.2' scipy 'plotly>=4.8' 'mlflow>=2.8.1' coverage matplotlib openpyxl 'memory-profiler>=0.61.0' 'scikit-learn>=1.3.2' torch torchvision torcheval deepspeed unittest-xml-reporting
+          pip install 'six==1.16.0' 'pandas<=2.2.2' scipy 'plotly>=4.8' 'mlflow>=2.8.1' coverage matplotlib openpyxl 'memory-profiler>=0.61.0' 'scikit-learn>=1.3.2' 'graphviz==0.20.3' torch torchvision torcheval deepspeed unittest-xml-reporting
       - name: Run tests
         env:
           SPARK_TESTING: 1
@@ -110,7 +110,7 @@ jobs:
           mv python/lib lib.back
           mv python/pyspark lib.back
-          ./python/run-tests --parallelism=1 --python-executables=python3 --testnames "pyspark.resource.tests.test_connect_resources,pyspark.sql.tests.connect.client.test_artifact,pyspark.sql.tests.connect.test_resources"
+          ./python/run-tests --parallelism=1 --python-executables=python3 --testnames "pyspark.resource.tests.test_connect_resources,pyspark.sql.tests.connect.client.test_artifact,pyspark.sql.tests.connect.client.test_artifact_localcluster,pyspark.sql.tests.connect.test_resources"
       - name: Upload test results to report
         if: always()
         uses: actions/upload-artifact@v4
2 changes: 1 addition & 1 deletion .github/workflows/build_python_connect35.yml
@@ -70,7 +70,7 @@ jobs:
           pip install 'numpy==1.25.1' 'pyarrow==12.0.1' 'pandas<=2.0.3' scipy unittest-xml-reporting plotly>=4.8 'mlflow>=2.3.1' coverage 'matplotlib==3.7.2' openpyxl 'memory-profiler==0.60.0' 'scikit-learn==1.1.*'
           # Add Python deps for Spark Connect.
-          pip install 'grpcio>=1.48,<1.57' 'grpcio-status>=1.48,<1.57' 'protobuf==3.20.3' 'googleapis-common-protos==1.56.4'
+          pip install 'grpcio>=1.48,<1.57' 'grpcio-status>=1.48,<1.57' 'protobuf==3.20.3' 'googleapis-common-protos==1.56.4' 'graphviz==0.20.3'
           # Add torch as a testing dependency for TorchDistributor
           pip install 'torch==2.0.1' 'torchvision==0.15.2' torcheval
4 changes: 2 additions & 2 deletions .github/workflows/build_sparkr_window.yml
@@ -16,7 +16,7 @@
 # specific language governing permissions and limitations
 # under the License.
 #
-name: "Build / SparkR-only (master, 4.4.0, windows-2019)"
+name: "Build / SparkR-only (master, 4.4.0, windows-2022)"

 on:
   schedule:
@@ -25,7 +25,7 @@ on:
 jobs:
   build:
     name: "Build module: sparkr"
-    runs-on: windows-2019
+    runs-on: windows-2022
     timeout-minutes: 300
     if: github.repository == 'apache/spark'
     steps:
common/network-common/src/main/java/org/apache/spark/network/crypto/AuthEngine.java
@@ -45,6 +45,8 @@ class AuthEngine implements Closeable {
   public static final byte[] INPUT_IV_INFO = "inputIv".getBytes(UTF_8);
   public static final byte[] OUTPUT_IV_INFO = "outputIv".getBytes(UTF_8);
   private static final String MAC_ALGORITHM = "HMACSHA256";
+  private static final String LEGACY_CIPHER_ALGORITHM = "AES/CTR/NoPadding";
+  private static final String CIPHER_ALGORITHM = "AES/GCM/NoPadding";
   private static final int AES_GCM_KEY_SIZE_BYTES = 16;
   private static final byte[] EMPTY_TRANSCRIPT = new byte[0];
   private static final int UNSAFE_SKIP_HKDF_VERSION = 1;
@@ -227,12 +229,19 @@ private TransportCipher generateTransportCipher(
         OUTPUT_IV_INFO, // This is the HKDF info field used to differentiate IV values
         AES_GCM_KEY_SIZE_BYTES);
     SecretKeySpec sessionKey = new SecretKeySpec(derivedKey, "AES");
-    return new TransportCipher(
-        cryptoConf,
-        conf.cipherTransformation(),
-        sessionKey,
-        isClient ? clientIv : serverIv, // If it's the client, use the client IV first
-        isClient ? serverIv : clientIv);
+    if (LEGACY_CIPHER_ALGORITHM.equalsIgnoreCase(conf.cipherTransformation())) {
+      return new CtrTransportCipher(
+          cryptoConf,
+          sessionKey,
+          isClient ? clientIv : serverIv, // If it's the client, use the client IV first
+          isClient ? serverIv : clientIv);
+    } else if (CIPHER_ALGORITHM.equalsIgnoreCase(conf.cipherTransformation())) {
+      return new GcmTransportCipher(sessionKey);
+    } else {
+      throw new IllegalArgumentException(
+          String.format("Unsupported cipher mode: %s. %s and %s are supported.",
+              conf.cipherTransformation(), CIPHER_ALGORITHM, LEGACY_CIPHER_ALGORITHM));
+    }
   }

   private byte[] getTranscript(AuthMessage... encryptedPublicKeys) {
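
The branch is driven entirely by the configured transformation string: the legacy AES/CTR path keeps the old constructor arguments, including the two HKDF-derived IVs, while the AES/GCM path needs only the derived session key. As a hedged sketch of what opting into the new mode might look like from an application: spark.network.crypto.enabled is Spark's existing switch for AES-based RPC encryption, but the cipher key name below is an assumption about what conf.cipherTransformation() reads, not something this diff shows.

    import org.apache.spark.SparkConf;

    public class GcmOptInSketch {
      public static void main(String[] args) {
        // Assumption: "spark.network.crypto.cipher" is the setting backing
        // conf.cipherTransformation(); verify against the docs for your Spark version.
        SparkConf conf = new SparkConf()
            .setAppName("gcm-opt-in")
            .set("spark.network.crypto.enabled", "true")               // AES-based RPC encryption
            .set("spark.network.crypto.cipher", "AES/GCM/NoPadding");  // would select GcmTransportCipher
        System.out.println(conf.toDebugString());
      }
    }

Deployments that keep the default AES/CTR/NoPadding stay on CtrTransportCipher unchanged; any other transformation string now fails fast with IllegalArgumentException instead of being passed through.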
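
For readers less familiar with the two JCA transformations being dispatched on, the standalone sketch below shows the practical difference: GCM is an authenticated (AEAD) mode whose built-in tag makes tampering detectable at decrypt time, while CTR is a bare stream cipher that decrypts modified bytes without complaint, which is one motivation for adding a GCM path. The 12-byte GCM IV and 128-bit tag here follow common JCA convention and are illustrative only; nothing below reflects Spark's actual wire framing.

    import javax.crypto.AEADBadTagException;
    import javax.crypto.Cipher;
    import javax.crypto.spec.GCMParameterSpec;
    import javax.crypto.spec.IvParameterSpec;
    import javax.crypto.spec.SecretKeySpec;
    import java.nio.charset.StandardCharsets;
    import java.security.SecureRandom;

    public class CipherModeDemo {
      public static void main(String[] args) throws Exception {
        SecureRandom random = new SecureRandom();
        byte[] keyBytes = new byte[16];  // 128-bit AES key, same size as AES_GCM_KEY_SIZE_BYTES
        random.nextBytes(keyBytes);
        SecretKeySpec key = new SecretKeySpec(keyBytes, "AES");
        byte[] plaintext = "hello spark".getBytes(StandardCharsets.UTF_8);

        // AES/GCM/NoPadding: the 128-bit tag appended to the ciphertext
        // authenticates it, so corruption is detected at decrypt time.
        byte[] gcmIv = new byte[12];     // 96-bit IV is the conventional GCM choice
        random.nextBytes(gcmIv);
        Cipher gcm = Cipher.getInstance("AES/GCM/NoPadding");
        gcm.init(Cipher.ENCRYPT_MODE, key, new GCMParameterSpec(128, gcmIv));
        byte[] sealed = gcm.doFinal(plaintext);

        sealed[0] ^= 1;                  // flip one bit of the ciphertext
        gcm.init(Cipher.DECRYPT_MODE, key, new GCMParameterSpec(128, gcmIv));
        try {
          gcm.doFinal(sealed);
        } catch (AEADBadTagException e) {
          System.out.println("GCM rejected the tampered ciphertext: " + e);
        }

        // AES/CTR/NoPadding: an unauthenticated stream mode. The same corruption
        // goes unnoticed; decryption simply yields silently altered plaintext.
        byte[] ctrIv = new byte[16];     // CTR IV is one AES block
        random.nextBytes(ctrIv);
        Cipher ctr = Cipher.getInstance("AES/CTR/NoPadding");
        ctr.init(Cipher.ENCRYPT_MODE, key, new IvParameterSpec(ctrIv));
        byte[] stream = ctr.doFinal(plaintext);

        stream[0] ^= 1;                  // same one-bit corruption
        ctr.init(Cipher.DECRYPT_MODE, key, new IvParameterSpec(ctrIv));
        System.out.println("CTR quietly returned: "
            + new String(ctr.doFinal(stream), StandardCharsets.UTF_8));
      }
    }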