From 30971ac1a3cc0a79cfe907a4bb2eafe0600130f8 Mon Sep 17 00:00:00 2001
From: Alexandre Dutra
Date: Thu, 16 Jan 2025 16:06:47 +0100
Subject: [PATCH 1/4] Make regression tests use default authentication

Also align realm names and fix docs.
---
 README.md | 18 +++++------------
 quarkus/server/build.gradle.kts | 6 ++++++
 regtests/README.md | 14 +++++--------
 regtests/docker-compose.yml | 10 ++--------
 regtests/run.sh | 20 +++++++++++++++++++
 regtests/run_spark_sql.sh | 2 +-
 regtests/t_cli/src/test_cli.py | 2 +-
 regtests/t_pyspark/src/conftest.py | 4 ++--
 regtests/t_pyspark/src/iceberg_spark.py | 2 +-
 .../src/test_spark_sql_s3_with_privileges.py | 8 ++++----
 .../t_spark_sql/src/spark_sql_azure_blob.sh | 2 +-
 .../t_spark_sql/src/spark_sql_azure_dfs.sh | 2 +-
 regtests/t_spark_sql/src/spark_sql_basic.sh | 2 +-
 regtests/t_spark_sql/src/spark_sql_gcp.sh | 2 +-
 regtests/t_spark_sql/src/spark_sql_s3.sh | 2 +-
 .../src/spark_sql_s3_cross_region.sh | 2 +-
 regtests/t_spark_sql/src/spark_sql_views.sh | 2 +-
 17 files changed, 54 insertions(+), 46 deletions(-)

diff --git a/README.md b/README.md
index 1e1e9ed54..c103a7564 100644
--- a/README.md
+++ b/README.md
@@ -62,11 +62,8 @@ Apache Polaris is built using Gradle with Java 21+ and Docker 27+.
 - `./gradlew assemble` - To skip tests.
 - `./gradlew test` - To run unit tests and integration tests.
 - `./gradlew polarisServerRun` - To run the Polaris server locally, with profile `prod`; the server
-  is reachable at localhost:8181.
-- `java -Dquarkus.profile=test -jar quarkus/server/build/quarkus-app/quarkus-run.jar` - To run the
-  Polaris server locally, with profile `test`. With this profile, Polaris uses the `test`
-  Authenticator and `test` TokenBroker; this configuration is suitable for running regressions
-  tests, or for connecting with Spark.
+  is reachable at localhost:8181. This configuration is also suitable for running regression tests,
+  or for connecting with Spark. See below for more information on regression tests.
 - `./regtests/run_spark_sql.sh` - To connect from Spark SQL. Here are some example commands to run
   in the Spark SQL shell:
 ```sql
@@ -102,16 +99,11 @@ select * from db1.table1;
 
 Regression tests can be run in a local environment or in a Docker environment.
 
-To run regression tests locally, you need to have a Polaris server running locally, with the
-`test` Authenticator enabled. You can do this by running Polaris as below:
-
-```shell
-java -Dquarkus.profile=test -jar quarkus/server/build/quarkus-app/quarkus-run.jar
-```
-
-Then, you can run the regression tests using the following command:
+To run regression tests locally, you first need to start Polaris, with default realm `POLARIS` and
+root credentials: `root:secret`, then run the tests:
 
 ```shell
+./gradlew polarisServerRun
 env POLARIS_HOST=localhost ./regtests/run.sh
 ```
 
diff --git a/quarkus/server/build.gradle.kts b/quarkus/server/build.gradle.kts
index d7db59fdb..61354040f 100644
--- a/quarkus/server/build.gradle.kts
+++ b/quarkus/server/build.gradle.kts
@@ -17,6 +17,8 @@
  * under the License.
  */
 
+import io.quarkus.gradle.tasks.QuarkusRun
+
 plugins {
   alias(libs.plugins.quarkus)
   alias(libs.plugins.jandex)
@@ -70,6 +72,10 @@ tasks.withType { isFailOnError = false }
 
 tasks.register("polarisServerRun") { dependsOn("quarkusRun") }
 
+tasks.named<QuarkusRun>("quarkusRun") {
+  jvmArgs = listOf("-Dpolaris.bootstrap.credentials=POLARIS,root,secret")
+}
+
 distributions {
   main {
     contents {
diff --git a/regtests/README.md b/regtests/README.md
index 90a0f75ab..801b1e4ad 100644
--- a/regtests/README.md
+++ b/regtests/README.md
@@ -67,17 +67,13 @@ In this setup, a Polaris server must be running on localhost:8181 before running
 way to do this is to run the Polaris server in a separate terminal window:
 
 ```shell
-./gradlew polarisServerRun \
-  '-Dpolaris.authentication.authenticator.type=test' \
-  '-Dpolaris.authentication.token-service.type=test' \
-  '-Dpolaris.features.defaults."SUPPORTED_CATALOG_STORAGE_TYPES"=["FILE","S3","GCS","AZURE"]' \
-  '-Dpolaris.realm-context.realms=default-realm,realm1' \
-  '-Dquarkus.otel.sdk.disabled=true'
+./gradlew polarisServerRun
 ```
 
 Note: the regression tests expect Polaris to run with certain options, e.g. with support for `FILE`
-storage and with realms `default-realm,realm1`; if you run the above command, this will be the case.
-If you run Polaris in a different way, make sure that Polaris is configured appropriately.
+storage, default realm `POLARIS` and root credentials `root:secret`; if you run the above command,
+this will be the case. If you run Polaris in a different way, make sure that Polaris is configured
+appropriately.
 
 Running the test harness will automatically run the idempotent setup script. From the root of the
 project, just run:
@@ -92,7 +88,7 @@ test directories as arguments to `run.sh`. For example, to run only the `t_spark
 verbose mode:
 
 ```shell
-VERBOSE=1 POLARIS_HOST=localhost ./regtests/run.sh t_spark_sql/src/spark_sql_basic.sh
+env VERBOSE=1 POLARIS_HOST=localhost ./regtests/run.sh t_spark_sql/src/spark_sql_basic.sh
 ```
 
 ## Run with Cloud resources
diff --git a/regtests/docker-compose.yml b/regtests/docker-compose.yml
index de47b6020..2ca144376 100644
--- a/regtests/docker-compose.yml
+++ b/regtests/docker-compose.yml
@@ -31,14 +31,8 @@ services:
       AZURE_TENANT_ID: $AZURE_TENANT_ID
       AZURE_CLIENT_ID: $AZURE_CLIENT_ID
       AZURE_CLIENT_SECRET: $AZURE_CLIENT_SECRET
-      polaris.persistence.type: in-memory
-      polaris.authentication.authenticator.type: test
-      polaris.authentication.token-service.type: test
-      polaris.authentication.token-broker.type: symmetric-key
-      polaris.authentication.token-broker.symmetric-key.secret: polaris
-      polaris.features.defaults."SUPPORTED_CATALOG_STORAGE_TYPES": '["FILE","S3","GCS","AZURE"]'
-      polaris.realm-context.realms: default-realm,realm1
-      quarkus.log.file.enable: false
+      POLARIS_BOOTSTRAP_CREDENTIALS: POLARIS,root,secret
+      quarkus.log.file.enable: "false"
       quarkus.otel.sdk.disabled: "true"
     volumes:
       - ./credentials:/tmp/credentials/
diff --git a/regtests/run.sh b/regtests/run.sh
index f91b2dcf3..bd614150c 100755
--- a/regtests/run.sh
+++ b/regtests/run.sh
@@ -65,6 +65,26 @@ NUM_SUCCESSES=0
 export AWS_ACCESS_KEY_ID=''
 export AWS_SECRET_ACCESS_KEY=''
 
+if ! output=$(curl -X POST -H "Polaris-Realm: POLARIS" "http://${POLARIS_HOST:-localhost}:8181/api/catalog/v1/oauth/tokens" \
+  -d "grant_type=client_credentials" \
+  -d "client_id=root" \
+  -d "client_secret=secret" \
+  -d "scope=PRINCIPAL_ROLE:ALL"); then
+  logred "Error: Failed to retrieve bearer token"
+  exit 1
+fi
+
+token=$(echo "$output" | awk -F\" '{print $4}')
+
+if [ "$token" == "unauthorized_client" ]; then
+  logred "Error: Failed to retrieve bearer token"
+  exit 1
+fi
+
+export REGTEST_ROOT_BEARER_TOKEN=$token
+
+echo "Root bearer token: ${REGTEST_ROOT_BEARER_TOKEN}"
+
 for TEST_FILE in ${TEST_LIST}; do
   TEST_SUITE=$(dirname $(dirname ${TEST_FILE}))
   TEST_SHORTNAME=$(basename ${TEST_FILE})
diff --git a/regtests/run_spark_sql.sh b/regtests/run_spark_sql.sh
index cc355095e..fdde29a13 100755
--- a/regtests/run_spark_sql.sh
+++ b/regtests/run_spark_sql.sh
@@ -55,7 +55,7 @@ if [ -z "${SPARK_HOME}"]; then
   export SPARK_HOME=$(realpath ~/${SPARK_DISTRIBUTION})
 fi
 
-SPARK_BEARER_TOKEN="${REGTEST_ROOT_BEARER_TOKEN:-principal:root;realm:default-realm}"
+SPARK_BEARER_TOKEN="${REGTEST_ROOT_BEARER_TOKEN:-principal:root;realm:POLARIS}"
 
 if [ $# -eq 0 ]; then
   # create a catalog backed by the local filesystem
diff --git a/regtests/t_cli/src/test_cli.py b/regtests/t_cli/src/test_cli.py
index c9af207d1..736f125ab 100644
--- a/regtests/t_cli/src/test_cli.py
+++ b/regtests/t_cli/src/test_cli.py
@@ -38,7 +38,7 @@ def get_salt(length=8) -> str:
 
 
 def root_cli(*args):
-    return cli('principal:root;realm:default-realm')(*args)
+    return cli(os.getenv('REGTEST_ROOT_BEARER_TOKEN', 'principal:root;realm:POLARIS'))(*args)
 
 
 def cli(access_token):
diff --git a/regtests/t_pyspark/src/conftest.py b/regtests/t_pyspark/src/conftest.py
index 041ef7b2c..5ce3f8178 100644
--- a/regtests/t_pyspark/src/conftest.py
+++ b/regtests/t_pyspark/src/conftest.py
@@ -66,7 +66,7 @@ def catalog_client(polaris_catalog_url):
     :return:
     """
     client = CatalogApiClient(
-        Configuration(access_token=os.getenv('REGTEST_ROOT_BEARER_TOKEN', 'principal:root;realm:default-realm'),
+        Configuration(access_token=os.getenv('REGTEST_ROOT_BEARER_TOKEN', 'principal:root;realm:POLARIS'),
                       host=polaris_catalog_url))
     return IcebergCatalogAPI(client)
 
@@ -143,7 +143,7 @@ def format_namespace(namespace):
 
 @pytest.fixture
 def root_client(polaris_host, polaris_url):
-    client = ApiClient(Configuration(access_token=os.getenv('REGTEST_ROOT_BEARER_TOKEN', 'principal:root;realm:default-realm'),
+    client = ApiClient(Configuration(access_token=os.getenv('REGTEST_ROOT_BEARER_TOKEN', 'principal:root;realm:POLARIS'),
                                      host=polaris_url))
     api = PolarisDefaultApi(client)
     return api
diff --git a/regtests/t_pyspark/src/iceberg_spark.py b/regtests/t_pyspark/src/iceberg_spark.py
index db907ed2b..9b6a393d0 100644
--- a/regtests/t_pyspark/src/iceberg_spark.py
+++ b/regtests/t_pyspark/src/iceberg_spark.py
@@ -46,7 +46,7 @@ def __init__(
         aws_region: str = "us-west-2",
         catalog_name: str = None,
         polaris_url: str = None,
-        realm: str = 'default-realm'
+        realm: str = 'POLARIS'
     ):
         """Constructor for Iceberg Spark session. Sets the member variables."""
         self.bearer_token = bearer_token
diff --git a/regtests/t_pyspark/src/test_spark_sql_s3_with_privileges.py b/regtests/t_pyspark/src/test_spark_sql_s3_with_privileges.py
index 9ae9a1a39..ce00ce58c 100644
--- a/regtests/t_pyspark/src/test_spark_sql_s3_with_privileges.py
+++ b/regtests/t_pyspark/src/test_spark_sql_s3_with_privileges.py
@@ -154,7 +154,7 @@ def snowman_catalog_client(polaris_catalog_url, snowman):
 
     token = oauth_api.get_token(scope='PRINCIPAL_ROLE:ALL', client_id=snowman.principal.client_id,
                                 client_secret=snowman.credentials.client_secret, grant_type='client_credentials',
-                                _headers={'realm': 'default-realm'})
+                                _headers={'realm': 'POLARIS'})
 
     return IcebergCatalogAPI(CatalogApiClient(Configuration(access_token=token.access_token,
                                                             host=polaris_catalog_url)))
@@ -175,7 +175,7 @@ def creator_catalog_client(polaris_catalog_url, creator):
 
     token = oauth_api.get_token(scope='PRINCIPAL_ROLE:ALL', client_id=creator.principal.client_id,
                                 client_secret=creator.credentials.client_secret, grant_type='client_credentials',
-                                _headers={'realm': 'default-realm'})
+                                _headers={'realm': 'POLARIS'})
 
     return IcebergCatalogAPI(CatalogApiClient(Configuration(access_token=token.access_token,
                                                             host=polaris_catalog_url)))
@@ -230,7 +230,7 @@ def reader_catalog_client(polaris_catalog_url, reader):
 
     token = oauth_api.get_token(scope='PRINCIPAL_ROLE:ALL', client_id=reader.principal.client_id,
                                 client_secret=reader.credentials.client_secret, grant_type='client_credentials',
-                                _headers={'realm': 'default-realm'})
+                                _headers={'realm': 'POLARIS'})
 
     return IcebergCatalogAPI(CatalogApiClient(Configuration(access_token=token.access_token,
                                                             host=polaris_catalog_url)))
@@ -1015,7 +1015,7 @@ def create_principal(polaris_url, polaris_catalog_url, api, principal_name):
 
     token = oauth_api.get_token(scope='PRINCIPAL_ROLE:ALL', client_id=principal_result.principal.client_id,
                                 client_secret=principal_result.credentials.client_secret, grant_type='client_credentials',
-                                _headers={'realm': 'default-realm'})
+                                _headers={'realm': 'POLARIS'})
 
     rotate_client = ManagementApiClient(Configuration(access_token=token.access_token, host=polaris_url))
     rotate_api = PolarisDefaultApi(rotate_client)
diff --git a/regtests/t_spark_sql/src/spark_sql_azure_blob.sh b/regtests/t_spark_sql/src/spark_sql_azure_blob.sh
index 21585b759..846692a9b 100755
--- a/regtests/t_spark_sql/src/spark_sql_azure_blob.sh
+++ b/regtests/t_spark_sql/src/spark_sql_azure_blob.sh
@@ -19,7 +19,7 @@
 # under the License.
 #
 
-SPARK_BEARER_TOKEN="${REGTEST_ROOT_BEARER_TOKEN:-principal:root;realm:realm1}"
+SPARK_BEARER_TOKEN="${REGTEST_ROOT_BEARER_TOKEN:-principal:root;realm:POLARIS}"
 
 curl -i -X POST -H "Authorization: Bearer ${SPARK_BEARER_TOKEN}" -H 'Accept: application/json' -H 'Content-Type: application/json' \
   http://${POLARIS_HOST:-localhost}:8181/api/management/v1/catalogs \
diff --git a/regtests/t_spark_sql/src/spark_sql_azure_dfs.sh b/regtests/t_spark_sql/src/spark_sql_azure_dfs.sh
index 93ac6d7e5..d9562b849 100755
--- a/regtests/t_spark_sql/src/spark_sql_azure_dfs.sh
+++ b/regtests/t_spark_sql/src/spark_sql_azure_dfs.sh
@@ -19,7 +19,7 @@
 # under the License.
 #
 
-SPARK_BEARER_TOKEN="${REGTEST_ROOT_BEARER_TOKEN:-principal:root;realm:realm1}"
+SPARK_BEARER_TOKEN="${REGTEST_ROOT_BEARER_TOKEN:-principal:root;realm:POLARIS}"
 
 curl -i -X POST -H "Authorization: Bearer ${SPARK_BEARER_TOKEN}" -H 'Accept: application/json' -H 'Content-Type: application/json' \
   http://${POLARIS_HOST:-localhost}:8181/api/management/v1/catalogs \
diff --git a/regtests/t_spark_sql/src/spark_sql_basic.sh b/regtests/t_spark_sql/src/spark_sql_basic.sh
index 2d5f1cc10..9d8d45d7a 100755
--- a/regtests/t_spark_sql/src/spark_sql_basic.sh
+++ b/regtests/t_spark_sql/src/spark_sql_basic.sh
@@ -19,7 +19,7 @@
 # under the License.
 #
 
-SPARK_BEARER_TOKEN="${REGTEST_ROOT_BEARER_TOKEN:-principal:root;realm:realm1}"
+SPARK_BEARER_TOKEN="${REGTEST_ROOT_BEARER_TOKEN:-principal:root;realm:POLARIS}"
 
 curl -i -X POST -H "Authorization: Bearer ${SPARK_BEARER_TOKEN}" -H 'Accept: application/json' -H 'Content-Type: application/json' \
   http://${POLARIS_HOST:-localhost}:8181/api/management/v1/catalogs \
diff --git a/regtests/t_spark_sql/src/spark_sql_gcp.sh b/regtests/t_spark_sql/src/spark_sql_gcp.sh
index 0e124bf61..8638c12b3 100755
--- a/regtests/t_spark_sql/src/spark_sql_gcp.sh
+++ b/regtests/t_spark_sql/src/spark_sql_gcp.sh
@@ -19,7 +19,7 @@
 # under the License.
 #
 
-SPARK_BEARER_TOKEN="${REGTEST_ROOT_BEARER_TOKEN:-principal:root;realm:realm1}"
+SPARK_BEARER_TOKEN="${REGTEST_ROOT_BEARER_TOKEN:-principal:root;realm:POLARIS}"
 
 curl -i -X POST -H "Authorization: Bearer ${SPARK_BEARER_TOKEN}" -H 'Accept: application/json' -H 'Content-Type: application/json' \
   http://${POLARIS_HOST:-localhost}:8181/api/management/v1/catalogs \
diff --git a/regtests/t_spark_sql/src/spark_sql_s3.sh b/regtests/t_spark_sql/src/spark_sql_s3.sh
index aebfdd585..181d68078 100755
--- a/regtests/t_spark_sql/src/spark_sql_s3.sh
+++ b/regtests/t_spark_sql/src/spark_sql_s3.sh
@@ -24,7 +24,7 @@ if [ -z "$AWS_TEST_ENABLED" ] || [ "$AWS_TEST_ENABLED" != "true" ]; then
   exit 0
 fi
 
-SPARK_BEARER_TOKEN="${REGTEST_ROOT_BEARER_TOKEN:-principal:root;realm:realm1}"
+SPARK_BEARER_TOKEN="${REGTEST_ROOT_BEARER_TOKEN:-principal:root;realm:POLARIS}"
 
 curl -i -X POST -H "Authorization: Bearer ${SPARK_BEARER_TOKEN}" -H 'Accept: application/json' -H 'Content-Type: application/json' \
   http://${POLARIS_HOST:-localhost}:8181/api/management/v1/catalogs \
diff --git a/regtests/t_spark_sql/src/spark_sql_s3_cross_region.sh b/regtests/t_spark_sql/src/spark_sql_s3_cross_region.sh
index 7808dc8c3..d29c40058 100644
--- a/regtests/t_spark_sql/src/spark_sql_s3_cross_region.sh
+++ b/regtests/t_spark_sql/src/spark_sql_s3_cross_region.sh
@@ -24,7 +24,7 @@ if [ -z "$AWS_CROSS_REGION_TEST_ENABLED" ] || [ "$AWS_CROSS_REGION_TEST_ENABLED"
   exit 0
 fi
 
-SPARK_BEARER_TOKEN="${REGTEST_ROOT_BEARER_TOKEN:-principal:root;realm:realm1}"
+SPARK_BEARER_TOKEN="${REGTEST_ROOT_BEARER_TOKEN:-principal:root;realm:POLARIS}"
 
 BUCKET="${AWS_CROSS_REGION_BUCKET}"
 ROLE_ARN="${AWS_ROLE_FOR_CROSS_REGION_BUCKET}"
diff --git a/regtests/t_spark_sql/src/spark_sql_views.sh b/regtests/t_spark_sql/src/spark_sql_views.sh
index 50b2c59d5..ba6b66eee 100755
--- a/regtests/t_spark_sql/src/spark_sql_views.sh
+++ b/regtests/t_spark_sql/src/spark_sql_views.sh
@@ -19,7 +19,7 @@
 # under the License.
 #
 
-SPARK_BEARER_TOKEN="${REGTEST_ROOT_BEARER_TOKEN:-principal:root;realm:default-realm}"
+SPARK_BEARER_TOKEN="${REGTEST_ROOT_BEARER_TOKEN:-principal:root;realm:POLARIS}"
 
 curl -i -X POST -H "Authorization: Bearer ${SPARK_BEARER_TOKEN}" -H 'Accept: application/json' -H 'Content-Type: application/json' \
   http://${POLARIS_HOST:-localhost}:8181/api/management/v1/catalogs \

From d97210709cc368a3e181dfd5dfabab28d7368154 Mon Sep 17 00:00:00 2001
From: Alexandre Dutra
Date: Fri, 24 Jan 2025 20:46:29 +0100
Subject: [PATCH 2/4] Adapt run_spark_sql.sh

---
 regtests/README.md | 6 +++---
 regtests/run_spark_sql.sh | 16 +++++++++++++++-
 2 files changed, 18 insertions(+), 4 deletions(-)

diff --git a/regtests/README.md b/regtests/README.md
index 801b1e4ad..5548f7c99 100644
--- a/regtests/README.md
+++ b/regtests/README.md
@@ -151,11 +151,11 @@ Then commit the changes to the ref file.
 
 ## Run a spark-sql interactive shell
 
-With a Polaris server running in "dev" mode (see above), you can run a spark-sql interactive shell
-to test. From the root of the project:
+With a Polaris server running, you can run a spark-sql interactive shell to test. From the root of
+the project:
 
 ```shell
-POLARIS_HOST=localhost ./regtests/run_spark_sql.sh
+env POLARIS_HOST=localhost ./regtests/run_spark_sql.sh
 ```
 
 Some SQL commands that you can try:
diff --git a/regtests/run_spark_sql.sh b/regtests/run_spark_sql.sh
index fdde29a13..699f9219e 100755
--- a/regtests/run_spark_sql.sh
+++ b/regtests/run_spark_sql.sh
@@ -55,7 +55,21 @@ if [ -z "${SPARK_HOME}"]; then
   export SPARK_HOME=$(realpath ~/${SPARK_DISTRIBUTION})
 fi
 
-SPARK_BEARER_TOKEN="${REGTEST_ROOT_BEARER_TOKEN:-principal:root;realm:POLARIS}"
+if ! output=$(curl -X POST -H "Polaris-Realm: POLARIS" "http://${POLARIS_HOST:-localhost}:8181/api/catalog/v1/oauth/tokens" \
+  -d "grant_type=client_credentials" \
+  -d "client_id=root" \
+  -d "client_secret=secret" \
+  -d "scope=PRINCIPAL_ROLE:ALL"); then
+  logred "Error: Failed to retrieve bearer token"
+  exit 1
+fi
+
+SPARK_BEARER_TOKEN=$(echo "$output" | awk -F\" '{print $4}')
+
+if [ "$SPARK_BEARER_TOKEN" == "unauthorized_client" ]; then
+  logred "Error: Failed to retrieve bearer token"
+  exit 1
+fi
 
 if [ $# -eq 0 ]; then
   # create a catalog backed by the local filesystem

From a45be42e2e2c22d6fe38ea4d80da29fa1be5d249 Mon Sep 17 00:00:00 2001
From: Alexandre Dutra
Date: Fri, 24 Jan 2025 22:05:34 +0100
Subject: [PATCH 3/4] review

---
 README.md | 3 +--
 1 file changed, 1 insertion(+), 2 deletions(-)

diff --git a/README.md b/README.md
index c103a7564..5821aa8c2 100644
--- a/README.md
+++ b/README.md
@@ -99,8 +99,7 @@ select * from db1.table1;
 
 Regression tests can be run in a local environment or in a Docker environment.
 
-To run regression tests locally, you first need to start Polaris, with default realm `POLARIS` and
-root credentials: `root:secret`, then run the tests:
+To run regression tests locally, you first need to start Polaris, then run the tests:
 
 ```shell
 ./gradlew polarisServerRun

From f71b14f7db6de1db445927361c36bb3b15e14d10 Mon Sep 17 00:00:00 2001
From: Alexandre Dutra
Date: Sat, 25 Jan 2025 11:41:51 +0100
Subject: [PATCH 4/4] review

---
 README.md | 6 +++---
 regtests/README.md | 4 ++--
 regtests/t_cli/src/test_cli.py | 2 +-
 regtests/t_pyspark/src/conftest.py | 4 ++--
 regtests/t_spark_sql/src/spark_sql_azure_blob.sh | 2 +-
 regtests/t_spark_sql/src/spark_sql_azure_dfs.sh | 2 +-
 regtests/t_spark_sql/src/spark_sql_basic.sh | 2 +-
 regtests/t_spark_sql/src/spark_sql_gcp.sh | 2 +-
 regtests/t_spark_sql/src/spark_sql_s3.sh | 2 +-
 regtests/t_spark_sql/src/spark_sql_s3_cross_region.sh | 2 +-
 regtests/t_spark_sql/src/spark_sql_views.sh | 2 +-
 11 files changed, 15 insertions(+), 15 deletions(-)

diff --git a/README.md b/README.md
index 5821aa8c2..22dc61d85 100644
--- a/README.md
+++ b/README.md
@@ -61,9 +61,9 @@ Apache Polaris is built using Gradle with Java 21+ and Docker 27+.
 - `./gradlew build` - To build and run tests. Make sure Docker is running, as the integration tests depend on it.
 - `./gradlew assemble` - To skip tests.
 - `./gradlew test` - To run unit tests and integration tests.
-- `./gradlew polarisServerRun` - To run the Polaris server locally, with profile `prod`; the server
-  is reachable at localhost:8181. This configuration is also suitable for running regression tests,
-  or for connecting with Spark. See below for more information on regression tests.
+- `./gradlew polarisServerRun` - To run the Polaris server locally; the server is reachable at
+  localhost:8181. This is also suitable for running regression tests, or for connecting with Spark.
+  See below for more information on regression tests.
 - `./regtests/run_spark_sql.sh` - To connect from Spark SQL. Here are some example commands to run
   in the Spark SQL shell:
 ```sql
diff --git a/regtests/README.md b/regtests/README.md
index 5548f7c99..5e40471ce 100644
--- a/regtests/README.md
+++ b/regtests/README.md
@@ -177,14 +177,14 @@ The client can be generated using two commands:
 
 ```shell
 # generate the management api client
-$ docker run --rm \
+docker run --rm \
   -v ${PWD}:/local openapitools/openapi-generator-cli generate \
   -i /local/spec/polaris-management-service.yml \
   -g python \
   -o /local/regtests/client/python --additional-properties=packageName=polaris.management --additional-properties=apiNamePrefix=polaris
 # generate the iceberg rest client
-$ docker run --rm \
+docker run --rm \
   -v ${PWD}:/local openapitools/openapi-generator-cli generate \
   -i /local/spec/rest-catalog-open-api.yaml \
   -g python \
   -o /local/regtests/client/python --additional-properties=packageName=polaris.catalog --additional-properties=apiNamePrefix=Iceberg
diff --git a/regtests/t_cli/src/test_cli.py b/regtests/t_cli/src/test_cli.py
index 736f125ab..876a53e8f 100644
--- a/regtests/t_cli/src/test_cli.py
+++ b/regtests/t_cli/src/test_cli.py
@@ -38,7 +38,7 @@ def get_salt(length=8) -> str:
 
 
 def root_cli(*args):
-    return cli(os.getenv('REGTEST_ROOT_BEARER_TOKEN', 'principal:root;realm:POLARIS'))(*args)
+    return cli(os.getenv('REGTEST_ROOT_BEARER_TOKEN'))(*args)
 
 
 def cli(access_token):
diff --git a/regtests/t_pyspark/src/conftest.py b/regtests/t_pyspark/src/conftest.py
index 5ce3f8178..db858acdb 100644
--- a/regtests/t_pyspark/src/conftest.py
+++ b/regtests/t_pyspark/src/conftest.py
@@ -66,7 +66,7 @@ def catalog_client(polaris_catalog_url):
     :return:
     """
     client = CatalogApiClient(
-        Configuration(access_token=os.getenv('REGTEST_ROOT_BEARER_TOKEN', 'principal:root;realm:POLARIS'),
+        Configuration(access_token=os.getenv('REGTEST_ROOT_BEARER_TOKEN'),
                       host=polaris_catalog_url))
     return IcebergCatalogAPI(client)
 
@@ -143,7 +143,7 @@ def format_namespace(namespace):
 
 @pytest.fixture
 def root_client(polaris_host, polaris_url):
-    client = ApiClient(Configuration(access_token=os.getenv('REGTEST_ROOT_BEARER_TOKEN', 'principal:root;realm:POLARIS'),
+    client = ApiClient(Configuration(access_token=os.getenv('REGTEST_ROOT_BEARER_TOKEN'),
                                      host=polaris_url))
     api = PolarisDefaultApi(client)
     return api
diff --git a/regtests/t_spark_sql/src/spark_sql_azure_blob.sh b/regtests/t_spark_sql/src/spark_sql_azure_blob.sh
index 846692a9b..72d715552 100755
--- a/regtests/t_spark_sql/src/spark_sql_azure_blob.sh
+++ b/regtests/t_spark_sql/src/spark_sql_azure_blob.sh
@@ -19,7 +19,7 @@
 # under the License.
 #
 
-SPARK_BEARER_TOKEN="${REGTEST_ROOT_BEARER_TOKEN:-principal:root;realm:POLARIS}"
+SPARK_BEARER_TOKEN="${REGTEST_ROOT_BEARER_TOKEN}"
 
 curl -i -X POST -H "Authorization: Bearer ${SPARK_BEARER_TOKEN}" -H 'Accept: application/json' -H 'Content-Type: application/json' \
   http://${POLARIS_HOST:-localhost}:8181/api/management/v1/catalogs \
diff --git a/regtests/t_spark_sql/src/spark_sql_azure_dfs.sh b/regtests/t_spark_sql/src/spark_sql_azure_dfs.sh
index d9562b849..9c8117d01 100755
--- a/regtests/t_spark_sql/src/spark_sql_azure_dfs.sh
+++ b/regtests/t_spark_sql/src/spark_sql_azure_dfs.sh
@@ -19,7 +19,7 @@
 # under the License.
 #
 
-SPARK_BEARER_TOKEN="${REGTEST_ROOT_BEARER_TOKEN:-principal:root;realm:POLARIS}"
+SPARK_BEARER_TOKEN="${REGTEST_ROOT_BEARER_TOKEN}"
 
 curl -i -X POST -H "Authorization: Bearer ${SPARK_BEARER_TOKEN}" -H 'Accept: application/json' -H 'Content-Type: application/json' \
   http://${POLARIS_HOST:-localhost}:8181/api/management/v1/catalogs \
diff --git a/regtests/t_spark_sql/src/spark_sql_basic.sh b/regtests/t_spark_sql/src/spark_sql_basic.sh
index 9d8d45d7a..2ec97cc26 100755
--- a/regtests/t_spark_sql/src/spark_sql_basic.sh
+++ b/regtests/t_spark_sql/src/spark_sql_basic.sh
@@ -19,7 +19,7 @@
 # under the License.
 #
 
-SPARK_BEARER_TOKEN="${REGTEST_ROOT_BEARER_TOKEN:-principal:root;realm:POLARIS}"
+SPARK_BEARER_TOKEN="${REGTEST_ROOT_BEARER_TOKEN}"
 
 curl -i -X POST -H "Authorization: Bearer ${SPARK_BEARER_TOKEN}" -H 'Accept: application/json' -H 'Content-Type: application/json' \
   http://${POLARIS_HOST:-localhost}:8181/api/management/v1/catalogs \
diff --git a/regtests/t_spark_sql/src/spark_sql_gcp.sh b/regtests/t_spark_sql/src/spark_sql_gcp.sh
index 8638c12b3..20f8fd1a9 100755
--- a/regtests/t_spark_sql/src/spark_sql_gcp.sh
+++ b/regtests/t_spark_sql/src/spark_sql_gcp.sh
@@ -19,7 +19,7 @@
 # under the License.
 #
 
-SPARK_BEARER_TOKEN="${REGTEST_ROOT_BEARER_TOKEN:-principal:root;realm:POLARIS}"
+SPARK_BEARER_TOKEN="${REGTEST_ROOT_BEARER_TOKEN}"
 
 curl -i -X POST -H "Authorization: Bearer ${SPARK_BEARER_TOKEN}" -H 'Accept: application/json' -H 'Content-Type: application/json' \
   http://${POLARIS_HOST:-localhost}:8181/api/management/v1/catalogs \
diff --git a/regtests/t_spark_sql/src/spark_sql_s3.sh b/regtests/t_spark_sql/src/spark_sql_s3.sh
index 181d68078..6748bc901 100755
--- a/regtests/t_spark_sql/src/spark_sql_s3.sh
+++ b/regtests/t_spark_sql/src/spark_sql_s3.sh
@@ -24,7 +24,7 @@ if [ -z "$AWS_TEST_ENABLED" ] || [ "$AWS_TEST_ENABLED" != "true" ]; then
   exit 0
 fi
 
-SPARK_BEARER_TOKEN="${REGTEST_ROOT_BEARER_TOKEN:-principal:root;realm:POLARIS}"
+SPARK_BEARER_TOKEN="${REGTEST_ROOT_BEARER_TOKEN}"
 
 curl -i -X POST -H "Authorization: Bearer ${SPARK_BEARER_TOKEN}" -H 'Accept: application/json' -H 'Content-Type: application/json' \
   http://${POLARIS_HOST:-localhost}:8181/api/management/v1/catalogs \
diff --git a/regtests/t_spark_sql/src/spark_sql_s3_cross_region.sh b/regtests/t_spark_sql/src/spark_sql_s3_cross_region.sh
index d29c40058..d68e121cb 100644
--- a/regtests/t_spark_sql/src/spark_sql_s3_cross_region.sh
+++ b/regtests/t_spark_sql/src/spark_sql_s3_cross_region.sh
@@ -24,7 +24,7 @@ if [ -z "$AWS_CROSS_REGION_TEST_ENABLED" ] || [ "$AWS_CROSS_REGION_TEST_ENABLED"
   exit 0
 fi
 
-SPARK_BEARER_TOKEN="${REGTEST_ROOT_BEARER_TOKEN:-principal:root;realm:POLARIS}"
+SPARK_BEARER_TOKEN="${REGTEST_ROOT_BEARER_TOKEN}"
 
 BUCKET="${AWS_CROSS_REGION_BUCKET}"
 ROLE_ARN="${AWS_ROLE_FOR_CROSS_REGION_BUCKET}"
diff --git a/regtests/t_spark_sql/src/spark_sql_views.sh b/regtests/t_spark_sql/src/spark_sql_views.sh
index ba6b66eee..a47557a23 100755
--- a/regtests/t_spark_sql/src/spark_sql_views.sh
+++ b/regtests/t_spark_sql/src/spark_sql_views.sh
@@ -19,7 +19,7 @@
 # under the License.
 #
 
-SPARK_BEARER_TOKEN="${REGTEST_ROOT_BEARER_TOKEN:-principal:root;realm:POLARIS}"
+SPARK_BEARER_TOKEN="${REGTEST_ROOT_BEARER_TOKEN}"
 
 curl -i -X POST -H "Authorization: Bearer ${SPARK_BEARER_TOKEN}" -H 'Accept: application/json' -H 'Content-Type: application/json' \
   http://${POLARIS_HOST:-localhost}:8181/api/management/v1/catalogs \