From f02383ea63d9578d10a978e14121702f9dde41ac Mon Sep 17 00:00:00 2001 From: Lawrence Qiu Date: Thu, 16 Mar 2023 17:12:04 +0000 Subject: [PATCH 1/2] feat: [biglake] new module for biglake --- gapic-libraries-bom/pom.xml | 7 + java-accessapproval/README.md | 2 + java-accesscontextmanager/README.md | 2 + java-aiplatform/README.md | 2 + java-alloydb/README.md | 2 + java-api-gateway/README.md | 2 + java-apigee-connect/README.md | 2 + java-apigee-registry/README.md | 2 + java-apikeys/README.md | 2 + java-appengine-admin/README.md | 2 + java-area120-tables/README.md | 2 + java-artifact-registry/README.md | 2 + java-asset/README.md | 2 + java-assured-workloads/README.md | 2 + java-automl/README.md | 2 + java-batch/README.md | 2 + java-biglake/.OwlBot.yaml | 36 + java-biglake/.repo-metadata.json | 16 + java-biglake/README.md | 203 + java-biglake/google-cloud-biglake-bom/pom.xml | 44 + java-biglake/google-cloud-biglake/pom.xml | 113 + .../v1alpha1/MetastoreServiceClient.java | 2797 ++++++++++++++ .../v1alpha1/MetastoreServiceSettings.java | 393 ++ .../biglake/v1alpha1/gapic_metadata.json | 72 + .../biglake/v1alpha1/package-info.java | 56 + .../GrpcMetastoreServiceCallableFactory.java | 115 + .../stub/GrpcMetastoreServiceStub.java | 716 ++++ ...tpJsonMetastoreServiceCallableFactory.java | 105 + .../stub/HttpJsonMetastoreServiceStub.java | 1129 ++++++ .../v1alpha1/stub/MetastoreServiceStub.java | 157 + .../stub/MetastoreServiceStubSettings.java | 980 +++++ .../MetastoreServiceClientHttpJsonTest.java | 1820 +++++++++ .../v1alpha1/MetastoreServiceClientTest.java | 1581 ++++++++ .../v1alpha1/MockMetastoreService.java | 59 + .../v1alpha1/MockMetastoreServiceImpl.java | 430 +++ .../pom.xml | 45 + .../v1alpha1/MetastoreServiceGrpc.java | 2426 ++++++++++++ java-biglake/owlbot.py | 36 + java-biglake/pom.xml | 53 + .../clirr-ignored-differences.xml | 19 + .../pom.xml | 37 + .../bigquery/biglake/v1alpha1/Catalog.java | 1805 +++++++++ .../biglake/v1alpha1/CatalogName.java | 223 ++ .../biglake/v1alpha1/CatalogOrBuilder.java | 216 ++ .../biglake/v1alpha1/CheckLockRequest.java | 658 ++++ .../v1alpha1/CheckLockRequestOrBuilder.java | 58 + .../v1alpha1/CreateCatalogRequest.java | 1149 ++++++ .../CreateCatalogRequestOrBuilder.java | 127 + .../v1alpha1/CreateDatabaseRequest.java | 1156 ++++++ .../CreateDatabaseRequestOrBuilder.java | 129 + .../biglake/v1alpha1/CreateLockRequest.java | 963 +++++ .../v1alpha1/CreateLockRequestOrBuilder.java | 102 + .../biglake/v1alpha1/CreateTableRequest.java | 1152 ++++++ .../v1alpha1/CreateTableRequestOrBuilder.java | 129 + .../bigquery/biglake/v1alpha1/Database.java | 2480 ++++++++++++ .../biglake/v1alpha1/DatabaseName.java | 259 ++ .../biglake/v1alpha1/DatabaseOrBuilder.java | 278 ++ .../v1alpha1/DeleteCatalogRequest.java | 665 ++++ .../DeleteCatalogRequestOrBuilder.java | 58 + .../v1alpha1/DeleteDatabaseRequest.java | 665 ++++ .../DeleteDatabaseRequestOrBuilder.java | 58 + .../biglake/v1alpha1/DeleteLockRequest.java | 662 ++++ .../v1alpha1/DeleteLockRequestOrBuilder.java | 58 + .../biglake/v1alpha1/DeleteTableRequest.java | 662 ++++ .../v1alpha1/DeleteTableRequestOrBuilder.java | 58 + .../biglake/v1alpha1/GetCatalogRequest.java | 662 ++++ .../v1alpha1/GetCatalogRequestOrBuilder.java | 58 + .../biglake/v1alpha1/GetDatabaseRequest.java | 662 ++++ .../v1alpha1/GetDatabaseRequestOrBuilder.java | 58 + .../biglake/v1alpha1/GetTableRequest.java | 658 ++++ .../v1alpha1/GetTableRequestOrBuilder.java | 58 + .../biglake/v1alpha1/HiveDatabaseOptions.java | 978 +++++ 
.../HiveDatabaseOptionsOrBuilder.java | 110 + .../biglake/v1alpha1/HiveTableOptions.java | 3364 +++++++++++++++++ .../v1alpha1/HiveTableOptionsOrBuilder.java | 151 + .../biglake/v1alpha1/ListCatalogsRequest.java | 965 +++++ .../ListCatalogsRequestOrBuilder.java | 103 + .../v1alpha1/ListCatalogsResponse.java | 1147 ++++++ .../ListCatalogsResponseOrBuilder.java | 104 + .../v1alpha1/ListDatabasesRequest.java | 973 +++++ .../ListDatabasesRequestOrBuilder.java | 105 + .../v1alpha1/ListDatabasesResponse.java | 1148 ++++++ .../ListDatabasesResponseOrBuilder.java | 104 + .../biglake/v1alpha1/ListLocksRequest.java | 966 +++++ .../v1alpha1/ListLocksRequestOrBuilder.java | 105 + .../biglake/v1alpha1/ListLocksResponse.java | 1136 ++++++ .../v1alpha1/ListLocksResponseOrBuilder.java | 104 + .../biglake/v1alpha1/ListTablesRequest.java | 970 +++++ .../v1alpha1/ListTablesRequestOrBuilder.java | 105 + .../biglake/v1alpha1/ListTablesResponse.java | 1137 ++++++ .../v1alpha1/ListTablesResponseOrBuilder.java | 104 + .../biglake/v1alpha1/LocationName.java | 192 + .../cloud/bigquery/biglake/v1alpha1/Lock.java | 1875 +++++++++ .../bigquery/biglake/v1alpha1/LockName.java | 298 ++ .../biglake/v1alpha1/LockOrBuilder.java | 192 + .../biglake/v1alpha1/MetastoreProto.java | 724 ++++ .../bigquery/biglake/v1alpha1/Table.java | 2476 ++++++++++++ .../bigquery/biglake/v1alpha1/TableName.java | 298 ++ .../biglake/v1alpha1/TableOrBuilder.java | 278 ++ .../v1alpha1/UpdateDatabaseRequest.java | 1076 ++++++ .../UpdateDatabaseRequestOrBuilder.java | 119 + .../biglake/v1alpha1/UpdateTableRequest.java | 1072 ++++++ .../v1alpha1/UpdateTableRequestOrBuilder.java | 119 + .../bigquery/biglake/v1alpha1/metastore.proto | 782 ++++ .../checklock/AsyncCheckLock.java | 51 + .../checklock/SyncCheckLock.java | 48 + .../checklock/SyncCheckLockLockname.java | 42 + .../checklock/SyncCheckLockString.java | 43 + .../SyncCreateSetCredentialsProvider.java | 45 + .../SyncCreateSetCredentialsProvider1.java | 41 + .../create/SyncCreateSetEndpoint.java | 42 + .../createcatalog/AsyncCreateCatalog.java | 52 + .../createcatalog/SyncCreateCatalog.java | 48 + ...reateCatalogLocationnameCatalogString.java | 44 + .../SyncCreateCatalogStringCatalogString.java | 44 + .../createdatabase/AsyncCreateDatabase.java | 52 + .../createdatabase/SyncCreateDatabase.java | 48 + ...eateDatabaseCatalognameDatabaseString.java | 44 + ...yncCreateDatabaseStringDatabaseString.java | 44 + .../createlock/AsyncCreateLock.java | 51 + .../createlock/SyncCreateLock.java | 48 + .../SyncCreateLockDatabasenameLock.java | 43 + .../createlock/SyncCreateLockStringLock.java | 44 + .../createtable/AsyncCreateTable.java | 52 + .../createtable/SyncCreateTable.java | 49 + ...yncCreateTableDatabasenameTableString.java | 44 + .../SyncCreateTableStringTableString.java | 45 + .../deletecatalog/AsyncDeleteCatalog.java | 50 + .../deletecatalog/SyncDeleteCatalog.java | 46 + .../SyncDeleteCatalogCatalogname.java | 42 + .../SyncDeleteCatalogString.java | 42 + .../deletedatabase/AsyncDeleteDatabase.java | 51 + .../deletedatabase/SyncDeleteDatabase.java | 47 + .../SyncDeleteDatabaseDatabasename.java | 42 + .../SyncDeleteDatabaseString.java | 43 + .../deletelock/AsyncDeleteLock.java | 51 + .../deletelock/SyncDeleteLock.java | 48 + .../deletelock/SyncDeleteLockLockname.java | 42 + .../deletelock/SyncDeleteLockString.java | 43 + .../deletetable/AsyncDeleteTable.java | 51 + .../deletetable/SyncDeleteTable.java | 48 + .../deletetable/SyncDeleteTableString.java | 43 + .../deletetable/SyncDeleteTableTablename.java | 
43 + .../getcatalog/AsyncGetCatalog.java | 49 + .../getcatalog/SyncGetCatalog.java | 46 + .../getcatalog/SyncGetCatalogCatalogname.java | 42 + .../getcatalog/SyncGetCatalogString.java | 42 + .../getdatabase/AsyncGetDatabase.java | 50 + .../getdatabase/SyncGetDatabase.java | 47 + .../SyncGetDatabaseDatabasename.java | 42 + .../getdatabase/SyncGetDatabaseString.java | 43 + .../gettable/AsyncGetTable.java | 51 + .../gettable/SyncGetTable.java | 48 + .../gettable/SyncGetTableString.java | 43 + .../gettable/SyncGetTableTablename.java | 43 + .../listcatalogs/AsyncListCatalogs.java | 54 + .../listcatalogs/AsyncListCatalogsPaged.java | 61 + .../listcatalogs/SyncListCatalogs.java | 50 + .../SyncListCatalogsLocationname.java | 44 + .../listcatalogs/SyncListCatalogsString.java | 44 + .../listdatabases/AsyncListDatabases.java | 54 + .../AsyncListDatabasesPaged.java | 62 + .../listdatabases/SyncListDatabases.java | 50 + .../SyncListDatabasesCatalogname.java | 44 + .../SyncListDatabasesString.java | 44 + .../listlocks/AsyncListLocks.java | 54 + .../listlocks/AsyncListLocksPaged.java | 62 + .../listlocks/SyncListLocks.java | 51 + .../listlocks/SyncListLocksDatabasename.java | 44 + .../listlocks/SyncListLocksString.java | 45 + .../listtables/AsyncListTables.java | 55 + .../listtables/AsyncListTablesPaged.java | 62 + .../listtables/SyncListTables.java | 51 + .../SyncListTablesDatabasename.java | 44 + .../listtables/SyncListTablesString.java | 45 + .../updatedatabase/AsyncUpdateDatabase.java | 51 + .../updatedatabase/SyncUpdateDatabase.java | 47 + .../SyncUpdateDatabaseDatabaseFieldmask.java | 43 + .../updatetable/AsyncUpdateTable.java | 50 + .../updatetable/SyncUpdateTable.java | 47 + .../SyncUpdateTableTableFieldmask.java | 43 + .../createcatalog/SyncCreateCatalog.java | 49 + .../createcatalog/SyncCreateCatalog.java | 49 + java-bigqueryconnection/README.md | 2 + java-bigquerydatatransfer/README.md | 2 + java-bigquerymigration/README.md | 2 + java-billing/README.md | 2 + java-billingbudgets/README.md | 2 + java-binary-authorization/README.md | 2 + java-certificate-manager/README.md | 2 + java-channel/README.md | 2 + java-cloudbuild/README.md | 2 + .../README.md | 2 + java-contact-center-insights/README.md | 2 + java-container/README.md | 2 + java-data-fusion/README.md | 2 + java-datacatalog/README.md | 2 + java-datalabeling/README.md | 2 + java-datalineage/README.md | 2 + java-dataplex/README.md | 2 + java-dataproc-metastore/README.md | 2 + java-dataproc/README.md | 2 + java-datastream/README.md | 2 + java-debugger-client/README.md | 2 + java-dialogflow-cx/README.md | 2 + java-dialogflow/README.md | 2 + java-distributedcloudedge/README.md | 2 + java-dlp/README.md | 2 + java-dms/README.md | 2 + java-document-ai/README.md | 2 + java-errorreporting/README.md | 2 + java-essential-contacts/README.md | 2 + java-filestore/README.md | 2 + java-game-servers/README.md | 2 + java-gke-connect-gateway/README.md | 2 + java-gkehub/README.md | 2 + java-grafeas/README.md | 2 + java-gsuite-addons/README.md | 2 + java-iamcredentials/README.md | 2 + java-iot/README.md | 2 + java-kms/README.md | 2 + java-life-sciences/README.md | 2 + java-managed-identities/README.md | 2 + java-maps-routing/README.md | 2 + java-mediatranslation/README.md | 2 + java-memcache/README.md | 2 + java-network-management/README.md | 2 + java-network-security/README.md | 2 + java-networkconnectivity/README.md | 2 + java-notebooks/README.md | 2 + java-optimization/README.md | 2 + java-orchestration-airflow/README.md | 2 + java-os-config/README.md | 2 + 
java-os-login/README.md | 2 + java-phishingprotection/README.md | 2 + java-policy-troubleshooter/README.md | 2 + java-private-catalog/README.md | 2 + java-profiler/README.md | 2 + java-publicca/README.md | 2 + java-recaptchaenterprise/README.md | 2 + java-recommendations-ai/README.md | 2 + java-redis/README.md | 2 + java-resource-settings/README.md | 2 + java-resourcemanager/README.md | 2 + java-retail/README.md | 2 + java-run/README.md | 2 + java-scheduler/README.md | 2 + java-secretmanager/README.md | 2 + java-security-private-ca/README.md | 2 + java-securitycenter-settings/README.md | 2 + java-securitycenter/README.md | 2 + java-service-control/README.md | 2 + java-service-management/README.md | 2 + java-service-usage/README.md | 2 + java-servicedirectory/README.md | 2 + java-shell/README.md | 2 + java-speech/README.md | 2 + java-storage-transfer/README.md | 2 + java-talent/README.md | 2 + java-tasks/README.md | 2 + java-tpu/README.md | 2 + java-trace/README.md | 2 + java-translate/README.md | 2 + java-video-intelligence/README.md | 2 + java-video-live-stream/README.md | 2 + java-video-transcoder/README.md | 2 + java-vision/README.md | 2 + java-vmmigration/README.md | 2 + java-vmwareengine/README.md | 2 + java-vpcaccess/README.md | 2 + java-websecurityscanner/README.md | 2 + java-workflow-executions/README.md | 2 + java-workflows/README.md | 2 + pom.xml | 1 + versions.txt | 3 + 275 files changed, 56706 insertions(+) create mode 100644 java-biglake/.OwlBot.yaml create mode 100644 java-biglake/.repo-metadata.json create mode 100644 java-biglake/README.md create mode 100644 java-biglake/google-cloud-biglake-bom/pom.xml create mode 100644 java-biglake/google-cloud-biglake/pom.xml create mode 100644 java-biglake/google-cloud-biglake/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/MetastoreServiceClient.java create mode 100644 java-biglake/google-cloud-biglake/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/MetastoreServiceSettings.java create mode 100644 java-biglake/google-cloud-biglake/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/gapic_metadata.json create mode 100644 java-biglake/google-cloud-biglake/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/package-info.java create mode 100644 java-biglake/google-cloud-biglake/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/stub/GrpcMetastoreServiceCallableFactory.java create mode 100644 java-biglake/google-cloud-biglake/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/stub/GrpcMetastoreServiceStub.java create mode 100644 java-biglake/google-cloud-biglake/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/stub/HttpJsonMetastoreServiceCallableFactory.java create mode 100644 java-biglake/google-cloud-biglake/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/stub/HttpJsonMetastoreServiceStub.java create mode 100644 java-biglake/google-cloud-biglake/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/stub/MetastoreServiceStub.java create mode 100644 java-biglake/google-cloud-biglake/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/stub/MetastoreServiceStubSettings.java create mode 100644 java-biglake/google-cloud-biglake/src/test/java/com/google/cloud/bigquery/biglake/v1alpha1/MetastoreServiceClientHttpJsonTest.java create mode 100644 java-biglake/google-cloud-biglake/src/test/java/com/google/cloud/bigquery/biglake/v1alpha1/MetastoreServiceClientTest.java create mode 100644 
java-biglake/google-cloud-biglake/src/test/java/com/google/cloud/bigquery/biglake/v1alpha1/MockMetastoreService.java create mode 100644 java-biglake/google-cloud-biglake/src/test/java/com/google/cloud/bigquery/biglake/v1alpha1/MockMetastoreServiceImpl.java create mode 100644 java-biglake/grpc-google-cloud-biglake-v1alpha1/pom.xml create mode 100644 java-biglake/grpc-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/MetastoreServiceGrpc.java create mode 100644 java-biglake/owlbot.py create mode 100644 java-biglake/pom.xml create mode 100644 java-biglake/proto-google-cloud-biglake-v1alpha1/clirr-ignored-differences.xml create mode 100644 java-biglake/proto-google-cloud-biglake-v1alpha1/pom.xml create mode 100644 java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/Catalog.java create mode 100644 java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/CatalogName.java create mode 100644 java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/CatalogOrBuilder.java create mode 100644 java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/CheckLockRequest.java create mode 100644 java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/CheckLockRequestOrBuilder.java create mode 100644 java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/CreateCatalogRequest.java create mode 100644 java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/CreateCatalogRequestOrBuilder.java create mode 100644 java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/CreateDatabaseRequest.java create mode 100644 java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/CreateDatabaseRequestOrBuilder.java create mode 100644 java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/CreateLockRequest.java create mode 100644 java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/CreateLockRequestOrBuilder.java create mode 100644 java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/CreateTableRequest.java create mode 100644 java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/CreateTableRequestOrBuilder.java create mode 100644 java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/Database.java create mode 100644 java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/DatabaseName.java create mode 100644 java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/DatabaseOrBuilder.java create mode 100644 java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/DeleteCatalogRequest.java create mode 100644 java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/DeleteCatalogRequestOrBuilder.java create mode 100644 
java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/DeleteDatabaseRequest.java create mode 100644 java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/DeleteDatabaseRequestOrBuilder.java create mode 100644 java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/DeleteLockRequest.java create mode 100644 java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/DeleteLockRequestOrBuilder.java create mode 100644 java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/DeleteTableRequest.java create mode 100644 java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/DeleteTableRequestOrBuilder.java create mode 100644 java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/GetCatalogRequest.java create mode 100644 java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/GetCatalogRequestOrBuilder.java create mode 100644 java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/GetDatabaseRequest.java create mode 100644 java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/GetDatabaseRequestOrBuilder.java create mode 100644 java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/GetTableRequest.java create mode 100644 java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/GetTableRequestOrBuilder.java create mode 100644 java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/HiveDatabaseOptions.java create mode 100644 java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/HiveDatabaseOptionsOrBuilder.java create mode 100644 java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/HiveTableOptions.java create mode 100644 java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/HiveTableOptionsOrBuilder.java create mode 100644 java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/ListCatalogsRequest.java create mode 100644 java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/ListCatalogsRequestOrBuilder.java create mode 100644 java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/ListCatalogsResponse.java create mode 100644 java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/ListCatalogsResponseOrBuilder.java create mode 100644 java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/ListDatabasesRequest.java create mode 100644 java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/ListDatabasesRequestOrBuilder.java create mode 100644 java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/ListDatabasesResponse.java create mode 100644 
java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/ListDatabasesResponseOrBuilder.java create mode 100644 java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/ListLocksRequest.java create mode 100644 java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/ListLocksRequestOrBuilder.java create mode 100644 java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/ListLocksResponse.java create mode 100644 java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/ListLocksResponseOrBuilder.java create mode 100644 java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/ListTablesRequest.java create mode 100644 java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/ListTablesRequestOrBuilder.java create mode 100644 java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/ListTablesResponse.java create mode 100644 java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/ListTablesResponseOrBuilder.java create mode 100644 java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/LocationName.java create mode 100644 java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/Lock.java create mode 100644 java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/LockName.java create mode 100644 java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/LockOrBuilder.java create mode 100644 java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/MetastoreProto.java create mode 100644 java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/Table.java create mode 100644 java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/TableName.java create mode 100644 java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/TableOrBuilder.java create mode 100644 java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/UpdateDatabaseRequest.java create mode 100644 java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/UpdateDatabaseRequestOrBuilder.java create mode 100644 java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/UpdateTableRequest.java create mode 100644 java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/UpdateTableRequestOrBuilder.java create mode 100644 java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/proto/google/cloud/bigquery/biglake/v1alpha1/metastore.proto create mode 100644 java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/checklock/AsyncCheckLock.java create mode 100644 java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/checklock/SyncCheckLock.java create 
mode 100644 java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/checklock/SyncCheckLockLockname.java create mode 100644 java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/checklock/SyncCheckLockString.java create mode 100644 java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/create/SyncCreateSetCredentialsProvider.java create mode 100644 java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/create/SyncCreateSetCredentialsProvider1.java create mode 100644 java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/create/SyncCreateSetEndpoint.java create mode 100644 java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/createcatalog/AsyncCreateCatalog.java create mode 100644 java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/createcatalog/SyncCreateCatalog.java create mode 100644 java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/createcatalog/SyncCreateCatalogLocationnameCatalogString.java create mode 100644 java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/createcatalog/SyncCreateCatalogStringCatalogString.java create mode 100644 java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/createdatabase/AsyncCreateDatabase.java create mode 100644 java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/createdatabase/SyncCreateDatabase.java create mode 100644 java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/createdatabase/SyncCreateDatabaseCatalognameDatabaseString.java create mode 100644 java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/createdatabase/SyncCreateDatabaseStringDatabaseString.java create mode 100644 java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/createlock/AsyncCreateLock.java create mode 100644 java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/createlock/SyncCreateLock.java create mode 100644 java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/createlock/SyncCreateLockDatabasenameLock.java create mode 100644 java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/createlock/SyncCreateLockStringLock.java create mode 100644 java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/createtable/AsyncCreateTable.java create mode 100644 java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/createtable/SyncCreateTable.java create mode 100644 java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/createtable/SyncCreateTableDatabasenameTableString.java create mode 100644 java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/createtable/SyncCreateTableStringTableString.java create mode 100644 
java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/deletecatalog/AsyncDeleteCatalog.java create mode 100644 java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/deletecatalog/SyncDeleteCatalog.java create mode 100644 java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/deletecatalog/SyncDeleteCatalogCatalogname.java create mode 100644 java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/deletecatalog/SyncDeleteCatalogString.java create mode 100644 java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/deletedatabase/AsyncDeleteDatabase.java create mode 100644 java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/deletedatabase/SyncDeleteDatabase.java create mode 100644 java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/deletedatabase/SyncDeleteDatabaseDatabasename.java create mode 100644 java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/deletedatabase/SyncDeleteDatabaseString.java create mode 100644 java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/deletelock/AsyncDeleteLock.java create mode 100644 java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/deletelock/SyncDeleteLock.java create mode 100644 java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/deletelock/SyncDeleteLockLockname.java create mode 100644 java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/deletelock/SyncDeleteLockString.java create mode 100644 java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/deletetable/AsyncDeleteTable.java create mode 100644 java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/deletetable/SyncDeleteTable.java create mode 100644 java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/deletetable/SyncDeleteTableString.java create mode 100644 java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/deletetable/SyncDeleteTableTablename.java create mode 100644 java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/getcatalog/AsyncGetCatalog.java create mode 100644 java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/getcatalog/SyncGetCatalog.java create mode 100644 java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/getcatalog/SyncGetCatalogCatalogname.java create mode 100644 java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/getcatalog/SyncGetCatalogString.java create mode 100644 java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/getdatabase/AsyncGetDatabase.java create mode 100644 java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/getdatabase/SyncGetDatabase.java create mode 100644 
java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/getdatabase/SyncGetDatabaseDatabasename.java create mode 100644 java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/getdatabase/SyncGetDatabaseString.java create mode 100644 java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/gettable/AsyncGetTable.java create mode 100644 java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/gettable/SyncGetTable.java create mode 100644 java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/gettable/SyncGetTableString.java create mode 100644 java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/gettable/SyncGetTableTablename.java create mode 100644 java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/listcatalogs/AsyncListCatalogs.java create mode 100644 java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/listcatalogs/AsyncListCatalogsPaged.java create mode 100644 java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/listcatalogs/SyncListCatalogs.java create mode 100644 java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/listcatalogs/SyncListCatalogsLocationname.java create mode 100644 java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/listcatalogs/SyncListCatalogsString.java create mode 100644 java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/listdatabases/AsyncListDatabases.java create mode 100644 java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/listdatabases/AsyncListDatabasesPaged.java create mode 100644 java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/listdatabases/SyncListDatabases.java create mode 100644 java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/listdatabases/SyncListDatabasesCatalogname.java create mode 100644 java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/listdatabases/SyncListDatabasesString.java create mode 100644 java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/listlocks/AsyncListLocks.java create mode 100644 java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/listlocks/AsyncListLocksPaged.java create mode 100644 java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/listlocks/SyncListLocks.java create mode 100644 java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/listlocks/SyncListLocksDatabasename.java create mode 100644 java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/listlocks/SyncListLocksString.java create mode 100644 java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/listtables/AsyncListTables.java create mode 100644 
java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/listtables/AsyncListTablesPaged.java create mode 100644 java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/listtables/SyncListTables.java create mode 100644 java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/listtables/SyncListTablesDatabasename.java create mode 100644 java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/listtables/SyncListTablesString.java create mode 100644 java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/updatedatabase/AsyncUpdateDatabase.java create mode 100644 java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/updatedatabase/SyncUpdateDatabase.java create mode 100644 java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/updatedatabase/SyncUpdateDatabaseDatabaseFieldmask.java create mode 100644 java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/updatetable/AsyncUpdateTable.java create mode 100644 java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/updatetable/SyncUpdateTable.java create mode 100644 java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/updatetable/SyncUpdateTableTableFieldmask.java create mode 100644 java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservicesettings/createcatalog/SyncCreateCatalog.java create mode 100644 java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/stub/metastoreservicestubsettings/createcatalog/SyncCreateCatalog.java diff --git a/gapic-libraries-bom/pom.xml b/gapic-libraries-bom/pom.xml index b54e4e65a899..74792b56484f 100644 --- a/gapic-libraries-bom/pom.xml +++ b/gapic-libraries-bom/pom.xml @@ -196,6 +196,13 @@ pom import + + com.google.cloud + google-cloud-biglake-bom + 0.0.1-SNAPSHOT + pom + import + com.google.cloud google-cloud-bigqueryconnection-bom diff --git a/java-accessapproval/README.md b/java-accessapproval/README.md index d3eeef613ee9..be70127ea920 100644 --- a/java-accessapproval/README.md +++ b/java-accessapproval/README.md @@ -14,6 +14,7 @@ Java idiomatic client for [Access Approval][product-docs]. If you are using Maven, add this to your pom.xml file: + ```xml @@ -34,6 +35,7 @@ If you are using SBT, add this to your dependencies: ```Scala libraryDependencies += "com.google.cloud" % "google-cloud-accessapproval" % "2.13.0" ``` + ## Authentication diff --git a/java-accesscontextmanager/README.md b/java-accesscontextmanager/README.md index 88607bda3ebb..4f52d91426f6 100644 --- a/java-accesscontextmanager/README.md +++ b/java-accesscontextmanager/README.md @@ -14,6 +14,7 @@ Java idiomatic client for [Identity Access Context Manager][product-docs]. 
If you are using Maven, add this to your pom.xml file: + ```xml @@ -34,6 +35,7 @@ If you are using SBT, add this to your dependencies: ```Scala libraryDependencies += "com.google.cloud" % "google-identity-accesscontextmanager" % "1.13.0" ``` + ## Authentication diff --git a/java-aiplatform/README.md b/java-aiplatform/README.md index a839d7994183..ca7f605192b5 100644 --- a/java-aiplatform/README.md +++ b/java-aiplatform/README.md @@ -14,6 +14,7 @@ Java idiomatic client for [Vertex AI][product-docs]. If you are using Maven, add this to your pom.xml file: + ```xml @@ -34,6 +35,7 @@ If you are using SBT, add this to your dependencies: ```Scala libraryDependencies += "com.google.cloud" % "google-cloud-aiplatform" % "3.13.0" ``` + ## Authentication diff --git a/java-alloydb/README.md b/java-alloydb/README.md index de4e72d1b7fa..0b8683e3ceb4 100644 --- a/java-alloydb/README.md +++ b/java-alloydb/README.md @@ -17,6 +17,7 @@ Java idiomatic client for [AlloyDB][product-docs]. If you are using Maven, add this to your pom.xml file: + ```xml @@ -37,6 +38,7 @@ If you are using SBT, add this to your dependencies: ```Scala libraryDependencies += "com.google.cloud" % "google-cloud-alloydb" % "0.1.0" ``` + ## Authentication diff --git a/java-api-gateway/README.md b/java-api-gateway/README.md index f25a1f318ee7..43075b0e2a1f 100644 --- a/java-api-gateway/README.md +++ b/java-api-gateway/README.md @@ -14,6 +14,7 @@ Java idiomatic client for [API Gateway][product-docs]. If you are using Maven, add this to your pom.xml file: + ```xml @@ -34,6 +35,7 @@ If you are using SBT, add this to your dependencies: ```Scala libraryDependencies += "com.google.cloud" % "google-cloud-api-gateway" % "2.12.0" ``` + ## Authentication diff --git a/java-apigee-connect/README.md b/java-apigee-connect/README.md index b977e0cce8ab..23a757a6c946 100644 --- a/java-apigee-connect/README.md +++ b/java-apigee-connect/README.md @@ -14,6 +14,7 @@ Java idiomatic client for [Apigee Connect][product-docs]. If you are using Maven, add this to your pom.xml file: + ```xml @@ -34,6 +35,7 @@ If you are using SBT, add this to your dependencies: ```Scala libraryDependencies += "com.google.cloud" % "google-cloud-apigee-connect" % "2.12.0" ``` + ## Authentication diff --git a/java-apigee-registry/README.md b/java-apigee-registry/README.md index aceee97acc03..045a043affa2 100644 --- a/java-apigee-registry/README.md +++ b/java-apigee-registry/README.md @@ -17,6 +17,7 @@ Java idiomatic client for [Registry API][product-docs]. If you are using Maven, add this to your pom.xml file: + ```xml @@ -37,6 +38,7 @@ If you are using SBT, add this to your dependencies: ```Scala libraryDependencies += "com.google.cloud" % "google-cloud-apigee-registry" % "0.12.0" ``` + ## Authentication diff --git a/java-apikeys/README.md b/java-apikeys/README.md index 8571f6d3deec..f4832b666c51 100644 --- a/java-apikeys/README.md +++ b/java-apikeys/README.md @@ -17,6 +17,7 @@ Java idiomatic client for [API Keys API][product-docs]. If you are using Maven, add this to your pom.xml file: + ```xml @@ -37,6 +38,7 @@ If you are using SBT, add this to your dependencies: ```Scala libraryDependencies += "com.google.cloud" % "google-cloud-apikeys" % "0.10.0" ``` + ## Authentication diff --git a/java-appengine-admin/README.md b/java-appengine-admin/README.md index 08787e393ebe..62c9d9c5b8a4 100644 --- a/java-appengine-admin/README.md +++ b/java-appengine-admin/README.md @@ -14,6 +14,7 @@ Java idiomatic client for [App Engine Admin API][product-docs]. 
If you are using Maven, add this to your pom.xml file: + ```xml @@ -34,6 +35,7 @@ If you are using SBT, add this to your dependencies: ```Scala libraryDependencies += "com.google.cloud" % "google-cloud-appengine-admin" % "2.12.0" ``` + ## Authentication diff --git a/java-area120-tables/README.md b/java-area120-tables/README.md index b728ffcbe9e0..d0f9d3ee8f11 100644 --- a/java-area120-tables/README.md +++ b/java-area120-tables/README.md @@ -17,6 +17,7 @@ Java idiomatic client for [Area 120 Tables][product-docs]. If you are using Maven, add this to your pom.xml file: + ```xml @@ -37,6 +38,7 @@ If you are using SBT, add this to your dependencies: ```Scala libraryDependencies += "com.google.area120" % "google-area120-tables" % "0.16.0" ``` + ## Authentication diff --git a/java-artifact-registry/README.md b/java-artifact-registry/README.md index 0a9d1cf762ce..d0c526067585 100644 --- a/java-artifact-registry/README.md +++ b/java-artifact-registry/README.md @@ -14,6 +14,7 @@ Java idiomatic client for [Artifact Registry][product-docs]. If you are using Maven, add this to your pom.xml file: + ```xml @@ -34,6 +35,7 @@ If you are using SBT, add this to your dependencies: ```Scala libraryDependencies += "com.google.cloud" % "google-cloud-artifact-registry" % "1.11.0" ``` + ## Authentication diff --git a/java-asset/README.md b/java-asset/README.md index 6a57544f9dc9..43fd52ba1a31 100644 --- a/java-asset/README.md +++ b/java-asset/README.md @@ -14,6 +14,7 @@ Java idiomatic client for [Cloud Asset Inventory][product-docs]. If you are using Maven, add this to your pom.xml file: + ```xml @@ -34,6 +35,7 @@ If you are using SBT, add this to your dependencies: ```Scala libraryDependencies += "com.google.cloud" % "google-cloud-asset" % "3.16.0" ``` + ## Authentication diff --git a/java-assured-workloads/README.md b/java-assured-workloads/README.md index b9bd7b927332..a5492006e0f2 100644 --- a/java-assured-workloads/README.md +++ b/java-assured-workloads/README.md @@ -14,6 +14,7 @@ Java idiomatic client for [Assured Workloads for Government][product-docs]. If you are using Maven, add this to your pom.xml file: + ```xml @@ -34,6 +35,7 @@ If you are using SBT, add this to your dependencies: ```Scala libraryDependencies += "com.google.cloud" % "google-cloud-assured-workloads" % "2.12.0" ``` + ## Authentication diff --git a/java-automl/README.md b/java-automl/README.md index d2f0f7db9361..9da04b080cac 100644 --- a/java-automl/README.md +++ b/java-automl/README.md @@ -14,6 +14,7 @@ Java idiomatic client for [Cloud Auto ML][product-docs]. If you are using Maven, add this to your pom.xml file: + ```xml @@ -34,6 +35,7 @@ If you are using SBT, add this to your dependencies: ```Scala libraryDependencies += "com.google.cloud" % "google-cloud-automl" % "2.12.0" ``` + ## Authentication diff --git a/java-batch/README.md b/java-batch/README.md index 8eb706827cd8..707b1612c23c 100644 --- a/java-batch/README.md +++ b/java-batch/README.md @@ -17,6 +17,7 @@ Java idiomatic client for [Cloud Batch][product-docs]. 
If you are using Maven, add this to your pom.xml file: + ```xml @@ -37,6 +38,7 @@ If you are using SBT, add this to your dependencies: ```Scala libraryDependencies += "com.google.cloud" % "google-cloud-batch" % "0.12.0" ``` + ## Authentication diff --git a/java-biglake/.OwlBot.yaml b/java-biglake/.OwlBot.yaml new file mode 100644 index 000000000000..08e729e40778 --- /dev/null +++ b/java-biglake/.OwlBot.yaml @@ -0,0 +1,36 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +deep-remove-regex: +- "/java-biglake/grpc-google-.*/src" +- "/java-biglake/proto-google-.*/src" +- "/java-biglake/google-.*/src" +- "/java-biglake/samples/snippets/generated" + +deep-preserve-regex: +- "/java-biglake/google-.*/src/test/java/com/google/cloud/.*/v.*/it/IT.*Test.java" + +deep-copy-regex: +- source: "/google/cloud/bigquery/biglake/(v.*)/.*-java/proto-google-.*/src" + dest: "/owl-bot-staging/java-biglake/$1/proto-google-cloud-biglake-$1/src" +- source: "/google/cloud/bigquery/biglake/(v.*)/.*-java/grpc-google-.*/src" + dest: "/owl-bot-staging/java-biglake/$1/grpc-google-cloud-biglake-$1/src" +- source: "/google/cloud/bigquery/biglake/(v.*)/.*-java/gapic-google-.*/src" + dest: "/owl-bot-staging/java-biglake/$1/google-cloud-biglake/src" +- source: "/google/cloud/bigquery/biglake/(v.*)/.*-java/samples/snippets/generated" + dest: "/owl-bot-staging/java-biglake/$1/samples/snippets/generated" + + +api-name: biglake \ No newline at end of file diff --git a/java-biglake/.repo-metadata.json b/java-biglake/.repo-metadata.json new file mode 100644 index 000000000000..48ce008ca788 --- /dev/null +++ b/java-biglake/.repo-metadata.json @@ -0,0 +1,16 @@ +{ + "api_shortname": "biglake", + "name_pretty": "BigLake", + "product_documentation": "https://cloud.google.com/biglake", + "api_description": "The BigLake API provides access to BigLake Metastore, a serverless, fully managed, and highly available metastore for open-source data that can be used for querying Apache Iceberg tables in BigQuery.", + "client_documentation": "https://cloud.google.com/java/docs/reference/google-cloud-biglake/latest/overview", + "release_level": "preview", + "transport": "grpc", + "language": "java", + "repo": "googleapis/java-biglake", + "repo_short": "java-biglake", + "distribution_name": "com.google.cloud:google-cloud-biglake", + "api_id": "biglake.googleapis.com", + "library_type": "GAPIC_AUTO", + "requires_billing": true +} \ No newline at end of file diff --git a/java-biglake/README.md b/java-biglake/README.md new file mode 100644 index 000000000000..8efc9dbf8cba --- /dev/null +++ b/java-biglake/README.md @@ -0,0 +1,203 @@ +# Google BigLake Client for Java + +Java idiomatic client for [BigLake][product-docs]. + +[![Maven][maven-version-image]][maven-version-link] +![Stability][stability-image] + +- [Product Documentation][product-docs] +- [Client Library Documentation][javadocs] + +> Note: This client is a work-in-progress, and may occasionally +> make backwards-incompatible changes. 
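In addition to the direct dependency shown in the Quickstart below, this patch registers a `google-cloud-biglake-bom` in `gapic-libraries-bom`. The following is a minimal, hypothetical Maven sketch of consuming that BOM; the `0.0.1-SNAPSHOT` version is the in-repo value from this patch, and a real project would use a released version instead.

```xml
<dependencyManagement>
  <dependencies>
    <!-- Hypothetical consumer-side import of the BOM added by this patch. -->
    <dependency>
      <groupId>com.google.cloud</groupId>
      <artifactId>google-cloud-biglake-bom</artifactId>
      <version>0.0.1-SNAPSHOT</version>
      <type>pom</type>
      <scope>import</scope>
    </dependency>
  </dependencies>
</dependencyManagement>

<dependencies>
  <!-- The version is then inherited from the BOM. -->
  <dependency>
    <groupId>com.google.cloud</groupId>
    <artifactId>google-cloud-biglake</artifactId>
  </dependency>
</dependencies>
```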
+ + +## Quickstart + + +If you are using Maven, add this to your pom.xml file: + + + +```xml + + com.google.cloud + google-cloud-biglake + 0.0.0 + +``` + +If you are using Gradle without BOM, add this to your dependencies: + +```Groovy +implementation 'com.google.cloud:google-cloud-biglake:0.0.0' +``` + +If you are using SBT, add this to your dependencies: + +```Scala +libraryDependencies += "com.google.cloud" % "google-cloud-biglake" % "0.0.0" +``` + + +## Authentication + +See the [Authentication][authentication] section in the base directory's README. + +## Authorization + +The client application making API calls must be granted [authorization scopes][auth-scopes] required for the desired BigLake APIs, and the authenticated principal must have the [IAM role(s)][predefined-iam-roles] required to access GCP resources using the BigLake API calls. + +## Getting Started + +### Prerequisites + +You will need a [Google Cloud Platform Console][developer-console] project with the BigLake [API enabled][enable-api]. +You will need to [enable billing][enable-billing] to use Google BigLake. +[Follow these instructions][create-project] to get your project set up. You will also need to set up the local development environment by +[installing the Google Cloud Command Line Interface][cloud-cli] and running the following commands in command line: +`gcloud auth login` and `gcloud config set project [YOUR PROJECT ID]`. + +### Installation and setup + +You'll need to obtain the `google-cloud-biglake` library. See the [Quickstart](#quickstart) section +to add `google-cloud-biglake` as a dependency in your code. + +## About BigLake + + +[BigLake][product-docs] The BigLake API provides access to BigLake Metastore, a serverless, fully managed, and highly available metastore for open-source data that can be used for querying Apache Iceberg tables in BigQuery. + +See the [BigLake client library docs][javadocs] to learn how to +use this BigLake Client Library. + + + + + + +## Troubleshooting + +To get help, follow the instructions in the [shared Troubleshooting document][troubleshooting]. + +## Transport + +BigLake uses gRPC for the transport layer. + +## Supported Java Versions + +Java 8 or above is required for using this client. + +Google's Java client libraries, +[Google Cloud Client Libraries][cloudlibs] +and +[Google Cloud API Libraries][apilibs], +follow the +[Oracle Java SE support roadmap][oracle] +(see the Oracle Java SE Product Releases section). + +### For new development + +In general, new feature development occurs with support for the lowest Java +LTS version covered by Oracle's Premier Support (which typically lasts 5 years +from initial General Availability). If the minimum required JVM for a given +library is changed, it is accompanied by a [semver][semver] major release. + +Java 11 and (in September 2021) Java 17 are the best choices for new +development. + +### Keeping production systems current + +Google tests its client libraries with all current LTS versions covered by +Oracle's Extended Support (which typically lasts 8 years from initial +General Availability). + +#### Legacy support + +Google's client libraries support legacy versions of Java runtimes with long +term stable libraries that don't receive feature updates on a best efforts basis +as it may not be possible to backport all patches. + +Google provides updates on a best efforts basis to apps that continue to use +Java 7, though apps might need to upgrade to current versions of the library +that supports their JVM. 
+ +#### Where to find specific information + +The latest versions and the supported Java versions are identified on +the individual GitHub repository `github.com/GoogleAPIs/java-SERVICENAME` +and on [google-cloud-java][g-c-j]. + +## Versioning + + +This library follows [Semantic Versioning](http://semver.org/). + + +It is currently in major version zero (``0.y.z``), which means that anything may change at any time +and the public API should not be considered stable. + + +## Contributing + + +Contributions to this library are always welcome and highly encouraged. + +See [CONTRIBUTING][contributing] for more information how to get started. + +Please note that this project is released with a Contributor Code of Conduct. By participating in +this project you agree to abide by its terms. See [Code of Conduct][code-of-conduct] for more +information. + + +## License + +Apache 2.0 - See [LICENSE][license] for more information. + +## CI Status + +Java Version | Status +------------ | ------ +Java 8 | [![Kokoro CI][kokoro-badge-image-2]][kokoro-badge-link-2] +Java 8 OSX | [![Kokoro CI][kokoro-badge-image-3]][kokoro-badge-link-3] +Java 8 Windows | [![Kokoro CI][kokoro-badge-image-4]][kokoro-badge-link-4] +Java 11 | [![Kokoro CI][kokoro-badge-image-5]][kokoro-badge-link-5] + +Java is a registered trademark of Oracle and/or its affiliates. + +[product-docs]: https://cloud.google.com/biglake +[javadocs]: https://cloud.google.com/java/docs/reference/google-cloud-biglake/latest/overview +[kokoro-badge-image-1]: http://storage.googleapis.com/cloud-devrel-public/java/badges/java-biglake/java7.svg +[kokoro-badge-link-1]: http://storage.googleapis.com/cloud-devrel-public/java/badges/java-biglake/java7.html +[kokoro-badge-image-2]: http://storage.googleapis.com/cloud-devrel-public/java/badges/java-biglake/java8.svg +[kokoro-badge-link-2]: http://storage.googleapis.com/cloud-devrel-public/java/badges/java-biglake/java8.html +[kokoro-badge-image-3]: http://storage.googleapis.com/cloud-devrel-public/java/badges/java-biglake/java8-osx.svg +[kokoro-badge-link-3]: http://storage.googleapis.com/cloud-devrel-public/java/badges/java-biglake/java8-osx.html +[kokoro-badge-image-4]: http://storage.googleapis.com/cloud-devrel-public/java/badges/java-biglake/java8-win.svg +[kokoro-badge-link-4]: http://storage.googleapis.com/cloud-devrel-public/java/badges/java-biglake/java8-win.html +[kokoro-badge-image-5]: http://storage.googleapis.com/cloud-devrel-public/java/badges/java-biglake/java11.svg +[kokoro-badge-link-5]: http://storage.googleapis.com/cloud-devrel-public/java/badges/java-biglake/java11.html +[stability-image]: https://img.shields.io/badge/stability-preview-yellow +[maven-version-image]: https://img.shields.io/maven-central/v/com.google.cloud/google-cloud-biglake.svg +[maven-version-link]: https://central.sonatype.com/artifact/com.google.cloud/google-cloud-biglake/0.0.0 +[authentication]: https://github.com/googleapis/google-cloud-java#authentication +[auth-scopes]: https://developers.google.com/identity/protocols/oauth2/scopes +[predefined-iam-roles]: https://cloud.google.com/iam/docs/understanding-roles#predefined_roles +[iam-policy]: https://cloud.google.com/iam/docs/overview#cloud-iam-policy +[developer-console]: https://console.developers.google.com/ +[create-project]: https://cloud.google.com/resource-manager/docs/creating-managing-projects +[cloud-cli]: https://cloud.google.com/cli +[troubleshooting]: 
https://github.com/googleapis/google-cloud-common/blob/main/troubleshooting/readme.md#troubleshooting +[contributing]: https://github.com/googleapis/java-biglake/blob/main/CONTRIBUTING.md +[code-of-conduct]: https://github.com/googleapis/java-biglake/blob/main/CODE_OF_CONDUCT.md#contributor-code-of-conduct +[license]: https://github.com/googleapis/java-biglake/blob/main/LICENSE +[enable-billing]: https://cloud.google.com/apis/docs/getting-started#enabling_billing +[enable-api]: https://console.cloud.google.com/flows/enableapi?apiid=biglake.googleapis.com +[libraries-bom]: https://github.com/GoogleCloudPlatform/cloud-opensource-java/wiki/The-Google-Cloud-Platform-Libraries-BOM +[shell_img]: https://gstatic.com/cloudssh/images/open-btn.png + +[semver]: https://semver.org/ +[cloudlibs]: https://cloud.google.com/apis/docs/client-libraries-explained +[apilibs]: https://cloud.google.com/apis/docs/client-libraries-explained#google_api_client_libraries +[oracle]: https://www.oracle.com/java/technologies/java-se-support-roadmap.html +[g-c-j]: http://github.com/googleapis/google-cloud-java diff --git a/java-biglake/google-cloud-biglake-bom/pom.xml b/java-biglake/google-cloud-biglake-bom/pom.xml new file mode 100644 index 000000000000..f74616604375 --- /dev/null +++ b/java-biglake/google-cloud-biglake-bom/pom.xml @@ -0,0 +1,44 @@ + + + 4.0.0 + com.google.cloud + google-cloud-biglake-bom + 0.0.1-SNAPSHOT + pom + + + com.google.cloud + google-cloud-pom-parent + 1.7.0-SNAPSHOT + ../../google-cloud-pom-parent/pom.xml + + + Google BigLake BOM + + BOM for BigLake + + + + true + + + + + + com.google.cloud + google-cloud-biglake + 0.0.1-SNAPSHOT + + + com.google.api.grpc + grpc-google-cloud-biglake-v1alpha1 + 0.0.1-SNAPSHOT + + + com.google.api.grpc + proto-google-cloud-biglake-v1alpha1 + 0.0.1-SNAPSHOT + + + + diff --git a/java-biglake/google-cloud-biglake/pom.xml b/java-biglake/google-cloud-biglake/pom.xml new file mode 100644 index 000000000000..35dec0603886 --- /dev/null +++ b/java-biglake/google-cloud-biglake/pom.xml @@ -0,0 +1,113 @@ + + + 4.0.0 + com.google.cloud + google-cloud-biglake + 0.0.1-SNAPSHOT + jar + Google BigLake + BigLake The BigLake API provides access to BigLake Metastore, a serverless, fully managed, and highly available metastore for open-source data that can be used for querying Apache Iceberg tables in BigQuery. 
+ + com.google.cloud + google-cloud-biglake-parent + 0.0.1-SNAPSHOT + + + google-cloud-biglake + + + + io.grpc + grpc-api + + + io.grpc + grpc-stub + + + io.grpc + grpc-protobuf + + + com.google.api + api-common + + + com.google.protobuf + protobuf-java + + + com.google.api.grpc + proto-google-common-protos + + + + com.google.api.grpc + proto-google-cloud-biglake-v1alpha1 + + + com.google.guava + guava + + + com.google.api + gax + + + com.google.api + gax-grpc + + + com.google.api + gax-httpjson + + + com.google.api.grpc + grpc-google-common-protos + + + com.google.api.grpc + proto-google-iam-v1 + + + com.google.api.grpc + grpc-google-iam-v1 + + + org.threeten + threetenbp + + + + + junit + junit + test + + + + com.google.api.grpc + grpc-google-cloud-biglake-v1alpha1 + test + + + + com.google.api + gax + testlib + test + + + com.google.api + gax-grpc + testlib + test + + + com.google.api + gax-httpjson + testlib + test + + + \ No newline at end of file diff --git a/java-biglake/google-cloud-biglake/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/MetastoreServiceClient.java b/java-biglake/google-cloud-biglake/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/MetastoreServiceClient.java new file mode 100644 index 000000000000..e40523fc4fd0 --- /dev/null +++ b/java-biglake/google-cloud-biglake/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/MetastoreServiceClient.java @@ -0,0 +1,2797 @@ +/* + * Copyright 2022 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.biglake.v1alpha1; + +import com.google.api.core.ApiFuture; +import com.google.api.core.ApiFutures; +import com.google.api.core.BetaApi; +import com.google.api.gax.core.BackgroundResource; +import com.google.api.gax.paging.AbstractFixedSizeCollection; +import com.google.api.gax.paging.AbstractPage; +import com.google.api.gax.paging.AbstractPagedListResponse; +import com.google.api.gax.rpc.PageContext; +import com.google.api.gax.rpc.UnaryCallable; +import com.google.cloud.bigquery.biglake.v1alpha1.stub.MetastoreServiceStub; +import com.google.cloud.bigquery.biglake.v1alpha1.stub.MetastoreServiceStubSettings; +import com.google.common.util.concurrent.MoreExecutors; +import com.google.protobuf.Empty; +import com.google.protobuf.FieldMask; +import java.io.IOException; +import java.util.List; +import java.util.concurrent.TimeUnit; +import javax.annotation.Generated; + +// AUTO-GENERATED DOCUMENTATION AND CLASS. +/** + * Service Description: BigLake Metastore is a serverless, highly available, multi-tenant runtime + * metastore for Google Cloud Data Analytics products. + * + *

The BigLake Metastore API defines the following resource model: + * + *

    + *
  • A collection of Google Cloud projects: `/projects/*` + *
  • Each project has a collection of available locations: `/locations/*` + *
  • Each location has a collection of catalogs: `/catalogs/*` + *
  • Each catalog has a collection of databases: `/databases/*` + *
  • Each database has a collection of tables: `/tables/*` + *
+ * + *

This class provides the ability to make remote calls to the backing service through method + * calls that map to API methods. Sample code to get started: + * + *

{@code
+ * // This snippet has been automatically generated and should be regarded as a code template only.
+ * // It will require modifications to work:
+ * // - It may require correct/in-range values for request initialization.
+ * // - It may require specifying regional endpoints when creating the service client as shown in
+ * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
+ * try (MetastoreServiceClient metastoreServiceClient = MetastoreServiceClient.create()) {
+ *   LocationName parent = LocationName.of("[PROJECT]", "[LOCATION]");
+ *   Catalog catalog = Catalog.newBuilder().build();
+ *   String catalogId = "catalogId1455933204";
+ *   Catalog response = metastoreServiceClient.createCatalog(parent, catalog, catalogId);
+ * }
+ * }
+ * + *

Note: close() needs to be called on the MetastoreServiceClient object to clean up resources + * such as threads. In the example above, try-with-resources is used, which automatically calls + * close(). + * + *
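+ * If try-with-resources is not convenient at the call site, a minimal sketch of closing the
+ * client explicitly (close() comes from BackgroundResource, which this client implements):
+ *
+ * {@code
+ * MetastoreServiceClient metastoreServiceClient = MetastoreServiceClient.create();
+ * try {
+ *   // ... make calls with metastoreServiceClient ...
+ * } finally {
+ *   // Release threads and other background resources held by the client.
+ *   metastoreServiceClient.close();
+ * }
+ * }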

The surface of this class includes several types of Java methods for each of the API's + * methods: + * + *

    + *
  1. A "flattened" method. With this type of method, the fields of the request type have been + * converted into function parameters. It may be the case that not all fields are available as + * parameters, and not every API method will have a flattened method entry point. + *
  2. A "request object" method. This type of method only takes one parameter, a request object, + * which must be constructed before the call. Not every API method will have a request object + * method. + *
  3. A "callable" method. This type of method takes no parameters and returns an immutable API + * callable object, which can be used to initiate calls to the service. + *
+ * + *

See the individual methods for example code. + * + *
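+ * As a quick illustration of these three calling styles, here is a minimal sketch using
+ * getCatalog (the bracketed IDs are placeholders only):
+ *
+ * {@code
+ * try (MetastoreServiceClient metastoreServiceClient = MetastoreServiceClient.create()) {
+ *   CatalogName name = CatalogName.of("[PROJECT]", "[LOCATION]", "[CATALOG]");
+ *   // 1. Flattened method: pass the fields directly.
+ *   Catalog catalog = metastoreServiceClient.getCatalog(name);
+ *   // 2. Request object method: build the request explicitly.
+ *   GetCatalogRequest request = GetCatalogRequest.newBuilder().setName(name.toString()).build();
+ *   Catalog fromRequest = metastoreServiceClient.getCatalog(request);
+ *   // 3. Callable method: obtain the callable, then call it (or futureCall for async use).
+ *   Catalog fromCallable = metastoreServiceClient.getCatalogCallable().call(request);
+ * }
+ * }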

Many parameters require resource names to be formatted in a particular way. To assist with + * these names, this class includes a format method for each type of name, and additionally a parse + * method to extract the individual identifiers contained within names that are returned. + * + *
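+ * For example, a minimal sketch of formatting and parsing a catalog name (assuming the usual
+ * parse and getter helpers on the generated CatalogName class; the IDs are placeholders):
+ *
+ * {@code
+ * // Build the formatted resource name from its components.
+ * String name = CatalogName.of("my-project", "us", "my-catalog").toString();
+ * // Parse it back to read the individual identifiers.
+ * CatalogName parsed = CatalogName.parse(name);
+ * String catalogId = parsed.getCatalog();
+ * }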

This class can be customized by passing in a custom instance of MetastoreServiceSettings to + * create(). For example: + * + *

To customize credentials: + * + *

{@code
+ * // This snippet has been automatically generated and should be regarded as a code template only.
+ * // It will require modifications to work:
+ * // - It may require correct/in-range values for request initialization.
+ * // - It may require specifying regional endpoints when creating the service client as shown in
+ * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
+ * MetastoreServiceSettings metastoreServiceSettings =
+ *     MetastoreServiceSettings.newBuilder()
+ *         .setCredentialsProvider(FixedCredentialsProvider.create(myCredentials))
+ *         .build();
+ * MetastoreServiceClient metastoreServiceClient =
+ *     MetastoreServiceClient.create(metastoreServiceSettings);
+ * }
+ * + *

To customize the endpoint: + * + *

{@code
+ * // This snippet has been automatically generated and should be regarded as a code template only.
+ * // It will require modifications to work:
+ * // - It may require correct/in-range values for request initialization.
+ * // - It may require specifying regional endpoints when creating the service client as shown in
+ * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
+ * MetastoreServiceSettings metastoreServiceSettings =
+ *     MetastoreServiceSettings.newBuilder().setEndpoint(myEndpoint).build();
+ * MetastoreServiceClient metastoreServiceClient =
+ *     MetastoreServiceClient.create(metastoreServiceSettings);
+ * }
+ * + *

To use REST (HTTP1.1/JSON) transport (instead of gRPC) for sending and receiving requests over + * the wire: + * + *

{@code
+ * // This snippet has been automatically generated and should be regarded as a code template only.
+ * // It will require modifications to work:
+ * // - It may require correct/in-range values for request initialization.
+ * // - It may require specifying regional endpoints when creating the service client as shown in
+ * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
+ * MetastoreServiceSettings metastoreServiceSettings =
+ *     MetastoreServiceSettings.newHttpJsonBuilder().build();
+ * MetastoreServiceClient metastoreServiceClient =
+ *     MetastoreServiceClient.create(metastoreServiceSettings);
+ * }
+ * + *

Please refer to the GitHub repository's samples for more quickstart code snippets. + */ +@BetaApi +@Generated("by gapic-generator-java") +public class MetastoreServiceClient implements BackgroundResource { + private final MetastoreServiceSettings settings; + private final MetastoreServiceStub stub; + + /** Constructs an instance of MetastoreServiceClient with default settings. */ + public static final MetastoreServiceClient create() throws IOException { + return create(MetastoreServiceSettings.newBuilder().build()); + } + + /** + * Constructs an instance of MetastoreServiceClient, using the given settings. The channels are + * created based on the settings passed in, or defaults for any settings that are not set. + */ + public static final MetastoreServiceClient create(MetastoreServiceSettings settings) + throws IOException { + return new MetastoreServiceClient(settings); + } + + /** + * Constructs an instance of MetastoreServiceClient, using the given stub for making calls. This + * is for advanced usage - prefer using create(MetastoreServiceSettings). + */ + public static final MetastoreServiceClient create(MetastoreServiceStub stub) { + return new MetastoreServiceClient(stub); + } + + /** + * Constructs an instance of MetastoreServiceClient, using the given settings. This is protected + * so that it is easy to make a subclass, but otherwise, the static factory methods should be + * preferred. + */ + protected MetastoreServiceClient(MetastoreServiceSettings settings) throws IOException { + this.settings = settings; + this.stub = ((MetastoreServiceStubSettings) settings.getStubSettings()).createStub(); + } + + protected MetastoreServiceClient(MetastoreServiceStub stub) { + this.settings = null; + this.stub = stub; + } + + public final MetastoreServiceSettings getSettings() { + return settings; + } + + public MetastoreServiceStub getStub() { + return stub; + } + + // AUTO-GENERATED DOCUMENTATION AND METHOD. + /** + * Creates a new catalog. + * + *

Sample code: + * + *

{@code
+   * // This snippet has been automatically generated and should be regarded as a code template only.
+   * // It will require modifications to work:
+   * // - It may require correct/in-range values for request initialization.
+   * // - It may require specifying regional endpoints when creating the service client as shown in
+   * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
+   * try (MetastoreServiceClient metastoreServiceClient = MetastoreServiceClient.create()) {
+   *   LocationName parent = LocationName.of("[PROJECT]", "[LOCATION]");
+   *   Catalog catalog = Catalog.newBuilder().build();
+   *   String catalogId = "catalogId1455933204";
+   *   Catalog response = metastoreServiceClient.createCatalog(parent, catalog, catalogId);
+   * }
+   * }
+ * + * @param parent Required. The parent resource where this catalog will be created. Format: + * projects/{project_id_or_number}/locations/{location_id} + * @param catalog Required. The catalog to create. The `name` field does not need to be provided. + * @param catalogId Required. The ID to use for the catalog, which will become the final component + * of the catalog's resource name. + * @throws com.google.api.gax.rpc.ApiException if the remote call fails + */ + public final Catalog createCatalog(LocationName parent, Catalog catalog, String catalogId) { + CreateCatalogRequest request = + CreateCatalogRequest.newBuilder() + .setParent(parent == null ? null : parent.toString()) + .setCatalog(catalog) + .setCatalogId(catalogId) + .build(); + return createCatalog(request); + } + + // AUTO-GENERATED DOCUMENTATION AND METHOD. + /** + * Creates a new catalog. + * + *

Sample code: + * + *

{@code
+   * // This snippet has been automatically generated and should be regarded as a code template only.
+   * // It will require modifications to work:
+   * // - It may require correct/in-range values for request initialization.
+   * // - It may require specifying regional endpoints when creating the service client as shown in
+   * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
+   * try (MetastoreServiceClient metastoreServiceClient = MetastoreServiceClient.create()) {
+   *   String parent = LocationName.of("[PROJECT]", "[LOCATION]").toString();
+   *   Catalog catalog = Catalog.newBuilder().build();
+   *   String catalogId = "catalogId1455933204";
+   *   Catalog response = metastoreServiceClient.createCatalog(parent, catalog, catalogId);
+   * }
+   * }
+ * + * @param parent Required. The parent resource where this catalog will be created. Format: + * projects/{project_id_or_number}/locations/{location_id} + * @param catalog Required. The catalog to create. The `name` field does not need to be provided. + * @param catalogId Required. The ID to use for the catalog, which will become the final component + * of the catalog's resource name. + * @throws com.google.api.gax.rpc.ApiException if the remote call fails + */ + public final Catalog createCatalog(String parent, Catalog catalog, String catalogId) { + CreateCatalogRequest request = + CreateCatalogRequest.newBuilder() + .setParent(parent) + .setCatalog(catalog) + .setCatalogId(catalogId) + .build(); + return createCatalog(request); + } + + // AUTO-GENERATED DOCUMENTATION AND METHOD. + /** + * Creates a new catalog. + * + *

Sample code: + * + *

{@code
+   * // This snippet has been automatically generated and should be regarded as a code template only.
+   * // It will require modifications to work:
+   * // - It may require correct/in-range values for request initialization.
+   * // - It may require specifying regional endpoints when creating the service client as shown in
+   * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
+   * try (MetastoreServiceClient metastoreServiceClient = MetastoreServiceClient.create()) {
+   *   CreateCatalogRequest request =
+   *       CreateCatalogRequest.newBuilder()
+   *           .setParent(LocationName.of("[PROJECT]", "[LOCATION]").toString())
+   *           .setCatalog(Catalog.newBuilder().build())
+   *           .setCatalogId("catalogId1455933204")
+   *           .build();
+   *   Catalog response = metastoreServiceClient.createCatalog(request);
+   * }
+   * }
+ * + * @param request The request object containing all of the parameters for the API call. + * @throws com.google.api.gax.rpc.ApiException if the remote call fails + */ + public final Catalog createCatalog(CreateCatalogRequest request) { + return createCatalogCallable().call(request); + } + + // AUTO-GENERATED DOCUMENTATION AND METHOD. + /** + * Creates a new catalog. + * + *

Sample code: + * + *

{@code
+   * // This snippet has been automatically generated and should be regarded as a code template only.
+   * // It will require modifications to work:
+   * // - It may require correct/in-range values for request initialization.
+   * // - It may require specifying regional endpoints when creating the service client as shown in
+   * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
+   * try (MetastoreServiceClient metastoreServiceClient = MetastoreServiceClient.create()) {
+   *   CreateCatalogRequest request =
+   *       CreateCatalogRequest.newBuilder()
+   *           .setParent(LocationName.of("[PROJECT]", "[LOCATION]").toString())
+   *           .setCatalog(Catalog.newBuilder().build())
+   *           .setCatalogId("catalogId1455933204")
+   *           .build();
+   *   ApiFuture<Catalog> future =
+   *       metastoreServiceClient.createCatalogCallable().futureCall(request);
+   *   // Do something.
+   *   Catalog response = future.get();
+   * }
+   * }
+ */ + public final UnaryCallable createCatalogCallable() { + return stub.createCatalogCallable(); + } + + // AUTO-GENERATED DOCUMENTATION AND METHOD. + /** + * Deletes an existing catalog specified by the catalog ID. + * + *

Sample code: + * + *

{@code
+   * // This snippet has been automatically generated and should be regarded as a code template only.
+   * // It will require modifications to work:
+   * // - It may require correct/in-range values for request initialization.
+   * // - It may require specifying regional endpoints when creating the service client as shown in
+   * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
+   * try (MetastoreServiceClient metastoreServiceClient = MetastoreServiceClient.create()) {
+   *   CatalogName name = CatalogName.of("[PROJECT]", "[LOCATION]", "[CATALOG]");
+   *   Catalog response = metastoreServiceClient.deleteCatalog(name);
+   * }
+   * }
+ * + * @param name Required. The name of the catalog to delete. Format: + * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id} + * @throws com.google.api.gax.rpc.ApiException if the remote call fails + */ + public final Catalog deleteCatalog(CatalogName name) { + DeleteCatalogRequest request = + DeleteCatalogRequest.newBuilder().setName(name == null ? null : name.toString()).build(); + return deleteCatalog(request); + } + + // AUTO-GENERATED DOCUMENTATION AND METHOD. + /** + * Deletes an existing catalog specified by the catalog ID. + * + *

Sample code: + * + *

{@code
+   * // This snippet has been automatically generated and should be regarded as a code template only.
+   * // It will require modifications to work:
+   * // - It may require correct/in-range values for request initialization.
+   * // - It may require specifying regional endpoints when creating the service client as shown in
+   * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
+   * try (MetastoreServiceClient metastoreServiceClient = MetastoreServiceClient.create()) {
+   *   String name = CatalogName.of("[PROJECT]", "[LOCATION]", "[CATALOG]").toString();
+   *   Catalog response = metastoreServiceClient.deleteCatalog(name);
+   * }
+   * }
+ * + * @param name Required. The name of the catalog to delete. Format: + * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id} + * @throws com.google.api.gax.rpc.ApiException if the remote call fails + */ + public final Catalog deleteCatalog(String name) { + DeleteCatalogRequest request = DeleteCatalogRequest.newBuilder().setName(name).build(); + return deleteCatalog(request); + } + + // AUTO-GENERATED DOCUMENTATION AND METHOD. + /** + * Deletes an existing catalog specified by the catalog ID. + * + *

Sample code: + * + *

{@code
+   * // This snippet has been automatically generated and should be regarded as a code template only.
+   * // It will require modifications to work:
+   * // - It may require correct/in-range values for request initialization.
+   * // - It may require specifying regional endpoints when creating the service client as shown in
+   * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
+   * try (MetastoreServiceClient metastoreServiceClient = MetastoreServiceClient.create()) {
+   *   DeleteCatalogRequest request =
+   *       DeleteCatalogRequest.newBuilder()
+   *           .setName(CatalogName.of("[PROJECT]", "[LOCATION]", "[CATALOG]").toString())
+   *           .build();
+   *   Catalog response = metastoreServiceClient.deleteCatalog(request);
+   * }
+   * }
+ * + * @param request The request object containing all of the parameters for the API call. + * @throws com.google.api.gax.rpc.ApiException if the remote call fails + */ + public final Catalog deleteCatalog(DeleteCatalogRequest request) { + return deleteCatalogCallable().call(request); + } + + // AUTO-GENERATED DOCUMENTATION AND METHOD. + /** + * Deletes an existing catalog specified by the catalog ID. + * + *

Sample code: + * + *

{@code
+   * // This snippet has been automatically generated and should be regarded as a code template only.
+   * // It will require modifications to work:
+   * // - It may require correct/in-range values for request initialization.
+   * // - It may require specifying regional endpoints when creating the service client as shown in
+   * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
+   * try (MetastoreServiceClient metastoreServiceClient = MetastoreServiceClient.create()) {
+   *   DeleteCatalogRequest request =
+   *       DeleteCatalogRequest.newBuilder()
+   *           .setName(CatalogName.of("[PROJECT]", "[LOCATION]", "[CATALOG]").toString())
+   *           .build();
+   *   ApiFuture<Catalog> future =
+   *       metastoreServiceClient.deleteCatalogCallable().futureCall(request);
+   *   // Do something.
+   *   Catalog response = future.get();
+   * }
+   * }
+ */ + public final UnaryCallable deleteCatalogCallable() { + return stub.deleteCatalogCallable(); + } + + // AUTO-GENERATED DOCUMENTATION AND METHOD. + /** + * Gets the catalog specified by the resource name. + * + *

Sample code: + * + *

{@code
+   * // This snippet has been automatically generated and should be regarded as a code template only.
+   * // It will require modifications to work:
+   * // - It may require correct/in-range values for request initialization.
+   * // - It may require specifying regional endpoints when creating the service client as shown in
+   * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
+   * try (MetastoreServiceClient metastoreServiceClient = MetastoreServiceClient.create()) {
+   *   CatalogName name = CatalogName.of("[PROJECT]", "[LOCATION]", "[CATALOG]");
+   *   Catalog response = metastoreServiceClient.getCatalog(name);
+   * }
+   * }
+ * + * @param name Required. The name of the catalog to retrieve. Format: + * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id} + * @throws com.google.api.gax.rpc.ApiException if the remote call fails + */ + public final Catalog getCatalog(CatalogName name) { + GetCatalogRequest request = + GetCatalogRequest.newBuilder().setName(name == null ? null : name.toString()).build(); + return getCatalog(request); + } + + // AUTO-GENERATED DOCUMENTATION AND METHOD. + /** + * Gets the catalog specified by the resource name. + * + *

Sample code: + * + *

{@code
+   * // This snippet has been automatically generated and should be regarded as a code template only.
+   * // It will require modifications to work:
+   * // - It may require correct/in-range values for request initialization.
+   * // - It may require specifying regional endpoints when creating the service client as shown in
+   * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
+   * try (MetastoreServiceClient metastoreServiceClient = MetastoreServiceClient.create()) {
+   *   String name = CatalogName.of("[PROJECT]", "[LOCATION]", "[CATALOG]").toString();
+   *   Catalog response = metastoreServiceClient.getCatalog(name);
+   * }
+   * }
+ * + * @param name Required. The name of the catalog to retrieve. Format: + * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id} + * @throws com.google.api.gax.rpc.ApiException if the remote call fails + */ + public final Catalog getCatalog(String name) { + GetCatalogRequest request = GetCatalogRequest.newBuilder().setName(name).build(); + return getCatalog(request); + } + + // AUTO-GENERATED DOCUMENTATION AND METHOD. + /** + * Gets the catalog specified by the resource name. + * + *

Sample code: + * + *

{@code
+   * // This snippet has been automatically generated and should be regarded as a code template only.
+   * // It will require modifications to work:
+   * // - It may require correct/in-range values for request initialization.
+   * // - It may require specifying regional endpoints when creating the service client as shown in
+   * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
+   * try (MetastoreServiceClient metastoreServiceClient = MetastoreServiceClient.create()) {
+   *   GetCatalogRequest request =
+   *       GetCatalogRequest.newBuilder()
+   *           .setName(CatalogName.of("[PROJECT]", "[LOCATION]", "[CATALOG]").toString())
+   *           .build();
+   *   Catalog response = metastoreServiceClient.getCatalog(request);
+   * }
+   * }
+ * + * @param request The request object containing all of the parameters for the API call. + * @throws com.google.api.gax.rpc.ApiException if the remote call fails + */ + public final Catalog getCatalog(GetCatalogRequest request) { + return getCatalogCallable().call(request); + } + + // AUTO-GENERATED DOCUMENTATION AND METHOD. + /** + * Gets the catalog specified by the resource name. + * + *

Sample code: + * + *

{@code
+   * // This snippet has been automatically generated and should be regarded as a code template only.
+   * // It will require modifications to work:
+   * // - It may require correct/in-range values for request initialization.
+   * // - It may require specifying regional endpoints when creating the service client as shown in
+   * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
+   * try (MetastoreServiceClient metastoreServiceClient = MetastoreServiceClient.create()) {
+   *   GetCatalogRequest request =
+   *       GetCatalogRequest.newBuilder()
+   *           .setName(CatalogName.of("[PROJECT]", "[LOCATION]", "[CATALOG]").toString())
+   *           .build();
+   *   ApiFuture<Catalog> future = metastoreServiceClient.getCatalogCallable().futureCall(request);
+   *   // Do something.
+   *   Catalog response = future.get();
+   * }
+   * }
+ */ + public final UnaryCallable getCatalogCallable() { + return stub.getCatalogCallable(); + } + + // AUTO-GENERATED DOCUMENTATION AND METHOD. + /** + * List all catalogs in a specified project. + * + *

Sample code: + * + *

{@code
+   * // This snippet has been automatically generated and should be regarded as a code template only.
+   * // It will require modifications to work:
+   * // - It may require correct/in-range values for request initialization.
+   * // - It may require specifying regional endpoints when creating the service client as shown in
+   * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
+   * try (MetastoreServiceClient metastoreServiceClient = MetastoreServiceClient.create()) {
+   *   LocationName parent = LocationName.of("[PROJECT]", "[LOCATION]");
+   *   for (Catalog element : metastoreServiceClient.listCatalogs(parent).iterateAll()) {
+   *     // doThingsWith(element);
+   *   }
+   * }
+   * }
+ * + * @param parent Required. The parent, which owns this collection of catalogs. Format: + * projects/{project_id_or_number}/locations/{location_id} + * @throws com.google.api.gax.rpc.ApiException if the remote call fails + */ + public final ListCatalogsPagedResponse listCatalogs(LocationName parent) { + ListCatalogsRequest request = + ListCatalogsRequest.newBuilder() + .setParent(parent == null ? null : parent.toString()) + .build(); + return listCatalogs(request); + } + + // AUTO-GENERATED DOCUMENTATION AND METHOD. + /** + * List all catalogs in a specified project. + * + *

Sample code: + * + *

{@code
+   * // This snippet has been automatically generated and should be regarded as a code template only.
+   * // It will require modifications to work:
+   * // - It may require correct/in-range values for request initialization.
+   * // - It may require specifying regional endpoints when creating the service client as shown in
+   * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
+   * try (MetastoreServiceClient metastoreServiceClient = MetastoreServiceClient.create()) {
+   *   String parent = LocationName.of("[PROJECT]", "[LOCATION]").toString();
+   *   for (Catalog element : metastoreServiceClient.listCatalogs(parent).iterateAll()) {
+   *     // doThingsWith(element);
+   *   }
+   * }
+   * }
+ * + * @param parent Required. The parent, which owns this collection of catalogs. Format: + * projects/{project_id_or_number}/locations/{location_id} + * @throws com.google.api.gax.rpc.ApiException if the remote call fails + */ + public final ListCatalogsPagedResponse listCatalogs(String parent) { + ListCatalogsRequest request = ListCatalogsRequest.newBuilder().setParent(parent).build(); + return listCatalogs(request); + } + + // AUTO-GENERATED DOCUMENTATION AND METHOD. + /** + * List all catalogs in a specified project. + * + *

Sample code: + * + *

{@code
+   * // This snippet has been automatically generated and should be regarded as a code template only.
+   * // It will require modifications to work:
+   * // - It may require correct/in-range values for request initialization.
+   * // - It may require specifying regional endpoints when creating the service client as shown in
+   * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
+   * try (MetastoreServiceClient metastoreServiceClient = MetastoreServiceClient.create()) {
+   *   ListCatalogsRequest request =
+   *       ListCatalogsRequest.newBuilder()
+   *           .setParent(LocationName.of("[PROJECT]", "[LOCATION]").toString())
+   *           .setPageSize(883849137)
+   *           .setPageToken("pageToken873572522")
+   *           .build();
+   *   for (Catalog element : metastoreServiceClient.listCatalogs(request).iterateAll()) {
+   *     // doThingsWith(element);
+   *   }
+   * }
+   * }
+ * + * @param request The request object containing all of the parameters for the API call. + * @throws com.google.api.gax.rpc.ApiException if the remote call fails + */ + public final ListCatalogsPagedResponse listCatalogs(ListCatalogsRequest request) { + return listCatalogsPagedCallable().call(request); + } + + // AUTO-GENERATED DOCUMENTATION AND METHOD. + /** + * List all catalogs in a specified project. + * + *

Sample code: + * + *

{@code
+   * // This snippet has been automatically generated and should be regarded as a code template only.
+   * // It will require modifications to work:
+   * // - It may require correct/in-range values for request initialization.
+   * // - It may require specifying regional endpoints when creating the service client as shown in
+   * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
+   * try (MetastoreServiceClient metastoreServiceClient = MetastoreServiceClient.create()) {
+   *   ListCatalogsRequest request =
+   *       ListCatalogsRequest.newBuilder()
+   *           .setParent(LocationName.of("[PROJECT]", "[LOCATION]").toString())
+   *           .setPageSize(883849137)
+   *           .setPageToken("pageToken873572522")
+   *           .build();
+   *   ApiFuture<ListCatalogsPagedResponse> future =
+   *       metastoreServiceClient.listCatalogsPagedCallable().futureCall(request);
+   *   // Do something.
+   *   for (Catalog element : future.get().iterateAll()) {
+   *     // doThingsWith(element);
+   *   }
+   * }
+   * }
+ */ + public final UnaryCallable + listCatalogsPagedCallable() { + return stub.listCatalogsPagedCallable(); + } + + // AUTO-GENERATED DOCUMENTATION AND METHOD. + /** + * List all catalogs in a specified project. + * + *

Sample code: + * + *

{@code
+   * // This snippet has been automatically generated and should be regarded as a code template only.
+   * // It will require modifications to work:
+   * // - It may require correct/in-range values for request initialization.
+   * // - It may require specifying regional endpoints when creating the service client as shown in
+   * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
+   * try (MetastoreServiceClient metastoreServiceClient = MetastoreServiceClient.create()) {
+   *   ListCatalogsRequest request =
+   *       ListCatalogsRequest.newBuilder()
+   *           .setParent(LocationName.of("[PROJECT]", "[LOCATION]").toString())
+   *           .setPageSize(883849137)
+   *           .setPageToken("pageToken873572522")
+   *           .build();
+   *   while (true) {
+   *     ListCatalogsResponse response = metastoreServiceClient.listCatalogsCallable().call(request);
+   *     for (Catalog element : response.getCatalogsList()) {
+   *       // doThingsWith(element);
+   *     }
+   *     String nextPageToken = response.getNextPageToken();
+   *     if (!Strings.isNullOrEmpty(nextPageToken)) {
+   *       request = request.toBuilder().setPageToken(nextPageToken).build();
+   *     } else {
+   *       break;
+   *     }
+   *   }
+   * }
+   * }
+ */ + public final UnaryCallable listCatalogsCallable() { + return stub.listCatalogsCallable(); + } + + // AUTO-GENERATED DOCUMENTATION AND METHOD. + /** + * Creates a new database. + * + *

Sample code: + * + *

{@code
+   * // This snippet has been automatically generated and should be regarded as a code template only.
+   * // It will require modifications to work:
+   * // - It may require correct/in-range values for request initialization.
+   * // - It may require specifying regional endpoints when creating the service client as shown in
+   * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
+   * try (MetastoreServiceClient metastoreServiceClient = MetastoreServiceClient.create()) {
+   *   CatalogName parent = CatalogName.of("[PROJECT]", "[LOCATION]", "[CATALOG]");
+   *   Database database = Database.newBuilder().build();
+   *   String databaseId = "databaseId1688905718";
+   *   Database response = metastoreServiceClient.createDatabase(parent, database, databaseId);
+   * }
+   * }
+ * + * @param parent Required. The parent resource where this database will be created. Format: + * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id} + * @param database Required. The database to create. The `name` field does not need to be + * provided. + * @param databaseId Required. The ID to use for the database, which will become the final + * component of the database's resource name. + * @throws com.google.api.gax.rpc.ApiException if the remote call fails + */ + public final Database createDatabase(CatalogName parent, Database database, String databaseId) { + CreateDatabaseRequest request = + CreateDatabaseRequest.newBuilder() + .setParent(parent == null ? null : parent.toString()) + .setDatabase(database) + .setDatabaseId(databaseId) + .build(); + return createDatabase(request); + } + + // AUTO-GENERATED DOCUMENTATION AND METHOD. + /** + * Creates a new database. + * + *

Sample code: + * + *

{@code
+   * // This snippet has been automatically generated and should be regarded as a code template only.
+   * // It will require modifications to work:
+   * // - It may require correct/in-range values for request initialization.
+   * // - It may require specifying regional endpoints when creating the service client as shown in
+   * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
+   * try (MetastoreServiceClient metastoreServiceClient = MetastoreServiceClient.create()) {
+   *   String parent = CatalogName.of("[PROJECT]", "[LOCATION]", "[CATALOG]").toString();
+   *   Database database = Database.newBuilder().build();
+   *   String databaseId = "databaseId1688905718";
+   *   Database response = metastoreServiceClient.createDatabase(parent, database, databaseId);
+   * }
+   * }
+ * + * @param parent Required. The parent resource where this database will be created. Format: + * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id} + * @param database Required. The database to create. The `name` field does not need to be + * provided. + * @param databaseId Required. The ID to use for the database, which will become the final + * component of the database's resource name. + * @throws com.google.api.gax.rpc.ApiException if the remote call fails + */ + public final Database createDatabase(String parent, Database database, String databaseId) { + CreateDatabaseRequest request = + CreateDatabaseRequest.newBuilder() + .setParent(parent) + .setDatabase(database) + .setDatabaseId(databaseId) + .build(); + return createDatabase(request); + } + + // AUTO-GENERATED DOCUMENTATION AND METHOD. + /** + * Creates a new database. + * + *

Sample code: + * + *

{@code
+   * // This snippet has been automatically generated and should be regarded as a code template only.
+   * // It will require modifications to work:
+   * // - It may require correct/in-range values for request initialization.
+   * // - It may require specifying regional endpoints when creating the service client as shown in
+   * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
+   * try (MetastoreServiceClient metastoreServiceClient = MetastoreServiceClient.create()) {
+   *   CreateDatabaseRequest request =
+   *       CreateDatabaseRequest.newBuilder()
+   *           .setParent(CatalogName.of("[PROJECT]", "[LOCATION]", "[CATALOG]").toString())
+   *           .setDatabase(Database.newBuilder().build())
+   *           .setDatabaseId("databaseId1688905718")
+   *           .build();
+   *   Database response = metastoreServiceClient.createDatabase(request);
+   * }
+   * }
+ * + * @param request The request object containing all of the parameters for the API call. + * @throws com.google.api.gax.rpc.ApiException if the remote call fails + */ + public final Database createDatabase(CreateDatabaseRequest request) { + return createDatabaseCallable().call(request); + } + + // AUTO-GENERATED DOCUMENTATION AND METHOD. + /** + * Creates a new database. + * + *

Sample code: + * + *

{@code
+   * // This snippet has been automatically generated and should be regarded as a code template only.
+   * // It will require modifications to work:
+   * // - It may require correct/in-range values for request initialization.
+   * // - It may require specifying regional endpoints when creating the service client as shown in
+   * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
+   * try (MetastoreServiceClient metastoreServiceClient = MetastoreServiceClient.create()) {
+   *   CreateDatabaseRequest request =
+   *       CreateDatabaseRequest.newBuilder()
+   *           .setParent(CatalogName.of("[PROJECT]", "[LOCATION]", "[CATALOG]").toString())
+   *           .setDatabase(Database.newBuilder().build())
+   *           .setDatabaseId("databaseId1688905718")
+   *           .build();
+   *   ApiFuture<Database> future =
+   *       metastoreServiceClient.createDatabaseCallable().futureCall(request);
+   *   // Do something.
+   *   Database response = future.get();
+   * }
+   * }
+ */ + public final UnaryCallable createDatabaseCallable() { + return stub.createDatabaseCallable(); + } + + // AUTO-GENERATED DOCUMENTATION AND METHOD. + /** + * Deletes an existing database specified by the database ID. + * + *

Sample code: + * + *

{@code
+   * // This snippet has been automatically generated and should be regarded as a code template only.
+   * // It will require modifications to work:
+   * // - It may require correct/in-range values for request initialization.
+   * // - It may require specifying regional endpoints when creating the service client as shown in
+   * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
+   * try (MetastoreServiceClient metastoreServiceClient = MetastoreServiceClient.create()) {
+   *   DatabaseName name = DatabaseName.of("[PROJECT]", "[LOCATION]", "[CATALOG]", "[DATABASE]");
+   *   Database response = metastoreServiceClient.deleteDatabase(name);
+   * }
+   * }
+ * + * @param name Required. The name of the database to delete. Format: + * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id} + * @throws com.google.api.gax.rpc.ApiException if the remote call fails + */ + public final Database deleteDatabase(DatabaseName name) { + DeleteDatabaseRequest request = + DeleteDatabaseRequest.newBuilder().setName(name == null ? null : name.toString()).build(); + return deleteDatabase(request); + } + + // AUTO-GENERATED DOCUMENTATION AND METHOD. + /** + * Deletes an existing database specified by the database ID. + * + *

Sample code: + * + *

{@code
+   * // This snippet has been automatically generated and should be regarded as a code template only.
+   * // It will require modifications to work:
+   * // - It may require correct/in-range values for request initialization.
+   * // - It may require specifying regional endpoints when creating the service client as shown in
+   * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
+   * try (MetastoreServiceClient metastoreServiceClient = MetastoreServiceClient.create()) {
+   *   String name =
+   *       DatabaseName.of("[PROJECT]", "[LOCATION]", "[CATALOG]", "[DATABASE]").toString();
+   *   Database response = metastoreServiceClient.deleteDatabase(name);
+   * }
+   * }
+ * + * @param name Required. The name of the database to delete. Format: + * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id} + * @throws com.google.api.gax.rpc.ApiException if the remote call fails + */ + public final Database deleteDatabase(String name) { + DeleteDatabaseRequest request = DeleteDatabaseRequest.newBuilder().setName(name).build(); + return deleteDatabase(request); + } + + // AUTO-GENERATED DOCUMENTATION AND METHOD. + /** + * Deletes an existing database specified by the database ID. + * + *

Sample code: + * + *

{@code
+   * // This snippet has been automatically generated and should be regarded as a code template only.
+   * // It will require modifications to work:
+   * // - It may require correct/in-range values for request initialization.
+   * // - It may require specifying regional endpoints when creating the service client as shown in
+   * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
+   * try (MetastoreServiceClient metastoreServiceClient = MetastoreServiceClient.create()) {
+   *   DeleteDatabaseRequest request =
+   *       DeleteDatabaseRequest.newBuilder()
+   *           .setName(
+   *               DatabaseName.of("[PROJECT]", "[LOCATION]", "[CATALOG]", "[DATABASE]").toString())
+   *           .build();
+   *   Database response = metastoreServiceClient.deleteDatabase(request);
+   * }
+   * }
+ * + * @param request The request object containing all of the parameters for the API call. + * @throws com.google.api.gax.rpc.ApiException if the remote call fails + */ + public final Database deleteDatabase(DeleteDatabaseRequest request) { + return deleteDatabaseCallable().call(request); + } + + // AUTO-GENERATED DOCUMENTATION AND METHOD. + /** + * Deletes an existing database specified by the database ID. + * + *

Sample code: + * + *

{@code
+   * // This snippet has been automatically generated and should be regarded as a code template only.
+   * // It will require modifications to work:
+   * // - It may require correct/in-range values for request initialization.
+   * // - It may require specifying regional endpoints when creating the service client as shown in
+   * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
+   * try (MetastoreServiceClient metastoreServiceClient = MetastoreServiceClient.create()) {
+   *   DeleteDatabaseRequest request =
+   *       DeleteDatabaseRequest.newBuilder()
+   *           .setName(
+   *               DatabaseName.of("[PROJECT]", "[LOCATION]", "[CATALOG]", "[DATABASE]").toString())
+   *           .build();
+   *   ApiFuture<Database> future =
+   *       metastoreServiceClient.deleteDatabaseCallable().futureCall(request);
+   *   // Do something.
+   *   Database response = future.get();
+   * }
+   * }
+ */ + public final UnaryCallable deleteDatabaseCallable() { + return stub.deleteDatabaseCallable(); + } + + // AUTO-GENERATED DOCUMENTATION AND METHOD. + /** + * Updates an existing database specified by the database ID. + * + *

Sample code: + * + *

{@code
+   * // This snippet has been automatically generated and should be regarded as a code template only.
+   * // It will require modifications to work:
+   * // - It may require correct/in-range values for request initialization.
+   * // - It may require specifying regional endpoints when creating the service client as shown in
+   * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
+   * try (MetastoreServiceClient metastoreServiceClient = MetastoreServiceClient.create()) {
+   *   Database database = Database.newBuilder().build();
+   *   FieldMask updateMask = FieldMask.newBuilder().build();
+   *   Database response = metastoreServiceClient.updateDatabase(database, updateMask);
+   * }
+   * }
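+   * A minimal sketch of a non-empty update mask for the updateMask parameter described below
+   * (the path "hive_options.parameters" is only an assumed example; list whichever Database
+   * fields you actually intend to change):
+   *
+   * {@code
+   * try (MetastoreServiceClient metastoreServiceClient = MetastoreServiceClient.create()) {
+   *   Database database = Database.newBuilder().build();
+   *   // Only the fields named in the mask are updated; unlisted fields keep their current values.
+   *   FieldMask updateMask = FieldMask.newBuilder().addPaths("hive_options.parameters").build();
+   *   Database response = metastoreServiceClient.updateDatabase(database, updateMask);
+   * }
+   * }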
+ * + * @param database Required. The database to update. + *

The database's `name` field is used to identify the database to update. Format: + * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id} + * @param updateMask The list of fields to update. + *

For the `FieldMask` definition, see + * https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask If + * not set, defaults to all of the fields that are allowed to update. + * @throws com.google.api.gax.rpc.ApiException if the remote call fails + */ + public final Database updateDatabase(Database database, FieldMask updateMask) { + UpdateDatabaseRequest request = + UpdateDatabaseRequest.newBuilder().setDatabase(database).setUpdateMask(updateMask).build(); + return updateDatabase(request); + } + + // AUTO-GENERATED DOCUMENTATION AND METHOD. + /** + * Updates an existing database specified by the database ID. + * + *

Sample code: + * + *

{@code
+   * // This snippet has been automatically generated and should be regarded as a code template only.
+   * // It will require modifications to work:
+   * // - It may require correct/in-range values for request initialization.
+   * // - It may require specifying regional endpoints when creating the service client as shown in
+   * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
+   * try (MetastoreServiceClient metastoreServiceClient = MetastoreServiceClient.create()) {
+   *   UpdateDatabaseRequest request =
+   *       UpdateDatabaseRequest.newBuilder()
+   *           .setDatabase(Database.newBuilder().build())
+   *           .setUpdateMask(FieldMask.newBuilder().build())
+   *           .build();
+   *   Database response = metastoreServiceClient.updateDatabase(request);
+   * }
+   * }
+ * + * @param request The request object containing all of the parameters for the API call. + * @throws com.google.api.gax.rpc.ApiException if the remote call fails + */ + public final Database updateDatabase(UpdateDatabaseRequest request) { + return updateDatabaseCallable().call(request); + } + + // AUTO-GENERATED DOCUMENTATION AND METHOD. + /** + * Updates an existing database specified by the database ID. + * + *

Sample code: + * + *

{@code
+   * // This snippet has been automatically generated and should be regarded as a code template only.
+   * // It will require modifications to work:
+   * // - It may require correct/in-range values for request initialization.
+   * // - It may require specifying regional endpoints when creating the service client as shown in
+   * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
+   * try (MetastoreServiceClient metastoreServiceClient = MetastoreServiceClient.create()) {
+   *   UpdateDatabaseRequest request =
+   *       UpdateDatabaseRequest.newBuilder()
+   *           .setDatabase(Database.newBuilder().build())
+   *           .setUpdateMask(FieldMask.newBuilder().build())
+   *           .build();
+   *   ApiFuture<Database> future =
+   *       metastoreServiceClient.updateDatabaseCallable().futureCall(request);
+   *   // Do something.
+   *   Database response = future.get();
+   * }
+   * }
+ */ + public final UnaryCallable updateDatabaseCallable() { + return stub.updateDatabaseCallable(); + } + + // AUTO-GENERATED DOCUMENTATION AND METHOD. + /** + * Gets the database specified by the resource name. + * + *

Sample code: + * + *

{@code
+   * // This snippet has been automatically generated and should be regarded as a code template only.
+   * // It will require modifications to work:
+   * // - It may require correct/in-range values for request initialization.
+   * // - It may require specifying regional endpoints when creating the service client as shown in
+   * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
+   * try (MetastoreServiceClient metastoreServiceClient = MetastoreServiceClient.create()) {
+   *   DatabaseName name = DatabaseName.of("[PROJECT]", "[LOCATION]", "[CATALOG]", "[DATABASE]");
+   *   Database response = metastoreServiceClient.getDatabase(name);
+   * }
+   * }
+ * + * @param name Required. The name of the database to retrieve. Format: + * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id} + * @throws com.google.api.gax.rpc.ApiException if the remote call fails + */ + public final Database getDatabase(DatabaseName name) { + GetDatabaseRequest request = + GetDatabaseRequest.newBuilder().setName(name == null ? null : name.toString()).build(); + return getDatabase(request); + } + + // AUTO-GENERATED DOCUMENTATION AND METHOD. + /** + * Gets the database specified by the resource name. + * + *

Sample code: + * + *

{@code
+   * // This snippet has been automatically generated and should be regarded as a code template only.
+   * // It will require modifications to work:
+   * // - It may require correct/in-range values for request initialization.
+   * // - It may require specifying regional endpoints when creating the service client as shown in
+   * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
+   * try (MetastoreServiceClient metastoreServiceClient = MetastoreServiceClient.create()) {
+   *   String name =
+   *       DatabaseName.of("[PROJECT]", "[LOCATION]", "[CATALOG]", "[DATABASE]").toString();
+   *   Database response = metastoreServiceClient.getDatabase(name);
+   * }
+   * }
+ * + * @param name Required. The name of the database to retrieve. Format: + * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id} + * @throws com.google.api.gax.rpc.ApiException if the remote call fails + */ + public final Database getDatabase(String name) { + GetDatabaseRequest request = GetDatabaseRequest.newBuilder().setName(name).build(); + return getDatabase(request); + } + + // AUTO-GENERATED DOCUMENTATION AND METHOD. + /** + * Gets the database specified by the resource name. + * + *

Sample code: + * + *

{@code
+   * // This snippet has been automatically generated and should be regarded as a code template only.
+   * // It will require modifications to work:
+   * // - It may require correct/in-range values for request initialization.
+   * // - It may require specifying regional endpoints when creating the service client as shown in
+   * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
+   * try (MetastoreServiceClient metastoreServiceClient = MetastoreServiceClient.create()) {
+   *   GetDatabaseRequest request =
+   *       GetDatabaseRequest.newBuilder()
+   *           .setName(
+   *               DatabaseName.of("[PROJECT]", "[LOCATION]", "[CATALOG]", "[DATABASE]").toString())
+   *           .build();
+   *   Database response = metastoreServiceClient.getDatabase(request);
+   * }
+   * }
+ * + * @param request The request object containing all of the parameters for the API call. + * @throws com.google.api.gax.rpc.ApiException if the remote call fails + */ + public final Database getDatabase(GetDatabaseRequest request) { + return getDatabaseCallable().call(request); + } + + // AUTO-GENERATED DOCUMENTATION AND METHOD. + /** + * Gets the database specified by the resource name. + * + *

Sample code: + * + *

{@code
+   * // This snippet has been automatically generated and should be regarded as a code template only.
+   * // It will require modifications to work:
+   * // - It may require correct/in-range values for request initialization.
+   * // - It may require specifying regional endpoints when creating the service client as shown in
+   * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
+   * try (MetastoreServiceClient metastoreServiceClient = MetastoreServiceClient.create()) {
+   *   GetDatabaseRequest request =
+   *       GetDatabaseRequest.newBuilder()
+   *           .setName(
+   *               DatabaseName.of("[PROJECT]", "[LOCATION]", "[CATALOG]", "[DATABASE]").toString())
+   *           .build();
+   *   ApiFuture<Database> future = metastoreServiceClient.getDatabaseCallable().futureCall(request);
+   *   // Do something.
+   *   Database response = future.get();
+   * }
+   * }
+ */ + public final UnaryCallable getDatabaseCallable() { + return stub.getDatabaseCallable(); + } + + // AUTO-GENERATED DOCUMENTATION AND METHOD. + /** + * List all databases in a specified catalog. + * + *

Sample code: + * + *

{@code
+   * // This snippet has been automatically generated and should be regarded as a code template only.
+   * // It will require modifications to work:
+   * // - It may require correct/in-range values for request initialization.
+   * // - It may require specifying regional endpoints when creating the service client as shown in
+   * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
+   * try (MetastoreServiceClient metastoreServiceClient = MetastoreServiceClient.create()) {
+   *   CatalogName parent = CatalogName.of("[PROJECT]", "[LOCATION]", "[CATALOG]");
+   *   for (Database element : metastoreServiceClient.listDatabases(parent).iterateAll()) {
+   *     // doThingsWith(element);
+   *   }
+   * }
+   * }
+ * + * @param parent Required. The parent, which owns this collection of databases. Format: + * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id} + * @throws com.google.api.gax.rpc.ApiException if the remote call fails + */ + public final ListDatabasesPagedResponse listDatabases(CatalogName parent) { + ListDatabasesRequest request = + ListDatabasesRequest.newBuilder() + .setParent(parent == null ? null : parent.toString()) + .build(); + return listDatabases(request); + } + + // AUTO-GENERATED DOCUMENTATION AND METHOD. + /** + * List all databases in a specified catalog. + * + *

Sample code: + * + *

{@code
+   * // This snippet has been automatically generated and should be regarded as a code template only.
+   * // It will require modifications to work:
+   * // - It may require correct/in-range values for request initialization.
+   * // - It may require specifying regional endpoints when creating the service client as shown in
+   * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
+   * try (MetastoreServiceClient metastoreServiceClient = MetastoreServiceClient.create()) {
+   *   String parent = CatalogName.of("[PROJECT]", "[LOCATION]", "[CATALOG]").toString();
+   *   for (Database element : metastoreServiceClient.listDatabases(parent).iterateAll()) {
+   *     // doThingsWith(element);
+   *   }
+   * }
+   * }
+ * + * @param parent Required. The parent, which owns this collection of databases. Format: + * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id} + * @throws com.google.api.gax.rpc.ApiException if the remote call fails + */ + public final ListDatabasesPagedResponse listDatabases(String parent) { + ListDatabasesRequest request = ListDatabasesRequest.newBuilder().setParent(parent).build(); + return listDatabases(request); + } + + // AUTO-GENERATED DOCUMENTATION AND METHOD. + /** + * List all databases in a specified catalog. + * + *

Sample code: + * + *

{@code
+   * // This snippet has been automatically generated and should be regarded as a code template only.
+   * // It will require modifications to work:
+   * // - It may require correct/in-range values for request initialization.
+   * // - It may require specifying regional endpoints when creating the service client as shown in
+   * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
+   * try (MetastoreServiceClient metastoreServiceClient = MetastoreServiceClient.create()) {
+   *   ListDatabasesRequest request =
+   *       ListDatabasesRequest.newBuilder()
+   *           .setParent(CatalogName.of("[PROJECT]", "[LOCATION]", "[CATALOG]").toString())
+   *           .setPageSize(883849137)
+   *           .setPageToken("pageToken873572522")
+   *           .build();
+   *   for (Database element : metastoreServiceClient.listDatabases(request).iterateAll()) {
+   *     // doThingsWith(element);
+   *   }
+   * }
+   * }</pre>
+   *
+   * @param request The request object containing all of the parameters for the API call.
+   * @throws com.google.api.gax.rpc.ApiException if the remote call fails
+   */
+  public final ListDatabasesPagedResponse listDatabases(ListDatabasesRequest request) {
+    return listDatabasesPagedCallable().call(request);
+  }
+
+  // AUTO-GENERATED DOCUMENTATION AND METHOD.
+  /**
+   * List all databases in a specified catalog.
+   *
+   * <p>Sample code:
+   *
+   * <pre>{@code
+   * // This snippet has been automatically generated and should be regarded as a code template only.
+   * // It will require modifications to work:
+   * // - It may require correct/in-range values for request initialization.
+   * // - It may require specifying regional endpoints when creating the service client as shown in
+   * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
+   * try (MetastoreServiceClient metastoreServiceClient = MetastoreServiceClient.create()) {
+   *   ListDatabasesRequest request =
+   *       ListDatabasesRequest.newBuilder()
+   *           .setParent(CatalogName.of("[PROJECT]", "[LOCATION]", "[CATALOG]").toString())
+   *           .setPageSize(883849137)
+   *           .setPageToken("pageToken873572522")
+   *           .build();
+   *   ApiFuture<Database> future =
+   *       metastoreServiceClient.listDatabasesPagedCallable().futureCall(request);
+   *   // Do something.
+   *   for (Database element : future.get().iterateAll()) {
+   *     // doThingsWith(element);
+   *   }
+   * }
+   * }</pre>
+   */
+  public final UnaryCallable<ListDatabasesRequest, ListDatabasesPagedResponse>
+      listDatabasesPagedCallable() {
+    return stub.listDatabasesPagedCallable();
+  }
+
+  // AUTO-GENERATED DOCUMENTATION AND METHOD.
+  /**
+   * List all databases in a specified catalog.
+   *
+   * <p>Sample code:
+   *
+   * <pre>{@code
+   * // This snippet has been automatically generated and should be regarded as a code template only.
+   * // It will require modifications to work:
+   * // - It may require correct/in-range values for request initialization.
+   * // - It may require specifying regional endpoints when creating the service client as shown in
+   * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
+   * try (MetastoreServiceClient metastoreServiceClient = MetastoreServiceClient.create()) {
+   *   ListDatabasesRequest request =
+   *       ListDatabasesRequest.newBuilder()
+   *           .setParent(CatalogName.of("[PROJECT]", "[LOCATION]", "[CATALOG]").toString())
+   *           .setPageSize(883849137)
+   *           .setPageToken("pageToken873572522")
+   *           .build();
+   *   while (true) {
+   *     ListDatabasesResponse response =
+   *         metastoreServiceClient.listDatabasesCallable().call(request);
+   *     for (Database element : response.getDatabasesList()) {
+   *       // doThingsWith(element);
+   *     }
+   *     String nextPageToken = response.getNextPageToken();
+   *     if (!Strings.isNullOrEmpty(nextPageToken)) {
+   *       request = request.toBuilder().setPageToken(nextPageToken).build();
+   *     } else {
+   *       break;
+   *     }
+   *   }
+   * }
+   * }</pre>
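+   *
+   * <p>Note: the {@code listDatabases} and {@code listDatabasesPagedCallable} variants above
+   * handle page tokens automatically; the raw callable returned here is only needed when a caller
+   * wants explicit, page-by-page control.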
+   */
+  public final UnaryCallable<ListDatabasesRequest, ListDatabasesResponse> listDatabasesCallable() {
+    return stub.listDatabasesCallable();
+  }
+
+  // AUTO-GENERATED DOCUMENTATION AND METHOD.
+  /**
+   * Creates a new table.
+   *
+   * <p>Sample code:
+   *
+   * <pre>{@code
+   * // This snippet has been automatically generated and should be regarded as a code template only.
+   * // It will require modifications to work:
+   * // - It may require correct/in-range values for request initialization.
+   * // - It may require specifying regional endpoints when creating the service client as shown in
+   * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
+   * try (MetastoreServiceClient metastoreServiceClient = MetastoreServiceClient.create()) {
+   *   DatabaseName parent = DatabaseName.of("[PROJECT]", "[LOCATION]", "[CATALOG]", "[DATABASE]");
+   *   Table table = Table.newBuilder().build();
+   *   String tableId = "tableId-1552905847";
+   *   Table response = metastoreServiceClient.createTable(parent, table, tableId);
+   * }
+   * }</pre>
+   *
+   * @param parent Required. The parent resource where this table will be created. Format:
+   *     projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}
+   * @param table Required. The table to create. The `name` field does not need to be provided for
+   *     the table creation.
+   * @param tableId Required. The ID to use for the table, which will become the final component of
+   *     the table's resource name.
+   * @throws com.google.api.gax.rpc.ApiException if the remote call fails
+   */
+  public final Table createTable(DatabaseName parent, Table table, String tableId) {
+    CreateTableRequest request =
+        CreateTableRequest.newBuilder()
+            .setParent(parent == null ? null : parent.toString())
+            .setTable(table)
+            .setTableId(tableId)
+            .build();
+    return createTable(request);
+  }
+
+  // AUTO-GENERATED DOCUMENTATION AND METHOD.
+  /**
+   * Creates a new table.
+   *
+   * <p>Sample code:
+   *
+   * <pre>{@code
+   * // This snippet has been automatically generated and should be regarded as a code template only.
+   * // It will require modifications to work:
+   * // - It may require correct/in-range values for request initialization.
+   * // - It may require specifying regional endpoints when creating the service client as shown in
+   * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
+   * try (MetastoreServiceClient metastoreServiceClient = MetastoreServiceClient.create()) {
+   *   String parent =
+   *       DatabaseName.of("[PROJECT]", "[LOCATION]", "[CATALOG]", "[DATABASE]").toString();
+   *   Table table = Table.newBuilder().build();
+   *   String tableId = "tableId-1552905847";
+   *   Table response = metastoreServiceClient.createTable(parent, table, tableId);
+   * }
+   * }</pre>
+   *
+   * @param parent Required. The parent resource where this table will be created. Format:
+   *     projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}
+   * @param table Required. The table to create. The `name` field does not need to be provided for
+   *     the table creation.
+   * @param tableId Required. The ID to use for the table, which will become the final component of
+   *     the table's resource name.
+   * @throws com.google.api.gax.rpc.ApiException if the remote call fails
+   */
+  public final Table createTable(String parent, Table table, String tableId) {
+    CreateTableRequest request =
+        CreateTableRequest.newBuilder()
+            .setParent(parent)
+            .setTable(table)
+            .setTableId(tableId)
+            .build();
+    return createTable(request);
+  }
+
+  // AUTO-GENERATED DOCUMENTATION AND METHOD.
+  /**
+   * Creates a new table.
+   *
+   * <p>Sample code:
+   *
+   * <pre>{@code
+   * // This snippet has been automatically generated and should be regarded as a code template only.
+   * // It will require modifications to work:
+   * // - It may require correct/in-range values for request initialization.
+   * // - It may require specifying regional endpoints when creating the service client as shown in
+   * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
+   * try (MetastoreServiceClient metastoreServiceClient = MetastoreServiceClient.create()) {
+   *   CreateTableRequest request =
+   *       CreateTableRequest.newBuilder()
+   *           .setParent(
+   *               DatabaseName.of("[PROJECT]", "[LOCATION]", "[CATALOG]", "[DATABASE]").toString())
+   *           .setTable(Table.newBuilder().build())
+   *           .setTableId("tableId-1552905847")
+   *           .build();
+   *   Table response = metastoreServiceClient.createTable(request);
+   * }
+   * }</pre>
+   *
+   * @param request The request object containing all of the parameters for the API call.
+   * @throws com.google.api.gax.rpc.ApiException if the remote call fails
+   */
+  public final Table createTable(CreateTableRequest request) {
+    return createTableCallable().call(request);
+  }
+
+  // AUTO-GENERATED DOCUMENTATION AND METHOD.
+  /**
+   * Creates a new table.
+   *
+   * <p>Sample code:
+   *
+   * <pre>{@code
+   * // This snippet has been automatically generated and should be regarded as a code template only.
+   * // It will require modifications to work:
+   * // - It may require correct/in-range values for request initialization.
+   * // - It may require specifying regional endpoints when creating the service client as shown in
+   * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
+   * try (MetastoreServiceClient metastoreServiceClient = MetastoreServiceClient.create()) {
+   *   CreateTableRequest request =
+   *       CreateTableRequest.newBuilder()
+   *           .setParent(
+   *               DatabaseName.of("[PROJECT]", "[LOCATION]", "[CATALOG]", "[DATABASE]").toString())
+   *           .setTable(Table.newBuilder().build())
+   *           .setTableId("tableId-1552905847")
+   *           .build();
+   *   ApiFuture<Table> future = metastoreServiceClient.createTableCallable().futureCall(request);
+   *   // Do something.
+   *   Table response = future.get();
+   * }
+   * }</pre>
+   */
+  public final UnaryCallable<CreateTableRequest, Table> createTableCallable() {
+    return stub.createTableCallable();
+  }
+
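+  // NOTE: illustrative only, not part of the generated surface. The samples above mention that
+  // regional endpoints may need to be specified when creating the service client. A minimal
+  // sketch of that configuration with the standard GAPIC settings builder (the endpoint value
+  // below is an assumed placeholder, not taken from this patch):
+  //
+  //   MetastoreServiceSettings metastoreServiceSettings =
+  //       MetastoreServiceSettings.newBuilder()
+  //           .setEndpoint("YOUR_REGIONAL_ENDPOINT:443") // hypothetical endpoint host
+  //           .build();
+  //   MetastoreServiceClient metastoreServiceClient =
+  //       MetastoreServiceClient.create(metastoreServiceSettings);
+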
+  // AUTO-GENERATED DOCUMENTATION AND METHOD.
+  /**
+   * Deletes an existing table specified by the table ID.
+   *
+   * <p>Sample code:
+   *
+   * <pre>{@code
+   * // This snippet has been automatically generated and should be regarded as a code template only.
+   * // It will require modifications to work:
+   * // - It may require correct/in-range values for request initialization.
+   * // - It may require specifying regional endpoints when creating the service client as shown in
+   * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
+   * try (MetastoreServiceClient metastoreServiceClient = MetastoreServiceClient.create()) {
+   *   TableName name =
+   *       TableName.of("[PROJECT]", "[LOCATION]", "[CATALOG]", "[DATABASE]", "[TABLE]");
+   *   Table response = metastoreServiceClient.deleteTable(name);
+   * }
+   * }</pre>
+   *
+   * @param name Required. The name of the table to delete. Format:
+   *     projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}/tables/{table_id}
+   * @throws com.google.api.gax.rpc.ApiException if the remote call fails
+   */
+  public final Table deleteTable(TableName name) {
+    DeleteTableRequest request =
+        DeleteTableRequest.newBuilder().setName(name == null ? null : name.toString()).build();
+    return deleteTable(request);
+  }
+
+  // AUTO-GENERATED DOCUMENTATION AND METHOD.
+  /**
+   * Deletes an existing table specified by the table ID.
+   *
+   * <p>Sample code:
+   *
+   * <pre>{@code
+   * // This snippet has been automatically generated and should be regarded as a code template only.
+   * // It will require modifications to work:
+   * // - It may require correct/in-range values for request initialization.
+   * // - It may require specifying regional endpoints when creating the service client as shown in
+   * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
+   * try (MetastoreServiceClient metastoreServiceClient = MetastoreServiceClient.create()) {
+   *   String name =
+   *       TableName.of("[PROJECT]", "[LOCATION]", "[CATALOG]", "[DATABASE]", "[TABLE]").toString();
+   *   Table response = metastoreServiceClient.deleteTable(name);
+   * }
+   * }</pre>
+   *
+   * @param name Required. The name of the table to delete. Format:
+   *     projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}/tables/{table_id}
+   * @throws com.google.api.gax.rpc.ApiException if the remote call fails
+   */
+  public final Table deleteTable(String name) {
+    DeleteTableRequest request = DeleteTableRequest.newBuilder().setName(name).build();
+    return deleteTable(request);
+  }
+
+  // AUTO-GENERATED DOCUMENTATION AND METHOD.
+  /**
+   * Deletes an existing table specified by the table ID.
+   *
+   * <p>Sample code:
+   *
+   * <pre>{@code
+   * // This snippet has been automatically generated and should be regarded as a code template only.
+   * // It will require modifications to work:
+   * // - It may require correct/in-range values for request initialization.
+   * // - It may require specifying regional endpoints when creating the service client as shown in
+   * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
+   * try (MetastoreServiceClient metastoreServiceClient = MetastoreServiceClient.create()) {
+   *   DeleteTableRequest request =
+   *       DeleteTableRequest.newBuilder()
+   *           .setName(
+   *               TableName.of("[PROJECT]", "[LOCATION]", "[CATALOG]", "[DATABASE]", "[TABLE]")
+   *                   .toString())
+   *           .build();
+   *   Table response = metastoreServiceClient.deleteTable(request);
+   * }
+   * }</pre>
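+   *
+   * <p>Note: unlike {@code deleteLock}, this RPC returns the deleted {@code Table} resource
+   * rather than an empty response.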
+   *
+   * @param request The request object containing all of the parameters for the API call.
+   * @throws com.google.api.gax.rpc.ApiException if the remote call fails
+   */
+  public final Table deleteTable(DeleteTableRequest request) {
+    return deleteTableCallable().call(request);
+  }
+
+  // AUTO-GENERATED DOCUMENTATION AND METHOD.
+  /**
+   * Deletes an existing table specified by the table ID.
+   *
+   * <p>Sample code:
+   *
+   * <pre>{@code
+   * // This snippet has been automatically generated and should be regarded as a code template only.
+   * // It will require modifications to work:
+   * // - It may require correct/in-range values for request initialization.
+   * // - It may require specifying regional endpoints when creating the service client as shown in
+   * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
+   * try (MetastoreServiceClient metastoreServiceClient = MetastoreServiceClient.create()) {
+   *   DeleteTableRequest request =
+   *       DeleteTableRequest.newBuilder()
+   *           .setName(
+   *               TableName.of("[PROJECT]", "[LOCATION]", "[CATALOG]", "[DATABASE]", "[TABLE]")
+   *                   .toString())
+   *           .build();
+   *   ApiFuture<Table> future = metastoreServiceClient.deleteTableCallable().futureCall(request);
+   *   // Do something.
+   *   Table response = future.get();
+   * }
+   * }</pre>
+   */
+  public final UnaryCallable<DeleteTableRequest, Table> deleteTableCallable() {
+    return stub.deleteTableCallable();
+  }
+
+  // AUTO-GENERATED DOCUMENTATION AND METHOD.
+  /**
+   * Updates an existing table specified by the table ID.
+   *
+   * <p>Sample code:
+   *
+   * <pre>{@code
+   * // This snippet has been automatically generated and should be regarded as a code template only.
+   * // It will require modifications to work:
+   * // - It may require correct/in-range values for request initialization.
+   * // - It may require specifying regional endpoints when creating the service client as shown in
+   * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
+   * try (MetastoreServiceClient metastoreServiceClient = MetastoreServiceClient.create()) {
+   *   Table table = Table.newBuilder().build();
+   *   FieldMask updateMask = FieldMask.newBuilder().build();
+   *   Table response = metastoreServiceClient.updateTable(table, updateMask);
+   * }
+   * }</pre>
+   *
+   * @param table Required. The table to update.
+   *     <p>The table's `name` field is used to identify the database to update. Format:
+   *     projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}/tables/{table_id}
+   * @param updateMask The list of fields to update.
+   *     <p>For the `FieldMask` definition, see
+   *     https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask If
+   *     not set, defaults to all of the fields that are allowed to update.
+   * @throws com.google.api.gax.rpc.ApiException if the remote call fails
+   */
+  public final Table updateTable(Table table, FieldMask updateMask) {
+    UpdateTableRequest request =
+        UpdateTableRequest.newBuilder().setTable(table).setUpdateMask(updateMask).build();
+    return updateTable(request);
+  }
+
+  // AUTO-GENERATED DOCUMENTATION AND METHOD.
+  /**
+   * Updates an existing table specified by the table ID.
+   *
+   * <p>Sample code:
+   *
+   * <pre>{@code
+   * // This snippet has been automatically generated and should be regarded as a code template only.
+   * // It will require modifications to work:
+   * // - It may require correct/in-range values for request initialization.
+   * // - It may require specifying regional endpoints when creating the service client as shown in
+   * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
+   * try (MetastoreServiceClient metastoreServiceClient = MetastoreServiceClient.create()) {
+   *   UpdateTableRequest request =
+   *       UpdateTableRequest.newBuilder()
+   *           .setTable(Table.newBuilder().build())
+   *           .setUpdateMask(FieldMask.newBuilder().build())
+   *           .build();
+   *   Table response = metastoreServiceClient.updateTable(request);
+   * }
+   * }</pre>
+   *
+   * @param request The request object containing all of the parameters for the API call.
+   * @throws com.google.api.gax.rpc.ApiException if the remote call fails
+   */
+  public final Table updateTable(UpdateTableRequest request) {
+    return updateTableCallable().call(request);
+  }
+
+  // AUTO-GENERATED DOCUMENTATION AND METHOD.
+  /**
+   * Updates an existing table specified by the table ID.
+   *
+   * <p>Sample code:
+   *
+   * <pre>{@code
+   * // This snippet has been automatically generated and should be regarded as a code template only.
+   * // It will require modifications to work:
+   * // - It may require correct/in-range values for request initialization.
+   * // - It may require specifying regional endpoints when creating the service client as shown in
+   * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
+   * try (MetastoreServiceClient metastoreServiceClient = MetastoreServiceClient.create()) {
+   *   UpdateTableRequest request =
+   *       UpdateTableRequest.newBuilder()
+   *           .setTable(Table.newBuilder().build())
+   *           .setUpdateMask(FieldMask.newBuilder().build())
+   *           .build();
+   *   ApiFuture<Table> future = metastoreServiceClient.updateTableCallable().futureCall(request);
+   *   // Do something.
+   *   Table response = future.get();
+   * }
+   * }</pre>
+   */
+  public final UnaryCallable<UpdateTableRequest, Table> updateTableCallable() {
+    return stub.updateTableCallable();
+  }
+
+  // AUTO-GENERATED DOCUMENTATION AND METHOD.
+  /**
+   * Gets the table specified by the resource name.
+   *
+   * <p>Sample code:
+   *
+   * <pre>{@code
+   * // This snippet has been automatically generated and should be regarded as a code template only.
+   * // It will require modifications to work:
+   * // - It may require correct/in-range values for request initialization.
+   * // - It may require specifying regional endpoints when creating the service client as shown in
+   * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
+   * try (MetastoreServiceClient metastoreServiceClient = MetastoreServiceClient.create()) {
+   *   TableName name =
+   *       TableName.of("[PROJECT]", "[LOCATION]", "[CATALOG]", "[DATABASE]", "[TABLE]");
+   *   Table response = metastoreServiceClient.getTable(name);
+   * }
+   * }
+ * + * @param name Required. The name of the table to retrieve. Format: + * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}/tables/{table_id} + * @throws com.google.api.gax.rpc.ApiException if the remote call fails + */ + public final Table getTable(TableName name) { + GetTableRequest request = + GetTableRequest.newBuilder().setName(name == null ? null : name.toString()).build(); + return getTable(request); + } + + // AUTO-GENERATED DOCUMENTATION AND METHOD. + /** + * Gets the table specified by the resource name. + * + *

Sample code: + * + *

{@code
+   * // This snippet has been automatically generated and should be regarded as a code template only.
+   * // It will require modifications to work:
+   * // - It may require correct/in-range values for request initialization.
+   * // - It may require specifying regional endpoints when creating the service client as shown in
+   * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
+   * try (MetastoreServiceClient metastoreServiceClient = MetastoreServiceClient.create()) {
+   *   String name =
+   *       TableName.of("[PROJECT]", "[LOCATION]", "[CATALOG]", "[DATABASE]", "[TABLE]").toString();
+   *   Table response = metastoreServiceClient.getTable(name);
+   * }
+   * }
+ * + * @param name Required. The name of the table to retrieve. Format: + * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}/tables/{table_id} + * @throws com.google.api.gax.rpc.ApiException if the remote call fails + */ + public final Table getTable(String name) { + GetTableRequest request = GetTableRequest.newBuilder().setName(name).build(); + return getTable(request); + } + + // AUTO-GENERATED DOCUMENTATION AND METHOD. + /** + * Gets the table specified by the resource name. + * + *

Sample code: + * + *

{@code
+   * // This snippet has been automatically generated and should be regarded as a code template only.
+   * // It will require modifications to work:
+   * // - It may require correct/in-range values for request initialization.
+   * // - It may require specifying regional endpoints when creating the service client as shown in
+   * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
+   * try (MetastoreServiceClient metastoreServiceClient = MetastoreServiceClient.create()) {
+   *   GetTableRequest request =
+   *       GetTableRequest.newBuilder()
+   *           .setName(
+   *               TableName.of("[PROJECT]", "[LOCATION]", "[CATALOG]", "[DATABASE]", "[TABLE]")
+   *                   .toString())
+   *           .build();
+   *   Table response = metastoreServiceClient.getTable(request);
+   * }
+   * }
+ * + * @param request The request object containing all of the parameters for the API call. + * @throws com.google.api.gax.rpc.ApiException if the remote call fails + */ + public final Table getTable(GetTableRequest request) { + return getTableCallable().call(request); + } + + // AUTO-GENERATED DOCUMENTATION AND METHOD. + /** + * Gets the table specified by the resource name. + * + *

Sample code: + * + *

{@code
+   * // This snippet has been automatically generated and should be regarded as a code template only.
+   * // It will require modifications to work:
+   * // - It may require correct/in-range values for request initialization.
+   * // - It may require specifying regional endpoints when creating the service client as shown in
+   * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
+   * try (MetastoreServiceClient metastoreServiceClient = MetastoreServiceClient.create()) {
+   *   GetTableRequest request =
+   *       GetTableRequest.newBuilder()
+   *           .setName(
+   *               TableName.of("[PROJECT]", "[LOCATION]", "[CATALOG]", "[DATABASE]", "[TABLE]")
+   *                   .toString())
+   *           .build();
+   *   ApiFuture<Table> future = metastoreServiceClient.getTableCallable().futureCall(request);
+   *   // Do something.
+   *   Table response = future.get();
+   * }
+   * }</pre>
+   */
+  public final UnaryCallable<GetTableRequest, Table> getTableCallable() {
+    return stub.getTableCallable();
+  }
+
+  // AUTO-GENERATED DOCUMENTATION AND METHOD.
+  /**
+   * List all tables in a specified database.
+   *
+   * <p>Sample code:
+   *
+   * <pre>{@code
+   * // This snippet has been automatically generated and should be regarded as a code template only.
+   * // It will require modifications to work:
+   * // - It may require correct/in-range values for request initialization.
+   * // - It may require specifying regional endpoints when creating the service client as shown in
+   * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
+   * try (MetastoreServiceClient metastoreServiceClient = MetastoreServiceClient.create()) {
+   *   DatabaseName parent = DatabaseName.of("[PROJECT]", "[LOCATION]", "[CATALOG]", "[DATABASE]");
+   *   for (Table element : metastoreServiceClient.listTables(parent).iterateAll()) {
+   *     // doThingsWith(element);
+   *   }
+   * }
+   * }
+ * + * @param parent Required. The parent, which owns this collection of tables. Format: + * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id} + * @throws com.google.api.gax.rpc.ApiException if the remote call fails + */ + public final ListTablesPagedResponse listTables(DatabaseName parent) { + ListTablesRequest request = + ListTablesRequest.newBuilder().setParent(parent == null ? null : parent.toString()).build(); + return listTables(request); + } + + // AUTO-GENERATED DOCUMENTATION AND METHOD. + /** + * List all tables in a specified database. + * + *

Sample code: + * + *

{@code
+   * // This snippet has been automatically generated and should be regarded as a code template only.
+   * // It will require modifications to work:
+   * // - It may require correct/in-range values for request initialization.
+   * // - It may require specifying regional endpoints when creating the service client as shown in
+   * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
+   * try (MetastoreServiceClient metastoreServiceClient = MetastoreServiceClient.create()) {
+   *   String parent =
+   *       DatabaseName.of("[PROJECT]", "[LOCATION]", "[CATALOG]", "[DATABASE]").toString();
+   *   for (Table element : metastoreServiceClient.listTables(parent).iterateAll()) {
+   *     // doThingsWith(element);
+   *   }
+   * }
+   * }
+ * + * @param parent Required. The parent, which owns this collection of tables. Format: + * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id} + * @throws com.google.api.gax.rpc.ApiException if the remote call fails + */ + public final ListTablesPagedResponse listTables(String parent) { + ListTablesRequest request = ListTablesRequest.newBuilder().setParent(parent).build(); + return listTables(request); + } + + // AUTO-GENERATED DOCUMENTATION AND METHOD. + /** + * List all tables in a specified database. + * + *

Sample code: + * + *

{@code
+   * // This snippet has been automatically generated and should be regarded as a code template only.
+   * // It will require modifications to work:
+   * // - It may require correct/in-range values for request initialization.
+   * // - It may require specifying regional endpoints when creating the service client as shown in
+   * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
+   * try (MetastoreServiceClient metastoreServiceClient = MetastoreServiceClient.create()) {
+   *   ListTablesRequest request =
+   *       ListTablesRequest.newBuilder()
+   *           .setParent(
+   *               DatabaseName.of("[PROJECT]", "[LOCATION]", "[CATALOG]", "[DATABASE]").toString())
+   *           .setPageSize(883849137)
+   *           .setPageToken("pageToken873572522")
+   *           .build();
+   *   for (Table element : metastoreServiceClient.listTables(request).iterateAll()) {
+   *     // doThingsWith(element);
+   *   }
+   * }
+   * }
+ * + * @param request The request object containing all of the parameters for the API call. + * @throws com.google.api.gax.rpc.ApiException if the remote call fails + */ + public final ListTablesPagedResponse listTables(ListTablesRequest request) { + return listTablesPagedCallable().call(request); + } + + // AUTO-GENERATED DOCUMENTATION AND METHOD. + /** + * List all tables in a specified database. + * + *

Sample code: + * + *

{@code
+   * // This snippet has been automatically generated and should be regarded as a code template only.
+   * // It will require modifications to work:
+   * // - It may require correct/in-range values for request initialization.
+   * // - It may require specifying regional endpoints when creating the service client as shown in
+   * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
+   * try (MetastoreServiceClient metastoreServiceClient = MetastoreServiceClient.create()) {
+   *   ListTablesRequest request =
+   *       ListTablesRequest.newBuilder()
+   *           .setParent(
+   *               DatabaseName.of("[PROJECT]", "[LOCATION]", "[CATALOG]", "[DATABASE]").toString())
+   *           .setPageSize(883849137)
+   *           .setPageToken("pageToken873572522")
+   *           .build();
+   *   ApiFuture<Table> future =
+   *       metastoreServiceClient.listTablesPagedCallable().futureCall(request);
+   *   // Do something.
+   *   for (Table element : future.get().iterateAll()) {
+   *     // doThingsWith(element);
+   *   }
+   * }
+   * }</pre>
+   */
+  public final UnaryCallable<ListTablesRequest, ListTablesPagedResponse>
+      listTablesPagedCallable() {
+    return stub.listTablesPagedCallable();
+  }
+
+  // AUTO-GENERATED DOCUMENTATION AND METHOD.
+  /**
+   * List all tables in a specified database.
+   *
+   * <p>Sample code:
+   *
+   * <pre>{@code
+   * // This snippet has been automatically generated and should be regarded as a code template only.
+   * // It will require modifications to work:
+   * // - It may require correct/in-range values for request initialization.
+   * // - It may require specifying regional endpoints when creating the service client as shown in
+   * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
+   * try (MetastoreServiceClient metastoreServiceClient = MetastoreServiceClient.create()) {
+   *   ListTablesRequest request =
+   *       ListTablesRequest.newBuilder()
+   *           .setParent(
+   *               DatabaseName.of("[PROJECT]", "[LOCATION]", "[CATALOG]", "[DATABASE]").toString())
+   *           .setPageSize(883849137)
+   *           .setPageToken("pageToken873572522")
+   *           .build();
+   *   while (true) {
+   *     ListTablesResponse response = metastoreServiceClient.listTablesCallable().call(request);
+   *     for (Table element : response.getTablesList()) {
+   *       // doThingsWith(element);
+   *     }
+   *     String nextPageToken = response.getNextPageToken();
+   *     if (!Strings.isNullOrEmpty(nextPageToken)) {
+   *       request = request.toBuilder().setPageToken(nextPageToken).build();
+   *     } else {
+   *       break;
+   *     }
+   *   }
+   * }
+   * }
+ */ + public final UnaryCallable listTablesCallable() { + return stub.listTablesCallable(); + } + + // AUTO-GENERATED DOCUMENTATION AND METHOD. + /** + * Creates a new lock. + * + *

Sample code: + * + *

{@code
+   * // This snippet has been automatically generated and should be regarded as a code template only.
+   * // It will require modifications to work:
+   * // - It may require correct/in-range values for request initialization.
+   * // - It may require specifying regional endpoints when creating the service client as shown in
+   * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
+   * try (MetastoreServiceClient metastoreServiceClient = MetastoreServiceClient.create()) {
+   *   DatabaseName parent = DatabaseName.of("[PROJECT]", "[LOCATION]", "[CATALOG]", "[DATABASE]");
+   *   Lock lock = Lock.newBuilder().build();
+   *   Lock response = metastoreServiceClient.createLock(parent, lock);
+   * }
+   * }
+ * + * @param parent Required. The parent resource where this lock will be created. Format: + * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id} + * @param lock Required. The lock to create. The `name` field does not need to be provided for the + * lock creation. + * @throws com.google.api.gax.rpc.ApiException if the remote call fails + */ + public final Lock createLock(DatabaseName parent, Lock lock) { + CreateLockRequest request = + CreateLockRequest.newBuilder() + .setParent(parent == null ? null : parent.toString()) + .setLock(lock) + .build(); + return createLock(request); + } + + // AUTO-GENERATED DOCUMENTATION AND METHOD. + /** + * Creates a new lock. + * + *

Sample code: + * + *

{@code
+   * // This snippet has been automatically generated and should be regarded as a code template only.
+   * // It will require modifications to work:
+   * // - It may require correct/in-range values for request initialization.
+   * // - It may require specifying regional endpoints when creating the service client as shown in
+   * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
+   * try (MetastoreServiceClient metastoreServiceClient = MetastoreServiceClient.create()) {
+   *   String parent =
+   *       DatabaseName.of("[PROJECT]", "[LOCATION]", "[CATALOG]", "[DATABASE]").toString();
+   *   Lock lock = Lock.newBuilder().build();
+   *   Lock response = metastoreServiceClient.createLock(parent, lock);
+   * }
+   * }
+ * + * @param parent Required. The parent resource where this lock will be created. Format: + * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id} + * @param lock Required. The lock to create. The `name` field does not need to be provided for the + * lock creation. + * @throws com.google.api.gax.rpc.ApiException if the remote call fails + */ + public final Lock createLock(String parent, Lock lock) { + CreateLockRequest request = + CreateLockRequest.newBuilder().setParent(parent).setLock(lock).build(); + return createLock(request); + } + + // AUTO-GENERATED DOCUMENTATION AND METHOD. + /** + * Creates a new lock. + * + *

Sample code: + * + *

{@code
+   * // This snippet has been automatically generated and should be regarded as a code template only.
+   * // It will require modifications to work:
+   * // - It may require correct/in-range values for request initialization.
+   * // - It may require specifying regional endpoints when creating the service client as shown in
+   * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
+   * try (MetastoreServiceClient metastoreServiceClient = MetastoreServiceClient.create()) {
+   *   CreateLockRequest request =
+   *       CreateLockRequest.newBuilder()
+   *           .setParent(
+   *               DatabaseName.of("[PROJECT]", "[LOCATION]", "[CATALOG]", "[DATABASE]").toString())
+   *           .setLock(Lock.newBuilder().build())
+   *           .build();
+   *   Lock response = metastoreServiceClient.createLock(request);
+   * }
+   * }
+ * + * @param request The request object containing all of the parameters for the API call. + * @throws com.google.api.gax.rpc.ApiException if the remote call fails + */ + public final Lock createLock(CreateLockRequest request) { + return createLockCallable().call(request); + } + + // AUTO-GENERATED DOCUMENTATION AND METHOD. + /** + * Creates a new lock. + * + *

Sample code: + * + *

{@code
+   * // This snippet has been automatically generated and should be regarded as a code template only.
+   * // It will require modifications to work:
+   * // - It may require correct/in-range values for request initialization.
+   * // - It may require specifying regional endpoints when creating the service client as shown in
+   * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
+   * try (MetastoreServiceClient metastoreServiceClient = MetastoreServiceClient.create()) {
+   *   CreateLockRequest request =
+   *       CreateLockRequest.newBuilder()
+   *           .setParent(
+   *               DatabaseName.of("[PROJECT]", "[LOCATION]", "[CATALOG]", "[DATABASE]").toString())
+   *           .setLock(Lock.newBuilder().build())
+   *           .build();
+   *   ApiFuture<Lock> future = metastoreServiceClient.createLockCallable().futureCall(request);
+   *   // Do something.
+   *   Lock response = future.get();
+   * }
+   * }</pre>
+   */
+  public final UnaryCallable<CreateLockRequest, Lock> createLockCallable() {
+    return stub.createLockCallable();
+  }
+
+  // AUTO-GENERATED DOCUMENTATION AND METHOD.
+  /**
+   * Deletes an existing lock specified by the lock ID.
+   *
+   * <p>Sample code:
+   *
+   * <pre>{@code
+   * // This snippet has been automatically generated and should be regarded as a code template only.
+   * // It will require modifications to work:
+   * // - It may require correct/in-range values for request initialization.
+   * // - It may require specifying regional endpoints when creating the service client as shown in
+   * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
+   * try (MetastoreServiceClient metastoreServiceClient = MetastoreServiceClient.create()) {
+   *   LockName name = LockName.of("[PROJECT]", "[LOCATION]", "[CATALOG]", "[DATABASE]", "[LOCK]");
+   *   metastoreServiceClient.deleteLock(name);
+   * }
+   * }
+ * + * @param name Required. The name of the lock to delete. Format: + * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}/locks/{lock_id} + * @throws com.google.api.gax.rpc.ApiException if the remote call fails + */ + public final void deleteLock(LockName name) { + DeleteLockRequest request = + DeleteLockRequest.newBuilder().setName(name == null ? null : name.toString()).build(); + deleteLock(request); + } + + // AUTO-GENERATED DOCUMENTATION AND METHOD. + /** + * Deletes an existing lock specified by the lock ID. + * + *

Sample code: + * + *

{@code
+   * // This snippet has been automatically generated and should be regarded as a code template only.
+   * // It will require modifications to work:
+   * // - It may require correct/in-range values for request initialization.
+   * // - It may require specifying regional endpoints when creating the service client as shown in
+   * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
+   * try (MetastoreServiceClient metastoreServiceClient = MetastoreServiceClient.create()) {
+   *   String name =
+   *       LockName.of("[PROJECT]", "[LOCATION]", "[CATALOG]", "[DATABASE]", "[LOCK]").toString();
+   *   metastoreServiceClient.deleteLock(name);
+   * }
+   * }
+ * + * @param name Required. The name of the lock to delete. Format: + * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}/locks/{lock_id} + * @throws com.google.api.gax.rpc.ApiException if the remote call fails + */ + public final void deleteLock(String name) { + DeleteLockRequest request = DeleteLockRequest.newBuilder().setName(name).build(); + deleteLock(request); + } + + // AUTO-GENERATED DOCUMENTATION AND METHOD. + /** + * Deletes an existing lock specified by the lock ID. + * + *

Sample code: + * + *

{@code
+   * // This snippet has been automatically generated and should be regarded as a code template only.
+   * // It will require modifications to work:
+   * // - It may require correct/in-range values for request initialization.
+   * // - It may require specifying regional endpoints when creating the service client as shown in
+   * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
+   * try (MetastoreServiceClient metastoreServiceClient = MetastoreServiceClient.create()) {
+   *   DeleteLockRequest request =
+   *       DeleteLockRequest.newBuilder()
+   *           .setName(
+   *               LockName.of("[PROJECT]", "[LOCATION]", "[CATALOG]", "[DATABASE]", "[LOCK]")
+   *                   .toString())
+   *           .build();
+   *   metastoreServiceClient.deleteLock(request);
+   * }
+   * }
+ * + * @param request The request object containing all of the parameters for the API call. + * @throws com.google.api.gax.rpc.ApiException if the remote call fails + */ + public final void deleteLock(DeleteLockRequest request) { + deleteLockCallable().call(request); + } + + // AUTO-GENERATED DOCUMENTATION AND METHOD. + /** + * Deletes an existing lock specified by the lock ID. + * + *

Sample code: + * + *

{@code
+   * // This snippet has been automatically generated and should be regarded as a code template only.
+   * // It will require modifications to work:
+   * // - It may require correct/in-range values for request initialization.
+   * // - It may require specifying regional endpoints when creating the service client as shown in
+   * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
+   * try (MetastoreServiceClient metastoreServiceClient = MetastoreServiceClient.create()) {
+   *   DeleteLockRequest request =
+   *       DeleteLockRequest.newBuilder()
+   *           .setName(
+   *               LockName.of("[PROJECT]", "[LOCATION]", "[CATALOG]", "[DATABASE]", "[LOCK]")
+   *                   .toString())
+   *           .build();
+   *   ApiFuture<Empty> future = metastoreServiceClient.deleteLockCallable().futureCall(request);
+   *   // Do something.
+   *   future.get();
+   * }
+   * }</pre>
+   */
+  public final UnaryCallable<DeleteLockRequest, Empty> deleteLockCallable() {
+    return stub.deleteLockCallable();
+  }
+
+  // AUTO-GENERATED DOCUMENTATION AND METHOD.
+  /**
+   * Checks the state of a lock specified by the lock ID.
+   *
+   * <p>Sample code:
+   *
+   * <pre>{@code
+   * // This snippet has been automatically generated and should be regarded as a code template only.
+   * // It will require modifications to work:
+   * // - It may require correct/in-range values for request initialization.
+   * // - It may require specifying regional endpoints when creating the service client as shown in
+   * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
+   * try (MetastoreServiceClient metastoreServiceClient = MetastoreServiceClient.create()) {
+   *   LockName name = LockName.of("[PROJECT]", "[LOCATION]", "[CATALOG]", "[DATABASE]", "[LOCK]");
+   *   Lock response = metastoreServiceClient.checkLock(name);
+   * }
+   * }
+ * + * @param name Required. The name of the lock to check. Format: + * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}/locks/{lock_id} + * @throws com.google.api.gax.rpc.ApiException if the remote call fails + */ + public final Lock checkLock(LockName name) { + CheckLockRequest request = + CheckLockRequest.newBuilder().setName(name == null ? null : name.toString()).build(); + return checkLock(request); + } + + // AUTO-GENERATED DOCUMENTATION AND METHOD. + /** + * Checks the state of a lock specified by the lock ID. + * + *

Sample code: + * + *

{@code
+   * // This snippet has been automatically generated and should be regarded as a code template only.
+   * // It will require modifications to work:
+   * // - It may require correct/in-range values for request initialization.
+   * // - It may require specifying regional endpoints when creating the service client as shown in
+   * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
+   * try (MetastoreServiceClient metastoreServiceClient = MetastoreServiceClient.create()) {
+   *   String name =
+   *       LockName.of("[PROJECT]", "[LOCATION]", "[CATALOG]", "[DATABASE]", "[LOCK]").toString();
+   *   Lock response = metastoreServiceClient.checkLock(name);
+   * }
+   * }
+ * + * @param name Required. The name of the lock to check. Format: + * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}/locks/{lock_id} + * @throws com.google.api.gax.rpc.ApiException if the remote call fails + */ + public final Lock checkLock(String name) { + CheckLockRequest request = CheckLockRequest.newBuilder().setName(name).build(); + return checkLock(request); + } + + // AUTO-GENERATED DOCUMENTATION AND METHOD. + /** + * Checks the state of a lock specified by the lock ID. + * + *

Sample code: + * + *

{@code
+   * // This snippet has been automatically generated and should be regarded as a code template only.
+   * // It will require modifications to work:
+   * // - It may require correct/in-range values for request initialization.
+   * // - It may require specifying regional endpoints when creating the service client as shown in
+   * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
+   * try (MetastoreServiceClient metastoreServiceClient = MetastoreServiceClient.create()) {
+   *   CheckLockRequest request =
+   *       CheckLockRequest.newBuilder()
+   *           .setName(
+   *               LockName.of("[PROJECT]", "[LOCATION]", "[CATALOG]", "[DATABASE]", "[LOCK]")
+   *                   .toString())
+   *           .build();
+   *   Lock response = metastoreServiceClient.checkLock(request);
+   * }
+   * }
+ * + * @param request The request object containing all of the parameters for the API call. + * @throws com.google.api.gax.rpc.ApiException if the remote call fails + */ + public final Lock checkLock(CheckLockRequest request) { + return checkLockCallable().call(request); + } + + // AUTO-GENERATED DOCUMENTATION AND METHOD. + /** + * Checks the state of a lock specified by the lock ID. + * + *

Sample code: + * + *

{@code
+   * // This snippet has been automatically generated and should be regarded as a code template only.
+   * // It will require modifications to work:
+   * // - It may require correct/in-range values for request initialization.
+   * // - It may require specifying regional endpoints when creating the service client as shown in
+   * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
+   * try (MetastoreServiceClient metastoreServiceClient = MetastoreServiceClient.create()) {
+   *   CheckLockRequest request =
+   *       CheckLockRequest.newBuilder()
+   *           .setName(
+   *               LockName.of("[PROJECT]", "[LOCATION]", "[CATALOG]", "[DATABASE]", "[LOCK]")
+   *                   .toString())
+   *           .build();
+   *   ApiFuture<Lock> future = metastoreServiceClient.checkLockCallable().futureCall(request);
+   *   // Do something.
+   *   Lock response = future.get();
+   * }
+   * }</pre>
+   */
+  public final UnaryCallable<CheckLockRequest, Lock> checkLockCallable() {
+    return stub.checkLockCallable();
+  }
+
+  // AUTO-GENERATED DOCUMENTATION AND METHOD.
+  /**
+   * List all locks in a specified database.
+   *
+   * <p>Sample code:
+   *
+   * <pre>{@code
+   * // This snippet has been automatically generated and should be regarded as a code template only.
+   * // It will require modifications to work:
+   * // - It may require correct/in-range values for request initialization.
+   * // - It may require specifying regional endpoints when creating the service client as shown in
+   * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
+   * try (MetastoreServiceClient metastoreServiceClient = MetastoreServiceClient.create()) {
+   *   DatabaseName parent = DatabaseName.of("[PROJECT]", "[LOCATION]", "[CATALOG]", "[DATABASE]");
+   *   for (Lock element : metastoreServiceClient.listLocks(parent).iterateAll()) {
+   *     // doThingsWith(element);
+   *   }
+   * }
+   * }
+ * + * @param parent Required. The parent, which owns this collection of locks. Format: + * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id} + * @throws com.google.api.gax.rpc.ApiException if the remote call fails + */ + public final ListLocksPagedResponse listLocks(DatabaseName parent) { + ListLocksRequest request = + ListLocksRequest.newBuilder().setParent(parent == null ? null : parent.toString()).build(); + return listLocks(request); + } + + // AUTO-GENERATED DOCUMENTATION AND METHOD. + /** + * List all locks in a specified database. + * + *

Sample code: + * + *

{@code
+   * // This snippet has been automatically generated and should be regarded as a code template only.
+   * // It will require modifications to work:
+   * // - It may require correct/in-range values for request initialization.
+   * // - It may require specifying regional endpoints when creating the service client as shown in
+   * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
+   * try (MetastoreServiceClient metastoreServiceClient = MetastoreServiceClient.create()) {
+   *   String parent =
+   *       DatabaseName.of("[PROJECT]", "[LOCATION]", "[CATALOG]", "[DATABASE]").toString();
+   *   for (Lock element : metastoreServiceClient.listLocks(parent).iterateAll()) {
+   *     // doThingsWith(element);
+   *   }
+   * }
+   * }
+ * + * @param parent Required. The parent, which owns this collection of locks. Format: + * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id} + * @throws com.google.api.gax.rpc.ApiException if the remote call fails + */ + public final ListLocksPagedResponse listLocks(String parent) { + ListLocksRequest request = ListLocksRequest.newBuilder().setParent(parent).build(); + return listLocks(request); + } + + // AUTO-GENERATED DOCUMENTATION AND METHOD. + /** + * List all locks in a specified database. + * + *

Sample code: + * + *

{@code
+   * // This snippet has been automatically generated and should be regarded as a code template only.
+   * // It will require modifications to work:
+   * // - It may require correct/in-range values for request initialization.
+   * // - It may require specifying regional endpoints when creating the service client as shown in
+   * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
+   * try (MetastoreServiceClient metastoreServiceClient = MetastoreServiceClient.create()) {
+   *   ListLocksRequest request =
+   *       ListLocksRequest.newBuilder()
+   *           .setParent(
+   *               DatabaseName.of("[PROJECT]", "[LOCATION]", "[CATALOG]", "[DATABASE]").toString())
+   *           .setPageSize(883849137)
+   *           .setPageToken("pageToken873572522")
+   *           .build();
+   *   for (Lock element : metastoreServiceClient.listLocks(request).iterateAll()) {
+   *     // doThingsWith(element);
+   *   }
+   * }
+   * }
+ * + * @param request The request object containing all of the parameters for the API call. + * @throws com.google.api.gax.rpc.ApiException if the remote call fails + */ + public final ListLocksPagedResponse listLocks(ListLocksRequest request) { + return listLocksPagedCallable().call(request); + } + + // AUTO-GENERATED DOCUMENTATION AND METHOD. + /** + * List all locks in a specified database. + * + *

Sample code: + * + *

{@code
+   * // This snippet has been automatically generated and should be regarded as a code template only.
+   * // It will require modifications to work:
+   * // - It may require correct/in-range values for request initialization.
+   * // - It may require specifying regional endpoints when creating the service client as shown in
+   * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
+   * try (MetastoreServiceClient metastoreServiceClient = MetastoreServiceClient.create()) {
+   *   ListLocksRequest request =
+   *       ListLocksRequest.newBuilder()
+   *           .setParent(
+   *               DatabaseName.of("[PROJECT]", "[LOCATION]", "[CATALOG]", "[DATABASE]").toString())
+   *           .setPageSize(883849137)
+   *           .setPageToken("pageToken873572522")
+   *           .build();
+   *   ApiFuture<Lock> future = metastoreServiceClient.listLocksPagedCallable().futureCall(request);
+   *   // Do something.
+   *   for (Lock element : future.get().iterateAll()) {
+   *     // doThingsWith(element);
+   *   }
+   * }
+   * }</pre>
+   */
+  public final UnaryCallable<ListLocksRequest, ListLocksPagedResponse> listLocksPagedCallable() {
+    return stub.listLocksPagedCallable();
+  }
+
+  // AUTO-GENERATED DOCUMENTATION AND METHOD.
+  /**
+   * List all locks in a specified database.
+   *
+   * <p>Sample code:
+   *
+   * <pre>{@code
+   * // This snippet has been automatically generated and should be regarded as a code template only.
+   * // It will require modifications to work:
+   * // - It may require correct/in-range values for request initialization.
+   * // - It may require specifying regional endpoints when creating the service client as shown in
+   * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
+   * try (MetastoreServiceClient metastoreServiceClient = MetastoreServiceClient.create()) {
+   *   ListLocksRequest request =
+   *       ListLocksRequest.newBuilder()
+   *           .setParent(
+   *               DatabaseName.of("[PROJECT]", "[LOCATION]", "[CATALOG]", "[DATABASE]").toString())
+   *           .setPageSize(883849137)
+   *           .setPageToken("pageToken873572522")
+   *           .build();
+   *   while (true) {
+   *     ListLocksResponse response = metastoreServiceClient.listLocksCallable().call(request);
+   *     for (Lock element : response.getLocksList()) {
+   *       // doThingsWith(element);
+   *     }
+   *     String nextPageToken = response.getNextPageToken();
+   *     if (!Strings.isNullOrEmpty(nextPageToken)) {
+   *       request = request.toBuilder().setPageToken(nextPageToken).build();
+   *     } else {
+   *       break;
+   *     }
+   *   }
+   * }
+   * }</pre>
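+   *
+   * <p>The page-size and page-token literals in these generated samples are arbitrary
+   * placeholders; replace them with values appropriate for your workload, as the generated
+   * comments above indicate.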
+ */ + public final UnaryCallable listLocksCallable() { + return stub.listLocksCallable(); + } + + @Override + public final void close() { + stub.close(); + } + + @Override + public void shutdown() { + stub.shutdown(); + } + + @Override + public boolean isShutdown() { + return stub.isShutdown(); + } + + @Override + public boolean isTerminated() { + return stub.isTerminated(); + } + + @Override + public void shutdownNow() { + stub.shutdownNow(); + } + + @Override + public boolean awaitTermination(long duration, TimeUnit unit) throws InterruptedException { + return stub.awaitTermination(duration, unit); + } + + public static class ListCatalogsPagedResponse + extends AbstractPagedListResponse< + ListCatalogsRequest, + ListCatalogsResponse, + Catalog, + ListCatalogsPage, + ListCatalogsFixedSizeCollection> { + + public static ApiFuture createAsync( + PageContext context, + ApiFuture futureResponse) { + ApiFuture futurePage = + ListCatalogsPage.createEmptyPage().createPageAsync(context, futureResponse); + return ApiFutures.transform( + futurePage, + input -> new ListCatalogsPagedResponse(input), + MoreExecutors.directExecutor()); + } + + private ListCatalogsPagedResponse(ListCatalogsPage page) { + super(page, ListCatalogsFixedSizeCollection.createEmptyCollection()); + } + } + + public static class ListCatalogsPage + extends AbstractPage { + + private ListCatalogsPage( + PageContext context, + ListCatalogsResponse response) { + super(context, response); + } + + private static ListCatalogsPage createEmptyPage() { + return new ListCatalogsPage(null, null); + } + + @Override + protected ListCatalogsPage createPage( + PageContext context, + ListCatalogsResponse response) { + return new ListCatalogsPage(context, response); + } + + @Override + public ApiFuture createPageAsync( + PageContext context, + ApiFuture futureResponse) { + return super.createPageAsync(context, futureResponse); + } + } + + public static class ListCatalogsFixedSizeCollection + extends AbstractFixedSizeCollection< + ListCatalogsRequest, + ListCatalogsResponse, + Catalog, + ListCatalogsPage, + ListCatalogsFixedSizeCollection> { + + private ListCatalogsFixedSizeCollection(List pages, int collectionSize) { + super(pages, collectionSize); + } + + private static ListCatalogsFixedSizeCollection createEmptyCollection() { + return new ListCatalogsFixedSizeCollection(null, 0); + } + + @Override + protected ListCatalogsFixedSizeCollection createCollection( + List pages, int collectionSize) { + return new ListCatalogsFixedSizeCollection(pages, collectionSize); + } + } + + public static class ListDatabasesPagedResponse + extends AbstractPagedListResponse< + ListDatabasesRequest, + ListDatabasesResponse, + Database, + ListDatabasesPage, + ListDatabasesFixedSizeCollection> { + + public static ApiFuture createAsync( + PageContext context, + ApiFuture futureResponse) { + ApiFuture futurePage = + ListDatabasesPage.createEmptyPage().createPageAsync(context, futureResponse); + return ApiFutures.transform( + futurePage, + input -> new ListDatabasesPagedResponse(input), + MoreExecutors.directExecutor()); + } + + private ListDatabasesPagedResponse(ListDatabasesPage page) { + super(page, ListDatabasesFixedSizeCollection.createEmptyCollection()); + } + } + + public static class ListDatabasesPage + extends AbstractPage< + ListDatabasesRequest, ListDatabasesResponse, Database, ListDatabasesPage> { + + private ListDatabasesPage( + PageContext context, + ListDatabasesResponse response) { + super(context, response); + } + + private static ListDatabasesPage 
createEmptyPage() { + return new ListDatabasesPage(null, null); + } + + @Override + protected ListDatabasesPage createPage( + PageContext context, + ListDatabasesResponse response) { + return new ListDatabasesPage(context, response); + } + + @Override + public ApiFuture createPageAsync( + PageContext context, + ApiFuture futureResponse) { + return super.createPageAsync(context, futureResponse); + } + } + + public static class ListDatabasesFixedSizeCollection + extends AbstractFixedSizeCollection< + ListDatabasesRequest, + ListDatabasesResponse, + Database, + ListDatabasesPage, + ListDatabasesFixedSizeCollection> { + + private ListDatabasesFixedSizeCollection(List pages, int collectionSize) { + super(pages, collectionSize); + } + + private static ListDatabasesFixedSizeCollection createEmptyCollection() { + return new ListDatabasesFixedSizeCollection(null, 0); + } + + @Override + protected ListDatabasesFixedSizeCollection createCollection( + List pages, int collectionSize) { + return new ListDatabasesFixedSizeCollection(pages, collectionSize); + } + } + + public static class ListTablesPagedResponse + extends AbstractPagedListResponse< + ListTablesRequest, + ListTablesResponse, + Table, + ListTablesPage, + ListTablesFixedSizeCollection> { + + public static ApiFuture createAsync( + PageContext context, + ApiFuture futureResponse) { + ApiFuture futurePage = + ListTablesPage.createEmptyPage().createPageAsync(context, futureResponse); + return ApiFutures.transform( + futurePage, input -> new ListTablesPagedResponse(input), MoreExecutors.directExecutor()); + } + + private ListTablesPagedResponse(ListTablesPage page) { + super(page, ListTablesFixedSizeCollection.createEmptyCollection()); + } + } + + public static class ListTablesPage + extends AbstractPage { + + private ListTablesPage( + PageContext context, + ListTablesResponse response) { + super(context, response); + } + + private static ListTablesPage createEmptyPage() { + return new ListTablesPage(null, null); + } + + @Override + protected ListTablesPage createPage( + PageContext context, + ListTablesResponse response) { + return new ListTablesPage(context, response); + } + + @Override + public ApiFuture createPageAsync( + PageContext context, + ApiFuture futureResponse) { + return super.createPageAsync(context, futureResponse); + } + } + + public static class ListTablesFixedSizeCollection + extends AbstractFixedSizeCollection< + ListTablesRequest, + ListTablesResponse, + Table, + ListTablesPage, + ListTablesFixedSizeCollection> { + + private ListTablesFixedSizeCollection(List pages, int collectionSize) { + super(pages, collectionSize); + } + + private static ListTablesFixedSizeCollection createEmptyCollection() { + return new ListTablesFixedSizeCollection(null, 0); + } + + @Override + protected ListTablesFixedSizeCollection createCollection( + List pages, int collectionSize) { + return new ListTablesFixedSizeCollection(pages, collectionSize); + } + } + + public static class ListLocksPagedResponse + extends AbstractPagedListResponse< + ListLocksRequest, ListLocksResponse, Lock, ListLocksPage, ListLocksFixedSizeCollection> { + + public static ApiFuture createAsync( + PageContext context, + ApiFuture futureResponse) { + ApiFuture futurePage = + ListLocksPage.createEmptyPage().createPageAsync(context, futureResponse); + return ApiFutures.transform( + futurePage, input -> new ListLocksPagedResponse(input), MoreExecutors.directExecutor()); + } + + private ListLocksPagedResponse(ListLocksPage page) { + super(page, 
ListLocksFixedSizeCollection.createEmptyCollection()); + } + } + + public static class ListLocksPage + extends AbstractPage { + + private ListLocksPage( + PageContext context, + ListLocksResponse response) { + super(context, response); + } + + private static ListLocksPage createEmptyPage() { + return new ListLocksPage(null, null); + } + + @Override + protected ListLocksPage createPage( + PageContext context, + ListLocksResponse response) { + return new ListLocksPage(context, response); + } + + @Override + public ApiFuture createPageAsync( + PageContext context, + ApiFuture futureResponse) { + return super.createPageAsync(context, futureResponse); + } + } + + public static class ListLocksFixedSizeCollection + extends AbstractFixedSizeCollection< + ListLocksRequest, ListLocksResponse, Lock, ListLocksPage, ListLocksFixedSizeCollection> { + + private ListLocksFixedSizeCollection(List pages, int collectionSize) { + super(pages, collectionSize); + } + + private static ListLocksFixedSizeCollection createEmptyCollection() { + return new ListLocksFixedSizeCollection(null, 0); + } + + @Override + protected ListLocksFixedSizeCollection createCollection( + List pages, int collectionSize) { + return new ListLocksFixedSizeCollection(pages, collectionSize); + } + } +} diff --git a/java-biglake/google-cloud-biglake/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/MetastoreServiceSettings.java b/java-biglake/google-cloud-biglake/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/MetastoreServiceSettings.java new file mode 100644 index 000000000000..4edd0b4f4f92 --- /dev/null +++ b/java-biglake/google-cloud-biglake/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/MetastoreServiceSettings.java @@ -0,0 +1,393 @@ +/* + * Copyright 2022 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.cloud.bigquery.biglake.v1alpha1; + +import static com.google.cloud.bigquery.biglake.v1alpha1.MetastoreServiceClient.ListCatalogsPagedResponse; +import static com.google.cloud.bigquery.biglake.v1alpha1.MetastoreServiceClient.ListDatabasesPagedResponse; +import static com.google.cloud.bigquery.biglake.v1alpha1.MetastoreServiceClient.ListLocksPagedResponse; +import static com.google.cloud.bigquery.biglake.v1alpha1.MetastoreServiceClient.ListTablesPagedResponse; + +import com.google.api.core.ApiFunction; +import com.google.api.core.BetaApi; +import com.google.api.gax.core.GoogleCredentialsProvider; +import com.google.api.gax.core.InstantiatingExecutorProvider; +import com.google.api.gax.grpc.InstantiatingGrpcChannelProvider; +import com.google.api.gax.httpjson.InstantiatingHttpJsonChannelProvider; +import com.google.api.gax.rpc.ApiClientHeaderProvider; +import com.google.api.gax.rpc.ClientContext; +import com.google.api.gax.rpc.ClientSettings; +import com.google.api.gax.rpc.PagedCallSettings; +import com.google.api.gax.rpc.TransportChannelProvider; +import com.google.api.gax.rpc.UnaryCallSettings; +import com.google.cloud.bigquery.biglake.v1alpha1.stub.MetastoreServiceStubSettings; +import com.google.protobuf.Empty; +import java.io.IOException; +import java.util.List; +import javax.annotation.Generated; + +// AUTO-GENERATED DOCUMENTATION AND CLASS. +/** + * Settings class to configure an instance of {@link MetastoreServiceClient}. + * + *

+ * <p>The default instance has everything set to sensible defaults:
+ *
+ * <ul>
+ *   <li>The default service address (biglake.googleapis.com) and default port (443) are used.
+ *   <li>Credentials are acquired automatically through Application Default Credentials.
+ *   <li>Retries are configured for idempotent methods but not for non-idempotent methods.
+ * </ul>
+ *
+ * <p>The builder of this class is recursive, so contained classes are themselves builders. When
+ * build() is called, the tree of builders is called to create the complete settings object.
+ *
+ * <p>For example, to set the total timeout of createCatalog to 30 seconds:
+ *
+ * <pre>{@code
+ * // This snippet has been automatically generated and should be regarded as a code template only.
+ * // It will require modifications to work:
+ * // - It may require correct/in-range values for request initialization.
+ * // - It may require specifying regional endpoints when creating the service client as shown in
+ * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
+ * MetastoreServiceSettings.Builder metastoreServiceSettingsBuilder =
+ *     MetastoreServiceSettings.newBuilder();
+ * metastoreServiceSettingsBuilder
+ *     .createCatalogSettings()
+ *     .setRetrySettings(
+ *         metastoreServiceSettingsBuilder
+ *             .createCatalogSettings()
+ *             .getRetrySettings()
+ *             .toBuilder()
+ *             .setTotalTimeout(Duration.ofSeconds(30))
+ *             .build());
+ * MetastoreServiceSettings metastoreServiceSettings = metastoreServiceSettingsBuilder.build();
+ * }</pre>
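+ *
+ * <p>As a rough sketch (not part of the generated sample), the customized settings object built
+ * above would then typically be passed to {@code MetastoreServiceClient.create(...)}; the usage
+ * below is illustrative only:
+ *
+ * <pre>{@code
+ * // Sketch: assumes the metastoreServiceSettings built in the previous example.
+ * try (MetastoreServiceClient metastoreServiceClient =
+ *     MetastoreServiceClient.create(metastoreServiceSettings)) {
+ *   // Calls to createCatalog made through this client use the 30-second total timeout.
+ * }
+ * }</pre>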
+ */ +@BetaApi +@Generated("by gapic-generator-java") +public class MetastoreServiceSettings extends ClientSettings { + + /** Returns the object with the settings used for calls to createCatalog. */ + public UnaryCallSettings createCatalogSettings() { + return ((MetastoreServiceStubSettings) getStubSettings()).createCatalogSettings(); + } + + /** Returns the object with the settings used for calls to deleteCatalog. */ + public UnaryCallSettings deleteCatalogSettings() { + return ((MetastoreServiceStubSettings) getStubSettings()).deleteCatalogSettings(); + } + + /** Returns the object with the settings used for calls to getCatalog. */ + public UnaryCallSettings getCatalogSettings() { + return ((MetastoreServiceStubSettings) getStubSettings()).getCatalogSettings(); + } + + /** Returns the object with the settings used for calls to listCatalogs. */ + public PagedCallSettings + listCatalogsSettings() { + return ((MetastoreServiceStubSettings) getStubSettings()).listCatalogsSettings(); + } + + /** Returns the object with the settings used for calls to createDatabase. */ + public UnaryCallSettings createDatabaseSettings() { + return ((MetastoreServiceStubSettings) getStubSettings()).createDatabaseSettings(); + } + + /** Returns the object with the settings used for calls to deleteDatabase. */ + public UnaryCallSettings deleteDatabaseSettings() { + return ((MetastoreServiceStubSettings) getStubSettings()).deleteDatabaseSettings(); + } + + /** Returns the object with the settings used for calls to updateDatabase. */ + public UnaryCallSettings updateDatabaseSettings() { + return ((MetastoreServiceStubSettings) getStubSettings()).updateDatabaseSettings(); + } + + /** Returns the object with the settings used for calls to getDatabase. */ + public UnaryCallSettings getDatabaseSettings() { + return ((MetastoreServiceStubSettings) getStubSettings()).getDatabaseSettings(); + } + + /** Returns the object with the settings used for calls to listDatabases. */ + public PagedCallSettings + listDatabasesSettings() { + return ((MetastoreServiceStubSettings) getStubSettings()).listDatabasesSettings(); + } + + /** Returns the object with the settings used for calls to createTable. */ + public UnaryCallSettings createTableSettings() { + return ((MetastoreServiceStubSettings) getStubSettings()).createTableSettings(); + } + + /** Returns the object with the settings used for calls to deleteTable. */ + public UnaryCallSettings deleteTableSettings() { + return ((MetastoreServiceStubSettings) getStubSettings()).deleteTableSettings(); + } + + /** Returns the object with the settings used for calls to updateTable. */ + public UnaryCallSettings updateTableSettings() { + return ((MetastoreServiceStubSettings) getStubSettings()).updateTableSettings(); + } + + /** Returns the object with the settings used for calls to getTable. */ + public UnaryCallSettings getTableSettings() { + return ((MetastoreServiceStubSettings) getStubSettings()).getTableSettings(); + } + + /** Returns the object with the settings used for calls to listTables. */ + public PagedCallSettings + listTablesSettings() { + return ((MetastoreServiceStubSettings) getStubSettings()).listTablesSettings(); + } + + /** Returns the object with the settings used for calls to createLock. */ + public UnaryCallSettings createLockSettings() { + return ((MetastoreServiceStubSettings) getStubSettings()).createLockSettings(); + } + + /** Returns the object with the settings used for calls to deleteLock. 
*/ + public UnaryCallSettings deleteLockSettings() { + return ((MetastoreServiceStubSettings) getStubSettings()).deleteLockSettings(); + } + + /** Returns the object with the settings used for calls to checkLock. */ + public UnaryCallSettings checkLockSettings() { + return ((MetastoreServiceStubSettings) getStubSettings()).checkLockSettings(); + } + + /** Returns the object with the settings used for calls to listLocks. */ + public PagedCallSettings + listLocksSettings() { + return ((MetastoreServiceStubSettings) getStubSettings()).listLocksSettings(); + } + + public static final MetastoreServiceSettings create(MetastoreServiceStubSettings stub) + throws IOException { + return new MetastoreServiceSettings.Builder(stub.toBuilder()).build(); + } + + /** Returns a builder for the default ExecutorProvider for this service. */ + public static InstantiatingExecutorProvider.Builder defaultExecutorProviderBuilder() { + return MetastoreServiceStubSettings.defaultExecutorProviderBuilder(); + } + + /** Returns the default service endpoint. */ + public static String getDefaultEndpoint() { + return MetastoreServiceStubSettings.getDefaultEndpoint(); + } + + /** Returns the default service scopes. */ + public static List getDefaultServiceScopes() { + return MetastoreServiceStubSettings.getDefaultServiceScopes(); + } + + /** Returns a builder for the default credentials for this service. */ + public static GoogleCredentialsProvider.Builder defaultCredentialsProviderBuilder() { + return MetastoreServiceStubSettings.defaultCredentialsProviderBuilder(); + } + + /** Returns a builder for the default gRPC ChannelProvider for this service. */ + public static InstantiatingGrpcChannelProvider.Builder defaultGrpcTransportProviderBuilder() { + return MetastoreServiceStubSettings.defaultGrpcTransportProviderBuilder(); + } + + /** Returns a builder for the default REST ChannelProvider for this service. */ + @BetaApi + public static InstantiatingHttpJsonChannelProvider.Builder + defaultHttpJsonTransportProviderBuilder() { + return MetastoreServiceStubSettings.defaultHttpJsonTransportProviderBuilder(); + } + + public static TransportChannelProvider defaultTransportChannelProvider() { + return MetastoreServiceStubSettings.defaultTransportChannelProvider(); + } + + @BetaApi("The surface for customizing headers is not stable yet and may change in the future.") + public static ApiClientHeaderProvider.Builder defaultApiClientHeaderProviderBuilder() { + return MetastoreServiceStubSettings.defaultApiClientHeaderProviderBuilder(); + } + + /** Returns a new gRPC builder for this class. */ + public static Builder newBuilder() { + return Builder.createDefault(); + } + + /** Returns a new REST builder for this class. */ + @BetaApi + public static Builder newHttpJsonBuilder() { + return Builder.createHttpJsonDefault(); + } + + /** Returns a new builder for this class. */ + public static Builder newBuilder(ClientContext clientContext) { + return new Builder(clientContext); + } + + /** Returns a builder containing all the values of this settings class. */ + public Builder toBuilder() { + return new Builder(this); + } + + protected MetastoreServiceSettings(Builder settingsBuilder) throws IOException { + super(settingsBuilder); + } + + /** Builder for MetastoreServiceSettings. 
*/ + public static class Builder extends ClientSettings.Builder { + + protected Builder() throws IOException { + this(((ClientContext) null)); + } + + protected Builder(ClientContext clientContext) { + super(MetastoreServiceStubSettings.newBuilder(clientContext)); + } + + protected Builder(MetastoreServiceSettings settings) { + super(settings.getStubSettings().toBuilder()); + } + + protected Builder(MetastoreServiceStubSettings.Builder stubSettings) { + super(stubSettings); + } + + private static Builder createDefault() { + return new Builder(MetastoreServiceStubSettings.newBuilder()); + } + + @BetaApi + private static Builder createHttpJsonDefault() { + return new Builder(MetastoreServiceStubSettings.newHttpJsonBuilder()); + } + + public MetastoreServiceStubSettings.Builder getStubSettingsBuilder() { + return ((MetastoreServiceStubSettings.Builder) getStubSettings()); + } + + /** + * Applies the given settings updater function to all of the unary API methods in this service. + * + *

Note: This method does not support applying settings to streaming methods. + */ + public Builder applyToAllUnaryMethods( + ApiFunction, Void> settingsUpdater) { + super.applyToAllUnaryMethods( + getStubSettingsBuilder().unaryMethodSettingsBuilders(), settingsUpdater); + return this; + } + + /** Returns the builder for the settings used for calls to createCatalog. */ + public UnaryCallSettings.Builder createCatalogSettings() { + return getStubSettingsBuilder().createCatalogSettings(); + } + + /** Returns the builder for the settings used for calls to deleteCatalog. */ + public UnaryCallSettings.Builder deleteCatalogSettings() { + return getStubSettingsBuilder().deleteCatalogSettings(); + } + + /** Returns the builder for the settings used for calls to getCatalog. */ + public UnaryCallSettings.Builder getCatalogSettings() { + return getStubSettingsBuilder().getCatalogSettings(); + } + + /** Returns the builder for the settings used for calls to listCatalogs. */ + public PagedCallSettings.Builder< + ListCatalogsRequest, ListCatalogsResponse, ListCatalogsPagedResponse> + listCatalogsSettings() { + return getStubSettingsBuilder().listCatalogsSettings(); + } + + /** Returns the builder for the settings used for calls to createDatabase. */ + public UnaryCallSettings.Builder createDatabaseSettings() { + return getStubSettingsBuilder().createDatabaseSettings(); + } + + /** Returns the builder for the settings used for calls to deleteDatabase. */ + public UnaryCallSettings.Builder deleteDatabaseSettings() { + return getStubSettingsBuilder().deleteDatabaseSettings(); + } + + /** Returns the builder for the settings used for calls to updateDatabase. */ + public UnaryCallSettings.Builder updateDatabaseSettings() { + return getStubSettingsBuilder().updateDatabaseSettings(); + } + + /** Returns the builder for the settings used for calls to getDatabase. */ + public UnaryCallSettings.Builder getDatabaseSettings() { + return getStubSettingsBuilder().getDatabaseSettings(); + } + + /** Returns the builder for the settings used for calls to listDatabases. */ + public PagedCallSettings.Builder< + ListDatabasesRequest, ListDatabasesResponse, ListDatabasesPagedResponse> + listDatabasesSettings() { + return getStubSettingsBuilder().listDatabasesSettings(); + } + + /** Returns the builder for the settings used for calls to createTable. */ + public UnaryCallSettings.Builder createTableSettings() { + return getStubSettingsBuilder().createTableSettings(); + } + + /** Returns the builder for the settings used for calls to deleteTable. */ + public UnaryCallSettings.Builder deleteTableSettings() { + return getStubSettingsBuilder().deleteTableSettings(); + } + + /** Returns the builder for the settings used for calls to updateTable. */ + public UnaryCallSettings.Builder updateTableSettings() { + return getStubSettingsBuilder().updateTableSettings(); + } + + /** Returns the builder for the settings used for calls to getTable. */ + public UnaryCallSettings.Builder getTableSettings() { + return getStubSettingsBuilder().getTableSettings(); + } + + /** Returns the builder for the settings used for calls to listTables. */ + public PagedCallSettings.Builder + listTablesSettings() { + return getStubSettingsBuilder().listTablesSettings(); + } + + /** Returns the builder for the settings used for calls to createLock. */ + public UnaryCallSettings.Builder createLockSettings() { + return getStubSettingsBuilder().createLockSettings(); + } + + /** Returns the builder for the settings used for calls to deleteLock. 
*/ + public UnaryCallSettings.Builder deleteLockSettings() { + return getStubSettingsBuilder().deleteLockSettings(); + } + + /** Returns the builder for the settings used for calls to checkLock. */ + public UnaryCallSettings.Builder checkLockSettings() { + return getStubSettingsBuilder().checkLockSettings(); + } + + /** Returns the builder for the settings used for calls to listLocks. */ + public PagedCallSettings.Builder + listLocksSettings() { + return getStubSettingsBuilder().listLocksSettings(); + } + + @Override + public MetastoreServiceSettings build() throws IOException { + return new MetastoreServiceSettings(this); + } + } +} diff --git a/java-biglake/google-cloud-biglake/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/gapic_metadata.json b/java-biglake/google-cloud-biglake/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/gapic_metadata.json new file mode 100644 index 000000000000..95b24abff408 --- /dev/null +++ b/java-biglake/google-cloud-biglake/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/gapic_metadata.json @@ -0,0 +1,72 @@ +{ + "schema": "1.0", + "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", + "language": "java", + "protoPackage": "google.cloud.bigquery.biglake.v1alpha1", + "libraryPackage": "com.google.cloud.bigquery.biglake.v1alpha1", + "services": { + "MetastoreService": { + "clients": { + "grpc": { + "libraryClient": "MetastoreServiceClient", + "rpcs": { + "CheckLock": { + "methods": ["checkLock", "checkLock", "checkLock", "checkLockCallable"] + }, + "CreateCatalog": { + "methods": ["createCatalog", "createCatalog", "createCatalog", "createCatalogCallable"] + }, + "CreateDatabase": { + "methods": ["createDatabase", "createDatabase", "createDatabase", "createDatabaseCallable"] + }, + "CreateLock": { + "methods": ["createLock", "createLock", "createLock", "createLockCallable"] + }, + "CreateTable": { + "methods": ["createTable", "createTable", "createTable", "createTableCallable"] + }, + "DeleteCatalog": { + "methods": ["deleteCatalog", "deleteCatalog", "deleteCatalog", "deleteCatalogCallable"] + }, + "DeleteDatabase": { + "methods": ["deleteDatabase", "deleteDatabase", "deleteDatabase", "deleteDatabaseCallable"] + }, + "DeleteLock": { + "methods": ["deleteLock", "deleteLock", "deleteLock", "deleteLockCallable"] + }, + "DeleteTable": { + "methods": ["deleteTable", "deleteTable", "deleteTable", "deleteTableCallable"] + }, + "GetCatalog": { + "methods": ["getCatalog", "getCatalog", "getCatalog", "getCatalogCallable"] + }, + "GetDatabase": { + "methods": ["getDatabase", "getDatabase", "getDatabase", "getDatabaseCallable"] + }, + "GetTable": { + "methods": ["getTable", "getTable", "getTable", "getTableCallable"] + }, + "ListCatalogs": { + "methods": ["listCatalogs", "listCatalogs", "listCatalogs", "listCatalogsPagedCallable", "listCatalogsCallable"] + }, + "ListDatabases": { + "methods": ["listDatabases", "listDatabases", "listDatabases", "listDatabasesPagedCallable", "listDatabasesCallable"] + }, + "ListLocks": { + "methods": ["listLocks", "listLocks", "listLocks", "listLocksPagedCallable", "listLocksCallable"] + }, + "ListTables": { + "methods": ["listTables", "listTables", "listTables", "listTablesPagedCallable", "listTablesCallable"] + }, + "UpdateDatabase": { + "methods": ["updateDatabase", "updateDatabase", "updateDatabaseCallable"] + }, + "UpdateTable": { + "methods": ["updateTable", "updateTable", "updateTableCallable"] + } + } + } + } + } + } +} \ No newline at end of file diff --git 
a/java-biglake/google-cloud-biglake/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/package-info.java b/java-biglake/google-cloud-biglake/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/package-info.java new file mode 100644 index 000000000000..1e340e17a164 --- /dev/null +++ b/java-biglake/google-cloud-biglake/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/package-info.java @@ -0,0 +1,56 @@ +/* + * Copyright 2022 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/** + * A client to BigLake API + * + *

+ * <p>The interfaces provided are listed below, along with usage samples.
+ *
+ * <p>======================= MetastoreServiceClient =======================
+ *
+ * <p>Service Description: BigLake Metastore is a serverless, highly available, multi-tenant runtime
+ * metastore for Google Cloud Data Analytics products.
+ *
+ * <p>The BigLake Metastore API defines the following resource model:
+ *
+ * <ul>
+ *   <li>A collection of Google Cloud projects: `/projects/*`
+ *   <li>Each project has a collection of available locations: `/locations/*`
+ *   <li>Each location has a collection of catalogs: `/catalogs/*`
+ *   <li>Each catalog has a collection of databases: `/databases/*`
+ *   <li>Each database has a collection of tables: `/tables/*`
+ * </ul>
+ *
+ * <p>Sample for MetastoreServiceClient:
+ *
+ * <pre>{@code
+ * // This snippet has been automatically generated and should be regarded as a code template only.
+ * // It will require modifications to work:
+ * // - It may require correct/in-range values for request initialization.
+ * // - It may require specifying regional endpoints when creating the service client as shown in
+ * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
+ * try (MetastoreServiceClient metastoreServiceClient = MetastoreServiceClient.create()) {
+ *   LocationName parent = LocationName.of("[PROJECT]", "[LOCATION]");
+ *   Catalog catalog = Catalog.newBuilder().build();
+ *   String catalogId = "catalogId1455933204";
+ *   Catalog response = metastoreServiceClient.createCatalog(parent, catalog, catalogId);
+ * }
+ * }</pre>
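+ *
+ * <p>A second, illustrative sketch (not part of the generated sample) of how the paged
+ * listCatalogs surface might be used to walk the catalog collection described above; the parent
+ * value is a placeholder:
+ *
+ * <pre>{@code
+ * // Sketch: iterates every Catalog under a location using the paged response.
+ * try (MetastoreServiceClient metastoreServiceClient = MetastoreServiceClient.create()) {
+ *   LocationName parent = LocationName.of("[PROJECT]", "[LOCATION]");
+ *   for (Catalog catalog : metastoreServiceClient.listCatalogs(parent).iterateAll()) {
+ *     System.out.println(catalog.getName());
+ *   }
+ * }
+ * }</pre>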
+ */ +@Generated("by gapic-generator-java") +package com.google.cloud.bigquery.biglake.v1alpha1; + +import javax.annotation.Generated; diff --git a/java-biglake/google-cloud-biglake/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/stub/GrpcMetastoreServiceCallableFactory.java b/java-biglake/google-cloud-biglake/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/stub/GrpcMetastoreServiceCallableFactory.java new file mode 100644 index 000000000000..4b86c5307130 --- /dev/null +++ b/java-biglake/google-cloud-biglake/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/stub/GrpcMetastoreServiceCallableFactory.java @@ -0,0 +1,115 @@ +/* + * Copyright 2022 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.biglake.v1alpha1.stub; + +import com.google.api.core.BetaApi; +import com.google.api.gax.grpc.GrpcCallSettings; +import com.google.api.gax.grpc.GrpcCallableFactory; +import com.google.api.gax.grpc.GrpcStubCallableFactory; +import com.google.api.gax.rpc.BatchingCallSettings; +import com.google.api.gax.rpc.BidiStreamingCallable; +import com.google.api.gax.rpc.ClientContext; +import com.google.api.gax.rpc.ClientStreamingCallable; +import com.google.api.gax.rpc.OperationCallSettings; +import com.google.api.gax.rpc.OperationCallable; +import com.google.api.gax.rpc.PagedCallSettings; +import com.google.api.gax.rpc.ServerStreamingCallSettings; +import com.google.api.gax.rpc.ServerStreamingCallable; +import com.google.api.gax.rpc.StreamingCallSettings; +import com.google.api.gax.rpc.UnaryCallSettings; +import com.google.api.gax.rpc.UnaryCallable; +import com.google.longrunning.Operation; +import com.google.longrunning.stub.OperationsStub; +import javax.annotation.Generated; + +// AUTO-GENERATED DOCUMENTATION AND CLASS. +/** + * gRPC callable factory implementation for the MetastoreService service API. + * + *

This class is for advanced usage. + */ +@BetaApi +@Generated("by gapic-generator-java") +public class GrpcMetastoreServiceCallableFactory implements GrpcStubCallableFactory { + + @Override + public UnaryCallable createUnaryCallable( + GrpcCallSettings grpcCallSettings, + UnaryCallSettings callSettings, + ClientContext clientContext) { + return GrpcCallableFactory.createUnaryCallable(grpcCallSettings, callSettings, clientContext); + } + + @Override + public + UnaryCallable createPagedCallable( + GrpcCallSettings grpcCallSettings, + PagedCallSettings callSettings, + ClientContext clientContext) { + return GrpcCallableFactory.createPagedCallable(grpcCallSettings, callSettings, clientContext); + } + + @Override + public UnaryCallable createBatchingCallable( + GrpcCallSettings grpcCallSettings, + BatchingCallSettings callSettings, + ClientContext clientContext) { + return GrpcCallableFactory.createBatchingCallable( + grpcCallSettings, callSettings, clientContext); + } + + @Override + public + OperationCallable createOperationCallable( + GrpcCallSettings grpcCallSettings, + OperationCallSettings callSettings, + ClientContext clientContext, + OperationsStub operationsStub) { + return GrpcCallableFactory.createOperationCallable( + grpcCallSettings, callSettings, clientContext, operationsStub); + } + + @Override + public + BidiStreamingCallable createBidiStreamingCallable( + GrpcCallSettings grpcCallSettings, + StreamingCallSettings callSettings, + ClientContext clientContext) { + return GrpcCallableFactory.createBidiStreamingCallable( + grpcCallSettings, callSettings, clientContext); + } + + @Override + public + ServerStreamingCallable createServerStreamingCallable( + GrpcCallSettings grpcCallSettings, + ServerStreamingCallSettings callSettings, + ClientContext clientContext) { + return GrpcCallableFactory.createServerStreamingCallable( + grpcCallSettings, callSettings, clientContext); + } + + @Override + public + ClientStreamingCallable createClientStreamingCallable( + GrpcCallSettings grpcCallSettings, + StreamingCallSettings callSettings, + ClientContext clientContext) { + return GrpcCallableFactory.createClientStreamingCallable( + grpcCallSettings, callSettings, clientContext); + } +} diff --git a/java-biglake/google-cloud-biglake/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/stub/GrpcMetastoreServiceStub.java b/java-biglake/google-cloud-biglake/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/stub/GrpcMetastoreServiceStub.java new file mode 100644 index 000000000000..a2af24c3e41b --- /dev/null +++ b/java-biglake/google-cloud-biglake/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/stub/GrpcMetastoreServiceStub.java @@ -0,0 +1,716 @@ +/* + * Copyright 2022 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.cloud.bigquery.biglake.v1alpha1.stub; + +import static com.google.cloud.bigquery.biglake.v1alpha1.MetastoreServiceClient.ListCatalogsPagedResponse; +import static com.google.cloud.bigquery.biglake.v1alpha1.MetastoreServiceClient.ListDatabasesPagedResponse; +import static com.google.cloud.bigquery.biglake.v1alpha1.MetastoreServiceClient.ListLocksPagedResponse; +import static com.google.cloud.bigquery.biglake.v1alpha1.MetastoreServiceClient.ListTablesPagedResponse; + +import com.google.api.core.BetaApi; +import com.google.api.gax.core.BackgroundResource; +import com.google.api.gax.core.BackgroundResourceAggregation; +import com.google.api.gax.grpc.GrpcCallSettings; +import com.google.api.gax.grpc.GrpcStubCallableFactory; +import com.google.api.gax.rpc.ClientContext; +import com.google.api.gax.rpc.UnaryCallable; +import com.google.cloud.bigquery.biglake.v1alpha1.Catalog; +import com.google.cloud.bigquery.biglake.v1alpha1.CheckLockRequest; +import com.google.cloud.bigquery.biglake.v1alpha1.CreateCatalogRequest; +import com.google.cloud.bigquery.biglake.v1alpha1.CreateDatabaseRequest; +import com.google.cloud.bigquery.biglake.v1alpha1.CreateLockRequest; +import com.google.cloud.bigquery.biglake.v1alpha1.CreateTableRequest; +import com.google.cloud.bigquery.biglake.v1alpha1.Database; +import com.google.cloud.bigquery.biglake.v1alpha1.DeleteCatalogRequest; +import com.google.cloud.bigquery.biglake.v1alpha1.DeleteDatabaseRequest; +import com.google.cloud.bigquery.biglake.v1alpha1.DeleteLockRequest; +import com.google.cloud.bigquery.biglake.v1alpha1.DeleteTableRequest; +import com.google.cloud.bigquery.biglake.v1alpha1.GetCatalogRequest; +import com.google.cloud.bigquery.biglake.v1alpha1.GetDatabaseRequest; +import com.google.cloud.bigquery.biglake.v1alpha1.GetTableRequest; +import com.google.cloud.bigquery.biglake.v1alpha1.ListCatalogsRequest; +import com.google.cloud.bigquery.biglake.v1alpha1.ListCatalogsResponse; +import com.google.cloud.bigquery.biglake.v1alpha1.ListDatabasesRequest; +import com.google.cloud.bigquery.biglake.v1alpha1.ListDatabasesResponse; +import com.google.cloud.bigquery.biglake.v1alpha1.ListLocksRequest; +import com.google.cloud.bigquery.biglake.v1alpha1.ListLocksResponse; +import com.google.cloud.bigquery.biglake.v1alpha1.ListTablesRequest; +import com.google.cloud.bigquery.biglake.v1alpha1.ListTablesResponse; +import com.google.cloud.bigquery.biglake.v1alpha1.Lock; +import com.google.cloud.bigquery.biglake.v1alpha1.Table; +import com.google.cloud.bigquery.biglake.v1alpha1.UpdateDatabaseRequest; +import com.google.cloud.bigquery.biglake.v1alpha1.UpdateTableRequest; +import com.google.common.collect.ImmutableMap; +import com.google.longrunning.stub.GrpcOperationsStub; +import com.google.protobuf.Empty; +import io.grpc.MethodDescriptor; +import io.grpc.protobuf.ProtoUtils; +import java.io.IOException; +import java.util.concurrent.TimeUnit; +import javax.annotation.Generated; + +// AUTO-GENERATED DOCUMENTATION AND CLASS. +/** + * gRPC stub implementation for the MetastoreService service API. + * + *

This class is for advanced usage and reflects the underlying API directly. + */ +@BetaApi +@Generated("by gapic-generator-java") +public class GrpcMetastoreServiceStub extends MetastoreServiceStub { + private static final MethodDescriptor + createCatalogMethodDescriptor = + MethodDescriptor.newBuilder() + .setType(MethodDescriptor.MethodType.UNARY) + .setFullMethodName( + "google.cloud.bigquery.biglake.v1alpha1.MetastoreService/CreateCatalog") + .setRequestMarshaller( + ProtoUtils.marshaller(CreateCatalogRequest.getDefaultInstance())) + .setResponseMarshaller(ProtoUtils.marshaller(Catalog.getDefaultInstance())) + .build(); + + private static final MethodDescriptor + deleteCatalogMethodDescriptor = + MethodDescriptor.newBuilder() + .setType(MethodDescriptor.MethodType.UNARY) + .setFullMethodName( + "google.cloud.bigquery.biglake.v1alpha1.MetastoreService/DeleteCatalog") + .setRequestMarshaller( + ProtoUtils.marshaller(DeleteCatalogRequest.getDefaultInstance())) + .setResponseMarshaller(ProtoUtils.marshaller(Catalog.getDefaultInstance())) + .build(); + + private static final MethodDescriptor getCatalogMethodDescriptor = + MethodDescriptor.newBuilder() + .setType(MethodDescriptor.MethodType.UNARY) + .setFullMethodName("google.cloud.bigquery.biglake.v1alpha1.MetastoreService/GetCatalog") + .setRequestMarshaller(ProtoUtils.marshaller(GetCatalogRequest.getDefaultInstance())) + .setResponseMarshaller(ProtoUtils.marshaller(Catalog.getDefaultInstance())) + .build(); + + private static final MethodDescriptor + listCatalogsMethodDescriptor = + MethodDescriptor.newBuilder() + .setType(MethodDescriptor.MethodType.UNARY) + .setFullMethodName( + "google.cloud.bigquery.biglake.v1alpha1.MetastoreService/ListCatalogs") + .setRequestMarshaller(ProtoUtils.marshaller(ListCatalogsRequest.getDefaultInstance())) + .setResponseMarshaller( + ProtoUtils.marshaller(ListCatalogsResponse.getDefaultInstance())) + .build(); + + private static final MethodDescriptor + createDatabaseMethodDescriptor = + MethodDescriptor.newBuilder() + .setType(MethodDescriptor.MethodType.UNARY) + .setFullMethodName( + "google.cloud.bigquery.biglake.v1alpha1.MetastoreService/CreateDatabase") + .setRequestMarshaller( + ProtoUtils.marshaller(CreateDatabaseRequest.getDefaultInstance())) + .setResponseMarshaller(ProtoUtils.marshaller(Database.getDefaultInstance())) + .build(); + + private static final MethodDescriptor + deleteDatabaseMethodDescriptor = + MethodDescriptor.newBuilder() + .setType(MethodDescriptor.MethodType.UNARY) + .setFullMethodName( + "google.cloud.bigquery.biglake.v1alpha1.MetastoreService/DeleteDatabase") + .setRequestMarshaller( + ProtoUtils.marshaller(DeleteDatabaseRequest.getDefaultInstance())) + .setResponseMarshaller(ProtoUtils.marshaller(Database.getDefaultInstance())) + .build(); + + private static final MethodDescriptor + updateDatabaseMethodDescriptor = + MethodDescriptor.newBuilder() + .setType(MethodDescriptor.MethodType.UNARY) + .setFullMethodName( + "google.cloud.bigquery.biglake.v1alpha1.MetastoreService/UpdateDatabase") + .setRequestMarshaller( + ProtoUtils.marshaller(UpdateDatabaseRequest.getDefaultInstance())) + .setResponseMarshaller(ProtoUtils.marshaller(Database.getDefaultInstance())) + .build(); + + private static final MethodDescriptor getDatabaseMethodDescriptor = + MethodDescriptor.newBuilder() + .setType(MethodDescriptor.MethodType.UNARY) + .setFullMethodName("google.cloud.bigquery.biglake.v1alpha1.MetastoreService/GetDatabase") + 
.setRequestMarshaller(ProtoUtils.marshaller(GetDatabaseRequest.getDefaultInstance())) + .setResponseMarshaller(ProtoUtils.marshaller(Database.getDefaultInstance())) + .build(); + + private static final MethodDescriptor + listDatabasesMethodDescriptor = + MethodDescriptor.newBuilder() + .setType(MethodDescriptor.MethodType.UNARY) + .setFullMethodName( + "google.cloud.bigquery.biglake.v1alpha1.MetastoreService/ListDatabases") + .setRequestMarshaller( + ProtoUtils.marshaller(ListDatabasesRequest.getDefaultInstance())) + .setResponseMarshaller( + ProtoUtils.marshaller(ListDatabasesResponse.getDefaultInstance())) + .build(); + + private static final MethodDescriptor createTableMethodDescriptor = + MethodDescriptor.newBuilder() + .setType(MethodDescriptor.MethodType.UNARY) + .setFullMethodName("google.cloud.bigquery.biglake.v1alpha1.MetastoreService/CreateTable") + .setRequestMarshaller(ProtoUtils.marshaller(CreateTableRequest.getDefaultInstance())) + .setResponseMarshaller(ProtoUtils.marshaller(Table.getDefaultInstance())) + .build(); + + private static final MethodDescriptor deleteTableMethodDescriptor = + MethodDescriptor.newBuilder() + .setType(MethodDescriptor.MethodType.UNARY) + .setFullMethodName("google.cloud.bigquery.biglake.v1alpha1.MetastoreService/DeleteTable") + .setRequestMarshaller(ProtoUtils.marshaller(DeleteTableRequest.getDefaultInstance())) + .setResponseMarshaller(ProtoUtils.marshaller(Table.getDefaultInstance())) + .build(); + + private static final MethodDescriptor updateTableMethodDescriptor = + MethodDescriptor.newBuilder() + .setType(MethodDescriptor.MethodType.UNARY) + .setFullMethodName("google.cloud.bigquery.biglake.v1alpha1.MetastoreService/UpdateTable") + .setRequestMarshaller(ProtoUtils.marshaller(UpdateTableRequest.getDefaultInstance())) + .setResponseMarshaller(ProtoUtils.marshaller(Table.getDefaultInstance())) + .build(); + + private static final MethodDescriptor getTableMethodDescriptor = + MethodDescriptor.newBuilder() + .setType(MethodDescriptor.MethodType.UNARY) + .setFullMethodName("google.cloud.bigquery.biglake.v1alpha1.MetastoreService/GetTable") + .setRequestMarshaller(ProtoUtils.marshaller(GetTableRequest.getDefaultInstance())) + .setResponseMarshaller(ProtoUtils.marshaller(Table.getDefaultInstance())) + .build(); + + private static final MethodDescriptor + listTablesMethodDescriptor = + MethodDescriptor.newBuilder() + .setType(MethodDescriptor.MethodType.UNARY) + .setFullMethodName( + "google.cloud.bigquery.biglake.v1alpha1.MetastoreService/ListTables") + .setRequestMarshaller(ProtoUtils.marshaller(ListTablesRequest.getDefaultInstance())) + .setResponseMarshaller(ProtoUtils.marshaller(ListTablesResponse.getDefaultInstance())) + .build(); + + private static final MethodDescriptor createLockMethodDescriptor = + MethodDescriptor.newBuilder() + .setType(MethodDescriptor.MethodType.UNARY) + .setFullMethodName("google.cloud.bigquery.biglake.v1alpha1.MetastoreService/CreateLock") + .setRequestMarshaller(ProtoUtils.marshaller(CreateLockRequest.getDefaultInstance())) + .setResponseMarshaller(ProtoUtils.marshaller(Lock.getDefaultInstance())) + .build(); + + private static final MethodDescriptor deleteLockMethodDescriptor = + MethodDescriptor.newBuilder() + .setType(MethodDescriptor.MethodType.UNARY) + .setFullMethodName("google.cloud.bigquery.biglake.v1alpha1.MetastoreService/DeleteLock") + .setRequestMarshaller(ProtoUtils.marshaller(DeleteLockRequest.getDefaultInstance())) + .setResponseMarshaller(ProtoUtils.marshaller(Empty.getDefaultInstance())) + .build(); + 
+ private static final MethodDescriptor checkLockMethodDescriptor = + MethodDescriptor.newBuilder() + .setType(MethodDescriptor.MethodType.UNARY) + .setFullMethodName("google.cloud.bigquery.biglake.v1alpha1.MetastoreService/CheckLock") + .setRequestMarshaller(ProtoUtils.marshaller(CheckLockRequest.getDefaultInstance())) + .setResponseMarshaller(ProtoUtils.marshaller(Lock.getDefaultInstance())) + .build(); + + private static final MethodDescriptor + listLocksMethodDescriptor = + MethodDescriptor.newBuilder() + .setType(MethodDescriptor.MethodType.UNARY) + .setFullMethodName( + "google.cloud.bigquery.biglake.v1alpha1.MetastoreService/ListLocks") + .setRequestMarshaller(ProtoUtils.marshaller(ListLocksRequest.getDefaultInstance())) + .setResponseMarshaller(ProtoUtils.marshaller(ListLocksResponse.getDefaultInstance())) + .build(); + + private final UnaryCallable createCatalogCallable; + private final UnaryCallable deleteCatalogCallable; + private final UnaryCallable getCatalogCallable; + private final UnaryCallable listCatalogsCallable; + private final UnaryCallable + listCatalogsPagedCallable; + private final UnaryCallable createDatabaseCallable; + private final UnaryCallable deleteDatabaseCallable; + private final UnaryCallable updateDatabaseCallable; + private final UnaryCallable getDatabaseCallable; + private final UnaryCallable listDatabasesCallable; + private final UnaryCallable + listDatabasesPagedCallable; + private final UnaryCallable createTableCallable; + private final UnaryCallable deleteTableCallable; + private final UnaryCallable updateTableCallable; + private final UnaryCallable getTableCallable; + private final UnaryCallable listTablesCallable; + private final UnaryCallable listTablesPagedCallable; + private final UnaryCallable createLockCallable; + private final UnaryCallable deleteLockCallable; + private final UnaryCallable checkLockCallable; + private final UnaryCallable listLocksCallable; + private final UnaryCallable listLocksPagedCallable; + + private final BackgroundResource backgroundResources; + private final GrpcOperationsStub operationsStub; + private final GrpcStubCallableFactory callableFactory; + + public static final GrpcMetastoreServiceStub create(MetastoreServiceStubSettings settings) + throws IOException { + return new GrpcMetastoreServiceStub(settings, ClientContext.create(settings)); + } + + public static final GrpcMetastoreServiceStub create(ClientContext clientContext) + throws IOException { + return new GrpcMetastoreServiceStub( + MetastoreServiceStubSettings.newBuilder().build(), clientContext); + } + + public static final GrpcMetastoreServiceStub create( + ClientContext clientContext, GrpcStubCallableFactory callableFactory) throws IOException { + return new GrpcMetastoreServiceStub( + MetastoreServiceStubSettings.newBuilder().build(), clientContext, callableFactory); + } + + /** + * Constructs an instance of GrpcMetastoreServiceStub, using the given settings. This is protected + * so that it is easy to make a subclass, but otherwise, the static factory methods should be + * preferred. + */ + protected GrpcMetastoreServiceStub( + MetastoreServiceStubSettings settings, ClientContext clientContext) throws IOException { + this(settings, clientContext, new GrpcMetastoreServiceCallableFactory()); + } + + /** + * Constructs an instance of GrpcMetastoreServiceStub, using the given settings. This is protected + * so that it is easy to make a subclass, but otherwise, the static factory methods should be + * preferred. 
+ */ + protected GrpcMetastoreServiceStub( + MetastoreServiceStubSettings settings, + ClientContext clientContext, + GrpcStubCallableFactory callableFactory) + throws IOException { + this.callableFactory = callableFactory; + this.operationsStub = GrpcOperationsStub.create(clientContext, callableFactory); + + GrpcCallSettings createCatalogTransportSettings = + GrpcCallSettings.newBuilder() + .setMethodDescriptor(createCatalogMethodDescriptor) + .setParamsExtractor( + request -> { + ImmutableMap.Builder params = ImmutableMap.builder(); + params.put("parent", String.valueOf(request.getParent())); + return params.build(); + }) + .build(); + GrpcCallSettings deleteCatalogTransportSettings = + GrpcCallSettings.newBuilder() + .setMethodDescriptor(deleteCatalogMethodDescriptor) + .setParamsExtractor( + request -> { + ImmutableMap.Builder params = ImmutableMap.builder(); + params.put("name", String.valueOf(request.getName())); + return params.build(); + }) + .build(); + GrpcCallSettings getCatalogTransportSettings = + GrpcCallSettings.newBuilder() + .setMethodDescriptor(getCatalogMethodDescriptor) + .setParamsExtractor( + request -> { + ImmutableMap.Builder params = ImmutableMap.builder(); + params.put("name", String.valueOf(request.getName())); + return params.build(); + }) + .build(); + GrpcCallSettings listCatalogsTransportSettings = + GrpcCallSettings.newBuilder() + .setMethodDescriptor(listCatalogsMethodDescriptor) + .setParamsExtractor( + request -> { + ImmutableMap.Builder params = ImmutableMap.builder(); + params.put("parent", String.valueOf(request.getParent())); + return params.build(); + }) + .build(); + GrpcCallSettings createDatabaseTransportSettings = + GrpcCallSettings.newBuilder() + .setMethodDescriptor(createDatabaseMethodDescriptor) + .setParamsExtractor( + request -> { + ImmutableMap.Builder params = ImmutableMap.builder(); + params.put("parent", String.valueOf(request.getParent())); + return params.build(); + }) + .build(); + GrpcCallSettings deleteDatabaseTransportSettings = + GrpcCallSettings.newBuilder() + .setMethodDescriptor(deleteDatabaseMethodDescriptor) + .setParamsExtractor( + request -> { + ImmutableMap.Builder params = ImmutableMap.builder(); + params.put("name", String.valueOf(request.getName())); + return params.build(); + }) + .build(); + GrpcCallSettings updateDatabaseTransportSettings = + GrpcCallSettings.newBuilder() + .setMethodDescriptor(updateDatabaseMethodDescriptor) + .setParamsExtractor( + request -> { + ImmutableMap.Builder params = ImmutableMap.builder(); + params.put("database.name", String.valueOf(request.getDatabase().getName())); + return params.build(); + }) + .build(); + GrpcCallSettings getDatabaseTransportSettings = + GrpcCallSettings.newBuilder() + .setMethodDescriptor(getDatabaseMethodDescriptor) + .setParamsExtractor( + request -> { + ImmutableMap.Builder params = ImmutableMap.builder(); + params.put("name", String.valueOf(request.getName())); + return params.build(); + }) + .build(); + GrpcCallSettings listDatabasesTransportSettings = + GrpcCallSettings.newBuilder() + .setMethodDescriptor(listDatabasesMethodDescriptor) + .setParamsExtractor( + request -> { + ImmutableMap.Builder params = ImmutableMap.builder(); + params.put("parent", String.valueOf(request.getParent())); + return params.build(); + }) + .build(); + GrpcCallSettings createTableTransportSettings = + GrpcCallSettings.newBuilder() + .setMethodDescriptor(createTableMethodDescriptor) + .setParamsExtractor( + request -> { + ImmutableMap.Builder params = ImmutableMap.builder(); + 
params.put("parent", String.valueOf(request.getParent())); + return params.build(); + }) + .build(); + GrpcCallSettings deleteTableTransportSettings = + GrpcCallSettings.newBuilder() + .setMethodDescriptor(deleteTableMethodDescriptor) + .setParamsExtractor( + request -> { + ImmutableMap.Builder params = ImmutableMap.builder(); + params.put("name", String.valueOf(request.getName())); + return params.build(); + }) + .build(); + GrpcCallSettings updateTableTransportSettings = + GrpcCallSettings.newBuilder() + .setMethodDescriptor(updateTableMethodDescriptor) + .setParamsExtractor( + request -> { + ImmutableMap.Builder params = ImmutableMap.builder(); + params.put("table.name", String.valueOf(request.getTable().getName())); + return params.build(); + }) + .build(); + GrpcCallSettings getTableTransportSettings = + GrpcCallSettings.newBuilder() + .setMethodDescriptor(getTableMethodDescriptor) + .setParamsExtractor( + request -> { + ImmutableMap.Builder params = ImmutableMap.builder(); + params.put("name", String.valueOf(request.getName())); + return params.build(); + }) + .build(); + GrpcCallSettings listTablesTransportSettings = + GrpcCallSettings.newBuilder() + .setMethodDescriptor(listTablesMethodDescriptor) + .setParamsExtractor( + request -> { + ImmutableMap.Builder params = ImmutableMap.builder(); + params.put("parent", String.valueOf(request.getParent())); + return params.build(); + }) + .build(); + GrpcCallSettings createLockTransportSettings = + GrpcCallSettings.newBuilder() + .setMethodDescriptor(createLockMethodDescriptor) + .setParamsExtractor( + request -> { + ImmutableMap.Builder params = ImmutableMap.builder(); + params.put("parent", String.valueOf(request.getParent())); + return params.build(); + }) + .build(); + GrpcCallSettings deleteLockTransportSettings = + GrpcCallSettings.newBuilder() + .setMethodDescriptor(deleteLockMethodDescriptor) + .setParamsExtractor( + request -> { + ImmutableMap.Builder params = ImmutableMap.builder(); + params.put("name", String.valueOf(request.getName())); + return params.build(); + }) + .build(); + GrpcCallSettings checkLockTransportSettings = + GrpcCallSettings.newBuilder() + .setMethodDescriptor(checkLockMethodDescriptor) + .setParamsExtractor( + request -> { + ImmutableMap.Builder params = ImmutableMap.builder(); + params.put("name", String.valueOf(request.getName())); + return params.build(); + }) + .build(); + GrpcCallSettings listLocksTransportSettings = + GrpcCallSettings.newBuilder() + .setMethodDescriptor(listLocksMethodDescriptor) + .setParamsExtractor( + request -> { + ImmutableMap.Builder params = ImmutableMap.builder(); + params.put("parent", String.valueOf(request.getParent())); + return params.build(); + }) + .build(); + + this.createCatalogCallable = + callableFactory.createUnaryCallable( + createCatalogTransportSettings, settings.createCatalogSettings(), clientContext); + this.deleteCatalogCallable = + callableFactory.createUnaryCallable( + deleteCatalogTransportSettings, settings.deleteCatalogSettings(), clientContext); + this.getCatalogCallable = + callableFactory.createUnaryCallable( + getCatalogTransportSettings, settings.getCatalogSettings(), clientContext); + this.listCatalogsCallable = + callableFactory.createUnaryCallable( + listCatalogsTransportSettings, settings.listCatalogsSettings(), clientContext); + this.listCatalogsPagedCallable = + callableFactory.createPagedCallable( + listCatalogsTransportSettings, settings.listCatalogsSettings(), clientContext); + this.createDatabaseCallable = + 
callableFactory.createUnaryCallable( + createDatabaseTransportSettings, settings.createDatabaseSettings(), clientContext); + this.deleteDatabaseCallable = + callableFactory.createUnaryCallable( + deleteDatabaseTransportSettings, settings.deleteDatabaseSettings(), clientContext); + this.updateDatabaseCallable = + callableFactory.createUnaryCallable( + updateDatabaseTransportSettings, settings.updateDatabaseSettings(), clientContext); + this.getDatabaseCallable = + callableFactory.createUnaryCallable( + getDatabaseTransportSettings, settings.getDatabaseSettings(), clientContext); + this.listDatabasesCallable = + callableFactory.createUnaryCallable( + listDatabasesTransportSettings, settings.listDatabasesSettings(), clientContext); + this.listDatabasesPagedCallable = + callableFactory.createPagedCallable( + listDatabasesTransportSettings, settings.listDatabasesSettings(), clientContext); + this.createTableCallable = + callableFactory.createUnaryCallable( + createTableTransportSettings, settings.createTableSettings(), clientContext); + this.deleteTableCallable = + callableFactory.createUnaryCallable( + deleteTableTransportSettings, settings.deleteTableSettings(), clientContext); + this.updateTableCallable = + callableFactory.createUnaryCallable( + updateTableTransportSettings, settings.updateTableSettings(), clientContext); + this.getTableCallable = + callableFactory.createUnaryCallable( + getTableTransportSettings, settings.getTableSettings(), clientContext); + this.listTablesCallable = + callableFactory.createUnaryCallable( + listTablesTransportSettings, settings.listTablesSettings(), clientContext); + this.listTablesPagedCallable = + callableFactory.createPagedCallable( + listTablesTransportSettings, settings.listTablesSettings(), clientContext); + this.createLockCallable = + callableFactory.createUnaryCallable( + createLockTransportSettings, settings.createLockSettings(), clientContext); + this.deleteLockCallable = + callableFactory.createUnaryCallable( + deleteLockTransportSettings, settings.deleteLockSettings(), clientContext); + this.checkLockCallable = + callableFactory.createUnaryCallable( + checkLockTransportSettings, settings.checkLockSettings(), clientContext); + this.listLocksCallable = + callableFactory.createUnaryCallable( + listLocksTransportSettings, settings.listLocksSettings(), clientContext); + this.listLocksPagedCallable = + callableFactory.createPagedCallable( + listLocksTransportSettings, settings.listLocksSettings(), clientContext); + + this.backgroundResources = + new BackgroundResourceAggregation(clientContext.getBackgroundResources()); + } + + public GrpcOperationsStub getOperationsStub() { + return operationsStub; + } + + @Override + public UnaryCallable createCatalogCallable() { + return createCatalogCallable; + } + + @Override + public UnaryCallable deleteCatalogCallable() { + return deleteCatalogCallable; + } + + @Override + public UnaryCallable getCatalogCallable() { + return getCatalogCallable; + } + + @Override + public UnaryCallable listCatalogsCallable() { + return listCatalogsCallable; + } + + @Override + public UnaryCallable listCatalogsPagedCallable() { + return listCatalogsPagedCallable; + } + + @Override + public UnaryCallable createDatabaseCallable() { + return createDatabaseCallable; + } + + @Override + public UnaryCallable deleteDatabaseCallable() { + return deleteDatabaseCallable; + } + + @Override + public UnaryCallable updateDatabaseCallable() { + return updateDatabaseCallable; + } + + @Override + public UnaryCallable getDatabaseCallable() { + 
return getDatabaseCallable; + } + + @Override + public UnaryCallable listDatabasesCallable() { + return listDatabasesCallable; + } + + @Override + public UnaryCallable + listDatabasesPagedCallable() { + return listDatabasesPagedCallable; + } + + @Override + public UnaryCallable createTableCallable() { + return createTableCallable; + } + + @Override + public UnaryCallable deleteTableCallable() { + return deleteTableCallable; + } + + @Override + public UnaryCallable updateTableCallable() { + return updateTableCallable; + } + + @Override + public UnaryCallable getTableCallable() { + return getTableCallable; + } + + @Override + public UnaryCallable listTablesCallable() { + return listTablesCallable; + } + + @Override + public UnaryCallable listTablesPagedCallable() { + return listTablesPagedCallable; + } + + @Override + public UnaryCallable createLockCallable() { + return createLockCallable; + } + + @Override + public UnaryCallable deleteLockCallable() { + return deleteLockCallable; + } + + @Override + public UnaryCallable checkLockCallable() { + return checkLockCallable; + } + + @Override + public UnaryCallable listLocksCallable() { + return listLocksCallable; + } + + @Override + public UnaryCallable listLocksPagedCallable() { + return listLocksPagedCallable; + } + + @Override + public final void close() { + try { + backgroundResources.close(); + } catch (RuntimeException e) { + throw e; + } catch (Exception e) { + throw new IllegalStateException("Failed to close resource", e); + } + } + + @Override + public void shutdown() { + backgroundResources.shutdown(); + } + + @Override + public boolean isShutdown() { + return backgroundResources.isShutdown(); + } + + @Override + public boolean isTerminated() { + return backgroundResources.isTerminated(); + } + + @Override + public void shutdownNow() { + backgroundResources.shutdownNow(); + } + + @Override + public boolean awaitTermination(long duration, TimeUnit unit) throws InterruptedException { + return backgroundResources.awaitTermination(duration, unit); + } +} diff --git a/java-biglake/google-cloud-biglake/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/stub/HttpJsonMetastoreServiceCallableFactory.java b/java-biglake/google-cloud-biglake/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/stub/HttpJsonMetastoreServiceCallableFactory.java new file mode 100644 index 000000000000..43e575df064e --- /dev/null +++ b/java-biglake/google-cloud-biglake/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/stub/HttpJsonMetastoreServiceCallableFactory.java @@ -0,0 +1,105 @@ +/* + * Copyright 2022 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.cloud.bigquery.biglake.v1alpha1.stub; + +import com.google.api.core.BetaApi; +import com.google.api.gax.httpjson.HttpJsonCallSettings; +import com.google.api.gax.httpjson.HttpJsonCallableFactory; +import com.google.api.gax.httpjson.HttpJsonOperationSnapshotCallable; +import com.google.api.gax.httpjson.HttpJsonStubCallableFactory; +import com.google.api.gax.httpjson.longrunning.stub.OperationsStub; +import com.google.api.gax.rpc.BatchingCallSettings; +import com.google.api.gax.rpc.ClientContext; +import com.google.api.gax.rpc.OperationCallSettings; +import com.google.api.gax.rpc.OperationCallable; +import com.google.api.gax.rpc.PagedCallSettings; +import com.google.api.gax.rpc.ServerStreamingCallSettings; +import com.google.api.gax.rpc.ServerStreamingCallable; +import com.google.api.gax.rpc.UnaryCallSettings; +import com.google.api.gax.rpc.UnaryCallable; +import com.google.longrunning.Operation; +import javax.annotation.Generated; + +// AUTO-GENERATED DOCUMENTATION AND CLASS. +/** + * REST callable factory implementation for the MetastoreService service API. + * + *
<p>
This class is for advanced usage. + */ +@BetaApi +@Generated("by gapic-generator-java") +public class HttpJsonMetastoreServiceCallableFactory + implements HttpJsonStubCallableFactory { + + @Override + public UnaryCallable createUnaryCallable( + HttpJsonCallSettings httpJsonCallSettings, + UnaryCallSettings callSettings, + ClientContext clientContext) { + return HttpJsonCallableFactory.createUnaryCallable( + httpJsonCallSettings, callSettings, clientContext); + } + + @Override + public + UnaryCallable createPagedCallable( + HttpJsonCallSettings httpJsonCallSettings, + PagedCallSettings callSettings, + ClientContext clientContext) { + return HttpJsonCallableFactory.createPagedCallable( + httpJsonCallSettings, callSettings, clientContext); + } + + @Override + public UnaryCallable createBatchingCallable( + HttpJsonCallSettings httpJsonCallSettings, + BatchingCallSettings callSettings, + ClientContext clientContext) { + return HttpJsonCallableFactory.createBatchingCallable( + httpJsonCallSettings, callSettings, clientContext); + } + + @BetaApi( + "The surface for long-running operations is not stable yet and may change in the future.") + @Override + public + OperationCallable createOperationCallable( + HttpJsonCallSettings httpJsonCallSettings, + OperationCallSettings callSettings, + ClientContext clientContext, + OperationsStub operationsStub) { + UnaryCallable innerCallable = + HttpJsonCallableFactory.createBaseUnaryCallable( + httpJsonCallSettings, callSettings.getInitialCallSettings(), clientContext); + HttpJsonOperationSnapshotCallable initialCallable = + new HttpJsonOperationSnapshotCallable( + innerCallable, + httpJsonCallSettings.getMethodDescriptor().getOperationSnapshotFactory()); + return HttpJsonCallableFactory.createOperationCallable( + callSettings, clientContext, operationsStub.longRunningClient(), initialCallable); + } + + @Override + public + ServerStreamingCallable createServerStreamingCallable( + HttpJsonCallSettings httpJsonCallSettings, + ServerStreamingCallSettings callSettings, + ClientContext clientContext) { + return HttpJsonCallableFactory.createServerStreamingCallable( + httpJsonCallSettings, callSettings, clientContext); + } +} diff --git a/java-biglake/google-cloud-biglake/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/stub/HttpJsonMetastoreServiceStub.java b/java-biglake/google-cloud-biglake/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/stub/HttpJsonMetastoreServiceStub.java new file mode 100644 index 000000000000..d32ff816a75c --- /dev/null +++ b/java-biglake/google-cloud-biglake/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/stub/HttpJsonMetastoreServiceStub.java @@ -0,0 +1,1129 @@ +/* + * Copyright 2022 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.cloud.bigquery.biglake.v1alpha1.stub; + +import static com.google.cloud.bigquery.biglake.v1alpha1.MetastoreServiceClient.ListCatalogsPagedResponse; +import static com.google.cloud.bigquery.biglake.v1alpha1.MetastoreServiceClient.ListDatabasesPagedResponse; +import static com.google.cloud.bigquery.biglake.v1alpha1.MetastoreServiceClient.ListLocksPagedResponse; +import static com.google.cloud.bigquery.biglake.v1alpha1.MetastoreServiceClient.ListTablesPagedResponse; + +import com.google.api.core.BetaApi; +import com.google.api.core.InternalApi; +import com.google.api.gax.core.BackgroundResource; +import com.google.api.gax.core.BackgroundResourceAggregation; +import com.google.api.gax.httpjson.ApiMethodDescriptor; +import com.google.api.gax.httpjson.HttpJsonCallSettings; +import com.google.api.gax.httpjson.HttpJsonStubCallableFactory; +import com.google.api.gax.httpjson.ProtoMessageRequestFormatter; +import com.google.api.gax.httpjson.ProtoMessageResponseParser; +import com.google.api.gax.httpjson.ProtoRestSerializer; +import com.google.api.gax.rpc.ClientContext; +import com.google.api.gax.rpc.UnaryCallable; +import com.google.cloud.bigquery.biglake.v1alpha1.Catalog; +import com.google.cloud.bigquery.biglake.v1alpha1.CheckLockRequest; +import com.google.cloud.bigquery.biglake.v1alpha1.CreateCatalogRequest; +import com.google.cloud.bigquery.biglake.v1alpha1.CreateDatabaseRequest; +import com.google.cloud.bigquery.biglake.v1alpha1.CreateLockRequest; +import com.google.cloud.bigquery.biglake.v1alpha1.CreateTableRequest; +import com.google.cloud.bigquery.biglake.v1alpha1.Database; +import com.google.cloud.bigquery.biglake.v1alpha1.DeleteCatalogRequest; +import com.google.cloud.bigquery.biglake.v1alpha1.DeleteDatabaseRequest; +import com.google.cloud.bigquery.biglake.v1alpha1.DeleteLockRequest; +import com.google.cloud.bigquery.biglake.v1alpha1.DeleteTableRequest; +import com.google.cloud.bigquery.biglake.v1alpha1.GetCatalogRequest; +import com.google.cloud.bigquery.biglake.v1alpha1.GetDatabaseRequest; +import com.google.cloud.bigquery.biglake.v1alpha1.GetTableRequest; +import com.google.cloud.bigquery.biglake.v1alpha1.ListCatalogsRequest; +import com.google.cloud.bigquery.biglake.v1alpha1.ListCatalogsResponse; +import com.google.cloud.bigquery.biglake.v1alpha1.ListDatabasesRequest; +import com.google.cloud.bigquery.biglake.v1alpha1.ListDatabasesResponse; +import com.google.cloud.bigquery.biglake.v1alpha1.ListLocksRequest; +import com.google.cloud.bigquery.biglake.v1alpha1.ListLocksResponse; +import com.google.cloud.bigquery.biglake.v1alpha1.ListTablesRequest; +import com.google.cloud.bigquery.biglake.v1alpha1.ListTablesResponse; +import com.google.cloud.bigquery.biglake.v1alpha1.Lock; +import com.google.cloud.bigquery.biglake.v1alpha1.Table; +import com.google.cloud.bigquery.biglake.v1alpha1.UpdateDatabaseRequest; +import com.google.cloud.bigquery.biglake.v1alpha1.UpdateTableRequest; +import com.google.protobuf.Empty; +import com.google.protobuf.TypeRegistry; +import java.io.IOException; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.concurrent.TimeUnit; +import javax.annotation.Generated; + +// AUTO-GENERATED DOCUMENTATION AND CLASS. +/** + * REST stub implementation for the MetastoreService service API. + * + *
<p>
This class is for advanced usage and reflects the underlying API directly. + */ +@BetaApi +@Generated("by gapic-generator-java") +public class HttpJsonMetastoreServiceStub extends MetastoreServiceStub { + private static final TypeRegistry typeRegistry = TypeRegistry.newBuilder().build(); + + private static final ApiMethodDescriptor + createCatalogMethodDescriptor = + ApiMethodDescriptor.newBuilder() + .setFullMethodName( + "google.cloud.bigquery.biglake.v1alpha1.MetastoreService/CreateCatalog") + .setHttpMethod("POST") + .setType(ApiMethodDescriptor.MethodType.UNARY) + .setRequestFormatter( + ProtoMessageRequestFormatter.newBuilder() + .setPath( + "/v1alpha1/{parent=projects/*/locations/*}/catalogs", + request -> { + Map fields = new HashMap<>(); + ProtoRestSerializer serializer = + ProtoRestSerializer.create(); + serializer.putPathParam(fields, "parent", request.getParent()); + return fields; + }) + .setQueryParamsExtractor( + request -> { + Map> fields = new HashMap<>(); + ProtoRestSerializer serializer = + ProtoRestSerializer.create(); + serializer.putQueryParam(fields, "catalogId", request.getCatalogId()); + serializer.putQueryParam(fields, "$alt", "json;enum-encoding=int"); + return fields; + }) + .setRequestBodyExtractor( + request -> + ProtoRestSerializer.create() + .toBody("catalog", request.getCatalog(), true)) + .build()) + .setResponseParser( + ProtoMessageResponseParser.newBuilder() + .setDefaultInstance(Catalog.getDefaultInstance()) + .setDefaultTypeRegistry(typeRegistry) + .build()) + .build(); + + private static final ApiMethodDescriptor + deleteCatalogMethodDescriptor = + ApiMethodDescriptor.newBuilder() + .setFullMethodName( + "google.cloud.bigquery.biglake.v1alpha1.MetastoreService/DeleteCatalog") + .setHttpMethod("DELETE") + .setType(ApiMethodDescriptor.MethodType.UNARY) + .setRequestFormatter( + ProtoMessageRequestFormatter.newBuilder() + .setPath( + "/v1alpha1/{name=projects/*/locations/*/catalogs/*}", + request -> { + Map fields = new HashMap<>(); + ProtoRestSerializer serializer = + ProtoRestSerializer.create(); + serializer.putPathParam(fields, "name", request.getName()); + return fields; + }) + .setQueryParamsExtractor( + request -> { + Map> fields = new HashMap<>(); + ProtoRestSerializer serializer = + ProtoRestSerializer.create(); + serializer.putQueryParam(fields, "$alt", "json;enum-encoding=int"); + return fields; + }) + .setRequestBodyExtractor(request -> null) + .build()) + .setResponseParser( + ProtoMessageResponseParser.newBuilder() + .setDefaultInstance(Catalog.getDefaultInstance()) + .setDefaultTypeRegistry(typeRegistry) + .build()) + .build(); + + private static final ApiMethodDescriptor getCatalogMethodDescriptor = + ApiMethodDescriptor.newBuilder() + .setFullMethodName("google.cloud.bigquery.biglake.v1alpha1.MetastoreService/GetCatalog") + .setHttpMethod("GET") + .setType(ApiMethodDescriptor.MethodType.UNARY) + .setRequestFormatter( + ProtoMessageRequestFormatter.newBuilder() + .setPath( + "/v1alpha1/{name=projects/*/locations/*/catalogs/*}", + request -> { + Map fields = new HashMap<>(); + ProtoRestSerializer serializer = + ProtoRestSerializer.create(); + serializer.putPathParam(fields, "name", request.getName()); + return fields; + }) + .setQueryParamsExtractor( + request -> { + Map> fields = new HashMap<>(); + ProtoRestSerializer serializer = + ProtoRestSerializer.create(); + serializer.putQueryParam(fields, "$alt", "json;enum-encoding=int"); + return fields; + }) + .setRequestBodyExtractor(request -> null) + .build()) + .setResponseParser( + 
ProtoMessageResponseParser.newBuilder() + .setDefaultInstance(Catalog.getDefaultInstance()) + .setDefaultTypeRegistry(typeRegistry) + .build()) + .build(); + + private static final ApiMethodDescriptor + listCatalogsMethodDescriptor = + ApiMethodDescriptor.newBuilder() + .setFullMethodName( + "google.cloud.bigquery.biglake.v1alpha1.MetastoreService/ListCatalogs") + .setHttpMethod("GET") + .setType(ApiMethodDescriptor.MethodType.UNARY) + .setRequestFormatter( + ProtoMessageRequestFormatter.newBuilder() + .setPath( + "/v1alpha1/{parent=projects/*/locations/*}/catalogs", + request -> { + Map fields = new HashMap<>(); + ProtoRestSerializer serializer = + ProtoRestSerializer.create(); + serializer.putPathParam(fields, "parent", request.getParent()); + return fields; + }) + .setQueryParamsExtractor( + request -> { + Map> fields = new HashMap<>(); + ProtoRestSerializer serializer = + ProtoRestSerializer.create(); + serializer.putQueryParam(fields, "pageSize", request.getPageSize()); + serializer.putQueryParam(fields, "pageToken", request.getPageToken()); + serializer.putQueryParam(fields, "$alt", "json;enum-encoding=int"); + return fields; + }) + .setRequestBodyExtractor(request -> null) + .build()) + .setResponseParser( + ProtoMessageResponseParser.newBuilder() + .setDefaultInstance(ListCatalogsResponse.getDefaultInstance()) + .setDefaultTypeRegistry(typeRegistry) + .build()) + .build(); + + private static final ApiMethodDescriptor + createDatabaseMethodDescriptor = + ApiMethodDescriptor.newBuilder() + .setFullMethodName( + "google.cloud.bigquery.biglake.v1alpha1.MetastoreService/CreateDatabase") + .setHttpMethod("POST") + .setType(ApiMethodDescriptor.MethodType.UNARY) + .setRequestFormatter( + ProtoMessageRequestFormatter.newBuilder() + .setPath( + "/v1alpha1/{parent=projects/*/locations/*/catalogs/*}/databases", + request -> { + Map fields = new HashMap<>(); + ProtoRestSerializer serializer = + ProtoRestSerializer.create(); + serializer.putPathParam(fields, "parent", request.getParent()); + return fields; + }) + .setQueryParamsExtractor( + request -> { + Map> fields = new HashMap<>(); + ProtoRestSerializer serializer = + ProtoRestSerializer.create(); + serializer.putQueryParam(fields, "databaseId", request.getDatabaseId()); + serializer.putQueryParam(fields, "$alt", "json;enum-encoding=int"); + return fields; + }) + .setRequestBodyExtractor( + request -> + ProtoRestSerializer.create() + .toBody("database", request.getDatabase(), true)) + .build()) + .setResponseParser( + ProtoMessageResponseParser.newBuilder() + .setDefaultInstance(Database.getDefaultInstance()) + .setDefaultTypeRegistry(typeRegistry) + .build()) + .build(); + + private static final ApiMethodDescriptor + deleteDatabaseMethodDescriptor = + ApiMethodDescriptor.newBuilder() + .setFullMethodName( + "google.cloud.bigquery.biglake.v1alpha1.MetastoreService/DeleteDatabase") + .setHttpMethod("DELETE") + .setType(ApiMethodDescriptor.MethodType.UNARY) + .setRequestFormatter( + ProtoMessageRequestFormatter.newBuilder() + .setPath( + "/v1alpha1/{name=projects/*/locations/*/catalogs/*/databases/*}", + request -> { + Map fields = new HashMap<>(); + ProtoRestSerializer serializer = + ProtoRestSerializer.create(); + serializer.putPathParam(fields, "name", request.getName()); + return fields; + }) + .setQueryParamsExtractor( + request -> { + Map> fields = new HashMap<>(); + ProtoRestSerializer serializer = + ProtoRestSerializer.create(); + serializer.putQueryParam(fields, "$alt", "json;enum-encoding=int"); + return fields; + }) + 
.setRequestBodyExtractor(request -> null) + .build()) + .setResponseParser( + ProtoMessageResponseParser.newBuilder() + .setDefaultInstance(Database.getDefaultInstance()) + .setDefaultTypeRegistry(typeRegistry) + .build()) + .build(); + + private static final ApiMethodDescriptor + updateDatabaseMethodDescriptor = + ApiMethodDescriptor.newBuilder() + .setFullMethodName( + "google.cloud.bigquery.biglake.v1alpha1.MetastoreService/UpdateDatabase") + .setHttpMethod("PATCH") + .setType(ApiMethodDescriptor.MethodType.UNARY) + .setRequestFormatter( + ProtoMessageRequestFormatter.newBuilder() + .setPath( + "/v1alpha1/{database.name=projects/*/locations/*/catalogs/*/databases/*}", + request -> { + Map fields = new HashMap<>(); + ProtoRestSerializer serializer = + ProtoRestSerializer.create(); + serializer.putPathParam( + fields, "database.name", request.getDatabase().getName()); + return fields; + }) + .setQueryParamsExtractor( + request -> { + Map> fields = new HashMap<>(); + ProtoRestSerializer serializer = + ProtoRestSerializer.create(); + serializer.putQueryParam(fields, "updateMask", request.getUpdateMask()); + serializer.putQueryParam(fields, "$alt", "json;enum-encoding=int"); + return fields; + }) + .setRequestBodyExtractor( + request -> + ProtoRestSerializer.create() + .toBody("database", request.getDatabase(), true)) + .build()) + .setResponseParser( + ProtoMessageResponseParser.newBuilder() + .setDefaultInstance(Database.getDefaultInstance()) + .setDefaultTypeRegistry(typeRegistry) + .build()) + .build(); + + private static final ApiMethodDescriptor + getDatabaseMethodDescriptor = + ApiMethodDescriptor.newBuilder() + .setFullMethodName( + "google.cloud.bigquery.biglake.v1alpha1.MetastoreService/GetDatabase") + .setHttpMethod("GET") + .setType(ApiMethodDescriptor.MethodType.UNARY) + .setRequestFormatter( + ProtoMessageRequestFormatter.newBuilder() + .setPath( + "/v1alpha1/{name=projects/*/locations/*/catalogs/*/databases/*}", + request -> { + Map fields = new HashMap<>(); + ProtoRestSerializer serializer = + ProtoRestSerializer.create(); + serializer.putPathParam(fields, "name", request.getName()); + return fields; + }) + .setQueryParamsExtractor( + request -> { + Map> fields = new HashMap<>(); + ProtoRestSerializer serializer = + ProtoRestSerializer.create(); + serializer.putQueryParam(fields, "$alt", "json;enum-encoding=int"); + return fields; + }) + .setRequestBodyExtractor(request -> null) + .build()) + .setResponseParser( + ProtoMessageResponseParser.newBuilder() + .setDefaultInstance(Database.getDefaultInstance()) + .setDefaultTypeRegistry(typeRegistry) + .build()) + .build(); + + private static final ApiMethodDescriptor + listDatabasesMethodDescriptor = + ApiMethodDescriptor.newBuilder() + .setFullMethodName( + "google.cloud.bigquery.biglake.v1alpha1.MetastoreService/ListDatabases") + .setHttpMethod("GET") + .setType(ApiMethodDescriptor.MethodType.UNARY) + .setRequestFormatter( + ProtoMessageRequestFormatter.newBuilder() + .setPath( + "/v1alpha1/{parent=projects/*/locations/*/catalogs/*}/databases", + request -> { + Map fields = new HashMap<>(); + ProtoRestSerializer serializer = + ProtoRestSerializer.create(); + serializer.putPathParam(fields, "parent", request.getParent()); + return fields; + }) + .setQueryParamsExtractor( + request -> { + Map> fields = new HashMap<>(); + ProtoRestSerializer serializer = + ProtoRestSerializer.create(); + serializer.putQueryParam(fields, "pageSize", request.getPageSize()); + serializer.putQueryParam(fields, "pageToken", request.getPageToken()); + 
serializer.putQueryParam(fields, "$alt", "json;enum-encoding=int"); + return fields; + }) + .setRequestBodyExtractor(request -> null) + .build()) + .setResponseParser( + ProtoMessageResponseParser.newBuilder() + .setDefaultInstance(ListDatabasesResponse.getDefaultInstance()) + .setDefaultTypeRegistry(typeRegistry) + .build()) + .build(); + + private static final ApiMethodDescriptor createTableMethodDescriptor = + ApiMethodDescriptor.newBuilder() + .setFullMethodName("google.cloud.bigquery.biglake.v1alpha1.MetastoreService/CreateTable") + .setHttpMethod("POST") + .setType(ApiMethodDescriptor.MethodType.UNARY) + .setRequestFormatter( + ProtoMessageRequestFormatter.newBuilder() + .setPath( + "/v1alpha1/{parent=projects/*/locations/*/catalogs/*/databases/*}/tables", + request -> { + Map fields = new HashMap<>(); + ProtoRestSerializer serializer = + ProtoRestSerializer.create(); + serializer.putPathParam(fields, "parent", request.getParent()); + return fields; + }) + .setQueryParamsExtractor( + request -> { + Map> fields = new HashMap<>(); + ProtoRestSerializer serializer = + ProtoRestSerializer.create(); + serializer.putQueryParam(fields, "tableId", request.getTableId()); + serializer.putQueryParam(fields, "$alt", "json;enum-encoding=int"); + return fields; + }) + .setRequestBodyExtractor( + request -> + ProtoRestSerializer.create().toBody("table", request.getTable(), true)) + .build()) + .setResponseParser( + ProtoMessageResponseParser.
<Table>
newBuilder() + .setDefaultInstance(Table.getDefaultInstance()) + .setDefaultTypeRegistry(typeRegistry) + .build()) + .build(); + + private static final ApiMethodDescriptor deleteTableMethodDescriptor = + ApiMethodDescriptor.newBuilder() + .setFullMethodName("google.cloud.bigquery.biglake.v1alpha1.MetastoreService/DeleteTable") + .setHttpMethod("DELETE") + .setType(ApiMethodDescriptor.MethodType.UNARY) + .setRequestFormatter( + ProtoMessageRequestFormatter.newBuilder() + .setPath( + "/v1alpha1/{name=projects/*/locations/*/catalogs/*/databases/*/tables/*}", + request -> { + Map fields = new HashMap<>(); + ProtoRestSerializer serializer = + ProtoRestSerializer.create(); + serializer.putPathParam(fields, "name", request.getName()); + return fields; + }) + .setQueryParamsExtractor( + request -> { + Map> fields = new HashMap<>(); + ProtoRestSerializer serializer = + ProtoRestSerializer.create(); + serializer.putQueryParam(fields, "$alt", "json;enum-encoding=int"); + return fields; + }) + .setRequestBodyExtractor(request -> null) + .build()) + .setResponseParser( + ProtoMessageResponseParser.
newBuilder() + .setDefaultInstance(Table.getDefaultInstance()) + .setDefaultTypeRegistry(typeRegistry) + .build()) + .build(); + + private static final ApiMethodDescriptor updateTableMethodDescriptor = + ApiMethodDescriptor.newBuilder() + .setFullMethodName("google.cloud.bigquery.biglake.v1alpha1.MetastoreService/UpdateTable") + .setHttpMethod("PATCH") + .setType(ApiMethodDescriptor.MethodType.UNARY) + .setRequestFormatter( + ProtoMessageRequestFormatter.newBuilder() + .setPath( + "/v1alpha1/{table.name=projects/*/locations/*/catalogs/*/databases/*/tables/*}", + request -> { + Map fields = new HashMap<>(); + ProtoRestSerializer serializer = + ProtoRestSerializer.create(); + serializer.putPathParam(fields, "table.name", request.getTable().getName()); + return fields; + }) + .setQueryParamsExtractor( + request -> { + Map> fields = new HashMap<>(); + ProtoRestSerializer serializer = + ProtoRestSerializer.create(); + serializer.putQueryParam(fields, "updateMask", request.getUpdateMask()); + serializer.putQueryParam(fields, "$alt", "json;enum-encoding=int"); + return fields; + }) + .setRequestBodyExtractor( + request -> + ProtoRestSerializer.create().toBody("table", request.getTable(), true)) + .build()) + .setResponseParser( + ProtoMessageResponseParser.
newBuilder() + .setDefaultInstance(Table.getDefaultInstance()) + .setDefaultTypeRegistry(typeRegistry) + .build()) + .build(); + + private static final ApiMethodDescriptor getTableMethodDescriptor = + ApiMethodDescriptor.newBuilder() + .setFullMethodName("google.cloud.bigquery.biglake.v1alpha1.MetastoreService/GetTable") + .setHttpMethod("GET") + .setType(ApiMethodDescriptor.MethodType.UNARY) + .setRequestFormatter( + ProtoMessageRequestFormatter.newBuilder() + .setPath( + "/v1alpha1/{name=projects/*/locations/*/catalogs/*/databases/*/tables/*}", + request -> { + Map fields = new HashMap<>(); + ProtoRestSerializer serializer = + ProtoRestSerializer.create(); + serializer.putPathParam(fields, "name", request.getName()); + return fields; + }) + .setQueryParamsExtractor( + request -> { + Map> fields = new HashMap<>(); + ProtoRestSerializer serializer = + ProtoRestSerializer.create(); + serializer.putQueryParam(fields, "$alt", "json;enum-encoding=int"); + return fields; + }) + .setRequestBodyExtractor(request -> null) + .build()) + .setResponseParser( + ProtoMessageResponseParser.
newBuilder() + .setDefaultInstance(Table.getDefaultInstance()) + .setDefaultTypeRegistry(typeRegistry) + .build()) + .build(); + + private static final ApiMethodDescriptor + listTablesMethodDescriptor = + ApiMethodDescriptor.newBuilder() + .setFullMethodName( + "google.cloud.bigquery.biglake.v1alpha1.MetastoreService/ListTables") + .setHttpMethod("GET") + .setType(ApiMethodDescriptor.MethodType.UNARY) + .setRequestFormatter( + ProtoMessageRequestFormatter.newBuilder() + .setPath( + "/v1alpha1/{parent=projects/*/locations/*/catalogs/*/databases/*}/tables", + request -> { + Map fields = new HashMap<>(); + ProtoRestSerializer serializer = + ProtoRestSerializer.create(); + serializer.putPathParam(fields, "parent", request.getParent()); + return fields; + }) + .setQueryParamsExtractor( + request -> { + Map> fields = new HashMap<>(); + ProtoRestSerializer serializer = + ProtoRestSerializer.create(); + serializer.putQueryParam(fields, "pageSize", request.getPageSize()); + serializer.putQueryParam(fields, "pageToken", request.getPageToken()); + serializer.putQueryParam(fields, "$alt", "json;enum-encoding=int"); + return fields; + }) + .setRequestBodyExtractor(request -> null) + .build()) + .setResponseParser( + ProtoMessageResponseParser.newBuilder() + .setDefaultInstance(ListTablesResponse.getDefaultInstance()) + .setDefaultTypeRegistry(typeRegistry) + .build()) + .build(); + + private static final ApiMethodDescriptor createLockMethodDescriptor = + ApiMethodDescriptor.newBuilder() + .setFullMethodName("google.cloud.bigquery.biglake.v1alpha1.MetastoreService/CreateLock") + .setHttpMethod("POST") + .setType(ApiMethodDescriptor.MethodType.UNARY) + .setRequestFormatter( + ProtoMessageRequestFormatter.newBuilder() + .setPath( + "/v1alpha1/{parent=projects/*/locations/*/catalogs/*/databases/*}/locks", + request -> { + Map fields = new HashMap<>(); + ProtoRestSerializer serializer = + ProtoRestSerializer.create(); + serializer.putPathParam(fields, "parent", request.getParent()); + return fields; + }) + .setQueryParamsExtractor( + request -> { + Map> fields = new HashMap<>(); + ProtoRestSerializer serializer = + ProtoRestSerializer.create(); + serializer.putQueryParam(fields, "$alt", "json;enum-encoding=int"); + return fields; + }) + .setRequestBodyExtractor( + request -> + ProtoRestSerializer.create().toBody("lock", request.getLock(), true)) + .build()) + .setResponseParser( + ProtoMessageResponseParser.newBuilder() + .setDefaultInstance(Lock.getDefaultInstance()) + .setDefaultTypeRegistry(typeRegistry) + .build()) + .build(); + + private static final ApiMethodDescriptor deleteLockMethodDescriptor = + ApiMethodDescriptor.newBuilder() + .setFullMethodName("google.cloud.bigquery.biglake.v1alpha1.MetastoreService/DeleteLock") + .setHttpMethod("DELETE") + .setType(ApiMethodDescriptor.MethodType.UNARY) + .setRequestFormatter( + ProtoMessageRequestFormatter.newBuilder() + .setPath( + "/v1alpha1/{name=projects/*/locations/*/catalogs/*/databases/*/locks/*}", + request -> { + Map fields = new HashMap<>(); + ProtoRestSerializer serializer = + ProtoRestSerializer.create(); + serializer.putPathParam(fields, "name", request.getName()); + return fields; + }) + .setQueryParamsExtractor( + request -> { + Map> fields = new HashMap<>(); + ProtoRestSerializer serializer = + ProtoRestSerializer.create(); + serializer.putQueryParam(fields, "$alt", "json;enum-encoding=int"); + return fields; + }) + .setRequestBodyExtractor(request -> null) + .build()) + .setResponseParser( + ProtoMessageResponseParser.newBuilder() + 
.setDefaultInstance(Empty.getDefaultInstance()) + .setDefaultTypeRegistry(typeRegistry) + .build()) + .build(); + + private static final ApiMethodDescriptor checkLockMethodDescriptor = + ApiMethodDescriptor.newBuilder() + .setFullMethodName("google.cloud.bigquery.biglake.v1alpha1.MetastoreService/CheckLock") + .setHttpMethod("POST") + .setType(ApiMethodDescriptor.MethodType.UNARY) + .setRequestFormatter( + ProtoMessageRequestFormatter.newBuilder() + .setPath( + "/v1alpha1/{name=projects/*/locations/*/catalogs/*/databases/*/locks/*}:check", + request -> { + Map fields = new HashMap<>(); + ProtoRestSerializer serializer = + ProtoRestSerializer.create(); + serializer.putPathParam(fields, "name", request.getName()); + return fields; + }) + .setQueryParamsExtractor( + request -> { + Map> fields = new HashMap<>(); + ProtoRestSerializer serializer = + ProtoRestSerializer.create(); + serializer.putQueryParam(fields, "$alt", "json;enum-encoding=int"); + return fields; + }) + .setRequestBodyExtractor( + request -> + ProtoRestSerializer.create() + .toBody("*", request.toBuilder().clearName().build(), true)) + .build()) + .setResponseParser( + ProtoMessageResponseParser.newBuilder() + .setDefaultInstance(Lock.getDefaultInstance()) + .setDefaultTypeRegistry(typeRegistry) + .build()) + .build(); + + private static final ApiMethodDescriptor + listLocksMethodDescriptor = + ApiMethodDescriptor.newBuilder() + .setFullMethodName( + "google.cloud.bigquery.biglake.v1alpha1.MetastoreService/ListLocks") + .setHttpMethod("GET") + .setType(ApiMethodDescriptor.MethodType.UNARY) + .setRequestFormatter( + ProtoMessageRequestFormatter.newBuilder() + .setPath( + "/v1alpha1/{parent=projects/*/locations/*/catalogs/*/databases/*}/locks", + request -> { + Map fields = new HashMap<>(); + ProtoRestSerializer serializer = + ProtoRestSerializer.create(); + serializer.putPathParam(fields, "parent", request.getParent()); + return fields; + }) + .setQueryParamsExtractor( + request -> { + Map> fields = new HashMap<>(); + ProtoRestSerializer serializer = + ProtoRestSerializer.create(); + serializer.putQueryParam(fields, "pageSize", request.getPageSize()); + serializer.putQueryParam(fields, "pageToken", request.getPageToken()); + serializer.putQueryParam(fields, "$alt", "json;enum-encoding=int"); + return fields; + }) + .setRequestBodyExtractor(request -> null) + .build()) + .setResponseParser( + ProtoMessageResponseParser.newBuilder() + .setDefaultInstance(ListLocksResponse.getDefaultInstance()) + .setDefaultTypeRegistry(typeRegistry) + .build()) + .build(); + + private final UnaryCallable createCatalogCallable; + private final UnaryCallable deleteCatalogCallable; + private final UnaryCallable getCatalogCallable; + private final UnaryCallable listCatalogsCallable; + private final UnaryCallable + listCatalogsPagedCallable; + private final UnaryCallable createDatabaseCallable; + private final UnaryCallable deleteDatabaseCallable; + private final UnaryCallable updateDatabaseCallable; + private final UnaryCallable getDatabaseCallable; + private final UnaryCallable listDatabasesCallable; + private final UnaryCallable + listDatabasesPagedCallable; + private final UnaryCallable createTableCallable; + private final UnaryCallable deleteTableCallable; + private final UnaryCallable updateTableCallable; + private final UnaryCallable getTableCallable; + private final UnaryCallable listTablesCallable; + private final UnaryCallable listTablesPagedCallable; + private final UnaryCallable createLockCallable; + private final UnaryCallable 
deleteLockCallable; + private final UnaryCallable checkLockCallable; + private final UnaryCallable listLocksCallable; + private final UnaryCallable listLocksPagedCallable; + + private final BackgroundResource backgroundResources; + private final HttpJsonStubCallableFactory callableFactory; + + public static final HttpJsonMetastoreServiceStub create(MetastoreServiceStubSettings settings) + throws IOException { + return new HttpJsonMetastoreServiceStub(settings, ClientContext.create(settings)); + } + + public static final HttpJsonMetastoreServiceStub create(ClientContext clientContext) + throws IOException { + return new HttpJsonMetastoreServiceStub( + MetastoreServiceStubSettings.newHttpJsonBuilder().build(), clientContext); + } + + public static final HttpJsonMetastoreServiceStub create( + ClientContext clientContext, HttpJsonStubCallableFactory callableFactory) throws IOException { + return new HttpJsonMetastoreServiceStub( + MetastoreServiceStubSettings.newHttpJsonBuilder().build(), clientContext, callableFactory); + } + + /** + * Constructs an instance of HttpJsonMetastoreServiceStub, using the given settings. This is + * protected so that it is easy to make a subclass, but otherwise, the static factory methods + * should be preferred. + */ + protected HttpJsonMetastoreServiceStub( + MetastoreServiceStubSettings settings, ClientContext clientContext) throws IOException { + this(settings, clientContext, new HttpJsonMetastoreServiceCallableFactory()); + } + + /** + * Constructs an instance of HttpJsonMetastoreServiceStub, using the given settings. This is + * protected so that it is easy to make a subclass, but otherwise, the static factory methods + * should be preferred. + */ + protected HttpJsonMetastoreServiceStub( + MetastoreServiceStubSettings settings, + ClientContext clientContext, + HttpJsonStubCallableFactory callableFactory) + throws IOException { + this.callableFactory = callableFactory; + + HttpJsonCallSettings createCatalogTransportSettings = + HttpJsonCallSettings.newBuilder() + .setMethodDescriptor(createCatalogMethodDescriptor) + .setTypeRegistry(typeRegistry) + .build(); + HttpJsonCallSettings deleteCatalogTransportSettings = + HttpJsonCallSettings.newBuilder() + .setMethodDescriptor(deleteCatalogMethodDescriptor) + .setTypeRegistry(typeRegistry) + .build(); + HttpJsonCallSettings getCatalogTransportSettings = + HttpJsonCallSettings.newBuilder() + .setMethodDescriptor(getCatalogMethodDescriptor) + .setTypeRegistry(typeRegistry) + .build(); + HttpJsonCallSettings listCatalogsTransportSettings = + HttpJsonCallSettings.newBuilder() + .setMethodDescriptor(listCatalogsMethodDescriptor) + .setTypeRegistry(typeRegistry) + .build(); + HttpJsonCallSettings createDatabaseTransportSettings = + HttpJsonCallSettings.newBuilder() + .setMethodDescriptor(createDatabaseMethodDescriptor) + .setTypeRegistry(typeRegistry) + .build(); + HttpJsonCallSettings deleteDatabaseTransportSettings = + HttpJsonCallSettings.newBuilder() + .setMethodDescriptor(deleteDatabaseMethodDescriptor) + .setTypeRegistry(typeRegistry) + .build(); + HttpJsonCallSettings updateDatabaseTransportSettings = + HttpJsonCallSettings.newBuilder() + .setMethodDescriptor(updateDatabaseMethodDescriptor) + .setTypeRegistry(typeRegistry) + .build(); + HttpJsonCallSettings getDatabaseTransportSettings = + HttpJsonCallSettings.newBuilder() + .setMethodDescriptor(getDatabaseMethodDescriptor) + .setTypeRegistry(typeRegistry) + .build(); + HttpJsonCallSettings + listDatabasesTransportSettings = + HttpJsonCallSettings.newBuilder() + 
.setMethodDescriptor(listDatabasesMethodDescriptor) + .setTypeRegistry(typeRegistry) + .build(); + HttpJsonCallSettings createTableTransportSettings = + HttpJsonCallSettings.newBuilder() + .setMethodDescriptor(createTableMethodDescriptor) + .setTypeRegistry(typeRegistry) + .build(); + HttpJsonCallSettings deleteTableTransportSettings = + HttpJsonCallSettings.newBuilder() + .setMethodDescriptor(deleteTableMethodDescriptor) + .setTypeRegistry(typeRegistry) + .build(); + HttpJsonCallSettings updateTableTransportSettings = + HttpJsonCallSettings.newBuilder() + .setMethodDescriptor(updateTableMethodDescriptor) + .setTypeRegistry(typeRegistry) + .build(); + HttpJsonCallSettings getTableTransportSettings = + HttpJsonCallSettings.newBuilder() + .setMethodDescriptor(getTableMethodDescriptor) + .setTypeRegistry(typeRegistry) + .build(); + HttpJsonCallSettings listTablesTransportSettings = + HttpJsonCallSettings.newBuilder() + .setMethodDescriptor(listTablesMethodDescriptor) + .setTypeRegistry(typeRegistry) + .build(); + HttpJsonCallSettings createLockTransportSettings = + HttpJsonCallSettings.newBuilder() + .setMethodDescriptor(createLockMethodDescriptor) + .setTypeRegistry(typeRegistry) + .build(); + HttpJsonCallSettings deleteLockTransportSettings = + HttpJsonCallSettings.newBuilder() + .setMethodDescriptor(deleteLockMethodDescriptor) + .setTypeRegistry(typeRegistry) + .build(); + HttpJsonCallSettings checkLockTransportSettings = + HttpJsonCallSettings.newBuilder() + .setMethodDescriptor(checkLockMethodDescriptor) + .setTypeRegistry(typeRegistry) + .build(); + HttpJsonCallSettings listLocksTransportSettings = + HttpJsonCallSettings.newBuilder() + .setMethodDescriptor(listLocksMethodDescriptor) + .setTypeRegistry(typeRegistry) + .build(); + + this.createCatalogCallable = + callableFactory.createUnaryCallable( + createCatalogTransportSettings, settings.createCatalogSettings(), clientContext); + this.deleteCatalogCallable = + callableFactory.createUnaryCallable( + deleteCatalogTransportSettings, settings.deleteCatalogSettings(), clientContext); + this.getCatalogCallable = + callableFactory.createUnaryCallable( + getCatalogTransportSettings, settings.getCatalogSettings(), clientContext); + this.listCatalogsCallable = + callableFactory.createUnaryCallable( + listCatalogsTransportSettings, settings.listCatalogsSettings(), clientContext); + this.listCatalogsPagedCallable = + callableFactory.createPagedCallable( + listCatalogsTransportSettings, settings.listCatalogsSettings(), clientContext); + this.createDatabaseCallable = + callableFactory.createUnaryCallable( + createDatabaseTransportSettings, settings.createDatabaseSettings(), clientContext); + this.deleteDatabaseCallable = + callableFactory.createUnaryCallable( + deleteDatabaseTransportSettings, settings.deleteDatabaseSettings(), clientContext); + this.updateDatabaseCallable = + callableFactory.createUnaryCallable( + updateDatabaseTransportSettings, settings.updateDatabaseSettings(), clientContext); + this.getDatabaseCallable = + callableFactory.createUnaryCallable( + getDatabaseTransportSettings, settings.getDatabaseSettings(), clientContext); + this.listDatabasesCallable = + callableFactory.createUnaryCallable( + listDatabasesTransportSettings, settings.listDatabasesSettings(), clientContext); + this.listDatabasesPagedCallable = + callableFactory.createPagedCallable( + listDatabasesTransportSettings, settings.listDatabasesSettings(), clientContext); + this.createTableCallable = + callableFactory.createUnaryCallable( + createTableTransportSettings, 
settings.createTableSettings(), clientContext); + this.deleteTableCallable = + callableFactory.createUnaryCallable( + deleteTableTransportSettings, settings.deleteTableSettings(), clientContext); + this.updateTableCallable = + callableFactory.createUnaryCallable( + updateTableTransportSettings, settings.updateTableSettings(), clientContext); + this.getTableCallable = + callableFactory.createUnaryCallable( + getTableTransportSettings, settings.getTableSettings(), clientContext); + this.listTablesCallable = + callableFactory.createUnaryCallable( + listTablesTransportSettings, settings.listTablesSettings(), clientContext); + this.listTablesPagedCallable = + callableFactory.createPagedCallable( + listTablesTransportSettings, settings.listTablesSettings(), clientContext); + this.createLockCallable = + callableFactory.createUnaryCallable( + createLockTransportSettings, settings.createLockSettings(), clientContext); + this.deleteLockCallable = + callableFactory.createUnaryCallable( + deleteLockTransportSettings, settings.deleteLockSettings(), clientContext); + this.checkLockCallable = + callableFactory.createUnaryCallable( + checkLockTransportSettings, settings.checkLockSettings(), clientContext); + this.listLocksCallable = + callableFactory.createUnaryCallable( + listLocksTransportSettings, settings.listLocksSettings(), clientContext); + this.listLocksPagedCallable = + callableFactory.createPagedCallable( + listLocksTransportSettings, settings.listLocksSettings(), clientContext); + + this.backgroundResources = + new BackgroundResourceAggregation(clientContext.getBackgroundResources()); + } + + @InternalApi + public static List getMethodDescriptors() { + List methodDescriptors = new ArrayList<>(); + methodDescriptors.add(createCatalogMethodDescriptor); + methodDescriptors.add(deleteCatalogMethodDescriptor); + methodDescriptors.add(getCatalogMethodDescriptor); + methodDescriptors.add(listCatalogsMethodDescriptor); + methodDescriptors.add(createDatabaseMethodDescriptor); + methodDescriptors.add(deleteDatabaseMethodDescriptor); + methodDescriptors.add(updateDatabaseMethodDescriptor); + methodDescriptors.add(getDatabaseMethodDescriptor); + methodDescriptors.add(listDatabasesMethodDescriptor); + methodDescriptors.add(createTableMethodDescriptor); + methodDescriptors.add(deleteTableMethodDescriptor); + methodDescriptors.add(updateTableMethodDescriptor); + methodDescriptors.add(getTableMethodDescriptor); + methodDescriptors.add(listTablesMethodDescriptor); + methodDescriptors.add(createLockMethodDescriptor); + methodDescriptors.add(deleteLockMethodDescriptor); + methodDescriptors.add(checkLockMethodDescriptor); + methodDescriptors.add(listLocksMethodDescriptor); + return methodDescriptors; + } + + @Override + public UnaryCallable createCatalogCallable() { + return createCatalogCallable; + } + + @Override + public UnaryCallable deleteCatalogCallable() { + return deleteCatalogCallable; + } + + @Override + public UnaryCallable getCatalogCallable() { + return getCatalogCallable; + } + + @Override + public UnaryCallable listCatalogsCallable() { + return listCatalogsCallable; + } + + @Override + public UnaryCallable listCatalogsPagedCallable() { + return listCatalogsPagedCallable; + } + + @Override + public UnaryCallable createDatabaseCallable() { + return createDatabaseCallable; + } + + @Override + public UnaryCallable deleteDatabaseCallable() { + return deleteDatabaseCallable; + } + + @Override + public UnaryCallable updateDatabaseCallable() { + return updateDatabaseCallable; + } + + @Override + public 
UnaryCallable getDatabaseCallable() { + return getDatabaseCallable; + } + + @Override + public UnaryCallable listDatabasesCallable() { + return listDatabasesCallable; + } + + @Override + public UnaryCallable + listDatabasesPagedCallable() { + return listDatabasesPagedCallable; + } + + @Override + public UnaryCallable createTableCallable() { + return createTableCallable; + } + + @Override + public UnaryCallable deleteTableCallable() { + return deleteTableCallable; + } + + @Override + public UnaryCallable updateTableCallable() { + return updateTableCallable; + } + + @Override + public UnaryCallable getTableCallable() { + return getTableCallable; + } + + @Override + public UnaryCallable listTablesCallable() { + return listTablesCallable; + } + + @Override + public UnaryCallable listTablesPagedCallable() { + return listTablesPagedCallable; + } + + @Override + public UnaryCallable createLockCallable() { + return createLockCallable; + } + + @Override + public UnaryCallable deleteLockCallable() { + return deleteLockCallable; + } + + @Override + public UnaryCallable checkLockCallable() { + return checkLockCallable; + } + + @Override + public UnaryCallable listLocksCallable() { + return listLocksCallable; + } + + @Override + public UnaryCallable listLocksPagedCallable() { + return listLocksPagedCallable; + } + + @Override + public final void close() { + try { + backgroundResources.close(); + } catch (RuntimeException e) { + throw e; + } catch (Exception e) { + throw new IllegalStateException("Failed to close resource", e); + } + } + + @Override + public void shutdown() { + backgroundResources.shutdown(); + } + + @Override + public boolean isShutdown() { + return backgroundResources.isShutdown(); + } + + @Override + public boolean isTerminated() { + return backgroundResources.isTerminated(); + } + + @Override + public void shutdownNow() { + backgroundResources.shutdownNow(); + } + + @Override + public boolean awaitTermination(long duration, TimeUnit unit) throws InterruptedException { + return backgroundResources.awaitTermination(duration, unit); + } +} diff --git a/java-biglake/google-cloud-biglake/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/stub/MetastoreServiceStub.java b/java-biglake/google-cloud-biglake/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/stub/MetastoreServiceStub.java new file mode 100644 index 000000000000..4486bafa2229 --- /dev/null +++ b/java-biglake/google-cloud-biglake/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/stub/MetastoreServiceStub.java @@ -0,0 +1,157 @@ +/* + * Copyright 2022 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.cloud.bigquery.biglake.v1alpha1.stub; + +import static com.google.cloud.bigquery.biglake.v1alpha1.MetastoreServiceClient.ListCatalogsPagedResponse; +import static com.google.cloud.bigquery.biglake.v1alpha1.MetastoreServiceClient.ListDatabasesPagedResponse; +import static com.google.cloud.bigquery.biglake.v1alpha1.MetastoreServiceClient.ListLocksPagedResponse; +import static com.google.cloud.bigquery.biglake.v1alpha1.MetastoreServiceClient.ListTablesPagedResponse; + +import com.google.api.core.BetaApi; +import com.google.api.gax.core.BackgroundResource; +import com.google.api.gax.rpc.UnaryCallable; +import com.google.cloud.bigquery.biglake.v1alpha1.Catalog; +import com.google.cloud.bigquery.biglake.v1alpha1.CheckLockRequest; +import com.google.cloud.bigquery.biglake.v1alpha1.CreateCatalogRequest; +import com.google.cloud.bigquery.biglake.v1alpha1.CreateDatabaseRequest; +import com.google.cloud.bigquery.biglake.v1alpha1.CreateLockRequest; +import com.google.cloud.bigquery.biglake.v1alpha1.CreateTableRequest; +import com.google.cloud.bigquery.biglake.v1alpha1.Database; +import com.google.cloud.bigquery.biglake.v1alpha1.DeleteCatalogRequest; +import com.google.cloud.bigquery.biglake.v1alpha1.DeleteDatabaseRequest; +import com.google.cloud.bigquery.biglake.v1alpha1.DeleteLockRequest; +import com.google.cloud.bigquery.biglake.v1alpha1.DeleteTableRequest; +import com.google.cloud.bigquery.biglake.v1alpha1.GetCatalogRequest; +import com.google.cloud.bigquery.biglake.v1alpha1.GetDatabaseRequest; +import com.google.cloud.bigquery.biglake.v1alpha1.GetTableRequest; +import com.google.cloud.bigquery.biglake.v1alpha1.ListCatalogsRequest; +import com.google.cloud.bigquery.biglake.v1alpha1.ListCatalogsResponse; +import com.google.cloud.bigquery.biglake.v1alpha1.ListDatabasesRequest; +import com.google.cloud.bigquery.biglake.v1alpha1.ListDatabasesResponse; +import com.google.cloud.bigquery.biglake.v1alpha1.ListLocksRequest; +import com.google.cloud.bigquery.biglake.v1alpha1.ListLocksResponse; +import com.google.cloud.bigquery.biglake.v1alpha1.ListTablesRequest; +import com.google.cloud.bigquery.biglake.v1alpha1.ListTablesResponse; +import com.google.cloud.bigquery.biglake.v1alpha1.Lock; +import com.google.cloud.bigquery.biglake.v1alpha1.Table; +import com.google.cloud.bigquery.biglake.v1alpha1.UpdateDatabaseRequest; +import com.google.cloud.bigquery.biglake.v1alpha1.UpdateTableRequest; +import com.google.protobuf.Empty; +import javax.annotation.Generated; + +// AUTO-GENERATED DOCUMENTATION AND CLASS. +/** + * Base stub class for the MetastoreService service API. + * + *
<p>
This class is for advanced usage and reflects the underlying API directly. + */ +@BetaApi +@Generated("by gapic-generator-java") +public abstract class MetastoreServiceStub implements BackgroundResource { + + public UnaryCallable createCatalogCallable() { + throw new UnsupportedOperationException("Not implemented: createCatalogCallable()"); + } + + public UnaryCallable deleteCatalogCallable() { + throw new UnsupportedOperationException("Not implemented: deleteCatalogCallable()"); + } + + public UnaryCallable getCatalogCallable() { + throw new UnsupportedOperationException("Not implemented: getCatalogCallable()"); + } + + public UnaryCallable listCatalogsPagedCallable() { + throw new UnsupportedOperationException("Not implemented: listCatalogsPagedCallable()"); + } + + public UnaryCallable listCatalogsCallable() { + throw new UnsupportedOperationException("Not implemented: listCatalogsCallable()"); + } + + public UnaryCallable createDatabaseCallable() { + throw new UnsupportedOperationException("Not implemented: createDatabaseCallable()"); + } + + public UnaryCallable deleteDatabaseCallable() { + throw new UnsupportedOperationException("Not implemented: deleteDatabaseCallable()"); + } + + public UnaryCallable updateDatabaseCallable() { + throw new UnsupportedOperationException("Not implemented: updateDatabaseCallable()"); + } + + public UnaryCallable getDatabaseCallable() { + throw new UnsupportedOperationException("Not implemented: getDatabaseCallable()"); + } + + public UnaryCallable + listDatabasesPagedCallable() { + throw new UnsupportedOperationException("Not implemented: listDatabasesPagedCallable()"); + } + + public UnaryCallable listDatabasesCallable() { + throw new UnsupportedOperationException("Not implemented: listDatabasesCallable()"); + } + + public UnaryCallable createTableCallable() { + throw new UnsupportedOperationException("Not implemented: createTableCallable()"); + } + + public UnaryCallable deleteTableCallable() { + throw new UnsupportedOperationException("Not implemented: deleteTableCallable()"); + } + + public UnaryCallable updateTableCallable() { + throw new UnsupportedOperationException("Not implemented: updateTableCallable()"); + } + + public UnaryCallable getTableCallable() { + throw new UnsupportedOperationException("Not implemented: getTableCallable()"); + } + + public UnaryCallable listTablesPagedCallable() { + throw new UnsupportedOperationException("Not implemented: listTablesPagedCallable()"); + } + + public UnaryCallable listTablesCallable() { + throw new UnsupportedOperationException("Not implemented: listTablesCallable()"); + } + + public UnaryCallable createLockCallable() { + throw new UnsupportedOperationException("Not implemented: createLockCallable()"); + } + + public UnaryCallable deleteLockCallable() { + throw new UnsupportedOperationException("Not implemented: deleteLockCallable()"); + } + + public UnaryCallable checkLockCallable() { + throw new UnsupportedOperationException("Not implemented: checkLockCallable()"); + } + + public UnaryCallable listLocksPagedCallable() { + throw new UnsupportedOperationException("Not implemented: listLocksPagedCallable()"); + } + + public UnaryCallable listLocksCallable() { + throw new UnsupportedOperationException("Not implemented: listLocksCallable()"); + } + + @Override + public abstract void close(); +} diff --git a/java-biglake/google-cloud-biglake/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/stub/MetastoreServiceStubSettings.java 
b/java-biglake/google-cloud-biglake/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/stub/MetastoreServiceStubSettings.java new file mode 100644 index 000000000000..248ee1a73680 --- /dev/null +++ b/java-biglake/google-cloud-biglake/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/stub/MetastoreServiceStubSettings.java @@ -0,0 +1,980 @@ +/* + * Copyright 2022 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.biglake.v1alpha1.stub; + +import static com.google.cloud.bigquery.biglake.v1alpha1.MetastoreServiceClient.ListCatalogsPagedResponse; +import static com.google.cloud.bigquery.biglake.v1alpha1.MetastoreServiceClient.ListDatabasesPagedResponse; +import static com.google.cloud.bigquery.biglake.v1alpha1.MetastoreServiceClient.ListLocksPagedResponse; +import static com.google.cloud.bigquery.biglake.v1alpha1.MetastoreServiceClient.ListTablesPagedResponse; + +import com.google.api.core.ApiFunction; +import com.google.api.core.ApiFuture; +import com.google.api.core.BetaApi; +import com.google.api.gax.core.GaxProperties; +import com.google.api.gax.core.GoogleCredentialsProvider; +import com.google.api.gax.core.InstantiatingExecutorProvider; +import com.google.api.gax.grpc.GaxGrpcProperties; +import com.google.api.gax.grpc.GrpcTransportChannel; +import com.google.api.gax.grpc.InstantiatingGrpcChannelProvider; +import com.google.api.gax.httpjson.GaxHttpJsonProperties; +import com.google.api.gax.httpjson.HttpJsonTransportChannel; +import com.google.api.gax.httpjson.InstantiatingHttpJsonChannelProvider; +import com.google.api.gax.retrying.RetrySettings; +import com.google.api.gax.rpc.ApiCallContext; +import com.google.api.gax.rpc.ApiClientHeaderProvider; +import com.google.api.gax.rpc.ClientContext; +import com.google.api.gax.rpc.PageContext; +import com.google.api.gax.rpc.PagedCallSettings; +import com.google.api.gax.rpc.PagedListDescriptor; +import com.google.api.gax.rpc.PagedListResponseFactory; +import com.google.api.gax.rpc.StatusCode; +import com.google.api.gax.rpc.StubSettings; +import com.google.api.gax.rpc.TransportChannelProvider; +import com.google.api.gax.rpc.UnaryCallSettings; +import com.google.api.gax.rpc.UnaryCallable; +import com.google.cloud.bigquery.biglake.v1alpha1.Catalog; +import com.google.cloud.bigquery.biglake.v1alpha1.CheckLockRequest; +import com.google.cloud.bigquery.biglake.v1alpha1.CreateCatalogRequest; +import com.google.cloud.bigquery.biglake.v1alpha1.CreateDatabaseRequest; +import com.google.cloud.bigquery.biglake.v1alpha1.CreateLockRequest; +import com.google.cloud.bigquery.biglake.v1alpha1.CreateTableRequest; +import com.google.cloud.bigquery.biglake.v1alpha1.Database; +import com.google.cloud.bigquery.biglake.v1alpha1.DeleteCatalogRequest; +import com.google.cloud.bigquery.biglake.v1alpha1.DeleteDatabaseRequest; +import com.google.cloud.bigquery.biglake.v1alpha1.DeleteLockRequest; +import com.google.cloud.bigquery.biglake.v1alpha1.DeleteTableRequest; +import 
com.google.cloud.bigquery.biglake.v1alpha1.GetCatalogRequest; +import com.google.cloud.bigquery.biglake.v1alpha1.GetDatabaseRequest; +import com.google.cloud.bigquery.biglake.v1alpha1.GetTableRequest; +import com.google.cloud.bigquery.biglake.v1alpha1.ListCatalogsRequest; +import com.google.cloud.bigquery.biglake.v1alpha1.ListCatalogsResponse; +import com.google.cloud.bigquery.biglake.v1alpha1.ListDatabasesRequest; +import com.google.cloud.bigquery.biglake.v1alpha1.ListDatabasesResponse; +import com.google.cloud.bigquery.biglake.v1alpha1.ListLocksRequest; +import com.google.cloud.bigquery.biglake.v1alpha1.ListLocksResponse; +import com.google.cloud.bigquery.biglake.v1alpha1.ListTablesRequest; +import com.google.cloud.bigquery.biglake.v1alpha1.ListTablesResponse; +import com.google.cloud.bigquery.biglake.v1alpha1.Lock; +import com.google.cloud.bigquery.biglake.v1alpha1.Table; +import com.google.cloud.bigquery.biglake.v1alpha1.UpdateDatabaseRequest; +import com.google.cloud.bigquery.biglake.v1alpha1.UpdateTableRequest; +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; +import com.google.common.collect.ImmutableSet; +import com.google.common.collect.Lists; +import com.google.protobuf.Empty; +import java.io.IOException; +import java.util.List; +import javax.annotation.Generated; +import org.threeten.bp.Duration; + +// AUTO-GENERATED DOCUMENTATION AND CLASS. +/** + * Settings class to configure an instance of {@link MetastoreServiceStub}. + * + *
<p>The default instance has everything set to sensible defaults:
+ *
+ * <ul>
+ *   <li>The default service address (biglake.googleapis.com) and default port (443) are used.
+ *   <li>Credentials are acquired automatically through Application Default Credentials.
+ *   <li>Retries are configured for idempotent methods but not for non-idempotent methods.
+ * </ul>
+ *
+ * <p>The builder of this class is recursive, so contained classes are themselves builders. When
+ * build() is called, the tree of builders is called to create the complete settings object.
+ *
+ * <p>For example, to set the total timeout of createCatalog to 30 seconds:
+ *
+ * <pre>{@code
+ * // This snippet has been automatically generated and should be regarded as a code template only.
+ * // It will require modifications to work:
+ * // - It may require correct/in-range values for request initialization.
+ * // - It may require specifying regional endpoints when creating the service client as shown in
+ * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
+ * MetastoreServiceStubSettings.Builder metastoreServiceSettingsBuilder =
+ *     MetastoreServiceStubSettings.newBuilder();
+ * metastoreServiceSettingsBuilder
+ *     .createCatalogSettings()
+ *     .setRetrySettings(
+ *         metastoreServiceSettingsBuilder
+ *             .createCatalogSettings()
+ *             .getRetrySettings()
+ *             .toBuilder()
+ *             .setTotalTimeout(Duration.ofSeconds(30))
+ *             .build());
+ * MetastoreServiceStubSettings metastoreServiceSettings = metastoreServiceSettingsBuilder.build();
+ * }</pre>
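+ *
+ * <p>As an illustrative sketch rather than generated output, the same settings class can be built
+ * for the REST (HTTP/JSON) transport via {@code newHttpJsonBuilder()} and then used to create a
+ * stub; the variable names below are placeholders:
+ *
+ * <pre>{@code
+ * // Placeholder names; createStub() throws IOException, so real callers must handle or declare it.
+ * MetastoreServiceStubSettings httpJsonSettings =
+ *     MetastoreServiceStubSettings.newHttpJsonBuilder().build();
+ * MetastoreServiceStub stub = httpJsonSettings.createStub();
+ * }</pre>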
+ */ +@BetaApi +@Generated("by gapic-generator-java") +public class MetastoreServiceStubSettings extends StubSettings { + /** The default scopes of the service. */ + private static final ImmutableList DEFAULT_SERVICE_SCOPES = + ImmutableList.builder() + .add("https://www.googleapis.com/auth/bigquery") + .add("https://www.googleapis.com/auth/cloud-platform") + .build(); + + private final UnaryCallSettings createCatalogSettings; + private final UnaryCallSettings deleteCatalogSettings; + private final UnaryCallSettings getCatalogSettings; + private final PagedCallSettings< + ListCatalogsRequest, ListCatalogsResponse, ListCatalogsPagedResponse> + listCatalogsSettings; + private final UnaryCallSettings createDatabaseSettings; + private final UnaryCallSettings deleteDatabaseSettings; + private final UnaryCallSettings updateDatabaseSettings; + private final UnaryCallSettings getDatabaseSettings; + private final PagedCallSettings< + ListDatabasesRequest, ListDatabasesResponse, ListDatabasesPagedResponse> + listDatabasesSettings; + private final UnaryCallSettings createTableSettings; + private final UnaryCallSettings deleteTableSettings; + private final UnaryCallSettings updateTableSettings; + private final UnaryCallSettings getTableSettings; + private final PagedCallSettings + listTablesSettings; + private final UnaryCallSettings createLockSettings; + private final UnaryCallSettings deleteLockSettings; + private final UnaryCallSettings checkLockSettings; + private final PagedCallSettings + listLocksSettings; + + private static final PagedListDescriptor + LIST_CATALOGS_PAGE_STR_DESC = + new PagedListDescriptor() { + @Override + public String emptyToken() { + return ""; + } + + @Override + public ListCatalogsRequest injectToken(ListCatalogsRequest payload, String token) { + return ListCatalogsRequest.newBuilder(payload).setPageToken(token).build(); + } + + @Override + public ListCatalogsRequest injectPageSize(ListCatalogsRequest payload, int pageSize) { + return ListCatalogsRequest.newBuilder(payload).setPageSize(pageSize).build(); + } + + @Override + public Integer extractPageSize(ListCatalogsRequest payload) { + return payload.getPageSize(); + } + + @Override + public String extractNextToken(ListCatalogsResponse payload) { + return payload.getNextPageToken(); + } + + @Override + public Iterable extractResources(ListCatalogsResponse payload) { + return payload.getCatalogsList() == null + ? ImmutableList.of() + : payload.getCatalogsList(); + } + }; + + private static final PagedListDescriptor + LIST_DATABASES_PAGE_STR_DESC = + new PagedListDescriptor() { + @Override + public String emptyToken() { + return ""; + } + + @Override + public ListDatabasesRequest injectToken(ListDatabasesRequest payload, String token) { + return ListDatabasesRequest.newBuilder(payload).setPageToken(token).build(); + } + + @Override + public ListDatabasesRequest injectPageSize(ListDatabasesRequest payload, int pageSize) { + return ListDatabasesRequest.newBuilder(payload).setPageSize(pageSize).build(); + } + + @Override + public Integer extractPageSize(ListDatabasesRequest payload) { + return payload.getPageSize(); + } + + @Override + public String extractNextToken(ListDatabasesResponse payload) { + return payload.getNextPageToken(); + } + + @Override + public Iterable extractResources(ListDatabasesResponse payload) { + return payload.getDatabasesList() == null + ? 
ImmutableList.of() + : payload.getDatabasesList(); + } + }; + + private static final PagedListDescriptor + LIST_TABLES_PAGE_STR_DESC = + new PagedListDescriptor() { + @Override + public String emptyToken() { + return ""; + } + + @Override + public ListTablesRequest injectToken(ListTablesRequest payload, String token) { + return ListTablesRequest.newBuilder(payload).setPageToken(token).build(); + } + + @Override + public ListTablesRequest injectPageSize(ListTablesRequest payload, int pageSize) { + return ListTablesRequest.newBuilder(payload).setPageSize(pageSize).build(); + } + + @Override + public Integer extractPageSize(ListTablesRequest payload) { + return payload.getPageSize(); + } + + @Override + public String extractNextToken(ListTablesResponse payload) { + return payload.getNextPageToken(); + } + + @Override + public Iterable
<Table> extractResources(ListTablesResponse payload) { + return payload.getTablesList() == null + ? ImmutableList.<Table>
of() + : payload.getTablesList(); + } + }; + + private static final PagedListDescriptor + LIST_LOCKS_PAGE_STR_DESC = + new PagedListDescriptor() { + @Override + public String emptyToken() { + return ""; + } + + @Override + public ListLocksRequest injectToken(ListLocksRequest payload, String token) { + return ListLocksRequest.newBuilder(payload).setPageToken(token).build(); + } + + @Override + public ListLocksRequest injectPageSize(ListLocksRequest payload, int pageSize) { + return ListLocksRequest.newBuilder(payload).setPageSize(pageSize).build(); + } + + @Override + public Integer extractPageSize(ListLocksRequest payload) { + return payload.getPageSize(); + } + + @Override + public String extractNextToken(ListLocksResponse payload) { + return payload.getNextPageToken(); + } + + @Override + public Iterable extractResources(ListLocksResponse payload) { + return payload.getLocksList() == null + ? ImmutableList.of() + : payload.getLocksList(); + } + }; + + private static final PagedListResponseFactory< + ListCatalogsRequest, ListCatalogsResponse, ListCatalogsPagedResponse> + LIST_CATALOGS_PAGE_STR_FACT = + new PagedListResponseFactory< + ListCatalogsRequest, ListCatalogsResponse, ListCatalogsPagedResponse>() { + @Override + public ApiFuture getFuturePagedResponse( + UnaryCallable callable, + ListCatalogsRequest request, + ApiCallContext context, + ApiFuture futureResponse) { + PageContext pageContext = + PageContext.create(callable, LIST_CATALOGS_PAGE_STR_DESC, request, context); + return ListCatalogsPagedResponse.createAsync(pageContext, futureResponse); + } + }; + + private static final PagedListResponseFactory< + ListDatabasesRequest, ListDatabasesResponse, ListDatabasesPagedResponse> + LIST_DATABASES_PAGE_STR_FACT = + new PagedListResponseFactory< + ListDatabasesRequest, ListDatabasesResponse, ListDatabasesPagedResponse>() { + @Override + public ApiFuture getFuturePagedResponse( + UnaryCallable callable, + ListDatabasesRequest request, + ApiCallContext context, + ApiFuture futureResponse) { + PageContext pageContext = + PageContext.create(callable, LIST_DATABASES_PAGE_STR_DESC, request, context); + return ListDatabasesPagedResponse.createAsync(pageContext, futureResponse); + } + }; + + private static final PagedListResponseFactory< + ListTablesRequest, ListTablesResponse, ListTablesPagedResponse> + LIST_TABLES_PAGE_STR_FACT = + new PagedListResponseFactory< + ListTablesRequest, ListTablesResponse, ListTablesPagedResponse>() { + @Override + public ApiFuture getFuturePagedResponse( + UnaryCallable callable, + ListTablesRequest request, + ApiCallContext context, + ApiFuture futureResponse) { + PageContext pageContext = + PageContext.create(callable, LIST_TABLES_PAGE_STR_DESC, request, context); + return ListTablesPagedResponse.createAsync(pageContext, futureResponse); + } + }; + + private static final PagedListResponseFactory< + ListLocksRequest, ListLocksResponse, ListLocksPagedResponse> + LIST_LOCKS_PAGE_STR_FACT = + new PagedListResponseFactory< + ListLocksRequest, ListLocksResponse, ListLocksPagedResponse>() { + @Override + public ApiFuture getFuturePagedResponse( + UnaryCallable callable, + ListLocksRequest request, + ApiCallContext context, + ApiFuture futureResponse) { + PageContext pageContext = + PageContext.create(callable, LIST_LOCKS_PAGE_STR_DESC, request, context); + return ListLocksPagedResponse.createAsync(pageContext, futureResponse); + } + }; + + /** Returns the object with the settings used for calls to createCatalog. 
*/ + public UnaryCallSettings createCatalogSettings() { + return createCatalogSettings; + } + + /** Returns the object with the settings used for calls to deleteCatalog. */ + public UnaryCallSettings deleteCatalogSettings() { + return deleteCatalogSettings; + } + + /** Returns the object with the settings used for calls to getCatalog. */ + public UnaryCallSettings getCatalogSettings() { + return getCatalogSettings; + } + + /** Returns the object with the settings used for calls to listCatalogs. */ + public PagedCallSettings + listCatalogsSettings() { + return listCatalogsSettings; + } + + /** Returns the object with the settings used for calls to createDatabase. */ + public UnaryCallSettings createDatabaseSettings() { + return createDatabaseSettings; + } + + /** Returns the object with the settings used for calls to deleteDatabase. */ + public UnaryCallSettings deleteDatabaseSettings() { + return deleteDatabaseSettings; + } + + /** Returns the object with the settings used for calls to updateDatabase. */ + public UnaryCallSettings updateDatabaseSettings() { + return updateDatabaseSettings; + } + + /** Returns the object with the settings used for calls to getDatabase. */ + public UnaryCallSettings getDatabaseSettings() { + return getDatabaseSettings; + } + + /** Returns the object with the settings used for calls to listDatabases. */ + public PagedCallSettings + listDatabasesSettings() { + return listDatabasesSettings; + } + + /** Returns the object with the settings used for calls to createTable. */ + public UnaryCallSettings createTableSettings() { + return createTableSettings; + } + + /** Returns the object with the settings used for calls to deleteTable. */ + public UnaryCallSettings deleteTableSettings() { + return deleteTableSettings; + } + + /** Returns the object with the settings used for calls to updateTable. */ + public UnaryCallSettings updateTableSettings() { + return updateTableSettings; + } + + /** Returns the object with the settings used for calls to getTable. */ + public UnaryCallSettings getTableSettings() { + return getTableSettings; + } + + /** Returns the object with the settings used for calls to listTables. */ + public PagedCallSettings + listTablesSettings() { + return listTablesSettings; + } + + /** Returns the object with the settings used for calls to createLock. */ + public UnaryCallSettings createLockSettings() { + return createLockSettings; + } + + /** Returns the object with the settings used for calls to deleteLock. */ + public UnaryCallSettings deleteLockSettings() { + return deleteLockSettings; + } + + /** Returns the object with the settings used for calls to checkLock. */ + public UnaryCallSettings checkLockSettings() { + return checkLockSettings; + } + + /** Returns the object with the settings used for calls to listLocks. */ + public PagedCallSettings + listLocksSettings() { + return listLocksSettings; + } + + public MetastoreServiceStub createStub() throws IOException { + if (getTransportChannelProvider() + .getTransportName() + .equals(GrpcTransportChannel.getGrpcTransportName())) { + return GrpcMetastoreServiceStub.create(this); + } + if (getTransportChannelProvider() + .getTransportName() + .equals(HttpJsonTransportChannel.getHttpJsonTransportName())) { + return HttpJsonMetastoreServiceStub.create(this); + } + throw new UnsupportedOperationException( + String.format( + "Transport not supported: %s", getTransportChannelProvider().getTransportName())); + } + + /** Returns a builder for the default ExecutorProvider for this service. 
*/ + public static InstantiatingExecutorProvider.Builder defaultExecutorProviderBuilder() { + return InstantiatingExecutorProvider.newBuilder(); + } + + /** Returns the default service endpoint. */ + public static String getDefaultEndpoint() { + return "biglake.googleapis.com:443"; + } + + /** Returns the default mTLS service endpoint. */ + public static String getDefaultMtlsEndpoint() { + return "biglake.mtls.googleapis.com:443"; + } + + /** Returns the default service scopes. */ + public static List getDefaultServiceScopes() { + return DEFAULT_SERVICE_SCOPES; + } + + /** Returns a builder for the default credentials for this service. */ + public static GoogleCredentialsProvider.Builder defaultCredentialsProviderBuilder() { + return GoogleCredentialsProvider.newBuilder() + .setScopesToApply(DEFAULT_SERVICE_SCOPES) + .setUseJwtAccessWithScope(true); + } + + /** Returns a builder for the default gRPC ChannelProvider for this service. */ + public static InstantiatingGrpcChannelProvider.Builder defaultGrpcTransportProviderBuilder() { + return InstantiatingGrpcChannelProvider.newBuilder() + .setMaxInboundMessageSize(Integer.MAX_VALUE); + } + + /** Returns a builder for the default REST ChannelProvider for this service. */ + @BetaApi + public static InstantiatingHttpJsonChannelProvider.Builder + defaultHttpJsonTransportProviderBuilder() { + return InstantiatingHttpJsonChannelProvider.newBuilder(); + } + + public static TransportChannelProvider defaultTransportChannelProvider() { + return defaultGrpcTransportProviderBuilder().build(); + } + + @BetaApi("The surface for customizing headers is not stable yet and may change in the future.") + public static ApiClientHeaderProvider.Builder defaultGrpcApiClientHeaderProviderBuilder() { + return ApiClientHeaderProvider.newBuilder() + .setGeneratedLibToken( + "gapic", GaxProperties.getLibraryVersion(MetastoreServiceStubSettings.class)) + .setTransportToken( + GaxGrpcProperties.getGrpcTokenName(), GaxGrpcProperties.getGrpcVersion()); + } + + @BetaApi("The surface for customizing headers is not stable yet and may change in the future.") + public static ApiClientHeaderProvider.Builder defaultHttpJsonApiClientHeaderProviderBuilder() { + return ApiClientHeaderProvider.newBuilder() + .setGeneratedLibToken( + "gapic", GaxProperties.getLibraryVersion(MetastoreServiceStubSettings.class)) + .setTransportToken( + GaxHttpJsonProperties.getHttpJsonTokenName(), + GaxHttpJsonProperties.getHttpJsonVersion()); + } + + public static ApiClientHeaderProvider.Builder defaultApiClientHeaderProviderBuilder() { + return MetastoreServiceStubSettings.defaultGrpcApiClientHeaderProviderBuilder(); + } + + /** Returns a new gRPC builder for this class. */ + public static Builder newBuilder() { + return Builder.createDefault(); + } + + /** Returns a new REST builder for this class. */ + public static Builder newHttpJsonBuilder() { + return Builder.createHttpJsonDefault(); + } + + /** Returns a new builder for this class. */ + public static Builder newBuilder(ClientContext clientContext) { + return new Builder(clientContext); + } + + /** Returns a builder containing all the values of this settings class. 
*/ + public Builder toBuilder() { + return new Builder(this); + } + + protected MetastoreServiceStubSettings(Builder settingsBuilder) throws IOException { + super(settingsBuilder); + + createCatalogSettings = settingsBuilder.createCatalogSettings().build(); + deleteCatalogSettings = settingsBuilder.deleteCatalogSettings().build(); + getCatalogSettings = settingsBuilder.getCatalogSettings().build(); + listCatalogsSettings = settingsBuilder.listCatalogsSettings().build(); + createDatabaseSettings = settingsBuilder.createDatabaseSettings().build(); + deleteDatabaseSettings = settingsBuilder.deleteDatabaseSettings().build(); + updateDatabaseSettings = settingsBuilder.updateDatabaseSettings().build(); + getDatabaseSettings = settingsBuilder.getDatabaseSettings().build(); + listDatabasesSettings = settingsBuilder.listDatabasesSettings().build(); + createTableSettings = settingsBuilder.createTableSettings().build(); + deleteTableSettings = settingsBuilder.deleteTableSettings().build(); + updateTableSettings = settingsBuilder.updateTableSettings().build(); + getTableSettings = settingsBuilder.getTableSettings().build(); + listTablesSettings = settingsBuilder.listTablesSettings().build(); + createLockSettings = settingsBuilder.createLockSettings().build(); + deleteLockSettings = settingsBuilder.deleteLockSettings().build(); + checkLockSettings = settingsBuilder.checkLockSettings().build(); + listLocksSettings = settingsBuilder.listLocksSettings().build(); + } + + /** Builder for MetastoreServiceStubSettings. */ + public static class Builder extends StubSettings.Builder { + private final ImmutableList> unaryMethodSettingsBuilders; + private final UnaryCallSettings.Builder createCatalogSettings; + private final UnaryCallSettings.Builder deleteCatalogSettings; + private final UnaryCallSettings.Builder getCatalogSettings; + private final PagedCallSettings.Builder< + ListCatalogsRequest, ListCatalogsResponse, ListCatalogsPagedResponse> + listCatalogsSettings; + private final UnaryCallSettings.Builder createDatabaseSettings; + private final UnaryCallSettings.Builder deleteDatabaseSettings; + private final UnaryCallSettings.Builder updateDatabaseSettings; + private final UnaryCallSettings.Builder getDatabaseSettings; + private final PagedCallSettings.Builder< + ListDatabasesRequest, ListDatabasesResponse, ListDatabasesPagedResponse> + listDatabasesSettings; + private final UnaryCallSettings.Builder createTableSettings; + private final UnaryCallSettings.Builder deleteTableSettings; + private final UnaryCallSettings.Builder updateTableSettings; + private final UnaryCallSettings.Builder getTableSettings; + private final PagedCallSettings.Builder< + ListTablesRequest, ListTablesResponse, ListTablesPagedResponse> + listTablesSettings; + private final UnaryCallSettings.Builder createLockSettings; + private final UnaryCallSettings.Builder deleteLockSettings; + private final UnaryCallSettings.Builder checkLockSettings; + private final PagedCallSettings.Builder< + ListLocksRequest, ListLocksResponse, ListLocksPagedResponse> + listLocksSettings; + private static final ImmutableMap> + RETRYABLE_CODE_DEFINITIONS; + + static { + ImmutableMap.Builder> definitions = + ImmutableMap.builder(); + definitions.put( + "retry_policy_0_codes", + ImmutableSet.copyOf( + Lists.newArrayList( + StatusCode.Code.DEADLINE_EXCEEDED, StatusCode.Code.UNAVAILABLE))); + RETRYABLE_CODE_DEFINITIONS = definitions.build(); + } + + private static final ImmutableMap RETRY_PARAM_DEFINITIONS; + + static { + ImmutableMap.Builder definitions = 
ImmutableMap.builder(); + RetrySettings settings = null; + settings = + RetrySettings.newBuilder() + .setInitialRetryDelay(Duration.ofMillis(1000L)) + .setRetryDelayMultiplier(1.3) + .setMaxRetryDelay(Duration.ofMillis(10000L)) + .setInitialRpcTimeout(Duration.ofMillis(60000L)) + .setRpcTimeoutMultiplier(1.0) + .setMaxRpcTimeout(Duration.ofMillis(60000L)) + .setTotalTimeout(Duration.ofMillis(60000L)) + .build(); + definitions.put("retry_policy_0_params", settings); + RETRY_PARAM_DEFINITIONS = definitions.build(); + } + + protected Builder() { + this(((ClientContext) null)); + } + + protected Builder(ClientContext clientContext) { + super(clientContext); + + createCatalogSettings = UnaryCallSettings.newUnaryCallSettingsBuilder(); + deleteCatalogSettings = UnaryCallSettings.newUnaryCallSettingsBuilder(); + getCatalogSettings = UnaryCallSettings.newUnaryCallSettingsBuilder(); + listCatalogsSettings = PagedCallSettings.newBuilder(LIST_CATALOGS_PAGE_STR_FACT); + createDatabaseSettings = UnaryCallSettings.newUnaryCallSettingsBuilder(); + deleteDatabaseSettings = UnaryCallSettings.newUnaryCallSettingsBuilder(); + updateDatabaseSettings = UnaryCallSettings.newUnaryCallSettingsBuilder(); + getDatabaseSettings = UnaryCallSettings.newUnaryCallSettingsBuilder(); + listDatabasesSettings = PagedCallSettings.newBuilder(LIST_DATABASES_PAGE_STR_FACT); + createTableSettings = UnaryCallSettings.newUnaryCallSettingsBuilder(); + deleteTableSettings = UnaryCallSettings.newUnaryCallSettingsBuilder(); + updateTableSettings = UnaryCallSettings.newUnaryCallSettingsBuilder(); + getTableSettings = UnaryCallSettings.newUnaryCallSettingsBuilder(); + listTablesSettings = PagedCallSettings.newBuilder(LIST_TABLES_PAGE_STR_FACT); + createLockSettings = UnaryCallSettings.newUnaryCallSettingsBuilder(); + deleteLockSettings = UnaryCallSettings.newUnaryCallSettingsBuilder(); + checkLockSettings = UnaryCallSettings.newUnaryCallSettingsBuilder(); + listLocksSettings = PagedCallSettings.newBuilder(LIST_LOCKS_PAGE_STR_FACT); + + unaryMethodSettingsBuilders = + ImmutableList.>of( + createCatalogSettings, + deleteCatalogSettings, + getCatalogSettings, + listCatalogsSettings, + createDatabaseSettings, + deleteDatabaseSettings, + updateDatabaseSettings, + getDatabaseSettings, + listDatabasesSettings, + createTableSettings, + deleteTableSettings, + updateTableSettings, + getTableSettings, + listTablesSettings, + createLockSettings, + deleteLockSettings, + checkLockSettings, + listLocksSettings); + initDefaults(this); + } + + protected Builder(MetastoreServiceStubSettings settings) { + super(settings); + + createCatalogSettings = settings.createCatalogSettings.toBuilder(); + deleteCatalogSettings = settings.deleteCatalogSettings.toBuilder(); + getCatalogSettings = settings.getCatalogSettings.toBuilder(); + listCatalogsSettings = settings.listCatalogsSettings.toBuilder(); + createDatabaseSettings = settings.createDatabaseSettings.toBuilder(); + deleteDatabaseSettings = settings.deleteDatabaseSettings.toBuilder(); + updateDatabaseSettings = settings.updateDatabaseSettings.toBuilder(); + getDatabaseSettings = settings.getDatabaseSettings.toBuilder(); + listDatabasesSettings = settings.listDatabasesSettings.toBuilder(); + createTableSettings = settings.createTableSettings.toBuilder(); + deleteTableSettings = settings.deleteTableSettings.toBuilder(); + updateTableSettings = settings.updateTableSettings.toBuilder(); + getTableSettings = settings.getTableSettings.toBuilder(); + listTablesSettings = settings.listTablesSettings.toBuilder(); + 
createLockSettings = settings.createLockSettings.toBuilder(); + deleteLockSettings = settings.deleteLockSettings.toBuilder(); + checkLockSettings = settings.checkLockSettings.toBuilder(); + listLocksSettings = settings.listLocksSettings.toBuilder(); + + unaryMethodSettingsBuilders = + ImmutableList.>of( + createCatalogSettings, + deleteCatalogSettings, + getCatalogSettings, + listCatalogsSettings, + createDatabaseSettings, + deleteDatabaseSettings, + updateDatabaseSettings, + getDatabaseSettings, + listDatabasesSettings, + createTableSettings, + deleteTableSettings, + updateTableSettings, + getTableSettings, + listTablesSettings, + createLockSettings, + deleteLockSettings, + checkLockSettings, + listLocksSettings); + } + + private static Builder createDefault() { + Builder builder = new Builder(((ClientContext) null)); + + builder.setTransportChannelProvider(defaultTransportChannelProvider()); + builder.setCredentialsProvider(defaultCredentialsProviderBuilder().build()); + builder.setInternalHeaderProvider(defaultApiClientHeaderProviderBuilder().build()); + builder.setEndpoint(getDefaultEndpoint()); + builder.setMtlsEndpoint(getDefaultMtlsEndpoint()); + builder.setSwitchToMtlsEndpointAllowed(true); + + return initDefaults(builder); + } + + private static Builder createHttpJsonDefault() { + Builder builder = new Builder(((ClientContext) null)); + + builder.setTransportChannelProvider(defaultHttpJsonTransportProviderBuilder().build()); + builder.setCredentialsProvider(defaultCredentialsProviderBuilder().build()); + builder.setInternalHeaderProvider(defaultHttpJsonApiClientHeaderProviderBuilder().build()); + builder.setEndpoint(getDefaultEndpoint()); + builder.setMtlsEndpoint(getDefaultMtlsEndpoint()); + builder.setSwitchToMtlsEndpointAllowed(true); + + return initDefaults(builder); + } + + private static Builder initDefaults(Builder builder) { + builder + .createCatalogSettings() + .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes")) + .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params")); + + builder + .deleteCatalogSettings() + .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes")) + .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params")); + + builder + .getCatalogSettings() + .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes")) + .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params")); + + builder + .listCatalogsSettings() + .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes")) + .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params")); + + builder + .createDatabaseSettings() + .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes")) + .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params")); + + builder + .deleteDatabaseSettings() + .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes")) + .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params")); + + builder + .updateDatabaseSettings() + .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes")) + .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params")); + + builder + .getDatabaseSettings() + .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes")) + .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params")); + + builder + .listDatabasesSettings() + .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes")) + 
.setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params")); + + builder + .createTableSettings() + .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes")) + .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params")); + + builder + .deleteTableSettings() + .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes")) + .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params")); + + builder + .updateTableSettings() + .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes")) + .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params")); + + builder + .getTableSettings() + .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes")) + .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params")); + + builder + .listTablesSettings() + .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes")) + .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params")); + + builder + .createLockSettings() + .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes")) + .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params")); + + builder + .deleteLockSettings() + .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes")) + .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params")); + + builder + .checkLockSettings() + .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes")) + .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params")); + + builder + .listLocksSettings() + .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes")) + .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params")); + + return builder; + } + + /** + * Applies the given settings updater function to all of the unary API methods in this service. + * + *
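+ * <p>For illustration only (a sketch, not generated output): the updater below sets the retryable
+ * codes of every unary method to UNAVAILABLE; {@code builder} is assumed to be an instance of this
+ * Builder class, and the lambda matches the {@code ApiFunction} parameter of this method:
+ *
+ * <pre>{@code
+ * builder.applyToAllUnaryMethods(
+ *     callSettings -> {
+ *       callSettings.setRetryableCodes(StatusCode.Code.UNAVAILABLE);
+ *       return null;
+ *     });
+ * }</pre>
+ *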

Note: This method does not support applying settings to streaming methods. + */ + public Builder applyToAllUnaryMethods( + ApiFunction, Void> settingsUpdater) { + super.applyToAllUnaryMethods(unaryMethodSettingsBuilders, settingsUpdater); + return this; + } + + public ImmutableList> unaryMethodSettingsBuilders() { + return unaryMethodSettingsBuilders; + } + + /** Returns the builder for the settings used for calls to createCatalog. */ + public UnaryCallSettings.Builder createCatalogSettings() { + return createCatalogSettings; + } + + /** Returns the builder for the settings used for calls to deleteCatalog. */ + public UnaryCallSettings.Builder deleteCatalogSettings() { + return deleteCatalogSettings; + } + + /** Returns the builder for the settings used for calls to getCatalog. */ + public UnaryCallSettings.Builder getCatalogSettings() { + return getCatalogSettings; + } + + /** Returns the builder for the settings used for calls to listCatalogs. */ + public PagedCallSettings.Builder< + ListCatalogsRequest, ListCatalogsResponse, ListCatalogsPagedResponse> + listCatalogsSettings() { + return listCatalogsSettings; + } + + /** Returns the builder for the settings used for calls to createDatabase. */ + public UnaryCallSettings.Builder createDatabaseSettings() { + return createDatabaseSettings; + } + + /** Returns the builder for the settings used for calls to deleteDatabase. */ + public UnaryCallSettings.Builder deleteDatabaseSettings() { + return deleteDatabaseSettings; + } + + /** Returns the builder for the settings used for calls to updateDatabase. */ + public UnaryCallSettings.Builder updateDatabaseSettings() { + return updateDatabaseSettings; + } + + /** Returns the builder for the settings used for calls to getDatabase. */ + public UnaryCallSettings.Builder getDatabaseSettings() { + return getDatabaseSettings; + } + + /** Returns the builder for the settings used for calls to listDatabases. */ + public PagedCallSettings.Builder< + ListDatabasesRequest, ListDatabasesResponse, ListDatabasesPagedResponse> + listDatabasesSettings() { + return listDatabasesSettings; + } + + /** Returns the builder for the settings used for calls to createTable. */ + public UnaryCallSettings.Builder createTableSettings() { + return createTableSettings; + } + + /** Returns the builder for the settings used for calls to deleteTable. */ + public UnaryCallSettings.Builder deleteTableSettings() { + return deleteTableSettings; + } + + /** Returns the builder for the settings used for calls to updateTable. */ + public UnaryCallSettings.Builder updateTableSettings() { + return updateTableSettings; + } + + /** Returns the builder for the settings used for calls to getTable. */ + public UnaryCallSettings.Builder getTableSettings() { + return getTableSettings; + } + + /** Returns the builder for the settings used for calls to listTables. */ + public PagedCallSettings.Builder + listTablesSettings() { + return listTablesSettings; + } + + /** Returns the builder for the settings used for calls to createLock. */ + public UnaryCallSettings.Builder createLockSettings() { + return createLockSettings; + } + + /** Returns the builder for the settings used for calls to deleteLock. */ + public UnaryCallSettings.Builder deleteLockSettings() { + return deleteLockSettings; + } + + /** Returns the builder for the settings used for calls to checkLock. */ + public UnaryCallSettings.Builder checkLockSettings() { + return checkLockSettings; + } + + /** Returns the builder for the settings used for calls to listLocks. 
*/ + public PagedCallSettings.Builder + listLocksSettings() { + return listLocksSettings; + } + + @Override + public MetastoreServiceStubSettings build() throws IOException { + return new MetastoreServiceStubSettings(this); + } + } +} diff --git a/java-biglake/google-cloud-biglake/src/test/java/com/google/cloud/bigquery/biglake/v1alpha1/MetastoreServiceClientHttpJsonTest.java b/java-biglake/google-cloud-biglake/src/test/java/com/google/cloud/bigquery/biglake/v1alpha1/MetastoreServiceClientHttpJsonTest.java new file mode 100644 index 000000000000..7b02553a6809 --- /dev/null +++ b/java-biglake/google-cloud-biglake/src/test/java/com/google/cloud/bigquery/biglake/v1alpha1/MetastoreServiceClientHttpJsonTest.java @@ -0,0 +1,1820 @@ +/* + * Copyright 2022 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.biglake.v1alpha1; + +import static com.google.cloud.bigquery.biglake.v1alpha1.MetastoreServiceClient.ListCatalogsPagedResponse; +import static com.google.cloud.bigquery.biglake.v1alpha1.MetastoreServiceClient.ListDatabasesPagedResponse; +import static com.google.cloud.bigquery.biglake.v1alpha1.MetastoreServiceClient.ListLocksPagedResponse; +import static com.google.cloud.bigquery.biglake.v1alpha1.MetastoreServiceClient.ListTablesPagedResponse; + +import com.google.api.gax.core.NoCredentialsProvider; +import com.google.api.gax.httpjson.GaxHttpJsonProperties; +import com.google.api.gax.httpjson.testing.MockHttpService; +import com.google.api.gax.rpc.ApiClientHeaderProvider; +import com.google.api.gax.rpc.ApiException; +import com.google.api.gax.rpc.ApiExceptionFactory; +import com.google.api.gax.rpc.InvalidArgumentException; +import com.google.api.gax.rpc.StatusCode; +import com.google.api.gax.rpc.testing.FakeStatusCode; +import com.google.cloud.bigquery.biglake.v1alpha1.stub.HttpJsonMetastoreServiceStub; +import com.google.common.collect.Lists; +import com.google.protobuf.Empty; +import com.google.protobuf.FieldMask; +import com.google.protobuf.Timestamp; +import java.io.IOException; +import java.util.Arrays; +import java.util.List; +import javax.annotation.Generated; +import org.junit.After; +import org.junit.AfterClass; +import org.junit.Assert; +import org.junit.Before; +import org.junit.BeforeClass; +import org.junit.Test; + +@Generated("by gapic-generator-java") +public class MetastoreServiceClientHttpJsonTest { + private static MockHttpService mockService; + private static MetastoreServiceClient client; + + @BeforeClass + public static void startStaticServer() throws IOException { + mockService = + new MockHttpService( + HttpJsonMetastoreServiceStub.getMethodDescriptors(), + MetastoreServiceSettings.getDefaultEndpoint()); + MetastoreServiceSettings settings = + MetastoreServiceSettings.newHttpJsonBuilder() + .setTransportChannelProvider( + MetastoreServiceSettings.defaultHttpJsonTransportProviderBuilder() + .setHttpTransport(mockService) + .build()) + .setCredentialsProvider(NoCredentialsProvider.create()) + .build(); + client = 
MetastoreServiceClient.create(settings); + } + + @AfterClass + public static void stopServer() { + client.close(); + } + + @Before + public void setUp() {} + + @After + public void tearDown() throws Exception { + mockService.reset(); + } + + @Test + public void createCatalogTest() throws Exception { + Catalog expectedResponse = + Catalog.newBuilder() + .setName(CatalogName.of("[PROJECT]", "[LOCATION]", "[CATALOG]").toString()) + .setCreateTime(Timestamp.newBuilder().build()) + .setUpdateTime(Timestamp.newBuilder().build()) + .setDeleteTime(Timestamp.newBuilder().build()) + .setExpireTime(Timestamp.newBuilder().build()) + .build(); + mockService.addResponse(expectedResponse); + + LocationName parent = LocationName.of("[PROJECT]", "[LOCATION]"); + Catalog catalog = Catalog.newBuilder().build(); + String catalogId = "catalogId1455933204"; + + Catalog actualResponse = client.createCatalog(parent, catalog, catalogId); + Assert.assertEquals(expectedResponse, actualResponse); + + List actualRequests = mockService.getRequestPaths(); + Assert.assertEquals(1, actualRequests.size()); + + String apiClientHeaderKey = + mockService + .getRequestHeaders() + .get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey()) + .iterator() + .next(); + Assert.assertTrue( + GaxHttpJsonProperties.getDefaultApiClientHeaderPattern() + .matcher(apiClientHeaderKey) + .matches()); + } + + @Test + public void createCatalogExceptionTest() throws Exception { + ApiException exception = + ApiExceptionFactory.createException( + new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false); + mockService.addException(exception); + + try { + LocationName parent = LocationName.of("[PROJECT]", "[LOCATION]"); + Catalog catalog = Catalog.newBuilder().build(); + String catalogId = "catalogId1455933204"; + client.createCatalog(parent, catalog, catalogId); + Assert.fail("No exception raised"); + } catch (InvalidArgumentException e) { + // Expected exception. 
+ } + } + + @Test + public void createCatalogTest2() throws Exception { + Catalog expectedResponse = + Catalog.newBuilder() + .setName(CatalogName.of("[PROJECT]", "[LOCATION]", "[CATALOG]").toString()) + .setCreateTime(Timestamp.newBuilder().build()) + .setUpdateTime(Timestamp.newBuilder().build()) + .setDeleteTime(Timestamp.newBuilder().build()) + .setExpireTime(Timestamp.newBuilder().build()) + .build(); + mockService.addResponse(expectedResponse); + + String parent = "projects/project-5833/locations/location-5833"; + Catalog catalog = Catalog.newBuilder().build(); + String catalogId = "catalogId1455933204"; + + Catalog actualResponse = client.createCatalog(parent, catalog, catalogId); + Assert.assertEquals(expectedResponse, actualResponse); + + List actualRequests = mockService.getRequestPaths(); + Assert.assertEquals(1, actualRequests.size()); + + String apiClientHeaderKey = + mockService + .getRequestHeaders() + .get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey()) + .iterator() + .next(); + Assert.assertTrue( + GaxHttpJsonProperties.getDefaultApiClientHeaderPattern() + .matcher(apiClientHeaderKey) + .matches()); + } + + @Test + public void createCatalogExceptionTest2() throws Exception { + ApiException exception = + ApiExceptionFactory.createException( + new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false); + mockService.addException(exception); + + try { + String parent = "projects/project-5833/locations/location-5833"; + Catalog catalog = Catalog.newBuilder().build(); + String catalogId = "catalogId1455933204"; + client.createCatalog(parent, catalog, catalogId); + Assert.fail("No exception raised"); + } catch (InvalidArgumentException e) { + // Expected exception. + } + } + + @Test + public void deleteCatalogTest() throws Exception { + Catalog expectedResponse = + Catalog.newBuilder() + .setName(CatalogName.of("[PROJECT]", "[LOCATION]", "[CATALOG]").toString()) + .setCreateTime(Timestamp.newBuilder().build()) + .setUpdateTime(Timestamp.newBuilder().build()) + .setDeleteTime(Timestamp.newBuilder().build()) + .setExpireTime(Timestamp.newBuilder().build()) + .build(); + mockService.addResponse(expectedResponse); + + CatalogName name = CatalogName.of("[PROJECT]", "[LOCATION]", "[CATALOG]"); + + Catalog actualResponse = client.deleteCatalog(name); + Assert.assertEquals(expectedResponse, actualResponse); + + List actualRequests = mockService.getRequestPaths(); + Assert.assertEquals(1, actualRequests.size()); + + String apiClientHeaderKey = + mockService + .getRequestHeaders() + .get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey()) + .iterator() + .next(); + Assert.assertTrue( + GaxHttpJsonProperties.getDefaultApiClientHeaderPattern() + .matcher(apiClientHeaderKey) + .matches()); + } + + @Test + public void deleteCatalogExceptionTest() throws Exception { + ApiException exception = + ApiExceptionFactory.createException( + new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false); + mockService.addException(exception); + + try { + CatalogName name = CatalogName.of("[PROJECT]", "[LOCATION]", "[CATALOG]"); + client.deleteCatalog(name); + Assert.fail("No exception raised"); + } catch (InvalidArgumentException e) { + // Expected exception. 
+ } + } + + @Test + public void deleteCatalogTest2() throws Exception { + Catalog expectedResponse = + Catalog.newBuilder() + .setName(CatalogName.of("[PROJECT]", "[LOCATION]", "[CATALOG]").toString()) + .setCreateTime(Timestamp.newBuilder().build()) + .setUpdateTime(Timestamp.newBuilder().build()) + .setDeleteTime(Timestamp.newBuilder().build()) + .setExpireTime(Timestamp.newBuilder().build()) + .build(); + mockService.addResponse(expectedResponse); + + String name = "projects/project-3958/locations/location-3958/catalogs/catalog-3958"; + + Catalog actualResponse = client.deleteCatalog(name); + Assert.assertEquals(expectedResponse, actualResponse); + + List actualRequests = mockService.getRequestPaths(); + Assert.assertEquals(1, actualRequests.size()); + + String apiClientHeaderKey = + mockService + .getRequestHeaders() + .get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey()) + .iterator() + .next(); + Assert.assertTrue( + GaxHttpJsonProperties.getDefaultApiClientHeaderPattern() + .matcher(apiClientHeaderKey) + .matches()); + } + + @Test + public void deleteCatalogExceptionTest2() throws Exception { + ApiException exception = + ApiExceptionFactory.createException( + new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false); + mockService.addException(exception); + + try { + String name = "projects/project-3958/locations/location-3958/catalogs/catalog-3958"; + client.deleteCatalog(name); + Assert.fail("No exception raised"); + } catch (InvalidArgumentException e) { + // Expected exception. + } + } + + @Test + public void getCatalogTest() throws Exception { + Catalog expectedResponse = + Catalog.newBuilder() + .setName(CatalogName.of("[PROJECT]", "[LOCATION]", "[CATALOG]").toString()) + .setCreateTime(Timestamp.newBuilder().build()) + .setUpdateTime(Timestamp.newBuilder().build()) + .setDeleteTime(Timestamp.newBuilder().build()) + .setExpireTime(Timestamp.newBuilder().build()) + .build(); + mockService.addResponse(expectedResponse); + + CatalogName name = CatalogName.of("[PROJECT]", "[LOCATION]", "[CATALOG]"); + + Catalog actualResponse = client.getCatalog(name); + Assert.assertEquals(expectedResponse, actualResponse); + + List actualRequests = mockService.getRequestPaths(); + Assert.assertEquals(1, actualRequests.size()); + + String apiClientHeaderKey = + mockService + .getRequestHeaders() + .get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey()) + .iterator() + .next(); + Assert.assertTrue( + GaxHttpJsonProperties.getDefaultApiClientHeaderPattern() + .matcher(apiClientHeaderKey) + .matches()); + } + + @Test + public void getCatalogExceptionTest() throws Exception { + ApiException exception = + ApiExceptionFactory.createException( + new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false); + mockService.addException(exception); + + try { + CatalogName name = CatalogName.of("[PROJECT]", "[LOCATION]", "[CATALOG]"); + client.getCatalog(name); + Assert.fail("No exception raised"); + } catch (InvalidArgumentException e) { + // Expected exception. 
+ } + } + + @Test + public void getCatalogTest2() throws Exception { + Catalog expectedResponse = + Catalog.newBuilder() + .setName(CatalogName.of("[PROJECT]", "[LOCATION]", "[CATALOG]").toString()) + .setCreateTime(Timestamp.newBuilder().build()) + .setUpdateTime(Timestamp.newBuilder().build()) + .setDeleteTime(Timestamp.newBuilder().build()) + .setExpireTime(Timestamp.newBuilder().build()) + .build(); + mockService.addResponse(expectedResponse); + + String name = "projects/project-3958/locations/location-3958/catalogs/catalog-3958"; + + Catalog actualResponse = client.getCatalog(name); + Assert.assertEquals(expectedResponse, actualResponse); + + List actualRequests = mockService.getRequestPaths(); + Assert.assertEquals(1, actualRequests.size()); + + String apiClientHeaderKey = + mockService + .getRequestHeaders() + .get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey()) + .iterator() + .next(); + Assert.assertTrue( + GaxHttpJsonProperties.getDefaultApiClientHeaderPattern() + .matcher(apiClientHeaderKey) + .matches()); + } + + @Test + public void getCatalogExceptionTest2() throws Exception { + ApiException exception = + ApiExceptionFactory.createException( + new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false); + mockService.addException(exception); + + try { + String name = "projects/project-3958/locations/location-3958/catalogs/catalog-3958"; + client.getCatalog(name); + Assert.fail("No exception raised"); + } catch (InvalidArgumentException e) { + // Expected exception. + } + } + + @Test + public void listCatalogsTest() throws Exception { + Catalog responsesElement = Catalog.newBuilder().build(); + ListCatalogsResponse expectedResponse = + ListCatalogsResponse.newBuilder() + .setNextPageToken("") + .addAllCatalogs(Arrays.asList(responsesElement)) + .build(); + mockService.addResponse(expectedResponse); + + LocationName parent = LocationName.of("[PROJECT]", "[LOCATION]"); + + ListCatalogsPagedResponse pagedListResponse = client.listCatalogs(parent); + + List resources = Lists.newArrayList(pagedListResponse.iterateAll()); + + Assert.assertEquals(1, resources.size()); + Assert.assertEquals(expectedResponse.getCatalogsList().get(0), resources.get(0)); + + List actualRequests = mockService.getRequestPaths(); + Assert.assertEquals(1, actualRequests.size()); + + String apiClientHeaderKey = + mockService + .getRequestHeaders() + .get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey()) + .iterator() + .next(); + Assert.assertTrue( + GaxHttpJsonProperties.getDefaultApiClientHeaderPattern() + .matcher(apiClientHeaderKey) + .matches()); + } + + @Test + public void listCatalogsExceptionTest() throws Exception { + ApiException exception = + ApiExceptionFactory.createException( + new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false); + mockService.addException(exception); + + try { + LocationName parent = LocationName.of("[PROJECT]", "[LOCATION]"); + client.listCatalogs(parent); + Assert.fail("No exception raised"); + } catch (InvalidArgumentException e) { + // Expected exception. 
+ } + } + + @Test + public void listCatalogsTest2() throws Exception { + Catalog responsesElement = Catalog.newBuilder().build(); + ListCatalogsResponse expectedResponse = + ListCatalogsResponse.newBuilder() + .setNextPageToken("") + .addAllCatalogs(Arrays.asList(responsesElement)) + .build(); + mockService.addResponse(expectedResponse); + + String parent = "projects/project-5833/locations/location-5833"; + + ListCatalogsPagedResponse pagedListResponse = client.listCatalogs(parent); + + List resources = Lists.newArrayList(pagedListResponse.iterateAll()); + + Assert.assertEquals(1, resources.size()); + Assert.assertEquals(expectedResponse.getCatalogsList().get(0), resources.get(0)); + + List actualRequests = mockService.getRequestPaths(); + Assert.assertEquals(1, actualRequests.size()); + + String apiClientHeaderKey = + mockService + .getRequestHeaders() + .get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey()) + .iterator() + .next(); + Assert.assertTrue( + GaxHttpJsonProperties.getDefaultApiClientHeaderPattern() + .matcher(apiClientHeaderKey) + .matches()); + } + + @Test + public void listCatalogsExceptionTest2() throws Exception { + ApiException exception = + ApiExceptionFactory.createException( + new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false); + mockService.addException(exception); + + try { + String parent = "projects/project-5833/locations/location-5833"; + client.listCatalogs(parent); + Assert.fail("No exception raised"); + } catch (InvalidArgumentException e) { + // Expected exception. + } + } + + @Test + public void createDatabaseTest() throws Exception { + Database expectedResponse = + Database.newBuilder() + .setName( + DatabaseName.of("[PROJECT]", "[LOCATION]", "[CATALOG]", "[DATABASE]").toString()) + .setCreateTime(Timestamp.newBuilder().build()) + .setUpdateTime(Timestamp.newBuilder().build()) + .setDeleteTime(Timestamp.newBuilder().build()) + .setExpireTime(Timestamp.newBuilder().build()) + .build(); + mockService.addResponse(expectedResponse); + + CatalogName parent = CatalogName.of("[PROJECT]", "[LOCATION]", "[CATALOG]"); + Database database = Database.newBuilder().build(); + String databaseId = "databaseId1688905718"; + + Database actualResponse = client.createDatabase(parent, database, databaseId); + Assert.assertEquals(expectedResponse, actualResponse); + + List actualRequests = mockService.getRequestPaths(); + Assert.assertEquals(1, actualRequests.size()); + + String apiClientHeaderKey = + mockService + .getRequestHeaders() + .get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey()) + .iterator() + .next(); + Assert.assertTrue( + GaxHttpJsonProperties.getDefaultApiClientHeaderPattern() + .matcher(apiClientHeaderKey) + .matches()); + } + + @Test + public void createDatabaseExceptionTest() throws Exception { + ApiException exception = + ApiExceptionFactory.createException( + new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false); + mockService.addException(exception); + + try { + CatalogName parent = CatalogName.of("[PROJECT]", "[LOCATION]", "[CATALOG]"); + Database database = Database.newBuilder().build(); + String databaseId = "databaseId1688905718"; + client.createDatabase(parent, database, databaseId); + Assert.fail("No exception raised"); + } catch (InvalidArgumentException e) { + // Expected exception. 
+ } + } + + @Test + public void createDatabaseTest2() throws Exception { + Database expectedResponse = + Database.newBuilder() + .setName( + DatabaseName.of("[PROJECT]", "[LOCATION]", "[CATALOG]", "[DATABASE]").toString()) + .setCreateTime(Timestamp.newBuilder().build()) + .setUpdateTime(Timestamp.newBuilder().build()) + .setDeleteTime(Timestamp.newBuilder().build()) + .setExpireTime(Timestamp.newBuilder().build()) + .build(); + mockService.addResponse(expectedResponse); + + String parent = "projects/project-6267/locations/location-6267/catalogs/catalog-6267"; + Database database = Database.newBuilder().build(); + String databaseId = "databaseId1688905718"; + + Database actualResponse = client.createDatabase(parent, database, databaseId); + Assert.assertEquals(expectedResponse, actualResponse); + + List actualRequests = mockService.getRequestPaths(); + Assert.assertEquals(1, actualRequests.size()); + + String apiClientHeaderKey = + mockService + .getRequestHeaders() + .get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey()) + .iterator() + .next(); + Assert.assertTrue( + GaxHttpJsonProperties.getDefaultApiClientHeaderPattern() + .matcher(apiClientHeaderKey) + .matches()); + } + + @Test + public void createDatabaseExceptionTest2() throws Exception { + ApiException exception = + ApiExceptionFactory.createException( + new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false); + mockService.addException(exception); + + try { + String parent = "projects/project-6267/locations/location-6267/catalogs/catalog-6267"; + Database database = Database.newBuilder().build(); + String databaseId = "databaseId1688905718"; + client.createDatabase(parent, database, databaseId); + Assert.fail("No exception raised"); + } catch (InvalidArgumentException e) { + // Expected exception. + } + } + + @Test + public void deleteDatabaseTest() throws Exception { + Database expectedResponse = + Database.newBuilder() + .setName( + DatabaseName.of("[PROJECT]", "[LOCATION]", "[CATALOG]", "[DATABASE]").toString()) + .setCreateTime(Timestamp.newBuilder().build()) + .setUpdateTime(Timestamp.newBuilder().build()) + .setDeleteTime(Timestamp.newBuilder().build()) + .setExpireTime(Timestamp.newBuilder().build()) + .build(); + mockService.addResponse(expectedResponse); + + DatabaseName name = DatabaseName.of("[PROJECT]", "[LOCATION]", "[CATALOG]", "[DATABASE]"); + + Database actualResponse = client.deleteDatabase(name); + Assert.assertEquals(expectedResponse, actualResponse); + + List actualRequests = mockService.getRequestPaths(); + Assert.assertEquals(1, actualRequests.size()); + + String apiClientHeaderKey = + mockService + .getRequestHeaders() + .get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey()) + .iterator() + .next(); + Assert.assertTrue( + GaxHttpJsonProperties.getDefaultApiClientHeaderPattern() + .matcher(apiClientHeaderKey) + .matches()); + } + + @Test + public void deleteDatabaseExceptionTest() throws Exception { + ApiException exception = + ApiExceptionFactory.createException( + new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false); + mockService.addException(exception); + + try { + DatabaseName name = DatabaseName.of("[PROJECT]", "[LOCATION]", "[CATALOG]", "[DATABASE]"); + client.deleteDatabase(name); + Assert.fail("No exception raised"); + } catch (InvalidArgumentException e) { + // Expected exception. 
+ } + } + + @Test + public void deleteDatabaseTest2() throws Exception { + Database expectedResponse = + Database.newBuilder() + .setName( + DatabaseName.of("[PROJECT]", "[LOCATION]", "[CATALOG]", "[DATABASE]").toString()) + .setCreateTime(Timestamp.newBuilder().build()) + .setUpdateTime(Timestamp.newBuilder().build()) + .setDeleteTime(Timestamp.newBuilder().build()) + .setExpireTime(Timestamp.newBuilder().build()) + .build(); + mockService.addResponse(expectedResponse); + + String name = + "projects/project-3426/locations/location-3426/catalogs/catalog-3426/databases/database-3426"; + + Database actualResponse = client.deleteDatabase(name); + Assert.assertEquals(expectedResponse, actualResponse); + + List actualRequests = mockService.getRequestPaths(); + Assert.assertEquals(1, actualRequests.size()); + + String apiClientHeaderKey = + mockService + .getRequestHeaders() + .get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey()) + .iterator() + .next(); + Assert.assertTrue( + GaxHttpJsonProperties.getDefaultApiClientHeaderPattern() + .matcher(apiClientHeaderKey) + .matches()); + } + + @Test + public void deleteDatabaseExceptionTest2() throws Exception { + ApiException exception = + ApiExceptionFactory.createException( + new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false); + mockService.addException(exception); + + try { + String name = + "projects/project-3426/locations/location-3426/catalogs/catalog-3426/databases/database-3426"; + client.deleteDatabase(name); + Assert.fail("No exception raised"); + } catch (InvalidArgumentException e) { + // Expected exception. + } + } + + @Test + public void updateDatabaseTest() throws Exception { + Database expectedResponse = + Database.newBuilder() + .setName( + DatabaseName.of("[PROJECT]", "[LOCATION]", "[CATALOG]", "[DATABASE]").toString()) + .setCreateTime(Timestamp.newBuilder().build()) + .setUpdateTime(Timestamp.newBuilder().build()) + .setDeleteTime(Timestamp.newBuilder().build()) + .setExpireTime(Timestamp.newBuilder().build()) + .build(); + mockService.addResponse(expectedResponse); + + Database database = + Database.newBuilder() + .setName( + DatabaseName.of("[PROJECT]", "[LOCATION]", "[CATALOG]", "[DATABASE]").toString()) + .setCreateTime(Timestamp.newBuilder().build()) + .setUpdateTime(Timestamp.newBuilder().build()) + .setDeleteTime(Timestamp.newBuilder().build()) + .setExpireTime(Timestamp.newBuilder().build()) + .build(); + FieldMask updateMask = FieldMask.newBuilder().build(); + + Database actualResponse = client.updateDatabase(database, updateMask); + Assert.assertEquals(expectedResponse, actualResponse); + + List actualRequests = mockService.getRequestPaths(); + Assert.assertEquals(1, actualRequests.size()); + + String apiClientHeaderKey = + mockService + .getRequestHeaders() + .get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey()) + .iterator() + .next(); + Assert.assertTrue( + GaxHttpJsonProperties.getDefaultApiClientHeaderPattern() + .matcher(apiClientHeaderKey) + .matches()); + } + + @Test + public void updateDatabaseExceptionTest() throws Exception { + ApiException exception = + ApiExceptionFactory.createException( + new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false); + mockService.addException(exception); + + try { + Database database = + Database.newBuilder() + .setName( + DatabaseName.of("[PROJECT]", "[LOCATION]", "[CATALOG]", "[DATABASE]").toString()) + .setCreateTime(Timestamp.newBuilder().build()) + .setUpdateTime(Timestamp.newBuilder().build()) + 
.setDeleteTime(Timestamp.newBuilder().build()) + .setExpireTime(Timestamp.newBuilder().build()) + .build(); + FieldMask updateMask = FieldMask.newBuilder().build(); + client.updateDatabase(database, updateMask); + Assert.fail("No exception raised"); + } catch (InvalidArgumentException e) { + // Expected exception. + } + } + + @Test + public void getDatabaseTest() throws Exception { + Database expectedResponse = + Database.newBuilder() + .setName( + DatabaseName.of("[PROJECT]", "[LOCATION]", "[CATALOG]", "[DATABASE]").toString()) + .setCreateTime(Timestamp.newBuilder().build()) + .setUpdateTime(Timestamp.newBuilder().build()) + .setDeleteTime(Timestamp.newBuilder().build()) + .setExpireTime(Timestamp.newBuilder().build()) + .build(); + mockService.addResponse(expectedResponse); + + DatabaseName name = DatabaseName.of("[PROJECT]", "[LOCATION]", "[CATALOG]", "[DATABASE]"); + + Database actualResponse = client.getDatabase(name); + Assert.assertEquals(expectedResponse, actualResponse); + + List actualRequests = mockService.getRequestPaths(); + Assert.assertEquals(1, actualRequests.size()); + + String apiClientHeaderKey = + mockService + .getRequestHeaders() + .get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey()) + .iterator() + .next(); + Assert.assertTrue( + GaxHttpJsonProperties.getDefaultApiClientHeaderPattern() + .matcher(apiClientHeaderKey) + .matches()); + } + + @Test + public void getDatabaseExceptionTest() throws Exception { + ApiException exception = + ApiExceptionFactory.createException( + new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false); + mockService.addException(exception); + + try { + DatabaseName name = DatabaseName.of("[PROJECT]", "[LOCATION]", "[CATALOG]", "[DATABASE]"); + client.getDatabase(name); + Assert.fail("No exception raised"); + } catch (InvalidArgumentException e) { + // Expected exception. + } + } + + @Test + public void getDatabaseTest2() throws Exception { + Database expectedResponse = + Database.newBuilder() + .setName( + DatabaseName.of("[PROJECT]", "[LOCATION]", "[CATALOG]", "[DATABASE]").toString()) + .setCreateTime(Timestamp.newBuilder().build()) + .setUpdateTime(Timestamp.newBuilder().build()) + .setDeleteTime(Timestamp.newBuilder().build()) + .setExpireTime(Timestamp.newBuilder().build()) + .build(); + mockService.addResponse(expectedResponse); + + String name = + "projects/project-3426/locations/location-3426/catalogs/catalog-3426/databases/database-3426"; + + Database actualResponse = client.getDatabase(name); + Assert.assertEquals(expectedResponse, actualResponse); + + List actualRequests = mockService.getRequestPaths(); + Assert.assertEquals(1, actualRequests.size()); + + String apiClientHeaderKey = + mockService + .getRequestHeaders() + .get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey()) + .iterator() + .next(); + Assert.assertTrue( + GaxHttpJsonProperties.getDefaultApiClientHeaderPattern() + .matcher(apiClientHeaderKey) + .matches()); + } + + @Test + public void getDatabaseExceptionTest2() throws Exception { + ApiException exception = + ApiExceptionFactory.createException( + new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false); + mockService.addException(exception); + + try { + String name = + "projects/project-3426/locations/location-3426/catalogs/catalog-3426/databases/database-3426"; + client.getDatabase(name); + Assert.fail("No exception raised"); + } catch (InvalidArgumentException e) { + // Expected exception. 
+ } + } + + @Test + public void listDatabasesTest() throws Exception { + Database responsesElement = Database.newBuilder().build(); + ListDatabasesResponse expectedResponse = + ListDatabasesResponse.newBuilder() + .setNextPageToken("") + .addAllDatabases(Arrays.asList(responsesElement)) + .build(); + mockService.addResponse(expectedResponse); + + CatalogName parent = CatalogName.of("[PROJECT]", "[LOCATION]", "[CATALOG]"); + + ListDatabasesPagedResponse pagedListResponse = client.listDatabases(parent); + + List resources = Lists.newArrayList(pagedListResponse.iterateAll()); + + Assert.assertEquals(1, resources.size()); + Assert.assertEquals(expectedResponse.getDatabasesList().get(0), resources.get(0)); + + List actualRequests = mockService.getRequestPaths(); + Assert.assertEquals(1, actualRequests.size()); + + String apiClientHeaderKey = + mockService + .getRequestHeaders() + .get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey()) + .iterator() + .next(); + Assert.assertTrue( + GaxHttpJsonProperties.getDefaultApiClientHeaderPattern() + .matcher(apiClientHeaderKey) + .matches()); + } + + @Test + public void listDatabasesExceptionTest() throws Exception { + ApiException exception = + ApiExceptionFactory.createException( + new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false); + mockService.addException(exception); + + try { + CatalogName parent = CatalogName.of("[PROJECT]", "[LOCATION]", "[CATALOG]"); + client.listDatabases(parent); + Assert.fail("No exception raised"); + } catch (InvalidArgumentException e) { + // Expected exception. + } + } + + @Test + public void listDatabasesTest2() throws Exception { + Database responsesElement = Database.newBuilder().build(); + ListDatabasesResponse expectedResponse = + ListDatabasesResponse.newBuilder() + .setNextPageToken("") + .addAllDatabases(Arrays.asList(responsesElement)) + .build(); + mockService.addResponse(expectedResponse); + + String parent = "projects/project-6267/locations/location-6267/catalogs/catalog-6267"; + + ListDatabasesPagedResponse pagedListResponse = client.listDatabases(parent); + + List resources = Lists.newArrayList(pagedListResponse.iterateAll()); + + Assert.assertEquals(1, resources.size()); + Assert.assertEquals(expectedResponse.getDatabasesList().get(0), resources.get(0)); + + List actualRequests = mockService.getRequestPaths(); + Assert.assertEquals(1, actualRequests.size()); + + String apiClientHeaderKey = + mockService + .getRequestHeaders() + .get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey()) + .iterator() + .next(); + Assert.assertTrue( + GaxHttpJsonProperties.getDefaultApiClientHeaderPattern() + .matcher(apiClientHeaderKey) + .matches()); + } + + @Test + public void listDatabasesExceptionTest2() throws Exception { + ApiException exception = + ApiExceptionFactory.createException( + new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false); + mockService.addException(exception); + + try { + String parent = "projects/project-6267/locations/location-6267/catalogs/catalog-6267"; + client.listDatabases(parent); + Assert.fail("No exception raised"); + } catch (InvalidArgumentException e) { + // Expected exception. 
+ } + } + + @Test + public void createTableTest() throws Exception { + Table expectedResponse = + Table.newBuilder() + .setName( + TableName.of("[PROJECT]", "[LOCATION]", "[CATALOG]", "[DATABASE]", "[TABLE]") + .toString()) + .setCreateTime(Timestamp.newBuilder().build()) + .setUpdateTime(Timestamp.newBuilder().build()) + .setDeleteTime(Timestamp.newBuilder().build()) + .setExpireTime(Timestamp.newBuilder().build()) + .build(); + mockService.addResponse(expectedResponse); + + DatabaseName parent = DatabaseName.of("[PROJECT]", "[LOCATION]", "[CATALOG]", "[DATABASE]"); + Table table = Table.newBuilder().build(); + String tableId = "tableId-1552905847"; + + Table actualResponse = client.createTable(parent, table, tableId); + Assert.assertEquals(expectedResponse, actualResponse); + + List actualRequests = mockService.getRequestPaths(); + Assert.assertEquals(1, actualRequests.size()); + + String apiClientHeaderKey = + mockService + .getRequestHeaders() + .get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey()) + .iterator() + .next(); + Assert.assertTrue( + GaxHttpJsonProperties.getDefaultApiClientHeaderPattern() + .matcher(apiClientHeaderKey) + .matches()); + } + + @Test + public void createTableExceptionTest() throws Exception { + ApiException exception = + ApiExceptionFactory.createException( + new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false); + mockService.addException(exception); + + try { + DatabaseName parent = DatabaseName.of("[PROJECT]", "[LOCATION]", "[CATALOG]", "[DATABASE]"); + Table table = Table.newBuilder().build(); + String tableId = "tableId-1552905847"; + client.createTable(parent, table, tableId); + Assert.fail("No exception raised"); + } catch (InvalidArgumentException e) { + // Expected exception. + } + } + + @Test + public void createTableTest2() throws Exception { + Table expectedResponse = + Table.newBuilder() + .setName( + TableName.of("[PROJECT]", "[LOCATION]", "[CATALOG]", "[DATABASE]", "[TABLE]") + .toString()) + .setCreateTime(Timestamp.newBuilder().build()) + .setUpdateTime(Timestamp.newBuilder().build()) + .setDeleteTime(Timestamp.newBuilder().build()) + .setExpireTime(Timestamp.newBuilder().build()) + .build(); + mockService.addResponse(expectedResponse); + + String parent = + "projects/project-6081/locations/location-6081/catalogs/catalog-6081/databases/database-6081"; + Table table = Table.newBuilder().build(); + String tableId = "tableId-1552905847"; + + Table actualResponse = client.createTable(parent, table, tableId); + Assert.assertEquals(expectedResponse, actualResponse); + + List actualRequests = mockService.getRequestPaths(); + Assert.assertEquals(1, actualRequests.size()); + + String apiClientHeaderKey = + mockService + .getRequestHeaders() + .get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey()) + .iterator() + .next(); + Assert.assertTrue( + GaxHttpJsonProperties.getDefaultApiClientHeaderPattern() + .matcher(apiClientHeaderKey) + .matches()); + } + + @Test + public void createTableExceptionTest2() throws Exception { + ApiException exception = + ApiExceptionFactory.createException( + new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false); + mockService.addException(exception); + + try { + String parent = + "projects/project-6081/locations/location-6081/catalogs/catalog-6081/databases/database-6081"; + Table table = Table.newBuilder().build(); + String tableId = "tableId-1552905847"; + client.createTable(parent, table, tableId); + Assert.fail("No exception raised"); + } catch 
(InvalidArgumentException e) { + // Expected exception. + } + } + + @Test + public void deleteTableTest() throws Exception { + Table expectedResponse = + Table.newBuilder() + .setName( + TableName.of("[PROJECT]", "[LOCATION]", "[CATALOG]", "[DATABASE]", "[TABLE]") + .toString()) + .setCreateTime(Timestamp.newBuilder().build()) + .setUpdateTime(Timestamp.newBuilder().build()) + .setDeleteTime(Timestamp.newBuilder().build()) + .setExpireTime(Timestamp.newBuilder().build()) + .build(); + mockService.addResponse(expectedResponse); + + TableName name = TableName.of("[PROJECT]", "[LOCATION]", "[CATALOG]", "[DATABASE]", "[TABLE]"); + + Table actualResponse = client.deleteTable(name); + Assert.assertEquals(expectedResponse, actualResponse); + + List actualRequests = mockService.getRequestPaths(); + Assert.assertEquals(1, actualRequests.size()); + + String apiClientHeaderKey = + mockService + .getRequestHeaders() + .get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey()) + .iterator() + .next(); + Assert.assertTrue( + GaxHttpJsonProperties.getDefaultApiClientHeaderPattern() + .matcher(apiClientHeaderKey) + .matches()); + } + + @Test + public void deleteTableExceptionTest() throws Exception { + ApiException exception = + ApiExceptionFactory.createException( + new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false); + mockService.addException(exception); + + try { + TableName name = + TableName.of("[PROJECT]", "[LOCATION]", "[CATALOG]", "[DATABASE]", "[TABLE]"); + client.deleteTable(name); + Assert.fail("No exception raised"); + } catch (InvalidArgumentException e) { + // Expected exception. + } + } + + @Test + public void deleteTableTest2() throws Exception { + Table expectedResponse = + Table.newBuilder() + .setName( + TableName.of("[PROJECT]", "[LOCATION]", "[CATALOG]", "[DATABASE]", "[TABLE]") + .toString()) + .setCreateTime(Timestamp.newBuilder().build()) + .setUpdateTime(Timestamp.newBuilder().build()) + .setDeleteTime(Timestamp.newBuilder().build()) + .setExpireTime(Timestamp.newBuilder().build()) + .build(); + mockService.addResponse(expectedResponse); + + String name = + "projects/project-3983/locations/location-3983/catalogs/catalog-3983/databases/database-3983/tables/table-3983"; + + Table actualResponse = client.deleteTable(name); + Assert.assertEquals(expectedResponse, actualResponse); + + List actualRequests = mockService.getRequestPaths(); + Assert.assertEquals(1, actualRequests.size()); + + String apiClientHeaderKey = + mockService + .getRequestHeaders() + .get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey()) + .iterator() + .next(); + Assert.assertTrue( + GaxHttpJsonProperties.getDefaultApiClientHeaderPattern() + .matcher(apiClientHeaderKey) + .matches()); + } + + @Test + public void deleteTableExceptionTest2() throws Exception { + ApiException exception = + ApiExceptionFactory.createException( + new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false); + mockService.addException(exception); + + try { + String name = + "projects/project-3983/locations/location-3983/catalogs/catalog-3983/databases/database-3983/tables/table-3983"; + client.deleteTable(name); + Assert.fail("No exception raised"); + } catch (InvalidArgumentException e) { + // Expected exception. 
+ } + } + + @Test + public void updateTableTest() throws Exception { + Table expectedResponse = + Table.newBuilder() + .setName( + TableName.of("[PROJECT]", "[LOCATION]", "[CATALOG]", "[DATABASE]", "[TABLE]") + .toString()) + .setCreateTime(Timestamp.newBuilder().build()) + .setUpdateTime(Timestamp.newBuilder().build()) + .setDeleteTime(Timestamp.newBuilder().build()) + .setExpireTime(Timestamp.newBuilder().build()) + .build(); + mockService.addResponse(expectedResponse); + + Table table = + Table.newBuilder() + .setName( + TableName.of("[PROJECT]", "[LOCATION]", "[CATALOG]", "[DATABASE]", "[TABLE]") + .toString()) + .setCreateTime(Timestamp.newBuilder().build()) + .setUpdateTime(Timestamp.newBuilder().build()) + .setDeleteTime(Timestamp.newBuilder().build()) + .setExpireTime(Timestamp.newBuilder().build()) + .build(); + FieldMask updateMask = FieldMask.newBuilder().build(); + + Table actualResponse = client.updateTable(table, updateMask); + Assert.assertEquals(expectedResponse, actualResponse); + + List actualRequests = mockService.getRequestPaths(); + Assert.assertEquals(1, actualRequests.size()); + + String apiClientHeaderKey = + mockService + .getRequestHeaders() + .get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey()) + .iterator() + .next(); + Assert.assertTrue( + GaxHttpJsonProperties.getDefaultApiClientHeaderPattern() + .matcher(apiClientHeaderKey) + .matches()); + } + + @Test + public void updateTableExceptionTest() throws Exception { + ApiException exception = + ApiExceptionFactory.createException( + new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false); + mockService.addException(exception); + + try { + Table table = + Table.newBuilder() + .setName( + TableName.of("[PROJECT]", "[LOCATION]", "[CATALOG]", "[DATABASE]", "[TABLE]") + .toString()) + .setCreateTime(Timestamp.newBuilder().build()) + .setUpdateTime(Timestamp.newBuilder().build()) + .setDeleteTime(Timestamp.newBuilder().build()) + .setExpireTime(Timestamp.newBuilder().build()) + .build(); + FieldMask updateMask = FieldMask.newBuilder().build(); + client.updateTable(table, updateMask); + Assert.fail("No exception raised"); + } catch (InvalidArgumentException e) { + // Expected exception. 
+ } + } + + @Test + public void getTableTest() throws Exception { + Table expectedResponse = + Table.newBuilder() + .setName( + TableName.of("[PROJECT]", "[LOCATION]", "[CATALOG]", "[DATABASE]", "[TABLE]") + .toString()) + .setCreateTime(Timestamp.newBuilder().build()) + .setUpdateTime(Timestamp.newBuilder().build()) + .setDeleteTime(Timestamp.newBuilder().build()) + .setExpireTime(Timestamp.newBuilder().build()) + .build(); + mockService.addResponse(expectedResponse); + + TableName name = TableName.of("[PROJECT]", "[LOCATION]", "[CATALOG]", "[DATABASE]", "[TABLE]"); + + Table actualResponse = client.getTable(name); + Assert.assertEquals(expectedResponse, actualResponse); + + List actualRequests = mockService.getRequestPaths(); + Assert.assertEquals(1, actualRequests.size()); + + String apiClientHeaderKey = + mockService + .getRequestHeaders() + .get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey()) + .iterator() + .next(); + Assert.assertTrue( + GaxHttpJsonProperties.getDefaultApiClientHeaderPattern() + .matcher(apiClientHeaderKey) + .matches()); + } + + @Test + public void getTableExceptionTest() throws Exception { + ApiException exception = + ApiExceptionFactory.createException( + new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false); + mockService.addException(exception); + + try { + TableName name = + TableName.of("[PROJECT]", "[LOCATION]", "[CATALOG]", "[DATABASE]", "[TABLE]"); + client.getTable(name); + Assert.fail("No exception raised"); + } catch (InvalidArgumentException e) { + // Expected exception. + } + } + + @Test + public void getTableTest2() throws Exception { + Table expectedResponse = + Table.newBuilder() + .setName( + TableName.of("[PROJECT]", "[LOCATION]", "[CATALOG]", "[DATABASE]", "[TABLE]") + .toString()) + .setCreateTime(Timestamp.newBuilder().build()) + .setUpdateTime(Timestamp.newBuilder().build()) + .setDeleteTime(Timestamp.newBuilder().build()) + .setExpireTime(Timestamp.newBuilder().build()) + .build(); + mockService.addResponse(expectedResponse); + + String name = + "projects/project-3983/locations/location-3983/catalogs/catalog-3983/databases/database-3983/tables/table-3983"; + + Table actualResponse = client.getTable(name); + Assert.assertEquals(expectedResponse, actualResponse); + + List actualRequests = mockService.getRequestPaths(); + Assert.assertEquals(1, actualRequests.size()); + + String apiClientHeaderKey = + mockService + .getRequestHeaders() + .get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey()) + .iterator() + .next(); + Assert.assertTrue( + GaxHttpJsonProperties.getDefaultApiClientHeaderPattern() + .matcher(apiClientHeaderKey) + .matches()); + } + + @Test + public void getTableExceptionTest2() throws Exception { + ApiException exception = + ApiExceptionFactory.createException( + new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false); + mockService.addException(exception); + + try { + String name = + "projects/project-3983/locations/location-3983/catalogs/catalog-3983/databases/database-3983/tables/table-3983"; + client.getTable(name); + Assert.fail("No exception raised"); + } catch (InvalidArgumentException e) { + // Expected exception. 
+ } + } + + @Test + public void listTablesTest() throws Exception { + Table responsesElement = Table.newBuilder().build(); + ListTablesResponse expectedResponse = + ListTablesResponse.newBuilder() + .setNextPageToken("") + .addAllTables(Arrays.asList(responsesElement)) + .build(); + mockService.addResponse(expectedResponse); + + DatabaseName parent = DatabaseName.of("[PROJECT]", "[LOCATION]", "[CATALOG]", "[DATABASE]"); + + ListTablesPagedResponse pagedListResponse = client.listTables(parent); + + List<Table>
resources = Lists.newArrayList(pagedListResponse.iterateAll()); + + Assert.assertEquals(1, resources.size()); + Assert.assertEquals(expectedResponse.getTablesList().get(0), resources.get(0)); + + List actualRequests = mockService.getRequestPaths(); + Assert.assertEquals(1, actualRequests.size()); + + String apiClientHeaderKey = + mockService + .getRequestHeaders() + .get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey()) + .iterator() + .next(); + Assert.assertTrue( + GaxHttpJsonProperties.getDefaultApiClientHeaderPattern() + .matcher(apiClientHeaderKey) + .matches()); + } + + @Test + public void listTablesExceptionTest() throws Exception { + ApiException exception = + ApiExceptionFactory.createException( + new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false); + mockService.addException(exception); + + try { + DatabaseName parent = DatabaseName.of("[PROJECT]", "[LOCATION]", "[CATALOG]", "[DATABASE]"); + client.listTables(parent); + Assert.fail("No exception raised"); + } catch (InvalidArgumentException e) { + // Expected exception. + } + } + + @Test + public void listTablesTest2() throws Exception { + Table responsesElement = Table.newBuilder().build(); + ListTablesResponse expectedResponse = + ListTablesResponse.newBuilder() + .setNextPageToken("") + .addAllTables(Arrays.asList(responsesElement)) + .build(); + mockService.addResponse(expectedResponse); + + String parent = + "projects/project-6081/locations/location-6081/catalogs/catalog-6081/databases/database-6081"; + + ListTablesPagedResponse pagedListResponse = client.listTables(parent); + + List<Table>
resources = Lists.newArrayList(pagedListResponse.iterateAll()); + + Assert.assertEquals(1, resources.size()); + Assert.assertEquals(expectedResponse.getTablesList().get(0), resources.get(0)); + + List actualRequests = mockService.getRequestPaths(); + Assert.assertEquals(1, actualRequests.size()); + + String apiClientHeaderKey = + mockService + .getRequestHeaders() + .get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey()) + .iterator() + .next(); + Assert.assertTrue( + GaxHttpJsonProperties.getDefaultApiClientHeaderPattern() + .matcher(apiClientHeaderKey) + .matches()); + } + + @Test + public void listTablesExceptionTest2() throws Exception { + ApiException exception = + ApiExceptionFactory.createException( + new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false); + mockService.addException(exception); + + try { + String parent = + "projects/project-6081/locations/location-6081/catalogs/catalog-6081/databases/database-6081"; + client.listTables(parent); + Assert.fail("No exception raised"); + } catch (InvalidArgumentException e) { + // Expected exception. + } + } + + @Test + public void createLockTest() throws Exception { + Lock expectedResponse = + Lock.newBuilder() + .setName( + LockName.of("[PROJECT]", "[LOCATION]", "[CATALOG]", "[DATABASE]", "[LOCK]") + .toString()) + .setCreateTime(Timestamp.newBuilder().build()) + .build(); + mockService.addResponse(expectedResponse); + + DatabaseName parent = DatabaseName.of("[PROJECT]", "[LOCATION]", "[CATALOG]", "[DATABASE]"); + Lock lock = Lock.newBuilder().build(); + + Lock actualResponse = client.createLock(parent, lock); + Assert.assertEquals(expectedResponse, actualResponse); + + List actualRequests = mockService.getRequestPaths(); + Assert.assertEquals(1, actualRequests.size()); + + String apiClientHeaderKey = + mockService + .getRequestHeaders() + .get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey()) + .iterator() + .next(); + Assert.assertTrue( + GaxHttpJsonProperties.getDefaultApiClientHeaderPattern() + .matcher(apiClientHeaderKey) + .matches()); + } + + @Test + public void createLockExceptionTest() throws Exception { + ApiException exception = + ApiExceptionFactory.createException( + new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false); + mockService.addException(exception); + + try { + DatabaseName parent = DatabaseName.of("[PROJECT]", "[LOCATION]", "[CATALOG]", "[DATABASE]"); + Lock lock = Lock.newBuilder().build(); + client.createLock(parent, lock); + Assert.fail("No exception raised"); + } catch (InvalidArgumentException e) { + // Expected exception. 
+ } + } + + @Test + public void createLockTest2() throws Exception { + Lock expectedResponse = + Lock.newBuilder() + .setName( + LockName.of("[PROJECT]", "[LOCATION]", "[CATALOG]", "[DATABASE]", "[LOCK]") + .toString()) + .setCreateTime(Timestamp.newBuilder().build()) + .build(); + mockService.addResponse(expectedResponse); + + String parent = + "projects/project-6081/locations/location-6081/catalogs/catalog-6081/databases/database-6081"; + Lock lock = Lock.newBuilder().build(); + + Lock actualResponse = client.createLock(parent, lock); + Assert.assertEquals(expectedResponse, actualResponse); + + List actualRequests = mockService.getRequestPaths(); + Assert.assertEquals(1, actualRequests.size()); + + String apiClientHeaderKey = + mockService + .getRequestHeaders() + .get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey()) + .iterator() + .next(); + Assert.assertTrue( + GaxHttpJsonProperties.getDefaultApiClientHeaderPattern() + .matcher(apiClientHeaderKey) + .matches()); + } + + @Test + public void createLockExceptionTest2() throws Exception { + ApiException exception = + ApiExceptionFactory.createException( + new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false); + mockService.addException(exception); + + try { + String parent = + "projects/project-6081/locations/location-6081/catalogs/catalog-6081/databases/database-6081"; + Lock lock = Lock.newBuilder().build(); + client.createLock(parent, lock); + Assert.fail("No exception raised"); + } catch (InvalidArgumentException e) { + // Expected exception. + } + } + + @Test + public void deleteLockTest() throws Exception { + Empty expectedResponse = Empty.newBuilder().build(); + mockService.addResponse(expectedResponse); + + LockName name = LockName.of("[PROJECT]", "[LOCATION]", "[CATALOG]", "[DATABASE]", "[LOCK]"); + + client.deleteLock(name); + + List actualRequests = mockService.getRequestPaths(); + Assert.assertEquals(1, actualRequests.size()); + + String apiClientHeaderKey = + mockService + .getRequestHeaders() + .get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey()) + .iterator() + .next(); + Assert.assertTrue( + GaxHttpJsonProperties.getDefaultApiClientHeaderPattern() + .matcher(apiClientHeaderKey) + .matches()); + } + + @Test + public void deleteLockExceptionTest() throws Exception { + ApiException exception = + ApiExceptionFactory.createException( + new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false); + mockService.addException(exception); + + try { + LockName name = LockName.of("[PROJECT]", "[LOCATION]", "[CATALOG]", "[DATABASE]", "[LOCK]"); + client.deleteLock(name); + Assert.fail("No exception raised"); + } catch (InvalidArgumentException e) { + // Expected exception. 
+ } + } + + @Test + public void deleteLockTest2() throws Exception { + Empty expectedResponse = Empty.newBuilder().build(); + mockService.addResponse(expectedResponse); + + String name = + "projects/project-1582/locations/location-1582/catalogs/catalog-1582/databases/database-1582/locks/lock-1582"; + + client.deleteLock(name); + + List actualRequests = mockService.getRequestPaths(); + Assert.assertEquals(1, actualRequests.size()); + + String apiClientHeaderKey = + mockService + .getRequestHeaders() + .get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey()) + .iterator() + .next(); + Assert.assertTrue( + GaxHttpJsonProperties.getDefaultApiClientHeaderPattern() + .matcher(apiClientHeaderKey) + .matches()); + } + + @Test + public void deleteLockExceptionTest2() throws Exception { + ApiException exception = + ApiExceptionFactory.createException( + new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false); + mockService.addException(exception); + + try { + String name = + "projects/project-1582/locations/location-1582/catalogs/catalog-1582/databases/database-1582/locks/lock-1582"; + client.deleteLock(name); + Assert.fail("No exception raised"); + } catch (InvalidArgumentException e) { + // Expected exception. + } + } + + @Test + public void checkLockTest() throws Exception { + Lock expectedResponse = + Lock.newBuilder() + .setName( + LockName.of("[PROJECT]", "[LOCATION]", "[CATALOG]", "[DATABASE]", "[LOCK]") + .toString()) + .setCreateTime(Timestamp.newBuilder().build()) + .build(); + mockService.addResponse(expectedResponse); + + LockName name = LockName.of("[PROJECT]", "[LOCATION]", "[CATALOG]", "[DATABASE]", "[LOCK]"); + + Lock actualResponse = client.checkLock(name); + Assert.assertEquals(expectedResponse, actualResponse); + + List actualRequests = mockService.getRequestPaths(); + Assert.assertEquals(1, actualRequests.size()); + + String apiClientHeaderKey = + mockService + .getRequestHeaders() + .get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey()) + .iterator() + .next(); + Assert.assertTrue( + GaxHttpJsonProperties.getDefaultApiClientHeaderPattern() + .matcher(apiClientHeaderKey) + .matches()); + } + + @Test + public void checkLockExceptionTest() throws Exception { + ApiException exception = + ApiExceptionFactory.createException( + new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false); + mockService.addException(exception); + + try { + LockName name = LockName.of("[PROJECT]", "[LOCATION]", "[CATALOG]", "[DATABASE]", "[LOCK]"); + client.checkLock(name); + Assert.fail("No exception raised"); + } catch (InvalidArgumentException e) { + // Expected exception. 
+ } + } + + @Test + public void checkLockTest2() throws Exception { + Lock expectedResponse = + Lock.newBuilder() + .setName( + LockName.of("[PROJECT]", "[LOCATION]", "[CATALOG]", "[DATABASE]", "[LOCK]") + .toString()) + .setCreateTime(Timestamp.newBuilder().build()) + .build(); + mockService.addResponse(expectedResponse); + + String name = + "projects/project-1582/locations/location-1582/catalogs/catalog-1582/databases/database-1582/locks/lock-1582"; + + Lock actualResponse = client.checkLock(name); + Assert.assertEquals(expectedResponse, actualResponse); + + List actualRequests = mockService.getRequestPaths(); + Assert.assertEquals(1, actualRequests.size()); + + String apiClientHeaderKey = + mockService + .getRequestHeaders() + .get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey()) + .iterator() + .next(); + Assert.assertTrue( + GaxHttpJsonProperties.getDefaultApiClientHeaderPattern() + .matcher(apiClientHeaderKey) + .matches()); + } + + @Test + public void checkLockExceptionTest2() throws Exception { + ApiException exception = + ApiExceptionFactory.createException( + new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false); + mockService.addException(exception); + + try { + String name = + "projects/project-1582/locations/location-1582/catalogs/catalog-1582/databases/database-1582/locks/lock-1582"; + client.checkLock(name); + Assert.fail("No exception raised"); + } catch (InvalidArgumentException e) { + // Expected exception. + } + } + + @Test + public void listLocksTest() throws Exception { + Lock responsesElement = Lock.newBuilder().build(); + ListLocksResponse expectedResponse = + ListLocksResponse.newBuilder() + .setNextPageToken("") + .addAllLocks(Arrays.asList(responsesElement)) + .build(); + mockService.addResponse(expectedResponse); + + DatabaseName parent = DatabaseName.of("[PROJECT]", "[LOCATION]", "[CATALOG]", "[DATABASE]"); + + ListLocksPagedResponse pagedListResponse = client.listLocks(parent); + + List resources = Lists.newArrayList(pagedListResponse.iterateAll()); + + Assert.assertEquals(1, resources.size()); + Assert.assertEquals(expectedResponse.getLocksList().get(0), resources.get(0)); + + List actualRequests = mockService.getRequestPaths(); + Assert.assertEquals(1, actualRequests.size()); + + String apiClientHeaderKey = + mockService + .getRequestHeaders() + .get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey()) + .iterator() + .next(); + Assert.assertTrue( + GaxHttpJsonProperties.getDefaultApiClientHeaderPattern() + .matcher(apiClientHeaderKey) + .matches()); + } + + @Test + public void listLocksExceptionTest() throws Exception { + ApiException exception = + ApiExceptionFactory.createException( + new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false); + mockService.addException(exception); + + try { + DatabaseName parent = DatabaseName.of("[PROJECT]", "[LOCATION]", "[CATALOG]", "[DATABASE]"); + client.listLocks(parent); + Assert.fail("No exception raised"); + } catch (InvalidArgumentException e) { + // Expected exception. 
+ } + } + + @Test + public void listLocksTest2() throws Exception { + Lock responsesElement = Lock.newBuilder().build(); + ListLocksResponse expectedResponse = + ListLocksResponse.newBuilder() + .setNextPageToken("") + .addAllLocks(Arrays.asList(responsesElement)) + .build(); + mockService.addResponse(expectedResponse); + + String parent = + "projects/project-6081/locations/location-6081/catalogs/catalog-6081/databases/database-6081"; + + ListLocksPagedResponse pagedListResponse = client.listLocks(parent); + + List resources = Lists.newArrayList(pagedListResponse.iterateAll()); + + Assert.assertEquals(1, resources.size()); + Assert.assertEquals(expectedResponse.getLocksList().get(0), resources.get(0)); + + List actualRequests = mockService.getRequestPaths(); + Assert.assertEquals(1, actualRequests.size()); + + String apiClientHeaderKey = + mockService + .getRequestHeaders() + .get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey()) + .iterator() + .next(); + Assert.assertTrue( + GaxHttpJsonProperties.getDefaultApiClientHeaderPattern() + .matcher(apiClientHeaderKey) + .matches()); + } + + @Test + public void listLocksExceptionTest2() throws Exception { + ApiException exception = + ApiExceptionFactory.createException( + new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false); + mockService.addException(exception); + + try { + String parent = + "projects/project-6081/locations/location-6081/catalogs/catalog-6081/databases/database-6081"; + client.listLocks(parent); + Assert.fail("No exception raised"); + } catch (InvalidArgumentException e) { + // Expected exception. + } + } +} diff --git a/java-biglake/google-cloud-biglake/src/test/java/com/google/cloud/bigquery/biglake/v1alpha1/MetastoreServiceClientTest.java b/java-biglake/google-cloud-biglake/src/test/java/com/google/cloud/bigquery/biglake/v1alpha1/MetastoreServiceClientTest.java new file mode 100644 index 000000000000..30bf80d784c8 --- /dev/null +++ b/java-biglake/google-cloud-biglake/src/test/java/com/google/cloud/bigquery/biglake/v1alpha1/MetastoreServiceClientTest.java @@ -0,0 +1,1581 @@ +/* + * Copyright 2022 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.cloud.bigquery.biglake.v1alpha1; + +import static com.google.cloud.bigquery.biglake.v1alpha1.MetastoreServiceClient.ListCatalogsPagedResponse; +import static com.google.cloud.bigquery.biglake.v1alpha1.MetastoreServiceClient.ListDatabasesPagedResponse; +import static com.google.cloud.bigquery.biglake.v1alpha1.MetastoreServiceClient.ListLocksPagedResponse; +import static com.google.cloud.bigquery.biglake.v1alpha1.MetastoreServiceClient.ListTablesPagedResponse; + +import com.google.api.gax.core.NoCredentialsProvider; +import com.google.api.gax.grpc.GaxGrpcProperties; +import com.google.api.gax.grpc.testing.LocalChannelProvider; +import com.google.api.gax.grpc.testing.MockGrpcService; +import com.google.api.gax.grpc.testing.MockServiceHelper; +import com.google.api.gax.rpc.ApiClientHeaderProvider; +import com.google.api.gax.rpc.InvalidArgumentException; +import com.google.common.collect.Lists; +import com.google.protobuf.AbstractMessage; +import com.google.protobuf.Empty; +import com.google.protobuf.FieldMask; +import com.google.protobuf.Timestamp; +import io.grpc.StatusRuntimeException; +import java.io.IOException; +import java.util.Arrays; +import java.util.List; +import java.util.UUID; +import javax.annotation.Generated; +import org.junit.After; +import org.junit.AfterClass; +import org.junit.Assert; +import org.junit.Before; +import org.junit.BeforeClass; +import org.junit.Test; + +@Generated("by gapic-generator-java") +public class MetastoreServiceClientTest { + private static MockMetastoreService mockMetastoreService; + private static MockServiceHelper mockServiceHelper; + private LocalChannelProvider channelProvider; + private MetastoreServiceClient client; + + @BeforeClass + public static void startStaticServer() { + mockMetastoreService = new MockMetastoreService(); + mockServiceHelper = + new MockServiceHelper( + UUID.randomUUID().toString(), Arrays.asList(mockMetastoreService)); + mockServiceHelper.start(); + } + + @AfterClass + public static void stopServer() { + mockServiceHelper.stop(); + } + + @Before + public void setUp() throws IOException { + mockServiceHelper.reset(); + channelProvider = mockServiceHelper.createChannelProvider(); + MetastoreServiceSettings settings = + MetastoreServiceSettings.newBuilder() + .setTransportChannelProvider(channelProvider) + .setCredentialsProvider(NoCredentialsProvider.create()) + .build(); + client = MetastoreServiceClient.create(settings); + } + + @After + public void tearDown() throws Exception { + client.close(); + } + + @Test + public void createCatalogTest() throws Exception { + Catalog expectedResponse = + Catalog.newBuilder() + .setName(CatalogName.of("[PROJECT]", "[LOCATION]", "[CATALOG]").toString()) + .setCreateTime(Timestamp.newBuilder().build()) + .setUpdateTime(Timestamp.newBuilder().build()) + .setDeleteTime(Timestamp.newBuilder().build()) + .setExpireTime(Timestamp.newBuilder().build()) + .build(); + mockMetastoreService.addResponse(expectedResponse); + + LocationName parent = LocationName.of("[PROJECT]", "[LOCATION]"); + Catalog catalog = Catalog.newBuilder().build(); + String catalogId = "catalogId1455933204"; + + Catalog actualResponse = client.createCatalog(parent, catalog, catalogId); + Assert.assertEquals(expectedResponse, actualResponse); + + List actualRequests = mockMetastoreService.getRequests(); + Assert.assertEquals(1, actualRequests.size()); + CreateCatalogRequest actualRequest = ((CreateCatalogRequest) actualRequests.get(0)); + + Assert.assertEquals(parent.toString(), 
actualRequest.getParent()); + Assert.assertEquals(catalog, actualRequest.getCatalog()); + Assert.assertEquals(catalogId, actualRequest.getCatalogId()); + Assert.assertTrue( + channelProvider.isHeaderSent( + ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), + GaxGrpcProperties.getDefaultApiClientHeaderPattern())); + } + + @Test + public void createCatalogExceptionTest() throws Exception { + StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); + mockMetastoreService.addException(exception); + + try { + LocationName parent = LocationName.of("[PROJECT]", "[LOCATION]"); + Catalog catalog = Catalog.newBuilder().build(); + String catalogId = "catalogId1455933204"; + client.createCatalog(parent, catalog, catalogId); + Assert.fail("No exception raised"); + } catch (InvalidArgumentException e) { + // Expected exception. + } + } + + @Test + public void createCatalogTest2() throws Exception { + Catalog expectedResponse = + Catalog.newBuilder() + .setName(CatalogName.of("[PROJECT]", "[LOCATION]", "[CATALOG]").toString()) + .setCreateTime(Timestamp.newBuilder().build()) + .setUpdateTime(Timestamp.newBuilder().build()) + .setDeleteTime(Timestamp.newBuilder().build()) + .setExpireTime(Timestamp.newBuilder().build()) + .build(); + mockMetastoreService.addResponse(expectedResponse); + + String parent = "parent-995424086"; + Catalog catalog = Catalog.newBuilder().build(); + String catalogId = "catalogId1455933204"; + + Catalog actualResponse = client.createCatalog(parent, catalog, catalogId); + Assert.assertEquals(expectedResponse, actualResponse); + + List actualRequests = mockMetastoreService.getRequests(); + Assert.assertEquals(1, actualRequests.size()); + CreateCatalogRequest actualRequest = ((CreateCatalogRequest) actualRequests.get(0)); + + Assert.assertEquals(parent, actualRequest.getParent()); + Assert.assertEquals(catalog, actualRequest.getCatalog()); + Assert.assertEquals(catalogId, actualRequest.getCatalogId()); + Assert.assertTrue( + channelProvider.isHeaderSent( + ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), + GaxGrpcProperties.getDefaultApiClientHeaderPattern())); + } + + @Test + public void createCatalogExceptionTest2() throws Exception { + StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); + mockMetastoreService.addException(exception); + + try { + String parent = "parent-995424086"; + Catalog catalog = Catalog.newBuilder().build(); + String catalogId = "catalogId1455933204"; + client.createCatalog(parent, catalog, catalogId); + Assert.fail("No exception raised"); + } catch (InvalidArgumentException e) { + // Expected exception. 
+ } + } + + @Test + public void deleteCatalogTest() throws Exception { + Catalog expectedResponse = + Catalog.newBuilder() + .setName(CatalogName.of("[PROJECT]", "[LOCATION]", "[CATALOG]").toString()) + .setCreateTime(Timestamp.newBuilder().build()) + .setUpdateTime(Timestamp.newBuilder().build()) + .setDeleteTime(Timestamp.newBuilder().build()) + .setExpireTime(Timestamp.newBuilder().build()) + .build(); + mockMetastoreService.addResponse(expectedResponse); + + CatalogName name = CatalogName.of("[PROJECT]", "[LOCATION]", "[CATALOG]"); + + Catalog actualResponse = client.deleteCatalog(name); + Assert.assertEquals(expectedResponse, actualResponse); + + List actualRequests = mockMetastoreService.getRequests(); + Assert.assertEquals(1, actualRequests.size()); + DeleteCatalogRequest actualRequest = ((DeleteCatalogRequest) actualRequests.get(0)); + + Assert.assertEquals(name.toString(), actualRequest.getName()); + Assert.assertTrue( + channelProvider.isHeaderSent( + ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), + GaxGrpcProperties.getDefaultApiClientHeaderPattern())); + } + + @Test + public void deleteCatalogExceptionTest() throws Exception { + StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); + mockMetastoreService.addException(exception); + + try { + CatalogName name = CatalogName.of("[PROJECT]", "[LOCATION]", "[CATALOG]"); + client.deleteCatalog(name); + Assert.fail("No exception raised"); + } catch (InvalidArgumentException e) { + // Expected exception. + } + } + + @Test + public void deleteCatalogTest2() throws Exception { + Catalog expectedResponse = + Catalog.newBuilder() + .setName(CatalogName.of("[PROJECT]", "[LOCATION]", "[CATALOG]").toString()) + .setCreateTime(Timestamp.newBuilder().build()) + .setUpdateTime(Timestamp.newBuilder().build()) + .setDeleteTime(Timestamp.newBuilder().build()) + .setExpireTime(Timestamp.newBuilder().build()) + .build(); + mockMetastoreService.addResponse(expectedResponse); + + String name = "name3373707"; + + Catalog actualResponse = client.deleteCatalog(name); + Assert.assertEquals(expectedResponse, actualResponse); + + List actualRequests = mockMetastoreService.getRequests(); + Assert.assertEquals(1, actualRequests.size()); + DeleteCatalogRequest actualRequest = ((DeleteCatalogRequest) actualRequests.get(0)); + + Assert.assertEquals(name, actualRequest.getName()); + Assert.assertTrue( + channelProvider.isHeaderSent( + ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), + GaxGrpcProperties.getDefaultApiClientHeaderPattern())); + } + + @Test + public void deleteCatalogExceptionTest2() throws Exception { + StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); + mockMetastoreService.addException(exception); + + try { + String name = "name3373707"; + client.deleteCatalog(name); + Assert.fail("No exception raised"); + } catch (InvalidArgumentException e) { + // Expected exception. 
+ } + } + + @Test + public void getCatalogTest() throws Exception { + Catalog expectedResponse = + Catalog.newBuilder() + .setName(CatalogName.of("[PROJECT]", "[LOCATION]", "[CATALOG]").toString()) + .setCreateTime(Timestamp.newBuilder().build()) + .setUpdateTime(Timestamp.newBuilder().build()) + .setDeleteTime(Timestamp.newBuilder().build()) + .setExpireTime(Timestamp.newBuilder().build()) + .build(); + mockMetastoreService.addResponse(expectedResponse); + + CatalogName name = CatalogName.of("[PROJECT]", "[LOCATION]", "[CATALOG]"); + + Catalog actualResponse = client.getCatalog(name); + Assert.assertEquals(expectedResponse, actualResponse); + + List actualRequests = mockMetastoreService.getRequests(); + Assert.assertEquals(1, actualRequests.size()); + GetCatalogRequest actualRequest = ((GetCatalogRequest) actualRequests.get(0)); + + Assert.assertEquals(name.toString(), actualRequest.getName()); + Assert.assertTrue( + channelProvider.isHeaderSent( + ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), + GaxGrpcProperties.getDefaultApiClientHeaderPattern())); + } + + @Test + public void getCatalogExceptionTest() throws Exception { + StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); + mockMetastoreService.addException(exception); + + try { + CatalogName name = CatalogName.of("[PROJECT]", "[LOCATION]", "[CATALOG]"); + client.getCatalog(name); + Assert.fail("No exception raised"); + } catch (InvalidArgumentException e) { + // Expected exception. + } + } + + @Test + public void getCatalogTest2() throws Exception { + Catalog expectedResponse = + Catalog.newBuilder() + .setName(CatalogName.of("[PROJECT]", "[LOCATION]", "[CATALOG]").toString()) + .setCreateTime(Timestamp.newBuilder().build()) + .setUpdateTime(Timestamp.newBuilder().build()) + .setDeleteTime(Timestamp.newBuilder().build()) + .setExpireTime(Timestamp.newBuilder().build()) + .build(); + mockMetastoreService.addResponse(expectedResponse); + + String name = "name3373707"; + + Catalog actualResponse = client.getCatalog(name); + Assert.assertEquals(expectedResponse, actualResponse); + + List actualRequests = mockMetastoreService.getRequests(); + Assert.assertEquals(1, actualRequests.size()); + GetCatalogRequest actualRequest = ((GetCatalogRequest) actualRequests.get(0)); + + Assert.assertEquals(name, actualRequest.getName()); + Assert.assertTrue( + channelProvider.isHeaderSent( + ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), + GaxGrpcProperties.getDefaultApiClientHeaderPattern())); + } + + @Test + public void getCatalogExceptionTest2() throws Exception { + StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); + mockMetastoreService.addException(exception); + + try { + String name = "name3373707"; + client.getCatalog(name); + Assert.fail("No exception raised"); + } catch (InvalidArgumentException e) { + // Expected exception. 
+ } + } + + @Test + public void listCatalogsTest() throws Exception { + Catalog responsesElement = Catalog.newBuilder().build(); + ListCatalogsResponse expectedResponse = + ListCatalogsResponse.newBuilder() + .setNextPageToken("") + .addAllCatalogs(Arrays.asList(responsesElement)) + .build(); + mockMetastoreService.addResponse(expectedResponse); + + LocationName parent = LocationName.of("[PROJECT]", "[LOCATION]"); + + ListCatalogsPagedResponse pagedListResponse = client.listCatalogs(parent); + + List resources = Lists.newArrayList(pagedListResponse.iterateAll()); + + Assert.assertEquals(1, resources.size()); + Assert.assertEquals(expectedResponse.getCatalogsList().get(0), resources.get(0)); + + List actualRequests = mockMetastoreService.getRequests(); + Assert.assertEquals(1, actualRequests.size()); + ListCatalogsRequest actualRequest = ((ListCatalogsRequest) actualRequests.get(0)); + + Assert.assertEquals(parent.toString(), actualRequest.getParent()); + Assert.assertTrue( + channelProvider.isHeaderSent( + ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), + GaxGrpcProperties.getDefaultApiClientHeaderPattern())); + } + + @Test + public void listCatalogsExceptionTest() throws Exception { + StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); + mockMetastoreService.addException(exception); + + try { + LocationName parent = LocationName.of("[PROJECT]", "[LOCATION]"); + client.listCatalogs(parent); + Assert.fail("No exception raised"); + } catch (InvalidArgumentException e) { + // Expected exception. + } + } + + @Test + public void listCatalogsTest2() throws Exception { + Catalog responsesElement = Catalog.newBuilder().build(); + ListCatalogsResponse expectedResponse = + ListCatalogsResponse.newBuilder() + .setNextPageToken("") + .addAllCatalogs(Arrays.asList(responsesElement)) + .build(); + mockMetastoreService.addResponse(expectedResponse); + + String parent = "parent-995424086"; + + ListCatalogsPagedResponse pagedListResponse = client.listCatalogs(parent); + + List resources = Lists.newArrayList(pagedListResponse.iterateAll()); + + Assert.assertEquals(1, resources.size()); + Assert.assertEquals(expectedResponse.getCatalogsList().get(0), resources.get(0)); + + List actualRequests = mockMetastoreService.getRequests(); + Assert.assertEquals(1, actualRequests.size()); + ListCatalogsRequest actualRequest = ((ListCatalogsRequest) actualRequests.get(0)); + + Assert.assertEquals(parent, actualRequest.getParent()); + Assert.assertTrue( + channelProvider.isHeaderSent( + ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), + GaxGrpcProperties.getDefaultApiClientHeaderPattern())); + } + + @Test + public void listCatalogsExceptionTest2() throws Exception { + StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); + mockMetastoreService.addException(exception); + + try { + String parent = "parent-995424086"; + client.listCatalogs(parent); + Assert.fail("No exception raised"); + } catch (InvalidArgumentException e) { + // Expected exception. 
+ } + } + + @Test + public void createDatabaseTest() throws Exception { + Database expectedResponse = + Database.newBuilder() + .setName( + DatabaseName.of("[PROJECT]", "[LOCATION]", "[CATALOG]", "[DATABASE]").toString()) + .setCreateTime(Timestamp.newBuilder().build()) + .setUpdateTime(Timestamp.newBuilder().build()) + .setDeleteTime(Timestamp.newBuilder().build()) + .setExpireTime(Timestamp.newBuilder().build()) + .build(); + mockMetastoreService.addResponse(expectedResponse); + + CatalogName parent = CatalogName.of("[PROJECT]", "[LOCATION]", "[CATALOG]"); + Database database = Database.newBuilder().build(); + String databaseId = "databaseId1688905718"; + + Database actualResponse = client.createDatabase(parent, database, databaseId); + Assert.assertEquals(expectedResponse, actualResponse); + + List actualRequests = mockMetastoreService.getRequests(); + Assert.assertEquals(1, actualRequests.size()); + CreateDatabaseRequest actualRequest = ((CreateDatabaseRequest) actualRequests.get(0)); + + Assert.assertEquals(parent.toString(), actualRequest.getParent()); + Assert.assertEquals(database, actualRequest.getDatabase()); + Assert.assertEquals(databaseId, actualRequest.getDatabaseId()); + Assert.assertTrue( + channelProvider.isHeaderSent( + ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), + GaxGrpcProperties.getDefaultApiClientHeaderPattern())); + } + + @Test + public void createDatabaseExceptionTest() throws Exception { + StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); + mockMetastoreService.addException(exception); + + try { + CatalogName parent = CatalogName.of("[PROJECT]", "[LOCATION]", "[CATALOG]"); + Database database = Database.newBuilder().build(); + String databaseId = "databaseId1688905718"; + client.createDatabase(parent, database, databaseId); + Assert.fail("No exception raised"); + } catch (InvalidArgumentException e) { + // Expected exception. 
+ } + } + + @Test + public void createDatabaseTest2() throws Exception { + Database expectedResponse = + Database.newBuilder() + .setName( + DatabaseName.of("[PROJECT]", "[LOCATION]", "[CATALOG]", "[DATABASE]").toString()) + .setCreateTime(Timestamp.newBuilder().build()) + .setUpdateTime(Timestamp.newBuilder().build()) + .setDeleteTime(Timestamp.newBuilder().build()) + .setExpireTime(Timestamp.newBuilder().build()) + .build(); + mockMetastoreService.addResponse(expectedResponse); + + String parent = "parent-995424086"; + Database database = Database.newBuilder().build(); + String databaseId = "databaseId1688905718"; + + Database actualResponse = client.createDatabase(parent, database, databaseId); + Assert.assertEquals(expectedResponse, actualResponse); + + List actualRequests = mockMetastoreService.getRequests(); + Assert.assertEquals(1, actualRequests.size()); + CreateDatabaseRequest actualRequest = ((CreateDatabaseRequest) actualRequests.get(0)); + + Assert.assertEquals(parent, actualRequest.getParent()); + Assert.assertEquals(database, actualRequest.getDatabase()); + Assert.assertEquals(databaseId, actualRequest.getDatabaseId()); + Assert.assertTrue( + channelProvider.isHeaderSent( + ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), + GaxGrpcProperties.getDefaultApiClientHeaderPattern())); + } + + @Test + public void createDatabaseExceptionTest2() throws Exception { + StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); + mockMetastoreService.addException(exception); + + try { + String parent = "parent-995424086"; + Database database = Database.newBuilder().build(); + String databaseId = "databaseId1688905718"; + client.createDatabase(parent, database, databaseId); + Assert.fail("No exception raised"); + } catch (InvalidArgumentException e) { + // Expected exception. + } + } + + @Test + public void deleteDatabaseTest() throws Exception { + Database expectedResponse = + Database.newBuilder() + .setName( + DatabaseName.of("[PROJECT]", "[LOCATION]", "[CATALOG]", "[DATABASE]").toString()) + .setCreateTime(Timestamp.newBuilder().build()) + .setUpdateTime(Timestamp.newBuilder().build()) + .setDeleteTime(Timestamp.newBuilder().build()) + .setExpireTime(Timestamp.newBuilder().build()) + .build(); + mockMetastoreService.addResponse(expectedResponse); + + DatabaseName name = DatabaseName.of("[PROJECT]", "[LOCATION]", "[CATALOG]", "[DATABASE]"); + + Database actualResponse = client.deleteDatabase(name); + Assert.assertEquals(expectedResponse, actualResponse); + + List actualRequests = mockMetastoreService.getRequests(); + Assert.assertEquals(1, actualRequests.size()); + DeleteDatabaseRequest actualRequest = ((DeleteDatabaseRequest) actualRequests.get(0)); + + Assert.assertEquals(name.toString(), actualRequest.getName()); + Assert.assertTrue( + channelProvider.isHeaderSent( + ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), + GaxGrpcProperties.getDefaultApiClientHeaderPattern())); + } + + @Test + public void deleteDatabaseExceptionTest() throws Exception { + StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); + mockMetastoreService.addException(exception); + + try { + DatabaseName name = DatabaseName.of("[PROJECT]", "[LOCATION]", "[CATALOG]", "[DATABASE]"); + client.deleteDatabase(name); + Assert.fail("No exception raised"); + } catch (InvalidArgumentException e) { + // Expected exception. 
+ } + } + + @Test + public void deleteDatabaseTest2() throws Exception { + Database expectedResponse = + Database.newBuilder() + .setName( + DatabaseName.of("[PROJECT]", "[LOCATION]", "[CATALOG]", "[DATABASE]").toString()) + .setCreateTime(Timestamp.newBuilder().build()) + .setUpdateTime(Timestamp.newBuilder().build()) + .setDeleteTime(Timestamp.newBuilder().build()) + .setExpireTime(Timestamp.newBuilder().build()) + .build(); + mockMetastoreService.addResponse(expectedResponse); + + String name = "name3373707"; + + Database actualResponse = client.deleteDatabase(name); + Assert.assertEquals(expectedResponse, actualResponse); + + List actualRequests = mockMetastoreService.getRequests(); + Assert.assertEquals(1, actualRequests.size()); + DeleteDatabaseRequest actualRequest = ((DeleteDatabaseRequest) actualRequests.get(0)); + + Assert.assertEquals(name, actualRequest.getName()); + Assert.assertTrue( + channelProvider.isHeaderSent( + ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), + GaxGrpcProperties.getDefaultApiClientHeaderPattern())); + } + + @Test + public void deleteDatabaseExceptionTest2() throws Exception { + StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); + mockMetastoreService.addException(exception); + + try { + String name = "name3373707"; + client.deleteDatabase(name); + Assert.fail("No exception raised"); + } catch (InvalidArgumentException e) { + // Expected exception. + } + } + + @Test + public void updateDatabaseTest() throws Exception { + Database expectedResponse = + Database.newBuilder() + .setName( + DatabaseName.of("[PROJECT]", "[LOCATION]", "[CATALOG]", "[DATABASE]").toString()) + .setCreateTime(Timestamp.newBuilder().build()) + .setUpdateTime(Timestamp.newBuilder().build()) + .setDeleteTime(Timestamp.newBuilder().build()) + .setExpireTime(Timestamp.newBuilder().build()) + .build(); + mockMetastoreService.addResponse(expectedResponse); + + Database database = Database.newBuilder().build(); + FieldMask updateMask = FieldMask.newBuilder().build(); + + Database actualResponse = client.updateDatabase(database, updateMask); + Assert.assertEquals(expectedResponse, actualResponse); + + List actualRequests = mockMetastoreService.getRequests(); + Assert.assertEquals(1, actualRequests.size()); + UpdateDatabaseRequest actualRequest = ((UpdateDatabaseRequest) actualRequests.get(0)); + + Assert.assertEquals(database, actualRequest.getDatabase()); + Assert.assertEquals(updateMask, actualRequest.getUpdateMask()); + Assert.assertTrue( + channelProvider.isHeaderSent( + ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), + GaxGrpcProperties.getDefaultApiClientHeaderPattern())); + } + + @Test + public void updateDatabaseExceptionTest() throws Exception { + StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); + mockMetastoreService.addException(exception); + + try { + Database database = Database.newBuilder().build(); + FieldMask updateMask = FieldMask.newBuilder().build(); + client.updateDatabase(database, updateMask); + Assert.fail("No exception raised"); + } catch (InvalidArgumentException e) { + // Expected exception. 
+ } + } + + @Test + public void getDatabaseTest() throws Exception { + Database expectedResponse = + Database.newBuilder() + .setName( + DatabaseName.of("[PROJECT]", "[LOCATION]", "[CATALOG]", "[DATABASE]").toString()) + .setCreateTime(Timestamp.newBuilder().build()) + .setUpdateTime(Timestamp.newBuilder().build()) + .setDeleteTime(Timestamp.newBuilder().build()) + .setExpireTime(Timestamp.newBuilder().build()) + .build(); + mockMetastoreService.addResponse(expectedResponse); + + DatabaseName name = DatabaseName.of("[PROJECT]", "[LOCATION]", "[CATALOG]", "[DATABASE]"); + + Database actualResponse = client.getDatabase(name); + Assert.assertEquals(expectedResponse, actualResponse); + + List actualRequests = mockMetastoreService.getRequests(); + Assert.assertEquals(1, actualRequests.size()); + GetDatabaseRequest actualRequest = ((GetDatabaseRequest) actualRequests.get(0)); + + Assert.assertEquals(name.toString(), actualRequest.getName()); + Assert.assertTrue( + channelProvider.isHeaderSent( + ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), + GaxGrpcProperties.getDefaultApiClientHeaderPattern())); + } + + @Test + public void getDatabaseExceptionTest() throws Exception { + StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); + mockMetastoreService.addException(exception); + + try { + DatabaseName name = DatabaseName.of("[PROJECT]", "[LOCATION]", "[CATALOG]", "[DATABASE]"); + client.getDatabase(name); + Assert.fail("No exception raised"); + } catch (InvalidArgumentException e) { + // Expected exception. + } + } + + @Test + public void getDatabaseTest2() throws Exception { + Database expectedResponse = + Database.newBuilder() + .setName( + DatabaseName.of("[PROJECT]", "[LOCATION]", "[CATALOG]", "[DATABASE]").toString()) + .setCreateTime(Timestamp.newBuilder().build()) + .setUpdateTime(Timestamp.newBuilder().build()) + .setDeleteTime(Timestamp.newBuilder().build()) + .setExpireTime(Timestamp.newBuilder().build()) + .build(); + mockMetastoreService.addResponse(expectedResponse); + + String name = "name3373707"; + + Database actualResponse = client.getDatabase(name); + Assert.assertEquals(expectedResponse, actualResponse); + + List actualRequests = mockMetastoreService.getRequests(); + Assert.assertEquals(1, actualRequests.size()); + GetDatabaseRequest actualRequest = ((GetDatabaseRequest) actualRequests.get(0)); + + Assert.assertEquals(name, actualRequest.getName()); + Assert.assertTrue( + channelProvider.isHeaderSent( + ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), + GaxGrpcProperties.getDefaultApiClientHeaderPattern())); + } + + @Test + public void getDatabaseExceptionTest2() throws Exception { + StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); + mockMetastoreService.addException(exception); + + try { + String name = "name3373707"; + client.getDatabase(name); + Assert.fail("No exception raised"); + } catch (InvalidArgumentException e) { + // Expected exception. 
+ } + } + + @Test + public void listDatabasesTest() throws Exception { + Database responsesElement = Database.newBuilder().build(); + ListDatabasesResponse expectedResponse = + ListDatabasesResponse.newBuilder() + .setNextPageToken("") + .addAllDatabases(Arrays.asList(responsesElement)) + .build(); + mockMetastoreService.addResponse(expectedResponse); + + CatalogName parent = CatalogName.of("[PROJECT]", "[LOCATION]", "[CATALOG]"); + + ListDatabasesPagedResponse pagedListResponse = client.listDatabases(parent); + + List resources = Lists.newArrayList(pagedListResponse.iterateAll()); + + Assert.assertEquals(1, resources.size()); + Assert.assertEquals(expectedResponse.getDatabasesList().get(0), resources.get(0)); + + List actualRequests = mockMetastoreService.getRequests(); + Assert.assertEquals(1, actualRequests.size()); + ListDatabasesRequest actualRequest = ((ListDatabasesRequest) actualRequests.get(0)); + + Assert.assertEquals(parent.toString(), actualRequest.getParent()); + Assert.assertTrue( + channelProvider.isHeaderSent( + ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), + GaxGrpcProperties.getDefaultApiClientHeaderPattern())); + } + + @Test + public void listDatabasesExceptionTest() throws Exception { + StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); + mockMetastoreService.addException(exception); + + try { + CatalogName parent = CatalogName.of("[PROJECT]", "[LOCATION]", "[CATALOG]"); + client.listDatabases(parent); + Assert.fail("No exception raised"); + } catch (InvalidArgumentException e) { + // Expected exception. + } + } + + @Test + public void listDatabasesTest2() throws Exception { + Database responsesElement = Database.newBuilder().build(); + ListDatabasesResponse expectedResponse = + ListDatabasesResponse.newBuilder() + .setNextPageToken("") + .addAllDatabases(Arrays.asList(responsesElement)) + .build(); + mockMetastoreService.addResponse(expectedResponse); + + String parent = "parent-995424086"; + + ListDatabasesPagedResponse pagedListResponse = client.listDatabases(parent); + + List resources = Lists.newArrayList(pagedListResponse.iterateAll()); + + Assert.assertEquals(1, resources.size()); + Assert.assertEquals(expectedResponse.getDatabasesList().get(0), resources.get(0)); + + List actualRequests = mockMetastoreService.getRequests(); + Assert.assertEquals(1, actualRequests.size()); + ListDatabasesRequest actualRequest = ((ListDatabasesRequest) actualRequests.get(0)); + + Assert.assertEquals(parent, actualRequest.getParent()); + Assert.assertTrue( + channelProvider.isHeaderSent( + ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), + GaxGrpcProperties.getDefaultApiClientHeaderPattern())); + } + + @Test + public void listDatabasesExceptionTest2() throws Exception { + StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); + mockMetastoreService.addException(exception); + + try { + String parent = "parent-995424086"; + client.listDatabases(parent); + Assert.fail("No exception raised"); + } catch (InvalidArgumentException e) { + // Expected exception. 
+ } + } + + @Test + public void createTableTest() throws Exception { + Table expectedResponse = + Table.newBuilder() + .setName( + TableName.of("[PROJECT]", "[LOCATION]", "[CATALOG]", "[DATABASE]", "[TABLE]") + .toString()) + .setCreateTime(Timestamp.newBuilder().build()) + .setUpdateTime(Timestamp.newBuilder().build()) + .setDeleteTime(Timestamp.newBuilder().build()) + .setExpireTime(Timestamp.newBuilder().build()) + .build(); + mockMetastoreService.addResponse(expectedResponse); + + DatabaseName parent = DatabaseName.of("[PROJECT]", "[LOCATION]", "[CATALOG]", "[DATABASE]"); + Table table = Table.newBuilder().build(); + String tableId = "tableId-1552905847"; + + Table actualResponse = client.createTable(parent, table, tableId); + Assert.assertEquals(expectedResponse, actualResponse); + + List actualRequests = mockMetastoreService.getRequests(); + Assert.assertEquals(1, actualRequests.size()); + CreateTableRequest actualRequest = ((CreateTableRequest) actualRequests.get(0)); + + Assert.assertEquals(parent.toString(), actualRequest.getParent()); + Assert.assertEquals(table, actualRequest.getTable()); + Assert.assertEquals(tableId, actualRequest.getTableId()); + Assert.assertTrue( + channelProvider.isHeaderSent( + ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), + GaxGrpcProperties.getDefaultApiClientHeaderPattern())); + } + + @Test + public void createTableExceptionTest() throws Exception { + StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); + mockMetastoreService.addException(exception); + + try { + DatabaseName parent = DatabaseName.of("[PROJECT]", "[LOCATION]", "[CATALOG]", "[DATABASE]"); + Table table = Table.newBuilder().build(); + String tableId = "tableId-1552905847"; + client.createTable(parent, table, tableId); + Assert.fail("No exception raised"); + } catch (InvalidArgumentException e) { + // Expected exception. 
+ } + } + + @Test + public void createTableTest2() throws Exception { + Table expectedResponse = + Table.newBuilder() + .setName( + TableName.of("[PROJECT]", "[LOCATION]", "[CATALOG]", "[DATABASE]", "[TABLE]") + .toString()) + .setCreateTime(Timestamp.newBuilder().build()) + .setUpdateTime(Timestamp.newBuilder().build()) + .setDeleteTime(Timestamp.newBuilder().build()) + .setExpireTime(Timestamp.newBuilder().build()) + .build(); + mockMetastoreService.addResponse(expectedResponse); + + String parent = "parent-995424086"; + Table table = Table.newBuilder().build(); + String tableId = "tableId-1552905847"; + + Table actualResponse = client.createTable(parent, table, tableId); + Assert.assertEquals(expectedResponse, actualResponse); + + List actualRequests = mockMetastoreService.getRequests(); + Assert.assertEquals(1, actualRequests.size()); + CreateTableRequest actualRequest = ((CreateTableRequest) actualRequests.get(0)); + + Assert.assertEquals(parent, actualRequest.getParent()); + Assert.assertEquals(table, actualRequest.getTable()); + Assert.assertEquals(tableId, actualRequest.getTableId()); + Assert.assertTrue( + channelProvider.isHeaderSent( + ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), + GaxGrpcProperties.getDefaultApiClientHeaderPattern())); + } + + @Test + public void createTableExceptionTest2() throws Exception { + StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); + mockMetastoreService.addException(exception); + + try { + String parent = "parent-995424086"; + Table table = Table.newBuilder().build(); + String tableId = "tableId-1552905847"; + client.createTable(parent, table, tableId); + Assert.fail("No exception raised"); + } catch (InvalidArgumentException e) { + // Expected exception. + } + } + + @Test + public void deleteTableTest() throws Exception { + Table expectedResponse = + Table.newBuilder() + .setName( + TableName.of("[PROJECT]", "[LOCATION]", "[CATALOG]", "[DATABASE]", "[TABLE]") + .toString()) + .setCreateTime(Timestamp.newBuilder().build()) + .setUpdateTime(Timestamp.newBuilder().build()) + .setDeleteTime(Timestamp.newBuilder().build()) + .setExpireTime(Timestamp.newBuilder().build()) + .build(); + mockMetastoreService.addResponse(expectedResponse); + + TableName name = TableName.of("[PROJECT]", "[LOCATION]", "[CATALOG]", "[DATABASE]", "[TABLE]"); + + Table actualResponse = client.deleteTable(name); + Assert.assertEquals(expectedResponse, actualResponse); + + List actualRequests = mockMetastoreService.getRequests(); + Assert.assertEquals(1, actualRequests.size()); + DeleteTableRequest actualRequest = ((DeleteTableRequest) actualRequests.get(0)); + + Assert.assertEquals(name.toString(), actualRequest.getName()); + Assert.assertTrue( + channelProvider.isHeaderSent( + ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), + GaxGrpcProperties.getDefaultApiClientHeaderPattern())); + } + + @Test + public void deleteTableExceptionTest() throws Exception { + StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); + mockMetastoreService.addException(exception); + + try { + TableName name = + TableName.of("[PROJECT]", "[LOCATION]", "[CATALOG]", "[DATABASE]", "[TABLE]"); + client.deleteTable(name); + Assert.fail("No exception raised"); + } catch (InvalidArgumentException e) { + // Expected exception. 
+ } + } + + @Test + public void deleteTableTest2() throws Exception { + Table expectedResponse = + Table.newBuilder() + .setName( + TableName.of("[PROJECT]", "[LOCATION]", "[CATALOG]", "[DATABASE]", "[TABLE]") + .toString()) + .setCreateTime(Timestamp.newBuilder().build()) + .setUpdateTime(Timestamp.newBuilder().build()) + .setDeleteTime(Timestamp.newBuilder().build()) + .setExpireTime(Timestamp.newBuilder().build()) + .build(); + mockMetastoreService.addResponse(expectedResponse); + + String name = "name3373707"; + + Table actualResponse = client.deleteTable(name); + Assert.assertEquals(expectedResponse, actualResponse); + + List actualRequests = mockMetastoreService.getRequests(); + Assert.assertEquals(1, actualRequests.size()); + DeleteTableRequest actualRequest = ((DeleteTableRequest) actualRequests.get(0)); + + Assert.assertEquals(name, actualRequest.getName()); + Assert.assertTrue( + channelProvider.isHeaderSent( + ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), + GaxGrpcProperties.getDefaultApiClientHeaderPattern())); + } + + @Test + public void deleteTableExceptionTest2() throws Exception { + StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); + mockMetastoreService.addException(exception); + + try { + String name = "name3373707"; + client.deleteTable(name); + Assert.fail("No exception raised"); + } catch (InvalidArgumentException e) { + // Expected exception. + } + } + + @Test + public void updateTableTest() throws Exception { + Table expectedResponse = + Table.newBuilder() + .setName( + TableName.of("[PROJECT]", "[LOCATION]", "[CATALOG]", "[DATABASE]", "[TABLE]") + .toString()) + .setCreateTime(Timestamp.newBuilder().build()) + .setUpdateTime(Timestamp.newBuilder().build()) + .setDeleteTime(Timestamp.newBuilder().build()) + .setExpireTime(Timestamp.newBuilder().build()) + .build(); + mockMetastoreService.addResponse(expectedResponse); + + Table table = Table.newBuilder().build(); + FieldMask updateMask = FieldMask.newBuilder().build(); + + Table actualResponse = client.updateTable(table, updateMask); + Assert.assertEquals(expectedResponse, actualResponse); + + List actualRequests = mockMetastoreService.getRequests(); + Assert.assertEquals(1, actualRequests.size()); + UpdateTableRequest actualRequest = ((UpdateTableRequest) actualRequests.get(0)); + + Assert.assertEquals(table, actualRequest.getTable()); + Assert.assertEquals(updateMask, actualRequest.getUpdateMask()); + Assert.assertTrue( + channelProvider.isHeaderSent( + ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), + GaxGrpcProperties.getDefaultApiClientHeaderPattern())); + } + + @Test + public void updateTableExceptionTest() throws Exception { + StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); + mockMetastoreService.addException(exception); + + try { + Table table = Table.newBuilder().build(); + FieldMask updateMask = FieldMask.newBuilder().build(); + client.updateTable(table, updateMask); + Assert.fail("No exception raised"); + } catch (InvalidArgumentException e) { + // Expected exception. 
+ } + } + + @Test + public void getTableTest() throws Exception { + Table expectedResponse = + Table.newBuilder() + .setName( + TableName.of("[PROJECT]", "[LOCATION]", "[CATALOG]", "[DATABASE]", "[TABLE]") + .toString()) + .setCreateTime(Timestamp.newBuilder().build()) + .setUpdateTime(Timestamp.newBuilder().build()) + .setDeleteTime(Timestamp.newBuilder().build()) + .setExpireTime(Timestamp.newBuilder().build()) + .build(); + mockMetastoreService.addResponse(expectedResponse); + + TableName name = TableName.of("[PROJECT]", "[LOCATION]", "[CATALOG]", "[DATABASE]", "[TABLE]"); + + Table actualResponse = client.getTable(name); + Assert.assertEquals(expectedResponse, actualResponse); + + List actualRequests = mockMetastoreService.getRequests(); + Assert.assertEquals(1, actualRequests.size()); + GetTableRequest actualRequest = ((GetTableRequest) actualRequests.get(0)); + + Assert.assertEquals(name.toString(), actualRequest.getName()); + Assert.assertTrue( + channelProvider.isHeaderSent( + ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), + GaxGrpcProperties.getDefaultApiClientHeaderPattern())); + } + + @Test + public void getTableExceptionTest() throws Exception { + StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); + mockMetastoreService.addException(exception); + + try { + TableName name = + TableName.of("[PROJECT]", "[LOCATION]", "[CATALOG]", "[DATABASE]", "[TABLE]"); + client.getTable(name); + Assert.fail("No exception raised"); + } catch (InvalidArgumentException e) { + // Expected exception. + } + } + + @Test + public void getTableTest2() throws Exception { + Table expectedResponse = + Table.newBuilder() + .setName( + TableName.of("[PROJECT]", "[LOCATION]", "[CATALOG]", "[DATABASE]", "[TABLE]") + .toString()) + .setCreateTime(Timestamp.newBuilder().build()) + .setUpdateTime(Timestamp.newBuilder().build()) + .setDeleteTime(Timestamp.newBuilder().build()) + .setExpireTime(Timestamp.newBuilder().build()) + .build(); + mockMetastoreService.addResponse(expectedResponse); + + String name = "name3373707"; + + Table actualResponse = client.getTable(name); + Assert.assertEquals(expectedResponse, actualResponse); + + List actualRequests = mockMetastoreService.getRequests(); + Assert.assertEquals(1, actualRequests.size()); + GetTableRequest actualRequest = ((GetTableRequest) actualRequests.get(0)); + + Assert.assertEquals(name, actualRequest.getName()); + Assert.assertTrue( + channelProvider.isHeaderSent( + ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), + GaxGrpcProperties.getDefaultApiClientHeaderPattern())); + } + + @Test + public void getTableExceptionTest2() throws Exception { + StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); + mockMetastoreService.addException(exception); + + try { + String name = "name3373707"; + client.getTable(name); + Assert.fail("No exception raised"); + } catch (InvalidArgumentException e) { + // Expected exception. + } + } + + @Test + public void listTablesTest() throws Exception { + Table responsesElement = Table.newBuilder().build(); + ListTablesResponse expectedResponse = + ListTablesResponse.newBuilder() + .setNextPageToken("") + .addAllTables(Arrays.asList(responsesElement)) + .build(); + mockMetastoreService.addResponse(expectedResponse); + + DatabaseName parent = DatabaseName.of("[PROJECT]", "[LOCATION]", "[CATALOG]", "[DATABASE]"); + + ListTablesPagedResponse pagedListResponse = client.listTables(parent); + + List
<Table>
resources = Lists.newArrayList(pagedListResponse.iterateAll()); + + Assert.assertEquals(1, resources.size()); + Assert.assertEquals(expectedResponse.getTablesList().get(0), resources.get(0)); + + List actualRequests = mockMetastoreService.getRequests(); + Assert.assertEquals(1, actualRequests.size()); + ListTablesRequest actualRequest = ((ListTablesRequest) actualRequests.get(0)); + + Assert.assertEquals(parent.toString(), actualRequest.getParent()); + Assert.assertTrue( + channelProvider.isHeaderSent( + ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), + GaxGrpcProperties.getDefaultApiClientHeaderPattern())); + } + + @Test + public void listTablesExceptionTest() throws Exception { + StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); + mockMetastoreService.addException(exception); + + try { + DatabaseName parent = DatabaseName.of("[PROJECT]", "[LOCATION]", "[CATALOG]", "[DATABASE]"); + client.listTables(parent); + Assert.fail("No exception raised"); + } catch (InvalidArgumentException e) { + // Expected exception. + } + } + + @Test + public void listTablesTest2() throws Exception { + Table responsesElement = Table.newBuilder().build(); + ListTablesResponse expectedResponse = + ListTablesResponse.newBuilder() + .setNextPageToken("") + .addAllTables(Arrays.asList(responsesElement)) + .build(); + mockMetastoreService.addResponse(expectedResponse); + + String parent = "parent-995424086"; + + ListTablesPagedResponse pagedListResponse = client.listTables(parent); + + List
<Table>
resources = Lists.newArrayList(pagedListResponse.iterateAll()); + + Assert.assertEquals(1, resources.size()); + Assert.assertEquals(expectedResponse.getTablesList().get(0), resources.get(0)); + + List actualRequests = mockMetastoreService.getRequests(); + Assert.assertEquals(1, actualRequests.size()); + ListTablesRequest actualRequest = ((ListTablesRequest) actualRequests.get(0)); + + Assert.assertEquals(parent, actualRequest.getParent()); + Assert.assertTrue( + channelProvider.isHeaderSent( + ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), + GaxGrpcProperties.getDefaultApiClientHeaderPattern())); + } + + @Test + public void listTablesExceptionTest2() throws Exception { + StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); + mockMetastoreService.addException(exception); + + try { + String parent = "parent-995424086"; + client.listTables(parent); + Assert.fail("No exception raised"); + } catch (InvalidArgumentException e) { + // Expected exception. + } + } + + @Test + public void createLockTest() throws Exception { + Lock expectedResponse = + Lock.newBuilder() + .setName( + LockName.of("[PROJECT]", "[LOCATION]", "[CATALOG]", "[DATABASE]", "[LOCK]") + .toString()) + .setCreateTime(Timestamp.newBuilder().build()) + .build(); + mockMetastoreService.addResponse(expectedResponse); + + DatabaseName parent = DatabaseName.of("[PROJECT]", "[LOCATION]", "[CATALOG]", "[DATABASE]"); + Lock lock = Lock.newBuilder().build(); + + Lock actualResponse = client.createLock(parent, lock); + Assert.assertEquals(expectedResponse, actualResponse); + + List actualRequests = mockMetastoreService.getRequests(); + Assert.assertEquals(1, actualRequests.size()); + CreateLockRequest actualRequest = ((CreateLockRequest) actualRequests.get(0)); + + Assert.assertEquals(parent.toString(), actualRequest.getParent()); + Assert.assertEquals(lock, actualRequest.getLock()); + Assert.assertTrue( + channelProvider.isHeaderSent( + ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), + GaxGrpcProperties.getDefaultApiClientHeaderPattern())); + } + + @Test + public void createLockExceptionTest() throws Exception { + StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); + mockMetastoreService.addException(exception); + + try { + DatabaseName parent = DatabaseName.of("[PROJECT]", "[LOCATION]", "[CATALOG]", "[DATABASE]"); + Lock lock = Lock.newBuilder().build(); + client.createLock(parent, lock); + Assert.fail("No exception raised"); + } catch (InvalidArgumentException e) { + // Expected exception. 
+ } + } + + @Test + public void createLockTest2() throws Exception { + Lock expectedResponse = + Lock.newBuilder() + .setName( + LockName.of("[PROJECT]", "[LOCATION]", "[CATALOG]", "[DATABASE]", "[LOCK]") + .toString()) + .setCreateTime(Timestamp.newBuilder().build()) + .build(); + mockMetastoreService.addResponse(expectedResponse); + + String parent = "parent-995424086"; + Lock lock = Lock.newBuilder().build(); + + Lock actualResponse = client.createLock(parent, lock); + Assert.assertEquals(expectedResponse, actualResponse); + + List actualRequests = mockMetastoreService.getRequests(); + Assert.assertEquals(1, actualRequests.size()); + CreateLockRequest actualRequest = ((CreateLockRequest) actualRequests.get(0)); + + Assert.assertEquals(parent, actualRequest.getParent()); + Assert.assertEquals(lock, actualRequest.getLock()); + Assert.assertTrue( + channelProvider.isHeaderSent( + ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), + GaxGrpcProperties.getDefaultApiClientHeaderPattern())); + } + + @Test + public void createLockExceptionTest2() throws Exception { + StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); + mockMetastoreService.addException(exception); + + try { + String parent = "parent-995424086"; + Lock lock = Lock.newBuilder().build(); + client.createLock(parent, lock); + Assert.fail("No exception raised"); + } catch (InvalidArgumentException e) { + // Expected exception. + } + } + + @Test + public void deleteLockTest() throws Exception { + Empty expectedResponse = Empty.newBuilder().build(); + mockMetastoreService.addResponse(expectedResponse); + + LockName name = LockName.of("[PROJECT]", "[LOCATION]", "[CATALOG]", "[DATABASE]", "[LOCK]"); + + client.deleteLock(name); + + List actualRequests = mockMetastoreService.getRequests(); + Assert.assertEquals(1, actualRequests.size()); + DeleteLockRequest actualRequest = ((DeleteLockRequest) actualRequests.get(0)); + + Assert.assertEquals(name.toString(), actualRequest.getName()); + Assert.assertTrue( + channelProvider.isHeaderSent( + ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), + GaxGrpcProperties.getDefaultApiClientHeaderPattern())); + } + + @Test + public void deleteLockExceptionTest() throws Exception { + StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); + mockMetastoreService.addException(exception); + + try { + LockName name = LockName.of("[PROJECT]", "[LOCATION]", "[CATALOG]", "[DATABASE]", "[LOCK]"); + client.deleteLock(name); + Assert.fail("No exception raised"); + } catch (InvalidArgumentException e) { + // Expected exception. 
+ } + } + + @Test + public void deleteLockTest2() throws Exception { + Empty expectedResponse = Empty.newBuilder().build(); + mockMetastoreService.addResponse(expectedResponse); + + String name = "name3373707"; + + client.deleteLock(name); + + List actualRequests = mockMetastoreService.getRequests(); + Assert.assertEquals(1, actualRequests.size()); + DeleteLockRequest actualRequest = ((DeleteLockRequest) actualRequests.get(0)); + + Assert.assertEquals(name, actualRequest.getName()); + Assert.assertTrue( + channelProvider.isHeaderSent( + ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), + GaxGrpcProperties.getDefaultApiClientHeaderPattern())); + } + + @Test + public void deleteLockExceptionTest2() throws Exception { + StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); + mockMetastoreService.addException(exception); + + try { + String name = "name3373707"; + client.deleteLock(name); + Assert.fail("No exception raised"); + } catch (InvalidArgumentException e) { + // Expected exception. + } + } + + @Test + public void checkLockTest() throws Exception { + Lock expectedResponse = + Lock.newBuilder() + .setName( + LockName.of("[PROJECT]", "[LOCATION]", "[CATALOG]", "[DATABASE]", "[LOCK]") + .toString()) + .setCreateTime(Timestamp.newBuilder().build()) + .build(); + mockMetastoreService.addResponse(expectedResponse); + + LockName name = LockName.of("[PROJECT]", "[LOCATION]", "[CATALOG]", "[DATABASE]", "[LOCK]"); + + Lock actualResponse = client.checkLock(name); + Assert.assertEquals(expectedResponse, actualResponse); + + List actualRequests = mockMetastoreService.getRequests(); + Assert.assertEquals(1, actualRequests.size()); + CheckLockRequest actualRequest = ((CheckLockRequest) actualRequests.get(0)); + + Assert.assertEquals(name.toString(), actualRequest.getName()); + Assert.assertTrue( + channelProvider.isHeaderSent( + ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), + GaxGrpcProperties.getDefaultApiClientHeaderPattern())); + } + + @Test + public void checkLockExceptionTest() throws Exception { + StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); + mockMetastoreService.addException(exception); + + try { + LockName name = LockName.of("[PROJECT]", "[LOCATION]", "[CATALOG]", "[DATABASE]", "[LOCK]"); + client.checkLock(name); + Assert.fail("No exception raised"); + } catch (InvalidArgumentException e) { + // Expected exception. 
+ } + } + + @Test + public void checkLockTest2() throws Exception { + Lock expectedResponse = + Lock.newBuilder() + .setName( + LockName.of("[PROJECT]", "[LOCATION]", "[CATALOG]", "[DATABASE]", "[LOCK]") + .toString()) + .setCreateTime(Timestamp.newBuilder().build()) + .build(); + mockMetastoreService.addResponse(expectedResponse); + + String name = "name3373707"; + + Lock actualResponse = client.checkLock(name); + Assert.assertEquals(expectedResponse, actualResponse); + + List actualRequests = mockMetastoreService.getRequests(); + Assert.assertEquals(1, actualRequests.size()); + CheckLockRequest actualRequest = ((CheckLockRequest) actualRequests.get(0)); + + Assert.assertEquals(name, actualRequest.getName()); + Assert.assertTrue( + channelProvider.isHeaderSent( + ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), + GaxGrpcProperties.getDefaultApiClientHeaderPattern())); + } + + @Test + public void checkLockExceptionTest2() throws Exception { + StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); + mockMetastoreService.addException(exception); + + try { + String name = "name3373707"; + client.checkLock(name); + Assert.fail("No exception raised"); + } catch (InvalidArgumentException e) { + // Expected exception. + } + } + + @Test + public void listLocksTest() throws Exception { + Lock responsesElement = Lock.newBuilder().build(); + ListLocksResponse expectedResponse = + ListLocksResponse.newBuilder() + .setNextPageToken("") + .addAllLocks(Arrays.asList(responsesElement)) + .build(); + mockMetastoreService.addResponse(expectedResponse); + + DatabaseName parent = DatabaseName.of("[PROJECT]", "[LOCATION]", "[CATALOG]", "[DATABASE]"); + + ListLocksPagedResponse pagedListResponse = client.listLocks(parent); + + List resources = Lists.newArrayList(pagedListResponse.iterateAll()); + + Assert.assertEquals(1, resources.size()); + Assert.assertEquals(expectedResponse.getLocksList().get(0), resources.get(0)); + + List actualRequests = mockMetastoreService.getRequests(); + Assert.assertEquals(1, actualRequests.size()); + ListLocksRequest actualRequest = ((ListLocksRequest) actualRequests.get(0)); + + Assert.assertEquals(parent.toString(), actualRequest.getParent()); + Assert.assertTrue( + channelProvider.isHeaderSent( + ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), + GaxGrpcProperties.getDefaultApiClientHeaderPattern())); + } + + @Test + public void listLocksExceptionTest() throws Exception { + StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); + mockMetastoreService.addException(exception); + + try { + DatabaseName parent = DatabaseName.of("[PROJECT]", "[LOCATION]", "[CATALOG]", "[DATABASE]"); + client.listLocks(parent); + Assert.fail("No exception raised"); + } catch (InvalidArgumentException e) { + // Expected exception. 
+ } + } + + @Test + public void listLocksTest2() throws Exception { + Lock responsesElement = Lock.newBuilder().build(); + ListLocksResponse expectedResponse = + ListLocksResponse.newBuilder() + .setNextPageToken("") + .addAllLocks(Arrays.asList(responsesElement)) + .build(); + mockMetastoreService.addResponse(expectedResponse); + + String parent = "parent-995424086"; + + ListLocksPagedResponse pagedListResponse = client.listLocks(parent); + + List resources = Lists.newArrayList(pagedListResponse.iterateAll()); + + Assert.assertEquals(1, resources.size()); + Assert.assertEquals(expectedResponse.getLocksList().get(0), resources.get(0)); + + List actualRequests = mockMetastoreService.getRequests(); + Assert.assertEquals(1, actualRequests.size()); + ListLocksRequest actualRequest = ((ListLocksRequest) actualRequests.get(0)); + + Assert.assertEquals(parent, actualRequest.getParent()); + Assert.assertTrue( + channelProvider.isHeaderSent( + ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), + GaxGrpcProperties.getDefaultApiClientHeaderPattern())); + } + + @Test + public void listLocksExceptionTest2() throws Exception { + StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); + mockMetastoreService.addException(exception); + + try { + String parent = "parent-995424086"; + client.listLocks(parent); + Assert.fail("No exception raised"); + } catch (InvalidArgumentException e) { + // Expected exception. + } + } +} diff --git a/java-biglake/google-cloud-biglake/src/test/java/com/google/cloud/bigquery/biglake/v1alpha1/MockMetastoreService.java b/java-biglake/google-cloud-biglake/src/test/java/com/google/cloud/bigquery/biglake/v1alpha1/MockMetastoreService.java new file mode 100644 index 000000000000..c22d0a1af607 --- /dev/null +++ b/java-biglake/google-cloud-biglake/src/test/java/com/google/cloud/bigquery/biglake/v1alpha1/MockMetastoreService.java @@ -0,0 +1,59 @@ +/* + * Copyright 2022 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.cloud.bigquery.biglake.v1alpha1; + +import com.google.api.core.BetaApi; +import com.google.api.gax.grpc.testing.MockGrpcService; +import com.google.protobuf.AbstractMessage; +import io.grpc.ServerServiceDefinition; +import java.util.List; +import javax.annotation.Generated; + +@BetaApi +@Generated("by gapic-generator-java") +public class MockMetastoreService implements MockGrpcService { + private final MockMetastoreServiceImpl serviceImpl; + + public MockMetastoreService() { + serviceImpl = new MockMetastoreServiceImpl(); + } + + @Override + public List getRequests() { + return serviceImpl.getRequests(); + } + + @Override + public void addResponse(AbstractMessage response) { + serviceImpl.addResponse(response); + } + + @Override + public void addException(Exception exception) { + serviceImpl.addException(exception); + } + + @Override + public ServerServiceDefinition getServiceDefinition() { + return serviceImpl.bindService(); + } + + @Override + public void reset() { + serviceImpl.reset(); + } +} diff --git a/java-biglake/google-cloud-biglake/src/test/java/com/google/cloud/bigquery/biglake/v1alpha1/MockMetastoreServiceImpl.java b/java-biglake/google-cloud-biglake/src/test/java/com/google/cloud/bigquery/biglake/v1alpha1/MockMetastoreServiceImpl.java new file mode 100644 index 000000000000..4a7d88ffd03e --- /dev/null +++ b/java-biglake/google-cloud-biglake/src/test/java/com/google/cloud/bigquery/biglake/v1alpha1/MockMetastoreServiceImpl.java @@ -0,0 +1,430 @@ +/* + * Copyright 2022 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.cloud.bigquery.biglake.v1alpha1; + +import com.google.api.core.BetaApi; +import com.google.cloud.bigquery.biglake.v1alpha1.MetastoreServiceGrpc.MetastoreServiceImplBase; +import com.google.protobuf.AbstractMessage; +import com.google.protobuf.Empty; +import io.grpc.stub.StreamObserver; +import java.util.ArrayList; +import java.util.LinkedList; +import java.util.List; +import java.util.Queue; +import javax.annotation.Generated; + +@BetaApi +@Generated("by gapic-generator-java") +public class MockMetastoreServiceImpl extends MetastoreServiceImplBase { + private List requests; + private Queue responses; + + public MockMetastoreServiceImpl() { + requests = new ArrayList<>(); + responses = new LinkedList<>(); + } + + public List getRequests() { + return requests; + } + + public void addResponse(AbstractMessage response) { + responses.add(response); + } + + public void setResponses(List responses) { + this.responses = new LinkedList(responses); + } + + public void addException(Exception exception) { + responses.add(exception); + } + + public void reset() { + requests = new ArrayList<>(); + responses = new LinkedList<>(); + } + + @Override + public void createCatalog( + CreateCatalogRequest request, StreamObserver responseObserver) { + Object response = responses.poll(); + if (response instanceof Catalog) { + requests.add(request); + responseObserver.onNext(((Catalog) response)); + responseObserver.onCompleted(); + } else if (response instanceof Exception) { + responseObserver.onError(((Exception) response)); + } else { + responseObserver.onError( + new IllegalArgumentException( + String.format( + "Unrecognized response type %s for method CreateCatalog, expected %s or %s", + response == null ? "null" : response.getClass().getName(), + Catalog.class.getName(), + Exception.class.getName()))); + } + } + + @Override + public void deleteCatalog( + DeleteCatalogRequest request, StreamObserver responseObserver) { + Object response = responses.poll(); + if (response instanceof Catalog) { + requests.add(request); + responseObserver.onNext(((Catalog) response)); + responseObserver.onCompleted(); + } else if (response instanceof Exception) { + responseObserver.onError(((Exception) response)); + } else { + responseObserver.onError( + new IllegalArgumentException( + String.format( + "Unrecognized response type %s for method DeleteCatalog, expected %s or %s", + response == null ? "null" : response.getClass().getName(), + Catalog.class.getName(), + Exception.class.getName()))); + } + } + + @Override + public void getCatalog(GetCatalogRequest request, StreamObserver responseObserver) { + Object response = responses.poll(); + if (response instanceof Catalog) { + requests.add(request); + responseObserver.onNext(((Catalog) response)); + responseObserver.onCompleted(); + } else if (response instanceof Exception) { + responseObserver.onError(((Exception) response)); + } else { + responseObserver.onError( + new IllegalArgumentException( + String.format( + "Unrecognized response type %s for method GetCatalog, expected %s or %s", + response == null ? 
"null" : response.getClass().getName(), + Catalog.class.getName(), + Exception.class.getName()))); + } + } + + @Override + public void listCatalogs( + ListCatalogsRequest request, StreamObserver responseObserver) { + Object response = responses.poll(); + if (response instanceof ListCatalogsResponse) { + requests.add(request); + responseObserver.onNext(((ListCatalogsResponse) response)); + responseObserver.onCompleted(); + } else if (response instanceof Exception) { + responseObserver.onError(((Exception) response)); + } else { + responseObserver.onError( + new IllegalArgumentException( + String.format( + "Unrecognized response type %s for method ListCatalogs, expected %s or %s", + response == null ? "null" : response.getClass().getName(), + ListCatalogsResponse.class.getName(), + Exception.class.getName()))); + } + } + + @Override + public void createDatabase( + CreateDatabaseRequest request, StreamObserver responseObserver) { + Object response = responses.poll(); + if (response instanceof Database) { + requests.add(request); + responseObserver.onNext(((Database) response)); + responseObserver.onCompleted(); + } else if (response instanceof Exception) { + responseObserver.onError(((Exception) response)); + } else { + responseObserver.onError( + new IllegalArgumentException( + String.format( + "Unrecognized response type %s for method CreateDatabase, expected %s or %s", + response == null ? "null" : response.getClass().getName(), + Database.class.getName(), + Exception.class.getName()))); + } + } + + @Override + public void deleteDatabase( + DeleteDatabaseRequest request, StreamObserver responseObserver) { + Object response = responses.poll(); + if (response instanceof Database) { + requests.add(request); + responseObserver.onNext(((Database) response)); + responseObserver.onCompleted(); + } else if (response instanceof Exception) { + responseObserver.onError(((Exception) response)); + } else { + responseObserver.onError( + new IllegalArgumentException( + String.format( + "Unrecognized response type %s for method DeleteDatabase, expected %s or %s", + response == null ? "null" : response.getClass().getName(), + Database.class.getName(), + Exception.class.getName()))); + } + } + + @Override + public void updateDatabase( + UpdateDatabaseRequest request, StreamObserver responseObserver) { + Object response = responses.poll(); + if (response instanceof Database) { + requests.add(request); + responseObserver.onNext(((Database) response)); + responseObserver.onCompleted(); + } else if (response instanceof Exception) { + responseObserver.onError(((Exception) response)); + } else { + responseObserver.onError( + new IllegalArgumentException( + String.format( + "Unrecognized response type %s for method UpdateDatabase, expected %s or %s", + response == null ? "null" : response.getClass().getName(), + Database.class.getName(), + Exception.class.getName()))); + } + } + + @Override + public void getDatabase(GetDatabaseRequest request, StreamObserver responseObserver) { + Object response = responses.poll(); + if (response instanceof Database) { + requests.add(request); + responseObserver.onNext(((Database) response)); + responseObserver.onCompleted(); + } else if (response instanceof Exception) { + responseObserver.onError(((Exception) response)); + } else { + responseObserver.onError( + new IllegalArgumentException( + String.format( + "Unrecognized response type %s for method GetDatabase, expected %s or %s", + response == null ? 
"null" : response.getClass().getName(), + Database.class.getName(), + Exception.class.getName()))); + } + } + + @Override + public void listDatabases( + ListDatabasesRequest request, StreamObserver responseObserver) { + Object response = responses.poll(); + if (response instanceof ListDatabasesResponse) { + requests.add(request); + responseObserver.onNext(((ListDatabasesResponse) response)); + responseObserver.onCompleted(); + } else if (response instanceof Exception) { + responseObserver.onError(((Exception) response)); + } else { + responseObserver.onError( + new IllegalArgumentException( + String.format( + "Unrecognized response type %s for method ListDatabases, expected %s or %s", + response == null ? "null" : response.getClass().getName(), + ListDatabasesResponse.class.getName(), + Exception.class.getName()))); + } + } + + @Override + public void createTable(CreateTableRequest request, StreamObserver
responseObserver) { + Object response = responses.poll(); + if (response instanceof Table) { + requests.add(request); + responseObserver.onNext(((Table) response)); + responseObserver.onCompleted(); + } else if (response instanceof Exception) { + responseObserver.onError(((Exception) response)); + } else { + responseObserver.onError( + new IllegalArgumentException( + String.format( + "Unrecognized response type %s for method CreateTable, expected %s or %s", + response == null ? "null" : response.getClass().getName(), + Table.class.getName(), + Exception.class.getName()))); + } + } + + @Override + public void deleteTable(DeleteTableRequest request, StreamObserver
<Table>
responseObserver) { + Object response = responses.poll(); + if (response instanceof Table) { + requests.add(request); + responseObserver.onNext(((Table) response)); + responseObserver.onCompleted(); + } else if (response instanceof Exception) { + responseObserver.onError(((Exception) response)); + } else { + responseObserver.onError( + new IllegalArgumentException( + String.format( + "Unrecognized response type %s for method DeleteTable, expected %s or %s", + response == null ? "null" : response.getClass().getName(), + Table.class.getName(), + Exception.class.getName()))); + } + } + + @Override + public void updateTable(UpdateTableRequest request, StreamObserver
<Table>
responseObserver) { + Object response = responses.poll(); + if (response instanceof Table) { + requests.add(request); + responseObserver.onNext(((Table) response)); + responseObserver.onCompleted(); + } else if (response instanceof Exception) { + responseObserver.onError(((Exception) response)); + } else { + responseObserver.onError( + new IllegalArgumentException( + String.format( + "Unrecognized response type %s for method UpdateTable, expected %s or %s", + response == null ? "null" : response.getClass().getName(), + Table.class.getName(), + Exception.class.getName()))); + } + } + + @Override + public void getTable(GetTableRequest request, StreamObserver
<Table>
responseObserver) { + Object response = responses.poll(); + if (response instanceof Table) { + requests.add(request); + responseObserver.onNext(((Table) response)); + responseObserver.onCompleted(); + } else if (response instanceof Exception) { + responseObserver.onError(((Exception) response)); + } else { + responseObserver.onError( + new IllegalArgumentException( + String.format( + "Unrecognized response type %s for method GetTable, expected %s or %s", + response == null ? "null" : response.getClass().getName(), + Table.class.getName(), + Exception.class.getName()))); + } + } + + @Override + public void listTables( + ListTablesRequest request, StreamObserver responseObserver) { + Object response = responses.poll(); + if (response instanceof ListTablesResponse) { + requests.add(request); + responseObserver.onNext(((ListTablesResponse) response)); + responseObserver.onCompleted(); + } else if (response instanceof Exception) { + responseObserver.onError(((Exception) response)); + } else { + responseObserver.onError( + new IllegalArgumentException( + String.format( + "Unrecognized response type %s for method ListTables, expected %s or %s", + response == null ? "null" : response.getClass().getName(), + ListTablesResponse.class.getName(), + Exception.class.getName()))); + } + } + + @Override + public void createLock(CreateLockRequest request, StreamObserver responseObserver) { + Object response = responses.poll(); + if (response instanceof Lock) { + requests.add(request); + responseObserver.onNext(((Lock) response)); + responseObserver.onCompleted(); + } else if (response instanceof Exception) { + responseObserver.onError(((Exception) response)); + } else { + responseObserver.onError( + new IllegalArgumentException( + String.format( + "Unrecognized response type %s for method CreateLock, expected %s or %s", + response == null ? "null" : response.getClass().getName(), + Lock.class.getName(), + Exception.class.getName()))); + } + } + + @Override + public void deleteLock(DeleteLockRequest request, StreamObserver responseObserver) { + Object response = responses.poll(); + if (response instanceof Empty) { + requests.add(request); + responseObserver.onNext(((Empty) response)); + responseObserver.onCompleted(); + } else if (response instanceof Exception) { + responseObserver.onError(((Exception) response)); + } else { + responseObserver.onError( + new IllegalArgumentException( + String.format( + "Unrecognized response type %s for method DeleteLock, expected %s or %s", + response == null ? "null" : response.getClass().getName(), + Empty.class.getName(), + Exception.class.getName()))); + } + } + + @Override + public void checkLock(CheckLockRequest request, StreamObserver responseObserver) { + Object response = responses.poll(); + if (response instanceof Lock) { + requests.add(request); + responseObserver.onNext(((Lock) response)); + responseObserver.onCompleted(); + } else if (response instanceof Exception) { + responseObserver.onError(((Exception) response)); + } else { + responseObserver.onError( + new IllegalArgumentException( + String.format( + "Unrecognized response type %s for method CheckLock, expected %s or %s", + response == null ? 
"null" : response.getClass().getName(), + Lock.class.getName(), + Exception.class.getName()))); + } + } + + @Override + public void listLocks( + ListLocksRequest request, StreamObserver responseObserver) { + Object response = responses.poll(); + if (response instanceof ListLocksResponse) { + requests.add(request); + responseObserver.onNext(((ListLocksResponse) response)); + responseObserver.onCompleted(); + } else if (response instanceof Exception) { + responseObserver.onError(((Exception) response)); + } else { + responseObserver.onError( + new IllegalArgumentException( + String.format( + "Unrecognized response type %s for method ListLocks, expected %s or %s", + response == null ? "null" : response.getClass().getName(), + ListLocksResponse.class.getName(), + Exception.class.getName()))); + } + } +} diff --git a/java-biglake/grpc-google-cloud-biglake-v1alpha1/pom.xml b/java-biglake/grpc-google-cloud-biglake-v1alpha1/pom.xml new file mode 100644 index 000000000000..3158a8dfd892 --- /dev/null +++ b/java-biglake/grpc-google-cloud-biglake-v1alpha1/pom.xml @@ -0,0 +1,45 @@ + + 4.0.0 + com.google.api.grpc + grpc-google-cloud-biglake-v1alpha1 + 0.0.1-SNAPSHOT + grpc-google-cloud-biglake-v1alpha1 + GRPC library for google-cloud-biglake + + com.google.cloud + google-cloud-biglake-parent + 0.0.1-SNAPSHOT + + + + io.grpc + grpc-api + + + io.grpc + grpc-stub + + + io.grpc + grpc-protobuf + + + com.google.protobuf + protobuf-java + + + com.google.api.grpc + proto-google-common-protos + + + com.google.api.grpc + proto-google-cloud-biglake-v1alpha1 + + + com.google.guava + guava + + + \ No newline at end of file diff --git a/java-biglake/grpc-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/MetastoreServiceGrpc.java b/java-biglake/grpc-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/MetastoreServiceGrpc.java new file mode 100644 index 000000000000..ac726ba7339d --- /dev/null +++ b/java-biglake/grpc-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/MetastoreServiceGrpc.java @@ -0,0 +1,2426 @@ +/* + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.cloud.bigquery.biglake.v1alpha1; + +import static io.grpc.MethodDescriptor.generateFullMethodName; + +/** + * + * + *
+ * BigLake Metastore is a serverless, highly available, multi-tenant runtime
+ * metastore for Google Cloud Data Analytics products.
+ * The BigLake Metastore API defines the following resource model:
+ * * A collection of Google Cloud projects: `/projects/*`
+ * * Each project has a collection of available locations: `/locations/*`
+ * * Each location has a collection of catalogs: `/catalogs/*`
+ * * Each catalog has a collection of databases: `/databases/*`
+ * * Each database has a collection of tables: `/tables/*`
+ * </pre>
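+ *
+ * <p>Illustrative only: a minimal usage sketch, not one of the generated samples in this PR.
+ * It shows how a caller might read table metadata through the generated GAPIC client in this
+ * module; the project, location, catalog, database, and table IDs are placeholders, and default
+ * application credentials are assumed.
+ *
+ * <pre>{@code
+ * // Hypothetical usage; resource IDs below are placeholders.
+ * try (MetastoreServiceClient client = MetastoreServiceClient.create()) {
+ *   // Fetch a single table by its resource name.
+ *   Table table =
+ *       client.getTable(
+ *           TableName.of("my-project", "us-central1", "my-catalog", "my-database", "my-table"));
+ *
+ *   // Page through every table in a database.
+ *   DatabaseName parent =
+ *       DatabaseName.of("my-project", "us-central1", "my-catalog", "my-database");
+ *   for (Table t : client.listTables(parent).iterateAll()) {
+ *     System.out.println(t.getName());
+ *   }
+ * }
+ * }</pre>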
+ */ +@javax.annotation.Generated( + value = "by gRPC proto compiler", + comments = "Source: google/cloud/bigquery/biglake/v1alpha1/metastore.proto") +@io.grpc.stub.annotations.GrpcGenerated +public final class MetastoreServiceGrpc { + + private MetastoreServiceGrpc() {} + + public static final String SERVICE_NAME = + "google.cloud.bigquery.biglake.v1alpha1.MetastoreService"; + + // Static method descriptors that strictly reflect the proto. + private static volatile io.grpc.MethodDescriptor< + com.google.cloud.bigquery.biglake.v1alpha1.CreateCatalogRequest, + com.google.cloud.bigquery.biglake.v1alpha1.Catalog> + getCreateCatalogMethod; + + @io.grpc.stub.annotations.RpcMethod( + fullMethodName = SERVICE_NAME + '/' + "CreateCatalog", + requestType = com.google.cloud.bigquery.biglake.v1alpha1.CreateCatalogRequest.class, + responseType = com.google.cloud.bigquery.biglake.v1alpha1.Catalog.class, + methodType = io.grpc.MethodDescriptor.MethodType.UNARY) + public static io.grpc.MethodDescriptor< + com.google.cloud.bigquery.biglake.v1alpha1.CreateCatalogRequest, + com.google.cloud.bigquery.biglake.v1alpha1.Catalog> + getCreateCatalogMethod() { + io.grpc.MethodDescriptor< + com.google.cloud.bigquery.biglake.v1alpha1.CreateCatalogRequest, + com.google.cloud.bigquery.biglake.v1alpha1.Catalog> + getCreateCatalogMethod; + if ((getCreateCatalogMethod = MetastoreServiceGrpc.getCreateCatalogMethod) == null) { + synchronized (MetastoreServiceGrpc.class) { + if ((getCreateCatalogMethod = MetastoreServiceGrpc.getCreateCatalogMethod) == null) { + MetastoreServiceGrpc.getCreateCatalogMethod = + getCreateCatalogMethod = + io.grpc.MethodDescriptor + . + newBuilder() + .setType(io.grpc.MethodDescriptor.MethodType.UNARY) + .setFullMethodName(generateFullMethodName(SERVICE_NAME, "CreateCatalog")) + .setSampledToLocalTracing(true) + .setRequestMarshaller( + io.grpc.protobuf.ProtoUtils.marshaller( + com.google.cloud.bigquery.biglake.v1alpha1.CreateCatalogRequest + .getDefaultInstance())) + .setResponseMarshaller( + io.grpc.protobuf.ProtoUtils.marshaller( + com.google.cloud.bigquery.biglake.v1alpha1.Catalog + .getDefaultInstance())) + .setSchemaDescriptor( + new MetastoreServiceMethodDescriptorSupplier("CreateCatalog")) + .build(); + } + } + } + return getCreateCatalogMethod; + } + + private static volatile io.grpc.MethodDescriptor< + com.google.cloud.bigquery.biglake.v1alpha1.DeleteCatalogRequest, + com.google.cloud.bigquery.biglake.v1alpha1.Catalog> + getDeleteCatalogMethod; + + @io.grpc.stub.annotations.RpcMethod( + fullMethodName = SERVICE_NAME + '/' + "DeleteCatalog", + requestType = com.google.cloud.bigquery.biglake.v1alpha1.DeleteCatalogRequest.class, + responseType = com.google.cloud.bigquery.biglake.v1alpha1.Catalog.class, + methodType = io.grpc.MethodDescriptor.MethodType.UNARY) + public static io.grpc.MethodDescriptor< + com.google.cloud.bigquery.biglake.v1alpha1.DeleteCatalogRequest, + com.google.cloud.bigquery.biglake.v1alpha1.Catalog> + getDeleteCatalogMethod() { + io.grpc.MethodDescriptor< + com.google.cloud.bigquery.biglake.v1alpha1.DeleteCatalogRequest, + com.google.cloud.bigquery.biglake.v1alpha1.Catalog> + getDeleteCatalogMethod; + if ((getDeleteCatalogMethod = MetastoreServiceGrpc.getDeleteCatalogMethod) == null) { + synchronized (MetastoreServiceGrpc.class) { + if ((getDeleteCatalogMethod = MetastoreServiceGrpc.getDeleteCatalogMethod) == null) { + MetastoreServiceGrpc.getDeleteCatalogMethod = + getDeleteCatalogMethod = + io.grpc.MethodDescriptor + . 
<com.google.cloud.bigquery.biglake.v1alpha1.DeleteCatalogRequest, com.google.cloud.bigquery.biglake.v1alpha1.Catalog>
+ newBuilder() + .setType(io.grpc.MethodDescriptor.MethodType.UNARY) + .setFullMethodName(generateFullMethodName(SERVICE_NAME, "DeleteCatalog")) + .setSampledToLocalTracing(true) + .setRequestMarshaller( + io.grpc.protobuf.ProtoUtils.marshaller( + com.google.cloud.bigquery.biglake.v1alpha1.DeleteCatalogRequest + .getDefaultInstance())) + .setResponseMarshaller( + io.grpc.protobuf.ProtoUtils.marshaller( + com.google.cloud.bigquery.biglake.v1alpha1.Catalog + .getDefaultInstance())) + .setSchemaDescriptor( + new MetastoreServiceMethodDescriptorSupplier("DeleteCatalog")) + .build(); + } + } + } + return getDeleteCatalogMethod; + } + + private static volatile io.grpc.MethodDescriptor< + com.google.cloud.bigquery.biglake.v1alpha1.GetCatalogRequest, + com.google.cloud.bigquery.biglake.v1alpha1.Catalog> + getGetCatalogMethod; + + @io.grpc.stub.annotations.RpcMethod( + fullMethodName = SERVICE_NAME + '/' + "GetCatalog", + requestType = com.google.cloud.bigquery.biglake.v1alpha1.GetCatalogRequest.class, + responseType = com.google.cloud.bigquery.biglake.v1alpha1.Catalog.class, + methodType = io.grpc.MethodDescriptor.MethodType.UNARY) + public static io.grpc.MethodDescriptor< + com.google.cloud.bigquery.biglake.v1alpha1.GetCatalogRequest, + com.google.cloud.bigquery.biglake.v1alpha1.Catalog> + getGetCatalogMethod() { + io.grpc.MethodDescriptor< + com.google.cloud.bigquery.biglake.v1alpha1.GetCatalogRequest, + com.google.cloud.bigquery.biglake.v1alpha1.Catalog> + getGetCatalogMethod; + if ((getGetCatalogMethod = MetastoreServiceGrpc.getGetCatalogMethod) == null) { + synchronized (MetastoreServiceGrpc.class) { + if ((getGetCatalogMethod = MetastoreServiceGrpc.getGetCatalogMethod) == null) { + MetastoreServiceGrpc.getGetCatalogMethod = + getGetCatalogMethod = + io.grpc.MethodDescriptor + . 
<com.google.cloud.bigquery.biglake.v1alpha1.GetCatalogRequest, com.google.cloud.bigquery.biglake.v1alpha1.Catalog>
+ newBuilder() + .setType(io.grpc.MethodDescriptor.MethodType.UNARY) + .setFullMethodName(generateFullMethodName(SERVICE_NAME, "GetCatalog")) + .setSampledToLocalTracing(true) + .setRequestMarshaller( + io.grpc.protobuf.ProtoUtils.marshaller( + com.google.cloud.bigquery.biglake.v1alpha1.GetCatalogRequest + .getDefaultInstance())) + .setResponseMarshaller( + io.grpc.protobuf.ProtoUtils.marshaller( + com.google.cloud.bigquery.biglake.v1alpha1.Catalog + .getDefaultInstance())) + .setSchemaDescriptor( + new MetastoreServiceMethodDescriptorSupplier("GetCatalog")) + .build(); + } + } + } + return getGetCatalogMethod; + } + + private static volatile io.grpc.MethodDescriptor< + com.google.cloud.bigquery.biglake.v1alpha1.ListCatalogsRequest, + com.google.cloud.bigquery.biglake.v1alpha1.ListCatalogsResponse> + getListCatalogsMethod; + + @io.grpc.stub.annotations.RpcMethod( + fullMethodName = SERVICE_NAME + '/' + "ListCatalogs", + requestType = com.google.cloud.bigquery.biglake.v1alpha1.ListCatalogsRequest.class, + responseType = com.google.cloud.bigquery.biglake.v1alpha1.ListCatalogsResponse.class, + methodType = io.grpc.MethodDescriptor.MethodType.UNARY) + public static io.grpc.MethodDescriptor< + com.google.cloud.bigquery.biglake.v1alpha1.ListCatalogsRequest, + com.google.cloud.bigquery.biglake.v1alpha1.ListCatalogsResponse> + getListCatalogsMethod() { + io.grpc.MethodDescriptor< + com.google.cloud.bigquery.biglake.v1alpha1.ListCatalogsRequest, + com.google.cloud.bigquery.biglake.v1alpha1.ListCatalogsResponse> + getListCatalogsMethod; + if ((getListCatalogsMethod = MetastoreServiceGrpc.getListCatalogsMethod) == null) { + synchronized (MetastoreServiceGrpc.class) { + if ((getListCatalogsMethod = MetastoreServiceGrpc.getListCatalogsMethod) == null) { + MetastoreServiceGrpc.getListCatalogsMethod = + getListCatalogsMethod = + io.grpc.MethodDescriptor + . 
<com.google.cloud.bigquery.biglake.v1alpha1.ListCatalogsRequest, com.google.cloud.bigquery.biglake.v1alpha1.ListCatalogsResponse>
+ newBuilder() + .setType(io.grpc.MethodDescriptor.MethodType.UNARY) + .setFullMethodName(generateFullMethodName(SERVICE_NAME, "ListCatalogs")) + .setSampledToLocalTracing(true) + .setRequestMarshaller( + io.grpc.protobuf.ProtoUtils.marshaller( + com.google.cloud.bigquery.biglake.v1alpha1.ListCatalogsRequest + .getDefaultInstance())) + .setResponseMarshaller( + io.grpc.protobuf.ProtoUtils.marshaller( + com.google.cloud.bigquery.biglake.v1alpha1.ListCatalogsResponse + .getDefaultInstance())) + .setSchemaDescriptor( + new MetastoreServiceMethodDescriptorSupplier("ListCatalogs")) + .build(); + } + } + } + return getListCatalogsMethod; + } + + private static volatile io.grpc.MethodDescriptor< + com.google.cloud.bigquery.biglake.v1alpha1.CreateDatabaseRequest, + com.google.cloud.bigquery.biglake.v1alpha1.Database> + getCreateDatabaseMethod; + + @io.grpc.stub.annotations.RpcMethod( + fullMethodName = SERVICE_NAME + '/' + "CreateDatabase", + requestType = com.google.cloud.bigquery.biglake.v1alpha1.CreateDatabaseRequest.class, + responseType = com.google.cloud.bigquery.biglake.v1alpha1.Database.class, + methodType = io.grpc.MethodDescriptor.MethodType.UNARY) + public static io.grpc.MethodDescriptor< + com.google.cloud.bigquery.biglake.v1alpha1.CreateDatabaseRequest, + com.google.cloud.bigquery.biglake.v1alpha1.Database> + getCreateDatabaseMethod() { + io.grpc.MethodDescriptor< + com.google.cloud.bigquery.biglake.v1alpha1.CreateDatabaseRequest, + com.google.cloud.bigquery.biglake.v1alpha1.Database> + getCreateDatabaseMethod; + if ((getCreateDatabaseMethod = MetastoreServiceGrpc.getCreateDatabaseMethod) == null) { + synchronized (MetastoreServiceGrpc.class) { + if ((getCreateDatabaseMethod = MetastoreServiceGrpc.getCreateDatabaseMethod) == null) { + MetastoreServiceGrpc.getCreateDatabaseMethod = + getCreateDatabaseMethod = + io.grpc.MethodDescriptor + . 
<com.google.cloud.bigquery.biglake.v1alpha1.CreateDatabaseRequest, com.google.cloud.bigquery.biglake.v1alpha1.Database>
+ newBuilder() + .setType(io.grpc.MethodDescriptor.MethodType.UNARY) + .setFullMethodName(generateFullMethodName(SERVICE_NAME, "CreateDatabase")) + .setSampledToLocalTracing(true) + .setRequestMarshaller( + io.grpc.protobuf.ProtoUtils.marshaller( + com.google.cloud.bigquery.biglake.v1alpha1.CreateDatabaseRequest + .getDefaultInstance())) + .setResponseMarshaller( + io.grpc.protobuf.ProtoUtils.marshaller( + com.google.cloud.bigquery.biglake.v1alpha1.Database + .getDefaultInstance())) + .setSchemaDescriptor( + new MetastoreServiceMethodDescriptorSupplier("CreateDatabase")) + .build(); + } + } + } + return getCreateDatabaseMethod; + } + + private static volatile io.grpc.MethodDescriptor< + com.google.cloud.bigquery.biglake.v1alpha1.DeleteDatabaseRequest, + com.google.cloud.bigquery.biglake.v1alpha1.Database> + getDeleteDatabaseMethod; + + @io.grpc.stub.annotations.RpcMethod( + fullMethodName = SERVICE_NAME + '/' + "DeleteDatabase", + requestType = com.google.cloud.bigquery.biglake.v1alpha1.DeleteDatabaseRequest.class, + responseType = com.google.cloud.bigquery.biglake.v1alpha1.Database.class, + methodType = io.grpc.MethodDescriptor.MethodType.UNARY) + public static io.grpc.MethodDescriptor< + com.google.cloud.bigquery.biglake.v1alpha1.DeleteDatabaseRequest, + com.google.cloud.bigquery.biglake.v1alpha1.Database> + getDeleteDatabaseMethod() { + io.grpc.MethodDescriptor< + com.google.cloud.bigquery.biglake.v1alpha1.DeleteDatabaseRequest, + com.google.cloud.bigquery.biglake.v1alpha1.Database> + getDeleteDatabaseMethod; + if ((getDeleteDatabaseMethod = MetastoreServiceGrpc.getDeleteDatabaseMethod) == null) { + synchronized (MetastoreServiceGrpc.class) { + if ((getDeleteDatabaseMethod = MetastoreServiceGrpc.getDeleteDatabaseMethod) == null) { + MetastoreServiceGrpc.getDeleteDatabaseMethod = + getDeleteDatabaseMethod = + io.grpc.MethodDescriptor + . 
+ newBuilder() + .setType(io.grpc.MethodDescriptor.MethodType.UNARY) + .setFullMethodName(generateFullMethodName(SERVICE_NAME, "DeleteDatabase")) + .setSampledToLocalTracing(true) + .setRequestMarshaller( + io.grpc.protobuf.ProtoUtils.marshaller( + com.google.cloud.bigquery.biglake.v1alpha1.DeleteDatabaseRequest + .getDefaultInstance())) + .setResponseMarshaller( + io.grpc.protobuf.ProtoUtils.marshaller( + com.google.cloud.bigquery.biglake.v1alpha1.Database + .getDefaultInstance())) + .setSchemaDescriptor( + new MetastoreServiceMethodDescriptorSupplier("DeleteDatabase")) + .build(); + } + } + } + return getDeleteDatabaseMethod; + } + + private static volatile io.grpc.MethodDescriptor< + com.google.cloud.bigquery.biglake.v1alpha1.UpdateDatabaseRequest, + com.google.cloud.bigquery.biglake.v1alpha1.Database> + getUpdateDatabaseMethod; + + @io.grpc.stub.annotations.RpcMethod( + fullMethodName = SERVICE_NAME + '/' + "UpdateDatabase", + requestType = com.google.cloud.bigquery.biglake.v1alpha1.UpdateDatabaseRequest.class, + responseType = com.google.cloud.bigquery.biglake.v1alpha1.Database.class, + methodType = io.grpc.MethodDescriptor.MethodType.UNARY) + public static io.grpc.MethodDescriptor< + com.google.cloud.bigquery.biglake.v1alpha1.UpdateDatabaseRequest, + com.google.cloud.bigquery.biglake.v1alpha1.Database> + getUpdateDatabaseMethod() { + io.grpc.MethodDescriptor< + com.google.cloud.bigquery.biglake.v1alpha1.UpdateDatabaseRequest, + com.google.cloud.bigquery.biglake.v1alpha1.Database> + getUpdateDatabaseMethod; + if ((getUpdateDatabaseMethod = MetastoreServiceGrpc.getUpdateDatabaseMethod) == null) { + synchronized (MetastoreServiceGrpc.class) { + if ((getUpdateDatabaseMethod = MetastoreServiceGrpc.getUpdateDatabaseMethod) == null) { + MetastoreServiceGrpc.getUpdateDatabaseMethod = + getUpdateDatabaseMethod = + io.grpc.MethodDescriptor + . 
+ newBuilder() + .setType(io.grpc.MethodDescriptor.MethodType.UNARY) + .setFullMethodName(generateFullMethodName(SERVICE_NAME, "UpdateDatabase")) + .setSampledToLocalTracing(true) + .setRequestMarshaller( + io.grpc.protobuf.ProtoUtils.marshaller( + com.google.cloud.bigquery.biglake.v1alpha1.UpdateDatabaseRequest + .getDefaultInstance())) + .setResponseMarshaller( + io.grpc.protobuf.ProtoUtils.marshaller( + com.google.cloud.bigquery.biglake.v1alpha1.Database + .getDefaultInstance())) + .setSchemaDescriptor( + new MetastoreServiceMethodDescriptorSupplier("UpdateDatabase")) + .build(); + } + } + } + return getUpdateDatabaseMethod; + } + + private static volatile io.grpc.MethodDescriptor< + com.google.cloud.bigquery.biglake.v1alpha1.GetDatabaseRequest, + com.google.cloud.bigquery.biglake.v1alpha1.Database> + getGetDatabaseMethod; + + @io.grpc.stub.annotations.RpcMethod( + fullMethodName = SERVICE_NAME + '/' + "GetDatabase", + requestType = com.google.cloud.bigquery.biglake.v1alpha1.GetDatabaseRequest.class, + responseType = com.google.cloud.bigquery.biglake.v1alpha1.Database.class, + methodType = io.grpc.MethodDescriptor.MethodType.UNARY) + public static io.grpc.MethodDescriptor< + com.google.cloud.bigquery.biglake.v1alpha1.GetDatabaseRequest, + com.google.cloud.bigquery.biglake.v1alpha1.Database> + getGetDatabaseMethod() { + io.grpc.MethodDescriptor< + com.google.cloud.bigquery.biglake.v1alpha1.GetDatabaseRequest, + com.google.cloud.bigquery.biglake.v1alpha1.Database> + getGetDatabaseMethod; + if ((getGetDatabaseMethod = MetastoreServiceGrpc.getGetDatabaseMethod) == null) { + synchronized (MetastoreServiceGrpc.class) { + if ((getGetDatabaseMethod = MetastoreServiceGrpc.getGetDatabaseMethod) == null) { + MetastoreServiceGrpc.getGetDatabaseMethod = + getGetDatabaseMethod = + io.grpc.MethodDescriptor + . 
+ newBuilder() + .setType(io.grpc.MethodDescriptor.MethodType.UNARY) + .setFullMethodName(generateFullMethodName(SERVICE_NAME, "GetDatabase")) + .setSampledToLocalTracing(true) + .setRequestMarshaller( + io.grpc.protobuf.ProtoUtils.marshaller( + com.google.cloud.bigquery.biglake.v1alpha1.GetDatabaseRequest + .getDefaultInstance())) + .setResponseMarshaller( + io.grpc.protobuf.ProtoUtils.marshaller( + com.google.cloud.bigquery.biglake.v1alpha1.Database + .getDefaultInstance())) + .setSchemaDescriptor( + new MetastoreServiceMethodDescriptorSupplier("GetDatabase")) + .build(); + } + } + } + return getGetDatabaseMethod; + } + + private static volatile io.grpc.MethodDescriptor< + com.google.cloud.bigquery.biglake.v1alpha1.ListDatabasesRequest, + com.google.cloud.bigquery.biglake.v1alpha1.ListDatabasesResponse> + getListDatabasesMethod; + + @io.grpc.stub.annotations.RpcMethod( + fullMethodName = SERVICE_NAME + '/' + "ListDatabases", + requestType = com.google.cloud.bigquery.biglake.v1alpha1.ListDatabasesRequest.class, + responseType = com.google.cloud.bigquery.biglake.v1alpha1.ListDatabasesResponse.class, + methodType = io.grpc.MethodDescriptor.MethodType.UNARY) + public static io.grpc.MethodDescriptor< + com.google.cloud.bigquery.biglake.v1alpha1.ListDatabasesRequest, + com.google.cloud.bigquery.biglake.v1alpha1.ListDatabasesResponse> + getListDatabasesMethod() { + io.grpc.MethodDescriptor< + com.google.cloud.bigquery.biglake.v1alpha1.ListDatabasesRequest, + com.google.cloud.bigquery.biglake.v1alpha1.ListDatabasesResponse> + getListDatabasesMethod; + if ((getListDatabasesMethod = MetastoreServiceGrpc.getListDatabasesMethod) == null) { + synchronized (MetastoreServiceGrpc.class) { + if ((getListDatabasesMethod = MetastoreServiceGrpc.getListDatabasesMethod) == null) { + MetastoreServiceGrpc.getListDatabasesMethod = + getListDatabasesMethod = + io.grpc.MethodDescriptor + . 
+ newBuilder() + .setType(io.grpc.MethodDescriptor.MethodType.UNARY) + .setFullMethodName(generateFullMethodName(SERVICE_NAME, "ListDatabases")) + .setSampledToLocalTracing(true) + .setRequestMarshaller( + io.grpc.protobuf.ProtoUtils.marshaller( + com.google.cloud.bigquery.biglake.v1alpha1.ListDatabasesRequest + .getDefaultInstance())) + .setResponseMarshaller( + io.grpc.protobuf.ProtoUtils.marshaller( + com.google.cloud.bigquery.biglake.v1alpha1.ListDatabasesResponse + .getDefaultInstance())) + .setSchemaDescriptor( + new MetastoreServiceMethodDescriptorSupplier("ListDatabases")) + .build(); + } + } + } + return getListDatabasesMethod; + } + + private static volatile io.grpc.MethodDescriptor< + com.google.cloud.bigquery.biglake.v1alpha1.CreateTableRequest, + com.google.cloud.bigquery.biglake.v1alpha1.Table> + getCreateTableMethod; + + @io.grpc.stub.annotations.RpcMethod( + fullMethodName = SERVICE_NAME + '/' + "CreateTable", + requestType = com.google.cloud.bigquery.biglake.v1alpha1.CreateTableRequest.class, + responseType = com.google.cloud.bigquery.biglake.v1alpha1.Table.class, + methodType = io.grpc.MethodDescriptor.MethodType.UNARY) + public static io.grpc.MethodDescriptor< + com.google.cloud.bigquery.biglake.v1alpha1.CreateTableRequest, + com.google.cloud.bigquery.biglake.v1alpha1.Table> + getCreateTableMethod() { + io.grpc.MethodDescriptor< + com.google.cloud.bigquery.biglake.v1alpha1.CreateTableRequest, + com.google.cloud.bigquery.biglake.v1alpha1.Table> + getCreateTableMethod; + if ((getCreateTableMethod = MetastoreServiceGrpc.getCreateTableMethod) == null) { + synchronized (MetastoreServiceGrpc.class) { + if ((getCreateTableMethod = MetastoreServiceGrpc.getCreateTableMethod) == null) { + MetastoreServiceGrpc.getCreateTableMethod = + getCreateTableMethod = + io.grpc.MethodDescriptor + . 
+ newBuilder() + .setType(io.grpc.MethodDescriptor.MethodType.UNARY) + .setFullMethodName(generateFullMethodName(SERVICE_NAME, "CreateTable")) + .setSampledToLocalTracing(true) + .setRequestMarshaller( + io.grpc.protobuf.ProtoUtils.marshaller( + com.google.cloud.bigquery.biglake.v1alpha1.CreateTableRequest + .getDefaultInstance())) + .setResponseMarshaller( + io.grpc.protobuf.ProtoUtils.marshaller( + com.google.cloud.bigquery.biglake.v1alpha1.Table + .getDefaultInstance())) + .setSchemaDescriptor( + new MetastoreServiceMethodDescriptorSupplier("CreateTable")) + .build(); + } + } + } + return getCreateTableMethod; + } + + private static volatile io.grpc.MethodDescriptor< + com.google.cloud.bigquery.biglake.v1alpha1.DeleteTableRequest, + com.google.cloud.bigquery.biglake.v1alpha1.Table> + getDeleteTableMethod; + + @io.grpc.stub.annotations.RpcMethod( + fullMethodName = SERVICE_NAME + '/' + "DeleteTable", + requestType = com.google.cloud.bigquery.biglake.v1alpha1.DeleteTableRequest.class, + responseType = com.google.cloud.bigquery.biglake.v1alpha1.Table.class, + methodType = io.grpc.MethodDescriptor.MethodType.UNARY) + public static io.grpc.MethodDescriptor< + com.google.cloud.bigquery.biglake.v1alpha1.DeleteTableRequest, + com.google.cloud.bigquery.biglake.v1alpha1.Table> + getDeleteTableMethod() { + io.grpc.MethodDescriptor< + com.google.cloud.bigquery.biglake.v1alpha1.DeleteTableRequest, + com.google.cloud.bigquery.biglake.v1alpha1.Table> + getDeleteTableMethod; + if ((getDeleteTableMethod = MetastoreServiceGrpc.getDeleteTableMethod) == null) { + synchronized (MetastoreServiceGrpc.class) { + if ((getDeleteTableMethod = MetastoreServiceGrpc.getDeleteTableMethod) == null) { + MetastoreServiceGrpc.getDeleteTableMethod = + getDeleteTableMethod = + io.grpc.MethodDescriptor + . 
+ newBuilder() + .setType(io.grpc.MethodDescriptor.MethodType.UNARY) + .setFullMethodName(generateFullMethodName(SERVICE_NAME, "DeleteTable")) + .setSampledToLocalTracing(true) + .setRequestMarshaller( + io.grpc.protobuf.ProtoUtils.marshaller( + com.google.cloud.bigquery.biglake.v1alpha1.DeleteTableRequest + .getDefaultInstance())) + .setResponseMarshaller( + io.grpc.protobuf.ProtoUtils.marshaller( + com.google.cloud.bigquery.biglake.v1alpha1.Table + .getDefaultInstance())) + .setSchemaDescriptor( + new MetastoreServiceMethodDescriptorSupplier("DeleteTable")) + .build(); + } + } + } + return getDeleteTableMethod; + } + + private static volatile io.grpc.MethodDescriptor< + com.google.cloud.bigquery.biglake.v1alpha1.UpdateTableRequest, + com.google.cloud.bigquery.biglake.v1alpha1.Table> + getUpdateTableMethod; + + @io.grpc.stub.annotations.RpcMethod( + fullMethodName = SERVICE_NAME + '/' + "UpdateTable", + requestType = com.google.cloud.bigquery.biglake.v1alpha1.UpdateTableRequest.class, + responseType = com.google.cloud.bigquery.biglake.v1alpha1.Table.class, + methodType = io.grpc.MethodDescriptor.MethodType.UNARY) + public static io.grpc.MethodDescriptor< + com.google.cloud.bigquery.biglake.v1alpha1.UpdateTableRequest, + com.google.cloud.bigquery.biglake.v1alpha1.Table> + getUpdateTableMethod() { + io.grpc.MethodDescriptor< + com.google.cloud.bigquery.biglake.v1alpha1.UpdateTableRequest, + com.google.cloud.bigquery.biglake.v1alpha1.Table> + getUpdateTableMethod; + if ((getUpdateTableMethod = MetastoreServiceGrpc.getUpdateTableMethod) == null) { + synchronized (MetastoreServiceGrpc.class) { + if ((getUpdateTableMethod = MetastoreServiceGrpc.getUpdateTableMethod) == null) { + MetastoreServiceGrpc.getUpdateTableMethod = + getUpdateTableMethod = + io.grpc.MethodDescriptor + . 
+ newBuilder() + .setType(io.grpc.MethodDescriptor.MethodType.UNARY) + .setFullMethodName(generateFullMethodName(SERVICE_NAME, "UpdateTable")) + .setSampledToLocalTracing(true) + .setRequestMarshaller( + io.grpc.protobuf.ProtoUtils.marshaller( + com.google.cloud.bigquery.biglake.v1alpha1.UpdateTableRequest + .getDefaultInstance())) + .setResponseMarshaller( + io.grpc.protobuf.ProtoUtils.marshaller( + com.google.cloud.bigquery.biglake.v1alpha1.Table + .getDefaultInstance())) + .setSchemaDescriptor( + new MetastoreServiceMethodDescriptorSupplier("UpdateTable")) + .build(); + } + } + } + return getUpdateTableMethod; + } + + private static volatile io.grpc.MethodDescriptor< + com.google.cloud.bigquery.biglake.v1alpha1.GetTableRequest, + com.google.cloud.bigquery.biglake.v1alpha1.Table> + getGetTableMethod; + + @io.grpc.stub.annotations.RpcMethod( + fullMethodName = SERVICE_NAME + '/' + "GetTable", + requestType = com.google.cloud.bigquery.biglake.v1alpha1.GetTableRequest.class, + responseType = com.google.cloud.bigquery.biglake.v1alpha1.Table.class, + methodType = io.grpc.MethodDescriptor.MethodType.UNARY) + public static io.grpc.MethodDescriptor< + com.google.cloud.bigquery.biglake.v1alpha1.GetTableRequest, + com.google.cloud.bigquery.biglake.v1alpha1.Table> + getGetTableMethod() { + io.grpc.MethodDescriptor< + com.google.cloud.bigquery.biglake.v1alpha1.GetTableRequest, + com.google.cloud.bigquery.biglake.v1alpha1.Table> + getGetTableMethod; + if ((getGetTableMethod = MetastoreServiceGrpc.getGetTableMethod) == null) { + synchronized (MetastoreServiceGrpc.class) { + if ((getGetTableMethod = MetastoreServiceGrpc.getGetTableMethod) == null) { + MetastoreServiceGrpc.getGetTableMethod = + getGetTableMethod = + io.grpc.MethodDescriptor + . + newBuilder() + .setType(io.grpc.MethodDescriptor.MethodType.UNARY) + .setFullMethodName(generateFullMethodName(SERVICE_NAME, "GetTable")) + .setSampledToLocalTracing(true) + .setRequestMarshaller( + io.grpc.protobuf.ProtoUtils.marshaller( + com.google.cloud.bigquery.biglake.v1alpha1.GetTableRequest + .getDefaultInstance())) + .setResponseMarshaller( + io.grpc.protobuf.ProtoUtils.marshaller( + com.google.cloud.bigquery.biglake.v1alpha1.Table + .getDefaultInstance())) + .setSchemaDescriptor(new MetastoreServiceMethodDescriptorSupplier("GetTable")) + .build(); + } + } + } + return getGetTableMethod; + } + + private static volatile io.grpc.MethodDescriptor< + com.google.cloud.bigquery.biglake.v1alpha1.ListTablesRequest, + com.google.cloud.bigquery.biglake.v1alpha1.ListTablesResponse> + getListTablesMethod; + + @io.grpc.stub.annotations.RpcMethod( + fullMethodName = SERVICE_NAME + '/' + "ListTables", + requestType = com.google.cloud.bigquery.biglake.v1alpha1.ListTablesRequest.class, + responseType = com.google.cloud.bigquery.biglake.v1alpha1.ListTablesResponse.class, + methodType = io.grpc.MethodDescriptor.MethodType.UNARY) + public static io.grpc.MethodDescriptor< + com.google.cloud.bigquery.biglake.v1alpha1.ListTablesRequest, + com.google.cloud.bigquery.biglake.v1alpha1.ListTablesResponse> + getListTablesMethod() { + io.grpc.MethodDescriptor< + com.google.cloud.bigquery.biglake.v1alpha1.ListTablesRequest, + com.google.cloud.bigquery.biglake.v1alpha1.ListTablesResponse> + getListTablesMethod; + if ((getListTablesMethod = MetastoreServiceGrpc.getListTablesMethod) == null) { + synchronized (MetastoreServiceGrpc.class) { + if ((getListTablesMethod = MetastoreServiceGrpc.getListTablesMethod) == null) { + MetastoreServiceGrpc.getListTablesMethod = + getListTablesMethod 
= + io.grpc.MethodDescriptor + . + newBuilder() + .setType(io.grpc.MethodDescriptor.MethodType.UNARY) + .setFullMethodName(generateFullMethodName(SERVICE_NAME, "ListTables")) + .setSampledToLocalTracing(true) + .setRequestMarshaller( + io.grpc.protobuf.ProtoUtils.marshaller( + com.google.cloud.bigquery.biglake.v1alpha1.ListTablesRequest + .getDefaultInstance())) + .setResponseMarshaller( + io.grpc.protobuf.ProtoUtils.marshaller( + com.google.cloud.bigquery.biglake.v1alpha1.ListTablesResponse + .getDefaultInstance())) + .setSchemaDescriptor( + new MetastoreServiceMethodDescriptorSupplier("ListTables")) + .build(); + } + } + } + return getListTablesMethod; + } + + private static volatile io.grpc.MethodDescriptor< + com.google.cloud.bigquery.biglake.v1alpha1.CreateLockRequest, + com.google.cloud.bigquery.biglake.v1alpha1.Lock> + getCreateLockMethod; + + @io.grpc.stub.annotations.RpcMethod( + fullMethodName = SERVICE_NAME + '/' + "CreateLock", + requestType = com.google.cloud.bigquery.biglake.v1alpha1.CreateLockRequest.class, + responseType = com.google.cloud.bigquery.biglake.v1alpha1.Lock.class, + methodType = io.grpc.MethodDescriptor.MethodType.UNARY) + public static io.grpc.MethodDescriptor< + com.google.cloud.bigquery.biglake.v1alpha1.CreateLockRequest, + com.google.cloud.bigquery.biglake.v1alpha1.Lock> + getCreateLockMethod() { + io.grpc.MethodDescriptor< + com.google.cloud.bigquery.biglake.v1alpha1.CreateLockRequest, + com.google.cloud.bigquery.biglake.v1alpha1.Lock> + getCreateLockMethod; + if ((getCreateLockMethod = MetastoreServiceGrpc.getCreateLockMethod) == null) { + synchronized (MetastoreServiceGrpc.class) { + if ((getCreateLockMethod = MetastoreServiceGrpc.getCreateLockMethod) == null) { + MetastoreServiceGrpc.getCreateLockMethod = + getCreateLockMethod = + io.grpc.MethodDescriptor + . + newBuilder() + .setType(io.grpc.MethodDescriptor.MethodType.UNARY) + .setFullMethodName(generateFullMethodName(SERVICE_NAME, "CreateLock")) + .setSampledToLocalTracing(true) + .setRequestMarshaller( + io.grpc.protobuf.ProtoUtils.marshaller( + com.google.cloud.bigquery.biglake.v1alpha1.CreateLockRequest + .getDefaultInstance())) + .setResponseMarshaller( + io.grpc.protobuf.ProtoUtils.marshaller( + com.google.cloud.bigquery.biglake.v1alpha1.Lock.getDefaultInstance())) + .setSchemaDescriptor( + new MetastoreServiceMethodDescriptorSupplier("CreateLock")) + .build(); + } + } + } + return getCreateLockMethod; + } + + private static volatile io.grpc.MethodDescriptor< + com.google.cloud.bigquery.biglake.v1alpha1.DeleteLockRequest, com.google.protobuf.Empty> + getDeleteLockMethod; + + @io.grpc.stub.annotations.RpcMethod( + fullMethodName = SERVICE_NAME + '/' + "DeleteLock", + requestType = com.google.cloud.bigquery.biglake.v1alpha1.DeleteLockRequest.class, + responseType = com.google.protobuf.Empty.class, + methodType = io.grpc.MethodDescriptor.MethodType.UNARY) + public static io.grpc.MethodDescriptor< + com.google.cloud.bigquery.biglake.v1alpha1.DeleteLockRequest, com.google.protobuf.Empty> + getDeleteLockMethod() { + io.grpc.MethodDescriptor< + com.google.cloud.bigquery.biglake.v1alpha1.DeleteLockRequest, com.google.protobuf.Empty> + getDeleteLockMethod; + if ((getDeleteLockMethod = MetastoreServiceGrpc.getDeleteLockMethod) == null) { + synchronized (MetastoreServiceGrpc.class) { + if ((getDeleteLockMethod = MetastoreServiceGrpc.getDeleteLockMethod) == null) { + MetastoreServiceGrpc.getDeleteLockMethod = + getDeleteLockMethod = + io.grpc.MethodDescriptor + . 
+ newBuilder() + .setType(io.grpc.MethodDescriptor.MethodType.UNARY) + .setFullMethodName(generateFullMethodName(SERVICE_NAME, "DeleteLock")) + .setSampledToLocalTracing(true) + .setRequestMarshaller( + io.grpc.protobuf.ProtoUtils.marshaller( + com.google.cloud.bigquery.biglake.v1alpha1.DeleteLockRequest + .getDefaultInstance())) + .setResponseMarshaller( + io.grpc.protobuf.ProtoUtils.marshaller( + com.google.protobuf.Empty.getDefaultInstance())) + .setSchemaDescriptor( + new MetastoreServiceMethodDescriptorSupplier("DeleteLock")) + .build(); + } + } + } + return getDeleteLockMethod; + } + + private static volatile io.grpc.MethodDescriptor< + com.google.cloud.bigquery.biglake.v1alpha1.CheckLockRequest, + com.google.cloud.bigquery.biglake.v1alpha1.Lock> + getCheckLockMethod; + + @io.grpc.stub.annotations.RpcMethod( + fullMethodName = SERVICE_NAME + '/' + "CheckLock", + requestType = com.google.cloud.bigquery.biglake.v1alpha1.CheckLockRequest.class, + responseType = com.google.cloud.bigquery.biglake.v1alpha1.Lock.class, + methodType = io.grpc.MethodDescriptor.MethodType.UNARY) + public static io.grpc.MethodDescriptor< + com.google.cloud.bigquery.biglake.v1alpha1.CheckLockRequest, + com.google.cloud.bigquery.biglake.v1alpha1.Lock> + getCheckLockMethod() { + io.grpc.MethodDescriptor< + com.google.cloud.bigquery.biglake.v1alpha1.CheckLockRequest, + com.google.cloud.bigquery.biglake.v1alpha1.Lock> + getCheckLockMethod; + if ((getCheckLockMethod = MetastoreServiceGrpc.getCheckLockMethod) == null) { + synchronized (MetastoreServiceGrpc.class) { + if ((getCheckLockMethod = MetastoreServiceGrpc.getCheckLockMethod) == null) { + MetastoreServiceGrpc.getCheckLockMethod = + getCheckLockMethod = + io.grpc.MethodDescriptor + . + newBuilder() + .setType(io.grpc.MethodDescriptor.MethodType.UNARY) + .setFullMethodName(generateFullMethodName(SERVICE_NAME, "CheckLock")) + .setSampledToLocalTracing(true) + .setRequestMarshaller( + io.grpc.protobuf.ProtoUtils.marshaller( + com.google.cloud.bigquery.biglake.v1alpha1.CheckLockRequest + .getDefaultInstance())) + .setResponseMarshaller( + io.grpc.protobuf.ProtoUtils.marshaller( + com.google.cloud.bigquery.biglake.v1alpha1.Lock.getDefaultInstance())) + .setSchemaDescriptor( + new MetastoreServiceMethodDescriptorSupplier("CheckLock")) + .build(); + } + } + } + return getCheckLockMethod; + } + + private static volatile io.grpc.MethodDescriptor< + com.google.cloud.bigquery.biglake.v1alpha1.ListLocksRequest, + com.google.cloud.bigquery.biglake.v1alpha1.ListLocksResponse> + getListLocksMethod; + + @io.grpc.stub.annotations.RpcMethod( + fullMethodName = SERVICE_NAME + '/' + "ListLocks", + requestType = com.google.cloud.bigquery.biglake.v1alpha1.ListLocksRequest.class, + responseType = com.google.cloud.bigquery.biglake.v1alpha1.ListLocksResponse.class, + methodType = io.grpc.MethodDescriptor.MethodType.UNARY) + public static io.grpc.MethodDescriptor< + com.google.cloud.bigquery.biglake.v1alpha1.ListLocksRequest, + com.google.cloud.bigquery.biglake.v1alpha1.ListLocksResponse> + getListLocksMethod() { + io.grpc.MethodDescriptor< + com.google.cloud.bigquery.biglake.v1alpha1.ListLocksRequest, + com.google.cloud.bigquery.biglake.v1alpha1.ListLocksResponse> + getListLocksMethod; + if ((getListLocksMethod = MetastoreServiceGrpc.getListLocksMethod) == null) { + synchronized (MetastoreServiceGrpc.class) { + if ((getListLocksMethod = MetastoreServiceGrpc.getListLocksMethod) == null) { + MetastoreServiceGrpc.getListLocksMethod = + getListLocksMethod = + io.grpc.MethodDescriptor + . 
+ newBuilder() + .setType(io.grpc.MethodDescriptor.MethodType.UNARY) + .setFullMethodName(generateFullMethodName(SERVICE_NAME, "ListLocks")) + .setSampledToLocalTracing(true) + .setRequestMarshaller( + io.grpc.protobuf.ProtoUtils.marshaller( + com.google.cloud.bigquery.biglake.v1alpha1.ListLocksRequest + .getDefaultInstance())) + .setResponseMarshaller( + io.grpc.protobuf.ProtoUtils.marshaller( + com.google.cloud.bigquery.biglake.v1alpha1.ListLocksResponse + .getDefaultInstance())) + .setSchemaDescriptor( + new MetastoreServiceMethodDescriptorSupplier("ListLocks")) + .build(); + } + } + } + return getListLocksMethod; + } + + /** Creates a new async stub that supports all call types for the service */ + public static MetastoreServiceStub newStub(io.grpc.Channel channel) { + io.grpc.stub.AbstractStub.StubFactory factory = + new io.grpc.stub.AbstractStub.StubFactory() { + @java.lang.Override + public MetastoreServiceStub newStub( + io.grpc.Channel channel, io.grpc.CallOptions callOptions) { + return new MetastoreServiceStub(channel, callOptions); + } + }; + return MetastoreServiceStub.newStub(factory, channel); + } + + /** + * Creates a new blocking-style stub that supports unary and streaming output calls on the service + */ + public static MetastoreServiceBlockingStub newBlockingStub(io.grpc.Channel channel) { + io.grpc.stub.AbstractStub.StubFactory factory = + new io.grpc.stub.AbstractStub.StubFactory() { + @java.lang.Override + public MetastoreServiceBlockingStub newStub( + io.grpc.Channel channel, io.grpc.CallOptions callOptions) { + return new MetastoreServiceBlockingStub(channel, callOptions); + } + }; + return MetastoreServiceBlockingStub.newStub(factory, channel); + } + + /** Creates a new ListenableFuture-style stub that supports unary calls on the service */ + public static MetastoreServiceFutureStub newFutureStub(io.grpc.Channel channel) { + io.grpc.stub.AbstractStub.StubFactory factory = + new io.grpc.stub.AbstractStub.StubFactory() { + @java.lang.Override + public MetastoreServiceFutureStub newStub( + io.grpc.Channel channel, io.grpc.CallOptions callOptions) { + return new MetastoreServiceFutureStub(channel, callOptions); + } + }; + return MetastoreServiceFutureStub.newStub(factory, channel); + } + + /** + * + * + *
+   * BigLake Metastore is a serverless, highly available, multi-tenant runtime
+   * metastore for Google Cloud Data Analytics products.
+   * The BigLake Metastore API defines the following resource model:
+   * * A collection of Google Cloud projects: `/projects/*`
+   * * Each project has a collection of available locations: `/locations/*`
+   * * Each location has a collection of catalogs: `/catalogs/*`
+   * * Each catalog has a collection of databases: `/databases/*`
+   * * Each database has a collection of tables: `/tables/*`
+   * 
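+   *
+   * <p>For illustration only, a minimal sketch of a server-side implementation; the class name
+   * {@code MyMetastoreService} and the returned value are placeholders:
+   * <pre>{@code
+   * class MyMetastoreService extends MetastoreServiceGrpc.MetastoreServiceImplBase {
+   *   public void getCatalog(
+   *       com.google.cloud.bigquery.biglake.v1alpha1.GetCatalogRequest request,
+   *       io.grpc.stub.StreamObserver<com.google.cloud.bigquery.biglake.v1alpha1.Catalog>
+   *           responseObserver) {
+   *     // For illustration, return an empty Catalog; a real implementation would look up
+   *     // the resource named by request.getName().
+   *     responseObserver.onNext(
+   *         com.google.cloud.bigquery.biglake.v1alpha1.Catalog.getDefaultInstance());
+   *     responseObserver.onCompleted();
+   *   }
+   * }
+   * }</pre>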
+ */ + public abstract static class MetastoreServiceImplBase implements io.grpc.BindableService { + + /** + * + * + *
+     * Creates a new catalog.
+     * 
+ */ + public void createCatalog( + com.google.cloud.bigquery.biglake.v1alpha1.CreateCatalogRequest request, + io.grpc.stub.StreamObserver + responseObserver) { + io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall( + getCreateCatalogMethod(), responseObserver); + } + + /** + * + * + *
+     * Deletes an existing catalog specified by the catalog ID.
+     * 
+ */ + public void deleteCatalog( + com.google.cloud.bigquery.biglake.v1alpha1.DeleteCatalogRequest request, + io.grpc.stub.StreamObserver + responseObserver) { + io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall( + getDeleteCatalogMethod(), responseObserver); + } + + /** + * + * + *
+     * Gets the catalog specified by the resource name.
+     * 
+ */ + public void getCatalog( + com.google.cloud.bigquery.biglake.v1alpha1.GetCatalogRequest request, + io.grpc.stub.StreamObserver + responseObserver) { + io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(getGetCatalogMethod(), responseObserver); + } + + /** + * + * + *
+     * Lists all catalogs in a specified project.
+     * 
+ */ + public void listCatalogs( + com.google.cloud.bigquery.biglake.v1alpha1.ListCatalogsRequest request, + io.grpc.stub.StreamObserver + responseObserver) { + io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall( + getListCatalogsMethod(), responseObserver); + } + + /** + * + * + *
+     * Creates a new database.
+     * 
+ */ + public void createDatabase( + com.google.cloud.bigquery.biglake.v1alpha1.CreateDatabaseRequest request, + io.grpc.stub.StreamObserver + responseObserver) { + io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall( + getCreateDatabaseMethod(), responseObserver); + } + + /** + * + * + *
+     * Deletes an existing database specified by the database ID.
+     * 
+ */ + public void deleteDatabase( + com.google.cloud.bigquery.biglake.v1alpha1.DeleteDatabaseRequest request, + io.grpc.stub.StreamObserver + responseObserver) { + io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall( + getDeleteDatabaseMethod(), responseObserver); + } + + /** + * + * + *
+     * Updates an existing database specified by the database ID.
+     * 
+ */ + public void updateDatabase( + com.google.cloud.bigquery.biglake.v1alpha1.UpdateDatabaseRequest request, + io.grpc.stub.StreamObserver + responseObserver) { + io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall( + getUpdateDatabaseMethod(), responseObserver); + } + + /** + * + * + *
+     * Gets the database specified by the resource name.
+     * 
+ */ + public void getDatabase( + com.google.cloud.bigquery.biglake.v1alpha1.GetDatabaseRequest request, + io.grpc.stub.StreamObserver + responseObserver) { + io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall( + getGetDatabaseMethod(), responseObserver); + } + + /** + * + * + *
+     * Lists all databases in a specified catalog.
+     * 
+ */ + public void listDatabases( + com.google.cloud.bigquery.biglake.v1alpha1.ListDatabasesRequest request, + io.grpc.stub.StreamObserver< + com.google.cloud.bigquery.biglake.v1alpha1.ListDatabasesResponse> + responseObserver) { + io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall( + getListDatabasesMethod(), responseObserver); + } + + /** + * + * + *
+     * Creates a new table.
+     * 
+ */ + public void createTable( + com.google.cloud.bigquery.biglake.v1alpha1.CreateTableRequest request, + io.grpc.stub.StreamObserver + responseObserver) { + io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall( + getCreateTableMethod(), responseObserver); + } + + /** + * + * + *
+     * Deletes an existing table specified by the table ID.
+     * 
+ */ + public void deleteTable( + com.google.cloud.bigquery.biglake.v1alpha1.DeleteTableRequest request, + io.grpc.stub.StreamObserver + responseObserver) { + io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall( + getDeleteTableMethod(), responseObserver); + } + + /** + * + * + *
+     * Updates an existing table specified by the table ID.
+     * 
+ */ + public void updateTable( + com.google.cloud.bigquery.biglake.v1alpha1.UpdateTableRequest request, + io.grpc.stub.StreamObserver + responseObserver) { + io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall( + getUpdateTableMethod(), responseObserver); + } + + /** + * + * + *
+     * Gets the table specified by the resource name.
+     * 
+ */ + public void getTable( + com.google.cloud.bigquery.biglake.v1alpha1.GetTableRequest request, + io.grpc.stub.StreamObserver + responseObserver) { + io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(getGetTableMethod(), responseObserver); + } + + /** + * + * + *
+     * Lists all tables in a specified database.
+     * 
+ */ + public void listTables( + com.google.cloud.bigquery.biglake.v1alpha1.ListTablesRequest request, + io.grpc.stub.StreamObserver + responseObserver) { + io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(getListTablesMethod(), responseObserver); + } + + /** + * + * + *
+     * Creates a new lock.
+     * 
+ */ + public void createLock( + com.google.cloud.bigquery.biglake.v1alpha1.CreateLockRequest request, + io.grpc.stub.StreamObserver + responseObserver) { + io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(getCreateLockMethod(), responseObserver); + } + + /** + * + * + *
+     * Deletes an existing lock specified by the lock ID.
+     * 
+ */ + public void deleteLock( + com.google.cloud.bigquery.biglake.v1alpha1.DeleteLockRequest request, + io.grpc.stub.StreamObserver responseObserver) { + io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(getDeleteLockMethod(), responseObserver); + } + + /** + * + * + *
+     * Checks the state of a lock specified by the lock ID.
+     * 
+ */ + public void checkLock( + com.google.cloud.bigquery.biglake.v1alpha1.CheckLockRequest request, + io.grpc.stub.StreamObserver + responseObserver) { + io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(getCheckLockMethod(), responseObserver); + } + + /** + * + * + *
+     * Lists all locks in a specified database.
+     * 
+ */ + public void listLocks( + com.google.cloud.bigquery.biglake.v1alpha1.ListLocksRequest request, + io.grpc.stub.StreamObserver + responseObserver) { + io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(getListLocksMethod(), responseObserver); + } + + @java.lang.Override + public final io.grpc.ServerServiceDefinition bindService() { + return io.grpc.ServerServiceDefinition.builder(getServiceDescriptor()) + .addMethod( + getCreateCatalogMethod(), + io.grpc.stub.ServerCalls.asyncUnaryCall( + new MethodHandlers< + com.google.cloud.bigquery.biglake.v1alpha1.CreateCatalogRequest, + com.google.cloud.bigquery.biglake.v1alpha1.Catalog>( + this, METHODID_CREATE_CATALOG))) + .addMethod( + getDeleteCatalogMethod(), + io.grpc.stub.ServerCalls.asyncUnaryCall( + new MethodHandlers< + com.google.cloud.bigquery.biglake.v1alpha1.DeleteCatalogRequest, + com.google.cloud.bigquery.biglake.v1alpha1.Catalog>( + this, METHODID_DELETE_CATALOG))) + .addMethod( + getGetCatalogMethod(), + io.grpc.stub.ServerCalls.asyncUnaryCall( + new MethodHandlers< + com.google.cloud.bigquery.biglake.v1alpha1.GetCatalogRequest, + com.google.cloud.bigquery.biglake.v1alpha1.Catalog>( + this, METHODID_GET_CATALOG))) + .addMethod( + getListCatalogsMethod(), + io.grpc.stub.ServerCalls.asyncUnaryCall( + new MethodHandlers< + com.google.cloud.bigquery.biglake.v1alpha1.ListCatalogsRequest, + com.google.cloud.bigquery.biglake.v1alpha1.ListCatalogsResponse>( + this, METHODID_LIST_CATALOGS))) + .addMethod( + getCreateDatabaseMethod(), + io.grpc.stub.ServerCalls.asyncUnaryCall( + new MethodHandlers< + com.google.cloud.bigquery.biglake.v1alpha1.CreateDatabaseRequest, + com.google.cloud.bigquery.biglake.v1alpha1.Database>( + this, METHODID_CREATE_DATABASE))) + .addMethod( + getDeleteDatabaseMethod(), + io.grpc.stub.ServerCalls.asyncUnaryCall( + new MethodHandlers< + com.google.cloud.bigquery.biglake.v1alpha1.DeleteDatabaseRequest, + com.google.cloud.bigquery.biglake.v1alpha1.Database>( + this, METHODID_DELETE_DATABASE))) + .addMethod( + getUpdateDatabaseMethod(), + io.grpc.stub.ServerCalls.asyncUnaryCall( + new MethodHandlers< + com.google.cloud.bigquery.biglake.v1alpha1.UpdateDatabaseRequest, + com.google.cloud.bigquery.biglake.v1alpha1.Database>( + this, METHODID_UPDATE_DATABASE))) + .addMethod( + getGetDatabaseMethod(), + io.grpc.stub.ServerCalls.asyncUnaryCall( + new MethodHandlers< + com.google.cloud.bigquery.biglake.v1alpha1.GetDatabaseRequest, + com.google.cloud.bigquery.biglake.v1alpha1.Database>( + this, METHODID_GET_DATABASE))) + .addMethod( + getListDatabasesMethod(), + io.grpc.stub.ServerCalls.asyncUnaryCall( + new MethodHandlers< + com.google.cloud.bigquery.biglake.v1alpha1.ListDatabasesRequest, + com.google.cloud.bigquery.biglake.v1alpha1.ListDatabasesResponse>( + this, METHODID_LIST_DATABASES))) + .addMethod( + getCreateTableMethod(), + io.grpc.stub.ServerCalls.asyncUnaryCall( + new MethodHandlers< + com.google.cloud.bigquery.biglake.v1alpha1.CreateTableRequest, + com.google.cloud.bigquery.biglake.v1alpha1.Table>( + this, METHODID_CREATE_TABLE))) + .addMethod( + getDeleteTableMethod(), + io.grpc.stub.ServerCalls.asyncUnaryCall( + new MethodHandlers< + com.google.cloud.bigquery.biglake.v1alpha1.DeleteTableRequest, + com.google.cloud.bigquery.biglake.v1alpha1.Table>( + this, METHODID_DELETE_TABLE))) + .addMethod( + getUpdateTableMethod(), + io.grpc.stub.ServerCalls.asyncUnaryCall( + new MethodHandlers< + com.google.cloud.bigquery.biglake.v1alpha1.UpdateTableRequest, + com.google.cloud.bigquery.biglake.v1alpha1.Table>( + this, 
METHODID_UPDATE_TABLE))) + .addMethod( + getGetTableMethod(), + io.grpc.stub.ServerCalls.asyncUnaryCall( + new MethodHandlers< + com.google.cloud.bigquery.biglake.v1alpha1.GetTableRequest, + com.google.cloud.bigquery.biglake.v1alpha1.Table>(this, METHODID_GET_TABLE))) + .addMethod( + getListTablesMethod(), + io.grpc.stub.ServerCalls.asyncUnaryCall( + new MethodHandlers< + com.google.cloud.bigquery.biglake.v1alpha1.ListTablesRequest, + com.google.cloud.bigquery.biglake.v1alpha1.ListTablesResponse>( + this, METHODID_LIST_TABLES))) + .addMethod( + getCreateLockMethod(), + io.grpc.stub.ServerCalls.asyncUnaryCall( + new MethodHandlers< + com.google.cloud.bigquery.biglake.v1alpha1.CreateLockRequest, + com.google.cloud.bigquery.biglake.v1alpha1.Lock>(this, METHODID_CREATE_LOCK))) + .addMethod( + getDeleteLockMethod(), + io.grpc.stub.ServerCalls.asyncUnaryCall( + new MethodHandlers< + com.google.cloud.bigquery.biglake.v1alpha1.DeleteLockRequest, + com.google.protobuf.Empty>(this, METHODID_DELETE_LOCK))) + .addMethod( + getCheckLockMethod(), + io.grpc.stub.ServerCalls.asyncUnaryCall( + new MethodHandlers< + com.google.cloud.bigquery.biglake.v1alpha1.CheckLockRequest, + com.google.cloud.bigquery.biglake.v1alpha1.Lock>(this, METHODID_CHECK_LOCK))) + .addMethod( + getListLocksMethod(), + io.grpc.stub.ServerCalls.asyncUnaryCall( + new MethodHandlers< + com.google.cloud.bigquery.biglake.v1alpha1.ListLocksRequest, + com.google.cloud.bigquery.biglake.v1alpha1.ListLocksResponse>( + this, METHODID_LIST_LOCKS))) + .build(); + } + } + + /** + * + * + *
+   * BigLake Metastore is a serverless, highly available, multi-tenant runtime
+   * metastore for Google Cloud Data Analytics products.
+   * The BigLake Metastore API defines the following resource model:
+   * * A collection of Google Cloud projects: `/projects/*`
+   * * Each project has a collection of available locations: `/locations/*`
+   * * Each location has a collection of catalogs: `/catalogs/*`
+   * * Each catalog has a collection of databases: `/databases/*`
+   * * Each database has a collection of tables: `/tables/*`
+   * 
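+   *
+   * <p>For illustration only, a minimal sketch of an asynchronous call; it assumes an existing
+   * {@code io.grpc.ManagedChannel} named {@code channel} and uses a placeholder resource name:
+   * <pre>{@code
+   * MetastoreServiceGrpc.MetastoreServiceStub asyncStub = MetastoreServiceGrpc.newStub(channel);
+   * com.google.cloud.bigquery.biglake.v1alpha1.GetCatalogRequest request =
+   *     com.google.cloud.bigquery.biglake.v1alpha1.GetCatalogRequest.newBuilder()
+   *         .setName("projects/my-project/locations/us/catalogs/my-catalog")
+   *         .build();
+   * asyncStub.getCatalog(
+   *     request,
+   *     new io.grpc.stub.StreamObserver<com.google.cloud.bigquery.biglake.v1alpha1.Catalog>() {
+   *       public void onNext(com.google.cloud.bigquery.biglake.v1alpha1.Catalog catalog) {
+   *         // Handle the catalog returned by the server.
+   *       }
+   *
+   *       public void onError(Throwable t) {
+   *         // Handle a failed call.
+   *       }
+   *
+   *       public void onCompleted() {}
+   *     });
+   * }</pre>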
+   */
+  public static final class MetastoreServiceStub
+      extends io.grpc.stub.AbstractAsyncStub<MetastoreServiceStub> {
+    private MetastoreServiceStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
+      super(channel, callOptions);
+    }
+
+    @java.lang.Override
+    protected MetastoreServiceStub build(io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
+      return new MetastoreServiceStub(channel, callOptions);
+    }
+
+    /**
+     *
+     *
+     *
+     * Creates a new catalog.
+     * 
+ */ + public void createCatalog( + com.google.cloud.bigquery.biglake.v1alpha1.CreateCatalogRequest request, + io.grpc.stub.StreamObserver + responseObserver) { + io.grpc.stub.ClientCalls.asyncUnaryCall( + getChannel().newCall(getCreateCatalogMethod(), getCallOptions()), + request, + responseObserver); + } + + /** + * + * + *
+     * Deletes an existing catalog specified by the catalog ID.
+     * 
+ */ + public void deleteCatalog( + com.google.cloud.bigquery.biglake.v1alpha1.DeleteCatalogRequest request, + io.grpc.stub.StreamObserver + responseObserver) { + io.grpc.stub.ClientCalls.asyncUnaryCall( + getChannel().newCall(getDeleteCatalogMethod(), getCallOptions()), + request, + responseObserver); + } + + /** + * + * + *
+     * Gets the catalog specified by the resource name.
+     * 
+ */ + public void getCatalog( + com.google.cloud.bigquery.biglake.v1alpha1.GetCatalogRequest request, + io.grpc.stub.StreamObserver + responseObserver) { + io.grpc.stub.ClientCalls.asyncUnaryCall( + getChannel().newCall(getGetCatalogMethod(), getCallOptions()), request, responseObserver); + } + + /** + * + * + *
+     * Lists all catalogs in a specified project.
+     * 
+ */ + public void listCatalogs( + com.google.cloud.bigquery.biglake.v1alpha1.ListCatalogsRequest request, + io.grpc.stub.StreamObserver + responseObserver) { + io.grpc.stub.ClientCalls.asyncUnaryCall( + getChannel().newCall(getListCatalogsMethod(), getCallOptions()), + request, + responseObserver); + } + + /** + * + * + *
+     * Creates a new database.
+     * 
+ */ + public void createDatabase( + com.google.cloud.bigquery.biglake.v1alpha1.CreateDatabaseRequest request, + io.grpc.stub.StreamObserver + responseObserver) { + io.grpc.stub.ClientCalls.asyncUnaryCall( + getChannel().newCall(getCreateDatabaseMethod(), getCallOptions()), + request, + responseObserver); + } + + /** + * + * + *
+     * Deletes an existing database specified by the database ID.
+     * 
+ */ + public void deleteDatabase( + com.google.cloud.bigquery.biglake.v1alpha1.DeleteDatabaseRequest request, + io.grpc.stub.StreamObserver + responseObserver) { + io.grpc.stub.ClientCalls.asyncUnaryCall( + getChannel().newCall(getDeleteDatabaseMethod(), getCallOptions()), + request, + responseObserver); + } + + /** + * + * + *
+     * Updates an existing database specified by the database ID.
+     * 
+ */ + public void updateDatabase( + com.google.cloud.bigquery.biglake.v1alpha1.UpdateDatabaseRequest request, + io.grpc.stub.StreamObserver + responseObserver) { + io.grpc.stub.ClientCalls.asyncUnaryCall( + getChannel().newCall(getUpdateDatabaseMethod(), getCallOptions()), + request, + responseObserver); + } + + /** + * + * + *
+     * Gets the database specified by the resource name.
+     * 
+ */ + public void getDatabase( + com.google.cloud.bigquery.biglake.v1alpha1.GetDatabaseRequest request, + io.grpc.stub.StreamObserver + responseObserver) { + io.grpc.stub.ClientCalls.asyncUnaryCall( + getChannel().newCall(getGetDatabaseMethod(), getCallOptions()), + request, + responseObserver); + } + + /** + * + * + *
+     * Lists all databases in a specified catalog.
+     * 
+ */ + public void listDatabases( + com.google.cloud.bigquery.biglake.v1alpha1.ListDatabasesRequest request, + io.grpc.stub.StreamObserver< + com.google.cloud.bigquery.biglake.v1alpha1.ListDatabasesResponse> + responseObserver) { + io.grpc.stub.ClientCalls.asyncUnaryCall( + getChannel().newCall(getListDatabasesMethod(), getCallOptions()), + request, + responseObserver); + } + + /** + * + * + *
+     * Creates a new table.
+     * 
+ */ + public void createTable( + com.google.cloud.bigquery.biglake.v1alpha1.CreateTableRequest request, + io.grpc.stub.StreamObserver + responseObserver) { + io.grpc.stub.ClientCalls.asyncUnaryCall( + getChannel().newCall(getCreateTableMethod(), getCallOptions()), + request, + responseObserver); + } + + /** + * + * + *
+     * Deletes an existing table specified by the table ID.
+     * 
+ */ + public void deleteTable( + com.google.cloud.bigquery.biglake.v1alpha1.DeleteTableRequest request, + io.grpc.stub.StreamObserver + responseObserver) { + io.grpc.stub.ClientCalls.asyncUnaryCall( + getChannel().newCall(getDeleteTableMethod(), getCallOptions()), + request, + responseObserver); + } + + /** + * + * + *
+     * Updates an existing table specified by the table ID.
+     * 
+ */ + public void updateTable( + com.google.cloud.bigquery.biglake.v1alpha1.UpdateTableRequest request, + io.grpc.stub.StreamObserver + responseObserver) { + io.grpc.stub.ClientCalls.asyncUnaryCall( + getChannel().newCall(getUpdateTableMethod(), getCallOptions()), + request, + responseObserver); + } + + /** + * + * + *
+     * Gets the table specified by the resource name.
+     * 
+ */ + public void getTable( + com.google.cloud.bigquery.biglake.v1alpha1.GetTableRequest request, + io.grpc.stub.StreamObserver + responseObserver) { + io.grpc.stub.ClientCalls.asyncUnaryCall( + getChannel().newCall(getGetTableMethod(), getCallOptions()), request, responseObserver); + } + + /** + * + * + *
+     * Lists all tables in a specified database.
+     * 
+ */ + public void listTables( + com.google.cloud.bigquery.biglake.v1alpha1.ListTablesRequest request, + io.grpc.stub.StreamObserver + responseObserver) { + io.grpc.stub.ClientCalls.asyncUnaryCall( + getChannel().newCall(getListTablesMethod(), getCallOptions()), request, responseObserver); + } + + /** + * + * + *
+     * Creates a new lock.
+     * 
+ */ + public void createLock( + com.google.cloud.bigquery.biglake.v1alpha1.CreateLockRequest request, + io.grpc.stub.StreamObserver + responseObserver) { + io.grpc.stub.ClientCalls.asyncUnaryCall( + getChannel().newCall(getCreateLockMethod(), getCallOptions()), request, responseObserver); + } + + /** + * + * + *
+     * Deletes an existing lock specified by the lock ID.
+     * 
+ */ + public void deleteLock( + com.google.cloud.bigquery.biglake.v1alpha1.DeleteLockRequest request, + io.grpc.stub.StreamObserver responseObserver) { + io.grpc.stub.ClientCalls.asyncUnaryCall( + getChannel().newCall(getDeleteLockMethod(), getCallOptions()), request, responseObserver); + } + + /** + * + * + *
+     * Checks the state of a lock specified by the lock ID.
+     * 
+ */ + public void checkLock( + com.google.cloud.bigquery.biglake.v1alpha1.CheckLockRequest request, + io.grpc.stub.StreamObserver + responseObserver) { + io.grpc.stub.ClientCalls.asyncUnaryCall( + getChannel().newCall(getCheckLockMethod(), getCallOptions()), request, responseObserver); + } + + /** + * + * + *
+     * Lists all locks in a specified database.
+     * 
+ */ + public void listLocks( + com.google.cloud.bigquery.biglake.v1alpha1.ListLocksRequest request, + io.grpc.stub.StreamObserver + responseObserver) { + io.grpc.stub.ClientCalls.asyncUnaryCall( + getChannel().newCall(getListLocksMethod(), getCallOptions()), request, responseObserver); + } + } + + /** + * + * + *
+   * BigLake Metastore is a serverless, highly available, multi-tenant runtime
+   * metastore for Google Cloud Data Analytics products.
+   * The BigLake Metastore API defines the following resource model:
+   * * A collection of Google Cloud projects: `/projects/*`
+   * * Each project has a collection of available locations: `/locations/*`
+   * * Each location has a collection of catalogs: `/catalogs/*`
+   * * Each catalog has a collection of databases: `/databases/*`
+   * * Each database has a collection of tables: `/tables/*`
+   * 
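+   *
+   * <p>For illustration only, a minimal sketch of a blocking call; it assumes an existing
+   * {@code io.grpc.ManagedChannel} named {@code channel} and uses a placeholder resource name:
+   * <pre>{@code
+   * MetastoreServiceGrpc.MetastoreServiceBlockingStub blockingStub =
+   *     MetastoreServiceGrpc.newBlockingStub(channel);
+   * com.google.cloud.bigquery.biglake.v1alpha1.Catalog catalog =
+   *     blockingStub.getCatalog(
+   *         com.google.cloud.bigquery.biglake.v1alpha1.GetCatalogRequest.newBuilder()
+   *             .setName("projects/my-project/locations/us/catalogs/my-catalog")
+   *             .build());
+   * }</pre>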
+   */
+  public static final class MetastoreServiceBlockingStub
+      extends io.grpc.stub.AbstractBlockingStub<MetastoreServiceBlockingStub> {
+    private MetastoreServiceBlockingStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
+      super(channel, callOptions);
+    }
+
+    @java.lang.Override
+    protected MetastoreServiceBlockingStub build(
+        io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
+      return new MetastoreServiceBlockingStub(channel, callOptions);
+    }
+
+    /**
+     *
+     *
+     *
+     * Creates a new catalog.
+     * 
+ */ + public com.google.cloud.bigquery.biglake.v1alpha1.Catalog createCatalog( + com.google.cloud.bigquery.biglake.v1alpha1.CreateCatalogRequest request) { + return io.grpc.stub.ClientCalls.blockingUnaryCall( + getChannel(), getCreateCatalogMethod(), getCallOptions(), request); + } + + /** + * + * + *
+     * Deletes an existing catalog specified by the catalog ID.
+     * 
+ */ + public com.google.cloud.bigquery.biglake.v1alpha1.Catalog deleteCatalog( + com.google.cloud.bigquery.biglake.v1alpha1.DeleteCatalogRequest request) { + return io.grpc.stub.ClientCalls.blockingUnaryCall( + getChannel(), getDeleteCatalogMethod(), getCallOptions(), request); + } + + /** + * + * + *
+     * Gets the catalog specified by the resource name.
+     * 
+ */ + public com.google.cloud.bigquery.biglake.v1alpha1.Catalog getCatalog( + com.google.cloud.bigquery.biglake.v1alpha1.GetCatalogRequest request) { + return io.grpc.stub.ClientCalls.blockingUnaryCall( + getChannel(), getGetCatalogMethod(), getCallOptions(), request); + } + + /** + * + * + *
+     * Lists all catalogs in a specified project.
+     * 
+ */ + public com.google.cloud.bigquery.biglake.v1alpha1.ListCatalogsResponse listCatalogs( + com.google.cloud.bigquery.biglake.v1alpha1.ListCatalogsRequest request) { + return io.grpc.stub.ClientCalls.blockingUnaryCall( + getChannel(), getListCatalogsMethod(), getCallOptions(), request); + } + + /** + * + * + *
+     * Creates a new database.
+     * 
+ */ + public com.google.cloud.bigquery.biglake.v1alpha1.Database createDatabase( + com.google.cloud.bigquery.biglake.v1alpha1.CreateDatabaseRequest request) { + return io.grpc.stub.ClientCalls.blockingUnaryCall( + getChannel(), getCreateDatabaseMethod(), getCallOptions(), request); + } + + /** + * + * + *
+     * Deletes an existing database specified by the database ID.
+     * 
+ */ + public com.google.cloud.bigquery.biglake.v1alpha1.Database deleteDatabase( + com.google.cloud.bigquery.biglake.v1alpha1.DeleteDatabaseRequest request) { + return io.grpc.stub.ClientCalls.blockingUnaryCall( + getChannel(), getDeleteDatabaseMethod(), getCallOptions(), request); + } + + /** + * + * + *
+     * Updates an existing database specified by the database ID.
+     * 
+ */ + public com.google.cloud.bigquery.biglake.v1alpha1.Database updateDatabase( + com.google.cloud.bigquery.biglake.v1alpha1.UpdateDatabaseRequest request) { + return io.grpc.stub.ClientCalls.blockingUnaryCall( + getChannel(), getUpdateDatabaseMethod(), getCallOptions(), request); + } + + /** + * + * + *
+     * Gets the database specified by the resource name.
+     * 
+ */ + public com.google.cloud.bigquery.biglake.v1alpha1.Database getDatabase( + com.google.cloud.bigquery.biglake.v1alpha1.GetDatabaseRequest request) { + return io.grpc.stub.ClientCalls.blockingUnaryCall( + getChannel(), getGetDatabaseMethod(), getCallOptions(), request); + } + + /** + * + * + *
+     * Lists all databases in a specified catalog.
+     * 
+ */ + public com.google.cloud.bigquery.biglake.v1alpha1.ListDatabasesResponse listDatabases( + com.google.cloud.bigquery.biglake.v1alpha1.ListDatabasesRequest request) { + return io.grpc.stub.ClientCalls.blockingUnaryCall( + getChannel(), getListDatabasesMethod(), getCallOptions(), request); + } + + /** + * + * + *
+     * Creates a new table.
+     * 
+ */ + public com.google.cloud.bigquery.biglake.v1alpha1.Table createTable( + com.google.cloud.bigquery.biglake.v1alpha1.CreateTableRequest request) { + return io.grpc.stub.ClientCalls.blockingUnaryCall( + getChannel(), getCreateTableMethod(), getCallOptions(), request); + } + + /** + * + * + *
+     * Deletes an existing table specified by the table ID.
+     * 
+ */ + public com.google.cloud.bigquery.biglake.v1alpha1.Table deleteTable( + com.google.cloud.bigquery.biglake.v1alpha1.DeleteTableRequest request) { + return io.grpc.stub.ClientCalls.blockingUnaryCall( + getChannel(), getDeleteTableMethod(), getCallOptions(), request); + } + + /** + * + * + *
+     * Updates an existing table specified by the table ID.
+     * 
+ */ + public com.google.cloud.bigquery.biglake.v1alpha1.Table updateTable( + com.google.cloud.bigquery.biglake.v1alpha1.UpdateTableRequest request) { + return io.grpc.stub.ClientCalls.blockingUnaryCall( + getChannel(), getUpdateTableMethod(), getCallOptions(), request); + } + + /** + * + * + *
+     * Gets the table specified by the resource name.
+     * 
+ */ + public com.google.cloud.bigquery.biglake.v1alpha1.Table getTable( + com.google.cloud.bigquery.biglake.v1alpha1.GetTableRequest request) { + return io.grpc.stub.ClientCalls.blockingUnaryCall( + getChannel(), getGetTableMethod(), getCallOptions(), request); + } + + /** + * + * + *
+     * Lists all tables in a specified database.
+     * 
+ */ + public com.google.cloud.bigquery.biglake.v1alpha1.ListTablesResponse listTables( + com.google.cloud.bigquery.biglake.v1alpha1.ListTablesRequest request) { + return io.grpc.stub.ClientCalls.blockingUnaryCall( + getChannel(), getListTablesMethod(), getCallOptions(), request); + } + + /** + * + * + *
+     * Creates a new lock.
+     * 
+ */ + public com.google.cloud.bigquery.biglake.v1alpha1.Lock createLock( + com.google.cloud.bigquery.biglake.v1alpha1.CreateLockRequest request) { + return io.grpc.stub.ClientCalls.blockingUnaryCall( + getChannel(), getCreateLockMethod(), getCallOptions(), request); + } + + /** + * + * + *
+     * Deletes an existing lock specified by the lock ID.
+     * 
+ */ + public com.google.protobuf.Empty deleteLock( + com.google.cloud.bigquery.biglake.v1alpha1.DeleteLockRequest request) { + return io.grpc.stub.ClientCalls.blockingUnaryCall( + getChannel(), getDeleteLockMethod(), getCallOptions(), request); + } + + /** + * + * + *
+     * Checks the state of a lock specified by the lock ID.
+     * 
+ */ + public com.google.cloud.bigquery.biglake.v1alpha1.Lock checkLock( + com.google.cloud.bigquery.biglake.v1alpha1.CheckLockRequest request) { + return io.grpc.stub.ClientCalls.blockingUnaryCall( + getChannel(), getCheckLockMethod(), getCallOptions(), request); + } + + /** + * + * + *
+     * Lists all locks in a specified database.
+     * 
+ */ + public com.google.cloud.bigquery.biglake.v1alpha1.ListLocksResponse listLocks( + com.google.cloud.bigquery.biglake.v1alpha1.ListLocksRequest request) { + return io.grpc.stub.ClientCalls.blockingUnaryCall( + getChannel(), getListLocksMethod(), getCallOptions(), request); + } + } + + /** + * + * + *
+   * BigLake Metastore is a serverless, highly available, multi-tenant runtime
+   * metastore for Google Cloud Data Analytics products.
+   * The BigLake Metastore API defines the following resource model:
+   * * A collection of Google Cloud projects: `/projects/*`
+   * * Each project has a collection of available locations: `/locations/*`
+   * * Each location has a collection of catalogs: `/catalogs/*`
+   * * Each catalog has a collection of databases: `/databases/*`
+   * * Each database has a collection of tables: `/tables/*`
+   * 
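+   *
+   * <p>For illustration only, a minimal sketch of a future-style call; it assumes an existing
+   * {@code io.grpc.ManagedChannel} named {@code channel} and uses a placeholder resource name:
+   * <pre>{@code
+   * MetastoreServiceGrpc.MetastoreServiceFutureStub futureStub =
+   *     MetastoreServiceGrpc.newFutureStub(channel);
+   * com.google.common.util.concurrent.ListenableFuture<com.google.cloud.bigquery.biglake.v1alpha1.Catalog>
+   *     catalogFuture =
+   *         futureStub.getCatalog(
+   *             com.google.cloud.bigquery.biglake.v1alpha1.GetCatalogRequest.newBuilder()
+   *                 .setName("projects/my-project/locations/us/catalogs/my-catalog")
+   *                 .build());
+   * // getUnchecked blocks until the call completes and rethrows failures unchecked.
+   * com.google.cloud.bigquery.biglake.v1alpha1.Catalog catalog =
+   *     com.google.common.util.concurrent.Futures.getUnchecked(catalogFuture);
+   * }</pre>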
+   */
+  public static final class MetastoreServiceFutureStub
+      extends io.grpc.stub.AbstractFutureStub<MetastoreServiceFutureStub> {
+    private MetastoreServiceFutureStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
+      super(channel, callOptions);
+    }
+
+    @java.lang.Override
+    protected MetastoreServiceFutureStub build(
+        io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
+      return new MetastoreServiceFutureStub(channel, callOptions);
+    }
+
+    /**
+     *
+     *
+     *
+     * Creates a new catalog.
+     * 
+ */ + public com.google.common.util.concurrent.ListenableFuture< + com.google.cloud.bigquery.biglake.v1alpha1.Catalog> + createCatalog(com.google.cloud.bigquery.biglake.v1alpha1.CreateCatalogRequest request) { + return io.grpc.stub.ClientCalls.futureUnaryCall( + getChannel().newCall(getCreateCatalogMethod(), getCallOptions()), request); + } + + /** + * + * + *
+     * Deletes an existing catalog specified by the catalog ID.
+     * 
+ */ + public com.google.common.util.concurrent.ListenableFuture< + com.google.cloud.bigquery.biglake.v1alpha1.Catalog> + deleteCatalog(com.google.cloud.bigquery.biglake.v1alpha1.DeleteCatalogRequest request) { + return io.grpc.stub.ClientCalls.futureUnaryCall( + getChannel().newCall(getDeleteCatalogMethod(), getCallOptions()), request); + } + + /** + * + * + *
+     * Gets the catalog specified by the resource name.
+     * 
+ */ + public com.google.common.util.concurrent.ListenableFuture< + com.google.cloud.bigquery.biglake.v1alpha1.Catalog> + getCatalog(com.google.cloud.bigquery.biglake.v1alpha1.GetCatalogRequest request) { + return io.grpc.stub.ClientCalls.futureUnaryCall( + getChannel().newCall(getGetCatalogMethod(), getCallOptions()), request); + } + + /** + * + * + *
+     * Lists all catalogs in a specified project.
+     * 
+ */ + public com.google.common.util.concurrent.ListenableFuture< + com.google.cloud.bigquery.biglake.v1alpha1.ListCatalogsResponse> + listCatalogs(com.google.cloud.bigquery.biglake.v1alpha1.ListCatalogsRequest request) { + return io.grpc.stub.ClientCalls.futureUnaryCall( + getChannel().newCall(getListCatalogsMethod(), getCallOptions()), request); + } + + /** + * + * + *
+     * Creates a new database.
+     * 
+ */ + public com.google.common.util.concurrent.ListenableFuture< + com.google.cloud.bigquery.biglake.v1alpha1.Database> + createDatabase(com.google.cloud.bigquery.biglake.v1alpha1.CreateDatabaseRequest request) { + return io.grpc.stub.ClientCalls.futureUnaryCall( + getChannel().newCall(getCreateDatabaseMethod(), getCallOptions()), request); + } + + /** + * + * + *
+     * Deletes an existing database specified by the database ID.
+     * 
+ */ + public com.google.common.util.concurrent.ListenableFuture< + com.google.cloud.bigquery.biglake.v1alpha1.Database> + deleteDatabase(com.google.cloud.bigquery.biglake.v1alpha1.DeleteDatabaseRequest request) { + return io.grpc.stub.ClientCalls.futureUnaryCall( + getChannel().newCall(getDeleteDatabaseMethod(), getCallOptions()), request); + } + + /** + * + * + *
+     * Updates an existing database specified by the database ID.
+     * 
+ */ + public com.google.common.util.concurrent.ListenableFuture< + com.google.cloud.bigquery.biglake.v1alpha1.Database> + updateDatabase(com.google.cloud.bigquery.biglake.v1alpha1.UpdateDatabaseRequest request) { + return io.grpc.stub.ClientCalls.futureUnaryCall( + getChannel().newCall(getUpdateDatabaseMethod(), getCallOptions()), request); + } + + /** + * + * + *
+     * Gets the database specified by the resource name.
+     * 
+ */ + public com.google.common.util.concurrent.ListenableFuture< + com.google.cloud.bigquery.biglake.v1alpha1.Database> + getDatabase(com.google.cloud.bigquery.biglake.v1alpha1.GetDatabaseRequest request) { + return io.grpc.stub.ClientCalls.futureUnaryCall( + getChannel().newCall(getGetDatabaseMethod(), getCallOptions()), request); + } + + /** + * + * + *
+     * Lists all databases in a specified catalog.
+     * 
+ */ + public com.google.common.util.concurrent.ListenableFuture< + com.google.cloud.bigquery.biglake.v1alpha1.ListDatabasesResponse> + listDatabases(com.google.cloud.bigquery.biglake.v1alpha1.ListDatabasesRequest request) { + return io.grpc.stub.ClientCalls.futureUnaryCall( + getChannel().newCall(getListDatabasesMethod(), getCallOptions()), request); + } + + /** + * + * + *
+     * Creates a new table.
+     * 
+ */ + public com.google.common.util.concurrent.ListenableFuture< + com.google.cloud.bigquery.biglake.v1alpha1.Table> + createTable(com.google.cloud.bigquery.biglake.v1alpha1.CreateTableRequest request) { + return io.grpc.stub.ClientCalls.futureUnaryCall( + getChannel().newCall(getCreateTableMethod(), getCallOptions()), request); + } + + /** + * + * + *
+     * Deletes an existing table specified by the table ID.
+     * 
+ */ + public com.google.common.util.concurrent.ListenableFuture< + com.google.cloud.bigquery.biglake.v1alpha1.Table> + deleteTable(com.google.cloud.bigquery.biglake.v1alpha1.DeleteTableRequest request) { + return io.grpc.stub.ClientCalls.futureUnaryCall( + getChannel().newCall(getDeleteTableMethod(), getCallOptions()), request); + } + + /** + * + * + *
+     * Updates an existing table specified by the table ID.
+     * 
+ */ + public com.google.common.util.concurrent.ListenableFuture< + com.google.cloud.bigquery.biglake.v1alpha1.Table> + updateTable(com.google.cloud.bigquery.biglake.v1alpha1.UpdateTableRequest request) { + return io.grpc.stub.ClientCalls.futureUnaryCall( + getChannel().newCall(getUpdateTableMethod(), getCallOptions()), request); + } + + /** + * + * + *
+     * Gets the table specified by the resource name.
+     * 
+ */ + public com.google.common.util.concurrent.ListenableFuture< + com.google.cloud.bigquery.biglake.v1alpha1.Table> + getTable(com.google.cloud.bigquery.biglake.v1alpha1.GetTableRequest request) { + return io.grpc.stub.ClientCalls.futureUnaryCall( + getChannel().newCall(getGetTableMethod(), getCallOptions()), request); + } + + /** + * + * + *
+     * Lists all tables in a specified database.
+     * 
+ */ + public com.google.common.util.concurrent.ListenableFuture< + com.google.cloud.bigquery.biglake.v1alpha1.ListTablesResponse> + listTables(com.google.cloud.bigquery.biglake.v1alpha1.ListTablesRequest request) { + return io.grpc.stub.ClientCalls.futureUnaryCall( + getChannel().newCall(getListTablesMethod(), getCallOptions()), request); + } + + /** + * + * + *
+     * Creates a new lock.
+     * 
+ */ + public com.google.common.util.concurrent.ListenableFuture< + com.google.cloud.bigquery.biglake.v1alpha1.Lock> + createLock(com.google.cloud.bigquery.biglake.v1alpha1.CreateLockRequest request) { + return io.grpc.stub.ClientCalls.futureUnaryCall( + getChannel().newCall(getCreateLockMethod(), getCallOptions()), request); + } + + /** + * + * + *
+     * Deletes an existing lock specified by the lock ID.
+     * 
+ */ + public com.google.common.util.concurrent.ListenableFuture<com.google.protobuf.Empty> deleteLock( + com.google.cloud.bigquery.biglake.v1alpha1.DeleteLockRequest request) { + return io.grpc.stub.ClientCalls.futureUnaryCall( + getChannel().newCall(getDeleteLockMethod(), getCallOptions()), request); + } + + /** + * + * + *
+     * Checks the state of a lock specified by the lock ID.
+     * 
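+     * A sketch assuming an already-built futureStub; the lock resource name format is
+     * extrapolated from the catalog and database formats documented elsewhere in this file:
+     *
+     *   CheckLockRequest request =
+     *       CheckLockRequest.newBuilder()
+     *           .setName("projects/my-project/locations/us/catalogs/my_catalog"
+     *               + "/databases/my_database/locks/123")
+     *           .build();
+     *   Lock lock = futureStub.checkLock(request).get();
+     *   System.out.println(lock); // the returned Lock carries its current state
+     *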
+ */ + public com.google.common.util.concurrent.ListenableFuture< + com.google.cloud.bigquery.biglake.v1alpha1.Lock> + checkLock(com.google.cloud.bigquery.biglake.v1alpha1.CheckLockRequest request) { + return io.grpc.stub.ClientCalls.futureUnaryCall( + getChannel().newCall(getCheckLockMethod(), getCallOptions()), request); + } + + /** + * + * + *
+     * Lists all locks in a specified database.
+     * 
+ */ + public com.google.common.util.concurrent.ListenableFuture< + com.google.cloud.bigquery.biglake.v1alpha1.ListLocksResponse> + listLocks(com.google.cloud.bigquery.biglake.v1alpha1.ListLocksRequest request) { + return io.grpc.stub.ClientCalls.futureUnaryCall( + getChannel().newCall(getListLocksMethod(), getCallOptions()), request); + } + } + + private static final int METHODID_CREATE_CATALOG = 0; + private static final int METHODID_DELETE_CATALOG = 1; + private static final int METHODID_GET_CATALOG = 2; + private static final int METHODID_LIST_CATALOGS = 3; + private static final int METHODID_CREATE_DATABASE = 4; + private static final int METHODID_DELETE_DATABASE = 5; + private static final int METHODID_UPDATE_DATABASE = 6; + private static final int METHODID_GET_DATABASE = 7; + private static final int METHODID_LIST_DATABASES = 8; + private static final int METHODID_CREATE_TABLE = 9; + private static final int METHODID_DELETE_TABLE = 10; + private static final int METHODID_UPDATE_TABLE = 11; + private static final int METHODID_GET_TABLE = 12; + private static final int METHODID_LIST_TABLES = 13; + private static final int METHODID_CREATE_LOCK = 14; + private static final int METHODID_DELETE_LOCK = 15; + private static final int METHODID_CHECK_LOCK = 16; + private static final int METHODID_LIST_LOCKS = 17; + + private static final class MethodHandlers + implements io.grpc.stub.ServerCalls.UnaryMethod, + io.grpc.stub.ServerCalls.ServerStreamingMethod, + io.grpc.stub.ServerCalls.ClientStreamingMethod, + io.grpc.stub.ServerCalls.BidiStreamingMethod { + private final MetastoreServiceImplBase serviceImpl; + private final int methodId; + + MethodHandlers(MetastoreServiceImplBase serviceImpl, int methodId) { + this.serviceImpl = serviceImpl; + this.methodId = methodId; + } + + @java.lang.Override + @java.lang.SuppressWarnings("unchecked") + public void invoke(Req request, io.grpc.stub.StreamObserver responseObserver) { + switch (methodId) { + case METHODID_CREATE_CATALOG: + serviceImpl.createCatalog( + (com.google.cloud.bigquery.biglake.v1alpha1.CreateCatalogRequest) request, + (io.grpc.stub.StreamObserver) + responseObserver); + break; + case METHODID_DELETE_CATALOG: + serviceImpl.deleteCatalog( + (com.google.cloud.bigquery.biglake.v1alpha1.DeleteCatalogRequest) request, + (io.grpc.stub.StreamObserver) + responseObserver); + break; + case METHODID_GET_CATALOG: + serviceImpl.getCatalog( + (com.google.cloud.bigquery.biglake.v1alpha1.GetCatalogRequest) request, + (io.grpc.stub.StreamObserver) + responseObserver); + break; + case METHODID_LIST_CATALOGS: + serviceImpl.listCatalogs( + (com.google.cloud.bigquery.biglake.v1alpha1.ListCatalogsRequest) request, + (io.grpc.stub.StreamObserver< + com.google.cloud.bigquery.biglake.v1alpha1.ListCatalogsResponse>) + responseObserver); + break; + case METHODID_CREATE_DATABASE: + serviceImpl.createDatabase( + (com.google.cloud.bigquery.biglake.v1alpha1.CreateDatabaseRequest) request, + (io.grpc.stub.StreamObserver) + responseObserver); + break; + case METHODID_DELETE_DATABASE: + serviceImpl.deleteDatabase( + (com.google.cloud.bigquery.biglake.v1alpha1.DeleteDatabaseRequest) request, + (io.grpc.stub.StreamObserver) + responseObserver); + break; + case METHODID_UPDATE_DATABASE: + serviceImpl.updateDatabase( + (com.google.cloud.bigquery.biglake.v1alpha1.UpdateDatabaseRequest) request, + (io.grpc.stub.StreamObserver) + responseObserver); + break; + case METHODID_GET_DATABASE: + serviceImpl.getDatabase( + 
(com.google.cloud.bigquery.biglake.v1alpha1.GetDatabaseRequest) request, + (io.grpc.stub.StreamObserver) + responseObserver); + break; + case METHODID_LIST_DATABASES: + serviceImpl.listDatabases( + (com.google.cloud.bigquery.biglake.v1alpha1.ListDatabasesRequest) request, + (io.grpc.stub.StreamObserver< + com.google.cloud.bigquery.biglake.v1alpha1.ListDatabasesResponse>) + responseObserver); + break; + case METHODID_CREATE_TABLE: + serviceImpl.createTable( + (com.google.cloud.bigquery.biglake.v1alpha1.CreateTableRequest) request, + (io.grpc.stub.StreamObserver) + responseObserver); + break; + case METHODID_DELETE_TABLE: + serviceImpl.deleteTable( + (com.google.cloud.bigquery.biglake.v1alpha1.DeleteTableRequest) request, + (io.grpc.stub.StreamObserver) + responseObserver); + break; + case METHODID_UPDATE_TABLE: + serviceImpl.updateTable( + (com.google.cloud.bigquery.biglake.v1alpha1.UpdateTableRequest) request, + (io.grpc.stub.StreamObserver) + responseObserver); + break; + case METHODID_GET_TABLE: + serviceImpl.getTable( + (com.google.cloud.bigquery.biglake.v1alpha1.GetTableRequest) request, + (io.grpc.stub.StreamObserver) + responseObserver); + break; + case METHODID_LIST_TABLES: + serviceImpl.listTables( + (com.google.cloud.bigquery.biglake.v1alpha1.ListTablesRequest) request, + (io.grpc.stub.StreamObserver< + com.google.cloud.bigquery.biglake.v1alpha1.ListTablesResponse>) + responseObserver); + break; + case METHODID_CREATE_LOCK: + serviceImpl.createLock( + (com.google.cloud.bigquery.biglake.v1alpha1.CreateLockRequest) request, + (io.grpc.stub.StreamObserver) + responseObserver); + break; + case METHODID_DELETE_LOCK: + serviceImpl.deleteLock( + (com.google.cloud.bigquery.biglake.v1alpha1.DeleteLockRequest) request, + (io.grpc.stub.StreamObserver) responseObserver); + break; + case METHODID_CHECK_LOCK: + serviceImpl.checkLock( + (com.google.cloud.bigquery.biglake.v1alpha1.CheckLockRequest) request, + (io.grpc.stub.StreamObserver) + responseObserver); + break; + case METHODID_LIST_LOCKS: + serviceImpl.listLocks( + (com.google.cloud.bigquery.biglake.v1alpha1.ListLocksRequest) request, + (io.grpc.stub.StreamObserver< + com.google.cloud.bigquery.biglake.v1alpha1.ListLocksResponse>) + responseObserver); + break; + default: + throw new AssertionError(); + } + } + + @java.lang.Override + @java.lang.SuppressWarnings("unchecked") + public io.grpc.stub.StreamObserver invoke( + io.grpc.stub.StreamObserver responseObserver) { + switch (methodId) { + default: + throw new AssertionError(); + } + } + } + + private abstract static class MetastoreServiceBaseDescriptorSupplier + implements io.grpc.protobuf.ProtoFileDescriptorSupplier, + io.grpc.protobuf.ProtoServiceDescriptorSupplier { + MetastoreServiceBaseDescriptorSupplier() {} + + @java.lang.Override + public com.google.protobuf.Descriptors.FileDescriptor getFileDescriptor() { + return com.google.cloud.bigquery.biglake.v1alpha1.MetastoreProto.getDescriptor(); + } + + @java.lang.Override + public com.google.protobuf.Descriptors.ServiceDescriptor getServiceDescriptor() { + return getFileDescriptor().findServiceByName("MetastoreService"); + } + } + + private static final class MetastoreServiceFileDescriptorSupplier + extends MetastoreServiceBaseDescriptorSupplier { + MetastoreServiceFileDescriptorSupplier() {} + } + + private static final class MetastoreServiceMethodDescriptorSupplier + extends MetastoreServiceBaseDescriptorSupplier + implements io.grpc.protobuf.ProtoMethodDescriptorSupplier { + private final String methodName; + + 
MetastoreServiceMethodDescriptorSupplier(String methodName) { + this.methodName = methodName; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.MethodDescriptor getMethodDescriptor() { + return getServiceDescriptor().findMethodByName(methodName); + } + } + + private static volatile io.grpc.ServiceDescriptor serviceDescriptor; + + public static io.grpc.ServiceDescriptor getServiceDescriptor() { + io.grpc.ServiceDescriptor result = serviceDescriptor; + if (result == null) { + synchronized (MetastoreServiceGrpc.class) { + result = serviceDescriptor; + if (result == null) { + serviceDescriptor = + result = + io.grpc.ServiceDescriptor.newBuilder(SERVICE_NAME) + .setSchemaDescriptor(new MetastoreServiceFileDescriptorSupplier()) + .addMethod(getCreateCatalogMethod()) + .addMethod(getDeleteCatalogMethod()) + .addMethod(getGetCatalogMethod()) + .addMethod(getListCatalogsMethod()) + .addMethod(getCreateDatabaseMethod()) + .addMethod(getDeleteDatabaseMethod()) + .addMethod(getUpdateDatabaseMethod()) + .addMethod(getGetDatabaseMethod()) + .addMethod(getListDatabasesMethod()) + .addMethod(getCreateTableMethod()) + .addMethod(getDeleteTableMethod()) + .addMethod(getUpdateTableMethod()) + .addMethod(getGetTableMethod()) + .addMethod(getListTablesMethod()) + .addMethod(getCreateLockMethod()) + .addMethod(getDeleteLockMethod()) + .addMethod(getCheckLockMethod()) + .addMethod(getListLocksMethod()) + .build(); + } + } + } + return result; + } +} diff --git a/java-biglake/owlbot.py b/java-biglake/owlbot.py new file mode 100644 index 000000000000..03c0f6686d3f --- /dev/null +++ b/java-biglake/owlbot.py @@ -0,0 +1,36 @@ +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import synthtool as s +from synthtool.languages import java + + +for library in s.get_staging_dirs(): + # put any special-case replacements here + s.move(library) + +s.remove_staging_dirs() +java.common_templates(monorepo=True, excludes=[ + ".github/*", + ".kokoro/*", + "samples/*", + "CODE_OF_CONDUCT.md", + "CONTRIBUTING.md", + "LICENSE", + "SECURITY.md", + "java.header", + "license-checks.xml", + "renovate.json", + ".gitignore" +]) \ No newline at end of file diff --git a/java-biglake/pom.xml b/java-biglake/pom.xml new file mode 100644 index 000000000000..13cdc2ede00c --- /dev/null +++ b/java-biglake/pom.xml @@ -0,0 +1,53 @@ + + + 4.0.0 + com.google.cloud + google-cloud-biglake-parent + pom + 0.0.1-SNAPSHOT + Google BigLake Parent + + Java idiomatic client for Google Cloud Platform services. 
+ + + + com.google.cloud + google-cloud-jar-parent + 1.7.0-SNAPSHOT + ../google-cloud-jar-parent/pom.xml + + + + UTF-8 + UTF-8 + github + google-cloud-biglake-parent + + + + + + com.google.cloud + google-cloud-biglake + 0.0.1-SNAPSHOT + + + com.google.api.grpc + grpc-google-cloud-biglake-v1alpha1 + 0.0.1-SNAPSHOT + + + com.google.api.grpc + proto-google-cloud-biglake-v1alpha1 + 0.0.1-SNAPSHOT + + + + + + google-cloud-biglake + grpc-google-cloud-biglake-v1alpha1 + proto-google-cloud-biglake-v1alpha1 + google-cloud-biglake-bom + + diff --git a/java-biglake/proto-google-cloud-biglake-v1alpha1/clirr-ignored-differences.xml b/java-biglake/proto-google-cloud-biglake-v1alpha1/clirr-ignored-differences.xml new file mode 100644 index 000000000000..ce307cd22dc9 --- /dev/null +++ b/java-biglake/proto-google-cloud-biglake-v1alpha1/clirr-ignored-differences.xml @@ -0,0 +1,19 @@ + + + + + 7012 + com/google/cloud/bigquery/biglake/v1alpha1/*OrBuilder + * get*(*) + + + 7012 + com/google/cloud/bigquery/biglake/v1alpha1/*OrBuilder + boolean contains*(*) + + + 7012 + com/google/cloud/bigquery/biglake/v1alpha1/*OrBuilder + boolean has*(*) + + diff --git a/java-biglake/proto-google-cloud-biglake-v1alpha1/pom.xml b/java-biglake/proto-google-cloud-biglake-v1alpha1/pom.xml new file mode 100644 index 000000000000..e9beae79a489 --- /dev/null +++ b/java-biglake/proto-google-cloud-biglake-v1alpha1/pom.xml @@ -0,0 +1,37 @@ + + 4.0.0 + com.google.api.grpc + proto-google-cloud-biglake-v1alpha1 + 0.0.1-SNAPSHOT + proto-google-cloud-biglake-v1alpha1 + Proto library for google-cloud-biglake + + com.google.cloud + google-cloud-biglake-parent + 0.0.1-SNAPSHOT + + + + com.google.protobuf + protobuf-java + + + com.google.api.grpc + proto-google-common-protos + + + com.google.api.grpc + proto-google-iam-v1 + + + com.google.api + api-common + + + com.google.guava + guava + + + diff --git a/java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/Catalog.java b/java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/Catalog.java new file mode 100644 index 000000000000..3607a85932e2 --- /dev/null +++ b/java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/Catalog.java @@ -0,0 +1,1805 @@ +/* + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: google/cloud/bigquery/biglake/v1alpha1/metastore.proto + +package com.google.cloud.bigquery.biglake.v1alpha1; + +/** + * + * + *
+ * Catalog is the container of databases.
+ * 
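+ * A small sketch of reading the output-only fields defined below from a Catalog returned
+ * by the service (the catalog variable is assumed to come from a GetCatalog-style call):
+ *
+ *   String name = catalog.getName();
+ *   if (catalog.hasDeleteTime()) {
+ *     // deleted catalogs also carry the time at which they are considered expired
+ *     System.out.println(catalog.getExpireTime());
+ *   }
+ *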
+ * + * Protobuf type {@code google.cloud.bigquery.biglake.v1alpha1.Catalog} + */ +public final class Catalog extends com.google.protobuf.GeneratedMessageV3 + implements + // @@protoc_insertion_point(message_implements:google.cloud.bigquery.biglake.v1alpha1.Catalog) + CatalogOrBuilder { + private static final long serialVersionUID = 0L; + // Use Catalog.newBuilder() to construct. + private Catalog(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + + private Catalog() { + name_ = ""; + } + + @java.lang.Override + @SuppressWarnings({"unused"}) + protected java.lang.Object newInstance(UnusedPrivateParameter unused) { + return new Catalog(); + } + + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet getUnknownFields() { + return this.unknownFields; + } + + public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { + return com.google.cloud.bigquery.biglake.v1alpha1.MetastoreProto + .internal_static_google_cloud_bigquery_biglake_v1alpha1_Catalog_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return com.google.cloud.bigquery.biglake.v1alpha1.MetastoreProto + .internal_static_google_cloud_bigquery_biglake_v1alpha1_Catalog_fieldAccessorTable + .ensureFieldAccessorsInitialized( + com.google.cloud.bigquery.biglake.v1alpha1.Catalog.class, + com.google.cloud.bigquery.biglake.v1alpha1.Catalog.Builder.class); + } + + public static final int NAME_FIELD_NUMBER = 1; + + @SuppressWarnings("serial") + private volatile java.lang.Object name_ = ""; + /** + * + * + *
+   * Output only. The resource name.
+   * Format:
+   * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}
+   * 
+ * + * + * string name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... } + * + * + * @return The name. + */ + @java.lang.Override + public java.lang.String getName() { + java.lang.Object ref = name_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + name_ = s; + return s; + } + } + /** + * + * + *
+   * Output only. The resource name.
+   * Format:
+   * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}
+   * 
+ * + * + * string name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... } + * + * + * @return The bytes for name. + */ + @java.lang.Override + public com.google.protobuf.ByteString getNameBytes() { + java.lang.Object ref = name_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); + name_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int CREATE_TIME_FIELD_NUMBER = 2; + private com.google.protobuf.Timestamp createTime_; + /** + * + * + *
+   * Output only. The creation time of the catalog.
+   * 
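+   * A sketch of converting this protobuf Timestamp to java.time using only the standard
+   * getSeconds()/getNanos() accessors (catalog is assumed to be a populated Catalog):
+   *
+   *   java.time.Instant created =
+   *       java.time.Instant.ofEpochSecond(
+   *           catalog.getCreateTime().getSeconds(), catalog.getCreateTime().getNanos());
+   *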
+ * + * .google.protobuf.Timestamp create_time = 2 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + * + * @return Whether the createTime field is set. + */ + @java.lang.Override + public boolean hasCreateTime() { + return createTime_ != null; + } + /** + * + * + *
+   * Output only. The creation time of the catalog.
+   * 
+ * + * .google.protobuf.Timestamp create_time = 2 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + * + * @return The createTime. + */ + @java.lang.Override + public com.google.protobuf.Timestamp getCreateTime() { + return createTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : createTime_; + } + /** + * + * + *
+   * Output only. The creation time of the catalog.
+   * 
+ * + * .google.protobuf.Timestamp create_time = 2 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + @java.lang.Override + public com.google.protobuf.TimestampOrBuilder getCreateTimeOrBuilder() { + return createTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : createTime_; + } + + public static final int UPDATE_TIME_FIELD_NUMBER = 3; + private com.google.protobuf.Timestamp updateTime_; + /** + * + * + *
+   * Output only. The last modification time of the catalog.
+   * 
+ * + * .google.protobuf.Timestamp update_time = 3 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + * + * @return Whether the updateTime field is set. + */ + @java.lang.Override + public boolean hasUpdateTime() { + return updateTime_ != null; + } + /** + * + * + *
+   * Output only. The last modification time of the catalog.
+   * 
+ * + * .google.protobuf.Timestamp update_time = 3 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + * + * @return The updateTime. + */ + @java.lang.Override + public com.google.protobuf.Timestamp getUpdateTime() { + return updateTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : updateTime_; + } + /** + * + * + *
+   * Output only. The last modification time of the catalog.
+   * 
+ * + * .google.protobuf.Timestamp update_time = 3 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + @java.lang.Override + public com.google.protobuf.TimestampOrBuilder getUpdateTimeOrBuilder() { + return updateTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : updateTime_; + } + + public static final int DELETE_TIME_FIELD_NUMBER = 4; + private com.google.protobuf.Timestamp deleteTime_; + /** + * + * + *
+   * Output only. The deletion time of the catalog. Only set after the catalog
+   * is deleted.
+   * 
+ * + * .google.protobuf.Timestamp delete_time = 4 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + * + * @return Whether the deleteTime field is set. + */ + @java.lang.Override + public boolean hasDeleteTime() { + return deleteTime_ != null; + } + /** + * + * + *
+   * Output only. The deletion time of the catalog. Only set after the catalog
+   * is deleted.
+   * 
+ * + * .google.protobuf.Timestamp delete_time = 4 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + * + * @return The deleteTime. + */ + @java.lang.Override + public com.google.protobuf.Timestamp getDeleteTime() { + return deleteTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : deleteTime_; + } + /** + * + * + *
+   * Output only. The deletion time of the catalog. Only set after the catalog
+   * is deleted.
+   * 
+ * + * .google.protobuf.Timestamp delete_time = 4 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + @java.lang.Override + public com.google.protobuf.TimestampOrBuilder getDeleteTimeOrBuilder() { + return deleteTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : deleteTime_; + } + + public static final int EXPIRE_TIME_FIELD_NUMBER = 5; + private com.google.protobuf.Timestamp expireTime_; + /** + * + * + *
+   * Output only. The time when this catalog is considered expired. Only set
+   * after the catalog is deleted.
+   * 
+ * + * .google.protobuf.Timestamp expire_time = 5 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + * + * @return Whether the expireTime field is set. + */ + @java.lang.Override + public boolean hasExpireTime() { + return expireTime_ != null; + } + /** + * + * + *
+   * Output only. The time when this catalog is considered expired. Only set
+   * after the catalog is deleted.
+   * 
+ * + * .google.protobuf.Timestamp expire_time = 5 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + * + * @return The expireTime. + */ + @java.lang.Override + public com.google.protobuf.Timestamp getExpireTime() { + return expireTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : expireTime_; + } + /** + * + * + *
+   * Output only. The time when this catalog is considered expired. Only set
+   * after the catalog is deleted.
+   * 
+ * + * .google.protobuf.Timestamp expire_time = 5 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + @java.lang.Override + public com.google.protobuf.TimestampOrBuilder getExpireTimeOrBuilder() { + return expireTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : expireTime_; + } + + private byte memoizedIsInitialized = -1; + + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 1, name_); + } + if (createTime_ != null) { + output.writeMessage(2, getCreateTime()); + } + if (updateTime_ != null) { + output.writeMessage(3, getUpdateTime()); + } + if (deleteTime_ != null) { + output.writeMessage(4, getDeleteTime()); + } + if (expireTime_ != null) { + output.writeMessage(5, getExpireTime()); + } + getUnknownFields().writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, name_); + } + if (createTime_ != null) { + size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getCreateTime()); + } + if (updateTime_ != null) { + size += com.google.protobuf.CodedOutputStream.computeMessageSize(3, getUpdateTime()); + } + if (deleteTime_ != null) { + size += com.google.protobuf.CodedOutputStream.computeMessageSize(4, getDeleteTime()); + } + if (expireTime_ != null) { + size += com.google.protobuf.CodedOutputStream.computeMessageSize(5, getExpireTime()); + } + size += getUnknownFields().getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof com.google.cloud.bigquery.biglake.v1alpha1.Catalog)) { + return super.equals(obj); + } + com.google.cloud.bigquery.biglake.v1alpha1.Catalog other = + (com.google.cloud.bigquery.biglake.v1alpha1.Catalog) obj; + + if (!getName().equals(other.getName())) return false; + if (hasCreateTime() != other.hasCreateTime()) return false; + if (hasCreateTime()) { + if (!getCreateTime().equals(other.getCreateTime())) return false; + } + if (hasUpdateTime() != other.hasUpdateTime()) return false; + if (hasUpdateTime()) { + if (!getUpdateTime().equals(other.getUpdateTime())) return false; + } + if (hasDeleteTime() != other.hasDeleteTime()) return false; + if (hasDeleteTime()) { + if (!getDeleteTime().equals(other.getDeleteTime())) return false; + } + if (hasExpireTime() != other.hasExpireTime()) return false; + if (hasExpireTime()) { + if (!getExpireTime().equals(other.getExpireTime())) return false; + } + if (!getUnknownFields().equals(other.getUnknownFields())) return false; + return true; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + hash = (37 * hash) + NAME_FIELD_NUMBER; + hash = (53 * hash) + getName().hashCode(); + if (hasCreateTime()) { + hash = (37 * hash) + CREATE_TIME_FIELD_NUMBER; + hash = (53 * 
hash) + getCreateTime().hashCode(); + } + if (hasUpdateTime()) { + hash = (37 * hash) + UPDATE_TIME_FIELD_NUMBER; + hash = (53 * hash) + getUpdateTime().hashCode(); + } + if (hasDeleteTime()) { + hash = (37 * hash) + DELETE_TIME_FIELD_NUMBER; + hash = (53 * hash) + getDeleteTime().hashCode(); + } + if (hasExpireTime()) { + hash = (37 * hash) + EXPIRE_TIME_FIELD_NUMBER; + hash = (53 * hash) + getExpireTime().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.Catalog parseFrom( + java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.Catalog parseFrom( + java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.Catalog parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.Catalog parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.Catalog parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.Catalog parseFrom( + byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.Catalog parseFrom( + java.io.InputStream input) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.Catalog parseFrom( + java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException( + PARSER, input, extensionRegistry); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.Catalog parseDelimitedFrom( + java.io.InputStream input) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.Catalog parseDelimitedFrom( + java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( + PARSER, input, extensionRegistry); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.Catalog parseFrom( + com.google.protobuf.CodedInputStream input) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.Catalog parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + 
throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException( + PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { + return newBuilder(); + } + + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + + public static Builder newBuilder(com.google.cloud.bigquery.biglake.v1alpha1.Catalog prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * + * + *
+   * Catalog is the container of databases.
+   * 
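+   * A minimal builder sketch (illustration only: every field on Catalog is output only, so
+   * in practice these values are populated by the server rather than set by hand; the
+   * timestamp value is arbitrary):
+   *
+   *   Catalog catalog =
+   *       Catalog.newBuilder()
+   *           .setName("projects/my-project/locations/us/catalogs/my_catalog")
+   *           .setCreateTime(Timestamp.newBuilder().setSeconds(1679000000L))
+   *           .build();
+   *   Catalog copy = catalog.toBuilder().clearDeleteTime().build();
+   *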
+ * + * Protobuf type {@code google.cloud.bigquery.biglake.v1alpha1.Catalog} + */ + public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder + implements + // @@protoc_insertion_point(builder_implements:google.cloud.bigquery.biglake.v1alpha1.Catalog) + com.google.cloud.bigquery.biglake.v1alpha1.CatalogOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { + return com.google.cloud.bigquery.biglake.v1alpha1.MetastoreProto + .internal_static_google_cloud_bigquery_biglake_v1alpha1_Catalog_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return com.google.cloud.bigquery.biglake.v1alpha1.MetastoreProto + .internal_static_google_cloud_bigquery_biglake_v1alpha1_Catalog_fieldAccessorTable + .ensureFieldAccessorsInitialized( + com.google.cloud.bigquery.biglake.v1alpha1.Catalog.class, + com.google.cloud.bigquery.biglake.v1alpha1.Catalog.Builder.class); + } + + // Construct using com.google.cloud.bigquery.biglake.v1alpha1.Catalog.newBuilder() + private Builder() {} + + private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + } + + @java.lang.Override + public Builder clear() { + super.clear(); + bitField0_ = 0; + name_ = ""; + createTime_ = null; + if (createTimeBuilder_ != null) { + createTimeBuilder_.dispose(); + createTimeBuilder_ = null; + } + updateTime_ = null; + if (updateTimeBuilder_ != null) { + updateTimeBuilder_.dispose(); + updateTimeBuilder_ = null; + } + deleteTime_ = null; + if (deleteTimeBuilder_ != null) { + deleteTimeBuilder_.dispose(); + deleteTimeBuilder_ = null; + } + expireTime_ = null; + if (expireTimeBuilder_ != null) { + expireTimeBuilder_.dispose(); + expireTimeBuilder_ = null; + } + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { + return com.google.cloud.bigquery.biglake.v1alpha1.MetastoreProto + .internal_static_google_cloud_bigquery_biglake_v1alpha1_Catalog_descriptor; + } + + @java.lang.Override + public com.google.cloud.bigquery.biglake.v1alpha1.Catalog getDefaultInstanceForType() { + return com.google.cloud.bigquery.biglake.v1alpha1.Catalog.getDefaultInstance(); + } + + @java.lang.Override + public com.google.cloud.bigquery.biglake.v1alpha1.Catalog build() { + com.google.cloud.bigquery.biglake.v1alpha1.Catalog result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public com.google.cloud.bigquery.biglake.v1alpha1.Catalog buildPartial() { + com.google.cloud.bigquery.biglake.v1alpha1.Catalog result = + new com.google.cloud.bigquery.biglake.v1alpha1.Catalog(this); + if (bitField0_ != 0) { + buildPartial0(result); + } + onBuilt(); + return result; + } + + private void buildPartial0(com.google.cloud.bigquery.biglake.v1alpha1.Catalog result) { + int from_bitField0_ = bitField0_; + if (((from_bitField0_ & 0x00000001) != 0)) { + result.name_ = name_; + } + if (((from_bitField0_ & 0x00000002) != 0)) { + result.createTime_ = createTimeBuilder_ == null ? createTime_ : createTimeBuilder_.build(); + } + if (((from_bitField0_ & 0x00000004) != 0)) { + result.updateTime_ = updateTimeBuilder_ == null ? updateTime_ : updateTimeBuilder_.build(); + } + if (((from_bitField0_ & 0x00000008) != 0)) { + result.deleteTime_ = deleteTimeBuilder_ == null ? 
deleteTime_ : deleteTimeBuilder_.build(); + } + if (((from_bitField0_ & 0x00000010) != 0)) { + result.expireTime_ = expireTimeBuilder_ == null ? expireTime_ : expireTimeBuilder_.build(); + } + } + + @java.lang.Override + public Builder clone() { + return super.clone(); + } + + @java.lang.Override + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { + return super.setField(field, value); + } + + @java.lang.Override + public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { + return super.clearField(field); + } + + @java.lang.Override + public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return super.clearOneof(oneof); + } + + @java.lang.Override + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { + return super.setRepeatedField(field, index, value); + } + + @java.lang.Override + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { + return super.addRepeatedField(field, value); + } + + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof com.google.cloud.bigquery.biglake.v1alpha1.Catalog) { + return mergeFrom((com.google.cloud.bigquery.biglake.v1alpha1.Catalog) other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(com.google.cloud.bigquery.biglake.v1alpha1.Catalog other) { + if (other == com.google.cloud.bigquery.biglake.v1alpha1.Catalog.getDefaultInstance()) + return this; + if (!other.getName().isEmpty()) { + name_ = other.name_; + bitField0_ |= 0x00000001; + onChanged(); + } + if (other.hasCreateTime()) { + mergeCreateTime(other.getCreateTime()); + } + if (other.hasUpdateTime()) { + mergeUpdateTime(other.getUpdateTime()); + } + if (other.hasDeleteTime()) { + mergeDeleteTime(other.getDeleteTime()); + } + if (other.hasExpireTime()) { + mergeExpireTime(other.getExpireTime()); + } + this.mergeUnknownFields(other.getUnknownFields()); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 10: + { + name_ = input.readStringRequireUtf8(); + bitField0_ |= 0x00000001; + break; + } // case 10 + case 18: + { + input.readMessage(getCreateTimeFieldBuilder().getBuilder(), extensionRegistry); + bitField0_ |= 0x00000002; + break; + } // case 18 + case 26: + { + input.readMessage(getUpdateTimeFieldBuilder().getBuilder(), extensionRegistry); + bitField0_ |= 0x00000004; + break; + } // case 26 + case 34: + { + input.readMessage(getDeleteTimeFieldBuilder().getBuilder(), extensionRegistry); + bitField0_ |= 0x00000008; + break; + } // case 34 + case 42: + { + input.readMessage(getExpireTimeFieldBuilder().getBuilder(), extensionRegistry); + bitField0_ |= 0x00000010; + break; + } // case 42 + default: + { + if (!super.parseUnknownField(input, extensionRegistry, tag)) { + done = true; // was an endgroup tag + } + break; + } // default: + } // switch (tag) + } // while (!done) + } catch 
(com.google.protobuf.InvalidProtocolBufferException e) { + throw e.unwrapIOException(); + } finally { + onChanged(); + } // finally + return this; + } + + private int bitField0_; + + private java.lang.Object name_ = ""; + /** + * + * + *
+     * Output only. The resource name.
+     * Format:
+     * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}
+     * 
+ * + * + * string name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... } + * + * + * @return The name. + */ + public java.lang.String getName() { + java.lang.Object ref = name_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + name_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + * + * + *
+     * Output only. The resource name.
+     * Format:
+     * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}
+     * 
+ * + * + * string name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... } + * + * + * @return The bytes for name. + */ + public com.google.protobuf.ByteString getNameBytes() { + java.lang.Object ref = name_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); + name_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * + * + *
+     * Output only. The resource name.
+     * Format:
+     * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}
+     * 
+ * + * + * string name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... } + * + * + * @param value The name to set. + * @return This builder for chaining. + */ + public Builder setName(java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + name_ = value; + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + /** + * + * + *
+     * Output only. The resource name.
+     * Format:
+     * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}
+     * 
+ * + * + * string name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... } + * + * + * @return This builder for chaining. + */ + public Builder clearName() { + name_ = getDefaultInstance().getName(); + bitField0_ = (bitField0_ & ~0x00000001); + onChanged(); + return this; + } + /** + * + * + *
+     * Output only. The resource name.
+     * Format:
+     * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}
+     * 
+ * + * + * string name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... } + * + * + * @param value The bytes for name to set. + * @return This builder for chaining. + */ + public Builder setNameBytes(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + name_ = value; + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + + private com.google.protobuf.Timestamp createTime_; + private com.google.protobuf.SingleFieldBuilderV3< + com.google.protobuf.Timestamp, + com.google.protobuf.Timestamp.Builder, + com.google.protobuf.TimestampOrBuilder> + createTimeBuilder_; + /** + * + * + *
+     * Output only. The creation time of the catalog.
+     * 
+ * + * + * .google.protobuf.Timestamp create_time = 2 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + * + * @return Whether the createTime field is set. + */ + public boolean hasCreateTime() { + return ((bitField0_ & 0x00000002) != 0); + } + /** + * + * + *
+     * Output only. The creation time of the catalog.
+     * 
+ * + * + * .google.protobuf.Timestamp create_time = 2 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + * + * @return The createTime. + */ + public com.google.protobuf.Timestamp getCreateTime() { + if (createTimeBuilder_ == null) { + return createTime_ == null + ? com.google.protobuf.Timestamp.getDefaultInstance() + : createTime_; + } else { + return createTimeBuilder_.getMessage(); + } + } + /** + * + * + *
+     * Output only. The creation time of the catalog.
+     * 
+ * + * + * .google.protobuf.Timestamp create_time = 2 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + public Builder setCreateTime(com.google.protobuf.Timestamp value) { + if (createTimeBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + createTime_ = value; + } else { + createTimeBuilder_.setMessage(value); + } + bitField0_ |= 0x00000002; + onChanged(); + return this; + } + /** + * + * + *
+     * Output only. The creation time of the catalog.
+     * 
+ * + * + * .google.protobuf.Timestamp create_time = 2 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + public Builder setCreateTime(com.google.protobuf.Timestamp.Builder builderForValue) { + if (createTimeBuilder_ == null) { + createTime_ = builderForValue.build(); + } else { + createTimeBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000002; + onChanged(); + return this; + } + /** + * + * + *
+     * Output only. The creation time of the catalog.
+     * 
+ * + * + * .google.protobuf.Timestamp create_time = 2 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + public Builder mergeCreateTime(com.google.protobuf.Timestamp value) { + if (createTimeBuilder_ == null) { + if (((bitField0_ & 0x00000002) != 0) + && createTime_ != null + && createTime_ != com.google.protobuf.Timestamp.getDefaultInstance()) { + getCreateTimeBuilder().mergeFrom(value); + } else { + createTime_ = value; + } + } else { + createTimeBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000002; + onChanged(); + return this; + } + /** + * + * + *
+     * Output only. The creation time of the catalog.
+     * 
+ * + * + * .google.protobuf.Timestamp create_time = 2 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + public Builder clearCreateTime() { + bitField0_ = (bitField0_ & ~0x00000002); + createTime_ = null; + if (createTimeBuilder_ != null) { + createTimeBuilder_.dispose(); + createTimeBuilder_ = null; + } + onChanged(); + return this; + } + /** + * + * + *
+     * Output only. The creation time of the catalog.
+     * 
+ * + * + * .google.protobuf.Timestamp create_time = 2 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + public com.google.protobuf.Timestamp.Builder getCreateTimeBuilder() { + bitField0_ |= 0x00000002; + onChanged(); + return getCreateTimeFieldBuilder().getBuilder(); + } + /** + * + * + *
+     * Output only. The creation time of the catalog.
+     * 
+ * + * + * .google.protobuf.Timestamp create_time = 2 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + public com.google.protobuf.TimestampOrBuilder getCreateTimeOrBuilder() { + if (createTimeBuilder_ != null) { + return createTimeBuilder_.getMessageOrBuilder(); + } else { + return createTime_ == null + ? com.google.protobuf.Timestamp.getDefaultInstance() + : createTime_; + } + } + /** + * + * + *
+     * Output only. The creation time of the catalog.
+     * 
+ * + * + * .google.protobuf.Timestamp create_time = 2 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + private com.google.protobuf.SingleFieldBuilderV3< + com.google.protobuf.Timestamp, + com.google.protobuf.Timestamp.Builder, + com.google.protobuf.TimestampOrBuilder> + getCreateTimeFieldBuilder() { + if (createTimeBuilder_ == null) { + createTimeBuilder_ = + new com.google.protobuf.SingleFieldBuilderV3< + com.google.protobuf.Timestamp, + com.google.protobuf.Timestamp.Builder, + com.google.protobuf.TimestampOrBuilder>( + getCreateTime(), getParentForChildren(), isClean()); + createTime_ = null; + } + return createTimeBuilder_; + } + + private com.google.protobuf.Timestamp updateTime_; + private com.google.protobuf.SingleFieldBuilderV3< + com.google.protobuf.Timestamp, + com.google.protobuf.Timestamp.Builder, + com.google.protobuf.TimestampOrBuilder> + updateTimeBuilder_; + /** + * + * + *
+     * Output only. The last modification time of the catalog.
+     * 
+ * + * + * .google.protobuf.Timestamp update_time = 3 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + * + * @return Whether the updateTime field is set. + */ + public boolean hasUpdateTime() { + return ((bitField0_ & 0x00000004) != 0); + } + /** + * + * + *
+     * Output only. The last modification time of the catalog.
+     * 
+ * + * + * .google.protobuf.Timestamp update_time = 3 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + * + * @return The updateTime. + */ + public com.google.protobuf.Timestamp getUpdateTime() { + if (updateTimeBuilder_ == null) { + return updateTime_ == null + ? com.google.protobuf.Timestamp.getDefaultInstance() + : updateTime_; + } else { + return updateTimeBuilder_.getMessage(); + } + } + /** + * + * + *
+     * Output only. The last modification time of the catalog.
+     * 
+ * + * + * .google.protobuf.Timestamp update_time = 3 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + public Builder setUpdateTime(com.google.protobuf.Timestamp value) { + if (updateTimeBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + updateTime_ = value; + } else { + updateTimeBuilder_.setMessage(value); + } + bitField0_ |= 0x00000004; + onChanged(); + return this; + } + /** + * + * + *
+     * Output only. The last modification time of the catalog.
+     * 
+ * + * + * .google.protobuf.Timestamp update_time = 3 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + public Builder setUpdateTime(com.google.protobuf.Timestamp.Builder builderForValue) { + if (updateTimeBuilder_ == null) { + updateTime_ = builderForValue.build(); + } else { + updateTimeBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000004; + onChanged(); + return this; + } + /** + * + * + *
+     * Output only. The last modification time of the catalog.
+     * 
+ * + * + * .google.protobuf.Timestamp update_time = 3 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + public Builder mergeUpdateTime(com.google.protobuf.Timestamp value) { + if (updateTimeBuilder_ == null) { + if (((bitField0_ & 0x00000004) != 0) + && updateTime_ != null + && updateTime_ != com.google.protobuf.Timestamp.getDefaultInstance()) { + getUpdateTimeBuilder().mergeFrom(value); + } else { + updateTime_ = value; + } + } else { + updateTimeBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000004; + onChanged(); + return this; + } + /** + * + * + *
+     * Output only. The last modification time of the catalog.
+     * 
+ * + * + * .google.protobuf.Timestamp update_time = 3 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + public Builder clearUpdateTime() { + bitField0_ = (bitField0_ & ~0x00000004); + updateTime_ = null; + if (updateTimeBuilder_ != null) { + updateTimeBuilder_.dispose(); + updateTimeBuilder_ = null; + } + onChanged(); + return this; + } + /** + * + * + *
+     * Output only. The last modification time of the catalog.
+     * 
+ * + * + * .google.protobuf.Timestamp update_time = 3 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + public com.google.protobuf.Timestamp.Builder getUpdateTimeBuilder() { + bitField0_ |= 0x00000004; + onChanged(); + return getUpdateTimeFieldBuilder().getBuilder(); + } + /** + * + * + *
+     * Output only. The last modification time of the catalog.
+     * 
+ * + * + * .google.protobuf.Timestamp update_time = 3 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + public com.google.protobuf.TimestampOrBuilder getUpdateTimeOrBuilder() { + if (updateTimeBuilder_ != null) { + return updateTimeBuilder_.getMessageOrBuilder(); + } else { + return updateTime_ == null + ? com.google.protobuf.Timestamp.getDefaultInstance() + : updateTime_; + } + } + /** + * + * + *
+     * Output only. The last modification time of the catalog.
+     * 
+ * + * + * .google.protobuf.Timestamp update_time = 3 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + private com.google.protobuf.SingleFieldBuilderV3< + com.google.protobuf.Timestamp, + com.google.protobuf.Timestamp.Builder, + com.google.protobuf.TimestampOrBuilder> + getUpdateTimeFieldBuilder() { + if (updateTimeBuilder_ == null) { + updateTimeBuilder_ = + new com.google.protobuf.SingleFieldBuilderV3< + com.google.protobuf.Timestamp, + com.google.protobuf.Timestamp.Builder, + com.google.protobuf.TimestampOrBuilder>( + getUpdateTime(), getParentForChildren(), isClean()); + updateTime_ = null; + } + return updateTimeBuilder_; + } + + private com.google.protobuf.Timestamp deleteTime_; + private com.google.protobuf.SingleFieldBuilderV3< + com.google.protobuf.Timestamp, + com.google.protobuf.Timestamp.Builder, + com.google.protobuf.TimestampOrBuilder> + deleteTimeBuilder_; + /** + * + * + *
+     * Output only. The deletion time of the catalog. Only set after the catalog
+     * is deleted.
+     * 
+ * + * + * .google.protobuf.Timestamp delete_time = 4 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + * + * @return Whether the deleteTime field is set. + */ + public boolean hasDeleteTime() { + return ((bitField0_ & 0x00000008) != 0); + } + /** + * + * + *
+     * Output only. The deletion time of the catalog. Only set after the catalog
+     * is deleted.
+     * 
+ * + * + * .google.protobuf.Timestamp delete_time = 4 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + * + * @return The deleteTime. + */ + public com.google.protobuf.Timestamp getDeleteTime() { + if (deleteTimeBuilder_ == null) { + return deleteTime_ == null + ? com.google.protobuf.Timestamp.getDefaultInstance() + : deleteTime_; + } else { + return deleteTimeBuilder_.getMessage(); + } + } + /** + * + * + *
+     * Output only. The deletion time of the catalog. Only set after the catalog
+     * is deleted.
+     * 
+ * + * + * .google.protobuf.Timestamp delete_time = 4 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + public Builder setDeleteTime(com.google.protobuf.Timestamp value) { + if (deleteTimeBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + deleteTime_ = value; + } else { + deleteTimeBuilder_.setMessage(value); + } + bitField0_ |= 0x00000008; + onChanged(); + return this; + } + /** + * + * + *
+     * Output only. The deletion time of the catalog. Only set after the catalog
+     * is deleted.
+     * 
+ * + * + * .google.protobuf.Timestamp delete_time = 4 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + public Builder setDeleteTime(com.google.protobuf.Timestamp.Builder builderForValue) { + if (deleteTimeBuilder_ == null) { + deleteTime_ = builderForValue.build(); + } else { + deleteTimeBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000008; + onChanged(); + return this; + } + /** + * + * + *
+     * Output only. The deletion time of the catalog. Only set after the catalog
+     * is deleted.
+     * 
+ * + * + * .google.protobuf.Timestamp delete_time = 4 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + public Builder mergeDeleteTime(com.google.protobuf.Timestamp value) { + if (deleteTimeBuilder_ == null) { + if (((bitField0_ & 0x00000008) != 0) + && deleteTime_ != null + && deleteTime_ != com.google.protobuf.Timestamp.getDefaultInstance()) { + getDeleteTimeBuilder().mergeFrom(value); + } else { + deleteTime_ = value; + } + } else { + deleteTimeBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000008; + onChanged(); + return this; + } + /** + * + * + *
+     * Output only. The deletion time of the catalog. Only set after the catalog
+     * is deleted.
+     * 
+ * + * + * .google.protobuf.Timestamp delete_time = 4 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + public Builder clearDeleteTime() { + bitField0_ = (bitField0_ & ~0x00000008); + deleteTime_ = null; + if (deleteTimeBuilder_ != null) { + deleteTimeBuilder_.dispose(); + deleteTimeBuilder_ = null; + } + onChanged(); + return this; + } + /** + * + * + *
+     * Output only. The deletion time of the catalog. Only set after the catalog
+     * is deleted.
+     * 
+ * + * + * .google.protobuf.Timestamp delete_time = 4 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + public com.google.protobuf.Timestamp.Builder getDeleteTimeBuilder() { + bitField0_ |= 0x00000008; + onChanged(); + return getDeleteTimeFieldBuilder().getBuilder(); + } + /** + * + * + *
+     * Output only. The deletion time of the catalog. Only set after the catalog
+     * is deleted.
+     * 
+ * + * + * .google.protobuf.Timestamp delete_time = 4 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + public com.google.protobuf.TimestampOrBuilder getDeleteTimeOrBuilder() { + if (deleteTimeBuilder_ != null) { + return deleteTimeBuilder_.getMessageOrBuilder(); + } else { + return deleteTime_ == null + ? com.google.protobuf.Timestamp.getDefaultInstance() + : deleteTime_; + } + } + /** + * + * + *
+     * Output only. The deletion time of the catalog. Only set after the catalog
+     * is deleted.
+     * 
+ * + * + * .google.protobuf.Timestamp delete_time = 4 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + private com.google.protobuf.SingleFieldBuilderV3< + com.google.protobuf.Timestamp, + com.google.protobuf.Timestamp.Builder, + com.google.protobuf.TimestampOrBuilder> + getDeleteTimeFieldBuilder() { + if (deleteTimeBuilder_ == null) { + deleteTimeBuilder_ = + new com.google.protobuf.SingleFieldBuilderV3< + com.google.protobuf.Timestamp, + com.google.protobuf.Timestamp.Builder, + com.google.protobuf.TimestampOrBuilder>( + getDeleteTime(), getParentForChildren(), isClean()); + deleteTime_ = null; + } + return deleteTimeBuilder_; + } + + private com.google.protobuf.Timestamp expireTime_; + private com.google.protobuf.SingleFieldBuilderV3< + com.google.protobuf.Timestamp, + com.google.protobuf.Timestamp.Builder, + com.google.protobuf.TimestampOrBuilder> + expireTimeBuilder_; + /** + * + * + *
+     * Output only. The time when this catalog is considered expired. Only set
+     * after the catalog is deleted.
+     * 
+ * + * + * .google.protobuf.Timestamp expire_time = 5 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + * + * @return Whether the expireTime field is set. + */ + public boolean hasExpireTime() { + return ((bitField0_ & 0x00000010) != 0); + } + /** + * + * + *
+     * Output only. The time when this catalog is considered expired. Only set
+     * after the catalog is deleted.
+     * 
+ * + * + * .google.protobuf.Timestamp expire_time = 5 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + * + * @return The expireTime. + */ + public com.google.protobuf.Timestamp getExpireTime() { + if (expireTimeBuilder_ == null) { + return expireTime_ == null + ? com.google.protobuf.Timestamp.getDefaultInstance() + : expireTime_; + } else { + return expireTimeBuilder_.getMessage(); + } + } + /** + * + * + *
+     * Output only. The time when this catalog is considered expired. Only set
+     * after the catalog is deleted.
+     * 
+ * + * + * .google.protobuf.Timestamp expire_time = 5 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + public Builder setExpireTime(com.google.protobuf.Timestamp value) { + if (expireTimeBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + expireTime_ = value; + } else { + expireTimeBuilder_.setMessage(value); + } + bitField0_ |= 0x00000010; + onChanged(); + return this; + } + /** + * + * + *
+     * Output only. The time when this catalog is considered expired. Only set
+     * after the catalog is deleted.
+     * 
+ * + * + * .google.protobuf.Timestamp expire_time = 5 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + public Builder setExpireTime(com.google.protobuf.Timestamp.Builder builderForValue) { + if (expireTimeBuilder_ == null) { + expireTime_ = builderForValue.build(); + } else { + expireTimeBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000010; + onChanged(); + return this; + } + /** + * + * + *
+     * Output only. The time when this catalog is considered expired. Only set
+     * after the catalog is deleted.
+     * 
+ * + * + * .google.protobuf.Timestamp expire_time = 5 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + public Builder mergeExpireTime(com.google.protobuf.Timestamp value) { + if (expireTimeBuilder_ == null) { + if (((bitField0_ & 0x00000010) != 0) + && expireTime_ != null + && expireTime_ != com.google.protobuf.Timestamp.getDefaultInstance()) { + getExpireTimeBuilder().mergeFrom(value); + } else { + expireTime_ = value; + } + } else { + expireTimeBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000010; + onChanged(); + return this; + } + /** + * + * + *
+     * Output only. The time when this catalog is considered expired. Only set
+     * after the catalog is deleted.
+     * 
+ * + * + * .google.protobuf.Timestamp expire_time = 5 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + public Builder clearExpireTime() { + bitField0_ = (bitField0_ & ~0x00000010); + expireTime_ = null; + if (expireTimeBuilder_ != null) { + expireTimeBuilder_.dispose(); + expireTimeBuilder_ = null; + } + onChanged(); + return this; + } + /** + * + * + *
+     * Output only. The time when this catalog is considered expired. Only set
+     * after the catalog is deleted.
+     * 
+ * + * + * .google.protobuf.Timestamp expire_time = 5 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + public com.google.protobuf.Timestamp.Builder getExpireTimeBuilder() { + bitField0_ |= 0x00000010; + onChanged(); + return getExpireTimeFieldBuilder().getBuilder(); + } + /** + * + * + *
+     * Output only. The time when this catalog is considered expired. Only set
+     * after the catalog is deleted.
+     * 
+ * + * + * .google.protobuf.Timestamp expire_time = 5 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + public com.google.protobuf.TimestampOrBuilder getExpireTimeOrBuilder() { + if (expireTimeBuilder_ != null) { + return expireTimeBuilder_.getMessageOrBuilder(); + } else { + return expireTime_ == null + ? com.google.protobuf.Timestamp.getDefaultInstance() + : expireTime_; + } + } + /** + * + * + *
+     * Output only. The time when this catalog is considered expired. Only set
+     * after the catalog is deleted.
+     * 
+ * + * + * .google.protobuf.Timestamp expire_time = 5 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + private com.google.protobuf.SingleFieldBuilderV3< + com.google.protobuf.Timestamp, + com.google.protobuf.Timestamp.Builder, + com.google.protobuf.TimestampOrBuilder> + getExpireTimeFieldBuilder() { + if (expireTimeBuilder_ == null) { + expireTimeBuilder_ = + new com.google.protobuf.SingleFieldBuilderV3< + com.google.protobuf.Timestamp, + com.google.protobuf.Timestamp.Builder, + com.google.protobuf.TimestampOrBuilder>( + getExpireTime(), getParentForChildren(), isClean()); + expireTime_ = null; + } + return expireTimeBuilder_; + } + + @java.lang.Override + public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + // @@protoc_insertion_point(builder_scope:google.cloud.bigquery.biglake.v1alpha1.Catalog) + } + + // @@protoc_insertion_point(class_scope:google.cloud.bigquery.biglake.v1alpha1.Catalog) + private static final com.google.cloud.bigquery.biglake.v1alpha1.Catalog DEFAULT_INSTANCE; + + static { + DEFAULT_INSTANCE = new com.google.cloud.bigquery.biglake.v1alpha1.Catalog(); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.Catalog getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + @java.lang.Override + public Catalog parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + Builder builder = newBuilder(); + try { + builder.mergeFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(builder.buildPartial()); + } catch (com.google.protobuf.UninitializedMessageException e) { + throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException(e) + .setUnfinishedMessage(builder.buildPartial()); + } + return builder.buildPartial(); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public com.google.cloud.bigquery.biglake.v1alpha1.Catalog getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } +} diff --git a/java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/CatalogName.java b/java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/CatalogName.java new file mode 100644 index 000000000000..4e21c4a8f5f9 --- /dev/null +++ b/java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/CatalogName.java @@ -0,0 +1,223 @@ +/* + * Copyright 2022 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.biglake.v1alpha1; + +import com.google.api.pathtemplate.PathTemplate; +import com.google.api.resourcenames.ResourceName; +import com.google.common.base.Preconditions; +import com.google.common.collect.ImmutableMap; +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import javax.annotation.Generated; + +// AUTO-GENERATED DOCUMENTATION AND CLASS. +@Generated("by gapic-generator-java") +public class CatalogName implements ResourceName { + private static final PathTemplate PROJECT_LOCATION_CATALOG = + PathTemplate.createWithoutUrlEncoding( + "projects/{project}/locations/{location}/catalogs/{catalog}"); + private volatile Map fieldValuesMap; + private final String project; + private final String location; + private final String catalog; + + @Deprecated + protected CatalogName() { + project = null; + location = null; + catalog = null; + } + + private CatalogName(Builder builder) { + project = Preconditions.checkNotNull(builder.getProject()); + location = Preconditions.checkNotNull(builder.getLocation()); + catalog = Preconditions.checkNotNull(builder.getCatalog()); + } + + public String getProject() { + return project; + } + + public String getLocation() { + return location; + } + + public String getCatalog() { + return catalog; + } + + public static Builder newBuilder() { + return new Builder(); + } + + public Builder toBuilder() { + return new Builder(this); + } + + public static CatalogName of(String project, String location, String catalog) { + return newBuilder().setProject(project).setLocation(location).setCatalog(catalog).build(); + } + + public static String format(String project, String location, String catalog) { + return newBuilder() + .setProject(project) + .setLocation(location) + .setCatalog(catalog) + .build() + .toString(); + } + + public static CatalogName parse(String formattedString) { + if (formattedString.isEmpty()) { + return null; + } + Map matchMap = + PROJECT_LOCATION_CATALOG.validatedMatch( + formattedString, "CatalogName.parse: formattedString not in valid format"); + return of(matchMap.get("project"), matchMap.get("location"), matchMap.get("catalog")); + } + + public static List parseList(List formattedStrings) { + List list = new ArrayList<>(formattedStrings.size()); + for (String formattedString : formattedStrings) { + list.add(parse(formattedString)); + } + return list; + } + + public static List toStringList(List values) { + List list = new ArrayList<>(values.size()); + for (CatalogName value : values) { + if (value == null) { + list.add(""); + } else { + list.add(value.toString()); + } + } + return list; + } + + public static boolean isParsableFrom(String formattedString) { + return PROJECT_LOCATION_CATALOG.matches(formattedString); + } + + @Override + public Map getFieldValuesMap() { + if (fieldValuesMap == null) { + synchronized (this) { + if (fieldValuesMap == null) { + ImmutableMap.Builder fieldMapBuilder = ImmutableMap.builder(); + if (project != null) { + fieldMapBuilder.put("project", project); + } + if (location != null) { + 
fieldMapBuilder.put("location", location); + } + if (catalog != null) { + fieldMapBuilder.put("catalog", catalog); + } + fieldValuesMap = fieldMapBuilder.build(); + } + } + } + return fieldValuesMap; + } + + public String getFieldValue(String fieldName) { + return getFieldValuesMap().get(fieldName); + } + + @Override + public String toString() { + return PROJECT_LOCATION_CATALOG.instantiate( + "project", project, "location", location, "catalog", catalog); + } + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o != null || getClass() == o.getClass()) { + CatalogName that = ((CatalogName) o); + return Objects.equals(this.project, that.project) + && Objects.equals(this.location, that.location) + && Objects.equals(this.catalog, that.catalog); + } + return false; + } + + @Override + public int hashCode() { + int h = 1; + h *= 1000003; + h ^= Objects.hashCode(project); + h *= 1000003; + h ^= Objects.hashCode(location); + h *= 1000003; + h ^= Objects.hashCode(catalog); + return h; + } + + /** Builder for projects/{project}/locations/{location}/catalogs/{catalog}. */ + public static class Builder { + private String project; + private String location; + private String catalog; + + protected Builder() {} + + public String getProject() { + return project; + } + + public String getLocation() { + return location; + } + + public String getCatalog() { + return catalog; + } + + public Builder setProject(String project) { + this.project = project; + return this; + } + + public Builder setLocation(String location) { + this.location = location; + return this; + } + + public Builder setCatalog(String catalog) { + this.catalog = catalog; + return this; + } + + private Builder(CatalogName catalogName) { + this.project = catalogName.project; + this.location = catalogName.location; + this.catalog = catalogName.catalog; + } + + public CatalogName build() { + return new CatalogName(this); + } + } +} diff --git a/java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/CatalogOrBuilder.java b/java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/CatalogOrBuilder.java new file mode 100644 index 000000000000..20c50b6b4e81 --- /dev/null +++ b/java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/CatalogOrBuilder.java @@ -0,0 +1,216 @@ +/* + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: google/cloud/bigquery/biglake/v1alpha1/metastore.proto + +package com.google.cloud.bigquery.biglake.v1alpha1; + +public interface CatalogOrBuilder + extends + // @@protoc_insertion_point(interface_extends:google.cloud.bigquery.biglake.v1alpha1.Catalog) + com.google.protobuf.MessageOrBuilder { + + /** + * + * + *
+   * Output only. The resource name.
+   * Format:
+   * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}
+   * 
+ * + * + * string name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... } + * + * + * @return The name. + */ + java.lang.String getName(); + /** + * + * + *
+   * Output only. The resource name.
+   * Format:
+   * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}
+   * 
+ * + * + * string name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... } + * + * + * @return The bytes for name. + */ + com.google.protobuf.ByteString getNameBytes(); + + /** + * + * + *
+   * Output only. The creation time of the catalog.
+   * 
+ * + * .google.protobuf.Timestamp create_time = 2 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + * + * @return Whether the createTime field is set. + */ + boolean hasCreateTime(); + /** + * + * + *
+   * Output only. The creation time of the catalog.
+   * 
+ * + * .google.protobuf.Timestamp create_time = 2 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + * + * @return The createTime. + */ + com.google.protobuf.Timestamp getCreateTime(); + /** + * + * + *
+   * Output only. The creation time of the catalog.
+   * 
+ * + * .google.protobuf.Timestamp create_time = 2 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + com.google.protobuf.TimestampOrBuilder getCreateTimeOrBuilder(); + + /** + * + * + *
+   * Output only. The last modification time of the catalog.
+   * 
+ * + * .google.protobuf.Timestamp update_time = 3 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + * + * @return Whether the updateTime field is set. + */ + boolean hasUpdateTime(); + /** + * + * + *
+   * Output only. The last modification time of the catalog.
+   * 
+ * + * .google.protobuf.Timestamp update_time = 3 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + * + * @return The updateTime. + */ + com.google.protobuf.Timestamp getUpdateTime(); + /** + * + * + *
+   * Output only. The last modification time of the catalog.
+   * 
+ * + * .google.protobuf.Timestamp update_time = 3 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + com.google.protobuf.TimestampOrBuilder getUpdateTimeOrBuilder(); + + /** + * + * + *
+   * Output only. The deletion time of the catalog. Only set after the catalog
+   * is deleted.
+   * 
+ * + * .google.protobuf.Timestamp delete_time = 4 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + * + * @return Whether the deleteTime field is set. + */ + boolean hasDeleteTime(); + /** + * + * + *
+   * Output only. The deletion time of the catalog. Only set after the catalog
+   * is deleted.
+   * 
+ * + * .google.protobuf.Timestamp delete_time = 4 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + * + * @return The deleteTime. + */ + com.google.protobuf.Timestamp getDeleteTime(); + /** + * + * + *
+   * Output only. The deletion time of the catalog. Only set after the catalog
+   * is deleted.
+   * 
+ * + * .google.protobuf.Timestamp delete_time = 4 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + com.google.protobuf.TimestampOrBuilder getDeleteTimeOrBuilder(); + + /** + * + * + *
+   * Output only. The time when this catalog is considered expired. Only set
+   * after the catalog is deleted.
+   * 
+ * + * .google.protobuf.Timestamp expire_time = 5 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + * + * @return Whether the expireTime field is set. + */ + boolean hasExpireTime(); + /** + * + * + *
+   * Output only. The time when this catalog is considered expired. Only set
+   * after the catalog is deleted.
+   * 
+ * + * .google.protobuf.Timestamp expire_time = 5 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + * + * @return The expireTime. + */ + com.google.protobuf.Timestamp getExpireTime(); + /** + * + * + *
+   * Output only. The time when this catalog is considered expired. Only set
+   * after the catalog is deleted.
+   * 
+ * + * .google.protobuf.Timestamp expire_time = 5 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + com.google.protobuf.TimestampOrBuilder getExpireTimeOrBuilder(); +} diff --git a/java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/CheckLockRequest.java b/java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/CheckLockRequest.java new file mode 100644 index 000000000000..b41792624ae5 --- /dev/null +++ b/java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/CheckLockRequest.java @@ -0,0 +1,658 @@ +/* + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: google/cloud/bigquery/biglake/v1alpha1/metastore.proto + +package com.google.cloud.bigquery.biglake.v1alpha1; + +/** + * + * + *
+ * Request message for the CheckLock method.
+ * 
+ * + * Protobuf type {@code google.cloud.bigquery.biglake.v1alpha1.CheckLockRequest} + */ +public final class CheckLockRequest extends com.google.protobuf.GeneratedMessageV3 + implements + // @@protoc_insertion_point(message_implements:google.cloud.bigquery.biglake.v1alpha1.CheckLockRequest) + CheckLockRequestOrBuilder { + private static final long serialVersionUID = 0L; + // Use CheckLockRequest.newBuilder() to construct. + private CheckLockRequest(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + + private CheckLockRequest() { + name_ = ""; + } + + @java.lang.Override + @SuppressWarnings({"unused"}) + protected java.lang.Object newInstance(UnusedPrivateParameter unused) { + return new CheckLockRequest(); + } + + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet getUnknownFields() { + return this.unknownFields; + } + + public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { + return com.google.cloud.bigquery.biglake.v1alpha1.MetastoreProto + .internal_static_google_cloud_bigquery_biglake_v1alpha1_CheckLockRequest_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return com.google.cloud.bigquery.biglake.v1alpha1.MetastoreProto + .internal_static_google_cloud_bigquery_biglake_v1alpha1_CheckLockRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + com.google.cloud.bigquery.biglake.v1alpha1.CheckLockRequest.class, + com.google.cloud.bigquery.biglake.v1alpha1.CheckLockRequest.Builder.class); + } + + public static final int NAME_FIELD_NUMBER = 1; + + @SuppressWarnings("serial") + private volatile java.lang.Object name_ = ""; + /** + * + * + *
+   * Required. The name of the lock to check.
+   * Format:
+   * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}/locks/{lock_id}
+   * 
+ * + * + * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @return The name. + */ + @java.lang.Override + public java.lang.String getName() { + java.lang.Object ref = name_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + name_ = s; + return s; + } + } + /** + * + * + *
+   * Required. The name of the lock to check.
+   * Format:
+   * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}/locks/{lock_id}
+   * 
+ * + * + * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @return The bytes for name. + */ + @java.lang.Override + public com.google.protobuf.ByteString getNameBytes() { + java.lang.Object ref = name_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); + name_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + private byte memoizedIsInitialized = -1; + + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 1, name_); + } + getUnknownFields().writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, name_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof com.google.cloud.bigquery.biglake.v1alpha1.CheckLockRequest)) { + return super.equals(obj); + } + com.google.cloud.bigquery.biglake.v1alpha1.CheckLockRequest other = + (com.google.cloud.bigquery.biglake.v1alpha1.CheckLockRequest) obj; + + if (!getName().equals(other.getName())) return false; + if (!getUnknownFields().equals(other.getUnknownFields())) return false; + return true; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + hash = (37 * hash) + NAME_FIELD_NUMBER; + hash = (53 * hash) + getName().hashCode(); + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.CheckLockRequest parseFrom( + java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.CheckLockRequest parseFrom( + java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.CheckLockRequest parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.CheckLockRequest parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.CheckLockRequest parseFrom(byte[] data) + throws 
com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.CheckLockRequest parseFrom( + byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.CheckLockRequest parseFrom( + java.io.InputStream input) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.CheckLockRequest parseFrom( + java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException( + PARSER, input, extensionRegistry); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.CheckLockRequest parseDelimitedFrom( + java.io.InputStream input) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.CheckLockRequest parseDelimitedFrom( + java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( + PARSER, input, extensionRegistry); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.CheckLockRequest parseFrom( + com.google.protobuf.CodedInputStream input) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.CheckLockRequest parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException( + PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { + return newBuilder(); + } + + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + + public static Builder newBuilder( + com.google.cloud.bigquery.biglake.v1alpha1.CheckLockRequest prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * + * + *
+   * Request message for the CheckLock method.
+   * 
+ * + * Protobuf type {@code google.cloud.bigquery.biglake.v1alpha1.CheckLockRequest} + */ + public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder + implements + // @@protoc_insertion_point(builder_implements:google.cloud.bigquery.biglake.v1alpha1.CheckLockRequest) + com.google.cloud.bigquery.biglake.v1alpha1.CheckLockRequestOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { + return com.google.cloud.bigquery.biglake.v1alpha1.MetastoreProto + .internal_static_google_cloud_bigquery_biglake_v1alpha1_CheckLockRequest_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return com.google.cloud.bigquery.biglake.v1alpha1.MetastoreProto + .internal_static_google_cloud_bigquery_biglake_v1alpha1_CheckLockRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + com.google.cloud.bigquery.biglake.v1alpha1.CheckLockRequest.class, + com.google.cloud.bigquery.biglake.v1alpha1.CheckLockRequest.Builder.class); + } + + // Construct using com.google.cloud.bigquery.biglake.v1alpha1.CheckLockRequest.newBuilder() + private Builder() {} + + private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + } + + @java.lang.Override + public Builder clear() { + super.clear(); + bitField0_ = 0; + name_ = ""; + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { + return com.google.cloud.bigquery.biglake.v1alpha1.MetastoreProto + .internal_static_google_cloud_bigquery_biglake_v1alpha1_CheckLockRequest_descriptor; + } + + @java.lang.Override + public com.google.cloud.bigquery.biglake.v1alpha1.CheckLockRequest getDefaultInstanceForType() { + return com.google.cloud.bigquery.biglake.v1alpha1.CheckLockRequest.getDefaultInstance(); + } + + @java.lang.Override + public com.google.cloud.bigquery.biglake.v1alpha1.CheckLockRequest build() { + com.google.cloud.bigquery.biglake.v1alpha1.CheckLockRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public com.google.cloud.bigquery.biglake.v1alpha1.CheckLockRequest buildPartial() { + com.google.cloud.bigquery.biglake.v1alpha1.CheckLockRequest result = + new com.google.cloud.bigquery.biglake.v1alpha1.CheckLockRequest(this); + if (bitField0_ != 0) { + buildPartial0(result); + } + onBuilt(); + return result; + } + + private void buildPartial0(com.google.cloud.bigquery.biglake.v1alpha1.CheckLockRequest result) { + int from_bitField0_ = bitField0_; + if (((from_bitField0_ & 0x00000001) != 0)) { + result.name_ = name_; + } + } + + @java.lang.Override + public Builder clone() { + return super.clone(); + } + + @java.lang.Override + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { + return super.setField(field, value); + } + + @java.lang.Override + public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { + return super.clearField(field); + } + + @java.lang.Override + public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return super.clearOneof(oneof); + } + + @java.lang.Override + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { + return super.setRepeatedField(field, index, value); + } + + @java.lang.Override + 
public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { + return super.addRepeatedField(field, value); + } + + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof com.google.cloud.bigquery.biglake.v1alpha1.CheckLockRequest) { + return mergeFrom((com.google.cloud.bigquery.biglake.v1alpha1.CheckLockRequest) other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(com.google.cloud.bigquery.biglake.v1alpha1.CheckLockRequest other) { + if (other == com.google.cloud.bigquery.biglake.v1alpha1.CheckLockRequest.getDefaultInstance()) + return this; + if (!other.getName().isEmpty()) { + name_ = other.name_; + bitField0_ |= 0x00000001; + onChanged(); + } + this.mergeUnknownFields(other.getUnknownFields()); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 10: + { + name_ = input.readStringRequireUtf8(); + bitField0_ |= 0x00000001; + break; + } // case 10 + default: + { + if (!super.parseUnknownField(input, extensionRegistry, tag)) { + done = true; // was an endgroup tag + } + break; + } // default: + } // switch (tag) + } // while (!done) + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.unwrapIOException(); + } finally { + onChanged(); + } // finally + return this; + } + + private int bitField0_; + + private java.lang.Object name_ = ""; + /** + * + * + *
+     * Required. The name of the lock to check.
+     * Format:
+     * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}/locks/{lock_id}
+     * 
+ * + * + * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @return The name. + */ + public java.lang.String getName() { + java.lang.Object ref = name_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + name_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + * + * + *
+     * Required. The name of the lock to check.
+     * Format:
+     * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}/locks/{lock_id}
+     * 
+ * + * + * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @return The bytes for name. + */ + public com.google.protobuf.ByteString getNameBytes() { + java.lang.Object ref = name_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); + name_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * + * + *
+     * Required. The name of the lock to check.
+     * Format:
+     * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}/locks/{lock_id}
+     * 
+ * + * + * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @param value The name to set. + * @return This builder for chaining. + */ + public Builder setName(java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + name_ = value; + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + /** + * + * + *
+     * Required. The name of the lock to check.
+     * Format:
+     * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}/locks/{lock_id}
+     * 
+ * + * + * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @return This builder for chaining. + */ + public Builder clearName() { + name_ = getDefaultInstance().getName(); + bitField0_ = (bitField0_ & ~0x00000001); + onChanged(); + return this; + } + /** + * + * + *
+     * Required. The name of the lock to check.
+     * Format:
+     * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}/locks/{lock_id}
+     * 
+ * + * + * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @param value The bytes for name to set. + * @return This builder for chaining. + */ + public Builder setNameBytes(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + name_ = value; + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + + @java.lang.Override + public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + // @@protoc_insertion_point(builder_scope:google.cloud.bigquery.biglake.v1alpha1.CheckLockRequest) + } + + // @@protoc_insertion_point(class_scope:google.cloud.bigquery.biglake.v1alpha1.CheckLockRequest) + private static final com.google.cloud.bigquery.biglake.v1alpha1.CheckLockRequest DEFAULT_INSTANCE; + + static { + DEFAULT_INSTANCE = new com.google.cloud.bigquery.biglake.v1alpha1.CheckLockRequest(); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.CheckLockRequest getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + @java.lang.Override + public CheckLockRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + Builder builder = newBuilder(); + try { + builder.mergeFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(builder.buildPartial()); + } catch (com.google.protobuf.UninitializedMessageException e) { + throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException(e) + .setUnfinishedMessage(builder.buildPartial()); + } + return builder.buildPartial(); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public com.google.cloud.bigquery.biglake.v1alpha1.CheckLockRequest getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } +} diff --git a/java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/CheckLockRequestOrBuilder.java b/java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/CheckLockRequestOrBuilder.java new file mode 100644 index 000000000000..6d77cbdc15a0 --- /dev/null +++ b/java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/CheckLockRequestOrBuilder.java @@ -0,0 +1,58 @@ +/* + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: google/cloud/bigquery/biglake/v1alpha1/metastore.proto + +package com.google.cloud.bigquery.biglake.v1alpha1; + +public interface CheckLockRequestOrBuilder + extends + // @@protoc_insertion_point(interface_extends:google.cloud.bigquery.biglake.v1alpha1.CheckLockRequest) + com.google.protobuf.MessageOrBuilder { + + /** + * + * + *
+   * Required. The name of the lock to check.
+   * Format:
+   * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}/locks/{lock_id}
+   * 
+ * + * + * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @return The name. + */ + java.lang.String getName(); + /** + * + * + *
+   * Required. The name of the lock to check.
+   * Format:
+   * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}/locks/{lock_id}
+   * 
+ * + * + * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @return The bytes for name. + */ + com.google.protobuf.ByteString getNameBytes(); +} diff --git a/java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/CreateCatalogRequest.java b/java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/CreateCatalogRequest.java new file mode 100644 index 000000000000..81e925c7c9d8 --- /dev/null +++ b/java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/CreateCatalogRequest.java @@ -0,0 +1,1149 @@ +/* + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: google/cloud/bigquery/biglake/v1alpha1/metastore.proto + +package com.google.cloud.bigquery.biglake.v1alpha1; + +/** + * + * + *
+ * Request message for the CreateCatalog method.
+ * 
+ * + * Protobuf type {@code google.cloud.bigquery.biglake.v1alpha1.CreateCatalogRequest} + */ +public final class CreateCatalogRequest extends com.google.protobuf.GeneratedMessageV3 + implements + // @@protoc_insertion_point(message_implements:google.cloud.bigquery.biglake.v1alpha1.CreateCatalogRequest) + CreateCatalogRequestOrBuilder { + private static final long serialVersionUID = 0L; + // Use CreateCatalogRequest.newBuilder() to construct. + private CreateCatalogRequest(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + + private CreateCatalogRequest() { + parent_ = ""; + catalogId_ = ""; + } + + @java.lang.Override + @SuppressWarnings({"unused"}) + protected java.lang.Object newInstance(UnusedPrivateParameter unused) { + return new CreateCatalogRequest(); + } + + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet getUnknownFields() { + return this.unknownFields; + } + + public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { + return com.google.cloud.bigquery.biglake.v1alpha1.MetastoreProto + .internal_static_google_cloud_bigquery_biglake_v1alpha1_CreateCatalogRequest_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return com.google.cloud.bigquery.biglake.v1alpha1.MetastoreProto + .internal_static_google_cloud_bigquery_biglake_v1alpha1_CreateCatalogRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + com.google.cloud.bigquery.biglake.v1alpha1.CreateCatalogRequest.class, + com.google.cloud.bigquery.biglake.v1alpha1.CreateCatalogRequest.Builder.class); + } + + public static final int PARENT_FIELD_NUMBER = 1; + + @SuppressWarnings("serial") + private volatile java.lang.Object parent_ = ""; + /** + * + * + *
+   * Required. The parent resource where this catalog will be created.
+   * Format: projects/{project_id_or_number}/locations/{location_id}
+   * 
+ * + * + * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @return The parent. + */ + @java.lang.Override + public java.lang.String getParent() { + java.lang.Object ref = parent_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + parent_ = s; + return s; + } + } + /** + * + * + *
+   * Required. The parent resource where this catalog will be created.
+   * Format: projects/{project_id_or_number}/locations/{location_id}
+   * 
+ * + * + * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @return The bytes for parent. + */ + @java.lang.Override + public com.google.protobuf.ByteString getParentBytes() { + java.lang.Object ref = parent_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); + parent_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int CATALOG_FIELD_NUMBER = 2; + private com.google.cloud.bigquery.biglake.v1alpha1.Catalog catalog_; + /** + * + * + *
+   * Required. The catalog to create.
+   * The `name` field does not need to be provided.
+   * 
+ * + * + * .google.cloud.bigquery.biglake.v1alpha1.Catalog catalog = 2 [(.google.api.field_behavior) = REQUIRED]; + * + * + * @return Whether the catalog field is set. + */ + @java.lang.Override + public boolean hasCatalog() { + return catalog_ != null; + } + /** + * + * + *
+   * Required. The catalog to create.
+   * The `name` field does not need to be provided.
+   * 
+ * + * + * .google.cloud.bigquery.biglake.v1alpha1.Catalog catalog = 2 [(.google.api.field_behavior) = REQUIRED]; + * + * + * @return The catalog. + */ + @java.lang.Override + public com.google.cloud.bigquery.biglake.v1alpha1.Catalog getCatalog() { + return catalog_ == null + ? com.google.cloud.bigquery.biglake.v1alpha1.Catalog.getDefaultInstance() + : catalog_; + } + /** + * + * + *
+   * Required. The catalog to create.
+   * The `name` field does not need to be provided.
+   * 
+ * + * + * .google.cloud.bigquery.biglake.v1alpha1.Catalog catalog = 2 [(.google.api.field_behavior) = REQUIRED]; + * + */ + @java.lang.Override + public com.google.cloud.bigquery.biglake.v1alpha1.CatalogOrBuilder getCatalogOrBuilder() { + return catalog_ == null + ? com.google.cloud.bigquery.biglake.v1alpha1.Catalog.getDefaultInstance() + : catalog_; + } + + public static final int CATALOG_ID_FIELD_NUMBER = 3; + + @SuppressWarnings("serial") + private volatile java.lang.Object catalogId_ = ""; + /** + * + * + *
+   * Required. The ID to use for the catalog, which will become the final
+   * component of the catalog's resource name.
+   * 
+ * + * string catalog_id = 3 [(.google.api.field_behavior) = REQUIRED]; + * + * @return The catalogId. + */ + @java.lang.Override + public java.lang.String getCatalogId() { + java.lang.Object ref = catalogId_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + catalogId_ = s; + return s; + } + } + /** + * + * + *
+   * Required. The ID to use for the catalog, which will become the final
+   * component of the catalog's resource name.
+   * 
+ * + * string catalog_id = 3 [(.google.api.field_behavior) = REQUIRED]; + * + * @return The bytes for catalogId. + */ + @java.lang.Override + public com.google.protobuf.ByteString getCatalogIdBytes() { + java.lang.Object ref = catalogId_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); + catalogId_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + private byte memoizedIsInitialized = -1; + + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_); + } + if (catalog_ != null) { + output.writeMessage(2, getCatalog()); + } + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(catalogId_)) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 3, catalogId_); + } + getUnknownFields().writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_); + } + if (catalog_ != null) { + size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getCatalog()); + } + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(catalogId_)) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, catalogId_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof com.google.cloud.bigquery.biglake.v1alpha1.CreateCatalogRequest)) { + return super.equals(obj); + } + com.google.cloud.bigquery.biglake.v1alpha1.CreateCatalogRequest other = + (com.google.cloud.bigquery.biglake.v1alpha1.CreateCatalogRequest) obj; + + if (!getParent().equals(other.getParent())) return false; + if (hasCatalog() != other.hasCatalog()) return false; + if (hasCatalog()) { + if (!getCatalog().equals(other.getCatalog())) return false; + } + if (!getCatalogId().equals(other.getCatalogId())) return false; + if (!getUnknownFields().equals(other.getUnknownFields())) return false; + return true; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + hash = (37 * hash) + PARENT_FIELD_NUMBER; + hash = (53 * hash) + getParent().hashCode(); + if (hasCatalog()) { + hash = (37 * hash) + CATALOG_FIELD_NUMBER; + hash = (53 * hash) + getCatalog().hashCode(); + } + hash = (37 * hash) + CATALOG_ID_FIELD_NUMBER; + hash = (53 * hash) + getCatalogId().hashCode(); + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.CreateCatalogRequest parseFrom( + java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + + public static 
com.google.cloud.bigquery.biglake.v1alpha1.CreateCatalogRequest parseFrom( + java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.CreateCatalogRequest parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.CreateCatalogRequest parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.CreateCatalogRequest parseFrom( + byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.CreateCatalogRequest parseFrom( + byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.CreateCatalogRequest parseFrom( + java.io.InputStream input) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.CreateCatalogRequest parseFrom( + java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException( + PARSER, input, extensionRegistry); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.CreateCatalogRequest parseDelimitedFrom( + java.io.InputStream input) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.CreateCatalogRequest parseDelimitedFrom( + java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( + PARSER, input, extensionRegistry); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.CreateCatalogRequest parseFrom( + com.google.protobuf.CodedInputStream input) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.CreateCatalogRequest parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException( + PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { + return newBuilder(); + } + + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + + public static Builder newBuilder( + com.google.cloud.bigquery.biglake.v1alpha1.CreateCatalogRequest prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * + * + *
+   * Request message for the CreateCatalog method.
+   * 
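+   *
+   * A minimal construction sketch added for illustration (not part of the protoc
+   * output); the parent and catalog_id literals below are hypothetical placeholders:
+   *
+   *   CreateCatalogRequest request =
+   *       CreateCatalogRequest.newBuilder()
+   *           .setParent("projects/my-project/locations/us")
+   *           .setCatalog(Catalog.getDefaultInstance())
+   *           .setCatalogId("my_catalog")
+   *           .build();
+   *
+   *   // Round-trip through bytes via the parseFrom overloads declared above.
+   *   CreateCatalogRequest copy = CreateCatalogRequest.parseFrom(request.toByteString());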
+ * + * Protobuf type {@code google.cloud.bigquery.biglake.v1alpha1.CreateCatalogRequest} + */ + public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder + implements + // @@protoc_insertion_point(builder_implements:google.cloud.bigquery.biglake.v1alpha1.CreateCatalogRequest) + com.google.cloud.bigquery.biglake.v1alpha1.CreateCatalogRequestOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { + return com.google.cloud.bigquery.biglake.v1alpha1.MetastoreProto + .internal_static_google_cloud_bigquery_biglake_v1alpha1_CreateCatalogRequest_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return com.google.cloud.bigquery.biglake.v1alpha1.MetastoreProto + .internal_static_google_cloud_bigquery_biglake_v1alpha1_CreateCatalogRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + com.google.cloud.bigquery.biglake.v1alpha1.CreateCatalogRequest.class, + com.google.cloud.bigquery.biglake.v1alpha1.CreateCatalogRequest.Builder.class); + } + + // Construct using com.google.cloud.bigquery.biglake.v1alpha1.CreateCatalogRequest.newBuilder() + private Builder() {} + + private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + } + + @java.lang.Override + public Builder clear() { + super.clear(); + bitField0_ = 0; + parent_ = ""; + catalog_ = null; + if (catalogBuilder_ != null) { + catalogBuilder_.dispose(); + catalogBuilder_ = null; + } + catalogId_ = ""; + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { + return com.google.cloud.bigquery.biglake.v1alpha1.MetastoreProto + .internal_static_google_cloud_bigquery_biglake_v1alpha1_CreateCatalogRequest_descriptor; + } + + @java.lang.Override + public com.google.cloud.bigquery.biglake.v1alpha1.CreateCatalogRequest + getDefaultInstanceForType() { + return com.google.cloud.bigquery.biglake.v1alpha1.CreateCatalogRequest.getDefaultInstance(); + } + + @java.lang.Override + public com.google.cloud.bigquery.biglake.v1alpha1.CreateCatalogRequest build() { + com.google.cloud.bigquery.biglake.v1alpha1.CreateCatalogRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public com.google.cloud.bigquery.biglake.v1alpha1.CreateCatalogRequest buildPartial() { + com.google.cloud.bigquery.biglake.v1alpha1.CreateCatalogRequest result = + new com.google.cloud.bigquery.biglake.v1alpha1.CreateCatalogRequest(this); + if (bitField0_ != 0) { + buildPartial0(result); + } + onBuilt(); + return result; + } + + private void buildPartial0( + com.google.cloud.bigquery.biglake.v1alpha1.CreateCatalogRequest result) { + int from_bitField0_ = bitField0_; + if (((from_bitField0_ & 0x00000001) != 0)) { + result.parent_ = parent_; + } + if (((from_bitField0_ & 0x00000002) != 0)) { + result.catalog_ = catalogBuilder_ == null ? 
catalog_ : catalogBuilder_.build(); + } + if (((from_bitField0_ & 0x00000004) != 0)) { + result.catalogId_ = catalogId_; + } + } + + @java.lang.Override + public Builder clone() { + return super.clone(); + } + + @java.lang.Override + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { + return super.setField(field, value); + } + + @java.lang.Override + public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { + return super.clearField(field); + } + + @java.lang.Override + public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return super.clearOneof(oneof); + } + + @java.lang.Override + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { + return super.setRepeatedField(field, index, value); + } + + @java.lang.Override + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { + return super.addRepeatedField(field, value); + } + + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof com.google.cloud.bigquery.biglake.v1alpha1.CreateCatalogRequest) { + return mergeFrom((com.google.cloud.bigquery.biglake.v1alpha1.CreateCatalogRequest) other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom( + com.google.cloud.bigquery.biglake.v1alpha1.CreateCatalogRequest other) { + if (other + == com.google.cloud.bigquery.biglake.v1alpha1.CreateCatalogRequest.getDefaultInstance()) + return this; + if (!other.getParent().isEmpty()) { + parent_ = other.parent_; + bitField0_ |= 0x00000001; + onChanged(); + } + if (other.hasCatalog()) { + mergeCatalog(other.getCatalog()); + } + if (!other.getCatalogId().isEmpty()) { + catalogId_ = other.catalogId_; + bitField0_ |= 0x00000004; + onChanged(); + } + this.mergeUnknownFields(other.getUnknownFields()); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 10: + { + parent_ = input.readStringRequireUtf8(); + bitField0_ |= 0x00000001; + break; + } // case 10 + case 18: + { + input.readMessage(getCatalogFieldBuilder().getBuilder(), extensionRegistry); + bitField0_ |= 0x00000002; + break; + } // case 18 + case 26: + { + catalogId_ = input.readStringRequireUtf8(); + bitField0_ |= 0x00000004; + break; + } // case 26 + default: + { + if (!super.parseUnknownField(input, extensionRegistry, tag)) { + done = true; // was an endgroup tag + } + break; + } // default: + } // switch (tag) + } // while (!done) + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.unwrapIOException(); + } finally { + onChanged(); + } // finally + return this; + } + + private int bitField0_; + + private java.lang.Object parent_ = ""; + /** + * + * + *
+     * Required. The parent resource where this catalog will be created.
+     * Format: projects/{project_id_or_number}/locations/{location_id}
+     * 
+ * + * + * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @return The parent. + */ + public java.lang.String getParent() { + java.lang.Object ref = parent_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + parent_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + * + * + *
+     * Required. The parent resource where this catalog will be created.
+     * Format: projects/{project_id_or_number}/locations/{location_id}
+     * 
+ * + * + * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @return The bytes for parent. + */ + public com.google.protobuf.ByteString getParentBytes() { + java.lang.Object ref = parent_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); + parent_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * + * + *
+     * Required. The parent resource where this catalog will be created.
+     * Format: projects/{project_id_or_number}/locations/{location_id}
+     * 
+ * + * + * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @param value The parent to set. + * @return This builder for chaining. + */ + public Builder setParent(java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + parent_ = value; + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + /** + * + * + *
+     * Required. The parent resource where this catalog will be created.
+     * Format: projects/{project_id_or_number}/locations/{location_id}
+     * 
+ * + * + * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @return This builder for chaining. + */ + public Builder clearParent() { + parent_ = getDefaultInstance().getParent(); + bitField0_ = (bitField0_ & ~0x00000001); + onChanged(); + return this; + } + /** + * + * + *
+     * Required. The parent resource where this catalog will be created.
+     * Format: projects/{project_id_or_number}/locations/{location_id}
+     * 
+ * + * + * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @param value The bytes for parent to set. + * @return This builder for chaining. + */ + public Builder setParentBytes(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + parent_ = value; + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + + private com.google.cloud.bigquery.biglake.v1alpha1.Catalog catalog_; + private com.google.protobuf.SingleFieldBuilderV3< + com.google.cloud.bigquery.biglake.v1alpha1.Catalog, + com.google.cloud.bigquery.biglake.v1alpha1.Catalog.Builder, + com.google.cloud.bigquery.biglake.v1alpha1.CatalogOrBuilder> + catalogBuilder_; + /** + * + * + *
+     * Required. The catalog to create.
+     * The `name` field does not need to be provided.
+     * 
+ * + * + * .google.cloud.bigquery.biglake.v1alpha1.Catalog catalog = 2 [(.google.api.field_behavior) = REQUIRED]; + * + * + * @return Whether the catalog field is set. + */ + public boolean hasCatalog() { + return ((bitField0_ & 0x00000002) != 0); + } + /** + * + * + *
+     * Required. The catalog to create.
+     * The `name` field does not need to be provided.
+     * 
+ * + * + * .google.cloud.bigquery.biglake.v1alpha1.Catalog catalog = 2 [(.google.api.field_behavior) = REQUIRED]; + * + * + * @return The catalog. + */ + public com.google.cloud.bigquery.biglake.v1alpha1.Catalog getCatalog() { + if (catalogBuilder_ == null) { + return catalog_ == null + ? com.google.cloud.bigquery.biglake.v1alpha1.Catalog.getDefaultInstance() + : catalog_; + } else { + return catalogBuilder_.getMessage(); + } + } + /** + * + * + *
+     * Required. The catalog to create.
+     * The `name` field does not need to be provided.
+     * 
+ * + * + * .google.cloud.bigquery.biglake.v1alpha1.Catalog catalog = 2 [(.google.api.field_behavior) = REQUIRED]; + * + */ + public Builder setCatalog(com.google.cloud.bigquery.biglake.v1alpha1.Catalog value) { + if (catalogBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + catalog_ = value; + } else { + catalogBuilder_.setMessage(value); + } + bitField0_ |= 0x00000002; + onChanged(); + return this; + } + /** + * + * + *
+     * Required. The catalog to create.
+     * The `name` field does not need to be provided.
+     * 
+ * + * + * .google.cloud.bigquery.biglake.v1alpha1.Catalog catalog = 2 [(.google.api.field_behavior) = REQUIRED]; + * + */ + public Builder setCatalog( + com.google.cloud.bigquery.biglake.v1alpha1.Catalog.Builder builderForValue) { + if (catalogBuilder_ == null) { + catalog_ = builderForValue.build(); + } else { + catalogBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000002; + onChanged(); + return this; + } + /** + * + * + *
+     * Required. The catalog to create.
+     * The `name` field does not need to be provided.
+     * 
+ * + * + * .google.cloud.bigquery.biglake.v1alpha1.Catalog catalog = 2 [(.google.api.field_behavior) = REQUIRED]; + * + */ + public Builder mergeCatalog(com.google.cloud.bigquery.biglake.v1alpha1.Catalog value) { + if (catalogBuilder_ == null) { + if (((bitField0_ & 0x00000002) != 0) + && catalog_ != null + && catalog_ + != com.google.cloud.bigquery.biglake.v1alpha1.Catalog.getDefaultInstance()) { + getCatalogBuilder().mergeFrom(value); + } else { + catalog_ = value; + } + } else { + catalogBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000002; + onChanged(); + return this; + } + /** + * + * + *
+     * Required. The catalog to create.
+     * The `name` field does not need to be provided.
+     * 
+ * + * + * .google.cloud.bigquery.biglake.v1alpha1.Catalog catalog = 2 [(.google.api.field_behavior) = REQUIRED]; + * + */ + public Builder clearCatalog() { + bitField0_ = (bitField0_ & ~0x00000002); + catalog_ = null; + if (catalogBuilder_ != null) { + catalogBuilder_.dispose(); + catalogBuilder_ = null; + } + onChanged(); + return this; + } + /** + * + * + *
+     * Required. The catalog to create.
+     * The `name` field does not need to be provided.
+     * 
+ * + * + * .google.cloud.bigquery.biglake.v1alpha1.Catalog catalog = 2 [(.google.api.field_behavior) = REQUIRED]; + * + */ + public com.google.cloud.bigquery.biglake.v1alpha1.Catalog.Builder getCatalogBuilder() { + bitField0_ |= 0x00000002; + onChanged(); + return getCatalogFieldBuilder().getBuilder(); + } + /** + * + * + *
+     * Required. The catalog to create.
+     * The `name` field does not need to be provided.
+     * 
+ * + * + * .google.cloud.bigquery.biglake.v1alpha1.Catalog catalog = 2 [(.google.api.field_behavior) = REQUIRED]; + * + */ + public com.google.cloud.bigquery.biglake.v1alpha1.CatalogOrBuilder getCatalogOrBuilder() { + if (catalogBuilder_ != null) { + return catalogBuilder_.getMessageOrBuilder(); + } else { + return catalog_ == null + ? com.google.cloud.bigquery.biglake.v1alpha1.Catalog.getDefaultInstance() + : catalog_; + } + } + /** + * + * + *
+     * Required. The catalog to create.
+     * The `name` field does not need to be provided.
+     * 
+ * + * + * .google.cloud.bigquery.biglake.v1alpha1.Catalog catalog = 2 [(.google.api.field_behavior) = REQUIRED]; + * + */ + private com.google.protobuf.SingleFieldBuilderV3< + com.google.cloud.bigquery.biglake.v1alpha1.Catalog, + com.google.cloud.bigquery.biglake.v1alpha1.Catalog.Builder, + com.google.cloud.bigquery.biglake.v1alpha1.CatalogOrBuilder> + getCatalogFieldBuilder() { + if (catalogBuilder_ == null) { + catalogBuilder_ = + new com.google.protobuf.SingleFieldBuilderV3< + com.google.cloud.bigquery.biglake.v1alpha1.Catalog, + com.google.cloud.bigquery.biglake.v1alpha1.Catalog.Builder, + com.google.cloud.bigquery.biglake.v1alpha1.CatalogOrBuilder>( + getCatalog(), getParentForChildren(), isClean()); + catalog_ = null; + } + return catalogBuilder_; + } + + private java.lang.Object catalogId_ = ""; + /** + * + * + *
+     * Required. The ID to use for the catalog, which will become the final
+     * component of the catalog's resource name.
+     * 
+ * + * string catalog_id = 3 [(.google.api.field_behavior) = REQUIRED]; + * + * @return The catalogId. + */ + public java.lang.String getCatalogId() { + java.lang.Object ref = catalogId_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + catalogId_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + * + * + *
+     * Required. The ID to use for the catalog, which will become the final
+     * component of the catalog's resource name.
+     * 
+ * + * string catalog_id = 3 [(.google.api.field_behavior) = REQUIRED]; + * + * @return The bytes for catalogId. + */ + public com.google.protobuf.ByteString getCatalogIdBytes() { + java.lang.Object ref = catalogId_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); + catalogId_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * + * + *
+     * Required. The ID to use for the catalog, which will become the final
+     * component of the catalog's resource name.
+     * 
+ * + * string catalog_id = 3 [(.google.api.field_behavior) = REQUIRED]; + * + * @param value The catalogId to set. + * @return This builder for chaining. + */ + public Builder setCatalogId(java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + catalogId_ = value; + bitField0_ |= 0x00000004; + onChanged(); + return this; + } + /** + * + * + *
+     * Required. The ID to use for the catalog, which will become the final
+     * component of the catalog's resource name.
+     * 
+ * + * string catalog_id = 3 [(.google.api.field_behavior) = REQUIRED]; + * + * @return This builder for chaining. + */ + public Builder clearCatalogId() { + catalogId_ = getDefaultInstance().getCatalogId(); + bitField0_ = (bitField0_ & ~0x00000004); + onChanged(); + return this; + } + /** + * + * + *
+     * Required. The ID to use for the catalog, which will become the final
+     * component of the catalog's resource name.
+     * 
+ * + * string catalog_id = 3 [(.google.api.field_behavior) = REQUIRED]; + * + * @param value The bytes for catalogId to set. + * @return This builder for chaining. + */ + public Builder setCatalogIdBytes(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + catalogId_ = value; + bitField0_ |= 0x00000004; + onChanged(); + return this; + } + + @java.lang.Override + public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + // @@protoc_insertion_point(builder_scope:google.cloud.bigquery.biglake.v1alpha1.CreateCatalogRequest) + } + + // @@protoc_insertion_point(class_scope:google.cloud.bigquery.biglake.v1alpha1.CreateCatalogRequest) + private static final com.google.cloud.bigquery.biglake.v1alpha1.CreateCatalogRequest + DEFAULT_INSTANCE; + + static { + DEFAULT_INSTANCE = new com.google.cloud.bigquery.biglake.v1alpha1.CreateCatalogRequest(); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.CreateCatalogRequest + getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + @java.lang.Override + public CreateCatalogRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + Builder builder = newBuilder(); + try { + builder.mergeFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(builder.buildPartial()); + } catch (com.google.protobuf.UninitializedMessageException e) { + throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException(e) + .setUnfinishedMessage(builder.buildPartial()); + } + return builder.buildPartial(); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public com.google.cloud.bigquery.biglake.v1alpha1.CreateCatalogRequest + getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } +} diff --git a/java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/CreateCatalogRequestOrBuilder.java b/java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/CreateCatalogRequestOrBuilder.java new file mode 100644 index 000000000000..475b21d90571 --- /dev/null +++ b/java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/CreateCatalogRequestOrBuilder.java @@ -0,0 +1,127 @@ +/* + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: google/cloud/bigquery/biglake/v1alpha1/metastore.proto + +package com.google.cloud.bigquery.biglake.v1alpha1; + +public interface CreateCatalogRequestOrBuilder + extends + // @@protoc_insertion_point(interface_extends:google.cloud.bigquery.biglake.v1alpha1.CreateCatalogRequest) + com.google.protobuf.MessageOrBuilder { + + /** + * + * + *
+   * Required. The parent resource where this catalog will be created.
+   * Format: projects/{project_id_or_number}/locations/{location_id}
+   * 
+ * + * + * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @return The parent. + */ + java.lang.String getParent(); + /** + * + * + *
+   * Required. The parent resource where this catalog will be created.
+   * Format: projects/{project_id_or_number}/locations/{location_id}
+   * 
+ * + * + * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @return The bytes for parent. + */ + com.google.protobuf.ByteString getParentBytes(); + + /** + * + * + *
+   * Required. The catalog to create.
+   * The `name` field does not need to be provided.
+   * 
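+   *
+   * Illustrative guard (editor's sketch; the requestOrBuilder variable is hypothetical):
+   *
+   *   if (requestOrBuilder.hasCatalog()) {
+   *     Catalog catalog = requestOrBuilder.getCatalog();
+   *   }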
+ * + * + * .google.cloud.bigquery.biglake.v1alpha1.Catalog catalog = 2 [(.google.api.field_behavior) = REQUIRED]; + * + * + * @return Whether the catalog field is set. + */ + boolean hasCatalog(); + /** + * + * + *
+   * Required. The catalog to create.
+   * The `name` field does not need to be provided.
+   * 
+ * + * + * .google.cloud.bigquery.biglake.v1alpha1.Catalog catalog = 2 [(.google.api.field_behavior) = REQUIRED]; + * + * + * @return The catalog. + */ + com.google.cloud.bigquery.biglake.v1alpha1.Catalog getCatalog(); + /** + * + * + *
+   * Required. The catalog to create.
+   * The `name` field does not need to be provided.
+   * 
+ * + * + * .google.cloud.bigquery.biglake.v1alpha1.Catalog catalog = 2 [(.google.api.field_behavior) = REQUIRED]; + * + */ + com.google.cloud.bigquery.biglake.v1alpha1.CatalogOrBuilder getCatalogOrBuilder(); + + /** + * + * + *
+   * Required. The ID to use for the catalog, which will become the final
+   * component of the catalog's resource name.
+   * 
+ * + * string catalog_id = 3 [(.google.api.field_behavior) = REQUIRED]; + * + * @return The catalogId. + */ + java.lang.String getCatalogId(); + /** + * + * + *
+   * Required. The ID to use for the catalog, which will become the final
+   * component of the catalog's resource name.
+   * 
+ * + * string catalog_id = 3 [(.google.api.field_behavior) = REQUIRED]; + * + * @return The bytes for catalogId. + */ + com.google.protobuf.ByteString getCatalogIdBytes(); +} diff --git a/java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/CreateDatabaseRequest.java b/java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/CreateDatabaseRequest.java new file mode 100644 index 000000000000..334c73341aff --- /dev/null +++ b/java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/CreateDatabaseRequest.java @@ -0,0 +1,1156 @@ +/* + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: google/cloud/bigquery/biglake/v1alpha1/metastore.proto + +package com.google.cloud.bigquery.biglake.v1alpha1; + +/** + * + * + *
+ * Request message for the CreateDatabase method.
+ * 
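+ *
+ * A minimal construction sketch for illustration (not emitted by protoc); the parent
+ * and database_id literals below are hypothetical placeholders:
+ *
+ *   CreateDatabaseRequest request =
+ *       CreateDatabaseRequest.newBuilder()
+ *           .setParent("projects/my-project/locations/us/catalogs/my_catalog")
+ *           .setDatabase(Database.getDefaultInstance())
+ *           .setDatabaseId("my_database")
+ *           .build();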
+ * + * Protobuf type {@code google.cloud.bigquery.biglake.v1alpha1.CreateDatabaseRequest} + */ +public final class CreateDatabaseRequest extends com.google.protobuf.GeneratedMessageV3 + implements + // @@protoc_insertion_point(message_implements:google.cloud.bigquery.biglake.v1alpha1.CreateDatabaseRequest) + CreateDatabaseRequestOrBuilder { + private static final long serialVersionUID = 0L; + // Use CreateDatabaseRequest.newBuilder() to construct. + private CreateDatabaseRequest(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + + private CreateDatabaseRequest() { + parent_ = ""; + databaseId_ = ""; + } + + @java.lang.Override + @SuppressWarnings({"unused"}) + protected java.lang.Object newInstance(UnusedPrivateParameter unused) { + return new CreateDatabaseRequest(); + } + + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet getUnknownFields() { + return this.unknownFields; + } + + public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { + return com.google.cloud.bigquery.biglake.v1alpha1.MetastoreProto + .internal_static_google_cloud_bigquery_biglake_v1alpha1_CreateDatabaseRequest_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return com.google.cloud.bigquery.biglake.v1alpha1.MetastoreProto + .internal_static_google_cloud_bigquery_biglake_v1alpha1_CreateDatabaseRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + com.google.cloud.bigquery.biglake.v1alpha1.CreateDatabaseRequest.class, + com.google.cloud.bigquery.biglake.v1alpha1.CreateDatabaseRequest.Builder.class); + } + + public static final int PARENT_FIELD_NUMBER = 1; + + @SuppressWarnings("serial") + private volatile java.lang.Object parent_ = ""; + /** + * + * + *
+   * Required. The parent resource where this database will be created.
+   * Format:
+   * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}
+   * 
+ * + * + * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @return The parent. + */ + @java.lang.Override + public java.lang.String getParent() { + java.lang.Object ref = parent_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + parent_ = s; + return s; + } + } + /** + * + * + *
+   * Required. The parent resource where this database will be created.
+   * Format:
+   * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}
+   * 
+ * + * + * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @return The bytes for parent. + */ + @java.lang.Override + public com.google.protobuf.ByteString getParentBytes() { + java.lang.Object ref = parent_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); + parent_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int DATABASE_FIELD_NUMBER = 2; + private com.google.cloud.bigquery.biglake.v1alpha1.Database database_; + /** + * + * + *
+   * Required. The database to create.
+   * The `name` field does not need to be provided.
+   * 
+ * + * + * .google.cloud.bigquery.biglake.v1alpha1.Database database = 2 [(.google.api.field_behavior) = REQUIRED]; + * + * + * @return Whether the database field is set. + */ + @java.lang.Override + public boolean hasDatabase() { + return database_ != null; + } + /** + * + * + *
+   * Required. The database to create.
+   * The `name` field does not need to be provided.
+   * 
+ * + * + * .google.cloud.bigquery.biglake.v1alpha1.Database database = 2 [(.google.api.field_behavior) = REQUIRED]; + * + * + * @return The database. + */ + @java.lang.Override + public com.google.cloud.bigquery.biglake.v1alpha1.Database getDatabase() { + return database_ == null + ? com.google.cloud.bigquery.biglake.v1alpha1.Database.getDefaultInstance() + : database_; + } + /** + * + * + *
+   * Required. The database to create.
+   * The `name` field does not need to be provided.
+   * 
+ * + * + * .google.cloud.bigquery.biglake.v1alpha1.Database database = 2 [(.google.api.field_behavior) = REQUIRED]; + * + */ + @java.lang.Override + public com.google.cloud.bigquery.biglake.v1alpha1.DatabaseOrBuilder getDatabaseOrBuilder() { + return database_ == null + ? com.google.cloud.bigquery.biglake.v1alpha1.Database.getDefaultInstance() + : database_; + } + + public static final int DATABASE_ID_FIELD_NUMBER = 3; + + @SuppressWarnings("serial") + private volatile java.lang.Object databaseId_ = ""; + /** + * + * + *
+   * Required. The ID to use for the database, which will become the final
+   * component of the database's resource name.
+   * 
+ * + * string database_id = 3 [(.google.api.field_behavior) = REQUIRED]; + * + * @return The databaseId. + */ + @java.lang.Override + public java.lang.String getDatabaseId() { + java.lang.Object ref = databaseId_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + databaseId_ = s; + return s; + } + } + /** + * + * + *
+   * Required. The ID to use for the database, which will become the final
+   * component of the database's resource name.
+   * 
+ * + * string database_id = 3 [(.google.api.field_behavior) = REQUIRED]; + * + * @return The bytes for databaseId. + */ + @java.lang.Override + public com.google.protobuf.ByteString getDatabaseIdBytes() { + java.lang.Object ref = databaseId_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); + databaseId_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + private byte memoizedIsInitialized = -1; + + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_); + } + if (database_ != null) { + output.writeMessage(2, getDatabase()); + } + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(databaseId_)) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 3, databaseId_); + } + getUnknownFields().writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_); + } + if (database_ != null) { + size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getDatabase()); + } + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(databaseId_)) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, databaseId_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof com.google.cloud.bigquery.biglake.v1alpha1.CreateDatabaseRequest)) { + return super.equals(obj); + } + com.google.cloud.bigquery.biglake.v1alpha1.CreateDatabaseRequest other = + (com.google.cloud.bigquery.biglake.v1alpha1.CreateDatabaseRequest) obj; + + if (!getParent().equals(other.getParent())) return false; + if (hasDatabase() != other.hasDatabase()) return false; + if (hasDatabase()) { + if (!getDatabase().equals(other.getDatabase())) return false; + } + if (!getDatabaseId().equals(other.getDatabaseId())) return false; + if (!getUnknownFields().equals(other.getUnknownFields())) return false; + return true; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + hash = (37 * hash) + PARENT_FIELD_NUMBER; + hash = (53 * hash) + getParent().hashCode(); + if (hasDatabase()) { + hash = (37 * hash) + DATABASE_FIELD_NUMBER; + hash = (53 * hash) + getDatabase().hashCode(); + } + hash = (37 * hash) + DATABASE_ID_FIELD_NUMBER; + hash = (53 * hash) + getDatabaseId().hashCode(); + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.CreateDatabaseRequest parseFrom( + java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + 
+ public static com.google.cloud.bigquery.biglake.v1alpha1.CreateDatabaseRequest parseFrom( + java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.CreateDatabaseRequest parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.CreateDatabaseRequest parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.CreateDatabaseRequest parseFrom( + byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.CreateDatabaseRequest parseFrom( + byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.CreateDatabaseRequest parseFrom( + java.io.InputStream input) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.CreateDatabaseRequest parseFrom( + java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException( + PARSER, input, extensionRegistry); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.CreateDatabaseRequest parseDelimitedFrom( + java.io.InputStream input) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.CreateDatabaseRequest parseDelimitedFrom( + java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( + PARSER, input, extensionRegistry); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.CreateDatabaseRequest parseFrom( + com.google.protobuf.CodedInputStream input) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.CreateDatabaseRequest parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException( + PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { + return newBuilder(); + } + + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + + public static Builder newBuilder( + com.google.cloud.bigquery.biglake.v1alpha1.CreateDatabaseRequest prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + + @java.lang.Override + public Builder toBuilder() { + return this == 
DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * + * + *
+   * Request message for the CreateDatabase method.
+   * 
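+   *
+   * A brief sketch of modifying an existing request through this builder (illustrative
+   * only; "my_database_v2" is a hypothetical ID):
+   *
+   *   CreateDatabaseRequest updated =
+   *       request.toBuilder()
+   *           .setDatabaseId("my_database_v2")
+   *           .build();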
+ * + * Protobuf type {@code google.cloud.bigquery.biglake.v1alpha1.CreateDatabaseRequest} + */ + public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder + implements + // @@protoc_insertion_point(builder_implements:google.cloud.bigquery.biglake.v1alpha1.CreateDatabaseRequest) + com.google.cloud.bigquery.biglake.v1alpha1.CreateDatabaseRequestOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { + return com.google.cloud.bigquery.biglake.v1alpha1.MetastoreProto + .internal_static_google_cloud_bigquery_biglake_v1alpha1_CreateDatabaseRequest_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return com.google.cloud.bigquery.biglake.v1alpha1.MetastoreProto + .internal_static_google_cloud_bigquery_biglake_v1alpha1_CreateDatabaseRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + com.google.cloud.bigquery.biglake.v1alpha1.CreateDatabaseRequest.class, + com.google.cloud.bigquery.biglake.v1alpha1.CreateDatabaseRequest.Builder.class); + } + + // Construct using com.google.cloud.bigquery.biglake.v1alpha1.CreateDatabaseRequest.newBuilder() + private Builder() {} + + private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + } + + @java.lang.Override + public Builder clear() { + super.clear(); + bitField0_ = 0; + parent_ = ""; + database_ = null; + if (databaseBuilder_ != null) { + databaseBuilder_.dispose(); + databaseBuilder_ = null; + } + databaseId_ = ""; + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { + return com.google.cloud.bigquery.biglake.v1alpha1.MetastoreProto + .internal_static_google_cloud_bigquery_biglake_v1alpha1_CreateDatabaseRequest_descriptor; + } + + @java.lang.Override + public com.google.cloud.bigquery.biglake.v1alpha1.CreateDatabaseRequest + getDefaultInstanceForType() { + return com.google.cloud.bigquery.biglake.v1alpha1.CreateDatabaseRequest.getDefaultInstance(); + } + + @java.lang.Override + public com.google.cloud.bigquery.biglake.v1alpha1.CreateDatabaseRequest build() { + com.google.cloud.bigquery.biglake.v1alpha1.CreateDatabaseRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public com.google.cloud.bigquery.biglake.v1alpha1.CreateDatabaseRequest buildPartial() { + com.google.cloud.bigquery.biglake.v1alpha1.CreateDatabaseRequest result = + new com.google.cloud.bigquery.biglake.v1alpha1.CreateDatabaseRequest(this); + if (bitField0_ != 0) { + buildPartial0(result); + } + onBuilt(); + return result; + } + + private void buildPartial0( + com.google.cloud.bigquery.biglake.v1alpha1.CreateDatabaseRequest result) { + int from_bitField0_ = bitField0_; + if (((from_bitField0_ & 0x00000001) != 0)) { + result.parent_ = parent_; + } + if (((from_bitField0_ & 0x00000002) != 0)) { + result.database_ = databaseBuilder_ == null ? 
database_ : databaseBuilder_.build(); + } + if (((from_bitField0_ & 0x00000004) != 0)) { + result.databaseId_ = databaseId_; + } + } + + @java.lang.Override + public Builder clone() { + return super.clone(); + } + + @java.lang.Override + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { + return super.setField(field, value); + } + + @java.lang.Override + public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { + return super.clearField(field); + } + + @java.lang.Override + public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return super.clearOneof(oneof); + } + + @java.lang.Override + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { + return super.setRepeatedField(field, index, value); + } + + @java.lang.Override + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { + return super.addRepeatedField(field, value); + } + + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof com.google.cloud.bigquery.biglake.v1alpha1.CreateDatabaseRequest) { + return mergeFrom((com.google.cloud.bigquery.biglake.v1alpha1.CreateDatabaseRequest) other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom( + com.google.cloud.bigquery.biglake.v1alpha1.CreateDatabaseRequest other) { + if (other + == com.google.cloud.bigquery.biglake.v1alpha1.CreateDatabaseRequest.getDefaultInstance()) + return this; + if (!other.getParent().isEmpty()) { + parent_ = other.parent_; + bitField0_ |= 0x00000001; + onChanged(); + } + if (other.hasDatabase()) { + mergeDatabase(other.getDatabase()); + } + if (!other.getDatabaseId().isEmpty()) { + databaseId_ = other.databaseId_; + bitField0_ |= 0x00000004; + onChanged(); + } + this.mergeUnknownFields(other.getUnknownFields()); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 10: + { + parent_ = input.readStringRequireUtf8(); + bitField0_ |= 0x00000001; + break; + } // case 10 + case 18: + { + input.readMessage(getDatabaseFieldBuilder().getBuilder(), extensionRegistry); + bitField0_ |= 0x00000002; + break; + } // case 18 + case 26: + { + databaseId_ = input.readStringRequireUtf8(); + bitField0_ |= 0x00000004; + break; + } // case 26 + default: + { + if (!super.parseUnknownField(input, extensionRegistry, tag)) { + done = true; // was an endgroup tag + } + break; + } // default: + } // switch (tag) + } // while (!done) + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.unwrapIOException(); + } finally { + onChanged(); + } // finally + return this; + } + + private int bitField0_; + + private java.lang.Object parent_ = ""; + /** + * + * + *
+     * Required. The parent resource where this database will be created.
+     * Format:
+     * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}
+     * 
+ * + * + * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @return The parent. + */ + public java.lang.String getParent() { + java.lang.Object ref = parent_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + parent_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + * + * + *
+     * Required. The parent resource where this database will be created.
+     * Format:
+     * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}
+     * 
+ * + * + * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @return The bytes for parent. + */ + public com.google.protobuf.ByteString getParentBytes() { + java.lang.Object ref = parent_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); + parent_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * + * + *
+     * Required. The parent resource where this database will be created.
+     * Format:
+     * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}
+     * 
+ * + * + * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @param value The parent to set. + * @return This builder for chaining. + */ + public Builder setParent(java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + parent_ = value; + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + /** + * + * + *
+     * Required. The parent resource where this database will be created.
+     * Format:
+     * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}
+     * 
+ * + * + * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @return This builder for chaining. + */ + public Builder clearParent() { + parent_ = getDefaultInstance().getParent(); + bitField0_ = (bitField0_ & ~0x00000001); + onChanged(); + return this; + } + /** + * + * + *
+     * Required. The parent resource where this database will be created.
+     * Format:
+     * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}
+     * 
+ * + * + * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @param value The bytes for parent to set. + * @return This builder for chaining. + */ + public Builder setParentBytes(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + parent_ = value; + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + + private com.google.cloud.bigquery.biglake.v1alpha1.Database database_; + private com.google.protobuf.SingleFieldBuilderV3< + com.google.cloud.bigquery.biglake.v1alpha1.Database, + com.google.cloud.bigquery.biglake.v1alpha1.Database.Builder, + com.google.cloud.bigquery.biglake.v1alpha1.DatabaseOrBuilder> + databaseBuilder_; + /** + * + * + *
+     * Required. The database to create.
+     * The `name` field does not need to be provided.
+     * 
+ * + * + * .google.cloud.bigquery.biglake.v1alpha1.Database database = 2 [(.google.api.field_behavior) = REQUIRED]; + * + * + * @return Whether the database field is set. + */ + public boolean hasDatabase() { + return ((bitField0_ & 0x00000002) != 0); + } + /** + * + * + *
+     * Required. The database to create.
+     * The `name` field does not need to be provided.
+     * 
+ * + * + * .google.cloud.bigquery.biglake.v1alpha1.Database database = 2 [(.google.api.field_behavior) = REQUIRED]; + * + * + * @return The database. + */ + public com.google.cloud.bigquery.biglake.v1alpha1.Database getDatabase() { + if (databaseBuilder_ == null) { + return database_ == null + ? com.google.cloud.bigquery.biglake.v1alpha1.Database.getDefaultInstance() + : database_; + } else { + return databaseBuilder_.getMessage(); + } + } + /** + * + * + *
+     * Required. The database to create.
+     * The `name` field does not need to be provided.
+     * 
+ * + * + * .google.cloud.bigquery.biglake.v1alpha1.Database database = 2 [(.google.api.field_behavior) = REQUIRED]; + * + */ + public Builder setDatabase(com.google.cloud.bigquery.biglake.v1alpha1.Database value) { + if (databaseBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + database_ = value; + } else { + databaseBuilder_.setMessage(value); + } + bitField0_ |= 0x00000002; + onChanged(); + return this; + } + /** + * + * + *
+     * Required. The database to create.
+     * The `name` field does not need to be provided.
+     * 
+ * + * + * .google.cloud.bigquery.biglake.v1alpha1.Database database = 2 [(.google.api.field_behavior) = REQUIRED]; + * + */ + public Builder setDatabase( + com.google.cloud.bigquery.biglake.v1alpha1.Database.Builder builderForValue) { + if (databaseBuilder_ == null) { + database_ = builderForValue.build(); + } else { + databaseBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000002; + onChanged(); + return this; + } + /** + * + * + *
+     * Required. The database to create.
+     * The `name` field does not need to be provided.
+     * 
+ * + * + * .google.cloud.bigquery.biglake.v1alpha1.Database database = 2 [(.google.api.field_behavior) = REQUIRED]; + * + */ + public Builder mergeDatabase(com.google.cloud.bigquery.biglake.v1alpha1.Database value) { + if (databaseBuilder_ == null) { + if (((bitField0_ & 0x00000002) != 0) + && database_ != null + && database_ + != com.google.cloud.bigquery.biglake.v1alpha1.Database.getDefaultInstance()) { + getDatabaseBuilder().mergeFrom(value); + } else { + database_ = value; + } + } else { + databaseBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000002; + onChanged(); + return this; + } + /** + * + * + *
+     * Required. The database to create.
+     * The `name` field does not need to be provided.
+     * 
+ * + * + * .google.cloud.bigquery.biglake.v1alpha1.Database database = 2 [(.google.api.field_behavior) = REQUIRED]; + * + */ + public Builder clearDatabase() { + bitField0_ = (bitField0_ & ~0x00000002); + database_ = null; + if (databaseBuilder_ != null) { + databaseBuilder_.dispose(); + databaseBuilder_ = null; + } + onChanged(); + return this; + } + /** + * + * + *
+     * Required. The database to create.
+     * The `name` field does not need to be provided.
+     * 
+ * + * + * .google.cloud.bigquery.biglake.v1alpha1.Database database = 2 [(.google.api.field_behavior) = REQUIRED]; + * + */ + public com.google.cloud.bigquery.biglake.v1alpha1.Database.Builder getDatabaseBuilder() { + bitField0_ |= 0x00000002; + onChanged(); + return getDatabaseFieldBuilder().getBuilder(); + } + /** + * + * + *
+     * Required. The database to create.
+     * The `name` field does not need to be provided.
+     * 
+ * + * + * .google.cloud.bigquery.biglake.v1alpha1.Database database = 2 [(.google.api.field_behavior) = REQUIRED]; + * + */ + public com.google.cloud.bigquery.biglake.v1alpha1.DatabaseOrBuilder getDatabaseOrBuilder() { + if (databaseBuilder_ != null) { + return databaseBuilder_.getMessageOrBuilder(); + } else { + return database_ == null + ? com.google.cloud.bigquery.biglake.v1alpha1.Database.getDefaultInstance() + : database_; + } + } + /** + * + * + *
+     * Required. The database to create.
+     * The `name` field does not need to be provided.
+     * 
+ * + * + * .google.cloud.bigquery.biglake.v1alpha1.Database database = 2 [(.google.api.field_behavior) = REQUIRED]; + * + */ + private com.google.protobuf.SingleFieldBuilderV3< + com.google.cloud.bigquery.biglake.v1alpha1.Database, + com.google.cloud.bigquery.biglake.v1alpha1.Database.Builder, + com.google.cloud.bigquery.biglake.v1alpha1.DatabaseOrBuilder> + getDatabaseFieldBuilder() { + if (databaseBuilder_ == null) { + databaseBuilder_ = + new com.google.protobuf.SingleFieldBuilderV3< + com.google.cloud.bigquery.biglake.v1alpha1.Database, + com.google.cloud.bigquery.biglake.v1alpha1.Database.Builder, + com.google.cloud.bigquery.biglake.v1alpha1.DatabaseOrBuilder>( + getDatabase(), getParentForChildren(), isClean()); + database_ = null; + } + return databaseBuilder_; + } + + private java.lang.Object databaseId_ = ""; + /** + * + * + *
+     * Required. The ID to use for the database, which will become the final
+     * component of the database's resource name.
+     * 
+ * + * string database_id = 3 [(.google.api.field_behavior) = REQUIRED]; + * + * @return The databaseId. + */ + public java.lang.String getDatabaseId() { + java.lang.Object ref = databaseId_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + databaseId_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + * + * + *
+     * Required. The ID to use for the database, which will become the final
+     * component of the database's resource name.
+     * 
+ * + * string database_id = 3 [(.google.api.field_behavior) = REQUIRED]; + * + * @return The bytes for databaseId. + */ + public com.google.protobuf.ByteString getDatabaseIdBytes() { + java.lang.Object ref = databaseId_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); + databaseId_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * + * + *
+     * Required. The ID to use for the database, which will become the final
+     * component of the database's resource name.
+     * 
+ * + * string database_id = 3 [(.google.api.field_behavior) = REQUIRED]; + * + * @param value The databaseId to set. + * @return This builder for chaining. + */ + public Builder setDatabaseId(java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + databaseId_ = value; + bitField0_ |= 0x00000004; + onChanged(); + return this; + } + /** + * + * + *
+     * Required. The ID to use for the database, which will become the final
+     * component of the database's resource name.
+     * 
+ * + * string database_id = 3 [(.google.api.field_behavior) = REQUIRED]; + * + * @return This builder for chaining. + */ + public Builder clearDatabaseId() { + databaseId_ = getDefaultInstance().getDatabaseId(); + bitField0_ = (bitField0_ & ~0x00000004); + onChanged(); + return this; + } + /** + * + * + *
+     * Required. The ID to use for the database, which will become the final
+     * component of the database's resource name.
+     * 
+ * + * string database_id = 3 [(.google.api.field_behavior) = REQUIRED]; + * + * @param value The bytes for databaseId to set. + * @return This builder for chaining. + */ + public Builder setDatabaseIdBytes(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + databaseId_ = value; + bitField0_ |= 0x00000004; + onChanged(); + return this; + } + + @java.lang.Override + public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + // @@protoc_insertion_point(builder_scope:google.cloud.bigquery.biglake.v1alpha1.CreateDatabaseRequest) + } + + // @@protoc_insertion_point(class_scope:google.cloud.bigquery.biglake.v1alpha1.CreateDatabaseRequest) + private static final com.google.cloud.bigquery.biglake.v1alpha1.CreateDatabaseRequest + DEFAULT_INSTANCE; + + static { + DEFAULT_INSTANCE = new com.google.cloud.bigquery.biglake.v1alpha1.CreateDatabaseRequest(); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.CreateDatabaseRequest + getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + @java.lang.Override + public CreateDatabaseRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + Builder builder = newBuilder(); + try { + builder.mergeFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(builder.buildPartial()); + } catch (com.google.protobuf.UninitializedMessageException e) { + throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException(e) + .setUnfinishedMessage(builder.buildPartial()); + } + return builder.buildPartial(); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public com.google.cloud.bigquery.biglake.v1alpha1.CreateDatabaseRequest + getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } +} diff --git a/java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/CreateDatabaseRequestOrBuilder.java b/java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/CreateDatabaseRequestOrBuilder.java new file mode 100644 index 000000000000..2e42366c95ea --- /dev/null +++ b/java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/CreateDatabaseRequestOrBuilder.java @@ -0,0 +1,129 @@ +/* + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: google/cloud/bigquery/biglake/v1alpha1/metastore.proto + +package com.google.cloud.bigquery.biglake.v1alpha1; + +public interface CreateDatabaseRequestOrBuilder + extends + // @@protoc_insertion_point(interface_extends:google.cloud.bigquery.biglake.v1alpha1.CreateDatabaseRequest) + com.google.protobuf.MessageOrBuilder { + + /** + * + * + *
+   * Required. The parent resource where this database will be created.
+   * Format:
+   * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}
+   * 
+ * + * + * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @return The parent. + */ + java.lang.String getParent(); + /** + * + * + *
+   * Required. The parent resource where this database will be created.
+   * Format:
+   * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}
+   * 
+ * + * + * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @return The bytes for parent. + */ + com.google.protobuf.ByteString getParentBytes(); + + /** + * + * + *
+   * Required. The database to create.
+   * The `name` field does not need to be provided.
+   * 
+ * + * + * .google.cloud.bigquery.biglake.v1alpha1.Database database = 2 [(.google.api.field_behavior) = REQUIRED]; + * + * + * @return Whether the database field is set. + */ + boolean hasDatabase(); + /** + * + * + *
+   * Required. The database to create.
+   * The `name` field does not need to be provided.
+   * 
+ * + * + * .google.cloud.bigquery.biglake.v1alpha1.Database database = 2 [(.google.api.field_behavior) = REQUIRED]; + * + * + * @return The database. + */ + com.google.cloud.bigquery.biglake.v1alpha1.Database getDatabase(); + /** + * + * + *
+   * Required. The database to create.
+   * The `name` field does not need to be provided.
+   * 
+ * + * + * .google.cloud.bigquery.biglake.v1alpha1.Database database = 2 [(.google.api.field_behavior) = REQUIRED]; + * + */ + com.google.cloud.bigquery.biglake.v1alpha1.DatabaseOrBuilder getDatabaseOrBuilder(); + + /** + * + * + *
+   * Required. The ID to use for the database, which will become the final
+   * component of the database's resource name.
+   * 
+ * + * string database_id = 3 [(.google.api.field_behavior) = REQUIRED]; + * + * @return The databaseId. + */ + java.lang.String getDatabaseId(); + /** + * + * + *
+   * Required. The ID to use for the database, which will become the final
+   * component of the database's resource name.
+   * 
+ * + * string database_id = 3 [(.google.api.field_behavior) = REQUIRED]; + * + * @return The bytes for databaseId. + */ + com.google.protobuf.ByteString getDatabaseIdBytes(); +} diff --git a/java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/CreateLockRequest.java b/java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/CreateLockRequest.java new file mode 100644 index 000000000000..9d82c9968f84 --- /dev/null +++ b/java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/CreateLockRequest.java @@ -0,0 +1,963 @@ +/* + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: google/cloud/bigquery/biglake/v1alpha1/metastore.proto + +package com.google.cloud.bigquery.biglake.v1alpha1; + +/** + * + * + *
+ * Request message for the CreateLock method.
+ * 
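+ * <p>A minimal builder sketch (illustrative only; the parent value below is a placeholder that
+ * follows the documented resource-name format, and the lock fields are left to the caller):
+ * <pre>{@code
+ * CreateLockRequest request =
+ *     CreateLockRequest.newBuilder()
+ *         .setParent(
+ *             "projects/my-project/locations/us/catalogs/my-catalog/databases/my-database")
+ *         .setLock(Lock.newBuilder().build()) // populate lock fields as needed
+ *         .build();
+ * }</pre>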
+ * + * Protobuf type {@code google.cloud.bigquery.biglake.v1alpha1.CreateLockRequest} + */ +public final class CreateLockRequest extends com.google.protobuf.GeneratedMessageV3 + implements + // @@protoc_insertion_point(message_implements:google.cloud.bigquery.biglake.v1alpha1.CreateLockRequest) + CreateLockRequestOrBuilder { + private static final long serialVersionUID = 0L; + // Use CreateLockRequest.newBuilder() to construct. + private CreateLockRequest(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + + private CreateLockRequest() { + parent_ = ""; + } + + @java.lang.Override + @SuppressWarnings({"unused"}) + protected java.lang.Object newInstance(UnusedPrivateParameter unused) { + return new CreateLockRequest(); + } + + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet getUnknownFields() { + return this.unknownFields; + } + + public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { + return com.google.cloud.bigquery.biglake.v1alpha1.MetastoreProto + .internal_static_google_cloud_bigquery_biglake_v1alpha1_CreateLockRequest_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return com.google.cloud.bigquery.biglake.v1alpha1.MetastoreProto + .internal_static_google_cloud_bigquery_biglake_v1alpha1_CreateLockRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + com.google.cloud.bigquery.biglake.v1alpha1.CreateLockRequest.class, + com.google.cloud.bigquery.biglake.v1alpha1.CreateLockRequest.Builder.class); + } + + public static final int PARENT_FIELD_NUMBER = 1; + + @SuppressWarnings("serial") + private volatile java.lang.Object parent_ = ""; + /** + * + * + *
+   * Required. The parent resource where this lock will be created.
+   * Format:
+   * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}
+   * 
+ * + * + * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @return The parent. + */ + @java.lang.Override + public java.lang.String getParent() { + java.lang.Object ref = parent_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + parent_ = s; + return s; + } + } + /** + * + * + *
+   * Required. The parent resource where this lock will be created.
+   * Format:
+   * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}
+   * 
+ * + * + * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @return The bytes for parent. + */ + @java.lang.Override + public com.google.protobuf.ByteString getParentBytes() { + java.lang.Object ref = parent_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); + parent_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int LOCK_FIELD_NUMBER = 2; + private com.google.cloud.bigquery.biglake.v1alpha1.Lock lock_; + /** + * + * + *
+   * Required. The lock to create. The `name` field does not need to be provided
+   * for the lock creation.
+   * 
+ * + * + * .google.cloud.bigquery.biglake.v1alpha1.Lock lock = 2 [(.google.api.field_behavior) = REQUIRED]; + * + * + * @return Whether the lock field is set. + */ + @java.lang.Override + public boolean hasLock() { + return lock_ != null; + } + /** + * + * + *
+   * Required. The lock to create. The `name` field does not need to be provided
+   * for the lock creation.
+   * 
+ * + * + * .google.cloud.bigquery.biglake.v1alpha1.Lock lock = 2 [(.google.api.field_behavior) = REQUIRED]; + * + * + * @return The lock. + */ + @java.lang.Override + public com.google.cloud.bigquery.biglake.v1alpha1.Lock getLock() { + return lock_ == null + ? com.google.cloud.bigquery.biglake.v1alpha1.Lock.getDefaultInstance() + : lock_; + } + /** + * + * + *
+   * Required. The lock to create. The `name` field does not need to be provided
+   * for the lock creation.
+   * 
+ * + * + * .google.cloud.bigquery.biglake.v1alpha1.Lock lock = 2 [(.google.api.field_behavior) = REQUIRED]; + * + */ + @java.lang.Override + public com.google.cloud.bigquery.biglake.v1alpha1.LockOrBuilder getLockOrBuilder() { + return lock_ == null + ? com.google.cloud.bigquery.biglake.v1alpha1.Lock.getDefaultInstance() + : lock_; + } + + private byte memoizedIsInitialized = -1; + + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_); + } + if (lock_ != null) { + output.writeMessage(2, getLock()); + } + getUnknownFields().writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_); + } + if (lock_ != null) { + size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getLock()); + } + size += getUnknownFields().getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof com.google.cloud.bigquery.biglake.v1alpha1.CreateLockRequest)) { + return super.equals(obj); + } + com.google.cloud.bigquery.biglake.v1alpha1.CreateLockRequest other = + (com.google.cloud.bigquery.biglake.v1alpha1.CreateLockRequest) obj; + + if (!getParent().equals(other.getParent())) return false; + if (hasLock() != other.hasLock()) return false; + if (hasLock()) { + if (!getLock().equals(other.getLock())) return false; + } + if (!getUnknownFields().equals(other.getUnknownFields())) return false; + return true; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + hash = (37 * hash) + PARENT_FIELD_NUMBER; + hash = (53 * hash) + getParent().hashCode(); + if (hasLock()) { + hash = (37 * hash) + LOCK_FIELD_NUMBER; + hash = (53 * hash) + getLock().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.CreateLockRequest parseFrom( + java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.CreateLockRequest parseFrom( + java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.CreateLockRequest parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.CreateLockRequest parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite 
extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.CreateLockRequest parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.CreateLockRequest parseFrom( + byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.CreateLockRequest parseFrom( + java.io.InputStream input) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.CreateLockRequest parseFrom( + java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException( + PARSER, input, extensionRegistry); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.CreateLockRequest parseDelimitedFrom( + java.io.InputStream input) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.CreateLockRequest parseDelimitedFrom( + java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( + PARSER, input, extensionRegistry); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.CreateLockRequest parseFrom( + com.google.protobuf.CodedInputStream input) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.CreateLockRequest parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException( + PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { + return newBuilder(); + } + + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + + public static Builder newBuilder( + com.google.cloud.bigquery.biglake.v1alpha1.CreateLockRequest prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * + * + *
+   * Request message for the CreateLock method.
+   * 
+ * + * Protobuf type {@code google.cloud.bigquery.biglake.v1alpha1.CreateLockRequest} + */ + public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder + implements + // @@protoc_insertion_point(builder_implements:google.cloud.bigquery.biglake.v1alpha1.CreateLockRequest) + com.google.cloud.bigquery.biglake.v1alpha1.CreateLockRequestOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { + return com.google.cloud.bigquery.biglake.v1alpha1.MetastoreProto + .internal_static_google_cloud_bigquery_biglake_v1alpha1_CreateLockRequest_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return com.google.cloud.bigquery.biglake.v1alpha1.MetastoreProto + .internal_static_google_cloud_bigquery_biglake_v1alpha1_CreateLockRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + com.google.cloud.bigquery.biglake.v1alpha1.CreateLockRequest.class, + com.google.cloud.bigquery.biglake.v1alpha1.CreateLockRequest.Builder.class); + } + + // Construct using com.google.cloud.bigquery.biglake.v1alpha1.CreateLockRequest.newBuilder() + private Builder() {} + + private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + } + + @java.lang.Override + public Builder clear() { + super.clear(); + bitField0_ = 0; + parent_ = ""; + lock_ = null; + if (lockBuilder_ != null) { + lockBuilder_.dispose(); + lockBuilder_ = null; + } + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { + return com.google.cloud.bigquery.biglake.v1alpha1.MetastoreProto + .internal_static_google_cloud_bigquery_biglake_v1alpha1_CreateLockRequest_descriptor; + } + + @java.lang.Override + public com.google.cloud.bigquery.biglake.v1alpha1.CreateLockRequest + getDefaultInstanceForType() { + return com.google.cloud.bigquery.biglake.v1alpha1.CreateLockRequest.getDefaultInstance(); + } + + @java.lang.Override + public com.google.cloud.bigquery.biglake.v1alpha1.CreateLockRequest build() { + com.google.cloud.bigquery.biglake.v1alpha1.CreateLockRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public com.google.cloud.bigquery.biglake.v1alpha1.CreateLockRequest buildPartial() { + com.google.cloud.bigquery.biglake.v1alpha1.CreateLockRequest result = + new com.google.cloud.bigquery.biglake.v1alpha1.CreateLockRequest(this); + if (bitField0_ != 0) { + buildPartial0(result); + } + onBuilt(); + return result; + } + + private void buildPartial0( + com.google.cloud.bigquery.biglake.v1alpha1.CreateLockRequest result) { + int from_bitField0_ = bitField0_; + if (((from_bitField0_ & 0x00000001) != 0)) { + result.parent_ = parent_; + } + if (((from_bitField0_ & 0x00000002) != 0)) { + result.lock_ = lockBuilder_ == null ? 
lock_ : lockBuilder_.build(); + } + } + + @java.lang.Override + public Builder clone() { + return super.clone(); + } + + @java.lang.Override + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { + return super.setField(field, value); + } + + @java.lang.Override + public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { + return super.clearField(field); + } + + @java.lang.Override + public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return super.clearOneof(oneof); + } + + @java.lang.Override + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { + return super.setRepeatedField(field, index, value); + } + + @java.lang.Override + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { + return super.addRepeatedField(field, value); + } + + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof com.google.cloud.bigquery.biglake.v1alpha1.CreateLockRequest) { + return mergeFrom((com.google.cloud.bigquery.biglake.v1alpha1.CreateLockRequest) other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(com.google.cloud.bigquery.biglake.v1alpha1.CreateLockRequest other) { + if (other + == com.google.cloud.bigquery.biglake.v1alpha1.CreateLockRequest.getDefaultInstance()) + return this; + if (!other.getParent().isEmpty()) { + parent_ = other.parent_; + bitField0_ |= 0x00000001; + onChanged(); + } + if (other.hasLock()) { + mergeLock(other.getLock()); + } + this.mergeUnknownFields(other.getUnknownFields()); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 10: + { + parent_ = input.readStringRequireUtf8(); + bitField0_ |= 0x00000001; + break; + } // case 10 + case 18: + { + input.readMessage(getLockFieldBuilder().getBuilder(), extensionRegistry); + bitField0_ |= 0x00000002; + break; + } // case 18 + default: + { + if (!super.parseUnknownField(input, extensionRegistry, tag)) { + done = true; // was an endgroup tag + } + break; + } // default: + } // switch (tag) + } // while (!done) + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.unwrapIOException(); + } finally { + onChanged(); + } // finally + return this; + } + + private int bitField0_; + + private java.lang.Object parent_ = ""; + /** + * + * + *
+     * Required. The parent resource where this lock will be created.
+     * Format:
+     * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}
+     * 
+ * + * + * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @return The parent. + */ + public java.lang.String getParent() { + java.lang.Object ref = parent_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + parent_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + * + * + *
+     * Required. The parent resource where this lock will be created.
+     * Format:
+     * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}
+     * 
+ * + * + * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @return The bytes for parent. + */ + public com.google.protobuf.ByteString getParentBytes() { + java.lang.Object ref = parent_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); + parent_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * + * + *
+     * Required. The parent resource where this lock will be created.
+     * Format:
+     * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}
+     * 
+ * + * + * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @param value The parent to set. + * @return This builder for chaining. + */ + public Builder setParent(java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + parent_ = value; + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + /** + * + * + *
+     * Required. The parent resource where this lock will be created.
+     * Format:
+     * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}
+     * 
+ * + * + * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @return This builder for chaining. + */ + public Builder clearParent() { + parent_ = getDefaultInstance().getParent(); + bitField0_ = (bitField0_ & ~0x00000001); + onChanged(); + return this; + } + /** + * + * + *
+     * Required. The parent resource where this lock will be created.
+     * Format:
+     * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}
+     * 
+ * + * + * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @param value The bytes for parent to set. + * @return This builder for chaining. + */ + public Builder setParentBytes(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + parent_ = value; + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + + private com.google.cloud.bigquery.biglake.v1alpha1.Lock lock_; + private com.google.protobuf.SingleFieldBuilderV3< + com.google.cloud.bigquery.biglake.v1alpha1.Lock, + com.google.cloud.bigquery.biglake.v1alpha1.Lock.Builder, + com.google.cloud.bigquery.biglake.v1alpha1.LockOrBuilder> + lockBuilder_; + /** + * + * + *
+     * Required. The lock to create. The `name` field does not need to be provided
+     * for the lock creation.
+     * 
+ * + * + * .google.cloud.bigquery.biglake.v1alpha1.Lock lock = 2 [(.google.api.field_behavior) = REQUIRED]; + * + * + * @return Whether the lock field is set. + */ + public boolean hasLock() { + return ((bitField0_ & 0x00000002) != 0); + } + /** + * + * + *
+     * Required. The lock to create. The `name` field does not need to be provided
+     * for the lock creation.
+     * 
+ * + * + * .google.cloud.bigquery.biglake.v1alpha1.Lock lock = 2 [(.google.api.field_behavior) = REQUIRED]; + * + * + * @return The lock. + */ + public com.google.cloud.bigquery.biglake.v1alpha1.Lock getLock() { + if (lockBuilder_ == null) { + return lock_ == null + ? com.google.cloud.bigquery.biglake.v1alpha1.Lock.getDefaultInstance() + : lock_; + } else { + return lockBuilder_.getMessage(); + } + } + /** + * + * + *
+     * Required. The lock to create. The `name` field does not need to be provided
+     * for the lock creation.
+     * 
+ * + * + * .google.cloud.bigquery.biglake.v1alpha1.Lock lock = 2 [(.google.api.field_behavior) = REQUIRED]; + * + */ + public Builder setLock(com.google.cloud.bigquery.biglake.v1alpha1.Lock value) { + if (lockBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + lock_ = value; + } else { + lockBuilder_.setMessage(value); + } + bitField0_ |= 0x00000002; + onChanged(); + return this; + } + /** + * + * + *
+     * Required. The lock to create. The `name` field does not need to be provided
+     * for the lock creation.
+     * 
+ * + * + * .google.cloud.bigquery.biglake.v1alpha1.Lock lock = 2 [(.google.api.field_behavior) = REQUIRED]; + * + */ + public Builder setLock( + com.google.cloud.bigquery.biglake.v1alpha1.Lock.Builder builderForValue) { + if (lockBuilder_ == null) { + lock_ = builderForValue.build(); + } else { + lockBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000002; + onChanged(); + return this; + } + /** + * + * + *
+     * Required. The lock to create. The `name` field does not need to be provided
+     * for the lock creation.
+     * 
+ * + * + * .google.cloud.bigquery.biglake.v1alpha1.Lock lock = 2 [(.google.api.field_behavior) = REQUIRED]; + * + */ + public Builder mergeLock(com.google.cloud.bigquery.biglake.v1alpha1.Lock value) { + if (lockBuilder_ == null) { + if (((bitField0_ & 0x00000002) != 0) + && lock_ != null + && lock_ != com.google.cloud.bigquery.biglake.v1alpha1.Lock.getDefaultInstance()) { + getLockBuilder().mergeFrom(value); + } else { + lock_ = value; + } + } else { + lockBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000002; + onChanged(); + return this; + } + /** + * + * + *
+     * Required. The lock to create. The `name` field does not need to be provided
+     * for the lock creation.
+     * 
+ * + * + * .google.cloud.bigquery.biglake.v1alpha1.Lock lock = 2 [(.google.api.field_behavior) = REQUIRED]; + * + */ + public Builder clearLock() { + bitField0_ = (bitField0_ & ~0x00000002); + lock_ = null; + if (lockBuilder_ != null) { + lockBuilder_.dispose(); + lockBuilder_ = null; + } + onChanged(); + return this; + } + /** + * + * + *
+     * Required. The lock to create. The `name` field does not need to be provided
+     * for the lock creation.
+     * 
+ * + * + * .google.cloud.bigquery.biglake.v1alpha1.Lock lock = 2 [(.google.api.field_behavior) = REQUIRED]; + * + */ + public com.google.cloud.bigquery.biglake.v1alpha1.Lock.Builder getLockBuilder() { + bitField0_ |= 0x00000002; + onChanged(); + return getLockFieldBuilder().getBuilder(); + } + /** + * + * + *
+     * Required. The lock to create. The `name` field does not need to be provided
+     * for the lock creation.
+     * 
+ * + * + * .google.cloud.bigquery.biglake.v1alpha1.Lock lock = 2 [(.google.api.field_behavior) = REQUIRED]; + * + */ + public com.google.cloud.bigquery.biglake.v1alpha1.LockOrBuilder getLockOrBuilder() { + if (lockBuilder_ != null) { + return lockBuilder_.getMessageOrBuilder(); + } else { + return lock_ == null + ? com.google.cloud.bigquery.biglake.v1alpha1.Lock.getDefaultInstance() + : lock_; + } + } + /** + * + * + *
+     * Required. The lock to create. The `name` field does not need to be provided
+     * for the lock creation.
+     * 
+ * + * + * .google.cloud.bigquery.biglake.v1alpha1.Lock lock = 2 [(.google.api.field_behavior) = REQUIRED]; + * + */ + private com.google.protobuf.SingleFieldBuilderV3< + com.google.cloud.bigquery.biglake.v1alpha1.Lock, + com.google.cloud.bigquery.biglake.v1alpha1.Lock.Builder, + com.google.cloud.bigquery.biglake.v1alpha1.LockOrBuilder> + getLockFieldBuilder() { + if (lockBuilder_ == null) { + lockBuilder_ = + new com.google.protobuf.SingleFieldBuilderV3< + com.google.cloud.bigquery.biglake.v1alpha1.Lock, + com.google.cloud.bigquery.biglake.v1alpha1.Lock.Builder, + com.google.cloud.bigquery.biglake.v1alpha1.LockOrBuilder>( + getLock(), getParentForChildren(), isClean()); + lock_ = null; + } + return lockBuilder_; + } + + @java.lang.Override + public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + // @@protoc_insertion_point(builder_scope:google.cloud.bigquery.biglake.v1alpha1.CreateLockRequest) + } + + // @@protoc_insertion_point(class_scope:google.cloud.bigquery.biglake.v1alpha1.CreateLockRequest) + private static final com.google.cloud.bigquery.biglake.v1alpha1.CreateLockRequest + DEFAULT_INSTANCE; + + static { + DEFAULT_INSTANCE = new com.google.cloud.bigquery.biglake.v1alpha1.CreateLockRequest(); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.CreateLockRequest getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + @java.lang.Override + public CreateLockRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + Builder builder = newBuilder(); + try { + builder.mergeFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(builder.buildPartial()); + } catch (com.google.protobuf.UninitializedMessageException e) { + throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException(e) + .setUnfinishedMessage(builder.buildPartial()); + } + return builder.buildPartial(); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public com.google.cloud.bigquery.biglake.v1alpha1.CreateLockRequest getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } +} diff --git a/java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/CreateLockRequestOrBuilder.java b/java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/CreateLockRequestOrBuilder.java new file mode 100644 index 000000000000..475fb83fd471 --- /dev/null +++ b/java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/CreateLockRequestOrBuilder.java @@ -0,0 +1,102 @@ +/* + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in 
compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: google/cloud/bigquery/biglake/v1alpha1/metastore.proto + +package com.google.cloud.bigquery.biglake.v1alpha1; + +public interface CreateLockRequestOrBuilder + extends + // @@protoc_insertion_point(interface_extends:google.cloud.bigquery.biglake.v1alpha1.CreateLockRequest) + com.google.protobuf.MessageOrBuilder { + + /** + * + * + *
+   * Required. The parent resource where this lock will be created.
+   * Format:
+   * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}
+   * 
+ * + * + * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @return The parent. + */ + java.lang.String getParent(); + /** + * + * + *
+   * Required. The parent resource where this lock will be created.
+   * Format:
+   * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}
+   * 
+ * + * + * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @return The bytes for parent. + */ + com.google.protobuf.ByteString getParentBytes(); + + /** + * + * + *
+   * Required. The lock to create. The `name` field does not need to be provided
+   * for the lock creation.
+   * 
+ * + * + * .google.cloud.bigquery.biglake.v1alpha1.Lock lock = 2 [(.google.api.field_behavior) = REQUIRED]; + * + * + * @return Whether the lock field is set. + */ + boolean hasLock(); + /** + * + * + *
+   * Required. The lock to create. The `name` field does not need to be provided
+   * for the lock creation.
+   * 
+ * + * + * .google.cloud.bigquery.biglake.v1alpha1.Lock lock = 2 [(.google.api.field_behavior) = REQUIRED]; + * + * + * @return The lock. + */ + com.google.cloud.bigquery.biglake.v1alpha1.Lock getLock(); + /** + * + * + *
+   * Required. The lock to create. The `name` field does not need to be provided
+   * for the lock creation.
+   * 
+ * + * + * .google.cloud.bigquery.biglake.v1alpha1.Lock lock = 2 [(.google.api.field_behavior) = REQUIRED]; + * + */ + com.google.cloud.bigquery.biglake.v1alpha1.LockOrBuilder getLockOrBuilder(); +} diff --git a/java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/CreateTableRequest.java b/java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/CreateTableRequest.java new file mode 100644 index 000000000000..adf2b727876b --- /dev/null +++ b/java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/CreateTableRequest.java @@ -0,0 +1,1152 @@ +/* + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: google/cloud/bigquery/biglake/v1alpha1/metastore.proto + +package com.google.cloud.bigquery.biglake.v1alpha1; + +/** + * + * + *
+ * Request message for the CreateTable method.
+ * 
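+ * <p>A minimal builder sketch (illustrative only; the parent and table ID below are placeholders
+ * following the documented resource-name format):
+ * <pre>{@code
+ * CreateTableRequest request =
+ *     CreateTableRequest.newBuilder()
+ *         .setParent(
+ *             "projects/my-project/locations/us/catalogs/my-catalog/databases/my-database")
+ *         .setTable(Table.newBuilder().build()) // populate table options as needed
+ *         .setTableId("my_table")
+ *         .build();
+ * }</pre>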
+ * + * Protobuf type {@code google.cloud.bigquery.biglake.v1alpha1.CreateTableRequest} + */ +public final class CreateTableRequest extends com.google.protobuf.GeneratedMessageV3 + implements + // @@protoc_insertion_point(message_implements:google.cloud.bigquery.biglake.v1alpha1.CreateTableRequest) + CreateTableRequestOrBuilder { + private static final long serialVersionUID = 0L; + // Use CreateTableRequest.newBuilder() to construct. + private CreateTableRequest(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + + private CreateTableRequest() { + parent_ = ""; + tableId_ = ""; + } + + @java.lang.Override + @SuppressWarnings({"unused"}) + protected java.lang.Object newInstance(UnusedPrivateParameter unused) { + return new CreateTableRequest(); + } + + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet getUnknownFields() { + return this.unknownFields; + } + + public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { + return com.google.cloud.bigquery.biglake.v1alpha1.MetastoreProto + .internal_static_google_cloud_bigquery_biglake_v1alpha1_CreateTableRequest_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return com.google.cloud.bigquery.biglake.v1alpha1.MetastoreProto + .internal_static_google_cloud_bigquery_biglake_v1alpha1_CreateTableRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + com.google.cloud.bigquery.biglake.v1alpha1.CreateTableRequest.class, + com.google.cloud.bigquery.biglake.v1alpha1.CreateTableRequest.Builder.class); + } + + public static final int PARENT_FIELD_NUMBER = 1; + + @SuppressWarnings("serial") + private volatile java.lang.Object parent_ = ""; + /** + * + * + *
+   * Required. The parent resource where this table will be created.
+   * Format:
+   * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}
+   * 
+ * + * + * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @return The parent. + */ + @java.lang.Override + public java.lang.String getParent() { + java.lang.Object ref = parent_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + parent_ = s; + return s; + } + } + /** + * + * + *
+   * Required. The parent resource where this table will be created.
+   * Format:
+   * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}
+   * 
+ * + * + * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @return The bytes for parent. + */ + @java.lang.Override + public com.google.protobuf.ByteString getParentBytes() { + java.lang.Object ref = parent_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); + parent_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int TABLE_FIELD_NUMBER = 2; + private com.google.cloud.bigquery.biglake.v1alpha1.Table table_; + /** + * + * + *
+   * Required. The table to create. The `name` field does not need to be
+   * provided for the table creation.
+   * 
+ * + * + * .google.cloud.bigquery.biglake.v1alpha1.Table table = 2 [(.google.api.field_behavior) = REQUIRED]; + * + * + * @return Whether the table field is set. + */ + @java.lang.Override + public boolean hasTable() { + return table_ != null; + } + /** + * + * + *
+   * Required. The table to create. The `name` field does not need to be
+   * provided for the table creation.
+   * 
+ * + * + * .google.cloud.bigquery.biglake.v1alpha1.Table table = 2 [(.google.api.field_behavior) = REQUIRED]; + * + * + * @return The table. + */ + @java.lang.Override + public com.google.cloud.bigquery.biglake.v1alpha1.Table getTable() { + return table_ == null + ? com.google.cloud.bigquery.biglake.v1alpha1.Table.getDefaultInstance() + : table_; + } + /** + * + * + *
+   * Required. The table to create. The `name` field does not need to be
+   * provided for the table creation.
+   * 
+ * + * + * .google.cloud.bigquery.biglake.v1alpha1.Table table = 2 [(.google.api.field_behavior) = REQUIRED]; + * + */ + @java.lang.Override + public com.google.cloud.bigquery.biglake.v1alpha1.TableOrBuilder getTableOrBuilder() { + return table_ == null + ? com.google.cloud.bigquery.biglake.v1alpha1.Table.getDefaultInstance() + : table_; + } + + public static final int TABLE_ID_FIELD_NUMBER = 3; + + @SuppressWarnings("serial") + private volatile java.lang.Object tableId_ = ""; + /** + * + * + *
+   * Required. The ID to use for the table, which will become the final
+   * component of the table's resource name.
+   * 
+ * + * string table_id = 3 [(.google.api.field_behavior) = REQUIRED]; + * + * @return The tableId. + */ + @java.lang.Override + public java.lang.String getTableId() { + java.lang.Object ref = tableId_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + tableId_ = s; + return s; + } + } + /** + * + * + *
+   * Required. The ID to use for the table, which will become the final
+   * component of the table's resource name.
+   * 
+ * + * string table_id = 3 [(.google.api.field_behavior) = REQUIRED]; + * + * @return The bytes for tableId. + */ + @java.lang.Override + public com.google.protobuf.ByteString getTableIdBytes() { + java.lang.Object ref = tableId_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); + tableId_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + private byte memoizedIsInitialized = -1; + + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_); + } + if (table_ != null) { + output.writeMessage(2, getTable()); + } + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(tableId_)) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 3, tableId_); + } + getUnknownFields().writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_); + } + if (table_ != null) { + size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getTable()); + } + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(tableId_)) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, tableId_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof com.google.cloud.bigquery.biglake.v1alpha1.CreateTableRequest)) { + return super.equals(obj); + } + com.google.cloud.bigquery.biglake.v1alpha1.CreateTableRequest other = + (com.google.cloud.bigquery.biglake.v1alpha1.CreateTableRequest) obj; + + if (!getParent().equals(other.getParent())) return false; + if (hasTable() != other.hasTable()) return false; + if (hasTable()) { + if (!getTable().equals(other.getTable())) return false; + } + if (!getTableId().equals(other.getTableId())) return false; + if (!getUnknownFields().equals(other.getUnknownFields())) return false; + return true; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + hash = (37 * hash) + PARENT_FIELD_NUMBER; + hash = (53 * hash) + getParent().hashCode(); + if (hasTable()) { + hash = (37 * hash) + TABLE_FIELD_NUMBER; + hash = (53 * hash) + getTable().hashCode(); + } + hash = (37 * hash) + TABLE_ID_FIELD_NUMBER; + hash = (53 * hash) + getTableId().hashCode(); + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.CreateTableRequest parseFrom( + java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.CreateTableRequest 
parseFrom( + java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.CreateTableRequest parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.CreateTableRequest parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.CreateTableRequest parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.CreateTableRequest parseFrom( + byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.CreateTableRequest parseFrom( + java.io.InputStream input) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.CreateTableRequest parseFrom( + java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException( + PARSER, input, extensionRegistry); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.CreateTableRequest parseDelimitedFrom( + java.io.InputStream input) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.CreateTableRequest parseDelimitedFrom( + java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( + PARSER, input, extensionRegistry); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.CreateTableRequest parseFrom( + com.google.protobuf.CodedInputStream input) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.CreateTableRequest parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException( + PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { + return newBuilder(); + } + + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + + public static Builder newBuilder( + com.google.cloud.bigquery.biglake.v1alpha1.CreateTableRequest prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * + * + *
+   * Request message for the CreateTable method.
+   * 
+ * + * Protobuf type {@code google.cloud.bigquery.biglake.v1alpha1.CreateTableRequest} + */ + public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder + implements + // @@protoc_insertion_point(builder_implements:google.cloud.bigquery.biglake.v1alpha1.CreateTableRequest) + com.google.cloud.bigquery.biglake.v1alpha1.CreateTableRequestOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { + return com.google.cloud.bigquery.biglake.v1alpha1.MetastoreProto + .internal_static_google_cloud_bigquery_biglake_v1alpha1_CreateTableRequest_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return com.google.cloud.bigquery.biglake.v1alpha1.MetastoreProto + .internal_static_google_cloud_bigquery_biglake_v1alpha1_CreateTableRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + com.google.cloud.bigquery.biglake.v1alpha1.CreateTableRequest.class, + com.google.cloud.bigquery.biglake.v1alpha1.CreateTableRequest.Builder.class); + } + + // Construct using com.google.cloud.bigquery.biglake.v1alpha1.CreateTableRequest.newBuilder() + private Builder() {} + + private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + } + + @java.lang.Override + public Builder clear() { + super.clear(); + bitField0_ = 0; + parent_ = ""; + table_ = null; + if (tableBuilder_ != null) { + tableBuilder_.dispose(); + tableBuilder_ = null; + } + tableId_ = ""; + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { + return com.google.cloud.bigquery.biglake.v1alpha1.MetastoreProto + .internal_static_google_cloud_bigquery_biglake_v1alpha1_CreateTableRequest_descriptor; + } + + @java.lang.Override + public com.google.cloud.bigquery.biglake.v1alpha1.CreateTableRequest + getDefaultInstanceForType() { + return com.google.cloud.bigquery.biglake.v1alpha1.CreateTableRequest.getDefaultInstance(); + } + + @java.lang.Override + public com.google.cloud.bigquery.biglake.v1alpha1.CreateTableRequest build() { + com.google.cloud.bigquery.biglake.v1alpha1.CreateTableRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public com.google.cloud.bigquery.biglake.v1alpha1.CreateTableRequest buildPartial() { + com.google.cloud.bigquery.biglake.v1alpha1.CreateTableRequest result = + new com.google.cloud.bigquery.biglake.v1alpha1.CreateTableRequest(this); + if (bitField0_ != 0) { + buildPartial0(result); + } + onBuilt(); + return result; + } + + private void buildPartial0( + com.google.cloud.bigquery.biglake.v1alpha1.CreateTableRequest result) { + int from_bitField0_ = bitField0_; + if (((from_bitField0_ & 0x00000001) != 0)) { + result.parent_ = parent_; + } + if (((from_bitField0_ & 0x00000002) != 0)) { + result.table_ = tableBuilder_ == null ? 
table_ : tableBuilder_.build(); + } + if (((from_bitField0_ & 0x00000004) != 0)) { + result.tableId_ = tableId_; + } + } + + @java.lang.Override + public Builder clone() { + return super.clone(); + } + + @java.lang.Override + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { + return super.setField(field, value); + } + + @java.lang.Override + public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { + return super.clearField(field); + } + + @java.lang.Override + public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return super.clearOneof(oneof); + } + + @java.lang.Override + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { + return super.setRepeatedField(field, index, value); + } + + @java.lang.Override + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { + return super.addRepeatedField(field, value); + } + + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof com.google.cloud.bigquery.biglake.v1alpha1.CreateTableRequest) { + return mergeFrom((com.google.cloud.bigquery.biglake.v1alpha1.CreateTableRequest) other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(com.google.cloud.bigquery.biglake.v1alpha1.CreateTableRequest other) { + if (other + == com.google.cloud.bigquery.biglake.v1alpha1.CreateTableRequest.getDefaultInstance()) + return this; + if (!other.getParent().isEmpty()) { + parent_ = other.parent_; + bitField0_ |= 0x00000001; + onChanged(); + } + if (other.hasTable()) { + mergeTable(other.getTable()); + } + if (!other.getTableId().isEmpty()) { + tableId_ = other.tableId_; + bitField0_ |= 0x00000004; + onChanged(); + } + this.mergeUnknownFields(other.getUnknownFields()); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 10: + { + parent_ = input.readStringRequireUtf8(); + bitField0_ |= 0x00000001; + break; + } // case 10 + case 18: + { + input.readMessage(getTableFieldBuilder().getBuilder(), extensionRegistry); + bitField0_ |= 0x00000002; + break; + } // case 18 + case 26: + { + tableId_ = input.readStringRequireUtf8(); + bitField0_ |= 0x00000004; + break; + } // case 26 + default: + { + if (!super.parseUnknownField(input, extensionRegistry, tag)) { + done = true; // was an endgroup tag + } + break; + } // default: + } // switch (tag) + } // while (!done) + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.unwrapIOException(); + } finally { + onChanged(); + } // finally + return this; + } + + private int bitField0_; + + private java.lang.Object parent_ = ""; + /** + * + * + *
+     * Required. The parent resource where this table will be created.
+     * Format:
+     * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}
+     * 
+ * + * + * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @return The parent. + */ + public java.lang.String getParent() { + java.lang.Object ref = parent_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + parent_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + * + * + *
+     * Required. The parent resource where this table will be created.
+     * Format:
+     * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}
+     * 
+ * + * + * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @return The bytes for parent. + */ + public com.google.protobuf.ByteString getParentBytes() { + java.lang.Object ref = parent_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); + parent_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * + * + *
+     * Required. The parent resource where this table will be created.
+     * Format:
+     * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}
+     * 
+ * + * + * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @param value The parent to set. + * @return This builder for chaining. + */ + public Builder setParent(java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + parent_ = value; + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + /** + * + * + *
+     * Required. The parent resource where this table will be created.
+     * Format:
+     * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}
+     * 
+ * + * + * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @return This builder for chaining. + */ + public Builder clearParent() { + parent_ = getDefaultInstance().getParent(); + bitField0_ = (bitField0_ & ~0x00000001); + onChanged(); + return this; + } + /** + * + * + *
+     * Required. The parent resource where this table will be created.
+     * Format:
+     * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}
+     * 
+ * + * + * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @param value The bytes for parent to set. + * @return This builder for chaining. + */ + public Builder setParentBytes(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + parent_ = value; + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + + private com.google.cloud.bigquery.biglake.v1alpha1.Table table_; + private com.google.protobuf.SingleFieldBuilderV3< + com.google.cloud.bigquery.biglake.v1alpha1.Table, + com.google.cloud.bigquery.biglake.v1alpha1.Table.Builder, + com.google.cloud.bigquery.biglake.v1alpha1.TableOrBuilder> + tableBuilder_; + /** + * + * + *
+     * Required. The table to create. The `name` field does not need to be
+     * provided for the table creation.
+     * 
+ * + * + * .google.cloud.bigquery.biglake.v1alpha1.Table table = 2 [(.google.api.field_behavior) = REQUIRED]; + * + * + * @return Whether the table field is set. + */ + public boolean hasTable() { + return ((bitField0_ & 0x00000002) != 0); + } + /** + * + * + *
+     * Required. The table to create. The `name` field does not need to be
+     * provided for the table creation.
+     * 
+ * + * + * .google.cloud.bigquery.biglake.v1alpha1.Table table = 2 [(.google.api.field_behavior) = REQUIRED]; + * + * + * @return The table. + */ + public com.google.cloud.bigquery.biglake.v1alpha1.Table getTable() { + if (tableBuilder_ == null) { + return table_ == null + ? com.google.cloud.bigquery.biglake.v1alpha1.Table.getDefaultInstance() + : table_; + } else { + return tableBuilder_.getMessage(); + } + } + /** + * + * + *
+     * Required. The table to create. The `name` field does not need to be
+     * provided for the table creation.
+     * 
+ * + * + * .google.cloud.bigquery.biglake.v1alpha1.Table table = 2 [(.google.api.field_behavior) = REQUIRED]; + * + */ + public Builder setTable(com.google.cloud.bigquery.biglake.v1alpha1.Table value) { + if (tableBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + table_ = value; + } else { + tableBuilder_.setMessage(value); + } + bitField0_ |= 0x00000002; + onChanged(); + return this; + } + /** + * + * + *
+     * Required. The table to create. The `name` field does not need to be
+     * provided for the table creation.
+     * 
+ * + * + * .google.cloud.bigquery.biglake.v1alpha1.Table table = 2 [(.google.api.field_behavior) = REQUIRED]; + * + */ + public Builder setTable( + com.google.cloud.bigquery.biglake.v1alpha1.Table.Builder builderForValue) { + if (tableBuilder_ == null) { + table_ = builderForValue.build(); + } else { + tableBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000002; + onChanged(); + return this; + } + /** + * + * + *
+     * Required. The table to create. The `name` field does not need to be
+     * provided for the table creation.
+     * 
+ * + * + * .google.cloud.bigquery.biglake.v1alpha1.Table table = 2 [(.google.api.field_behavior) = REQUIRED]; + * + */ + public Builder mergeTable(com.google.cloud.bigquery.biglake.v1alpha1.Table value) { + if (tableBuilder_ == null) { + if (((bitField0_ & 0x00000002) != 0) + && table_ != null + && table_ != com.google.cloud.bigquery.biglake.v1alpha1.Table.getDefaultInstance()) { + getTableBuilder().mergeFrom(value); + } else { + table_ = value; + } + } else { + tableBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000002; + onChanged(); + return this; + } + /** + * + * + *
+     * Required. The table to create. The `name` field does not need to be
+     * provided for the table creation.
+     * 
+ * + * + * .google.cloud.bigquery.biglake.v1alpha1.Table table = 2 [(.google.api.field_behavior) = REQUIRED]; + * + */ + public Builder clearTable() { + bitField0_ = (bitField0_ & ~0x00000002); + table_ = null; + if (tableBuilder_ != null) { + tableBuilder_.dispose(); + tableBuilder_ = null; + } + onChanged(); + return this; + } + /** + * + * + *
+     * Required. The table to create. The `name` field does not need to be
+     * provided for the table creation.
+     * 
+ * + * + * .google.cloud.bigquery.biglake.v1alpha1.Table table = 2 [(.google.api.field_behavior) = REQUIRED]; + * + */ + public com.google.cloud.bigquery.biglake.v1alpha1.Table.Builder getTableBuilder() { + bitField0_ |= 0x00000002; + onChanged(); + return getTableFieldBuilder().getBuilder(); + } + /** + * + * + *
+     * Required. The table to create. The `name` field does not need to be
+     * provided for the table creation.
+     * 
+ * + * + * .google.cloud.bigquery.biglake.v1alpha1.Table table = 2 [(.google.api.field_behavior) = REQUIRED]; + * + */ + public com.google.cloud.bigquery.biglake.v1alpha1.TableOrBuilder getTableOrBuilder() { + if (tableBuilder_ != null) { + return tableBuilder_.getMessageOrBuilder(); + } else { + return table_ == null + ? com.google.cloud.bigquery.biglake.v1alpha1.Table.getDefaultInstance() + : table_; + } + } + /** + * + * + *
+     * Required. The table to create. The `name` field does not need to be
+     * provided for the table creation.
+     * 
+ * + * + * .google.cloud.bigquery.biglake.v1alpha1.Table table = 2 [(.google.api.field_behavior) = REQUIRED]; + * + */ + private com.google.protobuf.SingleFieldBuilderV3< + com.google.cloud.bigquery.biglake.v1alpha1.Table, + com.google.cloud.bigquery.biglake.v1alpha1.Table.Builder, + com.google.cloud.bigquery.biglake.v1alpha1.TableOrBuilder> + getTableFieldBuilder() { + if (tableBuilder_ == null) { + tableBuilder_ = + new com.google.protobuf.SingleFieldBuilderV3< + com.google.cloud.bigquery.biglake.v1alpha1.Table, + com.google.cloud.bigquery.biglake.v1alpha1.Table.Builder, + com.google.cloud.bigquery.biglake.v1alpha1.TableOrBuilder>( + getTable(), getParentForChildren(), isClean()); + table_ = null; + } + return tableBuilder_; + } + + private java.lang.Object tableId_ = ""; + /** + * + * + *
+     * Required. The ID to use for the table, which will become the final
+     * component of the table's resource name.
+     * 
+ * + * string table_id = 3 [(.google.api.field_behavior) = REQUIRED]; + * + * @return The tableId. + */ + public java.lang.String getTableId() { + java.lang.Object ref = tableId_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + tableId_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + * + * + *
+     * Required. The ID to use for the table, which will become the final
+     * component of the table's resource name.
+     * 
+ * + * string table_id = 3 [(.google.api.field_behavior) = REQUIRED]; + * + * @return The bytes for tableId. + */ + public com.google.protobuf.ByteString getTableIdBytes() { + java.lang.Object ref = tableId_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); + tableId_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * + * + *
+     * Required. The ID to use for the table, which will become the final
+     * component of the table's resource name.
+     * 
+ * + * string table_id = 3 [(.google.api.field_behavior) = REQUIRED]; + * + * @param value The tableId to set. + * @return This builder for chaining. + */ + public Builder setTableId(java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + tableId_ = value; + bitField0_ |= 0x00000004; + onChanged(); + return this; + } + /** + * + * + *
+     * Required. The ID to use for the table, which will become the final
+     * component of the table's resource name.
+     * 
+ * + * string table_id = 3 [(.google.api.field_behavior) = REQUIRED]; + * + * @return This builder for chaining. + */ + public Builder clearTableId() { + tableId_ = getDefaultInstance().getTableId(); + bitField0_ = (bitField0_ & ~0x00000004); + onChanged(); + return this; + } + /** + * + * + *
+     * Required. The ID to use for the table, which will become the final
+     * component of the table's resource name.
+     * 
+ * + * string table_id = 3 [(.google.api.field_behavior) = REQUIRED]; + * + * @param value The bytes for tableId to set. + * @return This builder for chaining. + */ + public Builder setTableIdBytes(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + tableId_ = value; + bitField0_ |= 0x00000004; + onChanged(); + return this; + } + + @java.lang.Override + public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + // @@protoc_insertion_point(builder_scope:google.cloud.bigquery.biglake.v1alpha1.CreateTableRequest) + } + + // @@protoc_insertion_point(class_scope:google.cloud.bigquery.biglake.v1alpha1.CreateTableRequest) + private static final com.google.cloud.bigquery.biglake.v1alpha1.CreateTableRequest + DEFAULT_INSTANCE; + + static { + DEFAULT_INSTANCE = new com.google.cloud.bigquery.biglake.v1alpha1.CreateTableRequest(); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.CreateTableRequest getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + @java.lang.Override + public CreateTableRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + Builder builder = newBuilder(); + try { + builder.mergeFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(builder.buildPartial()); + } catch (com.google.protobuf.UninitializedMessageException e) { + throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException(e) + .setUnfinishedMessage(builder.buildPartial()); + } + return builder.buildPartial(); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public com.google.cloud.bigquery.biglake.v1alpha1.CreateTableRequest getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } +} diff --git a/java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/CreateTableRequestOrBuilder.java b/java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/CreateTableRequestOrBuilder.java new file mode 100644 index 000000000000..b18b95529a51 --- /dev/null +++ b/java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/CreateTableRequestOrBuilder.java @@ -0,0 +1,129 @@ +/* + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
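As a point of reference for readers of this patch, the following is a minimal, illustrative sketch (not part of the generated sources) of how a caller might assemble a CreateTableRequest with the builder methods shown above. The class name and the project, location, catalog, database, and table identifiers are hypothetical placeholders.

    import com.google.cloud.bigquery.biglake.v1alpha1.CreateTableRequest;
    import com.google.cloud.bigquery.biglake.v1alpha1.Table;

    public class CreateTableRequestSketch {
      public static void main(String[] args) {
        // The parent follows the documented format:
        // projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}
        String parent =
            "projects/example-project/locations/us/catalogs/example-catalog/databases/example-db";

        CreateTableRequest request =
            CreateTableRequest.newBuilder()
                .setParent(parent)
                // Per the field comment above, table.name does not need to be set on create.
                .setTable(Table.newBuilder().build())
                .setTableId("example-table") // becomes the final component of the table's resource name
                .build();

        System.out.println(request);
      }
    }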
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: google/cloud/bigquery/biglake/v1alpha1/metastore.proto + +package com.google.cloud.bigquery.biglake.v1alpha1; + +public interface CreateTableRequestOrBuilder + extends + // @@protoc_insertion_point(interface_extends:google.cloud.bigquery.biglake.v1alpha1.CreateTableRequest) + com.google.protobuf.MessageOrBuilder { + + /** + * + * + *
+   * Required. The parent resource where this table will be created.
+   * Format:
+   * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}
+   * 
+ * + * + * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @return The parent. + */ + java.lang.String getParent(); + /** + * + * + *
+   * Required. The parent resource where this table will be created.
+   * Format:
+   * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}
+   * 
+ * + * + * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @return The bytes for parent. + */ + com.google.protobuf.ByteString getParentBytes(); + + /** + * + * + *
+   * Required. The table to create. The `name` field does not need to be
+   * provided for the table creation.
+   * 
+ * + * + * .google.cloud.bigquery.biglake.v1alpha1.Table table = 2 [(.google.api.field_behavior) = REQUIRED]; + * + * + * @return Whether the table field is set. + */ + boolean hasTable(); + /** + * + * + *
+   * Required. The table to create. The `name` field does not need to be
+   * provided for the table creation.
+   * 
+ * + * + * .google.cloud.bigquery.biglake.v1alpha1.Table table = 2 [(.google.api.field_behavior) = REQUIRED]; + * + * + * @return The table. + */ + com.google.cloud.bigquery.biglake.v1alpha1.Table getTable(); + /** + * + * + *
+   * Required. The table to create. The `name` field does not need to be
+   * provided for the table creation.
+   * 
+ * + * + * .google.cloud.bigquery.biglake.v1alpha1.Table table = 2 [(.google.api.field_behavior) = REQUIRED]; + * + */ + com.google.cloud.bigquery.biglake.v1alpha1.TableOrBuilder getTableOrBuilder(); + + /** + * + * + *
+   * Required. The ID to use for the table, which will become the final
+   * component of the table's resource name.
+   * 
+ * + * string table_id = 3 [(.google.api.field_behavior) = REQUIRED]; + * + * @return The tableId. + */ + java.lang.String getTableId(); + /** + * + * + *
+   * Required. The ID to use for the table, which will become the final
+   * component of the table's resource name.
+   * 
+ * + * string table_id = 3 [(.google.api.field_behavior) = REQUIRED]; + * + * @return The bytes for tableId. + */ + com.google.protobuf.ByteString getTableIdBytes(); +} diff --git a/java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/Database.java b/java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/Database.java new file mode 100644 index 000000000000..112461372024 --- /dev/null +++ b/java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/Database.java @@ -0,0 +1,2480 @@ +/* + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: google/cloud/bigquery/biglake/v1alpha1/metastore.proto + +package com.google.cloud.bigquery.biglake.v1alpha1; + +/** + * + * + *
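For orientation, here is a small hypothetical sketch of consuming the read-only interface above. It relies on the standard protobuf convention, also visible elsewhere in this patch, that both the message and its Builder implement the generated *OrBuilder interface; the helper class and method names are invented for illustration.

    import com.google.cloud.bigquery.biglake.v1alpha1.CreateTableRequestOrBuilder;

    final class CreateTableRequestChecks {
      // Accepts either a CreateTableRequest or a CreateTableRequest.Builder,
      // since both expose the accessors declared by CreateTableRequestOrBuilder.
      static boolean looksComplete(CreateTableRequestOrBuilder request) {
        return !request.getParent().isEmpty()
            && request.hasTable()
            && !request.getTableId().isEmpty();
      }
    }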
+ * Database is the container of tables.
+ * 
+ * + * Protobuf type {@code google.cloud.bigquery.biglake.v1alpha1.Database} + */ +public final class Database extends com.google.protobuf.GeneratedMessageV3 + implements + // @@protoc_insertion_point(message_implements:google.cloud.bigquery.biglake.v1alpha1.Database) + DatabaseOrBuilder { + private static final long serialVersionUID = 0L; + // Use Database.newBuilder() to construct. + private Database(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + + private Database() { + name_ = ""; + type_ = 0; + } + + @java.lang.Override + @SuppressWarnings({"unused"}) + protected java.lang.Object newInstance(UnusedPrivateParameter unused) { + return new Database(); + } + + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet getUnknownFields() { + return this.unknownFields; + } + + public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { + return com.google.cloud.bigquery.biglake.v1alpha1.MetastoreProto + .internal_static_google_cloud_bigquery_biglake_v1alpha1_Database_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return com.google.cloud.bigquery.biglake.v1alpha1.MetastoreProto + .internal_static_google_cloud_bigquery_biglake_v1alpha1_Database_fieldAccessorTable + .ensureFieldAccessorsInitialized( + com.google.cloud.bigquery.biglake.v1alpha1.Database.class, + com.google.cloud.bigquery.biglake.v1alpha1.Database.Builder.class); + } + + /** + * + * + *
+   * The database type.
+   * 
+ * + * Protobuf enum {@code google.cloud.bigquery.biglake.v1alpha1.Database.Type} + */ + public enum Type implements com.google.protobuf.ProtocolMessageEnum { + /** + * + * + *
+     * The type is not specified.
+     * 
+ * + * TYPE_UNSPECIFIED = 0; + */ + TYPE_UNSPECIFIED(0), + /** + * + * + *
+     * Represents a database storing tables compatible with Hive Metastore
+     * tables.
+     * 
+ * + * HIVE = 1; + */ + HIVE(1), + UNRECOGNIZED(-1), + ; + + /** + * + * + *
+     * The type is not specified.
+     * 
+ * + * TYPE_UNSPECIFIED = 0; + */ + public static final int TYPE_UNSPECIFIED_VALUE = 0; + /** + * + * + *
+     * Represents a database storing tables compatible with Hive Metastore
+     * tables.
+     * 
+ * + * HIVE = 1; + */ + public static final int HIVE_VALUE = 1; + + public final int getNumber() { + if (this == UNRECOGNIZED) { + throw new java.lang.IllegalArgumentException( + "Can't get the number of an unknown enum value."); + } + return value; + } + + /** + * @param value The numeric wire value of the corresponding enum entry. + * @return The enum associated with the given numeric wire value. + * @deprecated Use {@link #forNumber(int)} instead. + */ + @java.lang.Deprecated + public static Type valueOf(int value) { + return forNumber(value); + } + + /** + * @param value The numeric wire value of the corresponding enum entry. + * @return The enum associated with the given numeric wire value. + */ + public static Type forNumber(int value) { + switch (value) { + case 0: + return TYPE_UNSPECIFIED; + case 1: + return HIVE; + default: + return null; + } + } + + public static com.google.protobuf.Internal.EnumLiteMap internalGetValueMap() { + return internalValueMap; + } + + private static final com.google.protobuf.Internal.EnumLiteMap internalValueMap = + new com.google.protobuf.Internal.EnumLiteMap() { + public Type findValueByNumber(int number) { + return Type.forNumber(number); + } + }; + + public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() { + if (this == UNRECOGNIZED) { + throw new java.lang.IllegalStateException( + "Can't get the descriptor of an unrecognized enum value."); + } + return getDescriptor().getValues().get(ordinal()); + } + + public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() { + return getDescriptor(); + } + + public static final com.google.protobuf.Descriptors.EnumDescriptor getDescriptor() { + return com.google.cloud.bigquery.biglake.v1alpha1.Database.getDescriptor() + .getEnumTypes() + .get(0); + } + + private static final Type[] VALUES = values(); + + public static Type valueOf(com.google.protobuf.Descriptors.EnumValueDescriptor desc) { + if (desc.getType() != getDescriptor()) { + throw new java.lang.IllegalArgumentException("EnumValueDescriptor is not for this type."); + } + if (desc.getIndex() == -1) { + return UNRECOGNIZED; + } + return VALUES[desc.getIndex()]; + } + + private final int value; + + private Type(int value) { + this.value = value; + } + + // @@protoc_insertion_point(enum_scope:google.cloud.bigquery.biglake.v1alpha1.Database.Type) + } + + private int optionsCase_ = 0; + private java.lang.Object options_; + + public enum OptionsCase + implements + com.google.protobuf.Internal.EnumLite, + com.google.protobuf.AbstractMessage.InternalOneOfEnum { + HIVE_OPTIONS(7), + OPTIONS_NOT_SET(0); + private final int value; + + private OptionsCase(int value) { + this.value = value; + } + /** + * @param value The number of the enum to look for. + * @return The enum associated with the given number. + * @deprecated Use {@link #forNumber(int)} instead. + */ + @java.lang.Deprecated + public static OptionsCase valueOf(int value) { + return forNumber(value); + } + + public static OptionsCase forNumber(int value) { + switch (value) { + case 7: + return HIVE_OPTIONS; + case 0: + return OPTIONS_NOT_SET; + default: + return null; + } + } + + public int getNumber() { + return this.value; + } + }; + + public OptionsCase getOptionsCase() { + return OptionsCase.forNumber(optionsCase_); + } + + public static final int HIVE_OPTIONS_FIELD_NUMBER = 7; + /** + * + * + *
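The UNRECOGNIZED constant above exists so that enum numbers added to the service after this code was generated can still round-trip. A hedged sketch of how a caller might handle that case (the helper class is hypothetical):

    import com.google.cloud.bigquery.biglake.v1alpha1.Database;

    final class DatabaseTypeHandling {
      static String describeType(Database database) {
        Database.Type type = database.getType();
        if (type == Database.Type.UNRECOGNIZED) {
          // The wire value is newer than this generated enum; fall back to the raw number,
          // which getTypeValue() still exposes.
          return "unknown type #" + database.getTypeValue();
        }
        return type.name(); // e.g. "HIVE" for Type.forNumber(1)
      }
    }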
+   * Options of a Hive database.
+   * 
+ * + * .google.cloud.bigquery.biglake.v1alpha1.HiveDatabaseOptions hive_options = 7; + * + * @return Whether the hiveOptions field is set. + */ + @java.lang.Override + public boolean hasHiveOptions() { + return optionsCase_ == 7; + } + /** + * + * + *
+   * Options of a Hive database.
+   * 
+ * + * .google.cloud.bigquery.biglake.v1alpha1.HiveDatabaseOptions hive_options = 7; + * + * @return The hiveOptions. + */ + @java.lang.Override + public com.google.cloud.bigquery.biglake.v1alpha1.HiveDatabaseOptions getHiveOptions() { + if (optionsCase_ == 7) { + return (com.google.cloud.bigquery.biglake.v1alpha1.HiveDatabaseOptions) options_; + } + return com.google.cloud.bigquery.biglake.v1alpha1.HiveDatabaseOptions.getDefaultInstance(); + } + /** + * + * + *
+   * Options of a Hive database.
+   * 
+ * + * .google.cloud.bigquery.biglake.v1alpha1.HiveDatabaseOptions hive_options = 7; + */ + @java.lang.Override + public com.google.cloud.bigquery.biglake.v1alpha1.HiveDatabaseOptionsOrBuilder + getHiveOptionsOrBuilder() { + if (optionsCase_ == 7) { + return (com.google.cloud.bigquery.biglake.v1alpha1.HiveDatabaseOptions) options_; + } + return com.google.cloud.bigquery.biglake.v1alpha1.HiveDatabaseOptions.getDefaultInstance(); + } + + public static final int NAME_FIELD_NUMBER = 1; + + @SuppressWarnings("serial") + private volatile java.lang.Object name_ = ""; + /** + * + * + *
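A brief illustrative sketch (helper names are hypothetical) of inspecting the options oneof with the accessors generated above; either the case enum or hasHiveOptions() can guard the read.

    import com.google.cloud.bigquery.biglake.v1alpha1.Database;
    import com.google.cloud.bigquery.biglake.v1alpha1.HiveDatabaseOptions;

    final class DatabaseOptionsInspection {
      static void printOptions(Database database) {
        if (database.getOptionsCase() == Database.OptionsCase.HIVE_OPTIONS) {
          HiveDatabaseOptions hive = database.getHiveOptions();
          System.out.println("Hive database options: " + hive);
        } else {
          // OPTIONS_NOT_SET: no options variant populated on this message.
          System.out.println("No database options set");
        }
      }
    }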
+   * Output only. The resource name.
+   * Format:
+   * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}
+   * 
+ * + * + * string name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... } + * + * + * @return The name. + */ + @java.lang.Override + public java.lang.String getName() { + java.lang.Object ref = name_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + name_ = s; + return s; + } + } + /** + * + * + *
+   * Output only. The resource name.
+   * Format:
+   * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}
+   * 
+ * + * + * string name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... } + * + * + * @return The bytes for name. + */ + @java.lang.Override + public com.google.protobuf.ByteString getNameBytes() { + java.lang.Object ref = name_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); + name_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int CREATE_TIME_FIELD_NUMBER = 2; + private com.google.protobuf.Timestamp createTime_; + /** + * + * + *
+   * Output only. The creation time of the database.
+   * 
+ * + * .google.protobuf.Timestamp create_time = 2 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + * + * @return Whether the createTime field is set. + */ + @java.lang.Override + public boolean hasCreateTime() { + return createTime_ != null; + } + /** + * + * + *
+   * Output only. The creation time of the database.
+   * 
+ * + * .google.protobuf.Timestamp create_time = 2 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + * + * @return The createTime. + */ + @java.lang.Override + public com.google.protobuf.Timestamp getCreateTime() { + return createTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : createTime_; + } + /** + * + * + *
+   * Output only. The creation time of the database.
+   * 
+ * + * .google.protobuf.Timestamp create_time = 2 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + @java.lang.Override + public com.google.protobuf.TimestampOrBuilder getCreateTimeOrBuilder() { + return createTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : createTime_; + } + + public static final int UPDATE_TIME_FIELD_NUMBER = 3; + private com.google.protobuf.Timestamp updateTime_; + /** + * + * + *
+   * Output only. The last modification time of the database.
+   * 
+ * + * .google.protobuf.Timestamp update_time = 3 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + * + * @return Whether the updateTime field is set. + */ + @java.lang.Override + public boolean hasUpdateTime() { + return updateTime_ != null; + } + /** + * + * + *
+   * Output only. The last modification time of the database.
+   * 
+ * + * .google.protobuf.Timestamp update_time = 3 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + * + * @return The updateTime. + */ + @java.lang.Override + public com.google.protobuf.Timestamp getUpdateTime() { + return updateTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : updateTime_; + } + /** + * + * + *
+   * Output only. The last modification time of the database.
+   * 
+ * + * .google.protobuf.Timestamp update_time = 3 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + @java.lang.Override + public com.google.protobuf.TimestampOrBuilder getUpdateTimeOrBuilder() { + return updateTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : updateTime_; + } + + public static final int DELETE_TIME_FIELD_NUMBER = 4; + private com.google.protobuf.Timestamp deleteTime_; + /** + * + * + *
+   * Output only. The deletion time of the database. Only set after the database
+   * is deleted.
+   * 
+ * + * .google.protobuf.Timestamp delete_time = 4 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + * + * @return Whether the deleteTime field is set. + */ + @java.lang.Override + public boolean hasDeleteTime() { + return deleteTime_ != null; + } + /** + * + * + *
+   * Output only. The deletion time of the database. Only set after the database
+   * is deleted.
+   * 
+ * + * .google.protobuf.Timestamp delete_time = 4 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + * + * @return The deleteTime. + */ + @java.lang.Override + public com.google.protobuf.Timestamp getDeleteTime() { + return deleteTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : deleteTime_; + } + /** + * + * + *
+   * Output only. The deletion time of the database. Only set after the database
+   * is deleted.
+   * 
+ * + * .google.protobuf.Timestamp delete_time = 4 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + @java.lang.Override + public com.google.protobuf.TimestampOrBuilder getDeleteTimeOrBuilder() { + return deleteTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : deleteTime_; + } + + public static final int EXPIRE_TIME_FIELD_NUMBER = 5; + private com.google.protobuf.Timestamp expireTime_; + /** + * + * + *
+   * Output only. The time when this database is considered expired. Only set
+   * after the database is deleted.
+   * 
+ * + * .google.protobuf.Timestamp expire_time = 5 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + * + * @return Whether the expireTime field is set. + */ + @java.lang.Override + public boolean hasExpireTime() { + return expireTime_ != null; + } + /** + * + * + *
+   * Output only. The time when this database is considered expired. Only set
+   * after the database is deleted.
+   * 
+ * + * .google.protobuf.Timestamp expire_time = 5 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + * + * @return The expireTime. + */ + @java.lang.Override + public com.google.protobuf.Timestamp getExpireTime() { + return expireTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : expireTime_; + } + /** + * + * + *
+   * Output only. The time when this database is considered expired. Only set
+   * after the database is deleted.
+   * 
+ * + * .google.protobuf.Timestamp expire_time = 5 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + @java.lang.Override + public com.google.protobuf.TimestampOrBuilder getExpireTimeOrBuilder() { + return expireTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : expireTime_; + } + + public static final int TYPE_FIELD_NUMBER = 6; + private int type_ = 0; + /** + * + * + *
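Because the timestamp fields above are output-only and remain unset until the service populates them, callers typically guard reads with the has- methods. A hedged sketch (the Instant conversion is just one common way to consume a protobuf Timestamp; the helper class is hypothetical):

    import com.google.cloud.bigquery.biglake.v1alpha1.Database;
    import com.google.protobuf.Timestamp;
    import java.time.Instant;

    final class DatabaseTimestamps {
      static Instant createTimeOrNull(Database database) {
        if (!database.hasCreateTime()) {
          return null; // output-only; unset until returned by the service
        }
        Timestamp ts = database.getCreateTime();
        return Instant.ofEpochSecond(ts.getSeconds(), ts.getNanos());
      }
    }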
+   * The database type.
+   * 
+ * + * .google.cloud.bigquery.biglake.v1alpha1.Database.Type type = 6; + * + * @return The enum numeric value on the wire for type. + */ + @java.lang.Override + public int getTypeValue() { + return type_; + } + /** + * + * + *
+   * The database type.
+   * 
+ * + * .google.cloud.bigquery.biglake.v1alpha1.Database.Type type = 6; + * + * @return The type. + */ + @java.lang.Override + public com.google.cloud.bigquery.biglake.v1alpha1.Database.Type getType() { + com.google.cloud.bigquery.biglake.v1alpha1.Database.Type result = + com.google.cloud.bigquery.biglake.v1alpha1.Database.Type.forNumber(type_); + return result == null + ? com.google.cloud.bigquery.biglake.v1alpha1.Database.Type.UNRECOGNIZED + : result; + } + + private byte memoizedIsInitialized = -1; + + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 1, name_); + } + if (createTime_ != null) { + output.writeMessage(2, getCreateTime()); + } + if (updateTime_ != null) { + output.writeMessage(3, getUpdateTime()); + } + if (deleteTime_ != null) { + output.writeMessage(4, getDeleteTime()); + } + if (expireTime_ != null) { + output.writeMessage(5, getExpireTime()); + } + if (type_ + != com.google.cloud.bigquery.biglake.v1alpha1.Database.Type.TYPE_UNSPECIFIED.getNumber()) { + output.writeEnum(6, type_); + } + if (optionsCase_ == 7) { + output.writeMessage( + 7, (com.google.cloud.bigquery.biglake.v1alpha1.HiveDatabaseOptions) options_); + } + getUnknownFields().writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, name_); + } + if (createTime_ != null) { + size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getCreateTime()); + } + if (updateTime_ != null) { + size += com.google.protobuf.CodedOutputStream.computeMessageSize(3, getUpdateTime()); + } + if (deleteTime_ != null) { + size += com.google.protobuf.CodedOutputStream.computeMessageSize(4, getDeleteTime()); + } + if (expireTime_ != null) { + size += com.google.protobuf.CodedOutputStream.computeMessageSize(5, getExpireTime()); + } + if (type_ + != com.google.cloud.bigquery.biglake.v1alpha1.Database.Type.TYPE_UNSPECIFIED.getNumber()) { + size += com.google.protobuf.CodedOutputStream.computeEnumSize(6, type_); + } + if (optionsCase_ == 7) { + size += + com.google.protobuf.CodedOutputStream.computeMessageSize( + 7, (com.google.cloud.bigquery.biglake.v1alpha1.HiveDatabaseOptions) options_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof com.google.cloud.bigquery.biglake.v1alpha1.Database)) { + return super.equals(obj); + } + com.google.cloud.bigquery.biglake.v1alpha1.Database other = + (com.google.cloud.bigquery.biglake.v1alpha1.Database) obj; + + if (!getName().equals(other.getName())) return false; + if (hasCreateTime() != other.hasCreateTime()) return false; + if (hasCreateTime()) { + if (!getCreateTime().equals(other.getCreateTime())) return false; + } + if (hasUpdateTime() != other.hasUpdateTime()) return false; + if (hasUpdateTime()) { + if 
(!getUpdateTime().equals(other.getUpdateTime())) return false; + } + if (hasDeleteTime() != other.hasDeleteTime()) return false; + if (hasDeleteTime()) { + if (!getDeleteTime().equals(other.getDeleteTime())) return false; + } + if (hasExpireTime() != other.hasExpireTime()) return false; + if (hasExpireTime()) { + if (!getExpireTime().equals(other.getExpireTime())) return false; + } + if (type_ != other.type_) return false; + if (!getOptionsCase().equals(other.getOptionsCase())) return false; + switch (optionsCase_) { + case 7: + if (!getHiveOptions().equals(other.getHiveOptions())) return false; + break; + case 0: + default: + } + if (!getUnknownFields().equals(other.getUnknownFields())) return false; + return true; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + hash = (37 * hash) + NAME_FIELD_NUMBER; + hash = (53 * hash) + getName().hashCode(); + if (hasCreateTime()) { + hash = (37 * hash) + CREATE_TIME_FIELD_NUMBER; + hash = (53 * hash) + getCreateTime().hashCode(); + } + if (hasUpdateTime()) { + hash = (37 * hash) + UPDATE_TIME_FIELD_NUMBER; + hash = (53 * hash) + getUpdateTime().hashCode(); + } + if (hasDeleteTime()) { + hash = (37 * hash) + DELETE_TIME_FIELD_NUMBER; + hash = (53 * hash) + getDeleteTime().hashCode(); + } + if (hasExpireTime()) { + hash = (37 * hash) + EXPIRE_TIME_FIELD_NUMBER; + hash = (53 * hash) + getExpireTime().hashCode(); + } + hash = (37 * hash) + TYPE_FIELD_NUMBER; + hash = (53 * hash) + type_; + switch (optionsCase_) { + case 7: + hash = (37 * hash) + HIVE_OPTIONS_FIELD_NUMBER; + hash = (53 * hash) + getHiveOptions().hashCode(); + break; + case 0: + default: + } + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.Database parseFrom( + java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.Database parseFrom( + java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.Database parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.Database parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.Database parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.Database parseFrom( + byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.Database parseFrom( + java.io.InputStream input) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, 
input); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.Database parseFrom( + java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException( + PARSER, input, extensionRegistry); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.Database parseDelimitedFrom( + java.io.InputStream input) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.Database parseDelimitedFrom( + java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( + PARSER, input, extensionRegistry); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.Database parseFrom( + com.google.protobuf.CodedInputStream input) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.Database parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException( + PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { + return newBuilder(); + } + + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + + public static Builder newBuilder(com.google.cloud.bigquery.biglake.v1alpha1.Database prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * + * + *
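The parseFrom overloads above are the deserialization half of the usual protobuf round trip; toByteArray() on the message is the standard serialization counterpart. A minimal sketch, with a hypothetical helper class:

    import com.google.cloud.bigquery.biglake.v1alpha1.Database;
    import com.google.protobuf.InvalidProtocolBufferException;

    final class DatabaseRoundTrip {
      static Database copyViaBytes(Database original) throws InvalidProtocolBufferException {
        byte[] wire = original.toByteArray(); // standard protobuf serialization
        return Database.parseFrom(wire);      // generated parseFrom(byte[]) shown above
      }
    }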
+   * Database is the container of tables.
+   * 
+ * + * Protobuf type {@code google.cloud.bigquery.biglake.v1alpha1.Database} + */ + public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder + implements + // @@protoc_insertion_point(builder_implements:google.cloud.bigquery.biglake.v1alpha1.Database) + com.google.cloud.bigquery.biglake.v1alpha1.DatabaseOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { + return com.google.cloud.bigquery.biglake.v1alpha1.MetastoreProto + .internal_static_google_cloud_bigquery_biglake_v1alpha1_Database_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return com.google.cloud.bigquery.biglake.v1alpha1.MetastoreProto + .internal_static_google_cloud_bigquery_biglake_v1alpha1_Database_fieldAccessorTable + .ensureFieldAccessorsInitialized( + com.google.cloud.bigquery.biglake.v1alpha1.Database.class, + com.google.cloud.bigquery.biglake.v1alpha1.Database.Builder.class); + } + + // Construct using com.google.cloud.bigquery.biglake.v1alpha1.Database.newBuilder() + private Builder() {} + + private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + } + + @java.lang.Override + public Builder clear() { + super.clear(); + bitField0_ = 0; + if (hiveOptionsBuilder_ != null) { + hiveOptionsBuilder_.clear(); + } + name_ = ""; + createTime_ = null; + if (createTimeBuilder_ != null) { + createTimeBuilder_.dispose(); + createTimeBuilder_ = null; + } + updateTime_ = null; + if (updateTimeBuilder_ != null) { + updateTimeBuilder_.dispose(); + updateTimeBuilder_ = null; + } + deleteTime_ = null; + if (deleteTimeBuilder_ != null) { + deleteTimeBuilder_.dispose(); + deleteTimeBuilder_ = null; + } + expireTime_ = null; + if (expireTimeBuilder_ != null) { + expireTimeBuilder_.dispose(); + expireTimeBuilder_ = null; + } + type_ = 0; + optionsCase_ = 0; + options_ = null; + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { + return com.google.cloud.bigquery.biglake.v1alpha1.MetastoreProto + .internal_static_google_cloud_bigquery_biglake_v1alpha1_Database_descriptor; + } + + @java.lang.Override + public com.google.cloud.bigquery.biglake.v1alpha1.Database getDefaultInstanceForType() { + return com.google.cloud.bigquery.biglake.v1alpha1.Database.getDefaultInstance(); + } + + @java.lang.Override + public com.google.cloud.bigquery.biglake.v1alpha1.Database build() { + com.google.cloud.bigquery.biglake.v1alpha1.Database result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public com.google.cloud.bigquery.biglake.v1alpha1.Database buildPartial() { + com.google.cloud.bigquery.biglake.v1alpha1.Database result = + new com.google.cloud.bigquery.biglake.v1alpha1.Database(this); + if (bitField0_ != 0) { + buildPartial0(result); + } + buildPartialOneofs(result); + onBuilt(); + return result; + } + + private void buildPartial0(com.google.cloud.bigquery.biglake.v1alpha1.Database result) { + int from_bitField0_ = bitField0_; + if (((from_bitField0_ & 0x00000002) != 0)) { + result.name_ = name_; + } + if (((from_bitField0_ & 0x00000004) != 0)) { + result.createTime_ = createTimeBuilder_ == null ? createTime_ : createTimeBuilder_.build(); + } + if (((from_bitField0_ & 0x00000008) != 0)) { + result.updateTime_ = updateTimeBuilder_ == null ? 
updateTime_ : updateTimeBuilder_.build(); + } + if (((from_bitField0_ & 0x00000010) != 0)) { + result.deleteTime_ = deleteTimeBuilder_ == null ? deleteTime_ : deleteTimeBuilder_.build(); + } + if (((from_bitField0_ & 0x00000020) != 0)) { + result.expireTime_ = expireTimeBuilder_ == null ? expireTime_ : expireTimeBuilder_.build(); + } + if (((from_bitField0_ & 0x00000040) != 0)) { + result.type_ = type_; + } + } + + private void buildPartialOneofs(com.google.cloud.bigquery.biglake.v1alpha1.Database result) { + result.optionsCase_ = optionsCase_; + result.options_ = this.options_; + if (optionsCase_ == 7 && hiveOptionsBuilder_ != null) { + result.options_ = hiveOptionsBuilder_.build(); + } + } + + @java.lang.Override + public Builder clone() { + return super.clone(); + } + + @java.lang.Override + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { + return super.setField(field, value); + } + + @java.lang.Override + public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { + return super.clearField(field); + } + + @java.lang.Override + public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return super.clearOneof(oneof); + } + + @java.lang.Override + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { + return super.setRepeatedField(field, index, value); + } + + @java.lang.Override + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { + return super.addRepeatedField(field, value); + } + + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof com.google.cloud.bigquery.biglake.v1alpha1.Database) { + return mergeFrom((com.google.cloud.bigquery.biglake.v1alpha1.Database) other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(com.google.cloud.bigquery.biglake.v1alpha1.Database other) { + if (other == com.google.cloud.bigquery.biglake.v1alpha1.Database.getDefaultInstance()) + return this; + if (!other.getName().isEmpty()) { + name_ = other.name_; + bitField0_ |= 0x00000002; + onChanged(); + } + if (other.hasCreateTime()) { + mergeCreateTime(other.getCreateTime()); + } + if (other.hasUpdateTime()) { + mergeUpdateTime(other.getUpdateTime()); + } + if (other.hasDeleteTime()) { + mergeDeleteTime(other.getDeleteTime()); + } + if (other.hasExpireTime()) { + mergeExpireTime(other.getExpireTime()); + } + if (other.type_ != 0) { + setTypeValue(other.getTypeValue()); + } + switch (other.getOptionsCase()) { + case HIVE_OPTIONS: + { + mergeHiveOptions(other.getHiveOptions()); + break; + } + case OPTIONS_NOT_SET: + { + break; + } + } + this.mergeUnknownFields(other.getUnknownFields()); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 10: + { + name_ = input.readStringRequireUtf8(); + bitField0_ |= 0x00000002; + break; + } // case 10 + case 18: + { + 
input.readMessage(getCreateTimeFieldBuilder().getBuilder(), extensionRegistry); + bitField0_ |= 0x00000004; + break; + } // case 18 + case 26: + { + input.readMessage(getUpdateTimeFieldBuilder().getBuilder(), extensionRegistry); + bitField0_ |= 0x00000008; + break; + } // case 26 + case 34: + { + input.readMessage(getDeleteTimeFieldBuilder().getBuilder(), extensionRegistry); + bitField0_ |= 0x00000010; + break; + } // case 34 + case 42: + { + input.readMessage(getExpireTimeFieldBuilder().getBuilder(), extensionRegistry); + bitField0_ |= 0x00000020; + break; + } // case 42 + case 48: + { + type_ = input.readEnum(); + bitField0_ |= 0x00000040; + break; + } // case 48 + case 58: + { + input.readMessage(getHiveOptionsFieldBuilder().getBuilder(), extensionRegistry); + optionsCase_ = 7; + break; + } // case 58 + default: + { + if (!super.parseUnknownField(input, extensionRegistry, tag)) { + done = true; // was an endgroup tag + } + break; + } // default: + } // switch (tag) + } // while (!done) + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.unwrapIOException(); + } finally { + onChanged(); + } // finally + return this; + } + + private int optionsCase_ = 0; + private java.lang.Object options_; + + public OptionsCase getOptionsCase() { + return OptionsCase.forNumber(optionsCase_); + } + + public Builder clearOptions() { + optionsCase_ = 0; + options_ = null; + onChanged(); + return this; + } + + private int bitField0_; + + private com.google.protobuf.SingleFieldBuilderV3< + com.google.cloud.bigquery.biglake.v1alpha1.HiveDatabaseOptions, + com.google.cloud.bigquery.biglake.v1alpha1.HiveDatabaseOptions.Builder, + com.google.cloud.bigquery.biglake.v1alpha1.HiveDatabaseOptionsOrBuilder> + hiveOptionsBuilder_; + /** + * + * + *
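The Builder.mergeFrom(Database) implementation shown above copies only the populated fields of the other message into this builder, merging nested messages and leaving everything else untouched. A hedged sketch of using that behavior to apply a sparse patch (the helper name is hypothetical):

    import com.google.cloud.bigquery.biglake.v1alpha1.Database;

    final class DatabaseMerge {
      static Database applyPatch(Database base, Database patch) {
        // Populated fields of `patch` replace (or, for message fields, merge into)
        // the corresponding fields of `base`; unset fields of `patch` are ignored.
        return base.toBuilder().mergeFrom(patch).build();
      }
    }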
+     * Options of a Hive database.
+     * 
+ * + * .google.cloud.bigquery.biglake.v1alpha1.HiveDatabaseOptions hive_options = 7; + * + * @return Whether the hiveOptions field is set. + */ + @java.lang.Override + public boolean hasHiveOptions() { + return optionsCase_ == 7; + } + /** + * + * + *
+     * Options of a Hive database.
+     * 
+ * + * .google.cloud.bigquery.biglake.v1alpha1.HiveDatabaseOptions hive_options = 7; + * + * @return The hiveOptions. + */ + @java.lang.Override + public com.google.cloud.bigquery.biglake.v1alpha1.HiveDatabaseOptions getHiveOptions() { + if (hiveOptionsBuilder_ == null) { + if (optionsCase_ == 7) { + return (com.google.cloud.bigquery.biglake.v1alpha1.HiveDatabaseOptions) options_; + } + return com.google.cloud.bigquery.biglake.v1alpha1.HiveDatabaseOptions.getDefaultInstance(); + } else { + if (optionsCase_ == 7) { + return hiveOptionsBuilder_.getMessage(); + } + return com.google.cloud.bigquery.biglake.v1alpha1.HiveDatabaseOptions.getDefaultInstance(); + } + } + /** + * + * + *
+     * Options of a Hive database.
+     * 
+ * + * .google.cloud.bigquery.biglake.v1alpha1.HiveDatabaseOptions hive_options = 7; + */ + public Builder setHiveOptions( + com.google.cloud.bigquery.biglake.v1alpha1.HiveDatabaseOptions value) { + if (hiveOptionsBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + options_ = value; + onChanged(); + } else { + hiveOptionsBuilder_.setMessage(value); + } + optionsCase_ = 7; + return this; + } + /** + * + * + *
+     * Options of a Hive database.
+     * 
+ * + * .google.cloud.bigquery.biglake.v1alpha1.HiveDatabaseOptions hive_options = 7; + */ + public Builder setHiveOptions( + com.google.cloud.bigquery.biglake.v1alpha1.HiveDatabaseOptions.Builder builderForValue) { + if (hiveOptionsBuilder_ == null) { + options_ = builderForValue.build(); + onChanged(); + } else { + hiveOptionsBuilder_.setMessage(builderForValue.build()); + } + optionsCase_ = 7; + return this; + } + /** + * + * + *
+     * Options of a Hive database.
+     * 
+ * + * .google.cloud.bigquery.biglake.v1alpha1.HiveDatabaseOptions hive_options = 7; + */ + public Builder mergeHiveOptions( + com.google.cloud.bigquery.biglake.v1alpha1.HiveDatabaseOptions value) { + if (hiveOptionsBuilder_ == null) { + if (optionsCase_ == 7 + && options_ + != com.google.cloud.bigquery.biglake.v1alpha1.HiveDatabaseOptions + .getDefaultInstance()) { + options_ = + com.google.cloud.bigquery.biglake.v1alpha1.HiveDatabaseOptions.newBuilder( + (com.google.cloud.bigquery.biglake.v1alpha1.HiveDatabaseOptions) options_) + .mergeFrom(value) + .buildPartial(); + } else { + options_ = value; + } + onChanged(); + } else { + if (optionsCase_ == 7) { + hiveOptionsBuilder_.mergeFrom(value); + } else { + hiveOptionsBuilder_.setMessage(value); + } + } + optionsCase_ = 7; + return this; + } + /** + * + * + *
+     * Options of a Hive database.
+     * 
+ * + * .google.cloud.bigquery.biglake.v1alpha1.HiveDatabaseOptions hive_options = 7; + */ + public Builder clearHiveOptions() { + if (hiveOptionsBuilder_ == null) { + if (optionsCase_ == 7) { + optionsCase_ = 0; + options_ = null; + onChanged(); + } + } else { + if (optionsCase_ == 7) { + optionsCase_ = 0; + options_ = null; + } + hiveOptionsBuilder_.clear(); + } + return this; + } + /** + * + * + *
+     * Options of a Hive database.
+     * 
+ * + * .google.cloud.bigquery.biglake.v1alpha1.HiveDatabaseOptions hive_options = 7; + */ + public com.google.cloud.bigquery.biglake.v1alpha1.HiveDatabaseOptions.Builder + getHiveOptionsBuilder() { + return getHiveOptionsFieldBuilder().getBuilder(); + } + /** + * + * + *
+     * Options of a Hive database.
+     * 
+ * + * .google.cloud.bigquery.biglake.v1alpha1.HiveDatabaseOptions hive_options = 7; + */ + @java.lang.Override + public com.google.cloud.bigquery.biglake.v1alpha1.HiveDatabaseOptionsOrBuilder + getHiveOptionsOrBuilder() { + if ((optionsCase_ == 7) && (hiveOptionsBuilder_ != null)) { + return hiveOptionsBuilder_.getMessageOrBuilder(); + } else { + if (optionsCase_ == 7) { + return (com.google.cloud.bigquery.biglake.v1alpha1.HiveDatabaseOptions) options_; + } + return com.google.cloud.bigquery.biglake.v1alpha1.HiveDatabaseOptions.getDefaultInstance(); + } + } + /** + * + * + *
+     * Options of a Hive database.
+     * 
+ * + * .google.cloud.bigquery.biglake.v1alpha1.HiveDatabaseOptions hive_options = 7; + */ + private com.google.protobuf.SingleFieldBuilderV3< + com.google.cloud.bigquery.biglake.v1alpha1.HiveDatabaseOptions, + com.google.cloud.bigquery.biglake.v1alpha1.HiveDatabaseOptions.Builder, + com.google.cloud.bigquery.biglake.v1alpha1.HiveDatabaseOptionsOrBuilder> + getHiveOptionsFieldBuilder() { + if (hiveOptionsBuilder_ == null) { + if (!(optionsCase_ == 7)) { + options_ = + com.google.cloud.bigquery.biglake.v1alpha1.HiveDatabaseOptions.getDefaultInstance(); + } + hiveOptionsBuilder_ = + new com.google.protobuf.SingleFieldBuilderV3< + com.google.cloud.bigquery.biglake.v1alpha1.HiveDatabaseOptions, + com.google.cloud.bigquery.biglake.v1alpha1.HiveDatabaseOptions.Builder, + com.google.cloud.bigquery.biglake.v1alpha1.HiveDatabaseOptionsOrBuilder>( + (com.google.cloud.bigquery.biglake.v1alpha1.HiveDatabaseOptions) options_, + getParentForChildren(), + isClean()); + options_ = null; + } + optionsCase_ = 7; + onChanged(); + return hiveOptionsBuilder_; + } + + private java.lang.Object name_ = ""; + /** + * + * + *
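Putting the builder-side oneof setters above together, a caller could populate a Database message as in the sketch below. This is only illustrative; the HiveDatabaseOptions value is left at its defaults since its individual fields are not shown in this excerpt.

    import com.google.cloud.bigquery.biglake.v1alpha1.Database;
    import com.google.cloud.bigquery.biglake.v1alpha1.HiveDatabaseOptions;

    final class DatabaseBuilderSketch {
      static Database hiveDatabase() {
        return Database.newBuilder()
            // Setting hive_options selects the HIVE_OPTIONS case of the options oneof.
            .setHiveOptions(HiveDatabaseOptions.newBuilder().build())
            .build();
      }
    }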
+     * Output only. The resource name.
+     * Format:
+     * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}
+     * 
+ * + * + * string name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... } + * + * + * @return The name. + */ + public java.lang.String getName() { + java.lang.Object ref = name_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + name_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + * + * + *
+     * Output only. The resource name.
+     * Format:
+     * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}
+     * 
+ * + * + * string name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... } + * + * + * @return The bytes for name. + */ + public com.google.protobuf.ByteString getNameBytes() { + java.lang.Object ref = name_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); + name_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * + * + *
+     * Output only. The resource name.
+     * Format:
+     * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}
+     * 
+ * + * + * string name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... } + * + * + * @param value The name to set. + * @return This builder for chaining. + */ + public Builder setName(java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + name_ = value; + bitField0_ |= 0x00000002; + onChanged(); + return this; + } + /** + * + * + *
+     * Output only. The resource name.
+     * Format:
+     * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}
+     * 
+ * + * + * string name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... } + * + * + * @return This builder for chaining. + */ + public Builder clearName() { + name_ = getDefaultInstance().getName(); + bitField0_ = (bitField0_ & ~0x00000002); + onChanged(); + return this; + } + /** + * + * + *
+     * Output only. The resource name.
+     * Format:
+     * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}
+     * 
+ * + * + * string name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... } + * + * + * @param value The bytes for name to set. + * @return This builder for chaining. + */ + public Builder setNameBytes(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + name_ = value; + bitField0_ |= 0x00000002; + onChanged(); + return this; + } + + private com.google.protobuf.Timestamp createTime_; + private com.google.protobuf.SingleFieldBuilderV3< + com.google.protobuf.Timestamp, + com.google.protobuf.Timestamp.Builder, + com.google.protobuf.TimestampOrBuilder> + createTimeBuilder_; + /** + * + * + *
+     * Output only. The creation time of the database.
+     * 
+ * + * + * .google.protobuf.Timestamp create_time = 2 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + * + * @return Whether the createTime field is set. + */ + public boolean hasCreateTime() { + return ((bitField0_ & 0x00000004) != 0); + } + /** + * + * + *
+     * Output only. The creation time of the database.
+     * 
+ * + * + * .google.protobuf.Timestamp create_time = 2 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + * + * @return The createTime. + */ + public com.google.protobuf.Timestamp getCreateTime() { + if (createTimeBuilder_ == null) { + return createTime_ == null + ? com.google.protobuf.Timestamp.getDefaultInstance() + : createTime_; + } else { + return createTimeBuilder_.getMessage(); + } + } + /** + * + * + *
+     * Output only. The creation time of the database.
+     * 
+ * + * + * .google.protobuf.Timestamp create_time = 2 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + public Builder setCreateTime(com.google.protobuf.Timestamp value) { + if (createTimeBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + createTime_ = value; + } else { + createTimeBuilder_.setMessage(value); + } + bitField0_ |= 0x00000004; + onChanged(); + return this; + } + /** + * + * + *
+     * Output only. The creation time of the database.
+     * 
+ * + * + * .google.protobuf.Timestamp create_time = 2 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + public Builder setCreateTime(com.google.protobuf.Timestamp.Builder builderForValue) { + if (createTimeBuilder_ == null) { + createTime_ = builderForValue.build(); + } else { + createTimeBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000004; + onChanged(); + return this; + } + /** + * + * + *
+     * Output only. The creation time of the database.
+     * 
+ * + * + * .google.protobuf.Timestamp create_time = 2 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + public Builder mergeCreateTime(com.google.protobuf.Timestamp value) { + if (createTimeBuilder_ == null) { + if (((bitField0_ & 0x00000004) != 0) + && createTime_ != null + && createTime_ != com.google.protobuf.Timestamp.getDefaultInstance()) { + getCreateTimeBuilder().mergeFrom(value); + } else { + createTime_ = value; + } + } else { + createTimeBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000004; + onChanged(); + return this; + } + /** + * + * + *
+     * Output only. The creation time of the database.
+     * 
+ * + * + * .google.protobuf.Timestamp create_time = 2 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + public Builder clearCreateTime() { + bitField0_ = (bitField0_ & ~0x00000004); + createTime_ = null; + if (createTimeBuilder_ != null) { + createTimeBuilder_.dispose(); + createTimeBuilder_ = null; + } + onChanged(); + return this; + } + /** + * + * + *
+     * Output only. The creation time of the database.
+     * 
+ * + * + * .google.protobuf.Timestamp create_time = 2 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + public com.google.protobuf.Timestamp.Builder getCreateTimeBuilder() { + bitField0_ |= 0x00000004; + onChanged(); + return getCreateTimeFieldBuilder().getBuilder(); + } + /** + * + * + *
+     * Output only. The creation time of the database.
+     * 
+ * + * + * .google.protobuf.Timestamp create_time = 2 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + public com.google.protobuf.TimestampOrBuilder getCreateTimeOrBuilder() { + if (createTimeBuilder_ != null) { + return createTimeBuilder_.getMessageOrBuilder(); + } else { + return createTime_ == null + ? com.google.protobuf.Timestamp.getDefaultInstance() + : createTime_; + } + } + /** + * + * + *
+     * Output only. The creation time of the database.
+     * 
+ * + * + * .google.protobuf.Timestamp create_time = 2 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + private com.google.protobuf.SingleFieldBuilderV3< + com.google.protobuf.Timestamp, + com.google.protobuf.Timestamp.Builder, + com.google.protobuf.TimestampOrBuilder> + getCreateTimeFieldBuilder() { + if (createTimeBuilder_ == null) { + createTimeBuilder_ = + new com.google.protobuf.SingleFieldBuilderV3< + com.google.protobuf.Timestamp, + com.google.protobuf.Timestamp.Builder, + com.google.protobuf.TimestampOrBuilder>( + getCreateTime(), getParentForChildren(), isClean()); + createTime_ = null; + } + return createTimeBuilder_; + } + + private com.google.protobuf.Timestamp updateTime_; + private com.google.protobuf.SingleFieldBuilderV3< + com.google.protobuf.Timestamp, + com.google.protobuf.Timestamp.Builder, + com.google.protobuf.TimestampOrBuilder> + updateTimeBuilder_; + /** + * + * + *
+     * Output only. The last modification time of the database.
+     * 
+ * + * + * .google.protobuf.Timestamp update_time = 3 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + * + * @return Whether the updateTime field is set. + */ + public boolean hasUpdateTime() { + return ((bitField0_ & 0x00000008) != 0); + } + /** + * + * + *
+     * Output only. The last modification time of the database.
+     * 
+ * + * + * .google.protobuf.Timestamp update_time = 3 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + * + * @return The updateTime. + */ + public com.google.protobuf.Timestamp getUpdateTime() { + if (updateTimeBuilder_ == null) { + return updateTime_ == null + ? com.google.protobuf.Timestamp.getDefaultInstance() + : updateTime_; + } else { + return updateTimeBuilder_.getMessage(); + } + } + /** + * + * + *
+     * Output only. The last modification time of the database.
+     * 
+ * + * + * .google.protobuf.Timestamp update_time = 3 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + public Builder setUpdateTime(com.google.protobuf.Timestamp value) { + if (updateTimeBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + updateTime_ = value; + } else { + updateTimeBuilder_.setMessage(value); + } + bitField0_ |= 0x00000008; + onChanged(); + return this; + } + /** + * + * + *
+     * Output only. The last modification time of the database.
+     * 
+ * + * + * .google.protobuf.Timestamp update_time = 3 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + public Builder setUpdateTime(com.google.protobuf.Timestamp.Builder builderForValue) { + if (updateTimeBuilder_ == null) { + updateTime_ = builderForValue.build(); + } else { + updateTimeBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000008; + onChanged(); + return this; + } + /** + * + * + *
+     * Output only. The last modification time of the database.
+     * 
+ * + * + * .google.protobuf.Timestamp update_time = 3 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + public Builder mergeUpdateTime(com.google.protobuf.Timestamp value) { + if (updateTimeBuilder_ == null) { + if (((bitField0_ & 0x00000008) != 0) + && updateTime_ != null + && updateTime_ != com.google.protobuf.Timestamp.getDefaultInstance()) { + getUpdateTimeBuilder().mergeFrom(value); + } else { + updateTime_ = value; + } + } else { + updateTimeBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000008; + onChanged(); + return this; + } + /** + * + * + *
+     * Output only. The last modification time of the database.
+     * 
+ * + * + * .google.protobuf.Timestamp update_time = 3 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + public Builder clearUpdateTime() { + bitField0_ = (bitField0_ & ~0x00000008); + updateTime_ = null; + if (updateTimeBuilder_ != null) { + updateTimeBuilder_.dispose(); + updateTimeBuilder_ = null; + } + onChanged(); + return this; + } + /** + * + * + *
+     * Output only. The last modification time of the database.
+     * 
+ * + * + * .google.protobuf.Timestamp update_time = 3 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + public com.google.protobuf.Timestamp.Builder getUpdateTimeBuilder() { + bitField0_ |= 0x00000008; + onChanged(); + return getUpdateTimeFieldBuilder().getBuilder(); + } + /** + * + * + *
+     * Output only. The last modification time of the database.
+     * 
+ * + * + * .google.protobuf.Timestamp update_time = 3 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + public com.google.protobuf.TimestampOrBuilder getUpdateTimeOrBuilder() { + if (updateTimeBuilder_ != null) { + return updateTimeBuilder_.getMessageOrBuilder(); + } else { + return updateTime_ == null + ? com.google.protobuf.Timestamp.getDefaultInstance() + : updateTime_; + } + } + /** + * + * + *
+     * Output only. The last modification time of the database.
+     * 
+ * + * + * .google.protobuf.Timestamp update_time = 3 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + private com.google.protobuf.SingleFieldBuilderV3< + com.google.protobuf.Timestamp, + com.google.protobuf.Timestamp.Builder, + com.google.protobuf.TimestampOrBuilder> + getUpdateTimeFieldBuilder() { + if (updateTimeBuilder_ == null) { + updateTimeBuilder_ = + new com.google.protobuf.SingleFieldBuilderV3< + com.google.protobuf.Timestamp, + com.google.protobuf.Timestamp.Builder, + com.google.protobuf.TimestampOrBuilder>( + getUpdateTime(), getParentForChildren(), isClean()); + updateTime_ = null; + } + return updateTimeBuilder_; + } + + private com.google.protobuf.Timestamp deleteTime_; + private com.google.protobuf.SingleFieldBuilderV3< + com.google.protobuf.Timestamp, + com.google.protobuf.Timestamp.Builder, + com.google.protobuf.TimestampOrBuilder> + deleteTimeBuilder_; + /** + * + * + *
+     * Output only. The deletion time of the database. Only set after the database
+     * is deleted.
+     * 
+ * + * + * .google.protobuf.Timestamp delete_time = 4 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + * + * @return Whether the deleteTime field is set. + */ + public boolean hasDeleteTime() { + return ((bitField0_ & 0x00000010) != 0); + } + /** + * + * + *
+     * Output only. The deletion time of the database. Only set after the database
+     * is deleted.
+     * 
+ * + * + * .google.protobuf.Timestamp delete_time = 4 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + * + * @return The deleteTime. + */ + public com.google.protobuf.Timestamp getDeleteTime() { + if (deleteTimeBuilder_ == null) { + return deleteTime_ == null + ? com.google.protobuf.Timestamp.getDefaultInstance() + : deleteTime_; + } else { + return deleteTimeBuilder_.getMessage(); + } + } + /** + * + * + *
+     * Output only. The deletion time of the database. Only set after the database
+     * is deleted.
+     * 
+ * + * + * .google.protobuf.Timestamp delete_time = 4 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + public Builder setDeleteTime(com.google.protobuf.Timestamp value) { + if (deleteTimeBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + deleteTime_ = value; + } else { + deleteTimeBuilder_.setMessage(value); + } + bitField0_ |= 0x00000010; + onChanged(); + return this; + } + /** + * + * + *
+     * Output only. The deletion time of the database. Only set after the database
+     * is deleted.
+     * 
+ * + * + * .google.protobuf.Timestamp delete_time = 4 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + public Builder setDeleteTime(com.google.protobuf.Timestamp.Builder builderForValue) { + if (deleteTimeBuilder_ == null) { + deleteTime_ = builderForValue.build(); + } else { + deleteTimeBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000010; + onChanged(); + return this; + } + /** + * + * + *
+     * Output only. The deletion time of the database. Only set after the database
+     * is deleted.
+     * 
+ * + * + * .google.protobuf.Timestamp delete_time = 4 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + public Builder mergeDeleteTime(com.google.protobuf.Timestamp value) { + if (deleteTimeBuilder_ == null) { + if (((bitField0_ & 0x00000010) != 0) + && deleteTime_ != null + && deleteTime_ != com.google.protobuf.Timestamp.getDefaultInstance()) { + getDeleteTimeBuilder().mergeFrom(value); + } else { + deleteTime_ = value; + } + } else { + deleteTimeBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000010; + onChanged(); + return this; + } + /** + * + * + *
+     * Output only. The deletion time of the database. Only set after the database
+     * is deleted.
+     * 
+ * + * + * .google.protobuf.Timestamp delete_time = 4 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + public Builder clearDeleteTime() { + bitField0_ = (bitField0_ & ~0x00000010); + deleteTime_ = null; + if (deleteTimeBuilder_ != null) { + deleteTimeBuilder_.dispose(); + deleteTimeBuilder_ = null; + } + onChanged(); + return this; + } + /** + * + * + *
+     * Output only. The deletion time of the database. Only set after the database
+     * is deleted.
+     * 
+ * + * + * .google.protobuf.Timestamp delete_time = 4 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + public com.google.protobuf.Timestamp.Builder getDeleteTimeBuilder() { + bitField0_ |= 0x00000010; + onChanged(); + return getDeleteTimeFieldBuilder().getBuilder(); + } + /** + * + * + *
+     * Output only. The deletion time of the database. Only set after the database
+     * is deleted.
+     * 
+ * + * + * .google.protobuf.Timestamp delete_time = 4 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + public com.google.protobuf.TimestampOrBuilder getDeleteTimeOrBuilder() { + if (deleteTimeBuilder_ != null) { + return deleteTimeBuilder_.getMessageOrBuilder(); + } else { + return deleteTime_ == null + ? com.google.protobuf.Timestamp.getDefaultInstance() + : deleteTime_; + } + } + /** + * + * + *
+     * Output only. The deletion time of the database. Only set after the database
+     * is deleted.
+     * 
+ * + * + * .google.protobuf.Timestamp delete_time = 4 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + private com.google.protobuf.SingleFieldBuilderV3< + com.google.protobuf.Timestamp, + com.google.protobuf.Timestamp.Builder, + com.google.protobuf.TimestampOrBuilder> + getDeleteTimeFieldBuilder() { + if (deleteTimeBuilder_ == null) { + deleteTimeBuilder_ = + new com.google.protobuf.SingleFieldBuilderV3< + com.google.protobuf.Timestamp, + com.google.protobuf.Timestamp.Builder, + com.google.protobuf.TimestampOrBuilder>( + getDeleteTime(), getParentForChildren(), isClean()); + deleteTime_ = null; + } + return deleteTimeBuilder_; + } + + private com.google.protobuf.Timestamp expireTime_; + private com.google.protobuf.SingleFieldBuilderV3< + com.google.protobuf.Timestamp, + com.google.protobuf.Timestamp.Builder, + com.google.protobuf.TimestampOrBuilder> + expireTimeBuilder_; + /** + * + * + *
+     * Output only. The time when this database is considered expired. Only set
+     * after the database is deleted.
+     * 
+ * + * + * .google.protobuf.Timestamp expire_time = 5 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + * + * @return Whether the expireTime field is set. + */ + public boolean hasExpireTime() { + return ((bitField0_ & 0x00000020) != 0); + } + /** + * + * + *
+     * Output only. The time when this database is considered expired. Only set
+     * after the database is deleted.
+     * 
+ * + * + * .google.protobuf.Timestamp expire_time = 5 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + * + * @return The expireTime. + */ + public com.google.protobuf.Timestamp getExpireTime() { + if (expireTimeBuilder_ == null) { + return expireTime_ == null + ? com.google.protobuf.Timestamp.getDefaultInstance() + : expireTime_; + } else { + return expireTimeBuilder_.getMessage(); + } + } + /** + * + * + *
+     * Output only. The time when this database is considered expired. Only set
+     * after the database is deleted.
+     * 
+ * + * + * .google.protobuf.Timestamp expire_time = 5 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + public Builder setExpireTime(com.google.protobuf.Timestamp value) { + if (expireTimeBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + expireTime_ = value; + } else { + expireTimeBuilder_.setMessage(value); + } + bitField0_ |= 0x00000020; + onChanged(); + return this; + } + /** + * + * + *
+     * Output only. The time when this database is considered expired. Only set
+     * after the database is deleted.
+     * 
+ * + * + * .google.protobuf.Timestamp expire_time = 5 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + public Builder setExpireTime(com.google.protobuf.Timestamp.Builder builderForValue) { + if (expireTimeBuilder_ == null) { + expireTime_ = builderForValue.build(); + } else { + expireTimeBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000020; + onChanged(); + return this; + } + /** + * + * + *
+     * Output only. The time when this database is considered expired. Only set
+     * after the database is deleted.
+     * 
+ * + * + * .google.protobuf.Timestamp expire_time = 5 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + public Builder mergeExpireTime(com.google.protobuf.Timestamp value) { + if (expireTimeBuilder_ == null) { + if (((bitField0_ & 0x00000020) != 0) + && expireTime_ != null + && expireTime_ != com.google.protobuf.Timestamp.getDefaultInstance()) { + getExpireTimeBuilder().mergeFrom(value); + } else { + expireTime_ = value; + } + } else { + expireTimeBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000020; + onChanged(); + return this; + } + /** + * + * + *
+     * Output only. The time when this database is considered expired. Only set
+     * after the database is deleted.
+     * 
+ * + * + * .google.protobuf.Timestamp expire_time = 5 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + public Builder clearExpireTime() { + bitField0_ = (bitField0_ & ~0x00000020); + expireTime_ = null; + if (expireTimeBuilder_ != null) { + expireTimeBuilder_.dispose(); + expireTimeBuilder_ = null; + } + onChanged(); + return this; + } + /** + * + * + *
+     * Output only. The time when this database is considered expired. Only set
+     * after the database is deleted.
+     * 
+ * + * + * .google.protobuf.Timestamp expire_time = 5 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + public com.google.protobuf.Timestamp.Builder getExpireTimeBuilder() { + bitField0_ |= 0x00000020; + onChanged(); + return getExpireTimeFieldBuilder().getBuilder(); + } + /** + * + * + *
+     * Output only. The time when this database is considered expired. Only set
+     * after the database is deleted.
+     * 
+ * + * + * .google.protobuf.Timestamp expire_time = 5 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + public com.google.protobuf.TimestampOrBuilder getExpireTimeOrBuilder() { + if (expireTimeBuilder_ != null) { + return expireTimeBuilder_.getMessageOrBuilder(); + } else { + return expireTime_ == null + ? com.google.protobuf.Timestamp.getDefaultInstance() + : expireTime_; + } + } + /** + * + * + *
+     * Output only. The time when this database is considered expired. Only set
+     * after the database is deleted.
+     * 
+ * + * + * .google.protobuf.Timestamp expire_time = 5 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + private com.google.protobuf.SingleFieldBuilderV3< + com.google.protobuf.Timestamp, + com.google.protobuf.Timestamp.Builder, + com.google.protobuf.TimestampOrBuilder> + getExpireTimeFieldBuilder() { + if (expireTimeBuilder_ == null) { + expireTimeBuilder_ = + new com.google.protobuf.SingleFieldBuilderV3< + com.google.protobuf.Timestamp, + com.google.protobuf.Timestamp.Builder, + com.google.protobuf.TimestampOrBuilder>( + getExpireTime(), getParentForChildren(), isClean()); + expireTime_ = null; + } + return expireTimeBuilder_; + } + + private int type_ = 0; + /** + * + * + *
+     * The database type.
+     * 
+ * + * .google.cloud.bigquery.biglake.v1alpha1.Database.Type type = 6; + * + * @return The enum numeric value on the wire for type. + */ + @java.lang.Override + public int getTypeValue() { + return type_; + } + /** + * + * + *
+     * The database type.
+     * 
+ * + * .google.cloud.bigquery.biglake.v1alpha1.Database.Type type = 6; + * + * @param value The enum numeric value on the wire for type to set. + * @return This builder for chaining. + */ + public Builder setTypeValue(int value) { + type_ = value; + bitField0_ |= 0x00000040; + onChanged(); + return this; + } + /** + * + * + *
+     * The database type.
+     * 
+ * + * .google.cloud.bigquery.biglake.v1alpha1.Database.Type type = 6; + * + * @return The type. + */ + @java.lang.Override + public com.google.cloud.bigquery.biglake.v1alpha1.Database.Type getType() { + com.google.cloud.bigquery.biglake.v1alpha1.Database.Type result = + com.google.cloud.bigquery.biglake.v1alpha1.Database.Type.forNumber(type_); + return result == null + ? com.google.cloud.bigquery.biglake.v1alpha1.Database.Type.UNRECOGNIZED + : result; + } + /** + * + * + *
+     * The database type.
+     * 
+ * + * .google.cloud.bigquery.biglake.v1alpha1.Database.Type type = 6; + * + * @param value The type to set. + * @return This builder for chaining. + */ + public Builder setType(com.google.cloud.bigquery.biglake.v1alpha1.Database.Type value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000040; + type_ = value.getNumber(); + onChanged(); + return this; + } + /** + * + * + *
+     * The database type.
+     * 
+ * + * .google.cloud.bigquery.biglake.v1alpha1.Database.Type type = 6; + * + * @return This builder for chaining. + */ + public Builder clearType() { + bitField0_ = (bitField0_ & ~0x00000040); + type_ = 0; + onChanged(); + return this; + } + + @java.lang.Override + public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + // @@protoc_insertion_point(builder_scope:google.cloud.bigquery.biglake.v1alpha1.Database) + } + + // @@protoc_insertion_point(class_scope:google.cloud.bigquery.biglake.v1alpha1.Database) + private static final com.google.cloud.bigquery.biglake.v1alpha1.Database DEFAULT_INSTANCE; + + static { + DEFAULT_INSTANCE = new com.google.cloud.bigquery.biglake.v1alpha1.Database(); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.Database getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + @java.lang.Override + public Database parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + Builder builder = newBuilder(); + try { + builder.mergeFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(builder.buildPartial()); + } catch (com.google.protobuf.UninitializedMessageException e) { + throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException(e) + .setUnfinishedMessage(builder.buildPartial()); + } + return builder.buildPartial(); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public com.google.cloud.bigquery.biglake.v1alpha1.Database getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } +} diff --git a/java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/DatabaseName.java b/java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/DatabaseName.java new file mode 100644 index 000000000000..9400630b9c7e --- /dev/null +++ b/java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/DatabaseName.java @@ -0,0 +1,259 @@ +/* + * Copyright 2022 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.cloud.bigquery.biglake.v1alpha1; + +import com.google.api.pathtemplate.PathTemplate; +import com.google.api.resourcenames.ResourceName; +import com.google.common.base.Preconditions; +import com.google.common.collect.ImmutableMap; +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import javax.annotation.Generated; + +// AUTO-GENERATED DOCUMENTATION AND CLASS. +@Generated("by gapic-generator-java") +public class DatabaseName implements ResourceName { + private static final PathTemplate PROJECT_LOCATION_CATALOG_DATABASE = + PathTemplate.createWithoutUrlEncoding( + "projects/{project}/locations/{location}/catalogs/{catalog}/databases/{database}"); + private volatile Map fieldValuesMap; + private final String project; + private final String location; + private final String catalog; + private final String database; + + @Deprecated + protected DatabaseName() { + project = null; + location = null; + catalog = null; + database = null; + } + + private DatabaseName(Builder builder) { + project = Preconditions.checkNotNull(builder.getProject()); + location = Preconditions.checkNotNull(builder.getLocation()); + catalog = Preconditions.checkNotNull(builder.getCatalog()); + database = Preconditions.checkNotNull(builder.getDatabase()); + } + + public String getProject() { + return project; + } + + public String getLocation() { + return location; + } + + public String getCatalog() { + return catalog; + } + + public String getDatabase() { + return database; + } + + public static Builder newBuilder() { + return new Builder(); + } + + public Builder toBuilder() { + return new Builder(this); + } + + public static DatabaseName of(String project, String location, String catalog, String database) { + return newBuilder() + .setProject(project) + .setLocation(location) + .setCatalog(catalog) + .setDatabase(database) + .build(); + } + + public static String format(String project, String location, String catalog, String database) { + return newBuilder() + .setProject(project) + .setLocation(location) + .setCatalog(catalog) + .setDatabase(database) + .build() + .toString(); + } + + public static DatabaseName parse(String formattedString) { + if (formattedString.isEmpty()) { + return null; + } + Map matchMap = + PROJECT_LOCATION_CATALOG_DATABASE.validatedMatch( + formattedString, "DatabaseName.parse: formattedString not in valid format"); + return of( + matchMap.get("project"), + matchMap.get("location"), + matchMap.get("catalog"), + matchMap.get("database")); + } + + public static List parseList(List formattedStrings) { + List list = new ArrayList<>(formattedStrings.size()); + for (String formattedString : formattedStrings) { + list.add(parse(formattedString)); + } + return list; + } + + public static List toStringList(List values) { + List list = new ArrayList<>(values.size()); + for (DatabaseName value : values) { + if (value == null) { + list.add(""); + } else { + list.add(value.toString()); + } + } + return list; + } + + public static boolean isParsableFrom(String formattedString) { + return PROJECT_LOCATION_CATALOG_DATABASE.matches(formattedString); + } + + @Override + public Map getFieldValuesMap() { + if (fieldValuesMap == null) { + synchronized (this) { + if (fieldValuesMap == null) { + ImmutableMap.Builder fieldMapBuilder = ImmutableMap.builder(); + if (project != null) { + fieldMapBuilder.put("project", project); + } + if (location != null) { + fieldMapBuilder.put("location", location); + } + if (catalog != null) { + 
fieldMapBuilder.put("catalog", catalog); + } + if (database != null) { + fieldMapBuilder.put("database", database); + } + fieldValuesMap = fieldMapBuilder.build(); + } + } + } + return fieldValuesMap; + } + + public String getFieldValue(String fieldName) { + return getFieldValuesMap().get(fieldName); + } + + @Override + public String toString() { + return PROJECT_LOCATION_CATALOG_DATABASE.instantiate( + "project", project, "location", location, "catalog", catalog, "database", database); + } + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o != null || getClass() == o.getClass()) { + DatabaseName that = ((DatabaseName) o); + return Objects.equals(this.project, that.project) + && Objects.equals(this.location, that.location) + && Objects.equals(this.catalog, that.catalog) + && Objects.equals(this.database, that.database); + } + return false; + } + + @Override + public int hashCode() { + int h = 1; + h *= 1000003; + h ^= Objects.hashCode(project); + h *= 1000003; + h ^= Objects.hashCode(location); + h *= 1000003; + h ^= Objects.hashCode(catalog); + h *= 1000003; + h ^= Objects.hashCode(database); + return h; + } + + /** + * Builder for projects/{project}/locations/{location}/catalogs/{catalog}/databases/{database}. + */ + public static class Builder { + private String project; + private String location; + private String catalog; + private String database; + + protected Builder() {} + + public String getProject() { + return project; + } + + public String getLocation() { + return location; + } + + public String getCatalog() { + return catalog; + } + + public String getDatabase() { + return database; + } + + public Builder setProject(String project) { + this.project = project; + return this; + } + + public Builder setLocation(String location) { + this.location = location; + return this; + } + + public Builder setCatalog(String catalog) { + this.catalog = catalog; + return this; + } + + public Builder setDatabase(String database) { + this.database = database; + return this; + } + + private Builder(DatabaseName databaseName) { + this.project = databaseName.project; + this.location = databaseName.location; + this.catalog = databaseName.catalog; + this.database = databaseName.database; + } + + public DatabaseName build() { + return new DatabaseName(this); + } + } +} diff --git a/java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/DatabaseOrBuilder.java b/java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/DatabaseOrBuilder.java new file mode 100644 index 000000000000..73543ff34b9c --- /dev/null +++ b/java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/DatabaseOrBuilder.java @@ -0,0 +1,278 @@ +/* + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +// Generated by the protocol buffer compiler. DO NOT EDIT! 
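// ---------------------------------------------------------------------------
// Editor's note (not part of the generated sources): a minimal usage sketch for
// the Database message and the DatabaseName resource-name helper added above.
// Identifiers such as "my-project" are placeholder values; Database.Type.HIVE and
// Builder.setHiveOptions(...) are assumed to be the generated enum constant and
// oneof setter implied by the hive_options / type field documentation in this patch.
// ---------------------------------------------------------------------------
import com.google.cloud.bigquery.biglake.v1alpha1.Database;
import com.google.cloud.bigquery.biglake.v1alpha1.DatabaseName;
import com.google.cloud.bigquery.biglake.v1alpha1.HiveDatabaseOptions;

public class DatabaseUsageSketch {
  public static void main(String[] args) {
    // Build and round-trip the documented resource-name format:
    // projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}
    DatabaseName name =
        DatabaseName.of("my-project", "us-central1", "my-catalog", "my-database");
    String formatted = name.toString();
    DatabaseName parsed = DatabaseName.parse(formatted); // yields the same field values as `name`

    // Populate the writable fields of a Database. The resource name and the
    // create/update/delete/expire timestamps are OUTPUT_ONLY and set by the service.
    Database database =
        Database.newBuilder()
            .setType(Database.Type.HIVE) // assumed enum constant for Hive databases
            .setHiveOptions(HiveDatabaseOptions.getDefaultInstance()) // selects the hive_options oneof
            .build();

    // The options oneof can be inspected through the generated case enum.
    boolean isHive = database.getOptionsCase() == Database.OptionsCase.HIVE_OPTIONS;
    System.out.println(formatted + " hive=" + isHive + " project=" + parsed.getProject());
  }
}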
+// source: google/cloud/bigquery/biglake/v1alpha1/metastore.proto + +package com.google.cloud.bigquery.biglake.v1alpha1; + +public interface DatabaseOrBuilder + extends + // @@protoc_insertion_point(interface_extends:google.cloud.bigquery.biglake.v1alpha1.Database) + com.google.protobuf.MessageOrBuilder { + + /** + * + * + *
+   * Options of a Hive database.
+   * 
+ * + * .google.cloud.bigquery.biglake.v1alpha1.HiveDatabaseOptions hive_options = 7; + * + * @return Whether the hiveOptions field is set. + */ + boolean hasHiveOptions(); + /** + * + * + *
+   * Options of a Hive database.
+   * 
+ * + * .google.cloud.bigquery.biglake.v1alpha1.HiveDatabaseOptions hive_options = 7; + * + * @return The hiveOptions. + */ + com.google.cloud.bigquery.biglake.v1alpha1.HiveDatabaseOptions getHiveOptions(); + /** + * + * + *
+   * Options of a Hive database.
+   * 
+ * + * .google.cloud.bigquery.biglake.v1alpha1.HiveDatabaseOptions hive_options = 7; + */ + com.google.cloud.bigquery.biglake.v1alpha1.HiveDatabaseOptionsOrBuilder getHiveOptionsOrBuilder(); + + /** + * + * + *
+   * Output only. The resource name.
+   * Format:
+   * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}
+   * 
+ * + * + * string name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... } + * + * + * @return The name. + */ + java.lang.String getName(); + /** + * + * + *
+   * Output only. The resource name.
+   * Format:
+   * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}
+   * 
+ * + * + * string name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... } + * + * + * @return The bytes for name. + */ + com.google.protobuf.ByteString getNameBytes(); + + /** + * + * + *
+   * Output only. The creation time of the database.
+   * 
+ * + * .google.protobuf.Timestamp create_time = 2 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + * + * @return Whether the createTime field is set. + */ + boolean hasCreateTime(); + /** + * + * + *
+   * Output only. The creation time of the database.
+   * 
+ * + * .google.protobuf.Timestamp create_time = 2 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + * + * @return The createTime. + */ + com.google.protobuf.Timestamp getCreateTime(); + /** + * + * + *
+   * Output only. The creation time of the database.
+   * 
+ * + * .google.protobuf.Timestamp create_time = 2 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + com.google.protobuf.TimestampOrBuilder getCreateTimeOrBuilder(); + + /** + * + * + *
+   * Output only. The last modification time of the database.
+   * 
+ * + * .google.protobuf.Timestamp update_time = 3 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + * + * @return Whether the updateTime field is set. + */ + boolean hasUpdateTime(); + /** + * + * + *
+   * Output only. The last modification time of the database.
+   * 
+ * + * .google.protobuf.Timestamp update_time = 3 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + * + * @return The updateTime. + */ + com.google.protobuf.Timestamp getUpdateTime(); + /** + * + * + *
+   * Output only. The last modification time of the database.
+   * 
+ * + * .google.protobuf.Timestamp update_time = 3 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + com.google.protobuf.TimestampOrBuilder getUpdateTimeOrBuilder(); + + /** + * + * + *
+   * Output only. The deletion time of the database. Only set after the database
+   * is deleted.
+   * 
+ * + * .google.protobuf.Timestamp delete_time = 4 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + * + * @return Whether the deleteTime field is set. + */ + boolean hasDeleteTime(); + /** + * + * + *
+   * Output only. The deletion time of the database. Only set after the database
+   * is deleted.
+   * 
+ * + * .google.protobuf.Timestamp delete_time = 4 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + * + * @return The deleteTime. + */ + com.google.protobuf.Timestamp getDeleteTime(); + /** + * + * + *
+   * Output only. The deletion time of the database. Only set after the database
+   * is deleted.
+   * 
+ * + * .google.protobuf.Timestamp delete_time = 4 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + com.google.protobuf.TimestampOrBuilder getDeleteTimeOrBuilder(); + + /** + * + * + *
+   * Output only. The time when this database is considered expired. Only set
+   * after the database is deleted.
+   * 
+ * + * .google.protobuf.Timestamp expire_time = 5 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + * + * @return Whether the expireTime field is set. + */ + boolean hasExpireTime(); + /** + * + * + *
+   * Output only. The time when this database is considered expired. Only set
+   * after the database is deleted.
+   * 
+ * + * .google.protobuf.Timestamp expire_time = 5 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + * + * @return The expireTime. + */ + com.google.protobuf.Timestamp getExpireTime(); + /** + * + * + *
+   * Output only. The time when this database is considered expired. Only set
+   * after the database is deleted.
+   * 
+ * + * .google.protobuf.Timestamp expire_time = 5 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + com.google.protobuf.TimestampOrBuilder getExpireTimeOrBuilder(); + + /** + * + * + *
+   * The database type.
+   * 
+ * + * .google.cloud.bigquery.biglake.v1alpha1.Database.Type type = 6; + * + * @return The enum numeric value on the wire for type. + */ + int getTypeValue(); + /** + * + * + *
+   * The database type.
+   * 
+ * + * .google.cloud.bigquery.biglake.v1alpha1.Database.Type type = 6; + * + * @return The type. + */ + com.google.cloud.bigquery.biglake.v1alpha1.Database.Type getType(); + + public com.google.cloud.bigquery.biglake.v1alpha1.Database.OptionsCase getOptionsCase(); +} diff --git a/java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/DeleteCatalogRequest.java b/java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/DeleteCatalogRequest.java new file mode 100644 index 000000000000..fceb0f3079de --- /dev/null +++ b/java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/DeleteCatalogRequest.java @@ -0,0 +1,665 @@ +/* + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: google/cloud/bigquery/biglake/v1alpha1/metastore.proto + +package com.google.cloud.bigquery.biglake.v1alpha1; + +/** + * + * + *
+ * Request message for the DeleteCatalog method.
+ * 
+ * + * Protobuf type {@code google.cloud.bigquery.biglake.v1alpha1.DeleteCatalogRequest} + */ +public final class DeleteCatalogRequest extends com.google.protobuf.GeneratedMessageV3 + implements + // @@protoc_insertion_point(message_implements:google.cloud.bigquery.biglake.v1alpha1.DeleteCatalogRequest) + DeleteCatalogRequestOrBuilder { + private static final long serialVersionUID = 0L; + // Use DeleteCatalogRequest.newBuilder() to construct. + private DeleteCatalogRequest(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + + private DeleteCatalogRequest() { + name_ = ""; + } + + @java.lang.Override + @SuppressWarnings({"unused"}) + protected java.lang.Object newInstance(UnusedPrivateParameter unused) { + return new DeleteCatalogRequest(); + } + + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet getUnknownFields() { + return this.unknownFields; + } + + public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { + return com.google.cloud.bigquery.biglake.v1alpha1.MetastoreProto + .internal_static_google_cloud_bigquery_biglake_v1alpha1_DeleteCatalogRequest_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return com.google.cloud.bigquery.biglake.v1alpha1.MetastoreProto + .internal_static_google_cloud_bigquery_biglake_v1alpha1_DeleteCatalogRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + com.google.cloud.bigquery.biglake.v1alpha1.DeleteCatalogRequest.class, + com.google.cloud.bigquery.biglake.v1alpha1.DeleteCatalogRequest.Builder.class); + } + + public static final int NAME_FIELD_NUMBER = 1; + + @SuppressWarnings("serial") + private volatile java.lang.Object name_ = ""; + /** + * + * + *
+   * Required. The name of the catalog to delete.
+   * Format:
+   * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}
+   * 
+ * + * + * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @return The name. + */ + @java.lang.Override + public java.lang.String getName() { + java.lang.Object ref = name_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + name_ = s; + return s; + } + } + /** + * + * + *
+   * Required. The name of the catalog to delete.
+   * Format:
+   * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}
+   * 
+ * + * + * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @return The bytes for name. + */ + @java.lang.Override + public com.google.protobuf.ByteString getNameBytes() { + java.lang.Object ref = name_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); + name_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + private byte memoizedIsInitialized = -1; + + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 1, name_); + } + getUnknownFields().writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, name_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof com.google.cloud.bigquery.biglake.v1alpha1.DeleteCatalogRequest)) { + return super.equals(obj); + } + com.google.cloud.bigquery.biglake.v1alpha1.DeleteCatalogRequest other = + (com.google.cloud.bigquery.biglake.v1alpha1.DeleteCatalogRequest) obj; + + if (!getName().equals(other.getName())) return false; + if (!getUnknownFields().equals(other.getUnknownFields())) return false; + return true; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + hash = (37 * hash) + NAME_FIELD_NUMBER; + hash = (53 * hash) + getName().hashCode(); + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.DeleteCatalogRequest parseFrom( + java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.DeleteCatalogRequest parseFrom( + java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.DeleteCatalogRequest parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.DeleteCatalogRequest parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.DeleteCatalogRequest parseFrom( + byte[] data) 
throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.DeleteCatalogRequest parseFrom( + byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.DeleteCatalogRequest parseFrom( + java.io.InputStream input) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.DeleteCatalogRequest parseFrom( + java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException( + PARSER, input, extensionRegistry); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.DeleteCatalogRequest parseDelimitedFrom( + java.io.InputStream input) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.DeleteCatalogRequest parseDelimitedFrom( + java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( + PARSER, input, extensionRegistry); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.DeleteCatalogRequest parseFrom( + com.google.protobuf.CodedInputStream input) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.DeleteCatalogRequest parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException( + PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { + return newBuilder(); + } + + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + + public static Builder newBuilder( + com.google.cloud.bigquery.biglake.v1alpha1.DeleteCatalogRequest prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * + * + *
+   * Request message for the DeleteCatalog method.
+   * 
+ * + * Protobuf type {@code google.cloud.bigquery.biglake.v1alpha1.DeleteCatalogRequest} + */ + public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder + implements + // @@protoc_insertion_point(builder_implements:google.cloud.bigquery.biglake.v1alpha1.DeleteCatalogRequest) + com.google.cloud.bigquery.biglake.v1alpha1.DeleteCatalogRequestOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { + return com.google.cloud.bigquery.biglake.v1alpha1.MetastoreProto + .internal_static_google_cloud_bigquery_biglake_v1alpha1_DeleteCatalogRequest_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return com.google.cloud.bigquery.biglake.v1alpha1.MetastoreProto + .internal_static_google_cloud_bigquery_biglake_v1alpha1_DeleteCatalogRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + com.google.cloud.bigquery.biglake.v1alpha1.DeleteCatalogRequest.class, + com.google.cloud.bigquery.biglake.v1alpha1.DeleteCatalogRequest.Builder.class); + } + + // Construct using com.google.cloud.bigquery.biglake.v1alpha1.DeleteCatalogRequest.newBuilder() + private Builder() {} + + private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + } + + @java.lang.Override + public Builder clear() { + super.clear(); + bitField0_ = 0; + name_ = ""; + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { + return com.google.cloud.bigquery.biglake.v1alpha1.MetastoreProto + .internal_static_google_cloud_bigquery_biglake_v1alpha1_DeleteCatalogRequest_descriptor; + } + + @java.lang.Override + public com.google.cloud.bigquery.biglake.v1alpha1.DeleteCatalogRequest + getDefaultInstanceForType() { + return com.google.cloud.bigquery.biglake.v1alpha1.DeleteCatalogRequest.getDefaultInstance(); + } + + @java.lang.Override + public com.google.cloud.bigquery.biglake.v1alpha1.DeleteCatalogRequest build() { + com.google.cloud.bigquery.biglake.v1alpha1.DeleteCatalogRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public com.google.cloud.bigquery.biglake.v1alpha1.DeleteCatalogRequest buildPartial() { + com.google.cloud.bigquery.biglake.v1alpha1.DeleteCatalogRequest result = + new com.google.cloud.bigquery.biglake.v1alpha1.DeleteCatalogRequest(this); + if (bitField0_ != 0) { + buildPartial0(result); + } + onBuilt(); + return result; + } + + private void buildPartial0( + com.google.cloud.bigquery.biglake.v1alpha1.DeleteCatalogRequest result) { + int from_bitField0_ = bitField0_; + if (((from_bitField0_ & 0x00000001) != 0)) { + result.name_ = name_; + } + } + + @java.lang.Override + public Builder clone() { + return super.clone(); + } + + @java.lang.Override + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { + return super.setField(field, value); + } + + @java.lang.Override + public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { + return super.clearField(field); + } + + @java.lang.Override + public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return super.clearOneof(oneof); + } + + @java.lang.Override + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { + return 
super.setRepeatedField(field, index, value); + } + + @java.lang.Override + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { + return super.addRepeatedField(field, value); + } + + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof com.google.cloud.bigquery.biglake.v1alpha1.DeleteCatalogRequest) { + return mergeFrom((com.google.cloud.bigquery.biglake.v1alpha1.DeleteCatalogRequest) other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom( + com.google.cloud.bigquery.biglake.v1alpha1.DeleteCatalogRequest other) { + if (other + == com.google.cloud.bigquery.biglake.v1alpha1.DeleteCatalogRequest.getDefaultInstance()) + return this; + if (!other.getName().isEmpty()) { + name_ = other.name_; + bitField0_ |= 0x00000001; + onChanged(); + } + this.mergeUnknownFields(other.getUnknownFields()); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 10: + { + name_ = input.readStringRequireUtf8(); + bitField0_ |= 0x00000001; + break; + } // case 10 + default: + { + if (!super.parseUnknownField(input, extensionRegistry, tag)) { + done = true; // was an endgroup tag + } + break; + } // default: + } // switch (tag) + } // while (!done) + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.unwrapIOException(); + } finally { + onChanged(); + } // finally + return this; + } + + private int bitField0_; + + private java.lang.Object name_ = ""; + /** + * + * + *
+     * Required. The name of the catalog to delete.
+     * Format:
+     * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}
+     * 
+ * + * + * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @return The name. + */ + public java.lang.String getName() { + java.lang.Object ref = name_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + name_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + * + * + *
+     * Required. The name of the catalog to delete.
+     * Format:
+     * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}
+     * 
+ * + * + * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @return The bytes for name. + */ + public com.google.protobuf.ByteString getNameBytes() { + java.lang.Object ref = name_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); + name_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * + * + *
+     * Required. The name of the catalog to delete.
+     * Format:
+     * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}
+     * 
+ * + * + * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @param value The name to set. + * @return This builder for chaining. + */ + public Builder setName(java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + name_ = value; + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + /** + * + * + *
+     * Required. The name of the catalog to delete.
+     * Format:
+     * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}
+     * 
+ * + * + * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @return This builder for chaining. + */ + public Builder clearName() { + name_ = getDefaultInstance().getName(); + bitField0_ = (bitField0_ & ~0x00000001); + onChanged(); + return this; + } + /** + * + * + *
+     * Required. The name of the catalog to delete.
+     * Format:
+     * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}
+     * 
+ * + * + * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @param value The bytes for name to set. + * @return This builder for chaining. + */ + public Builder setNameBytes(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + name_ = value; + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + + @java.lang.Override + public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + // @@protoc_insertion_point(builder_scope:google.cloud.bigquery.biglake.v1alpha1.DeleteCatalogRequest) + } + + // @@protoc_insertion_point(class_scope:google.cloud.bigquery.biglake.v1alpha1.DeleteCatalogRequest) + private static final com.google.cloud.bigquery.biglake.v1alpha1.DeleteCatalogRequest + DEFAULT_INSTANCE; + + static { + DEFAULT_INSTANCE = new com.google.cloud.bigquery.biglake.v1alpha1.DeleteCatalogRequest(); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.DeleteCatalogRequest + getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + @java.lang.Override + public DeleteCatalogRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + Builder builder = newBuilder(); + try { + builder.mergeFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(builder.buildPartial()); + } catch (com.google.protobuf.UninitializedMessageException e) { + throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException(e) + .setUnfinishedMessage(builder.buildPartial()); + } + return builder.buildPartial(); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public com.google.cloud.bigquery.biglake.v1alpha1.DeleteCatalogRequest + getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } +} diff --git a/java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/DeleteCatalogRequestOrBuilder.java b/java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/DeleteCatalogRequestOrBuilder.java new file mode 100644 index 000000000000..f5cccf975ce7 --- /dev/null +++ b/java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/DeleteCatalogRequestOrBuilder.java @@ -0,0 +1,58 @@ +/* + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: google/cloud/bigquery/biglake/v1alpha1/metastore.proto + +package com.google.cloud.bigquery.biglake.v1alpha1; + +public interface DeleteCatalogRequestOrBuilder + extends + // @@protoc_insertion_point(interface_extends:google.cloud.bigquery.biglake.v1alpha1.DeleteCatalogRequest) + com.google.protobuf.MessageOrBuilder { + + /** + * + * + *
+   * Required. The name of the catalog to delete.
+   * Format:
+   * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}
+   * 
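+   * <p>For illustration only, a value in this format together with the corresponding
+   * builder call from the generated message class; the project, location, and catalog IDs
+   * are placeholders, and the standard generated {@code newBuilder()} factory is assumed:
+   * <pre>{@code
+   * DeleteCatalogRequest request =
+   *     DeleteCatalogRequest.newBuilder()
+   *         .setName("projects/my-project/locations/us-central1/catalogs/my-catalog")
+   *         .build();
+   * }</pre>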
+ * + * + * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @return The name. + */ + java.lang.String getName(); + /** + * + * + *
+   * Required. The name of the catalog to delete.
+   * Format:
+   * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}
+   * 
+ * + * + * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @return The bytes for name. + */ + com.google.protobuf.ByteString getNameBytes(); +} diff --git a/java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/DeleteDatabaseRequest.java b/java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/DeleteDatabaseRequest.java new file mode 100644 index 000000000000..3cb4a44eb6a3 --- /dev/null +++ b/java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/DeleteDatabaseRequest.java @@ -0,0 +1,665 @@ +/* + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: google/cloud/bigquery/biglake/v1alpha1/metastore.proto + +package com.google.cloud.bigquery.biglake.v1alpha1; + +/** + * + * + *
+ * Request message for the DeleteDatabase method.
+ * 
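+ * <p>A minimal construction sketch, shown for illustration; the project, location, catalog,
+ * and database IDs below are placeholders rather than values taken from the generated
+ * sources:
+ * <pre>{@code
+ * DeleteDatabaseRequest request =
+ *     DeleteDatabaseRequest.newBuilder()
+ *         .setName(
+ *             "projects/my-project/locations/us-central1/catalogs/my-catalog/databases/my-db")
+ *         .build();
+ * // The request is then typically handed to the generated metastore service client's
+ * // deleteDatabase call.
+ * }</pre>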
+ * + * Protobuf type {@code google.cloud.bigquery.biglake.v1alpha1.DeleteDatabaseRequest} + */ +public final class DeleteDatabaseRequest extends com.google.protobuf.GeneratedMessageV3 + implements + // @@protoc_insertion_point(message_implements:google.cloud.bigquery.biglake.v1alpha1.DeleteDatabaseRequest) + DeleteDatabaseRequestOrBuilder { + private static final long serialVersionUID = 0L; + // Use DeleteDatabaseRequest.newBuilder() to construct. + private DeleteDatabaseRequest(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + + private DeleteDatabaseRequest() { + name_ = ""; + } + + @java.lang.Override + @SuppressWarnings({"unused"}) + protected java.lang.Object newInstance(UnusedPrivateParameter unused) { + return new DeleteDatabaseRequest(); + } + + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet getUnknownFields() { + return this.unknownFields; + } + + public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { + return com.google.cloud.bigquery.biglake.v1alpha1.MetastoreProto + .internal_static_google_cloud_bigquery_biglake_v1alpha1_DeleteDatabaseRequest_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return com.google.cloud.bigquery.biglake.v1alpha1.MetastoreProto + .internal_static_google_cloud_bigquery_biglake_v1alpha1_DeleteDatabaseRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + com.google.cloud.bigquery.biglake.v1alpha1.DeleteDatabaseRequest.class, + com.google.cloud.bigquery.biglake.v1alpha1.DeleteDatabaseRequest.Builder.class); + } + + public static final int NAME_FIELD_NUMBER = 1; + + @SuppressWarnings("serial") + private volatile java.lang.Object name_ = ""; + /** + * + * + *
+   * Required. The name of the database to delete.
+   * Format:
+   * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}
+   * 
+ * + * + * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @return The name. + */ + @java.lang.Override + public java.lang.String getName() { + java.lang.Object ref = name_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + name_ = s; + return s; + } + } + /** + * + * + *
+   * Required. The name of the database to delete.
+   * Format:
+   * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}
+   * 
+ * + * + * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @return The bytes for name. + */ + @java.lang.Override + public com.google.protobuf.ByteString getNameBytes() { + java.lang.Object ref = name_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); + name_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + private byte memoizedIsInitialized = -1; + + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 1, name_); + } + getUnknownFields().writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, name_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof com.google.cloud.bigquery.biglake.v1alpha1.DeleteDatabaseRequest)) { + return super.equals(obj); + } + com.google.cloud.bigquery.biglake.v1alpha1.DeleteDatabaseRequest other = + (com.google.cloud.bigquery.biglake.v1alpha1.DeleteDatabaseRequest) obj; + + if (!getName().equals(other.getName())) return false; + if (!getUnknownFields().equals(other.getUnknownFields())) return false; + return true; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + hash = (37 * hash) + NAME_FIELD_NUMBER; + hash = (53 * hash) + getName().hashCode(); + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.DeleteDatabaseRequest parseFrom( + java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.DeleteDatabaseRequest parseFrom( + java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.DeleteDatabaseRequest parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.DeleteDatabaseRequest parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.DeleteDatabaseRequest parseFrom( + 
byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.DeleteDatabaseRequest parseFrom( + byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.DeleteDatabaseRequest parseFrom( + java.io.InputStream input) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.DeleteDatabaseRequest parseFrom( + java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException( + PARSER, input, extensionRegistry); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.DeleteDatabaseRequest parseDelimitedFrom( + java.io.InputStream input) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.DeleteDatabaseRequest parseDelimitedFrom( + java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( + PARSER, input, extensionRegistry); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.DeleteDatabaseRequest parseFrom( + com.google.protobuf.CodedInputStream input) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.DeleteDatabaseRequest parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException( + PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { + return newBuilder(); + } + + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + + public static Builder newBuilder( + com.google.cloud.bigquery.biglake.v1alpha1.DeleteDatabaseRequest prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * + * + *
+   * Request message for the DeleteDatabase method.
+   * 
+ * + * Protobuf type {@code google.cloud.bigquery.biglake.v1alpha1.DeleteDatabaseRequest} + */ + public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder + implements + // @@protoc_insertion_point(builder_implements:google.cloud.bigquery.biglake.v1alpha1.DeleteDatabaseRequest) + com.google.cloud.bigquery.biglake.v1alpha1.DeleteDatabaseRequestOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { + return com.google.cloud.bigquery.biglake.v1alpha1.MetastoreProto + .internal_static_google_cloud_bigquery_biglake_v1alpha1_DeleteDatabaseRequest_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return com.google.cloud.bigquery.biglake.v1alpha1.MetastoreProto + .internal_static_google_cloud_bigquery_biglake_v1alpha1_DeleteDatabaseRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + com.google.cloud.bigquery.biglake.v1alpha1.DeleteDatabaseRequest.class, + com.google.cloud.bigquery.biglake.v1alpha1.DeleteDatabaseRequest.Builder.class); + } + + // Construct using com.google.cloud.bigquery.biglake.v1alpha1.DeleteDatabaseRequest.newBuilder() + private Builder() {} + + private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + } + + @java.lang.Override + public Builder clear() { + super.clear(); + bitField0_ = 0; + name_ = ""; + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { + return com.google.cloud.bigquery.biglake.v1alpha1.MetastoreProto + .internal_static_google_cloud_bigquery_biglake_v1alpha1_DeleteDatabaseRequest_descriptor; + } + + @java.lang.Override + public com.google.cloud.bigquery.biglake.v1alpha1.DeleteDatabaseRequest + getDefaultInstanceForType() { + return com.google.cloud.bigquery.biglake.v1alpha1.DeleteDatabaseRequest.getDefaultInstance(); + } + + @java.lang.Override + public com.google.cloud.bigquery.biglake.v1alpha1.DeleteDatabaseRequest build() { + com.google.cloud.bigquery.biglake.v1alpha1.DeleteDatabaseRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public com.google.cloud.bigquery.biglake.v1alpha1.DeleteDatabaseRequest buildPartial() { + com.google.cloud.bigquery.biglake.v1alpha1.DeleteDatabaseRequest result = + new com.google.cloud.bigquery.biglake.v1alpha1.DeleteDatabaseRequest(this); + if (bitField0_ != 0) { + buildPartial0(result); + } + onBuilt(); + return result; + } + + private void buildPartial0( + com.google.cloud.bigquery.biglake.v1alpha1.DeleteDatabaseRequest result) { + int from_bitField0_ = bitField0_; + if (((from_bitField0_ & 0x00000001) != 0)) { + result.name_ = name_; + } + } + + @java.lang.Override + public Builder clone() { + return super.clone(); + } + + @java.lang.Override + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { + return super.setField(field, value); + } + + @java.lang.Override + public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { + return super.clearField(field); + } + + @java.lang.Override + public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return super.clearOneof(oneof); + } + + @java.lang.Override + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object 
value) { + return super.setRepeatedField(field, index, value); + } + + @java.lang.Override + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { + return super.addRepeatedField(field, value); + } + + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof com.google.cloud.bigquery.biglake.v1alpha1.DeleteDatabaseRequest) { + return mergeFrom((com.google.cloud.bigquery.biglake.v1alpha1.DeleteDatabaseRequest) other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom( + com.google.cloud.bigquery.biglake.v1alpha1.DeleteDatabaseRequest other) { + if (other + == com.google.cloud.bigquery.biglake.v1alpha1.DeleteDatabaseRequest.getDefaultInstance()) + return this; + if (!other.getName().isEmpty()) { + name_ = other.name_; + bitField0_ |= 0x00000001; + onChanged(); + } + this.mergeUnknownFields(other.getUnknownFields()); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 10: + { + name_ = input.readStringRequireUtf8(); + bitField0_ |= 0x00000001; + break; + } // case 10 + default: + { + if (!super.parseUnknownField(input, extensionRegistry, tag)) { + done = true; // was an endgroup tag + } + break; + } // default: + } // switch (tag) + } // while (!done) + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.unwrapIOException(); + } finally { + onChanged(); + } // finally + return this; + } + + private int bitField0_; + + private java.lang.Object name_ = ""; + /** + * + * + *
+     * Required. The name of the database to delete.
+     * Format:
+     * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}
+     * 
+ * + * + * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @return The name. + */ + public java.lang.String getName() { + java.lang.Object ref = name_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + name_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + * + * + *
+     * Required. The name of the database to delete.
+     * Format:
+     * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}
+     * 
+ * + * + * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @return The bytes for name. + */ + public com.google.protobuf.ByteString getNameBytes() { + java.lang.Object ref = name_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); + name_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * + * + *
+     * Required. The name of the database to delete.
+     * Format:
+     * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}
+     * 
+ * + * + * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @param value The name to set. + * @return This builder for chaining. + */ + public Builder setName(java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + name_ = value; + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + /** + * + * + *
+     * Required. The name of the database to delete.
+     * Format:
+     * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}
+     * 
+ * + * + * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @return This builder for chaining. + */ + public Builder clearName() { + name_ = getDefaultInstance().getName(); + bitField0_ = (bitField0_ & ~0x00000001); + onChanged(); + return this; + } + /** + * + * + *
+     * Required. The name of the database to delete.
+     * Format:
+     * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}
+     * 
+ * + * + * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @param value The bytes for name to set. + * @return This builder for chaining. + */ + public Builder setNameBytes(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + name_ = value; + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + + @java.lang.Override + public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + // @@protoc_insertion_point(builder_scope:google.cloud.bigquery.biglake.v1alpha1.DeleteDatabaseRequest) + } + + // @@protoc_insertion_point(class_scope:google.cloud.bigquery.biglake.v1alpha1.DeleteDatabaseRequest) + private static final com.google.cloud.bigquery.biglake.v1alpha1.DeleteDatabaseRequest + DEFAULT_INSTANCE; + + static { + DEFAULT_INSTANCE = new com.google.cloud.bigquery.biglake.v1alpha1.DeleteDatabaseRequest(); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.DeleteDatabaseRequest + getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + @java.lang.Override + public DeleteDatabaseRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + Builder builder = newBuilder(); + try { + builder.mergeFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(builder.buildPartial()); + } catch (com.google.protobuf.UninitializedMessageException e) { + throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException(e) + .setUnfinishedMessage(builder.buildPartial()); + } + return builder.buildPartial(); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public com.google.cloud.bigquery.biglake.v1alpha1.DeleteDatabaseRequest + getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } +} diff --git a/java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/DeleteDatabaseRequestOrBuilder.java b/java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/DeleteDatabaseRequestOrBuilder.java new file mode 100644 index 000000000000..d941e99f71ea --- /dev/null +++ b/java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/DeleteDatabaseRequestOrBuilder.java @@ -0,0 +1,58 @@ +/* + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: google/cloud/bigquery/biglake/v1alpha1/metastore.proto + +package com.google.cloud.bigquery.biglake.v1alpha1; + +public interface DeleteDatabaseRequestOrBuilder + extends + // @@protoc_insertion_point(interface_extends:google.cloud.bigquery.biglake.v1alpha1.DeleteDatabaseRequest) + com.google.protobuf.MessageOrBuilder { + + /** + * + * + *
+   * Required. The name of the database to delete.
+   * Format:
+   * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}
+   * 
+ * + * + * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @return The name. + */ + java.lang.String getName(); + /** + * + * + *
+   * Required. The name of the database to delete.
+   * Format:
+   * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}
+   * 
+ * + * + * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @return The bytes for name. + */ + com.google.protobuf.ByteString getNameBytes(); +} diff --git a/java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/DeleteLockRequest.java b/java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/DeleteLockRequest.java new file mode 100644 index 000000000000..8a563d486b71 --- /dev/null +++ b/java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/DeleteLockRequest.java @@ -0,0 +1,662 @@ +/* + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: google/cloud/bigquery/biglake/v1alpha1/metastore.proto + +package com.google.cloud.bigquery.biglake.v1alpha1; + +/** + * + * + *
+ * Request message for the DeleteLock method.
+ * 
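+ * <p>An illustrative sketch only; the resource IDs are placeholders, and the try/catch for
+ * the checked {@code InvalidProtocolBufferException} is omitted for brevity:
+ * <pre>{@code
+ * DeleteLockRequest request =
+ *     DeleteLockRequest.newBuilder()
+ *         .setName(
+ *             "projects/my-project/locations/us-central1/catalogs/my-catalog"
+ *                 + "/databases/my-db/locks/my-lock")
+ *         .build();
+ * // Round-trips through the generated parser declared below.
+ * DeleteLockRequest copy = DeleteLockRequest.parseFrom(request.toByteString());
+ * }</pre>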
+ * + * Protobuf type {@code google.cloud.bigquery.biglake.v1alpha1.DeleteLockRequest} + */ +public final class DeleteLockRequest extends com.google.protobuf.GeneratedMessageV3 + implements + // @@protoc_insertion_point(message_implements:google.cloud.bigquery.biglake.v1alpha1.DeleteLockRequest) + DeleteLockRequestOrBuilder { + private static final long serialVersionUID = 0L; + // Use DeleteLockRequest.newBuilder() to construct. + private DeleteLockRequest(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + + private DeleteLockRequest() { + name_ = ""; + } + + @java.lang.Override + @SuppressWarnings({"unused"}) + protected java.lang.Object newInstance(UnusedPrivateParameter unused) { + return new DeleteLockRequest(); + } + + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet getUnknownFields() { + return this.unknownFields; + } + + public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { + return com.google.cloud.bigquery.biglake.v1alpha1.MetastoreProto + .internal_static_google_cloud_bigquery_biglake_v1alpha1_DeleteLockRequest_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return com.google.cloud.bigquery.biglake.v1alpha1.MetastoreProto + .internal_static_google_cloud_bigquery_biglake_v1alpha1_DeleteLockRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + com.google.cloud.bigquery.biglake.v1alpha1.DeleteLockRequest.class, + com.google.cloud.bigquery.biglake.v1alpha1.DeleteLockRequest.Builder.class); + } + + public static final int NAME_FIELD_NUMBER = 1; + + @SuppressWarnings("serial") + private volatile java.lang.Object name_ = ""; + /** + * + * + *
+   * Required. The name of the lock to delete.
+   * Format:
+   * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}/locks/{lock_id}
+   * 
+ * + * + * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @return The name. + */ + @java.lang.Override + public java.lang.String getName() { + java.lang.Object ref = name_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + name_ = s; + return s; + } + } + /** + * + * + *
+   * Required. The name of the lock to delete.
+   * Format:
+   * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}/locks/{lock_id}
+   * 
+ * + * + * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @return The bytes for name. + */ + @java.lang.Override + public com.google.protobuf.ByteString getNameBytes() { + java.lang.Object ref = name_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); + name_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + private byte memoizedIsInitialized = -1; + + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 1, name_); + } + getUnknownFields().writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, name_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof com.google.cloud.bigquery.biglake.v1alpha1.DeleteLockRequest)) { + return super.equals(obj); + } + com.google.cloud.bigquery.biglake.v1alpha1.DeleteLockRequest other = + (com.google.cloud.bigquery.biglake.v1alpha1.DeleteLockRequest) obj; + + if (!getName().equals(other.getName())) return false; + if (!getUnknownFields().equals(other.getUnknownFields())) return false; + return true; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + hash = (37 * hash) + NAME_FIELD_NUMBER; + hash = (53 * hash) + getName().hashCode(); + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.DeleteLockRequest parseFrom( + java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.DeleteLockRequest parseFrom( + java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.DeleteLockRequest parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.DeleteLockRequest parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.DeleteLockRequest parseFrom(byte[] data) + throws 
com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.DeleteLockRequest parseFrom( + byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.DeleteLockRequest parseFrom( + java.io.InputStream input) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.DeleteLockRequest parseFrom( + java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException( + PARSER, input, extensionRegistry); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.DeleteLockRequest parseDelimitedFrom( + java.io.InputStream input) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.DeleteLockRequest parseDelimitedFrom( + java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( + PARSER, input, extensionRegistry); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.DeleteLockRequest parseFrom( + com.google.protobuf.CodedInputStream input) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.DeleteLockRequest parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException( + PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { + return newBuilder(); + } + + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + + public static Builder newBuilder( + com.google.cloud.bigquery.biglake.v1alpha1.DeleteLockRequest prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * + * + *
+   * Request message for the DeleteLock method.
+   * 
+ * + * Protobuf type {@code google.cloud.bigquery.biglake.v1alpha1.DeleteLockRequest} + */ + public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder + implements + // @@protoc_insertion_point(builder_implements:google.cloud.bigquery.biglake.v1alpha1.DeleteLockRequest) + com.google.cloud.bigquery.biglake.v1alpha1.DeleteLockRequestOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { + return com.google.cloud.bigquery.biglake.v1alpha1.MetastoreProto + .internal_static_google_cloud_bigquery_biglake_v1alpha1_DeleteLockRequest_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return com.google.cloud.bigquery.biglake.v1alpha1.MetastoreProto + .internal_static_google_cloud_bigquery_biglake_v1alpha1_DeleteLockRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + com.google.cloud.bigquery.biglake.v1alpha1.DeleteLockRequest.class, + com.google.cloud.bigquery.biglake.v1alpha1.DeleteLockRequest.Builder.class); + } + + // Construct using com.google.cloud.bigquery.biglake.v1alpha1.DeleteLockRequest.newBuilder() + private Builder() {} + + private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + } + + @java.lang.Override + public Builder clear() { + super.clear(); + bitField0_ = 0; + name_ = ""; + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { + return com.google.cloud.bigquery.biglake.v1alpha1.MetastoreProto + .internal_static_google_cloud_bigquery_biglake_v1alpha1_DeleteLockRequest_descriptor; + } + + @java.lang.Override + public com.google.cloud.bigquery.biglake.v1alpha1.DeleteLockRequest + getDefaultInstanceForType() { + return com.google.cloud.bigquery.biglake.v1alpha1.DeleteLockRequest.getDefaultInstance(); + } + + @java.lang.Override + public com.google.cloud.bigquery.biglake.v1alpha1.DeleteLockRequest build() { + com.google.cloud.bigquery.biglake.v1alpha1.DeleteLockRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public com.google.cloud.bigquery.biglake.v1alpha1.DeleteLockRequest buildPartial() { + com.google.cloud.bigquery.biglake.v1alpha1.DeleteLockRequest result = + new com.google.cloud.bigquery.biglake.v1alpha1.DeleteLockRequest(this); + if (bitField0_ != 0) { + buildPartial0(result); + } + onBuilt(); + return result; + } + + private void buildPartial0( + com.google.cloud.bigquery.biglake.v1alpha1.DeleteLockRequest result) { + int from_bitField0_ = bitField0_; + if (((from_bitField0_ & 0x00000001) != 0)) { + result.name_ = name_; + } + } + + @java.lang.Override + public Builder clone() { + return super.clone(); + } + + @java.lang.Override + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { + return super.setField(field, value); + } + + @java.lang.Override + public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { + return super.clearField(field); + } + + @java.lang.Override + public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return super.clearOneof(oneof); + } + + @java.lang.Override + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { + return super.setRepeatedField(field, index, value); + } + + 
@java.lang.Override + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { + return super.addRepeatedField(field, value); + } + + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof com.google.cloud.bigquery.biglake.v1alpha1.DeleteLockRequest) { + return mergeFrom((com.google.cloud.bigquery.biglake.v1alpha1.DeleteLockRequest) other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(com.google.cloud.bigquery.biglake.v1alpha1.DeleteLockRequest other) { + if (other + == com.google.cloud.bigquery.biglake.v1alpha1.DeleteLockRequest.getDefaultInstance()) + return this; + if (!other.getName().isEmpty()) { + name_ = other.name_; + bitField0_ |= 0x00000001; + onChanged(); + } + this.mergeUnknownFields(other.getUnknownFields()); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 10: + { + name_ = input.readStringRequireUtf8(); + bitField0_ |= 0x00000001; + break; + } // case 10 + default: + { + if (!super.parseUnknownField(input, extensionRegistry, tag)) { + done = true; // was an endgroup tag + } + break; + } // default: + } // switch (tag) + } // while (!done) + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.unwrapIOException(); + } finally { + onChanged(); + } // finally + return this; + } + + private int bitField0_; + + private java.lang.Object name_ = ""; + /** + * + * + *
+     * Required. The name of the lock to delete.
+     * Format:
+     * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}/locks/{lock_id}
+     * 
+ * + * + * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @return The name. + */ + public java.lang.String getName() { + java.lang.Object ref = name_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + name_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + * + * + *
+     * Required. The name of the lock to delete.
+     * Format:
+     * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}/locks/{lock_id}
+     * 
+ * + * + * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @return The bytes for name. + */ + public com.google.protobuf.ByteString getNameBytes() { + java.lang.Object ref = name_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); + name_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * + * + *
+     * Required. The name of the lock to delete.
+     * Format:
+     * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}/locks/{lock_id}
+     * 
+ * + * + * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @param value The name to set. + * @return This builder for chaining. + */ + public Builder setName(java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + name_ = value; + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + /** + * + * + *
+     * Required. The name of the lock to delete.
+     * Format:
+     * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}/locks/{lock_id}
+     * 
+ * + * + * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @return This builder for chaining. + */ + public Builder clearName() { + name_ = getDefaultInstance().getName(); + bitField0_ = (bitField0_ & ~0x00000001); + onChanged(); + return this; + } + /** + * + * + *
+     * Required. The name of the lock to delete.
+     * Format:
+     * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}/locks/{lock_id}
+     * 
+ * + * + * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @param value The bytes for name to set. + * @return This builder for chaining. + */ + public Builder setNameBytes(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + name_ = value; + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + + @java.lang.Override + public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + // @@protoc_insertion_point(builder_scope:google.cloud.bigquery.biglake.v1alpha1.DeleteLockRequest) + } + + // @@protoc_insertion_point(class_scope:google.cloud.bigquery.biglake.v1alpha1.DeleteLockRequest) + private static final com.google.cloud.bigquery.biglake.v1alpha1.DeleteLockRequest + DEFAULT_INSTANCE; + + static { + DEFAULT_INSTANCE = new com.google.cloud.bigquery.biglake.v1alpha1.DeleteLockRequest(); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.DeleteLockRequest getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + @java.lang.Override + public DeleteLockRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + Builder builder = newBuilder(); + try { + builder.mergeFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(builder.buildPartial()); + } catch (com.google.protobuf.UninitializedMessageException e) { + throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException(e) + .setUnfinishedMessage(builder.buildPartial()); + } + return builder.buildPartial(); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public com.google.cloud.bigquery.biglake.v1alpha1.DeleteLockRequest getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } +} diff --git a/java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/DeleteLockRequestOrBuilder.java b/java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/DeleteLockRequestOrBuilder.java new file mode 100644 index 000000000000..5b17d6dd1c57 --- /dev/null +++ b/java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/DeleteLockRequestOrBuilder.java @@ -0,0 +1,58 @@ +/* + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: google/cloud/bigquery/biglake/v1alpha1/metastore.proto + +package com.google.cloud.bigquery.biglake.v1alpha1; + +public interface DeleteLockRequestOrBuilder + extends + // @@protoc_insertion_point(interface_extends:google.cloud.bigquery.biglake.v1alpha1.DeleteLockRequest) + com.google.protobuf.MessageOrBuilder { + + /** + * + * + *
+   * Required. The name of the lock to delete.
+   * Format:
+   * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}/locks/{lock_id}
+   * 
+ * + * + * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @return The name. + */ + java.lang.String getName(); + /** + * + * + *
+   * Required. The name of the lock to delete.
+   * Format:
+   * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}/locks/{lock_id}
+   * 
+ * + * + * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @return The bytes for name. + */ + com.google.protobuf.ByteString getNameBytes(); +} diff --git a/java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/DeleteTableRequest.java b/java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/DeleteTableRequest.java new file mode 100644 index 000000000000..88f58f8fd783 --- /dev/null +++ b/java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/DeleteTableRequest.java @@ -0,0 +1,662 @@ +/* + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: google/cloud/bigquery/biglake/v1alpha1/metastore.proto + +package com.google.cloud.bigquery.biglake.v1alpha1; + +/** + * + * + *
+ * Request message for the DeleteTable method.
+ * 
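+ * <p>A construction sketch for illustration, assuming the same {@code newBuilder()} /
+ * {@code setName()} pattern as the other request messages in this file set; all identifiers
+ * are placeholders:
+ * <pre>{@code
+ * DeleteTableRequest request =
+ *     DeleteTableRequest.newBuilder()
+ *         .setName(
+ *             "projects/my-project/locations/us-central1/catalogs/my-catalog"
+ *                 + "/databases/my-db/tables/my-table")
+ *         .build();
+ * String tableToDelete = request.getName();
+ * }</pre>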
+ * + * Protobuf type {@code google.cloud.bigquery.biglake.v1alpha1.DeleteTableRequest} + */ +public final class DeleteTableRequest extends com.google.protobuf.GeneratedMessageV3 + implements + // @@protoc_insertion_point(message_implements:google.cloud.bigquery.biglake.v1alpha1.DeleteTableRequest) + DeleteTableRequestOrBuilder { + private static final long serialVersionUID = 0L; + // Use DeleteTableRequest.newBuilder() to construct. + private DeleteTableRequest(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + + private DeleteTableRequest() { + name_ = ""; + } + + @java.lang.Override + @SuppressWarnings({"unused"}) + protected java.lang.Object newInstance(UnusedPrivateParameter unused) { + return new DeleteTableRequest(); + } + + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet getUnknownFields() { + return this.unknownFields; + } + + public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { + return com.google.cloud.bigquery.biglake.v1alpha1.MetastoreProto + .internal_static_google_cloud_bigquery_biglake_v1alpha1_DeleteTableRequest_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return com.google.cloud.bigquery.biglake.v1alpha1.MetastoreProto + .internal_static_google_cloud_bigquery_biglake_v1alpha1_DeleteTableRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + com.google.cloud.bigquery.biglake.v1alpha1.DeleteTableRequest.class, + com.google.cloud.bigquery.biglake.v1alpha1.DeleteTableRequest.Builder.class); + } + + public static final int NAME_FIELD_NUMBER = 1; + + @SuppressWarnings("serial") + private volatile java.lang.Object name_ = ""; + /** + * + * + *
+   * Required. The name of the table to delete.
+   * Format:
+   * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}/tables/{table_id}
+   * 
+ * + * + * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @return The name. + */ + @java.lang.Override + public java.lang.String getName() { + java.lang.Object ref = name_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + name_ = s; + return s; + } + } + /** + * + * + *
+   * Required. The name of the table to delete.
+   * Format:
+   * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}/tables/{table_id}
+   * 
+ * + * + * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @return The bytes for name. + */ + @java.lang.Override + public com.google.protobuf.ByteString getNameBytes() { + java.lang.Object ref = name_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); + name_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + private byte memoizedIsInitialized = -1; + + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 1, name_); + } + getUnknownFields().writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, name_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof com.google.cloud.bigquery.biglake.v1alpha1.DeleteTableRequest)) { + return super.equals(obj); + } + com.google.cloud.bigquery.biglake.v1alpha1.DeleteTableRequest other = + (com.google.cloud.bigquery.biglake.v1alpha1.DeleteTableRequest) obj; + + if (!getName().equals(other.getName())) return false; + if (!getUnknownFields().equals(other.getUnknownFields())) return false; + return true; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + hash = (37 * hash) + NAME_FIELD_NUMBER; + hash = (53 * hash) + getName().hashCode(); + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.DeleteTableRequest parseFrom( + java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.DeleteTableRequest parseFrom( + java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.DeleteTableRequest parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.DeleteTableRequest parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.DeleteTableRequest parseFrom(byte[] data) + throws 
com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.DeleteTableRequest parseFrom( + byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.DeleteTableRequest parseFrom( + java.io.InputStream input) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.DeleteTableRequest parseFrom( + java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException( + PARSER, input, extensionRegistry); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.DeleteTableRequest parseDelimitedFrom( + java.io.InputStream input) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.DeleteTableRequest parseDelimitedFrom( + java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( + PARSER, input, extensionRegistry); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.DeleteTableRequest parseFrom( + com.google.protobuf.CodedInputStream input) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.DeleteTableRequest parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException( + PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { + return newBuilder(); + } + + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + + public static Builder newBuilder( + com.google.cloud.bigquery.biglake.v1alpha1.DeleteTableRequest prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * + * + *
+   * Request message for the DeleteTable method.
+   * 
+ * + * Protobuf type {@code google.cloud.bigquery.biglake.v1alpha1.DeleteTableRequest} + */ + public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder + implements + // @@protoc_insertion_point(builder_implements:google.cloud.bigquery.biglake.v1alpha1.DeleteTableRequest) + com.google.cloud.bigquery.biglake.v1alpha1.DeleteTableRequestOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { + return com.google.cloud.bigquery.biglake.v1alpha1.MetastoreProto + .internal_static_google_cloud_bigquery_biglake_v1alpha1_DeleteTableRequest_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return com.google.cloud.bigquery.biglake.v1alpha1.MetastoreProto + .internal_static_google_cloud_bigquery_biglake_v1alpha1_DeleteTableRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + com.google.cloud.bigquery.biglake.v1alpha1.DeleteTableRequest.class, + com.google.cloud.bigquery.biglake.v1alpha1.DeleteTableRequest.Builder.class); + } + + // Construct using com.google.cloud.bigquery.biglake.v1alpha1.DeleteTableRequest.newBuilder() + private Builder() {} + + private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + } + + @java.lang.Override + public Builder clear() { + super.clear(); + bitField0_ = 0; + name_ = ""; + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { + return com.google.cloud.bigquery.biglake.v1alpha1.MetastoreProto + .internal_static_google_cloud_bigquery_biglake_v1alpha1_DeleteTableRequest_descriptor; + } + + @java.lang.Override + public com.google.cloud.bigquery.biglake.v1alpha1.DeleteTableRequest + getDefaultInstanceForType() { + return com.google.cloud.bigquery.biglake.v1alpha1.DeleteTableRequest.getDefaultInstance(); + } + + @java.lang.Override + public com.google.cloud.bigquery.biglake.v1alpha1.DeleteTableRequest build() { + com.google.cloud.bigquery.biglake.v1alpha1.DeleteTableRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public com.google.cloud.bigquery.biglake.v1alpha1.DeleteTableRequest buildPartial() { + com.google.cloud.bigquery.biglake.v1alpha1.DeleteTableRequest result = + new com.google.cloud.bigquery.biglake.v1alpha1.DeleteTableRequest(this); + if (bitField0_ != 0) { + buildPartial0(result); + } + onBuilt(); + return result; + } + + private void buildPartial0( + com.google.cloud.bigquery.biglake.v1alpha1.DeleteTableRequest result) { + int from_bitField0_ = bitField0_; + if (((from_bitField0_ & 0x00000001) != 0)) { + result.name_ = name_; + } + } + + @java.lang.Override + public Builder clone() { + return super.clone(); + } + + @java.lang.Override + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { + return super.setField(field, value); + } + + @java.lang.Override + public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { + return super.clearField(field); + } + + @java.lang.Override + public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return super.clearOneof(oneof); + } + + @java.lang.Override + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { + return super.setRepeatedField(field, 
index, value); + } + + @java.lang.Override + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { + return super.addRepeatedField(field, value); + } + + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof com.google.cloud.bigquery.biglake.v1alpha1.DeleteTableRequest) { + return mergeFrom((com.google.cloud.bigquery.biglake.v1alpha1.DeleteTableRequest) other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(com.google.cloud.bigquery.biglake.v1alpha1.DeleteTableRequest other) { + if (other + == com.google.cloud.bigquery.biglake.v1alpha1.DeleteTableRequest.getDefaultInstance()) + return this; + if (!other.getName().isEmpty()) { + name_ = other.name_; + bitField0_ |= 0x00000001; + onChanged(); + } + this.mergeUnknownFields(other.getUnknownFields()); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 10: + { + name_ = input.readStringRequireUtf8(); + bitField0_ |= 0x00000001; + break; + } // case 10 + default: + { + if (!super.parseUnknownField(input, extensionRegistry, tag)) { + done = true; // was an endgroup tag + } + break; + } // default: + } // switch (tag) + } // while (!done) + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.unwrapIOException(); + } finally { + onChanged(); + } // finally + return this; + } + + private int bitField0_; + + private java.lang.Object name_ = ""; + /** + * + * + *
+     * Required. The name of the table to delete.
+     * Format:
+     * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}/tables/{table_id}
+     * 
+ * + * + * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @return The name. + */ + public java.lang.String getName() { + java.lang.Object ref = name_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + name_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + * + * + *
+     * Required. The name of the table to delete.
+     * Format:
+     * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}/tables/{table_id}
+     * 
+ * + * + * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @return The bytes for name. + */ + public com.google.protobuf.ByteString getNameBytes() { + java.lang.Object ref = name_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); + name_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * + * + *
+     * Required. The name of the table to delete.
+     * Format:
+     * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}/tables/{table_id}
+     * 
+ * + * + * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @param value The name to set. + * @return This builder for chaining. + */ + public Builder setName(java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + name_ = value; + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + /** + * + * + *
+     * Required. The name of the table to delete.
+     * Format:
+     * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}/tables/{table_id}
+     * 
+ * + * + * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @return This builder for chaining. + */ + public Builder clearName() { + name_ = getDefaultInstance().getName(); + bitField0_ = (bitField0_ & ~0x00000001); + onChanged(); + return this; + } + /** + * + * + *
+     * Required. The name of the table to delete.
+     * Format:
+     * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}/tables/{table_id}
+     * 
+ * + * + * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @param value The bytes for name to set. + * @return This builder for chaining. + */ + public Builder setNameBytes(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + name_ = value; + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + + @java.lang.Override + public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + // @@protoc_insertion_point(builder_scope:google.cloud.bigquery.biglake.v1alpha1.DeleteTableRequest) + } + + // @@protoc_insertion_point(class_scope:google.cloud.bigquery.biglake.v1alpha1.DeleteTableRequest) + private static final com.google.cloud.bigquery.biglake.v1alpha1.DeleteTableRequest + DEFAULT_INSTANCE; + + static { + DEFAULT_INSTANCE = new com.google.cloud.bigquery.biglake.v1alpha1.DeleteTableRequest(); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.DeleteTableRequest getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + @java.lang.Override + public DeleteTableRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + Builder builder = newBuilder(); + try { + builder.mergeFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(builder.buildPartial()); + } catch (com.google.protobuf.UninitializedMessageException e) { + throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException(e) + .setUnfinishedMessage(builder.buildPartial()); + } + return builder.buildPartial(); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public com.google.cloud.bigquery.biglake.v1alpha1.DeleteTableRequest getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } +} diff --git a/java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/DeleteTableRequestOrBuilder.java b/java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/DeleteTableRequestOrBuilder.java new file mode 100644 index 000000000000..c0c88b29e8ac --- /dev/null +++ b/java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/DeleteTableRequestOrBuilder.java @@ -0,0 +1,58 @@ +/* + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: google/cloud/bigquery/biglake/v1alpha1/metastore.proto + +package com.google.cloud.bigquery.biglake.v1alpha1; + +public interface DeleteTableRequestOrBuilder + extends + // @@protoc_insertion_point(interface_extends:google.cloud.bigquery.biglake.v1alpha1.DeleteTableRequest) + com.google.protobuf.MessageOrBuilder { + + /** + * + * + *
+   * Required. The name of the table to delete.
+   * Format:
+   * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}/tables/{table_id}
+   * 
+ * + * + * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @return The name. + */ + java.lang.String getName(); + /** + * + * + *
+   * Required. The name of the table to delete.
+   * Format:
+   * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}/tables/{table_id}
+   * 
+ * + * + * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @return The bytes for name. + */ + com.google.protobuf.ByteString getNameBytes(); +} diff --git a/java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/GetCatalogRequest.java b/java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/GetCatalogRequest.java new file mode 100644 index 000000000000..1cb4314fb7f8 --- /dev/null +++ b/java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/GetCatalogRequest.java @@ -0,0 +1,662 @@ +/* + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: google/cloud/bigquery/biglake/v1alpha1/metastore.proto + +package com.google.cloud.bigquery.biglake.v1alpha1; + +/** + * + * + *
+ * Request message for the GetCatalog method.
+ * 
+ * + * Protobuf type {@code google.cloud.bigquery.biglake.v1alpha1.GetCatalogRequest} + */ +public final class GetCatalogRequest extends com.google.protobuf.GeneratedMessageV3 + implements + // @@protoc_insertion_point(message_implements:google.cloud.bigquery.biglake.v1alpha1.GetCatalogRequest) + GetCatalogRequestOrBuilder { + private static final long serialVersionUID = 0L; + // Use GetCatalogRequest.newBuilder() to construct. + private GetCatalogRequest(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + + private GetCatalogRequest() { + name_ = ""; + } + + @java.lang.Override + @SuppressWarnings({"unused"}) + protected java.lang.Object newInstance(UnusedPrivateParameter unused) { + return new GetCatalogRequest(); + } + + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet getUnknownFields() { + return this.unknownFields; + } + + public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { + return com.google.cloud.bigquery.biglake.v1alpha1.MetastoreProto + .internal_static_google_cloud_bigquery_biglake_v1alpha1_GetCatalogRequest_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return com.google.cloud.bigquery.biglake.v1alpha1.MetastoreProto + .internal_static_google_cloud_bigquery_biglake_v1alpha1_GetCatalogRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + com.google.cloud.bigquery.biglake.v1alpha1.GetCatalogRequest.class, + com.google.cloud.bigquery.biglake.v1alpha1.GetCatalogRequest.Builder.class); + } + + public static final int NAME_FIELD_NUMBER = 1; + + @SuppressWarnings("serial") + private volatile java.lang.Object name_ = ""; + /** + * + * + *
+   * Required. The name of the catalog to retrieve.
+   * Format:
+   * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}
+   * 
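+   * For example (all segment values below are illustrative placeholders):
+   * projects/my-project/locations/us-central1/catalogs/my_catalog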
+ * + * + * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @return The name. + */ + @java.lang.Override + public java.lang.String getName() { + java.lang.Object ref = name_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + name_ = s; + return s; + } + } + /** + * + * + *
+   * Required. The name of the catalog to retrieve.
+   * Format:
+   * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}
+   * 
+ * + * + * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @return The bytes for name. + */ + @java.lang.Override + public com.google.protobuf.ByteString getNameBytes() { + java.lang.Object ref = name_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); + name_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + private byte memoizedIsInitialized = -1; + + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 1, name_); + } + getUnknownFields().writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, name_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof com.google.cloud.bigquery.biglake.v1alpha1.GetCatalogRequest)) { + return super.equals(obj); + } + com.google.cloud.bigquery.biglake.v1alpha1.GetCatalogRequest other = + (com.google.cloud.bigquery.biglake.v1alpha1.GetCatalogRequest) obj; + + if (!getName().equals(other.getName())) return false; + if (!getUnknownFields().equals(other.getUnknownFields())) return false; + return true; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + hash = (37 * hash) + NAME_FIELD_NUMBER; + hash = (53 * hash) + getName().hashCode(); + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.GetCatalogRequest parseFrom( + java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.GetCatalogRequest parseFrom( + java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.GetCatalogRequest parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.GetCatalogRequest parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.GetCatalogRequest parseFrom(byte[] data) + throws 
com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.GetCatalogRequest parseFrom( + byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.GetCatalogRequest parseFrom( + java.io.InputStream input) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.GetCatalogRequest parseFrom( + java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException( + PARSER, input, extensionRegistry); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.GetCatalogRequest parseDelimitedFrom( + java.io.InputStream input) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.GetCatalogRequest parseDelimitedFrom( + java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( + PARSER, input, extensionRegistry); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.GetCatalogRequest parseFrom( + com.google.protobuf.CodedInputStream input) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.GetCatalogRequest parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException( + PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { + return newBuilder(); + } + + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + + public static Builder newBuilder( + com.google.cloud.bigquery.biglake.v1alpha1.GetCatalogRequest prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * + * + *
+   * Request message for the GetCatalog method.
+   * 
+ * + * Protobuf type {@code google.cloud.bigquery.biglake.v1alpha1.GetCatalogRequest} + */ + public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder + implements + // @@protoc_insertion_point(builder_implements:google.cloud.bigquery.biglake.v1alpha1.GetCatalogRequest) + com.google.cloud.bigquery.biglake.v1alpha1.GetCatalogRequestOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { + return com.google.cloud.bigquery.biglake.v1alpha1.MetastoreProto + .internal_static_google_cloud_bigquery_biglake_v1alpha1_GetCatalogRequest_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return com.google.cloud.bigquery.biglake.v1alpha1.MetastoreProto + .internal_static_google_cloud_bigquery_biglake_v1alpha1_GetCatalogRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + com.google.cloud.bigquery.biglake.v1alpha1.GetCatalogRequest.class, + com.google.cloud.bigquery.biglake.v1alpha1.GetCatalogRequest.Builder.class); + } + + // Construct using com.google.cloud.bigquery.biglake.v1alpha1.GetCatalogRequest.newBuilder() + private Builder() {} + + private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + } + + @java.lang.Override + public Builder clear() { + super.clear(); + bitField0_ = 0; + name_ = ""; + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { + return com.google.cloud.bigquery.biglake.v1alpha1.MetastoreProto + .internal_static_google_cloud_bigquery_biglake_v1alpha1_GetCatalogRequest_descriptor; + } + + @java.lang.Override + public com.google.cloud.bigquery.biglake.v1alpha1.GetCatalogRequest + getDefaultInstanceForType() { + return com.google.cloud.bigquery.biglake.v1alpha1.GetCatalogRequest.getDefaultInstance(); + } + + @java.lang.Override + public com.google.cloud.bigquery.biglake.v1alpha1.GetCatalogRequest build() { + com.google.cloud.bigquery.biglake.v1alpha1.GetCatalogRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public com.google.cloud.bigquery.biglake.v1alpha1.GetCatalogRequest buildPartial() { + com.google.cloud.bigquery.biglake.v1alpha1.GetCatalogRequest result = + new com.google.cloud.bigquery.biglake.v1alpha1.GetCatalogRequest(this); + if (bitField0_ != 0) { + buildPartial0(result); + } + onBuilt(); + return result; + } + + private void buildPartial0( + com.google.cloud.bigquery.biglake.v1alpha1.GetCatalogRequest result) { + int from_bitField0_ = bitField0_; + if (((from_bitField0_ & 0x00000001) != 0)) { + result.name_ = name_; + } + } + + @java.lang.Override + public Builder clone() { + return super.clone(); + } + + @java.lang.Override + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { + return super.setField(field, value); + } + + @java.lang.Override + public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { + return super.clearField(field); + } + + @java.lang.Override + public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return super.clearOneof(oneof); + } + + @java.lang.Override + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { + return super.setRepeatedField(field, index, value); + } + + 
@java.lang.Override + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { + return super.addRepeatedField(field, value); + } + + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof com.google.cloud.bigquery.biglake.v1alpha1.GetCatalogRequest) { + return mergeFrom((com.google.cloud.bigquery.biglake.v1alpha1.GetCatalogRequest) other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(com.google.cloud.bigquery.biglake.v1alpha1.GetCatalogRequest other) { + if (other + == com.google.cloud.bigquery.biglake.v1alpha1.GetCatalogRequest.getDefaultInstance()) + return this; + if (!other.getName().isEmpty()) { + name_ = other.name_; + bitField0_ |= 0x00000001; + onChanged(); + } + this.mergeUnknownFields(other.getUnknownFields()); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 10: + { + name_ = input.readStringRequireUtf8(); + bitField0_ |= 0x00000001; + break; + } // case 10 + default: + { + if (!super.parseUnknownField(input, extensionRegistry, tag)) { + done = true; // was an endgroup tag + } + break; + } // default: + } // switch (tag) + } // while (!done) + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.unwrapIOException(); + } finally { + onChanged(); + } // finally + return this; + } + + private int bitField0_; + + private java.lang.Object name_ = ""; + /** + * + * + *
+     * Required. The name of the catalog to retrieve.
+     * Format:
+     * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}
+     * 
+ * + * + * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @return The name. + */ + public java.lang.String getName() { + java.lang.Object ref = name_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + name_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + * + * + *
+     * Required. The name of the catalog to retrieve.
+     * Format:
+     * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}
+     * 
+ * + * + * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @return The bytes for name. + */ + public com.google.protobuf.ByteString getNameBytes() { + java.lang.Object ref = name_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); + name_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * + * + *
+     * Required. The name of the catalog to retrieve.
+     * Format:
+     * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}
+     * 
+ * + * + * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @param value The name to set. + * @return This builder for chaining. + */ + public Builder setName(java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + name_ = value; + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + /** + * + * + *
+     * Required. The name of the catalog to retrieve.
+     * Format:
+     * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}
+     * 
+ * + * + * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @return This builder for chaining. + */ + public Builder clearName() { + name_ = getDefaultInstance().getName(); + bitField0_ = (bitField0_ & ~0x00000001); + onChanged(); + return this; + } + /** + * + * + *
+     * Required. The name of the catalog to retrieve.
+     * Format:
+     * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}
+     * 
+ * + * + * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @param value The bytes for name to set. + * @return This builder for chaining. + */ + public Builder setNameBytes(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + name_ = value; + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + + @java.lang.Override + public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + // @@protoc_insertion_point(builder_scope:google.cloud.bigquery.biglake.v1alpha1.GetCatalogRequest) + } + + // @@protoc_insertion_point(class_scope:google.cloud.bigquery.biglake.v1alpha1.GetCatalogRequest) + private static final com.google.cloud.bigquery.biglake.v1alpha1.GetCatalogRequest + DEFAULT_INSTANCE; + + static { + DEFAULT_INSTANCE = new com.google.cloud.bigquery.biglake.v1alpha1.GetCatalogRequest(); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.GetCatalogRequest getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + @java.lang.Override + public GetCatalogRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + Builder builder = newBuilder(); + try { + builder.mergeFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(builder.buildPartial()); + } catch (com.google.protobuf.UninitializedMessageException e) { + throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException(e) + .setUnfinishedMessage(builder.buildPartial()); + } + return builder.buildPartial(); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public com.google.cloud.bigquery.biglake.v1alpha1.GetCatalogRequest getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } +} diff --git a/java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/GetCatalogRequestOrBuilder.java b/java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/GetCatalogRequestOrBuilder.java new file mode 100644 index 000000000000..b0ad29303d84 --- /dev/null +++ b/java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/GetCatalogRequestOrBuilder.java @@ -0,0 +1,58 @@ +/* + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: google/cloud/bigquery/biglake/v1alpha1/metastore.proto + +package com.google.cloud.bigquery.biglake.v1alpha1; + +public interface GetCatalogRequestOrBuilder + extends + // @@protoc_insertion_point(interface_extends:google.cloud.bigquery.biglake.v1alpha1.GetCatalogRequest) + com.google.protobuf.MessageOrBuilder { + + /** + * + * + *
+   * Required. The name of the catalog to retrieve.
+   * Format:
+   * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}
+   * 
+ * + * + * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @return The name. + */ + java.lang.String getName(); + /** + * + * + *
+   * Required. The name of the catalog to retrieve.
+   * Format:
+   * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}
+   * 
+ * + * + * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @return The bytes for name. + */ + com.google.protobuf.ByteString getNameBytes(); +} diff --git a/java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/GetDatabaseRequest.java b/java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/GetDatabaseRequest.java new file mode 100644 index 000000000000..ba96d10e4913 --- /dev/null +++ b/java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/GetDatabaseRequest.java @@ -0,0 +1,662 @@ +/* + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: google/cloud/bigquery/biglake/v1alpha1/metastore.proto + +package com.google.cloud.bigquery.biglake.v1alpha1; + +/** + * + * + *
+ * Request message for the GetDatabase method.
+ * 
+ * + * Protobuf type {@code google.cloud.bigquery.biglake.v1alpha1.GetDatabaseRequest} + */ +public final class GetDatabaseRequest extends com.google.protobuf.GeneratedMessageV3 + implements + // @@protoc_insertion_point(message_implements:google.cloud.bigquery.biglake.v1alpha1.GetDatabaseRequest) + GetDatabaseRequestOrBuilder { + private static final long serialVersionUID = 0L; + // Use GetDatabaseRequest.newBuilder() to construct. + private GetDatabaseRequest(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + + private GetDatabaseRequest() { + name_ = ""; + } + + @java.lang.Override + @SuppressWarnings({"unused"}) + protected java.lang.Object newInstance(UnusedPrivateParameter unused) { + return new GetDatabaseRequest(); + } + + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet getUnknownFields() { + return this.unknownFields; + } + + public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { + return com.google.cloud.bigquery.biglake.v1alpha1.MetastoreProto + .internal_static_google_cloud_bigquery_biglake_v1alpha1_GetDatabaseRequest_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return com.google.cloud.bigquery.biglake.v1alpha1.MetastoreProto + .internal_static_google_cloud_bigquery_biglake_v1alpha1_GetDatabaseRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + com.google.cloud.bigquery.biglake.v1alpha1.GetDatabaseRequest.class, + com.google.cloud.bigquery.biglake.v1alpha1.GetDatabaseRequest.Builder.class); + } + + public static final int NAME_FIELD_NUMBER = 1; + + @SuppressWarnings("serial") + private volatile java.lang.Object name_ = ""; + /** + * + * + *
+   * Required. The name of the database to retrieve.
+   * Format:
+   * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}
+   * 
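+   * For example (all segment values below are illustrative placeholders):
+   * projects/my-project/locations/us-central1/catalogs/my_catalog/databases/my_db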
+ * + * + * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @return The name. + */ + @java.lang.Override + public java.lang.String getName() { + java.lang.Object ref = name_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + name_ = s; + return s; + } + } + /** + * + * + *
+   * Required. The name of the database to retrieve.
+   * Format:
+   * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}
+   * 
+ * + * + * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @return The bytes for name. + */ + @java.lang.Override + public com.google.protobuf.ByteString getNameBytes() { + java.lang.Object ref = name_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); + name_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + private byte memoizedIsInitialized = -1; + + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 1, name_); + } + getUnknownFields().writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, name_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof com.google.cloud.bigquery.biglake.v1alpha1.GetDatabaseRequest)) { + return super.equals(obj); + } + com.google.cloud.bigquery.biglake.v1alpha1.GetDatabaseRequest other = + (com.google.cloud.bigquery.biglake.v1alpha1.GetDatabaseRequest) obj; + + if (!getName().equals(other.getName())) return false; + if (!getUnknownFields().equals(other.getUnknownFields())) return false; + return true; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + hash = (37 * hash) + NAME_FIELD_NUMBER; + hash = (53 * hash) + getName().hashCode(); + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.GetDatabaseRequest parseFrom( + java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.GetDatabaseRequest parseFrom( + java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.GetDatabaseRequest parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.GetDatabaseRequest parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.GetDatabaseRequest parseFrom(byte[] data) + throws 
com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.GetDatabaseRequest parseFrom( + byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.GetDatabaseRequest parseFrom( + java.io.InputStream input) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.GetDatabaseRequest parseFrom( + java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException( + PARSER, input, extensionRegistry); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.GetDatabaseRequest parseDelimitedFrom( + java.io.InputStream input) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.GetDatabaseRequest parseDelimitedFrom( + java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( + PARSER, input, extensionRegistry); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.GetDatabaseRequest parseFrom( + com.google.protobuf.CodedInputStream input) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.GetDatabaseRequest parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException( + PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { + return newBuilder(); + } + + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + + public static Builder newBuilder( + com.google.cloud.bigquery.biglake.v1alpha1.GetDatabaseRequest prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * + * + *
+   * Request message for the GetDatabase method.
+   * 
+ * + * Protobuf type {@code google.cloud.bigquery.biglake.v1alpha1.GetDatabaseRequest} + */ + public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder + implements + // @@protoc_insertion_point(builder_implements:google.cloud.bigquery.biglake.v1alpha1.GetDatabaseRequest) + com.google.cloud.bigquery.biglake.v1alpha1.GetDatabaseRequestOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { + return com.google.cloud.bigquery.biglake.v1alpha1.MetastoreProto + .internal_static_google_cloud_bigquery_biglake_v1alpha1_GetDatabaseRequest_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return com.google.cloud.bigquery.biglake.v1alpha1.MetastoreProto + .internal_static_google_cloud_bigquery_biglake_v1alpha1_GetDatabaseRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + com.google.cloud.bigquery.biglake.v1alpha1.GetDatabaseRequest.class, + com.google.cloud.bigquery.biglake.v1alpha1.GetDatabaseRequest.Builder.class); + } + + // Construct using com.google.cloud.bigquery.biglake.v1alpha1.GetDatabaseRequest.newBuilder() + private Builder() {} + + private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + } + + @java.lang.Override + public Builder clear() { + super.clear(); + bitField0_ = 0; + name_ = ""; + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { + return com.google.cloud.bigquery.biglake.v1alpha1.MetastoreProto + .internal_static_google_cloud_bigquery_biglake_v1alpha1_GetDatabaseRequest_descriptor; + } + + @java.lang.Override + public com.google.cloud.bigquery.biglake.v1alpha1.GetDatabaseRequest + getDefaultInstanceForType() { + return com.google.cloud.bigquery.biglake.v1alpha1.GetDatabaseRequest.getDefaultInstance(); + } + + @java.lang.Override + public com.google.cloud.bigquery.biglake.v1alpha1.GetDatabaseRequest build() { + com.google.cloud.bigquery.biglake.v1alpha1.GetDatabaseRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public com.google.cloud.bigquery.biglake.v1alpha1.GetDatabaseRequest buildPartial() { + com.google.cloud.bigquery.biglake.v1alpha1.GetDatabaseRequest result = + new com.google.cloud.bigquery.biglake.v1alpha1.GetDatabaseRequest(this); + if (bitField0_ != 0) { + buildPartial0(result); + } + onBuilt(); + return result; + } + + private void buildPartial0( + com.google.cloud.bigquery.biglake.v1alpha1.GetDatabaseRequest result) { + int from_bitField0_ = bitField0_; + if (((from_bitField0_ & 0x00000001) != 0)) { + result.name_ = name_; + } + } + + @java.lang.Override + public Builder clone() { + return super.clone(); + } + + @java.lang.Override + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { + return super.setField(field, value); + } + + @java.lang.Override + public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { + return super.clearField(field); + } + + @java.lang.Override + public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return super.clearOneof(oneof); + } + + @java.lang.Override + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { + return super.setRepeatedField(field, 
index, value); + } + + @java.lang.Override + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { + return super.addRepeatedField(field, value); + } + + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof com.google.cloud.bigquery.biglake.v1alpha1.GetDatabaseRequest) { + return mergeFrom((com.google.cloud.bigquery.biglake.v1alpha1.GetDatabaseRequest) other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(com.google.cloud.bigquery.biglake.v1alpha1.GetDatabaseRequest other) { + if (other + == com.google.cloud.bigquery.biglake.v1alpha1.GetDatabaseRequest.getDefaultInstance()) + return this; + if (!other.getName().isEmpty()) { + name_ = other.name_; + bitField0_ |= 0x00000001; + onChanged(); + } + this.mergeUnknownFields(other.getUnknownFields()); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 10: + { + name_ = input.readStringRequireUtf8(); + bitField0_ |= 0x00000001; + break; + } // case 10 + default: + { + if (!super.parseUnknownField(input, extensionRegistry, tag)) { + done = true; // was an endgroup tag + } + break; + } // default: + } // switch (tag) + } // while (!done) + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.unwrapIOException(); + } finally { + onChanged(); + } // finally + return this; + } + + private int bitField0_; + + private java.lang.Object name_ = ""; + /** + * + * + *
+     * Required. The name of the database to retrieve.
+     * Format:
+     * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}
+     * 
+ * + * + * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @return The name. + */ + public java.lang.String getName() { + java.lang.Object ref = name_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + name_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + * + * + *
+     * Required. The name of the database to retrieve.
+     * Format:
+     * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}
+     * 
+ * + * + * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @return The bytes for name. + */ + public com.google.protobuf.ByteString getNameBytes() { + java.lang.Object ref = name_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); + name_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * + * + *
+     * Required. The name of the database to retrieve.
+     * Format:
+     * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}
+     * 
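+     *
+     * A minimal, illustrative sketch of setting this field (the project, location,
+     * catalog, and database IDs below are placeholders):
+     *
+     *   GetDatabaseRequest request =
+     *       GetDatabaseRequest.newBuilder()
+     *           .setName(
+     *               "projects/my-project/locations/us/catalogs/my-catalog/databases/my-db")
+     *           .build();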
+ * + * + * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @param value The name to set. + * @return This builder for chaining. + */ + public Builder setName(java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + name_ = value; + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + /** + * + * + *
+     * Required. The name of the database to retrieve.
+     * Format:
+     * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}
+     * 
+ * + * + * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @return This builder for chaining. + */ + public Builder clearName() { + name_ = getDefaultInstance().getName(); + bitField0_ = (bitField0_ & ~0x00000001); + onChanged(); + return this; + } + /** + * + * + *
+     * Required. The name of the database to retrieve.
+     * Format:
+     * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}
+     * 
+ * + * + * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @param value The bytes for name to set. + * @return This builder for chaining. + */ + public Builder setNameBytes(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + name_ = value; + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + + @java.lang.Override + public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + // @@protoc_insertion_point(builder_scope:google.cloud.bigquery.biglake.v1alpha1.GetDatabaseRequest) + } + + // @@protoc_insertion_point(class_scope:google.cloud.bigquery.biglake.v1alpha1.GetDatabaseRequest) + private static final com.google.cloud.bigquery.biglake.v1alpha1.GetDatabaseRequest + DEFAULT_INSTANCE; + + static { + DEFAULT_INSTANCE = new com.google.cloud.bigquery.biglake.v1alpha1.GetDatabaseRequest(); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.GetDatabaseRequest getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + @java.lang.Override + public GetDatabaseRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + Builder builder = newBuilder(); + try { + builder.mergeFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(builder.buildPartial()); + } catch (com.google.protobuf.UninitializedMessageException e) { + throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException(e) + .setUnfinishedMessage(builder.buildPartial()); + } + return builder.buildPartial(); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public com.google.cloud.bigquery.biglake.v1alpha1.GetDatabaseRequest getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } +} diff --git a/java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/GetDatabaseRequestOrBuilder.java b/java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/GetDatabaseRequestOrBuilder.java new file mode 100644 index 000000000000..26a285cf815d --- /dev/null +++ b/java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/GetDatabaseRequestOrBuilder.java @@ -0,0 +1,58 @@ +/* + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: google/cloud/bigquery/biglake/v1alpha1/metastore.proto + +package com.google.cloud.bigquery.biglake.v1alpha1; + +public interface GetDatabaseRequestOrBuilder + extends + // @@protoc_insertion_point(interface_extends:google.cloud.bigquery.biglake.v1alpha1.GetDatabaseRequest) + com.google.protobuf.MessageOrBuilder { + + /** + * + * + *
+   * Required. The name of the database to retrieve.
+   * Format:
+   * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}
+   * 
+ * + * + * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @return The name. + */ + java.lang.String getName(); + /** + * + * + *
+   * Required. The name of the database to retrieve.
+   * Format:
+   * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}
+   * 
+ * + * + * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @return The bytes for name. + */ + com.google.protobuf.ByteString getNameBytes(); +} diff --git a/java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/GetTableRequest.java b/java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/GetTableRequest.java new file mode 100644 index 000000000000..89e94f42bc8b --- /dev/null +++ b/java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/GetTableRequest.java @@ -0,0 +1,658 @@ +/* + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: google/cloud/bigquery/biglake/v1alpha1/metastore.proto + +package com.google.cloud.bigquery.biglake.v1alpha1; + +/** + * + * + *
+ * Request message for the GetTable method.
+ * 
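+ *
+ * A minimal, illustrative sketch of building this request and handing it to the
+ * generated client (all IDs are placeholders, and the MetastoreServiceClient call
+ * is assumed from the standard generated surface rather than shown in this file):
+ *
+ *   GetTableRequest request =
+ *       GetTableRequest.newBuilder()
+ *           .setName(
+ *               "projects/my-project/locations/us/catalogs/my-catalog"
+ *                   + "/databases/my-db/tables/my-table")
+ *           .build();
+ *   Table table = metastoreServiceClient.getTable(request);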
+ * + * Protobuf type {@code google.cloud.bigquery.biglake.v1alpha1.GetTableRequest} + */ +public final class GetTableRequest extends com.google.protobuf.GeneratedMessageV3 + implements + // @@protoc_insertion_point(message_implements:google.cloud.bigquery.biglake.v1alpha1.GetTableRequest) + GetTableRequestOrBuilder { + private static final long serialVersionUID = 0L; + // Use GetTableRequest.newBuilder() to construct. + private GetTableRequest(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + + private GetTableRequest() { + name_ = ""; + } + + @java.lang.Override + @SuppressWarnings({"unused"}) + protected java.lang.Object newInstance(UnusedPrivateParameter unused) { + return new GetTableRequest(); + } + + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet getUnknownFields() { + return this.unknownFields; + } + + public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { + return com.google.cloud.bigquery.biglake.v1alpha1.MetastoreProto + .internal_static_google_cloud_bigquery_biglake_v1alpha1_GetTableRequest_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return com.google.cloud.bigquery.biglake.v1alpha1.MetastoreProto + .internal_static_google_cloud_bigquery_biglake_v1alpha1_GetTableRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + com.google.cloud.bigquery.biglake.v1alpha1.GetTableRequest.class, + com.google.cloud.bigquery.biglake.v1alpha1.GetTableRequest.Builder.class); + } + + public static final int NAME_FIELD_NUMBER = 1; + + @SuppressWarnings("serial") + private volatile java.lang.Object name_ = ""; + /** + * + * + *
+   * Required. The name of the table to retrieve.
+   * Format:
+   * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}/tables/{table_id}
+   * 
+ * + * + * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @return The name. + */ + @java.lang.Override + public java.lang.String getName() { + java.lang.Object ref = name_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + name_ = s; + return s; + } + } + /** + * + * + *
+   * Required. The name of the table to retrieve.
+   * Format:
+   * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}/tables/{table_id}
+   * 
+ * + * + * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @return The bytes for name. + */ + @java.lang.Override + public com.google.protobuf.ByteString getNameBytes() { + java.lang.Object ref = name_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); + name_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + private byte memoizedIsInitialized = -1; + + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 1, name_); + } + getUnknownFields().writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, name_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof com.google.cloud.bigquery.biglake.v1alpha1.GetTableRequest)) { + return super.equals(obj); + } + com.google.cloud.bigquery.biglake.v1alpha1.GetTableRequest other = + (com.google.cloud.bigquery.biglake.v1alpha1.GetTableRequest) obj; + + if (!getName().equals(other.getName())) return false; + if (!getUnknownFields().equals(other.getUnknownFields())) return false; + return true; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + hash = (37 * hash) + NAME_FIELD_NUMBER; + hash = (53 * hash) + getName().hashCode(); + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.GetTableRequest parseFrom( + java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.GetTableRequest parseFrom( + java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.GetTableRequest parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.GetTableRequest parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.GetTableRequest parseFrom(byte[] data) + throws 
com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.GetTableRequest parseFrom( + byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.GetTableRequest parseFrom( + java.io.InputStream input) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.GetTableRequest parseFrom( + java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException( + PARSER, input, extensionRegistry); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.GetTableRequest parseDelimitedFrom( + java.io.InputStream input) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.GetTableRequest parseDelimitedFrom( + java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( + PARSER, input, extensionRegistry); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.GetTableRequest parseFrom( + com.google.protobuf.CodedInputStream input) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.GetTableRequest parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException( + PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { + return newBuilder(); + } + + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + + public static Builder newBuilder( + com.google.cloud.bigquery.biglake.v1alpha1.GetTableRequest prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * + * + *
+   * Request message for the GetTable method.
+   * 
+ * + * Protobuf type {@code google.cloud.bigquery.biglake.v1alpha1.GetTableRequest} + */ + public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder + implements + // @@protoc_insertion_point(builder_implements:google.cloud.bigquery.biglake.v1alpha1.GetTableRequest) + com.google.cloud.bigquery.biglake.v1alpha1.GetTableRequestOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { + return com.google.cloud.bigquery.biglake.v1alpha1.MetastoreProto + .internal_static_google_cloud_bigquery_biglake_v1alpha1_GetTableRequest_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return com.google.cloud.bigquery.biglake.v1alpha1.MetastoreProto + .internal_static_google_cloud_bigquery_biglake_v1alpha1_GetTableRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + com.google.cloud.bigquery.biglake.v1alpha1.GetTableRequest.class, + com.google.cloud.bigquery.biglake.v1alpha1.GetTableRequest.Builder.class); + } + + // Construct using com.google.cloud.bigquery.biglake.v1alpha1.GetTableRequest.newBuilder() + private Builder() {} + + private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + } + + @java.lang.Override + public Builder clear() { + super.clear(); + bitField0_ = 0; + name_ = ""; + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { + return com.google.cloud.bigquery.biglake.v1alpha1.MetastoreProto + .internal_static_google_cloud_bigquery_biglake_v1alpha1_GetTableRequest_descriptor; + } + + @java.lang.Override + public com.google.cloud.bigquery.biglake.v1alpha1.GetTableRequest getDefaultInstanceForType() { + return com.google.cloud.bigquery.biglake.v1alpha1.GetTableRequest.getDefaultInstance(); + } + + @java.lang.Override + public com.google.cloud.bigquery.biglake.v1alpha1.GetTableRequest build() { + com.google.cloud.bigquery.biglake.v1alpha1.GetTableRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public com.google.cloud.bigquery.biglake.v1alpha1.GetTableRequest buildPartial() { + com.google.cloud.bigquery.biglake.v1alpha1.GetTableRequest result = + new com.google.cloud.bigquery.biglake.v1alpha1.GetTableRequest(this); + if (bitField0_ != 0) { + buildPartial0(result); + } + onBuilt(); + return result; + } + + private void buildPartial0(com.google.cloud.bigquery.biglake.v1alpha1.GetTableRequest result) { + int from_bitField0_ = bitField0_; + if (((from_bitField0_ & 0x00000001) != 0)) { + result.name_ = name_; + } + } + + @java.lang.Override + public Builder clone() { + return super.clone(); + } + + @java.lang.Override + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { + return super.setField(field, value); + } + + @java.lang.Override + public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { + return super.clearField(field); + } + + @java.lang.Override + public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return super.clearOneof(oneof); + } + + @java.lang.Override + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { + return super.setRepeatedField(field, index, value); + } + + @java.lang.Override + public Builder 
addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { + return super.addRepeatedField(field, value); + } + + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof com.google.cloud.bigquery.biglake.v1alpha1.GetTableRequest) { + return mergeFrom((com.google.cloud.bigquery.biglake.v1alpha1.GetTableRequest) other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(com.google.cloud.bigquery.biglake.v1alpha1.GetTableRequest other) { + if (other == com.google.cloud.bigquery.biglake.v1alpha1.GetTableRequest.getDefaultInstance()) + return this; + if (!other.getName().isEmpty()) { + name_ = other.name_; + bitField0_ |= 0x00000001; + onChanged(); + } + this.mergeUnknownFields(other.getUnknownFields()); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 10: + { + name_ = input.readStringRequireUtf8(); + bitField0_ |= 0x00000001; + break; + } // case 10 + default: + { + if (!super.parseUnknownField(input, extensionRegistry, tag)) { + done = true; // was an endgroup tag + } + break; + } // default: + } // switch (tag) + } // while (!done) + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.unwrapIOException(); + } finally { + onChanged(); + } // finally + return this; + } + + private int bitField0_; + + private java.lang.Object name_ = ""; + /** + * + * + *
+     * Required. The name of the table to retrieve.
+     * Format:
+     * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}/tables/{table_id}
+     * 
+ * + * + * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @return The name. + */ + public java.lang.String getName() { + java.lang.Object ref = name_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + name_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + * + * + *
+     * Required. The name of the table to retrieve.
+     * Format:
+     * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}/tables/{table_id}
+     * 
+ * + * + * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @return The bytes for name. + */ + public com.google.protobuf.ByteString getNameBytes() { + java.lang.Object ref = name_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); + name_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * + * + *
+     * Required. The name of the table to retrieve.
+     * Format:
+     * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}/tables/{table_id}
+     * 
+ * + * + * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @param value The name to set. + * @return This builder for chaining. + */ + public Builder setName(java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + name_ = value; + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + /** + * + * + *
+     * Required. The name of the table to retrieve.
+     * Format:
+     * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}/tables/{table_id}
+     * 
+ * + * + * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @return This builder for chaining. + */ + public Builder clearName() { + name_ = getDefaultInstance().getName(); + bitField0_ = (bitField0_ & ~0x00000001); + onChanged(); + return this; + } + /** + * + * + *
+     * Required. The name of the table to retrieve.
+     * Format:
+     * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}/tables/{table_id}
+     * 
+ * + * + * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @param value The bytes for name to set. + * @return This builder for chaining. + */ + public Builder setNameBytes(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + name_ = value; + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + + @java.lang.Override + public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + // @@protoc_insertion_point(builder_scope:google.cloud.bigquery.biglake.v1alpha1.GetTableRequest) + } + + // @@protoc_insertion_point(class_scope:google.cloud.bigquery.biglake.v1alpha1.GetTableRequest) + private static final com.google.cloud.bigquery.biglake.v1alpha1.GetTableRequest DEFAULT_INSTANCE; + + static { + DEFAULT_INSTANCE = new com.google.cloud.bigquery.biglake.v1alpha1.GetTableRequest(); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.GetTableRequest getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + @java.lang.Override + public GetTableRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + Builder builder = newBuilder(); + try { + builder.mergeFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(builder.buildPartial()); + } catch (com.google.protobuf.UninitializedMessageException e) { + throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException(e) + .setUnfinishedMessage(builder.buildPartial()); + } + return builder.buildPartial(); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public com.google.cloud.bigquery.biglake.v1alpha1.GetTableRequest getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } +} diff --git a/java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/GetTableRequestOrBuilder.java b/java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/GetTableRequestOrBuilder.java new file mode 100644 index 000000000000..370ad8d6ff4e --- /dev/null +++ b/java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/GetTableRequestOrBuilder.java @@ -0,0 +1,58 @@ +/* + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: google/cloud/bigquery/biglake/v1alpha1/metastore.proto + +package com.google.cloud.bigquery.biglake.v1alpha1; + +public interface GetTableRequestOrBuilder + extends + // @@protoc_insertion_point(interface_extends:google.cloud.bigquery.biglake.v1alpha1.GetTableRequest) + com.google.protobuf.MessageOrBuilder { + + /** + * + * + *
+   * Required. The name of the table to retrieve.
+   * Format:
+   * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}/tables/{table_id}
+   * 
+ * + * + * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @return The name. + */ + java.lang.String getName(); + /** + * + * + *
+   * Required. The name of the table to retrieve.
+   * Format:
+   * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}/tables/{table_id}
+   * 
+ * + * + * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @return The bytes for name. + */ + com.google.protobuf.ByteString getNameBytes(); +} diff --git a/java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/HiveDatabaseOptions.java b/java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/HiveDatabaseOptions.java new file mode 100644 index 000000000000..d1228bf01501 --- /dev/null +++ b/java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/HiveDatabaseOptions.java @@ -0,0 +1,978 @@ +/* + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: google/cloud/bigquery/biglake/v1alpha1/metastore.proto + +package com.google.cloud.bigquery.biglake.v1alpha1; + +/** + * + * + *
+ * Options of a Hive database.
+ * 
+ * + * Protobuf type {@code google.cloud.bigquery.biglake.v1alpha1.HiveDatabaseOptions} + */ +public final class HiveDatabaseOptions extends com.google.protobuf.GeneratedMessageV3 + implements + // @@protoc_insertion_point(message_implements:google.cloud.bigquery.biglake.v1alpha1.HiveDatabaseOptions) + HiveDatabaseOptionsOrBuilder { + private static final long serialVersionUID = 0L; + // Use HiveDatabaseOptions.newBuilder() to construct. + private HiveDatabaseOptions(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + + private HiveDatabaseOptions() { + locationUri_ = ""; + } + + @java.lang.Override + @SuppressWarnings({"unused"}) + protected java.lang.Object newInstance(UnusedPrivateParameter unused) { + return new HiveDatabaseOptions(); + } + + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet getUnknownFields() { + return this.unknownFields; + } + + public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { + return com.google.cloud.bigquery.biglake.v1alpha1.MetastoreProto + .internal_static_google_cloud_bigquery_biglake_v1alpha1_HiveDatabaseOptions_descriptor; + } + + @SuppressWarnings({"rawtypes"}) + @java.lang.Override + protected com.google.protobuf.MapField internalGetMapField(int number) { + switch (number) { + case 2: + return internalGetParameters(); + default: + throw new RuntimeException("Invalid map field number: " + number); + } + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return com.google.cloud.bigquery.biglake.v1alpha1.MetastoreProto + .internal_static_google_cloud_bigquery_biglake_v1alpha1_HiveDatabaseOptions_fieldAccessorTable + .ensureFieldAccessorsInitialized( + com.google.cloud.bigquery.biglake.v1alpha1.HiveDatabaseOptions.class, + com.google.cloud.bigquery.biglake.v1alpha1.HiveDatabaseOptions.Builder.class); + } + + public static final int LOCATION_URI_FIELD_NUMBER = 1; + + @SuppressWarnings("serial") + private volatile java.lang.Object locationUri_ = ""; + /** + * + * + *
+   * Cloud Storage folder URI where the database data is stored, starting with
+   * "gs://".
+   * 
+ * + * string location_uri = 1; + * + * @return The locationUri. + */ + @java.lang.Override + public java.lang.String getLocationUri() { + java.lang.Object ref = locationUri_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + locationUri_ = s; + return s; + } + } + /** + * + * + *
+   * Cloud Storage folder URI where the database data is stored, starting with
+   * "gs://".
+   * 
+ * + * string location_uri = 1; + * + * @return The bytes for locationUri. + */ + @java.lang.Override + public com.google.protobuf.ByteString getLocationUriBytes() { + java.lang.Object ref = locationUri_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); + locationUri_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int PARAMETERS_FIELD_NUMBER = 2; + + private static final class ParametersDefaultEntryHolder { + static final com.google.protobuf.MapEntry defaultEntry = + com.google.protobuf.MapEntry.newDefaultInstance( + com.google.cloud.bigquery.biglake.v1alpha1.MetastoreProto + .internal_static_google_cloud_bigquery_biglake_v1alpha1_HiveDatabaseOptions_ParametersEntry_descriptor, + com.google.protobuf.WireFormat.FieldType.STRING, + "", + com.google.protobuf.WireFormat.FieldType.STRING, + ""); + } + + @SuppressWarnings("serial") + private com.google.protobuf.MapField parameters_; + + private com.google.protobuf.MapField internalGetParameters() { + if (parameters_ == null) { + return com.google.protobuf.MapField.emptyMapField(ParametersDefaultEntryHolder.defaultEntry); + } + return parameters_; + } + + public int getParametersCount() { + return internalGetParameters().getMap().size(); + } + /** + * + * + *
+   * Stores user-supplied Hive database parameters.
+   * 
+ * + * map<string, string> parameters = 2; + */ + @java.lang.Override + public boolean containsParameters(java.lang.String key) { + if (key == null) { + throw new NullPointerException("map key"); + } + return internalGetParameters().getMap().containsKey(key); + } + /** Use {@link #getParametersMap()} instead. */ + @java.lang.Override + @java.lang.Deprecated + public java.util.Map getParameters() { + return getParametersMap(); + } + /** + * + * + *
+   * Stores user-supplied Hive database parameters.
+   * 
+ * + * map<string, string> parameters = 2; + */ + @java.lang.Override + public java.util.Map getParametersMap() { + return internalGetParameters().getMap(); + } + /** + * + * + *
+   * Stores user-supplied Hive database parameters.
+   * 
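+   *
+   * For example, given a HiveDatabaseOptions instance named "options" (illustrative;
+   * "owner" is a placeholder parameter key):
+   *
+   *   String owner = options.getParametersOrDefault("owner", "unknown");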
+ * + * map<string, string> parameters = 2; + */ + @java.lang.Override + public /* nullable */ java.lang.String getParametersOrDefault( + java.lang.String key, + /* nullable */ + java.lang.String defaultValue) { + if (key == null) { + throw new NullPointerException("map key"); + } + java.util.Map map = internalGetParameters().getMap(); + return map.containsKey(key) ? map.get(key) : defaultValue; + } + /** + * + * + *
+   * Stores user-supplied Hive database parameters.
+   * 
+ * + * map<string, string> parameters = 2; + */ + @java.lang.Override + public java.lang.String getParametersOrThrow(java.lang.String key) { + if (key == null) { + throw new NullPointerException("map key"); + } + java.util.Map map = internalGetParameters().getMap(); + if (!map.containsKey(key)) { + throw new java.lang.IllegalArgumentException(); + } + return map.get(key); + } + + private byte memoizedIsInitialized = -1; + + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(locationUri_)) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 1, locationUri_); + } + com.google.protobuf.GeneratedMessageV3.serializeStringMapTo( + output, internalGetParameters(), ParametersDefaultEntryHolder.defaultEntry, 2); + getUnknownFields().writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(locationUri_)) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, locationUri_); + } + for (java.util.Map.Entry entry : + internalGetParameters().getMap().entrySet()) { + com.google.protobuf.MapEntry parameters__ = + ParametersDefaultEntryHolder.defaultEntry + .newBuilderForType() + .setKey(entry.getKey()) + .setValue(entry.getValue()) + .build(); + size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, parameters__); + } + size += getUnknownFields().getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof com.google.cloud.bigquery.biglake.v1alpha1.HiveDatabaseOptions)) { + return super.equals(obj); + } + com.google.cloud.bigquery.biglake.v1alpha1.HiveDatabaseOptions other = + (com.google.cloud.bigquery.biglake.v1alpha1.HiveDatabaseOptions) obj; + + if (!getLocationUri().equals(other.getLocationUri())) return false; + if (!internalGetParameters().equals(other.internalGetParameters())) return false; + if (!getUnknownFields().equals(other.getUnknownFields())) return false; + return true; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + hash = (37 * hash) + LOCATION_URI_FIELD_NUMBER; + hash = (53 * hash) + getLocationUri().hashCode(); + if (!internalGetParameters().getMap().isEmpty()) { + hash = (37 * hash) + PARAMETERS_FIELD_NUMBER; + hash = (53 * hash) + internalGetParameters().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.HiveDatabaseOptions parseFrom( + java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.HiveDatabaseOptions parseFrom( + java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, 
extensionRegistry); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.HiveDatabaseOptions parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.HiveDatabaseOptions parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.HiveDatabaseOptions parseFrom( + byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.HiveDatabaseOptions parseFrom( + byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.HiveDatabaseOptions parseFrom( + java.io.InputStream input) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.HiveDatabaseOptions parseFrom( + java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException( + PARSER, input, extensionRegistry); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.HiveDatabaseOptions parseDelimitedFrom( + java.io.InputStream input) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.HiveDatabaseOptions parseDelimitedFrom( + java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( + PARSER, input, extensionRegistry); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.HiveDatabaseOptions parseFrom( + com.google.protobuf.CodedInputStream input) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.HiveDatabaseOptions parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException( + PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { + return newBuilder(); + } + + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + + public static Builder newBuilder( + com.google.cloud.bigquery.biglake.v1alpha1.HiveDatabaseOptions prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * + * + *
+   * Options of a Hive database.
+   * 
+ * + * Protobuf type {@code google.cloud.bigquery.biglake.v1alpha1.HiveDatabaseOptions} + */ + public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder + implements + // @@protoc_insertion_point(builder_implements:google.cloud.bigquery.biglake.v1alpha1.HiveDatabaseOptions) + com.google.cloud.bigquery.biglake.v1alpha1.HiveDatabaseOptionsOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { + return com.google.cloud.bigquery.biglake.v1alpha1.MetastoreProto + .internal_static_google_cloud_bigquery_biglake_v1alpha1_HiveDatabaseOptions_descriptor; + } + + @SuppressWarnings({"rawtypes"}) + protected com.google.protobuf.MapField internalGetMapField(int number) { + switch (number) { + case 2: + return internalGetParameters(); + default: + throw new RuntimeException("Invalid map field number: " + number); + } + } + + @SuppressWarnings({"rawtypes"}) + protected com.google.protobuf.MapField internalGetMutableMapField(int number) { + switch (number) { + case 2: + return internalGetMutableParameters(); + default: + throw new RuntimeException("Invalid map field number: " + number); + } + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return com.google.cloud.bigquery.biglake.v1alpha1.MetastoreProto + .internal_static_google_cloud_bigquery_biglake_v1alpha1_HiveDatabaseOptions_fieldAccessorTable + .ensureFieldAccessorsInitialized( + com.google.cloud.bigquery.biglake.v1alpha1.HiveDatabaseOptions.class, + com.google.cloud.bigquery.biglake.v1alpha1.HiveDatabaseOptions.Builder.class); + } + + // Construct using com.google.cloud.bigquery.biglake.v1alpha1.HiveDatabaseOptions.newBuilder() + private Builder() {} + + private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + } + + @java.lang.Override + public Builder clear() { + super.clear(); + bitField0_ = 0; + locationUri_ = ""; + internalGetMutableParameters().clear(); + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { + return com.google.cloud.bigquery.biglake.v1alpha1.MetastoreProto + .internal_static_google_cloud_bigquery_biglake_v1alpha1_HiveDatabaseOptions_descriptor; + } + + @java.lang.Override + public com.google.cloud.bigquery.biglake.v1alpha1.HiveDatabaseOptions + getDefaultInstanceForType() { + return com.google.cloud.bigquery.biglake.v1alpha1.HiveDatabaseOptions.getDefaultInstance(); + } + + @java.lang.Override + public com.google.cloud.bigquery.biglake.v1alpha1.HiveDatabaseOptions build() { + com.google.cloud.bigquery.biglake.v1alpha1.HiveDatabaseOptions result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public com.google.cloud.bigquery.biglake.v1alpha1.HiveDatabaseOptions buildPartial() { + com.google.cloud.bigquery.biglake.v1alpha1.HiveDatabaseOptions result = + new com.google.cloud.bigquery.biglake.v1alpha1.HiveDatabaseOptions(this); + if (bitField0_ != 0) { + buildPartial0(result); + } + onBuilt(); + return result; + } + + private void buildPartial0( + com.google.cloud.bigquery.biglake.v1alpha1.HiveDatabaseOptions result) { + int from_bitField0_ = bitField0_; + if (((from_bitField0_ & 0x00000001) != 0)) { + result.locationUri_ = locationUri_; + } + if (((from_bitField0_ & 0x00000002) != 0)) { + result.parameters_ = internalGetParameters(); + 
result.parameters_.makeImmutable(); + } + } + + @java.lang.Override + public Builder clone() { + return super.clone(); + } + + @java.lang.Override + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { + return super.setField(field, value); + } + + @java.lang.Override + public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { + return super.clearField(field); + } + + @java.lang.Override + public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return super.clearOneof(oneof); + } + + @java.lang.Override + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { + return super.setRepeatedField(field, index, value); + } + + @java.lang.Override + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { + return super.addRepeatedField(field, value); + } + + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof com.google.cloud.bigquery.biglake.v1alpha1.HiveDatabaseOptions) { + return mergeFrom((com.google.cloud.bigquery.biglake.v1alpha1.HiveDatabaseOptions) other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(com.google.cloud.bigquery.biglake.v1alpha1.HiveDatabaseOptions other) { + if (other + == com.google.cloud.bigquery.biglake.v1alpha1.HiveDatabaseOptions.getDefaultInstance()) + return this; + if (!other.getLocationUri().isEmpty()) { + locationUri_ = other.locationUri_; + bitField0_ |= 0x00000001; + onChanged(); + } + internalGetMutableParameters().mergeFrom(other.internalGetParameters()); + bitField0_ |= 0x00000002; + this.mergeUnknownFields(other.getUnknownFields()); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 10: + { + locationUri_ = input.readStringRequireUtf8(); + bitField0_ |= 0x00000001; + break; + } // case 10 + case 18: + { + com.google.protobuf.MapEntry parameters__ = + input.readMessage( + ParametersDefaultEntryHolder.defaultEntry.getParserForType(), + extensionRegistry); + internalGetMutableParameters() + .getMutableMap() + .put(parameters__.getKey(), parameters__.getValue()); + bitField0_ |= 0x00000002; + break; + } // case 18 + default: + { + if (!super.parseUnknownField(input, extensionRegistry, tag)) { + done = true; // was an endgroup tag + } + break; + } // default: + } // switch (tag) + } // while (!done) + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.unwrapIOException(); + } finally { + onChanged(); + } // finally + return this; + } + + private int bitField0_; + + private java.lang.Object locationUri_ = ""; + /** + * + * + *
+     * Cloud Storage folder URI where the database data is stored, starting with
+     * "gs://".
+     * 
+ * + * string location_uri = 1; + * + * @return The locationUri. + */ + public java.lang.String getLocationUri() { + java.lang.Object ref = locationUri_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + locationUri_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + * + * + *
+     * Cloud Storage folder URI where the database data is stored, starting with
+     * "gs://".
+     * 
+ * + * string location_uri = 1; + * + * @return The bytes for locationUri. + */ + public com.google.protobuf.ByteString getLocationUriBytes() { + java.lang.Object ref = locationUri_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); + locationUri_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * + * + *
+     * Cloud Storage folder URI where the database data is stored, starting with
+     * "gs://".
+     * 
+ * + * string location_uri = 1; + * + * @param value The locationUri to set. + * @return This builder for chaining. + */ + public Builder setLocationUri(java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + locationUri_ = value; + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + /** + * + * + *
+     * Cloud Storage folder URI where the database data is stored, starting with
+     * "gs://".
+     * 
+ * + * string location_uri = 1; + * + * @return This builder for chaining. + */ + public Builder clearLocationUri() { + locationUri_ = getDefaultInstance().getLocationUri(); + bitField0_ = (bitField0_ & ~0x00000001); + onChanged(); + return this; + } + /** + * + * + *
+     * Cloud Storage folder URI where the database data is stored, starting with
+     * "gs://".
+     * 
+ * + * string location_uri = 1; + * + * @param value The bytes for locationUri to set. + * @return This builder for chaining. + */ + public Builder setLocationUriBytes(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + locationUri_ = value; + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + + private com.google.protobuf.MapField parameters_; + + private com.google.protobuf.MapField + internalGetParameters() { + if (parameters_ == null) { + return com.google.protobuf.MapField.emptyMapField( + ParametersDefaultEntryHolder.defaultEntry); + } + return parameters_; + } + + private com.google.protobuf.MapField + internalGetMutableParameters() { + if (parameters_ == null) { + parameters_ = + com.google.protobuf.MapField.newMapField(ParametersDefaultEntryHolder.defaultEntry); + } + if (!parameters_.isMutable()) { + parameters_ = parameters_.copy(); + } + bitField0_ |= 0x00000002; + onChanged(); + return parameters_; + } + + public int getParametersCount() { + return internalGetParameters().getMap().size(); + } + /** + * + * + *
+     * Stores user-supplied Hive database parameters.
+     * 
+ * + * map<string, string> parameters = 2; + */ + @java.lang.Override + public boolean containsParameters(java.lang.String key) { + if (key == null) { + throw new NullPointerException("map key"); + } + return internalGetParameters().getMap().containsKey(key); + } + /** Use {@link #getParametersMap()} instead. */ + @java.lang.Override + @java.lang.Deprecated + public java.util.Map getParameters() { + return getParametersMap(); + } + /** + * + * + *
+     * Stores user supplied Hive database parameters.
+     * </pre>
+ * + * map<string, string> parameters = 2; + */ + @java.lang.Override + public java.util.Map getParametersMap() { + return internalGetParameters().getMap(); + } + /** + * + * + *
+     * Stores user supplied Hive database parameters.
+     * </pre>
+ * + * map<string, string> parameters = 2; + */ + @java.lang.Override + public /* nullable */ java.lang.String getParametersOrDefault( + java.lang.String key, + /* nullable */ + java.lang.String defaultValue) { + if (key == null) { + throw new NullPointerException("map key"); + } + java.util.Map map = internalGetParameters().getMap(); + return map.containsKey(key) ? map.get(key) : defaultValue; + } + /** + * + * + *
+     * Stores user supplied Hive database parameters.
+     * </pre>
+ * + * map<string, string> parameters = 2; + */ + @java.lang.Override + public java.lang.String getParametersOrThrow(java.lang.String key) { + if (key == null) { + throw new NullPointerException("map key"); + } + java.util.Map map = internalGetParameters().getMap(); + if (!map.containsKey(key)) { + throw new java.lang.IllegalArgumentException(); + } + return map.get(key); + } + + public Builder clearParameters() { + bitField0_ = (bitField0_ & ~0x00000002); + internalGetMutableParameters().getMutableMap().clear(); + return this; + } + /** + * + * + *
+     * Stores user supplied Hive database parameters.
+     * </pre>
+ * + * map<string, string> parameters = 2; + */ + public Builder removeParameters(java.lang.String key) { + if (key == null) { + throw new NullPointerException("map key"); + } + internalGetMutableParameters().getMutableMap().remove(key); + return this; + } + /** Use alternate mutation accessors instead. */ + @java.lang.Deprecated + public java.util.Map getMutableParameters() { + bitField0_ |= 0x00000002; + return internalGetMutableParameters().getMutableMap(); + } + /** + * + * + *
+     * Stores user supplied Hive database parameters.
+     * </pre>
+ * + * map<string, string> parameters = 2; + */ + public Builder putParameters(java.lang.String key, java.lang.String value) { + if (key == null) { + throw new NullPointerException("map key"); + } + if (value == null) { + throw new NullPointerException("map value"); + } + internalGetMutableParameters().getMutableMap().put(key, value); + bitField0_ |= 0x00000002; + return this; + } + /** + * + * + *
+     * Stores user supplied Hive database parameters.
+     * </pre>
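A minimal usage sketch (illustrative only, not part of the generated file or of this patch): it shows how the setLocationUri and putParameters builder methods generated above might be used. The bucket path and the "owner" parameter are hypothetical placeholders.

import com.google.cloud.bigquery.biglake.v1alpha1.HiveDatabaseOptions;

public class HiveDatabaseOptionsSketch {
  public static void main(String[] args) {
    // Build the message with a Cloud Storage location and one user-supplied parameter.
    HiveDatabaseOptions options =
        HiveDatabaseOptions.newBuilder()
            .setLocationUri("gs://example-bucket/warehouse/my_db.db") // hypothetical URI
            .putParameters("owner", "data-eng")                       // hypothetical parameter
            .build();

    // Read the values back through the generated accessors.
    System.out.println(options.getLocationUri());
    System.out.println(options.getParametersOrDefault("owner", ""));
  }
}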
+ * + * map<string, string> parameters = 2; + */ + public Builder putAllParameters(java.util.Map values) { + internalGetMutableParameters().getMutableMap().putAll(values); + bitField0_ |= 0x00000002; + return this; + } + + @java.lang.Override + public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + // @@protoc_insertion_point(builder_scope:google.cloud.bigquery.biglake.v1alpha1.HiveDatabaseOptions) + } + + // @@protoc_insertion_point(class_scope:google.cloud.bigquery.biglake.v1alpha1.HiveDatabaseOptions) + private static final com.google.cloud.bigquery.biglake.v1alpha1.HiveDatabaseOptions + DEFAULT_INSTANCE; + + static { + DEFAULT_INSTANCE = new com.google.cloud.bigquery.biglake.v1alpha1.HiveDatabaseOptions(); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.HiveDatabaseOptions + getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + @java.lang.Override + public HiveDatabaseOptions parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + Builder builder = newBuilder(); + try { + builder.mergeFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(builder.buildPartial()); + } catch (com.google.protobuf.UninitializedMessageException e) { + throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException(e) + .setUnfinishedMessage(builder.buildPartial()); + } + return builder.buildPartial(); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public com.google.cloud.bigquery.biglake.v1alpha1.HiveDatabaseOptions + getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } +} diff --git a/java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/HiveDatabaseOptionsOrBuilder.java b/java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/HiveDatabaseOptionsOrBuilder.java new file mode 100644 index 000000000000..db9c66002311 --- /dev/null +++ b/java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/HiveDatabaseOptionsOrBuilder.java @@ -0,0 +1,110 @@ +/* + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: google/cloud/bigquery/biglake/v1alpha1/metastore.proto + +package com.google.cloud.bigquery.biglake.v1alpha1; + +public interface HiveDatabaseOptionsOrBuilder + extends + // @@protoc_insertion_point(interface_extends:google.cloud.bigquery.biglake.v1alpha1.HiveDatabaseOptions) + com.google.protobuf.MessageOrBuilder { + + /** + * + * + *
+   * Cloud Storage folder URI where the database data is stored, starting with
+   * "gs://".
+   * </pre>
+ * + * string location_uri = 1; + * + * @return The locationUri. + */ + java.lang.String getLocationUri(); + /** + * + * + *
+   * Cloud Storage folder URI where the database data is stored, starting with
+   * "gs://".
+   * </pre>
+ * + * string location_uri = 1; + * + * @return The bytes for locationUri. + */ + com.google.protobuf.ByteString getLocationUriBytes(); + + /** + * + * + *
+   * Stores user supplied Hive database parameters.
+   * </pre>
+ * + * map<string, string> parameters = 2; + */ + int getParametersCount(); + /** + * + * + *
+   * Stores user supplied Hive database parameters.
+   * </pre>
+ * + * map<string, string> parameters = 2; + */ + boolean containsParameters(java.lang.String key); + /** Use {@link #getParametersMap()} instead. */ + @java.lang.Deprecated + java.util.Map getParameters(); + /** + * + * + *
+   * Stores user supplied Hive database parameters.
+   * </pre>
+ * + * map<string, string> parameters = 2; + */ + java.util.Map getParametersMap(); + /** + * + * + *
+   * Stores user supplied Hive database parameters.
+   * </pre>
+ * + * map<string, string> parameters = 2; + */ + /* nullable */ + java.lang.String getParametersOrDefault( + java.lang.String key, + /* nullable */ + java.lang.String defaultValue); + /** + * + * + *
+   * Stores user supplied Hive database parameters.
+   * </pre>
+ * + * map<string, string> parameters = 2; + */ + java.lang.String getParametersOrThrow(java.lang.String key); +} diff --git a/java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/HiveTableOptions.java b/java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/HiveTableOptions.java new file mode 100644 index 000000000000..5208794368cb --- /dev/null +++ b/java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/HiveTableOptions.java @@ -0,0 +1,3364 @@ +/* + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: google/cloud/bigquery/biglake/v1alpha1/metastore.proto + +package com.google.cloud.bigquery.biglake.v1alpha1; + +/** + * + * + *
+ * Options of a Hive table.
+ * </pre>
+ * + * Protobuf type {@code google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions} + */ +public final class HiveTableOptions extends com.google.protobuf.GeneratedMessageV3 + implements + // @@protoc_insertion_point(message_implements:google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions) + HiveTableOptionsOrBuilder { + private static final long serialVersionUID = 0L; + // Use HiveTableOptions.newBuilder() to construct. + private HiveTableOptions(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + + private HiveTableOptions() { + tableType_ = ""; + } + + @java.lang.Override + @SuppressWarnings({"unused"}) + protected java.lang.Object newInstance(UnusedPrivateParameter unused) { + return new HiveTableOptions(); + } + + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet getUnknownFields() { + return this.unknownFields; + } + + public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { + return com.google.cloud.bigquery.biglake.v1alpha1.MetastoreProto + .internal_static_google_cloud_bigquery_biglake_v1alpha1_HiveTableOptions_descriptor; + } + + @SuppressWarnings({"rawtypes"}) + @java.lang.Override + protected com.google.protobuf.MapField internalGetMapField(int number) { + switch (number) { + case 1: + return internalGetParameters(); + default: + throw new RuntimeException("Invalid map field number: " + number); + } + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return com.google.cloud.bigquery.biglake.v1alpha1.MetastoreProto + .internal_static_google_cloud_bigquery_biglake_v1alpha1_HiveTableOptions_fieldAccessorTable + .ensureFieldAccessorsInitialized( + com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions.class, + com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions.Builder.class); + } + + public interface SerDeInfoOrBuilder + extends + // @@protoc_insertion_point(interface_extends:google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions.SerDeInfo) + com.google.protobuf.MessageOrBuilder { + + /** + * + * + *
+     * The fully qualified Java class name of the serialization library.
+     * </pre>
+ * + * string serialization_lib = 1; + * + * @return The serializationLib. + */ + java.lang.String getSerializationLib(); + /** + * + * + *
+     * The fully qualified Java class name of the serialization library.
+     * </pre>
+ * + * string serialization_lib = 1; + * + * @return The bytes for serializationLib. + */ + com.google.protobuf.ByteString getSerializationLibBytes(); + } + /** + * + * + *
+   * Serializer and deserializer information.
+   * </pre>
+ * + * Protobuf type {@code google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions.SerDeInfo} + */ + public static final class SerDeInfo extends com.google.protobuf.GeneratedMessageV3 + implements + // @@protoc_insertion_point(message_implements:google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions.SerDeInfo) + SerDeInfoOrBuilder { + private static final long serialVersionUID = 0L; + // Use SerDeInfo.newBuilder() to construct. + private SerDeInfo(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + + private SerDeInfo() { + serializationLib_ = ""; + } + + @java.lang.Override + @SuppressWarnings({"unused"}) + protected java.lang.Object newInstance(UnusedPrivateParameter unused) { + return new SerDeInfo(); + } + + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet getUnknownFields() { + return this.unknownFields; + } + + public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { + return com.google.cloud.bigquery.biglake.v1alpha1.MetastoreProto + .internal_static_google_cloud_bigquery_biglake_v1alpha1_HiveTableOptions_SerDeInfo_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return com.google.cloud.bigquery.biglake.v1alpha1.MetastoreProto + .internal_static_google_cloud_bigquery_biglake_v1alpha1_HiveTableOptions_SerDeInfo_fieldAccessorTable + .ensureFieldAccessorsInitialized( + com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions.SerDeInfo.class, + com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions.SerDeInfo.Builder.class); + } + + public static final int SERIALIZATION_LIB_FIELD_NUMBER = 1; + + @SuppressWarnings("serial") + private volatile java.lang.Object serializationLib_ = ""; + /** + * + * + *
+     * The fully qualified Java class name of the serialization library.
+     * </pre>
+ * + * string serialization_lib = 1; + * + * @return The serializationLib. + */ + @java.lang.Override + public java.lang.String getSerializationLib() { + java.lang.Object ref = serializationLib_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + serializationLib_ = s; + return s; + } + } + /** + * + * + *
+     * The fully qualified Java class name of the serialization library.
+     * </pre>
+ * + * string serialization_lib = 1; + * + * @return The bytes for serializationLib. + */ + @java.lang.Override + public com.google.protobuf.ByteString getSerializationLibBytes() { + java.lang.Object ref = serializationLib_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); + serializationLib_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + private byte memoizedIsInitialized = -1; + + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(serializationLib_)) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 1, serializationLib_); + } + getUnknownFields().writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(serializationLib_)) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, serializationLib_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions.SerDeInfo)) { + return super.equals(obj); + } + com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions.SerDeInfo other = + (com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions.SerDeInfo) obj; + + if (!getSerializationLib().equals(other.getSerializationLib())) return false; + if (!getUnknownFields().equals(other.getUnknownFields())) return false; + return true; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + hash = (37 * hash) + SERIALIZATION_LIB_FIELD_NUMBER; + hash = (53 * hash) + getSerializationLib().hashCode(); + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions.SerDeInfo parseFrom( + java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions.SerDeInfo parseFrom( + java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions.SerDeInfo parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions.SerDeInfo parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } 
+ + public static com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions.SerDeInfo parseFrom( + byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions.SerDeInfo parseFrom( + byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions.SerDeInfo parseFrom( + java.io.InputStream input) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions.SerDeInfo parseFrom( + java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException( + PARSER, input, extensionRegistry); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions.SerDeInfo + parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions.SerDeInfo + parseDelimitedFrom( + java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( + PARSER, input, extensionRegistry); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions.SerDeInfo parseFrom( + com.google.protobuf.CodedInputStream input) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions.SerDeInfo parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException( + PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { + return newBuilder(); + } + + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + + public static Builder newBuilder( + com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions.SerDeInfo prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * + * + *
+     * Serializer and deserializer information.
+     * </pre>
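A minimal sketch (illustrative only, not part of the generated file): a serialize-and-parse round trip using the SerDeInfo builder and the parseFrom overloads generated in this class. The serialization library class name is only an example value.

import com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions;

public class SerDeInfoRoundTrip {
  public static void main(String[] args) throws Exception {
    // Build a SerDeInfo with an example serialization library class name.
    HiveTableOptions.SerDeInfo serde =
        HiveTableOptions.SerDeInfo.newBuilder()
            .setSerializationLib("org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe")
            .build();

    // Standard protobuf round trip: serialize to bytes, then parse back.
    HiveTableOptions.SerDeInfo parsed =
        HiveTableOptions.SerDeInfo.parseFrom(serde.toByteArray());
    System.out.println(parsed.getSerializationLib());
  }
}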
+ * + * Protobuf type {@code google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions.SerDeInfo} + */ + public static final class Builder + extends com.google.protobuf.GeneratedMessageV3.Builder + implements + // @@protoc_insertion_point(builder_implements:google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions.SerDeInfo) + com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions.SerDeInfoOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { + return com.google.cloud.bigquery.biglake.v1alpha1.MetastoreProto + .internal_static_google_cloud_bigquery_biglake_v1alpha1_HiveTableOptions_SerDeInfo_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return com.google.cloud.bigquery.biglake.v1alpha1.MetastoreProto + .internal_static_google_cloud_bigquery_biglake_v1alpha1_HiveTableOptions_SerDeInfo_fieldAccessorTable + .ensureFieldAccessorsInitialized( + com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions.SerDeInfo.class, + com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions.SerDeInfo.Builder + .class); + } + + // Construct using + // com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions.SerDeInfo.newBuilder() + private Builder() {} + + private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + } + + @java.lang.Override + public Builder clear() { + super.clear(); + bitField0_ = 0; + serializationLib_ = ""; + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { + return com.google.cloud.bigquery.biglake.v1alpha1.MetastoreProto + .internal_static_google_cloud_bigquery_biglake_v1alpha1_HiveTableOptions_SerDeInfo_descriptor; + } + + @java.lang.Override + public com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions.SerDeInfo + getDefaultInstanceForType() { + return com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions.SerDeInfo + .getDefaultInstance(); + } + + @java.lang.Override + public com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions.SerDeInfo build() { + com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions.SerDeInfo result = + buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions.SerDeInfo buildPartial() { + com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions.SerDeInfo result = + new com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions.SerDeInfo(this); + if (bitField0_ != 0) { + buildPartial0(result); + } + onBuilt(); + return result; + } + + private void buildPartial0( + com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions.SerDeInfo result) { + int from_bitField0_ = bitField0_; + if (((from_bitField0_ & 0x00000001) != 0)) { + result.serializationLib_ = serializationLib_; + } + } + + @java.lang.Override + public Builder clone() { + return super.clone(); + } + + @java.lang.Override + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { + return super.setField(field, value); + } + + @java.lang.Override + public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { + return super.clearField(field); + } + + @java.lang.Override + public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return super.clearOneof(oneof); + } + + 
@java.lang.Override + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, + java.lang.Object value) { + return super.setRepeatedField(field, index, value); + } + + @java.lang.Override + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { + return super.addRepeatedField(field, value); + } + + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other + instanceof com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions.SerDeInfo) { + return mergeFrom( + (com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions.SerDeInfo) other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom( + com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions.SerDeInfo other) { + if (other + == com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions.SerDeInfo + .getDefaultInstance()) return this; + if (!other.getSerializationLib().isEmpty()) { + serializationLib_ = other.serializationLib_; + bitField0_ |= 0x00000001; + onChanged(); + } + this.mergeUnknownFields(other.getUnknownFields()); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 10: + { + serializationLib_ = input.readStringRequireUtf8(); + bitField0_ |= 0x00000001; + break; + } // case 10 + default: + { + if (!super.parseUnknownField(input, extensionRegistry, tag)) { + done = true; // was an endgroup tag + } + break; + } // default: + } // switch (tag) + } // while (!done) + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.unwrapIOException(); + } finally { + onChanged(); + } // finally + return this; + } + + private int bitField0_; + + private java.lang.Object serializationLib_ = ""; + /** + * + * + *
+       * The fully qualified Java class name of the serialization library.
+       * </pre>
+ * + * string serialization_lib = 1; + * + * @return The serializationLib. + */ + public java.lang.String getSerializationLib() { + java.lang.Object ref = serializationLib_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + serializationLib_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + * + * + *
+       * The fully qualified Java class name of the serialization library.
+       * </pre>
+ * + * string serialization_lib = 1; + * + * @return The bytes for serializationLib. + */ + public com.google.protobuf.ByteString getSerializationLibBytes() { + java.lang.Object ref = serializationLib_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); + serializationLib_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * + * + *
+       * The fully qualified Java class name of the serialization library.
+       * </pre>
+ * + * string serialization_lib = 1; + * + * @param value The serializationLib to set. + * @return This builder for chaining. + */ + public Builder setSerializationLib(java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + serializationLib_ = value; + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + /** + * + * + *
+       * The fully qualified Java class name of the serialization library.
+       * </pre>
+ * + * string serialization_lib = 1; + * + * @return This builder for chaining. + */ + public Builder clearSerializationLib() { + serializationLib_ = getDefaultInstance().getSerializationLib(); + bitField0_ = (bitField0_ & ~0x00000001); + onChanged(); + return this; + } + /** + * + * + *
+       * The fully qualified Java class name of the serialization library.
+       * </pre>
+ * + * string serialization_lib = 1; + * + * @param value The bytes for serializationLib to set. + * @return This builder for chaining. + */ + public Builder setSerializationLibBytes(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + serializationLib_ = value; + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + + @java.lang.Override + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + // @@protoc_insertion_point(builder_scope:google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions.SerDeInfo) + } + + // @@protoc_insertion_point(class_scope:google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions.SerDeInfo) + private static final com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions.SerDeInfo + DEFAULT_INSTANCE; + + static { + DEFAULT_INSTANCE = + new com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions.SerDeInfo(); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions.SerDeInfo + getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + @java.lang.Override + public SerDeInfo parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + Builder builder = newBuilder(); + try { + builder.mergeFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(builder.buildPartial()); + } catch (com.google.protobuf.UninitializedMessageException e) { + throw e.asInvalidProtocolBufferException() + .setUnfinishedMessage(builder.buildPartial()); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException(e) + .setUnfinishedMessage(builder.buildPartial()); + } + return builder.buildPartial(); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions.SerDeInfo + getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + } + + public interface StorageDescriptorOrBuilder + extends + // @@protoc_insertion_point(interface_extends:google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions.StorageDescriptor) + com.google.protobuf.MessageOrBuilder { + + /** + * + * + *
+     * Cloud Storage folder URI where the table data is stored, starting with
+     * "gs://".
+     * </pre>
+ * + * string location_uri = 1; + * + * @return The locationUri. + */ + java.lang.String getLocationUri(); + /** + * + * + *
+     * Cloud Storage folder URI where the table data is stored, starting with
+     * "gs://".
+     * </pre>
+ * + * string location_uri = 1; + * + * @return The bytes for locationUri. + */ + com.google.protobuf.ByteString getLocationUriBytes(); + + /** + * + * + *
+     * The fully qualified Java class name of the input format.
+     * </pre>
+ * + * string input_format = 2; + * + * @return The inputFormat. + */ + java.lang.String getInputFormat(); + /** + * + * + *
+     * The fully qualified Java class name of the input format.
+     * </pre>
+ * + * string input_format = 2; + * + * @return The bytes for inputFormat. + */ + com.google.protobuf.ByteString getInputFormatBytes(); + + /** + * + * + *
+     * The fully qualified Java class name of the output format.
+     * </pre>
+ * + * string output_format = 3; + * + * @return The outputFormat. + */ + java.lang.String getOutputFormat(); + /** + * + * + *
+     * The fully qualified Java class name of the output format.
+     * </pre>
+ * + * string output_format = 3; + * + * @return The bytes for outputFormat. + */ + com.google.protobuf.ByteString getOutputFormatBytes(); + + /** + * + * + *
+     * Serializer and deserializer information.
+     * </pre>
+ * + * .google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions.SerDeInfo serde_info = 4; + * + * + * @return Whether the serdeInfo field is set. + */ + boolean hasSerdeInfo(); + /** + * + * + *
+     * Serializer and deserializer information.
+     * </pre>
+ * + * .google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions.SerDeInfo serde_info = 4; + * + * + * @return The serdeInfo. + */ + com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions.SerDeInfo getSerdeInfo(); + /** + * + * + *
+     * Serializer and deserializer information.
+     * </pre>
+ * + * .google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions.SerDeInfo serde_info = 4; + * + */ + com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions.SerDeInfoOrBuilder + getSerdeInfoOrBuilder(); + } + /** + * + * + *
+   * Stores physical storage information of the data.
+   * </pre>
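A minimal sketch (illustrative only, not part of the generated file): assembling a StorageDescriptor together with its nested SerDeInfo. The URI and the Hadoop format class names are hypothetical example values, and setSerdeInfo is assumed to be the standard protoc-generated setter for the serde_info field.

import com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions;

public class StorageDescriptorSketch {
  public static void main(String[] args) {
    // Serializer/deserializer information for the table data (example class name).
    HiveTableOptions.SerDeInfo serde =
        HiveTableOptions.SerDeInfo.newBuilder()
            .setSerializationLib("org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe")
            .build();

    // Physical storage information: location plus input/output format class names.
    HiveTableOptions.StorageDescriptor descriptor =
        HiveTableOptions.StorageDescriptor.newBuilder()
            .setLocationUri("gs://example-bucket/warehouse/my_db.db/my_table") // hypothetical URI
            .setInputFormat("org.apache.hadoop.mapred.TextInputFormat")
            .setOutputFormat("org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat")
            .setSerdeInfo(serde) // assumed protoc-generated setter for serde_info
            .build();

    System.out.println(descriptor.hasSerdeInfo());
  }
}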
+ * + * Protobuf type {@code google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions.StorageDescriptor} + */ + public static final class StorageDescriptor extends com.google.protobuf.GeneratedMessageV3 + implements + // @@protoc_insertion_point(message_implements:google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions.StorageDescriptor) + StorageDescriptorOrBuilder { + private static final long serialVersionUID = 0L; + // Use StorageDescriptor.newBuilder() to construct. + private StorageDescriptor(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + + private StorageDescriptor() { + locationUri_ = ""; + inputFormat_ = ""; + outputFormat_ = ""; + } + + @java.lang.Override + @SuppressWarnings({"unused"}) + protected java.lang.Object newInstance(UnusedPrivateParameter unused) { + return new StorageDescriptor(); + } + + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet getUnknownFields() { + return this.unknownFields; + } + + public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { + return com.google.cloud.bigquery.biglake.v1alpha1.MetastoreProto + .internal_static_google_cloud_bigquery_biglake_v1alpha1_HiveTableOptions_StorageDescriptor_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return com.google.cloud.bigquery.biglake.v1alpha1.MetastoreProto + .internal_static_google_cloud_bigquery_biglake_v1alpha1_HiveTableOptions_StorageDescriptor_fieldAccessorTable + .ensureFieldAccessorsInitialized( + com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions.StorageDescriptor.class, + com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions.StorageDescriptor.Builder + .class); + } + + public static final int LOCATION_URI_FIELD_NUMBER = 1; + + @SuppressWarnings("serial") + private volatile java.lang.Object locationUri_ = ""; + /** + * + * + *
+     * Cloud Storage folder URI where the table data is stored, starting with
+     * "gs://".
+     * </pre>
+ * + * string location_uri = 1; + * + * @return The locationUri. + */ + @java.lang.Override + public java.lang.String getLocationUri() { + java.lang.Object ref = locationUri_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + locationUri_ = s; + return s; + } + } + /** + * + * + *
+     * Cloud Storage folder URI where the table data is stored, starting with
+     * "gs://".
+     * </pre>
+ * + * string location_uri = 1; + * + * @return The bytes for locationUri. + */ + @java.lang.Override + public com.google.protobuf.ByteString getLocationUriBytes() { + java.lang.Object ref = locationUri_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); + locationUri_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int INPUT_FORMAT_FIELD_NUMBER = 2; + + @SuppressWarnings("serial") + private volatile java.lang.Object inputFormat_ = ""; + /** + * + * + *
+     * The fully qualified Java class name of the input format.
+     * </pre>
+ * + * string input_format = 2; + * + * @return The inputFormat. + */ + @java.lang.Override + public java.lang.String getInputFormat() { + java.lang.Object ref = inputFormat_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + inputFormat_ = s; + return s; + } + } + /** + * + * + *
+     * The fully qualified Java class name of the input format.
+     * </pre>
+ * + * string input_format = 2; + * + * @return The bytes for inputFormat. + */ + @java.lang.Override + public com.google.protobuf.ByteString getInputFormatBytes() { + java.lang.Object ref = inputFormat_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); + inputFormat_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int OUTPUT_FORMAT_FIELD_NUMBER = 3; + + @SuppressWarnings("serial") + private volatile java.lang.Object outputFormat_ = ""; + /** + * + * + *
+     * The fully qualified Java class name of the output format.
+     * </pre>
+ * + * string output_format = 3; + * + * @return The outputFormat. + */ + @java.lang.Override + public java.lang.String getOutputFormat() { + java.lang.Object ref = outputFormat_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + outputFormat_ = s; + return s; + } + } + /** + * + * + *
+     * The fully qualified Java class name of the output format.
+     * </pre>
+ * + * string output_format = 3; + * + * @return The bytes for outputFormat. + */ + @java.lang.Override + public com.google.protobuf.ByteString getOutputFormatBytes() { + java.lang.Object ref = outputFormat_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); + outputFormat_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int SERDE_INFO_FIELD_NUMBER = 4; + private com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions.SerDeInfo serdeInfo_; + /** + * + * + *
+     * Serializer and deserializer information.
+     * </pre>
+ * + * .google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions.SerDeInfo serde_info = 4; + * + * + * @return Whether the serdeInfo field is set. + */ + @java.lang.Override + public boolean hasSerdeInfo() { + return serdeInfo_ != null; + } + /** + * + * + *
+     * Serializer and deserializer information.
+     * </pre>
+ * + * .google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions.SerDeInfo serde_info = 4; + * + * + * @return The serdeInfo. + */ + @java.lang.Override + public com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions.SerDeInfo getSerdeInfo() { + return serdeInfo_ == null + ? com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions.SerDeInfo + .getDefaultInstance() + : serdeInfo_; + } + /** + * + * + *
+     * Serializer and deserializer information.
+     * </pre>
+ * + * .google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions.SerDeInfo serde_info = 4; + * + */ + @java.lang.Override + public com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions.SerDeInfoOrBuilder + getSerdeInfoOrBuilder() { + return serdeInfo_ == null + ? com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions.SerDeInfo + .getDefaultInstance() + : serdeInfo_; + } + + private byte memoizedIsInitialized = -1; + + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(locationUri_)) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 1, locationUri_); + } + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(inputFormat_)) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 2, inputFormat_); + } + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(outputFormat_)) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 3, outputFormat_); + } + if (serdeInfo_ != null) { + output.writeMessage(4, getSerdeInfo()); + } + getUnknownFields().writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(locationUri_)) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, locationUri_); + } + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(inputFormat_)) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, inputFormat_); + } + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(outputFormat_)) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, outputFormat_); + } + if (serdeInfo_ != null) { + size += com.google.protobuf.CodedOutputStream.computeMessageSize(4, getSerdeInfo()); + } + size += getUnknownFields().getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj + instanceof + com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions.StorageDescriptor)) { + return super.equals(obj); + } + com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions.StorageDescriptor other = + (com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions.StorageDescriptor) obj; + + if (!getLocationUri().equals(other.getLocationUri())) return false; + if (!getInputFormat().equals(other.getInputFormat())) return false; + if (!getOutputFormat().equals(other.getOutputFormat())) return false; + if (hasSerdeInfo() != other.hasSerdeInfo()) return false; + if (hasSerdeInfo()) { + if (!getSerdeInfo().equals(other.getSerdeInfo())) return false; + } + if (!getUnknownFields().equals(other.getUnknownFields())) return false; + return true; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + hash = (37 * hash) + LOCATION_URI_FIELD_NUMBER; + hash = (53 * hash) + getLocationUri().hashCode(); + hash = (37 * hash) + INPUT_FORMAT_FIELD_NUMBER; + hash = (53 * hash) + getInputFormat().hashCode(); + hash = (37 * hash) + 
OUTPUT_FORMAT_FIELD_NUMBER; + hash = (53 * hash) + getOutputFormat().hashCode(); + if (hasSerdeInfo()) { + hash = (37 * hash) + SERDE_INFO_FIELD_NUMBER; + hash = (53 * hash) + getSerdeInfo().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions.StorageDescriptor + parseFrom(java.nio.ByteBuffer data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions.StorageDescriptor + parseFrom( + java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions.StorageDescriptor + parseFrom(com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions.StorageDescriptor + parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions.StorageDescriptor + parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions.StorageDescriptor + parseFrom(byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions.StorageDescriptor + parseFrom(java.io.InputStream input) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions.StorageDescriptor + parseFrom( + java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException( + PARSER, input, extensionRegistry); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions.StorageDescriptor + parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions.StorageDescriptor + parseDelimitedFrom( + java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( + PARSER, input, extensionRegistry); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions.StorageDescriptor + parseFrom(com.google.protobuf.CodedInputStream input) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions.StorageDescriptor + parseFrom( + 
com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException( + PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { + return newBuilder(); + } + + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + + public static Builder newBuilder( + com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions.StorageDescriptor prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * + * + *
+     * Stores physical storage information of the data.
+     * </pre>
+ * + * Protobuf type {@code + * google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions.StorageDescriptor} + */ + public static final class Builder + extends com.google.protobuf.GeneratedMessageV3.Builder + implements + // @@protoc_insertion_point(builder_implements:google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions.StorageDescriptor) + com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions.StorageDescriptorOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { + return com.google.cloud.bigquery.biglake.v1alpha1.MetastoreProto + .internal_static_google_cloud_bigquery_biglake_v1alpha1_HiveTableOptions_StorageDescriptor_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return com.google.cloud.bigquery.biglake.v1alpha1.MetastoreProto + .internal_static_google_cloud_bigquery_biglake_v1alpha1_HiveTableOptions_StorageDescriptor_fieldAccessorTable + .ensureFieldAccessorsInitialized( + com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions.StorageDescriptor.class, + com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions.StorageDescriptor + .Builder.class); + } + + // Construct using + // com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions.StorageDescriptor.newBuilder() + private Builder() {} + + private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + } + + @java.lang.Override + public Builder clear() { + super.clear(); + bitField0_ = 0; + locationUri_ = ""; + inputFormat_ = ""; + outputFormat_ = ""; + serdeInfo_ = null; + if (serdeInfoBuilder_ != null) { + serdeInfoBuilder_.dispose(); + serdeInfoBuilder_ = null; + } + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { + return com.google.cloud.bigquery.biglake.v1alpha1.MetastoreProto + .internal_static_google_cloud_bigquery_biglake_v1alpha1_HiveTableOptions_StorageDescriptor_descriptor; + } + + @java.lang.Override + public com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions.StorageDescriptor + getDefaultInstanceForType() { + return com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions.StorageDescriptor + .getDefaultInstance(); + } + + @java.lang.Override + public com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions.StorageDescriptor build() { + com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions.StorageDescriptor result = + buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions.StorageDescriptor + buildPartial() { + com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions.StorageDescriptor result = + new com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions.StorageDescriptor(this); + if (bitField0_ != 0) { + buildPartial0(result); + } + onBuilt(); + return result; + } + + private void buildPartial0( + com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions.StorageDescriptor result) { + int from_bitField0_ = bitField0_; + if (((from_bitField0_ & 0x00000001) != 0)) { + result.locationUri_ = locationUri_; + } + if (((from_bitField0_ & 0x00000002) != 0)) { + result.inputFormat_ = inputFormat_; + } + if (((from_bitField0_ & 0x00000004) != 0)) { + result.outputFormat_ = outputFormat_; + } + if (((from_bitField0_ & 0x00000008) != 0)) { + result.serdeInfo_ = serdeInfoBuilder_ == null 
? serdeInfo_ : serdeInfoBuilder_.build(); + } + } + + @java.lang.Override + public Builder clone() { + return super.clone(); + } + + @java.lang.Override + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { + return super.setField(field, value); + } + + @java.lang.Override + public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { + return super.clearField(field); + } + + @java.lang.Override + public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return super.clearOneof(oneof); + } + + @java.lang.Override + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, + java.lang.Object value) { + return super.setRepeatedField(field, index, value); + } + + @java.lang.Override + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { + return super.addRepeatedField(field, value); + } + + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other + instanceof + com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions.StorageDescriptor) { + return mergeFrom( + (com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions.StorageDescriptor) + other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom( + com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions.StorageDescriptor other) { + if (other + == com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions.StorageDescriptor + .getDefaultInstance()) return this; + if (!other.getLocationUri().isEmpty()) { + locationUri_ = other.locationUri_; + bitField0_ |= 0x00000001; + onChanged(); + } + if (!other.getInputFormat().isEmpty()) { + inputFormat_ = other.inputFormat_; + bitField0_ |= 0x00000002; + onChanged(); + } + if (!other.getOutputFormat().isEmpty()) { + outputFormat_ = other.outputFormat_; + bitField0_ |= 0x00000004; + onChanged(); + } + if (other.hasSerdeInfo()) { + mergeSerdeInfo(other.getSerdeInfo()); + } + this.mergeUnknownFields(other.getUnknownFields()); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 10: + { + locationUri_ = input.readStringRequireUtf8(); + bitField0_ |= 0x00000001; + break; + } // case 10 + case 18: + { + inputFormat_ = input.readStringRequireUtf8(); + bitField0_ |= 0x00000002; + break; + } // case 18 + case 26: + { + outputFormat_ = input.readStringRequireUtf8(); + bitField0_ |= 0x00000004; + break; + } // case 26 + case 34: + { + input.readMessage(getSerdeInfoFieldBuilder().getBuilder(), extensionRegistry); + bitField0_ |= 0x00000008; + break; + } // case 34 + default: + { + if (!super.parseUnknownField(input, extensionRegistry, tag)) { + done = true; // was an endgroup tag + } + break; + } // default: + } // switch (tag) + } // while (!done) + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.unwrapIOException(); + } finally { + onChanged(); + } // finally + return this; + } + + private int bitField0_; + + private 
java.lang.Object locationUri_ = ""; + /** + * + * + *
+       * Cloud Storage folder URI where the table data is stored, starting with
+       * "gs://".
+       * </pre>
+ * + * string location_uri = 1; + * + * @return The locationUri. + */ + public java.lang.String getLocationUri() { + java.lang.Object ref = locationUri_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + locationUri_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + * + * + *
+       * Cloud Storage folder URI where the table data is stored, starting with
+       * "gs://".
+       * </pre>
+ * + * string location_uri = 1; + * + * @return The bytes for locationUri. + */ + public com.google.protobuf.ByteString getLocationUriBytes() { + java.lang.Object ref = locationUri_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); + locationUri_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * + * + *
+       * Cloud Storage folder URI where the table data is stored, starting with
+       * "gs://".
+       * 
+ * + * string location_uri = 1; + * + * @param value The locationUri to set. + * @return This builder for chaining. + */ + public Builder setLocationUri(java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + locationUri_ = value; + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + /** + * + * + *
+       * Cloud Storage folder URI where the table data is stored, starting with
+       * "gs://".
+       * 
+ * + * string location_uri = 1; + * + * @return This builder for chaining. + */ + public Builder clearLocationUri() { + locationUri_ = getDefaultInstance().getLocationUri(); + bitField0_ = (bitField0_ & ~0x00000001); + onChanged(); + return this; + } + /** + * + * + *
+       * Cloud Storage folder URI where the table data is stored, starting with
+       * "gs://".
+       * 
+ * + * string location_uri = 1; + * + * @param value The bytes for locationUri to set. + * @return This builder for chaining. + */ + public Builder setLocationUriBytes(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + locationUri_ = value; + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + + private java.lang.Object inputFormat_ = ""; + /** + * + * + *
+       * The fully qualified Java class name of the input format.
+       * 
+ * + * string input_format = 2; + * + * @return The inputFormat. + */ + public java.lang.String getInputFormat() { + java.lang.Object ref = inputFormat_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + inputFormat_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + * + * + *
+       * The fully qualified Java class name of the input format.
+       * 
+ * + * string input_format = 2; + * + * @return The bytes for inputFormat. + */ + public com.google.protobuf.ByteString getInputFormatBytes() { + java.lang.Object ref = inputFormat_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); + inputFormat_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * + * + *
+       * The fully qualified Java class name of the input format.
+       * 
+ * + * string input_format = 2; + * + * @param value The inputFormat to set. + * @return This builder for chaining. + */ + public Builder setInputFormat(java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + inputFormat_ = value; + bitField0_ |= 0x00000002; + onChanged(); + return this; + } + /** + * + * + *
+       * The fully qualified Java class name of the input format.
+       * 
+ * + * string input_format = 2; + * + * @return This builder for chaining. + */ + public Builder clearInputFormat() { + inputFormat_ = getDefaultInstance().getInputFormat(); + bitField0_ = (bitField0_ & ~0x00000002); + onChanged(); + return this; + } + /** + * + * + *
+       * The fully qualified Java class name of the input format.
+       * 
+ * + * string input_format = 2; + * + * @param value The bytes for inputFormat to set. + * @return This builder for chaining. + */ + public Builder setInputFormatBytes(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + inputFormat_ = value; + bitField0_ |= 0x00000002; + onChanged(); + return this; + } + + private java.lang.Object outputFormat_ = ""; + /** + * + * + *
+       * The fully qualified Java class name of the output format.
+       * 
+ * + * string output_format = 3; + * + * @return The outputFormat. + */ + public java.lang.String getOutputFormat() { + java.lang.Object ref = outputFormat_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + outputFormat_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + * + * + *
+       * The fully qualified Java class name of the output format.
+       * 
+ * + * string output_format = 3; + * + * @return The bytes for outputFormat. + */ + public com.google.protobuf.ByteString getOutputFormatBytes() { + java.lang.Object ref = outputFormat_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); + outputFormat_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * + * + *
+       * The fully qualified Java class name of the output format.
+       * 
+ * + * string output_format = 3; + * + * @param value The outputFormat to set. + * @return This builder for chaining. + */ + public Builder setOutputFormat(java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + outputFormat_ = value; + bitField0_ |= 0x00000004; + onChanged(); + return this; + } + /** + * + * + *
+       * The fully qualified Java class name of the output format.
+       * 
+ * + * string output_format = 3; + * + * @return This builder for chaining. + */ + public Builder clearOutputFormat() { + outputFormat_ = getDefaultInstance().getOutputFormat(); + bitField0_ = (bitField0_ & ~0x00000004); + onChanged(); + return this; + } + /** + * + * + *
+       * The fully qualified Java class name of the output format.
+       * 
+ * + * string output_format = 3; + * + * @param value The bytes for outputFormat to set. + * @return This builder for chaining. + */ + public Builder setOutputFormatBytes(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + outputFormat_ = value; + bitField0_ |= 0x00000004; + onChanged(); + return this; + } + + private com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions.SerDeInfo serdeInfo_; + private com.google.protobuf.SingleFieldBuilderV3< + com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions.SerDeInfo, + com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions.SerDeInfo.Builder, + com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions.SerDeInfoOrBuilder> + serdeInfoBuilder_; + /** + * + * + *
+       * Serializer and deserializer information.
+       * 
+ * + * .google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions.SerDeInfo serde_info = 4; + * + * + * @return Whether the serdeInfo field is set. + */ + public boolean hasSerdeInfo() { + return ((bitField0_ & 0x00000008) != 0); + } + /** + * + * + *
+       * Serializer and deserializer information.
+       * 
+ * + * .google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions.SerDeInfo serde_info = 4; + * + * + * @return The serdeInfo. + */ + public com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions.SerDeInfo getSerdeInfo() { + if (serdeInfoBuilder_ == null) { + return serdeInfo_ == null + ? com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions.SerDeInfo + .getDefaultInstance() + : serdeInfo_; + } else { + return serdeInfoBuilder_.getMessage(); + } + } + /** + * + * + *
+       * Serializer and deserializer information.
+       * 
+ * + * .google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions.SerDeInfo serde_info = 4; + * + */ + public Builder setSerdeInfo( + com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions.SerDeInfo value) { + if (serdeInfoBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + serdeInfo_ = value; + } else { + serdeInfoBuilder_.setMessage(value); + } + bitField0_ |= 0x00000008; + onChanged(); + return this; + } + /** + * + * + *
+       * Serializer and deserializer information.
+       * 
+ * + * .google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions.SerDeInfo serde_info = 4; + * + */ + public Builder setSerdeInfo( + com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions.SerDeInfo.Builder + builderForValue) { + if (serdeInfoBuilder_ == null) { + serdeInfo_ = builderForValue.build(); + } else { + serdeInfoBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000008; + onChanged(); + return this; + } + /** + * + * + *
+       * Serializer and deserializer information.
+       * 
+ * + * .google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions.SerDeInfo serde_info = 4; + * + */ + public Builder mergeSerdeInfo( + com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions.SerDeInfo value) { + if (serdeInfoBuilder_ == null) { + if (((bitField0_ & 0x00000008) != 0) + && serdeInfo_ != null + && serdeInfo_ + != com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions.SerDeInfo + .getDefaultInstance()) { + getSerdeInfoBuilder().mergeFrom(value); + } else { + serdeInfo_ = value; + } + } else { + serdeInfoBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000008; + onChanged(); + return this; + } + /** + * + * + *
+       * Serializer and deserializer information.
+       * 
+ * + * .google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions.SerDeInfo serde_info = 4; + * + */ + public Builder clearSerdeInfo() { + bitField0_ = (bitField0_ & ~0x00000008); + serdeInfo_ = null; + if (serdeInfoBuilder_ != null) { + serdeInfoBuilder_.dispose(); + serdeInfoBuilder_ = null; + } + onChanged(); + return this; + } + /** + * + * + *
+       * Serializer and deserializer information.
+       * 
+ * + * .google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions.SerDeInfo serde_info = 4; + * + */ + public com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions.SerDeInfo.Builder + getSerdeInfoBuilder() { + bitField0_ |= 0x00000008; + onChanged(); + return getSerdeInfoFieldBuilder().getBuilder(); + } + /** + * + * + *
+       * Serializer and deserializer information.
+       * 
+ * + * .google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions.SerDeInfo serde_info = 4; + * + */ + public com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions.SerDeInfoOrBuilder + getSerdeInfoOrBuilder() { + if (serdeInfoBuilder_ != null) { + return serdeInfoBuilder_.getMessageOrBuilder(); + } else { + return serdeInfo_ == null + ? com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions.SerDeInfo + .getDefaultInstance() + : serdeInfo_; + } + } + /** + * + * + *
+       * Serializer and deserializer information.
+       * 
+ * + * .google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions.SerDeInfo serde_info = 4; + * + */ + private com.google.protobuf.SingleFieldBuilderV3< + com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions.SerDeInfo, + com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions.SerDeInfo.Builder, + com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions.SerDeInfoOrBuilder> + getSerdeInfoFieldBuilder() { + if (serdeInfoBuilder_ == null) { + serdeInfoBuilder_ = + new com.google.protobuf.SingleFieldBuilderV3< + com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions.SerDeInfo, + com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions.SerDeInfo.Builder, + com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions.SerDeInfoOrBuilder>( + getSerdeInfo(), getParentForChildren(), isClean()); + serdeInfo_ = null; + } + return serdeInfoBuilder_; + } + + @java.lang.Override + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + // @@protoc_insertion_point(builder_scope:google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions.StorageDescriptor) + } + + // @@protoc_insertion_point(class_scope:google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions.StorageDescriptor) + private static final com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions + .StorageDescriptor + DEFAULT_INSTANCE; + + static { + DEFAULT_INSTANCE = + new com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions.StorageDescriptor(); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions.StorageDescriptor + getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + @java.lang.Override + public StorageDescriptor parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + Builder builder = newBuilder(); + try { + builder.mergeFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(builder.buildPartial()); + } catch (com.google.protobuf.UninitializedMessageException e) { + throw e.asInvalidProtocolBufferException() + .setUnfinishedMessage(builder.buildPartial()); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException(e) + .setUnfinishedMessage(builder.buildPartial()); + } + return builder.buildPartial(); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions.StorageDescriptor + getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + } + + public static final int PARAMETERS_FIELD_NUMBER = 1; + + private static final class ParametersDefaultEntryHolder { + static final com.google.protobuf.MapEntry defaultEntry = + com.google.protobuf.MapEntry.newDefaultInstance( + com.google.cloud.bigquery.biglake.v1alpha1.MetastoreProto + .internal_static_google_cloud_bigquery_biglake_v1alpha1_HiveTableOptions_ParametersEntry_descriptor, + 
com.google.protobuf.WireFormat.FieldType.STRING, + "", + com.google.protobuf.WireFormat.FieldType.STRING, + ""); + } + + @SuppressWarnings("serial") + private com.google.protobuf.MapField parameters_; + + private com.google.protobuf.MapField internalGetParameters() { + if (parameters_ == null) { + return com.google.protobuf.MapField.emptyMapField(ParametersDefaultEntryHolder.defaultEntry); + } + return parameters_; + } + + public int getParametersCount() { + return internalGetParameters().getMap().size(); + } + /** + * + * + *
+   * Stores user-supplied Hive table parameters.
+   * 
+ * + * map<string, string> parameters = 1; + */ + @java.lang.Override + public boolean containsParameters(java.lang.String key) { + if (key == null) { + throw new NullPointerException("map key"); + } + return internalGetParameters().getMap().containsKey(key); + } + /** Use {@link #getParametersMap()} instead. */ + @java.lang.Override + @java.lang.Deprecated + public java.util.Map getParameters() { + return getParametersMap(); + } + /** + * + * + *
+   * Stores user-supplied Hive table parameters.
+   * 
+ * + * map<string, string> parameters = 1; + */ + @java.lang.Override + public java.util.Map getParametersMap() { + return internalGetParameters().getMap(); + } + /** + * + * + *
+   * Stores user-supplied Hive table parameters.
+   * 
+ * + * map<string, string> parameters = 1; + */ + @java.lang.Override + public /* nullable */ java.lang.String getParametersOrDefault( + java.lang.String key, + /* nullable */ + java.lang.String defaultValue) { + if (key == null) { + throw new NullPointerException("map key"); + } + java.util.Map map = internalGetParameters().getMap(); + return map.containsKey(key) ? map.get(key) : defaultValue; + } + /** + * + * + *
+   * Stores user-supplied Hive table parameters.
+   * 
+ * + * map<string, string> parameters = 1; + */ + @java.lang.Override + public java.lang.String getParametersOrThrow(java.lang.String key) { + if (key == null) { + throw new NullPointerException("map key"); + } + java.util.Map map = internalGetParameters().getMap(); + if (!map.containsKey(key)) { + throw new java.lang.IllegalArgumentException(); + } + return map.get(key); + } + + public static final int TABLE_TYPE_FIELD_NUMBER = 2; + + @SuppressWarnings("serial") + private volatile java.lang.Object tableType_ = ""; + /** + * + * + *
+   * Hive table type. For example, MANAGED_TABLE, EXTERNAL_TABLE.
+   * 
+ * + * string table_type = 2; + * + * @return The tableType. + */ + @java.lang.Override + public java.lang.String getTableType() { + java.lang.Object ref = tableType_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + tableType_ = s; + return s; + } + } + /** + * + * + *
+   * Hive table type. For example, MANAGED_TABLE, EXTERNAL_TABLE.
+   * 
+ * + * string table_type = 2; + * + * @return The bytes for tableType. + */ + @java.lang.Override + public com.google.protobuf.ByteString getTableTypeBytes() { + java.lang.Object ref = tableType_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); + tableType_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int STORAGE_DESCRIPTOR_FIELD_NUMBER = 3; + private com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions.StorageDescriptor + storageDescriptor_; + /** + * + * + *
+   * Stores physical storage information of the data.
+   * 
+ * + * + * .google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions.StorageDescriptor storage_descriptor = 3; + * + * + * @return Whether the storageDescriptor field is set. + */ + @java.lang.Override + public boolean hasStorageDescriptor() { + return storageDescriptor_ != null; + } + /** + * + * + *
+   * Stores physical storage information of the data.
+   * 
+ * + * + * .google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions.StorageDescriptor storage_descriptor = 3; + * + * + * @return The storageDescriptor. + */ + @java.lang.Override + public com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions.StorageDescriptor + getStorageDescriptor() { + return storageDescriptor_ == null + ? com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions.StorageDescriptor + .getDefaultInstance() + : storageDescriptor_; + } + /** + * + * + *
+   * Stores physical storage information of the data.
+   * 
+ * + * + * .google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions.StorageDescriptor storage_descriptor = 3; + * + */ + @java.lang.Override + public com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions.StorageDescriptorOrBuilder + getStorageDescriptorOrBuilder() { + return storageDescriptor_ == null + ? com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions.StorageDescriptor + .getDefaultInstance() + : storageDescriptor_; + } + + private byte memoizedIsInitialized = -1; + + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { + com.google.protobuf.GeneratedMessageV3.serializeStringMapTo( + output, internalGetParameters(), ParametersDefaultEntryHolder.defaultEntry, 1); + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(tableType_)) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 2, tableType_); + } + if (storageDescriptor_ != null) { + output.writeMessage(3, getStorageDescriptor()); + } + getUnknownFields().writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + for (java.util.Map.Entry entry : + internalGetParameters().getMap().entrySet()) { + com.google.protobuf.MapEntry parameters__ = + ParametersDefaultEntryHolder.defaultEntry + .newBuilderForType() + .setKey(entry.getKey()) + .setValue(entry.getValue()) + .build(); + size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, parameters__); + } + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(tableType_)) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, tableType_); + } + if (storageDescriptor_ != null) { + size += com.google.protobuf.CodedOutputStream.computeMessageSize(3, getStorageDescriptor()); + } + size += getUnknownFields().getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions)) { + return super.equals(obj); + } + com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions other = + (com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions) obj; + + if (!internalGetParameters().equals(other.internalGetParameters())) return false; + if (!getTableType().equals(other.getTableType())) return false; + if (hasStorageDescriptor() != other.hasStorageDescriptor()) return false; + if (hasStorageDescriptor()) { + if (!getStorageDescriptor().equals(other.getStorageDescriptor())) return false; + } + if (!getUnknownFields().equals(other.getUnknownFields())) return false; + return true; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + if (!internalGetParameters().getMap().isEmpty()) { + hash = (37 * hash) + PARAMETERS_FIELD_NUMBER; + hash = (53 * hash) + internalGetParameters().hashCode(); + } + hash = (37 * hash) + TABLE_TYPE_FIELD_NUMBER; + hash = (53 * hash) + getTableType().hashCode(); + if (hasStorageDescriptor()) { + hash = (37 * hash) + STORAGE_DESCRIPTOR_FIELD_NUMBER; + hash = (53 * hash) + 
getStorageDescriptor().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions parseFrom( + java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions parseFrom( + java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions parseFrom( + byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions parseFrom( + java.io.InputStream input) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions parseFrom( + java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException( + PARSER, input, extensionRegistry); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions parseDelimitedFrom( + java.io.InputStream input) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions parseDelimitedFrom( + java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( + PARSER, input, extensionRegistry); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions parseFrom( + com.google.protobuf.CodedInputStream input) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException( + PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { + return newBuilder(); + } + + public static Builder newBuilder() { + return 
DEFAULT_INSTANCE.toBuilder(); + } + + public static Builder newBuilder( + com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * + * + *
+   * Options of a Hive table.
+   * 
+ * + * Protobuf type {@code google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions} + */ + public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder + implements + // @@protoc_insertion_point(builder_implements:google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions) + com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptionsOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { + return com.google.cloud.bigquery.biglake.v1alpha1.MetastoreProto + .internal_static_google_cloud_bigquery_biglake_v1alpha1_HiveTableOptions_descriptor; + } + + @SuppressWarnings({"rawtypes"}) + protected com.google.protobuf.MapField internalGetMapField(int number) { + switch (number) { + case 1: + return internalGetParameters(); + default: + throw new RuntimeException("Invalid map field number: " + number); + } + } + + @SuppressWarnings({"rawtypes"}) + protected com.google.protobuf.MapField internalGetMutableMapField(int number) { + switch (number) { + case 1: + return internalGetMutableParameters(); + default: + throw new RuntimeException("Invalid map field number: " + number); + } + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return com.google.cloud.bigquery.biglake.v1alpha1.MetastoreProto + .internal_static_google_cloud_bigquery_biglake_v1alpha1_HiveTableOptions_fieldAccessorTable + .ensureFieldAccessorsInitialized( + com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions.class, + com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions.Builder.class); + } + + // Construct using com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions.newBuilder() + private Builder() {} + + private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + } + + @java.lang.Override + public Builder clear() { + super.clear(); + bitField0_ = 0; + internalGetMutableParameters().clear(); + tableType_ = ""; + storageDescriptor_ = null; + if (storageDescriptorBuilder_ != null) { + storageDescriptorBuilder_.dispose(); + storageDescriptorBuilder_ = null; + } + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { + return com.google.cloud.bigquery.biglake.v1alpha1.MetastoreProto + .internal_static_google_cloud_bigquery_biglake_v1alpha1_HiveTableOptions_descriptor; + } + + @java.lang.Override + public com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions getDefaultInstanceForType() { + return com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions.getDefaultInstance(); + } + + @java.lang.Override + public com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions build() { + com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions buildPartial() { + com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions result = + new com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions(this); + if (bitField0_ != 0) { + buildPartial0(result); + } + onBuilt(); + return result; + } + + private void buildPartial0(com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions result) { + int from_bitField0_ = bitField0_; + if (((from_bitField0_ & 0x00000001) != 0)) { + result.parameters_ = internalGetParameters(); + 
result.parameters_.makeImmutable(); + } + if (((from_bitField0_ & 0x00000002) != 0)) { + result.tableType_ = tableType_; + } + if (((from_bitField0_ & 0x00000004) != 0)) { + result.storageDescriptor_ = + storageDescriptorBuilder_ == null + ? storageDescriptor_ + : storageDescriptorBuilder_.build(); + } + } + + @java.lang.Override + public Builder clone() { + return super.clone(); + } + + @java.lang.Override + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { + return super.setField(field, value); + } + + @java.lang.Override + public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { + return super.clearField(field); + } + + @java.lang.Override + public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return super.clearOneof(oneof); + } + + @java.lang.Override + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { + return super.setRepeatedField(field, index, value); + } + + @java.lang.Override + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { + return super.addRepeatedField(field, value); + } + + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions) { + return mergeFrom((com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions) other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions other) { + if (other == com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions.getDefaultInstance()) + return this; + internalGetMutableParameters().mergeFrom(other.internalGetParameters()); + bitField0_ |= 0x00000001; + if (!other.getTableType().isEmpty()) { + tableType_ = other.tableType_; + bitField0_ |= 0x00000002; + onChanged(); + } + if (other.hasStorageDescriptor()) { + mergeStorageDescriptor(other.getStorageDescriptor()); + } + this.mergeUnknownFields(other.getUnknownFields()); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 10: + { + com.google.protobuf.MapEntry parameters__ = + input.readMessage( + ParametersDefaultEntryHolder.defaultEntry.getParserForType(), + extensionRegistry); + internalGetMutableParameters() + .getMutableMap() + .put(parameters__.getKey(), parameters__.getValue()); + bitField0_ |= 0x00000001; + break; + } // case 10 + case 18: + { + tableType_ = input.readStringRequireUtf8(); + bitField0_ |= 0x00000002; + break; + } // case 18 + case 26: + { + input.readMessage( + getStorageDescriptorFieldBuilder().getBuilder(), extensionRegistry); + bitField0_ |= 0x00000004; + break; + } // case 26 + default: + { + if (!super.parseUnknownField(input, extensionRegistry, tag)) { + done = true; // was an endgroup tag + } + break; + } // default: + } // switch (tag) + } // while (!done) + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + 
throw e.unwrapIOException(); + } finally { + onChanged(); + } // finally + return this; + } + + private int bitField0_; + + private com.google.protobuf.MapField parameters_; + + private com.google.protobuf.MapField + internalGetParameters() { + if (parameters_ == null) { + return com.google.protobuf.MapField.emptyMapField( + ParametersDefaultEntryHolder.defaultEntry); + } + return parameters_; + } + + private com.google.protobuf.MapField + internalGetMutableParameters() { + if (parameters_ == null) { + parameters_ = + com.google.protobuf.MapField.newMapField(ParametersDefaultEntryHolder.defaultEntry); + } + if (!parameters_.isMutable()) { + parameters_ = parameters_.copy(); + } + bitField0_ |= 0x00000001; + onChanged(); + return parameters_; + } + + public int getParametersCount() { + return internalGetParameters().getMap().size(); + } + /** + * + * + *
+     * Stores user-supplied Hive table parameters.
+     * 
+ * + * map<string, string> parameters = 1; + */ + @java.lang.Override + public boolean containsParameters(java.lang.String key) { + if (key == null) { + throw new NullPointerException("map key"); + } + return internalGetParameters().getMap().containsKey(key); + } + /** Use {@link #getParametersMap()} instead. */ + @java.lang.Override + @java.lang.Deprecated + public java.util.Map getParameters() { + return getParametersMap(); + } + /** + * + * + *
+     * Stores user-supplied Hive table parameters.
+     * 
+ * + * map<string, string> parameters = 1; + */ + @java.lang.Override + public java.util.Map getParametersMap() { + return internalGetParameters().getMap(); + } + /** + * + * + *
+     * Stores user-supplied Hive table parameters.
+     * 
+ * + * map<string, string> parameters = 1; + */ + @java.lang.Override + public /* nullable */ java.lang.String getParametersOrDefault( + java.lang.String key, + /* nullable */ + java.lang.String defaultValue) { + if (key == null) { + throw new NullPointerException("map key"); + } + java.util.Map map = internalGetParameters().getMap(); + return map.containsKey(key) ? map.get(key) : defaultValue; + } + /** + * + * + *
+     * Stores user-supplied Hive table parameters.
+     * 
+ * + * map<string, string> parameters = 1; + */ + @java.lang.Override + public java.lang.String getParametersOrThrow(java.lang.String key) { + if (key == null) { + throw new NullPointerException("map key"); + } + java.util.Map map = internalGetParameters().getMap(); + if (!map.containsKey(key)) { + throw new java.lang.IllegalArgumentException(); + } + return map.get(key); + } + + public Builder clearParameters() { + bitField0_ = (bitField0_ & ~0x00000001); + internalGetMutableParameters().getMutableMap().clear(); + return this; + } + /** + * + * + *
+     * Stores user-supplied Hive table parameters.
+     * 
+ * + * map<string, string> parameters = 1; + */ + public Builder removeParameters(java.lang.String key) { + if (key == null) { + throw new NullPointerException("map key"); + } + internalGetMutableParameters().getMutableMap().remove(key); + return this; + } + /** Use alternate mutation accessors instead. */ + @java.lang.Deprecated + public java.util.Map getMutableParameters() { + bitField0_ |= 0x00000001; + return internalGetMutableParameters().getMutableMap(); + } + /** + * + * + *
+     * Stores user-supplied Hive table parameters.
+     * 
+ * + * map<string, string> parameters = 1; + */ + public Builder putParameters(java.lang.String key, java.lang.String value) { + if (key == null) { + throw new NullPointerException("map key"); + } + if (value == null) { + throw new NullPointerException("map value"); + } + internalGetMutableParameters().getMutableMap().put(key, value); + bitField0_ |= 0x00000001; + return this; + } + /** + * + * + *
+     * Stores user-supplied Hive table parameters.
+     * 
+ * + * map<string, string> parameters = 1; + */ + public Builder putAllParameters(java.util.Map values) { + internalGetMutableParameters().getMutableMap().putAll(values); + bitField0_ |= 0x00000001; + return this; + } + + private java.lang.Object tableType_ = ""; + /** + * + * + *
+     * Hive table type. For example, MANAGED_TABLE, EXTERNAL_TABLE.
+     * 
+ * + * string table_type = 2; + * + * @return The tableType. + */ + public java.lang.String getTableType() { + java.lang.Object ref = tableType_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + tableType_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + * + * + *
+     * Hive table type. For example, MANAGED_TABLE, EXTERNAL_TABLE.
+     * 
+ * + * string table_type = 2; + * + * @return The bytes for tableType. + */ + public com.google.protobuf.ByteString getTableTypeBytes() { + java.lang.Object ref = tableType_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); + tableType_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * + * + *
+     * Hive table type. For example, MANAGED_TABLE, EXTERNAL_TABLE.
+     * 
+ * + * string table_type = 2; + * + * @param value The tableType to set. + * @return This builder for chaining. + */ + public Builder setTableType(java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + tableType_ = value; + bitField0_ |= 0x00000002; + onChanged(); + return this; + } + /** + * + * + *
+     * Hive table type. For example, MANAGED_TABLE, EXTERNAL_TABLE.
+     * 
+ * + * string table_type = 2; + * + * @return This builder for chaining. + */ + public Builder clearTableType() { + tableType_ = getDefaultInstance().getTableType(); + bitField0_ = (bitField0_ & ~0x00000002); + onChanged(); + return this; + } + /** + * + * + *
+     * Hive table type. For example, MANAGED_TABLE, EXTERNAL_TABLE.
+     * 
+ * + * string table_type = 2; + * + * @param value The bytes for tableType to set. + * @return This builder for chaining. + */ + public Builder setTableTypeBytes(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + tableType_ = value; + bitField0_ |= 0x00000002; + onChanged(); + return this; + } + + private com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions.StorageDescriptor + storageDescriptor_; + private com.google.protobuf.SingleFieldBuilderV3< + com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions.StorageDescriptor, + com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions.StorageDescriptor.Builder, + com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions.StorageDescriptorOrBuilder> + storageDescriptorBuilder_; + /** + * + * + *
+     * Stores physical storage information of the data.
+     * 
+ * + * + * .google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions.StorageDescriptor storage_descriptor = 3; + * + * + * @return Whether the storageDescriptor field is set. + */ + public boolean hasStorageDescriptor() { + return ((bitField0_ & 0x00000004) != 0); + } + /** + * + * + *
+     * Stores physical storage information of the data.
+     * 
+ * + * + * .google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions.StorageDescriptor storage_descriptor = 3; + * + * + * @return The storageDescriptor. + */ + public com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions.StorageDescriptor + getStorageDescriptor() { + if (storageDescriptorBuilder_ == null) { + return storageDescriptor_ == null + ? com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions.StorageDescriptor + .getDefaultInstance() + : storageDescriptor_; + } else { + return storageDescriptorBuilder_.getMessage(); + } + } + /** + * + * + *
+     * Stores physical storage information of the data.
+     * 
+ * + * + * .google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions.StorageDescriptor storage_descriptor = 3; + * + */ + public Builder setStorageDescriptor( + com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions.StorageDescriptor value) { + if (storageDescriptorBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + storageDescriptor_ = value; + } else { + storageDescriptorBuilder_.setMessage(value); + } + bitField0_ |= 0x00000004; + onChanged(); + return this; + } + /** + * + * + *
+     * Stores physical storage information of the data.
+     * 
+ * + * + * .google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions.StorageDescriptor storage_descriptor = 3; + * + */ + public Builder setStorageDescriptor( + com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions.StorageDescriptor.Builder + builderForValue) { + if (storageDescriptorBuilder_ == null) { + storageDescriptor_ = builderForValue.build(); + } else { + storageDescriptorBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000004; + onChanged(); + return this; + } + /** + * + * + *
+     * Stores physical storage information of the data.
+     * 
+ * + * + * .google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions.StorageDescriptor storage_descriptor = 3; + * + */ + public Builder mergeStorageDescriptor( + com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions.StorageDescriptor value) { + if (storageDescriptorBuilder_ == null) { + if (((bitField0_ & 0x00000004) != 0) + && storageDescriptor_ != null + && storageDescriptor_ + != com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions.StorageDescriptor + .getDefaultInstance()) { + getStorageDescriptorBuilder().mergeFrom(value); + } else { + storageDescriptor_ = value; + } + } else { + storageDescriptorBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000004; + onChanged(); + return this; + } + /** + * + * + *
+     * Stores physical storage information of the data.
+     * 
+ * + * + * .google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions.StorageDescriptor storage_descriptor = 3; + * + */ + public Builder clearStorageDescriptor() { + bitField0_ = (bitField0_ & ~0x00000004); + storageDescriptor_ = null; + if (storageDescriptorBuilder_ != null) { + storageDescriptorBuilder_.dispose(); + storageDescriptorBuilder_ = null; + } + onChanged(); + return this; + } + /** + * + * + *
+     * Stores physical storage information of the data.
+     * 
+ * + * + * .google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions.StorageDescriptor storage_descriptor = 3; + * + */ + public com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions.StorageDescriptor.Builder + getStorageDescriptorBuilder() { + bitField0_ |= 0x00000004; + onChanged(); + return getStorageDescriptorFieldBuilder().getBuilder(); + } + /** + * + * + *
+     * Stores physical storage information of the data.
+     * 
+ * + * + * .google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions.StorageDescriptor storage_descriptor = 3; + * + */ + public com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions.StorageDescriptorOrBuilder + getStorageDescriptorOrBuilder() { + if (storageDescriptorBuilder_ != null) { + return storageDescriptorBuilder_.getMessageOrBuilder(); + } else { + return storageDescriptor_ == null + ? com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions.StorageDescriptor + .getDefaultInstance() + : storageDescriptor_; + } + } + /** + * + * + *
+     * Stores physical storage information of the data.
+     * 
+ * + * + * .google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions.StorageDescriptor storage_descriptor = 3; + * + */ + private com.google.protobuf.SingleFieldBuilderV3< + com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions.StorageDescriptor, + com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions.StorageDescriptor.Builder, + com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions.StorageDescriptorOrBuilder> + getStorageDescriptorFieldBuilder() { + if (storageDescriptorBuilder_ == null) { + storageDescriptorBuilder_ = + new com.google.protobuf.SingleFieldBuilderV3< + com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions.StorageDescriptor, + com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions.StorageDescriptor + .Builder, + com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions + .StorageDescriptorOrBuilder>( + getStorageDescriptor(), getParentForChildren(), isClean()); + storageDescriptor_ = null; + } + return storageDescriptorBuilder_; + } + + @java.lang.Override + public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + // @@protoc_insertion_point(builder_scope:google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions) + } + + // @@protoc_insertion_point(class_scope:google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions) + private static final com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions DEFAULT_INSTANCE; + + static { + DEFAULT_INSTANCE = new com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions(); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + @java.lang.Override + public HiveTableOptions parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + Builder builder = newBuilder(); + try { + builder.mergeFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(builder.buildPartial()); + } catch (com.google.protobuf.UninitializedMessageException e) { + throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException(e) + .setUnfinishedMessage(builder.buildPartial()); + } + return builder.buildPartial(); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } +} diff --git a/java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/HiveTableOptionsOrBuilder.java b/java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/HiveTableOptionsOrBuilder.java new file mode 100644 index 000000000000..69772de36343 --- /dev/null +++ 
b/java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/HiveTableOptionsOrBuilder.java @@ -0,0 +1,151 @@ +/* + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: google/cloud/bigquery/biglake/v1alpha1/metastore.proto + +package com.google.cloud.bigquery.biglake.v1alpha1; + +public interface HiveTableOptionsOrBuilder + extends + // @@protoc_insertion_point(interface_extends:google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions) + com.google.protobuf.MessageOrBuilder { + + /** + * + * + *
+   * Stores user-supplied Hive table parameters.
+   * 
+ * + * map<string, string> parameters = 1; + */ + int getParametersCount(); + /** + * + * + *
+   * Stores user-supplied Hive table parameters.
+   * 
+ * + * map<string, string> parameters = 1; + */ + boolean containsParameters(java.lang.String key); + /** Use {@link #getParametersMap()} instead. */ + @java.lang.Deprecated + java.util.Map getParameters(); + /** + * + * + *
+   * Stores user-supplied Hive table parameters.
+   * 
+ * + * map<string, string> parameters = 1; + */ + java.util.Map getParametersMap(); + /** + * + * + *
+   * Stores user-supplied Hive table parameters.
+   * 
+ * + * map<string, string> parameters = 1; + */ + /* nullable */ + java.lang.String getParametersOrDefault( + java.lang.String key, + /* nullable */ + java.lang.String defaultValue); + /** + * + * + *
+   * Stores user-supplied Hive table parameters.
+   * 
+ * + * map<string, string> parameters = 1; + */ + java.lang.String getParametersOrThrow(java.lang.String key); + + /** + * + * + *
+   * Hive table type. For example, MANAGED_TABLE, EXTERNAL_TABLE.
+   * 
+ * + * string table_type = 2; + * + * @return The tableType. + */ + java.lang.String getTableType(); + /** + * + * + *
+   * Hive table type. For example, MANAGED_TABLE, EXTERNAL_TABLE.
+   * 
+ * + * string table_type = 2; + * + * @return The bytes for tableType. + */ + com.google.protobuf.ByteString getTableTypeBytes(); + + /** + * + * + *
+   * Stores physical storage information of the data.
+   * 
+ * + * + * .google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions.StorageDescriptor storage_descriptor = 3; + * + * + * @return Whether the storageDescriptor field is set. + */ + boolean hasStorageDescriptor(); + /** + * + * + *
+   * Stores physical storage information of the data.
+   * 
+ * + * + * .google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions.StorageDescriptor storage_descriptor = 3; + * + * + * @return The storageDescriptor. + */ + com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions.StorageDescriptor + getStorageDescriptor(); + /** + * + * + *
+   * Stores physical storage information of the data.
+   * 
+ * + * + * .google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions.StorageDescriptor storage_descriptor = 3; + * + */ + com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions.StorageDescriptorOrBuilder + getStorageDescriptorOrBuilder(); +} diff --git a/java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/ListCatalogsRequest.java b/java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/ListCatalogsRequest.java new file mode 100644 index 000000000000..556f96377f75 --- /dev/null +++ b/java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/ListCatalogsRequest.java @@ -0,0 +1,965 @@ +/* + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: google/cloud/bigquery/biglake/v1alpha1/metastore.proto + +package com.google.cloud.bigquery.biglake.v1alpha1; + +/** + * + * + *
+ * Request message for the ListCatalogs method.
+ * 
+ * + * Protobuf type {@code google.cloud.bigquery.biglake.v1alpha1.ListCatalogsRequest} + */ +public final class ListCatalogsRequest extends com.google.protobuf.GeneratedMessageV3 + implements + // @@protoc_insertion_point(message_implements:google.cloud.bigquery.biglake.v1alpha1.ListCatalogsRequest) + ListCatalogsRequestOrBuilder { + private static final long serialVersionUID = 0L; + // Use ListCatalogsRequest.newBuilder() to construct. + private ListCatalogsRequest(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + + private ListCatalogsRequest() { + parent_ = ""; + pageToken_ = ""; + } + + @java.lang.Override + @SuppressWarnings({"unused"}) + protected java.lang.Object newInstance(UnusedPrivateParameter unused) { + return new ListCatalogsRequest(); + } + + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet getUnknownFields() { + return this.unknownFields; + } + + public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { + return com.google.cloud.bigquery.biglake.v1alpha1.MetastoreProto + .internal_static_google_cloud_bigquery_biglake_v1alpha1_ListCatalogsRequest_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return com.google.cloud.bigquery.biglake.v1alpha1.MetastoreProto + .internal_static_google_cloud_bigquery_biglake_v1alpha1_ListCatalogsRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + com.google.cloud.bigquery.biglake.v1alpha1.ListCatalogsRequest.class, + com.google.cloud.bigquery.biglake.v1alpha1.ListCatalogsRequest.Builder.class); + } + + public static final int PARENT_FIELD_NUMBER = 1; + + @SuppressWarnings("serial") + private volatile java.lang.Object parent_ = ""; + /** + * + * + *
+   * Required. The parent, which owns this collection of catalogs.
+   * Format: projects/{project_id_or_number}/locations/{location_id}
+   * 
+ * + * + * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @return The parent. + */ + @java.lang.Override + public java.lang.String getParent() { + java.lang.Object ref = parent_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + parent_ = s; + return s; + } + } + /** + * + * + *
+   * Required. The parent, which owns this collection of catalogs.
+   * Format: projects/{project_id_or_number}/locations/{location_id}
+   * 
+ * + * + * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @return The bytes for parent. + */ + @java.lang.Override + public com.google.protobuf.ByteString getParentBytes() { + java.lang.Object ref = parent_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); + parent_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int PAGE_SIZE_FIELD_NUMBER = 2; + private int pageSize_ = 0; + /** + * + * + *
+   * The maximum number of catalogs to return. The service may return fewer than
+   * this value.
+   * If unspecified, at most 50 catalogs will be returned.
+   * The maximum value is 1000; values above 1000 will be coerced to 1000.
+   * 
+ * + * int32 page_size = 2; + * + * @return The pageSize. + */ + @java.lang.Override + public int getPageSize() { + return pageSize_; + } + + public static final int PAGE_TOKEN_FIELD_NUMBER = 3; + + @SuppressWarnings("serial") + private volatile java.lang.Object pageToken_ = ""; + /** + * + * + *
+   * A page token, received from a previous `ListCatalogs` call.
+   * Provide this to retrieve the subsequent page.
+   * When paginating, all other parameters provided to `ListCatalogs` must match
+   * the call that provided the page token.
+   * 
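 + * + * Editorial sketch (not part of the generated source): manual pagination with + * this message. The callListCatalogs(...) helper is hypothetical and stands in + * for whatever stub or client call issues the RPC; the parent is a placeholder. + * + *
{@code
+   * String parent = "projects/my-project/locations/us"; // placeholder
+   * String pageToken = "";
+   * do {
+   *   ListCatalogsRequest request =
+   *       ListCatalogsRequest.newBuilder()
+   *           .setParent(parent)
+   *           .setPageSize(100)
+   *           .setPageToken(pageToken)
+   *           .build();
+   *   ListCatalogsResponse response = callListCatalogs(request); // hypothetical helper
+   *   for (Catalog catalog : response.getCatalogsList()) {
+   *     // process each catalog
+   *   }
+   *   pageToken = response.getNextPageToken();
+   * } while (!pageToken.isEmpty());
+   * }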
+ * + * string page_token = 3; + * + * @return The pageToken. + */ + @java.lang.Override + public java.lang.String getPageToken() { + java.lang.Object ref = pageToken_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + pageToken_ = s; + return s; + } + } + /** + * + * + *
+   * A page token, received from a previous `ListCatalogs` call.
+   * Provide this to retrieve the subsequent page.
+   * When paginating, all other parameters provided to `ListCatalogs` must match
+   * the call that provided the page token.
+   * 
+ * + * string page_token = 3; + * + * @return The bytes for pageToken. + */ + @java.lang.Override + public com.google.protobuf.ByteString getPageTokenBytes() { + java.lang.Object ref = pageToken_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); + pageToken_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + private byte memoizedIsInitialized = -1; + + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_); + } + if (pageSize_ != 0) { + output.writeInt32(2, pageSize_); + } + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 3, pageToken_); + } + getUnknownFields().writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_); + } + if (pageSize_ != 0) { + size += com.google.protobuf.CodedOutputStream.computeInt32Size(2, pageSize_); + } + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, pageToken_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof com.google.cloud.bigquery.biglake.v1alpha1.ListCatalogsRequest)) { + return super.equals(obj); + } + com.google.cloud.bigquery.biglake.v1alpha1.ListCatalogsRequest other = + (com.google.cloud.bigquery.biglake.v1alpha1.ListCatalogsRequest) obj; + + if (!getParent().equals(other.getParent())) return false; + if (getPageSize() != other.getPageSize()) return false; + if (!getPageToken().equals(other.getPageToken())) return false; + if (!getUnknownFields().equals(other.getUnknownFields())) return false; + return true; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + hash = (37 * hash) + PARENT_FIELD_NUMBER; + hash = (53 * hash) + getParent().hashCode(); + hash = (37 * hash) + PAGE_SIZE_FIELD_NUMBER; + hash = (53 * hash) + getPageSize(); + hash = (37 * hash) + PAGE_TOKEN_FIELD_NUMBER; + hash = (53 * hash) + getPageToken().hashCode(); + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.ListCatalogsRequest parseFrom( + java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.ListCatalogsRequest parseFrom( + java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws 
com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.ListCatalogsRequest parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.ListCatalogsRequest parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.ListCatalogsRequest parseFrom( + byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.ListCatalogsRequest parseFrom( + byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.ListCatalogsRequest parseFrom( + java.io.InputStream input) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.ListCatalogsRequest parseFrom( + java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException( + PARSER, input, extensionRegistry); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.ListCatalogsRequest parseDelimitedFrom( + java.io.InputStream input) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.ListCatalogsRequest parseDelimitedFrom( + java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( + PARSER, input, extensionRegistry); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.ListCatalogsRequest parseFrom( + com.google.protobuf.CodedInputStream input) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.ListCatalogsRequest parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException( + PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { + return newBuilder(); + } + + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + + public static Builder newBuilder( + com.google.cloud.bigquery.biglake.v1alpha1.ListCatalogsRequest prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * + * + *
+   * Request message for the ListCatalogs method.
+   * 
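 + * + * Editorial sketch (not part of the generated source): typical builder usage, + * including deriving a follow-up request from an immutable message. The parent + * and token values are placeholders. + * + *
{@code
+   * ListCatalogsRequest request =
+   *     ListCatalogsRequest.newBuilder()
+   *         .setParent("projects/my-project/locations/us") // placeholder
+   *         .setPageSize(50)
+   *         .build();
+   * String nextPageToken = "..."; // placeholder: taken from a prior ListCatalogsResponse
+   * ListCatalogsRequest nextPage =
+   *     request.toBuilder().setPageToken(nextPageToken).build();
+   * }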
+ * + * Protobuf type {@code google.cloud.bigquery.biglake.v1alpha1.ListCatalogsRequest} + */ + public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder + implements + // @@protoc_insertion_point(builder_implements:google.cloud.bigquery.biglake.v1alpha1.ListCatalogsRequest) + com.google.cloud.bigquery.biglake.v1alpha1.ListCatalogsRequestOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { + return com.google.cloud.bigquery.biglake.v1alpha1.MetastoreProto + .internal_static_google_cloud_bigquery_biglake_v1alpha1_ListCatalogsRequest_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return com.google.cloud.bigquery.biglake.v1alpha1.MetastoreProto + .internal_static_google_cloud_bigquery_biglake_v1alpha1_ListCatalogsRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + com.google.cloud.bigquery.biglake.v1alpha1.ListCatalogsRequest.class, + com.google.cloud.bigquery.biglake.v1alpha1.ListCatalogsRequest.Builder.class); + } + + // Construct using com.google.cloud.bigquery.biglake.v1alpha1.ListCatalogsRequest.newBuilder() + private Builder() {} + + private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + } + + @java.lang.Override + public Builder clear() { + super.clear(); + bitField0_ = 0; + parent_ = ""; + pageSize_ = 0; + pageToken_ = ""; + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { + return com.google.cloud.bigquery.biglake.v1alpha1.MetastoreProto + .internal_static_google_cloud_bigquery_biglake_v1alpha1_ListCatalogsRequest_descriptor; + } + + @java.lang.Override + public com.google.cloud.bigquery.biglake.v1alpha1.ListCatalogsRequest + getDefaultInstanceForType() { + return com.google.cloud.bigquery.biglake.v1alpha1.ListCatalogsRequest.getDefaultInstance(); + } + + @java.lang.Override + public com.google.cloud.bigquery.biglake.v1alpha1.ListCatalogsRequest build() { + com.google.cloud.bigquery.biglake.v1alpha1.ListCatalogsRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public com.google.cloud.bigquery.biglake.v1alpha1.ListCatalogsRequest buildPartial() { + com.google.cloud.bigquery.biglake.v1alpha1.ListCatalogsRequest result = + new com.google.cloud.bigquery.biglake.v1alpha1.ListCatalogsRequest(this); + if (bitField0_ != 0) { + buildPartial0(result); + } + onBuilt(); + return result; + } + + private void buildPartial0( + com.google.cloud.bigquery.biglake.v1alpha1.ListCatalogsRequest result) { + int from_bitField0_ = bitField0_; + if (((from_bitField0_ & 0x00000001) != 0)) { + result.parent_ = parent_; + } + if (((from_bitField0_ & 0x00000002) != 0)) { + result.pageSize_ = pageSize_; + } + if (((from_bitField0_ & 0x00000004) != 0)) { + result.pageToken_ = pageToken_; + } + } + + @java.lang.Override + public Builder clone() { + return super.clone(); + } + + @java.lang.Override + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { + return super.setField(field, value); + } + + @java.lang.Override + public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { + return super.clearField(field); + } + + @java.lang.Override + public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return 
super.clearOneof(oneof); + } + + @java.lang.Override + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { + return super.setRepeatedField(field, index, value); + } + + @java.lang.Override + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { + return super.addRepeatedField(field, value); + } + + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof com.google.cloud.bigquery.biglake.v1alpha1.ListCatalogsRequest) { + return mergeFrom((com.google.cloud.bigquery.biglake.v1alpha1.ListCatalogsRequest) other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(com.google.cloud.bigquery.biglake.v1alpha1.ListCatalogsRequest other) { + if (other + == com.google.cloud.bigquery.biglake.v1alpha1.ListCatalogsRequest.getDefaultInstance()) + return this; + if (!other.getParent().isEmpty()) { + parent_ = other.parent_; + bitField0_ |= 0x00000001; + onChanged(); + } + if (other.getPageSize() != 0) { + setPageSize(other.getPageSize()); + } + if (!other.getPageToken().isEmpty()) { + pageToken_ = other.pageToken_; + bitField0_ |= 0x00000004; + onChanged(); + } + this.mergeUnknownFields(other.getUnknownFields()); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 10: + { + parent_ = input.readStringRequireUtf8(); + bitField0_ |= 0x00000001; + break; + } // case 10 + case 16: + { + pageSize_ = input.readInt32(); + bitField0_ |= 0x00000002; + break; + } // case 16 + case 26: + { + pageToken_ = input.readStringRequireUtf8(); + bitField0_ |= 0x00000004; + break; + } // case 26 + default: + { + if (!super.parseUnknownField(input, extensionRegistry, tag)) { + done = true; // was an endgroup tag + } + break; + } // default: + } // switch (tag) + } // while (!done) + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.unwrapIOException(); + } finally { + onChanged(); + } // finally + return this; + } + + private int bitField0_; + + private java.lang.Object parent_ = ""; + /** + * + * + *
+     * Required. The parent, which owns this collection of catalogs.
+     * Format: projects/{project_id_or_number}/locations/{location_id}
+     * 
+ * + * + * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @return The parent. + */ + public java.lang.String getParent() { + java.lang.Object ref = parent_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + parent_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + * + * + *
+     * Required. The parent, which owns this collection of catalogs.
+     * Format: projects/{project_id_or_number}/locations/{location_id}
+     * 
+ * + * + * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @return The bytes for parent. + */ + public com.google.protobuf.ByteString getParentBytes() { + java.lang.Object ref = parent_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); + parent_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * + * + *
+     * Required. The parent, which owns this collection of catalogs.
+     * Format: projects/{project_id_or_number}/locations/{location_id}
+     * 
+ * + * + * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @param value The parent to set. + * @return This builder for chaining. + */ + public Builder setParent(java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + parent_ = value; + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + /** + * + * + *
+     * Required. The parent, which owns this collection of catalogs.
+     * Format: projects/{project_id_or_number}/locations/{location_id}
+     * 
+ * + * + * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @return This builder for chaining. + */ + public Builder clearParent() { + parent_ = getDefaultInstance().getParent(); + bitField0_ = (bitField0_ & ~0x00000001); + onChanged(); + return this; + } + /** + * + * + *
+     * Required. The parent, which owns this collection of catalogs.
+     * Format: projects/{project_id_or_number}/locations/{location_id}
+     * 
+ * + * + * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @param value The bytes for parent to set. + * @return This builder for chaining. + */ + public Builder setParentBytes(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + parent_ = value; + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + + private int pageSize_; + /** + * + * + *
+     * The maximum number of catalogs to return. The service may return fewer than
+     * this value.
+     * If unspecified, at most 50 catalogs will be returned.
+     * The maximum value is 1000; values above 1000 will be coerced to 1000.
+     * 
+ * + * int32 page_size = 2; + * + * @return The pageSize. + */ + @java.lang.Override + public int getPageSize() { + return pageSize_; + } + /** + * + * + *
+     * The maximum number of catalogs to return. The service may return fewer than
+     * this value.
+     * If unspecified, at most 50 catalogs will be returned.
+     * The maximum value is 1000; values above 1000 will be coerced to 1000.
+     * 
+ * + * int32 page_size = 2; + * + * @param value The pageSize to set. + * @return This builder for chaining. + */ + public Builder setPageSize(int value) { + + pageSize_ = value; + bitField0_ |= 0x00000002; + onChanged(); + return this; + } + /** + * + * + *
+     * The maximum number of catalogs to return. The service may return fewer than
+     * this value.
+     * If unspecified, at most 50 catalogs will be returned.
+     * The maximum value is 1000; values above 1000 will be coerced to 1000.
+     * 
+ * + * int32 page_size = 2; + * + * @return This builder for chaining. + */ + public Builder clearPageSize() { + bitField0_ = (bitField0_ & ~0x00000002); + pageSize_ = 0; + onChanged(); + return this; + } + + private java.lang.Object pageToken_ = ""; + /** + * + * + *
+     * A page token, received from a previous `ListCatalogs` call.
+     * Provide this to retrieve the subsequent page.
+     * When paginating, all other parameters provided to `ListCatalogs` must match
+     * the call that provided the page token.
+     * 
+ * + * string page_token = 3; + * + * @return The pageToken. + */ + public java.lang.String getPageToken() { + java.lang.Object ref = pageToken_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + pageToken_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + * + * + *
+     * A page token, received from a previous `ListCatalogs` call.
+     * Provide this to retrieve the subsequent page.
+     * When paginating, all other parameters provided to `ListCatalogs` must match
+     * the call that provided the page token.
+     * 
+ * + * string page_token = 3; + * + * @return The bytes for pageToken. + */ + public com.google.protobuf.ByteString getPageTokenBytes() { + java.lang.Object ref = pageToken_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); + pageToken_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * + * + *
+     * A page token, received from a previous `ListCatalogs` call.
+     * Provide this to retrieve the subsequent page.
+     * When paginating, all other parameters provided to `ListCatalogs` must match
+     * the call that provided the page token.
+     * 
+ * + * string page_token = 3; + * + * @param value The pageToken to set. + * @return This builder for chaining. + */ + public Builder setPageToken(java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + pageToken_ = value; + bitField0_ |= 0x00000004; + onChanged(); + return this; + } + /** + * + * + *
+     * A page token, received from a previous `ListCatalogs` call.
+     * Provide this to retrieve the subsequent page.
+     * When paginating, all other parameters provided to `ListCatalogs` must match
+     * the call that provided the page token.
+     * 
+ * + * string page_token = 3; + * + * @return This builder for chaining. + */ + public Builder clearPageToken() { + pageToken_ = getDefaultInstance().getPageToken(); + bitField0_ = (bitField0_ & ~0x00000004); + onChanged(); + return this; + } + /** + * + * + *
+     * A page token, received from a previous `ListCatalogs` call.
+     * Provide this to retrieve the subsequent page.
+     * When paginating, all other parameters provided to `ListCatalogs` must match
+     * the call that provided the page token.
+     * 
+ * + * string page_token = 3; + * + * @param value The bytes for pageToken to set. + * @return This builder for chaining. + */ + public Builder setPageTokenBytes(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + pageToken_ = value; + bitField0_ |= 0x00000004; + onChanged(); + return this; + } + + @java.lang.Override + public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + // @@protoc_insertion_point(builder_scope:google.cloud.bigquery.biglake.v1alpha1.ListCatalogsRequest) + } + + // @@protoc_insertion_point(class_scope:google.cloud.bigquery.biglake.v1alpha1.ListCatalogsRequest) + private static final com.google.cloud.bigquery.biglake.v1alpha1.ListCatalogsRequest + DEFAULT_INSTANCE; + + static { + DEFAULT_INSTANCE = new com.google.cloud.bigquery.biglake.v1alpha1.ListCatalogsRequest(); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.ListCatalogsRequest + getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + @java.lang.Override + public ListCatalogsRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + Builder builder = newBuilder(); + try { + builder.mergeFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(builder.buildPartial()); + } catch (com.google.protobuf.UninitializedMessageException e) { + throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException(e) + .setUnfinishedMessage(builder.buildPartial()); + } + return builder.buildPartial(); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public com.google.cloud.bigquery.biglake.v1alpha1.ListCatalogsRequest + getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } +} diff --git a/java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/ListCatalogsRequestOrBuilder.java b/java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/ListCatalogsRequestOrBuilder.java new file mode 100644 index 000000000000..45cdcaf1e0db --- /dev/null +++ b/java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/ListCatalogsRequestOrBuilder.java @@ -0,0 +1,103 @@ +/* + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: google/cloud/bigquery/biglake/v1alpha1/metastore.proto + +package com.google.cloud.bigquery.biglake.v1alpha1; + +public interface ListCatalogsRequestOrBuilder + extends + // @@protoc_insertion_point(interface_extends:google.cloud.bigquery.biglake.v1alpha1.ListCatalogsRequest) + com.google.protobuf.MessageOrBuilder { + + /** + * + * + *
+   * Required. The parent, which owns this collection of catalogs.
+   * Format: projects/{project_id_or_number}/locations/{location_id}
+   * 
+ * + * + * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @return The parent. + */ + java.lang.String getParent(); + /** + * + * + *
+   * Required. The parent, which owns this collection of catalogs.
+   * Format: projects/{project_id_or_number}/locations/{location_id}
+   * 
+ * + * + * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @return The bytes for parent. + */ + com.google.protobuf.ByteString getParentBytes(); + + /** + * + * + *
+   * The maximum number of catalogs to return. The service may return fewer than
+   * this value.
+   * If unspecified, at most 50 catalogs will be returned.
+   * The maximum value is 1000; values above 1000 will be coerced to 1000.
+   * 
+ * + * int32 page_size = 2; + * + * @return The pageSize. + */ + int getPageSize(); + + /** + * + * + *
+   * A page token, received from a previous `ListCatalogs` call.
+   * Provide this to retrieve the subsequent page.
+   * When paginating, all other parameters provided to `ListCatalogs` must match
+   * the call that provided the page token.
+   * 
+ * + * string page_token = 3; + * + * @return The pageToken. + */ + java.lang.String getPageToken(); + /** + * + * + *
+   * A page token, received from a previous `ListCatalogs` call.
+   * Provide this to retrieve the subsequent page.
+   * When paginating, all other parameters provided to `ListCatalogs` must match
+   * the call that provided the page token.
+   * 
+ * + * string page_token = 3; + * + * @return The bytes for pageToken. + */ + com.google.protobuf.ByteString getPageTokenBytes(); +} diff --git a/java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/ListCatalogsResponse.java b/java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/ListCatalogsResponse.java new file mode 100644 index 000000000000..0c64fc7b28f4 --- /dev/null +++ b/java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/ListCatalogsResponse.java @@ -0,0 +1,1147 @@ +/* + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: google/cloud/bigquery/biglake/v1alpha1/metastore.proto + +package com.google.cloud.bigquery.biglake.v1alpha1; + +/** + * + * + *
+ * Response message for the ListCatalogs method.
+ * 
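 + * + * Editorial sketch (not part of the generated source): reading a response. It + * assumes a ListCatalogsResponse named response obtained from a prior + * ListCatalogs call, and that Catalog exposes getName() for its resource name. + * + *
{@code
+ * for (Catalog catalog : response.getCatalogsList()) {
+ *   System.out.println(catalog.getName());
+ * }
+ * if (response.getNextPageToken().isEmpty()) {
+ *   // No further pages; stop paginating.
+ * }
+ * }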
+ * + * Protobuf type {@code google.cloud.bigquery.biglake.v1alpha1.ListCatalogsResponse} + */ +public final class ListCatalogsResponse extends com.google.protobuf.GeneratedMessageV3 + implements + // @@protoc_insertion_point(message_implements:google.cloud.bigquery.biglake.v1alpha1.ListCatalogsResponse) + ListCatalogsResponseOrBuilder { + private static final long serialVersionUID = 0L; + // Use ListCatalogsResponse.newBuilder() to construct. + private ListCatalogsResponse(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + + private ListCatalogsResponse() { + catalogs_ = java.util.Collections.emptyList(); + nextPageToken_ = ""; + } + + @java.lang.Override + @SuppressWarnings({"unused"}) + protected java.lang.Object newInstance(UnusedPrivateParameter unused) { + return new ListCatalogsResponse(); + } + + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet getUnknownFields() { + return this.unknownFields; + } + + public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { + return com.google.cloud.bigquery.biglake.v1alpha1.MetastoreProto + .internal_static_google_cloud_bigquery_biglake_v1alpha1_ListCatalogsResponse_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return com.google.cloud.bigquery.biglake.v1alpha1.MetastoreProto + .internal_static_google_cloud_bigquery_biglake_v1alpha1_ListCatalogsResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + com.google.cloud.bigquery.biglake.v1alpha1.ListCatalogsResponse.class, + com.google.cloud.bigquery.biglake.v1alpha1.ListCatalogsResponse.Builder.class); + } + + public static final int CATALOGS_FIELD_NUMBER = 1; + + @SuppressWarnings("serial") + private java.util.List catalogs_; + /** + * + * + *
+   * The catalogs from the specified project.
+   * 
+ * + * repeated .google.cloud.bigquery.biglake.v1alpha1.Catalog catalogs = 1; + */ + @java.lang.Override + public java.util.List getCatalogsList() { + return catalogs_; + } + /** + * + * + *
+   * The catalogs from the specified project.
+   * 
+ * + * repeated .google.cloud.bigquery.biglake.v1alpha1.Catalog catalogs = 1; + */ + @java.lang.Override + public java.util.List + getCatalogsOrBuilderList() { + return catalogs_; + } + /** + * + * + *
+   * The catalogs from the specified project.
+   * 
+ * + * repeated .google.cloud.bigquery.biglake.v1alpha1.Catalog catalogs = 1; + */ + @java.lang.Override + public int getCatalogsCount() { + return catalogs_.size(); + } + /** + * + * + *
+   * The catalogs from the specified project.
+   * 
+ * + * repeated .google.cloud.bigquery.biglake.v1alpha1.Catalog catalogs = 1; + */ + @java.lang.Override + public com.google.cloud.bigquery.biglake.v1alpha1.Catalog getCatalogs(int index) { + return catalogs_.get(index); + } + /** + * + * + *
+   * The catalogs from the specified project.
+   * 
+ * + * repeated .google.cloud.bigquery.biglake.v1alpha1.Catalog catalogs = 1; + */ + @java.lang.Override + public com.google.cloud.bigquery.biglake.v1alpha1.CatalogOrBuilder getCatalogsOrBuilder( + int index) { + return catalogs_.get(index); + } + + public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2; + + @SuppressWarnings("serial") + private volatile java.lang.Object nextPageToken_ = ""; + /** + * + * + *
+   * A token, which can be sent as `page_token` to retrieve the next page.
+   * If this field is omitted, there are no subsequent pages.
+   * 
+ * + * string next_page_token = 2; + * + * @return The nextPageToken. + */ + @java.lang.Override + public java.lang.String getNextPageToken() { + java.lang.Object ref = nextPageToken_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + nextPageToken_ = s; + return s; + } + } + /** + * + * + *
+   * A token, which can be sent as `page_token` to retrieve the next page.
+   * If this field is omitted, there are no subsequent pages.
+   * 
+ * + * string next_page_token = 2; + * + * @return The bytes for nextPageToken. + */ + @java.lang.Override + public com.google.protobuf.ByteString getNextPageTokenBytes() { + java.lang.Object ref = nextPageToken_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); + nextPageToken_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + private byte memoizedIsInitialized = -1; + + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { + for (int i = 0; i < catalogs_.size(); i++) { + output.writeMessage(1, catalogs_.get(i)); + } + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_); + } + getUnknownFields().writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + for (int i = 0; i < catalogs_.size(); i++) { + size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, catalogs_.get(i)); + } + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof com.google.cloud.bigquery.biglake.v1alpha1.ListCatalogsResponse)) { + return super.equals(obj); + } + com.google.cloud.bigquery.biglake.v1alpha1.ListCatalogsResponse other = + (com.google.cloud.bigquery.biglake.v1alpha1.ListCatalogsResponse) obj; + + if (!getCatalogsList().equals(other.getCatalogsList())) return false; + if (!getNextPageToken().equals(other.getNextPageToken())) return false; + if (!getUnknownFields().equals(other.getUnknownFields())) return false; + return true; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + if (getCatalogsCount() > 0) { + hash = (37 * hash) + CATALOGS_FIELD_NUMBER; + hash = (53 * hash) + getCatalogsList().hashCode(); + } + hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER; + hash = (53 * hash) + getNextPageToken().hashCode(); + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.ListCatalogsResponse parseFrom( + java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.ListCatalogsResponse parseFrom( + java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.ListCatalogsResponse parseFrom( + com.google.protobuf.ByteString data) + throws 
com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.ListCatalogsResponse parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.ListCatalogsResponse parseFrom( + byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.ListCatalogsResponse parseFrom( + byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.ListCatalogsResponse parseFrom( + java.io.InputStream input) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.ListCatalogsResponse parseFrom( + java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException( + PARSER, input, extensionRegistry); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.ListCatalogsResponse parseDelimitedFrom( + java.io.InputStream input) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.ListCatalogsResponse parseDelimitedFrom( + java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( + PARSER, input, extensionRegistry); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.ListCatalogsResponse parseFrom( + com.google.protobuf.CodedInputStream input) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.ListCatalogsResponse parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException( + PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { + return newBuilder(); + } + + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + + public static Builder newBuilder( + com.google.cloud.bigquery.biglake.v1alpha1.ListCatalogsResponse prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * + * + *
+   * Response message for the ListCatalogs method.
+   * 
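 + * + * Editorial sketch (not part of the generated source): assembling a response by + * hand, as one might when stubbing the service in a test. The single empty + * Catalog is a placeholder. + * + *
{@code
+   * ListCatalogsResponse fakePage =
+   *     ListCatalogsResponse.newBuilder()
+   *         .addCatalogs(Catalog.newBuilder().build()) // placeholder catalog
+   *         .setNextPageToken("") // empty token marks the final page
+   *         .build();
+   * }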
+ * + * Protobuf type {@code google.cloud.bigquery.biglake.v1alpha1.ListCatalogsResponse} + */ + public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder + implements + // @@protoc_insertion_point(builder_implements:google.cloud.bigquery.biglake.v1alpha1.ListCatalogsResponse) + com.google.cloud.bigquery.biglake.v1alpha1.ListCatalogsResponseOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { + return com.google.cloud.bigquery.biglake.v1alpha1.MetastoreProto + .internal_static_google_cloud_bigquery_biglake_v1alpha1_ListCatalogsResponse_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return com.google.cloud.bigquery.biglake.v1alpha1.MetastoreProto + .internal_static_google_cloud_bigquery_biglake_v1alpha1_ListCatalogsResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + com.google.cloud.bigquery.biglake.v1alpha1.ListCatalogsResponse.class, + com.google.cloud.bigquery.biglake.v1alpha1.ListCatalogsResponse.Builder.class); + } + + // Construct using com.google.cloud.bigquery.biglake.v1alpha1.ListCatalogsResponse.newBuilder() + private Builder() {} + + private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + } + + @java.lang.Override + public Builder clear() { + super.clear(); + bitField0_ = 0; + if (catalogsBuilder_ == null) { + catalogs_ = java.util.Collections.emptyList(); + } else { + catalogs_ = null; + catalogsBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + nextPageToken_ = ""; + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { + return com.google.cloud.bigquery.biglake.v1alpha1.MetastoreProto + .internal_static_google_cloud_bigquery_biglake_v1alpha1_ListCatalogsResponse_descriptor; + } + + @java.lang.Override + public com.google.cloud.bigquery.biglake.v1alpha1.ListCatalogsResponse + getDefaultInstanceForType() { + return com.google.cloud.bigquery.biglake.v1alpha1.ListCatalogsResponse.getDefaultInstance(); + } + + @java.lang.Override + public com.google.cloud.bigquery.biglake.v1alpha1.ListCatalogsResponse build() { + com.google.cloud.bigquery.biglake.v1alpha1.ListCatalogsResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public com.google.cloud.bigquery.biglake.v1alpha1.ListCatalogsResponse buildPartial() { + com.google.cloud.bigquery.biglake.v1alpha1.ListCatalogsResponse result = + new com.google.cloud.bigquery.biglake.v1alpha1.ListCatalogsResponse(this); + buildPartialRepeatedFields(result); + if (bitField0_ != 0) { + buildPartial0(result); + } + onBuilt(); + return result; + } + + private void buildPartialRepeatedFields( + com.google.cloud.bigquery.biglake.v1alpha1.ListCatalogsResponse result) { + if (catalogsBuilder_ == null) { + if (((bitField0_ & 0x00000001) != 0)) { + catalogs_ = java.util.Collections.unmodifiableList(catalogs_); + bitField0_ = (bitField0_ & ~0x00000001); + } + result.catalogs_ = catalogs_; + } else { + result.catalogs_ = catalogsBuilder_.build(); + } + } + + private void buildPartial0( + com.google.cloud.bigquery.biglake.v1alpha1.ListCatalogsResponse result) { + int from_bitField0_ = bitField0_; + if (((from_bitField0_ & 0x00000002) != 0)) { + result.nextPageToken_ = nextPageToken_; + } + } + + @java.lang.Override + public Builder clone() { + 
return super.clone(); + } + + @java.lang.Override + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { + return super.setField(field, value); + } + + @java.lang.Override + public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { + return super.clearField(field); + } + + @java.lang.Override + public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return super.clearOneof(oneof); + } + + @java.lang.Override + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { + return super.setRepeatedField(field, index, value); + } + + @java.lang.Override + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { + return super.addRepeatedField(field, value); + } + + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof com.google.cloud.bigquery.biglake.v1alpha1.ListCatalogsResponse) { + return mergeFrom((com.google.cloud.bigquery.biglake.v1alpha1.ListCatalogsResponse) other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom( + com.google.cloud.bigquery.biglake.v1alpha1.ListCatalogsResponse other) { + if (other + == com.google.cloud.bigquery.biglake.v1alpha1.ListCatalogsResponse.getDefaultInstance()) + return this; + if (catalogsBuilder_ == null) { + if (!other.catalogs_.isEmpty()) { + if (catalogs_.isEmpty()) { + catalogs_ = other.catalogs_; + bitField0_ = (bitField0_ & ~0x00000001); + } else { + ensureCatalogsIsMutable(); + catalogs_.addAll(other.catalogs_); + } + onChanged(); + } + } else { + if (!other.catalogs_.isEmpty()) { + if (catalogsBuilder_.isEmpty()) { + catalogsBuilder_.dispose(); + catalogsBuilder_ = null; + catalogs_ = other.catalogs_; + bitField0_ = (bitField0_ & ~0x00000001); + catalogsBuilder_ = + com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders + ? 
getCatalogsFieldBuilder() + : null; + } else { + catalogsBuilder_.addAllMessages(other.catalogs_); + } + } + } + if (!other.getNextPageToken().isEmpty()) { + nextPageToken_ = other.nextPageToken_; + bitField0_ |= 0x00000002; + onChanged(); + } + this.mergeUnknownFields(other.getUnknownFields()); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 10: + { + com.google.cloud.bigquery.biglake.v1alpha1.Catalog m = + input.readMessage( + com.google.cloud.bigquery.biglake.v1alpha1.Catalog.parser(), + extensionRegistry); + if (catalogsBuilder_ == null) { + ensureCatalogsIsMutable(); + catalogs_.add(m); + } else { + catalogsBuilder_.addMessage(m); + } + break; + } // case 10 + case 18: + { + nextPageToken_ = input.readStringRequireUtf8(); + bitField0_ |= 0x00000002; + break; + } // case 18 + default: + { + if (!super.parseUnknownField(input, extensionRegistry, tag)) { + done = true; // was an endgroup tag + } + break; + } // default: + } // switch (tag) + } // while (!done) + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.unwrapIOException(); + } finally { + onChanged(); + } // finally + return this; + } + + private int bitField0_; + + private java.util.List catalogs_ = + java.util.Collections.emptyList(); + + private void ensureCatalogsIsMutable() { + if (!((bitField0_ & 0x00000001) != 0)) { + catalogs_ = + new java.util.ArrayList(catalogs_); + bitField0_ |= 0x00000001; + } + } + + private com.google.protobuf.RepeatedFieldBuilderV3< + com.google.cloud.bigquery.biglake.v1alpha1.Catalog, + com.google.cloud.bigquery.biglake.v1alpha1.Catalog.Builder, + com.google.cloud.bigquery.biglake.v1alpha1.CatalogOrBuilder> + catalogsBuilder_; + + /** + * + * + *
+     * The catalogs from the specified project.
+     * 
+ * + * repeated .google.cloud.bigquery.biglake.v1alpha1.Catalog catalogs = 1; + */ + public java.util.List getCatalogsList() { + if (catalogsBuilder_ == null) { + return java.util.Collections.unmodifiableList(catalogs_); + } else { + return catalogsBuilder_.getMessageList(); + } + } + /** + * + * + *
+     * The catalogs from the specified project.
+     * 
+ * + * repeated .google.cloud.bigquery.biglake.v1alpha1.Catalog catalogs = 1; + */ + public int getCatalogsCount() { + if (catalogsBuilder_ == null) { + return catalogs_.size(); + } else { + return catalogsBuilder_.getCount(); + } + } + /** + * + * + *
+     * The catalogs from the specified project.
+     * 
+ * + * repeated .google.cloud.bigquery.biglake.v1alpha1.Catalog catalogs = 1; + */ + public com.google.cloud.bigquery.biglake.v1alpha1.Catalog getCatalogs(int index) { + if (catalogsBuilder_ == null) { + return catalogs_.get(index); + } else { + return catalogsBuilder_.getMessage(index); + } + } + /** + * + * + *
+     * The catalogs from the specified project.
+     * 
+ * + * repeated .google.cloud.bigquery.biglake.v1alpha1.Catalog catalogs = 1; + */ + public Builder setCatalogs( + int index, com.google.cloud.bigquery.biglake.v1alpha1.Catalog value) { + if (catalogsBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureCatalogsIsMutable(); + catalogs_.set(index, value); + onChanged(); + } else { + catalogsBuilder_.setMessage(index, value); + } + return this; + } + /** + * + * + *
+     * The catalogs from the specified project.
+     * 
+ * + * repeated .google.cloud.bigquery.biglake.v1alpha1.Catalog catalogs = 1; + */ + public Builder setCatalogs( + int index, com.google.cloud.bigquery.biglake.v1alpha1.Catalog.Builder builderForValue) { + if (catalogsBuilder_ == null) { + ensureCatalogsIsMutable(); + catalogs_.set(index, builderForValue.build()); + onChanged(); + } else { + catalogsBuilder_.setMessage(index, builderForValue.build()); + } + return this; + } + /** + * + * + *
+     * The catalogs from the specified project.
+     * 
+ * + * repeated .google.cloud.bigquery.biglake.v1alpha1.Catalog catalogs = 1; + */ + public Builder addCatalogs(com.google.cloud.bigquery.biglake.v1alpha1.Catalog value) { + if (catalogsBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureCatalogsIsMutable(); + catalogs_.add(value); + onChanged(); + } else { + catalogsBuilder_.addMessage(value); + } + return this; + } + /** + * + * + *
+     * The catalogs from the specified project.
+     * 
+ * + * repeated .google.cloud.bigquery.biglake.v1alpha1.Catalog catalogs = 1; + */ + public Builder addCatalogs( + int index, com.google.cloud.bigquery.biglake.v1alpha1.Catalog value) { + if (catalogsBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureCatalogsIsMutable(); + catalogs_.add(index, value); + onChanged(); + } else { + catalogsBuilder_.addMessage(index, value); + } + return this; + } + /** + * + * + *
+     * The catalogs from the specified project.
+     * 
+ * + * repeated .google.cloud.bigquery.biglake.v1alpha1.Catalog catalogs = 1; + */ + public Builder addCatalogs( + com.google.cloud.bigquery.biglake.v1alpha1.Catalog.Builder builderForValue) { + if (catalogsBuilder_ == null) { + ensureCatalogsIsMutable(); + catalogs_.add(builderForValue.build()); + onChanged(); + } else { + catalogsBuilder_.addMessage(builderForValue.build()); + } + return this; + } + /** + * + * + *
+     * The catalogs from the specified project.
+     * 
+ * + * repeated .google.cloud.bigquery.biglake.v1alpha1.Catalog catalogs = 1; + */ + public Builder addCatalogs( + int index, com.google.cloud.bigquery.biglake.v1alpha1.Catalog.Builder builderForValue) { + if (catalogsBuilder_ == null) { + ensureCatalogsIsMutable(); + catalogs_.add(index, builderForValue.build()); + onChanged(); + } else { + catalogsBuilder_.addMessage(index, builderForValue.build()); + } + return this; + } + /** + * + * + *
+     * The catalogs from the specified project.
+     * 
+ * + * repeated .google.cloud.bigquery.biglake.v1alpha1.Catalog catalogs = 1; + */ + public Builder addAllCatalogs( + java.lang.Iterable values) { + if (catalogsBuilder_ == null) { + ensureCatalogsIsMutable(); + com.google.protobuf.AbstractMessageLite.Builder.addAll(values, catalogs_); + onChanged(); + } else { + catalogsBuilder_.addAllMessages(values); + } + return this; + } + /** + * + * + *
+     * The catalogs from the specified project.
+     * 
+ * + * repeated .google.cloud.bigquery.biglake.v1alpha1.Catalog catalogs = 1; + */ + public Builder clearCatalogs() { + if (catalogsBuilder_ == null) { + catalogs_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000001); + onChanged(); + } else { + catalogsBuilder_.clear(); + } + return this; + } + /** + * + * + *
+     * The catalogs from the specified project.
+     * 
+ * + * repeated .google.cloud.bigquery.biglake.v1alpha1.Catalog catalogs = 1; + */ + public Builder removeCatalogs(int index) { + if (catalogsBuilder_ == null) { + ensureCatalogsIsMutable(); + catalogs_.remove(index); + onChanged(); + } else { + catalogsBuilder_.remove(index); + } + return this; + } + /** + * + * + *
+     * The catalogs from the specified project.
+     * 
+ * + * repeated .google.cloud.bigquery.biglake.v1alpha1.Catalog catalogs = 1; + */ + public com.google.cloud.bigquery.biglake.v1alpha1.Catalog.Builder getCatalogsBuilder( + int index) { + return getCatalogsFieldBuilder().getBuilder(index); + } + /** + * + * + *
+     * The catalogs from the specified project.
+     * 
+ * + * repeated .google.cloud.bigquery.biglake.v1alpha1.Catalog catalogs = 1; + */ + public com.google.cloud.bigquery.biglake.v1alpha1.CatalogOrBuilder getCatalogsOrBuilder( + int index) { + if (catalogsBuilder_ == null) { + return catalogs_.get(index); + } else { + return catalogsBuilder_.getMessageOrBuilder(index); + } + } + /** + * + * + *
+     * The catalogs from the specified project.
+     * 
+ * + * repeated .google.cloud.bigquery.biglake.v1alpha1.Catalog catalogs = 1; + */ + public java.util.List + getCatalogsOrBuilderList() { + if (catalogsBuilder_ != null) { + return catalogsBuilder_.getMessageOrBuilderList(); + } else { + return java.util.Collections.unmodifiableList(catalogs_); + } + } + /** + * + * + *
+     * The catalogs from the specified project.
+     * 
+ * + * repeated .google.cloud.bigquery.biglake.v1alpha1.Catalog catalogs = 1; + */ + public com.google.cloud.bigquery.biglake.v1alpha1.Catalog.Builder addCatalogsBuilder() { + return getCatalogsFieldBuilder() + .addBuilder(com.google.cloud.bigquery.biglake.v1alpha1.Catalog.getDefaultInstance()); + } + /** + * + * + *
+     * The catalogs from the specified project.
+     * 
+ * + * repeated .google.cloud.bigquery.biglake.v1alpha1.Catalog catalogs = 1; + */ + public com.google.cloud.bigquery.biglake.v1alpha1.Catalog.Builder addCatalogsBuilder( + int index) { + return getCatalogsFieldBuilder() + .addBuilder( + index, com.google.cloud.bigquery.biglake.v1alpha1.Catalog.getDefaultInstance()); + } + /** + * + * + *
+     * The catalogs from the specified project.
+     * 
+ * + * repeated .google.cloud.bigquery.biglake.v1alpha1.Catalog catalogs = 1; + */ + public java.util.List + getCatalogsBuilderList() { + return getCatalogsFieldBuilder().getBuilderList(); + } + + private com.google.protobuf.RepeatedFieldBuilderV3< + com.google.cloud.bigquery.biglake.v1alpha1.Catalog, + com.google.cloud.bigquery.biglake.v1alpha1.Catalog.Builder, + com.google.cloud.bigquery.biglake.v1alpha1.CatalogOrBuilder> + getCatalogsFieldBuilder() { + if (catalogsBuilder_ == null) { + catalogsBuilder_ = + new com.google.protobuf.RepeatedFieldBuilderV3< + com.google.cloud.bigquery.biglake.v1alpha1.Catalog, + com.google.cloud.bigquery.biglake.v1alpha1.Catalog.Builder, + com.google.cloud.bigquery.biglake.v1alpha1.CatalogOrBuilder>( + catalogs_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean()); + catalogs_ = null; + } + return catalogsBuilder_; + } + + private java.lang.Object nextPageToken_ = ""; + /** + * + * + *
+     * A token, which can be sent as `page_token` to retrieve the next page.
+     * If this field is omitted, there are no subsequent pages.
+     * 
+ * + * string next_page_token = 2; + * + * @return The nextPageToken. + */ + public java.lang.String getNextPageToken() { + java.lang.Object ref = nextPageToken_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + nextPageToken_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + * + * + *
+     * A token, which can be sent as `page_token` to retrieve the next page.
+     * If this field is omitted, there are no subsequent pages.
+     * 
+ * + * string next_page_token = 2; + * + * @return The bytes for nextPageToken. + */ + public com.google.protobuf.ByteString getNextPageTokenBytes() { + java.lang.Object ref = nextPageToken_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); + nextPageToken_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * + * + *
+     * A token, which can be sent as `page_token` to retrieve the next page.
+     * If this field is omitted, there are no subsequent pages.
+     * 
+ * + * string next_page_token = 2; + * + * @param value The nextPageToken to set. + * @return This builder for chaining. + */ + public Builder setNextPageToken(java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + nextPageToken_ = value; + bitField0_ |= 0x00000002; + onChanged(); + return this; + } + /** + * + * + *
+     * A token, which can be sent as `page_token` to retrieve the next page.
+     * If this field is omitted, there are no subsequent pages.
+     * 
+ * + * string next_page_token = 2; + * + * @return This builder for chaining. + */ + public Builder clearNextPageToken() { + nextPageToken_ = getDefaultInstance().getNextPageToken(); + bitField0_ = (bitField0_ & ~0x00000002); + onChanged(); + return this; + } + /** + * + * + *
+     * A token, which can be sent as `page_token` to retrieve the next page.
+     * If this field is omitted, there are no subsequent pages.
+     * 
+ * + * string next_page_token = 2; + * + * @param value The bytes for nextPageToken to set. + * @return This builder for chaining. + */ + public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + nextPageToken_ = value; + bitField0_ |= 0x00000002; + onChanged(); + return this; + } + + @java.lang.Override + public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + // @@protoc_insertion_point(builder_scope:google.cloud.bigquery.biglake.v1alpha1.ListCatalogsResponse) + } + + // @@protoc_insertion_point(class_scope:google.cloud.bigquery.biglake.v1alpha1.ListCatalogsResponse) + private static final com.google.cloud.bigquery.biglake.v1alpha1.ListCatalogsResponse + DEFAULT_INSTANCE; + + static { + DEFAULT_INSTANCE = new com.google.cloud.bigquery.biglake.v1alpha1.ListCatalogsResponse(); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.ListCatalogsResponse + getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + @java.lang.Override + public ListCatalogsResponse parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + Builder builder = newBuilder(); + try { + builder.mergeFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(builder.buildPartial()); + } catch (com.google.protobuf.UninitializedMessageException e) { + throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException(e) + .setUnfinishedMessage(builder.buildPartial()); + } + return builder.buildPartial(); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public com.google.cloud.bigquery.biglake.v1alpha1.ListCatalogsResponse + getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } +} diff --git a/java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/ListCatalogsResponseOrBuilder.java b/java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/ListCatalogsResponseOrBuilder.java new file mode 100644 index 000000000000..efed1bf72962 --- /dev/null +++ b/java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/ListCatalogsResponseOrBuilder.java @@ -0,0 +1,104 @@ +/* + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: google/cloud/bigquery/biglake/v1alpha1/metastore.proto + +package com.google.cloud.bigquery.biglake.v1alpha1; + +public interface ListCatalogsResponseOrBuilder + extends + // @@protoc_insertion_point(interface_extends:google.cloud.bigquery.biglake.v1alpha1.ListCatalogsResponse) + com.google.protobuf.MessageOrBuilder { + + /** + * + * + *
+   * The catalogs from the specified project.
+   * 
+ * + * repeated .google.cloud.bigquery.biglake.v1alpha1.Catalog catalogs = 1; + */ + java.util.List getCatalogsList(); + /** + * + * + *
+   * The catalogs from the specified project.
+   * 
+ * + * repeated .google.cloud.bigquery.biglake.v1alpha1.Catalog catalogs = 1; + */ + com.google.cloud.bigquery.biglake.v1alpha1.Catalog getCatalogs(int index); + /** + * + * + *
+   * The catalogs from the specified project.
+   * 
+ * + * repeated .google.cloud.bigquery.biglake.v1alpha1.Catalog catalogs = 1; + */ + int getCatalogsCount(); + /** + * + * + *
+   * The catalogs from the specified project.
+   * 
+ * + * repeated .google.cloud.bigquery.biglake.v1alpha1.Catalog catalogs = 1; + */ + java.util.List + getCatalogsOrBuilderList(); + /** + * + * + *
+   * The catalogs from the specified project.
+   * 
+ * + * repeated .google.cloud.bigquery.biglake.v1alpha1.Catalog catalogs = 1; + */ + com.google.cloud.bigquery.biglake.v1alpha1.CatalogOrBuilder getCatalogsOrBuilder(int index); + + /** + * + * + *
+   * A token, which can be sent as `page_token` to retrieve the next page.
+   * If this field is omitted, there are no subsequent pages.
+   * 
+ * + * string next_page_token = 2; + * + * @return The nextPageToken. + */ + java.lang.String getNextPageToken(); + /** + * + * + *
+   * A token, which can be sent as `page_token` to retrieve the next page.
+   * If this field is omitted, there are no subsequent pages.
+   * 
+ * + * string next_page_token = 2; + * + * @return The bytes for nextPageToken. + */ + com.google.protobuf.ByteString getNextPageTokenBytes(); +} diff --git a/java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/ListDatabasesRequest.java b/java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/ListDatabasesRequest.java new file mode 100644 index 000000000000..991cad7912e7 --- /dev/null +++ b/java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/ListDatabasesRequest.java @@ -0,0 +1,973 @@ +/* + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: google/cloud/bigquery/biglake/v1alpha1/metastore.proto + +package com.google.cloud.bigquery.biglake.v1alpha1; + +/** + * + * + *
+ * Request message for the ListDatabases method.
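+ * A minimal construction sketch (illustrative only; the project, location, and
+ * catalog IDs below are hypothetical):
+ *
+ *   ListDatabasesRequest request =
+ *       ListDatabasesRequest.newBuilder()
+ *           .setParent("projects/my-project/locations/us/catalogs/my-catalog")
+ *           .setPageSize(50)
+ *           .build();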
+ * 
+ * + * Protobuf type {@code google.cloud.bigquery.biglake.v1alpha1.ListDatabasesRequest} + */ +public final class ListDatabasesRequest extends com.google.protobuf.GeneratedMessageV3 + implements + // @@protoc_insertion_point(message_implements:google.cloud.bigquery.biglake.v1alpha1.ListDatabasesRequest) + ListDatabasesRequestOrBuilder { + private static final long serialVersionUID = 0L; + // Use ListDatabasesRequest.newBuilder() to construct. + private ListDatabasesRequest(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + + private ListDatabasesRequest() { + parent_ = ""; + pageToken_ = ""; + } + + @java.lang.Override + @SuppressWarnings({"unused"}) + protected java.lang.Object newInstance(UnusedPrivateParameter unused) { + return new ListDatabasesRequest(); + } + + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet getUnknownFields() { + return this.unknownFields; + } + + public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { + return com.google.cloud.bigquery.biglake.v1alpha1.MetastoreProto + .internal_static_google_cloud_bigquery_biglake_v1alpha1_ListDatabasesRequest_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return com.google.cloud.bigquery.biglake.v1alpha1.MetastoreProto + .internal_static_google_cloud_bigquery_biglake_v1alpha1_ListDatabasesRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + com.google.cloud.bigquery.biglake.v1alpha1.ListDatabasesRequest.class, + com.google.cloud.bigquery.biglake.v1alpha1.ListDatabasesRequest.Builder.class); + } + + public static final int PARENT_FIELD_NUMBER = 1; + + @SuppressWarnings("serial") + private volatile java.lang.Object parent_ = ""; + /** + * + * + *
+   * Required. The parent, which owns this collection of databases.
+   * Format:
+   * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}
+   * 
+ * + * + * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @return The parent. + */ + @java.lang.Override + public java.lang.String getParent() { + java.lang.Object ref = parent_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + parent_ = s; + return s; + } + } + /** + * + * + *
+   * Required. The parent, which owns this collection of databases.
+   * Format:
+   * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}
+   * 
+ * + * + * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @return The bytes for parent. + */ + @java.lang.Override + public com.google.protobuf.ByteString getParentBytes() { + java.lang.Object ref = parent_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); + parent_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int PAGE_SIZE_FIELD_NUMBER = 2; + private int pageSize_ = 0; + /** + * + * + *
+   * The maximum number of databases to return. The service may return fewer
+   * than this value.
+   * If unspecified, at most 50 databases will be returned.
+   * The maximum value is 1000; values above 1000 will be coerced to 1000.
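+   * For example (hypothetical values): a request with page_size = 5000 is treated as
+   * page_size = 1000, while leaving page_size unset (0) returns at most the default
+   * of 50 databases.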
+   * 
+ * + * int32 page_size = 2; + * + * @return The pageSize. + */ + @java.lang.Override + public int getPageSize() { + return pageSize_; + } + + public static final int PAGE_TOKEN_FIELD_NUMBER = 3; + + @SuppressWarnings("serial") + private volatile java.lang.Object pageToken_ = ""; + /** + * + * + *
+   * A page token, received from a previous `ListDatabases` call.
+   * Provide this to retrieve the subsequent page.
+   * When paginating, all other parameters provided to `ListDatabases` must
+   * match the call that provided the page token.
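+   * For example, if the call that returned the token used page_size = 10, the
+   * follow-up call supplying that token should also use page_size = 10.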
+   * 
+ * + * string page_token = 3; + * + * @return The pageToken. + */ + @java.lang.Override + public java.lang.String getPageToken() { + java.lang.Object ref = pageToken_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + pageToken_ = s; + return s; + } + } + /** + * + * + *
+   * A page token, received from a previous `ListDatabases` call.
+   * Provide this to retrieve the subsequent page.
+   * When paginating, all other parameters provided to `ListDatabases` must
+   * match the call that provided the page token.
+   * 
+ * + * string page_token = 3; + * + * @return The bytes for pageToken. + */ + @java.lang.Override + public com.google.protobuf.ByteString getPageTokenBytes() { + java.lang.Object ref = pageToken_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); + pageToken_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + private byte memoizedIsInitialized = -1; + + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_); + } + if (pageSize_ != 0) { + output.writeInt32(2, pageSize_); + } + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 3, pageToken_); + } + getUnknownFields().writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_); + } + if (pageSize_ != 0) { + size += com.google.protobuf.CodedOutputStream.computeInt32Size(2, pageSize_); + } + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, pageToken_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof com.google.cloud.bigquery.biglake.v1alpha1.ListDatabasesRequest)) { + return super.equals(obj); + } + com.google.cloud.bigquery.biglake.v1alpha1.ListDatabasesRequest other = + (com.google.cloud.bigquery.biglake.v1alpha1.ListDatabasesRequest) obj; + + if (!getParent().equals(other.getParent())) return false; + if (getPageSize() != other.getPageSize()) return false; + if (!getPageToken().equals(other.getPageToken())) return false; + if (!getUnknownFields().equals(other.getUnknownFields())) return false; + return true; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + hash = (37 * hash) + PARENT_FIELD_NUMBER; + hash = (53 * hash) + getParent().hashCode(); + hash = (37 * hash) + PAGE_SIZE_FIELD_NUMBER; + hash = (53 * hash) + getPageSize(); + hash = (37 * hash) + PAGE_TOKEN_FIELD_NUMBER; + hash = (53 * hash) + getPageToken().hashCode(); + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.ListDatabasesRequest parseFrom( + java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.ListDatabasesRequest parseFrom( + java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws 
com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.ListDatabasesRequest parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.ListDatabasesRequest parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.ListDatabasesRequest parseFrom( + byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.ListDatabasesRequest parseFrom( + byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.ListDatabasesRequest parseFrom( + java.io.InputStream input) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.ListDatabasesRequest parseFrom( + java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException( + PARSER, input, extensionRegistry); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.ListDatabasesRequest parseDelimitedFrom( + java.io.InputStream input) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.ListDatabasesRequest parseDelimitedFrom( + java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( + PARSER, input, extensionRegistry); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.ListDatabasesRequest parseFrom( + com.google.protobuf.CodedInputStream input) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.ListDatabasesRequest parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException( + PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { + return newBuilder(); + } + + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + + public static Builder newBuilder( + com.google.cloud.bigquery.biglake.v1alpha1.ListDatabasesRequest prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * + * + *
+   * Request message for the ListDatabases method.
+   * 
+ * + * Protobuf type {@code google.cloud.bigquery.biglake.v1alpha1.ListDatabasesRequest} + */ + public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder + implements + // @@protoc_insertion_point(builder_implements:google.cloud.bigquery.biglake.v1alpha1.ListDatabasesRequest) + com.google.cloud.bigquery.biglake.v1alpha1.ListDatabasesRequestOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { + return com.google.cloud.bigquery.biglake.v1alpha1.MetastoreProto + .internal_static_google_cloud_bigquery_biglake_v1alpha1_ListDatabasesRequest_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return com.google.cloud.bigquery.biglake.v1alpha1.MetastoreProto + .internal_static_google_cloud_bigquery_biglake_v1alpha1_ListDatabasesRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + com.google.cloud.bigquery.biglake.v1alpha1.ListDatabasesRequest.class, + com.google.cloud.bigquery.biglake.v1alpha1.ListDatabasesRequest.Builder.class); + } + + // Construct using com.google.cloud.bigquery.biglake.v1alpha1.ListDatabasesRequest.newBuilder() + private Builder() {} + + private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + } + + @java.lang.Override + public Builder clear() { + super.clear(); + bitField0_ = 0; + parent_ = ""; + pageSize_ = 0; + pageToken_ = ""; + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { + return com.google.cloud.bigquery.biglake.v1alpha1.MetastoreProto + .internal_static_google_cloud_bigquery_biglake_v1alpha1_ListDatabasesRequest_descriptor; + } + + @java.lang.Override + public com.google.cloud.bigquery.biglake.v1alpha1.ListDatabasesRequest + getDefaultInstanceForType() { + return com.google.cloud.bigquery.biglake.v1alpha1.ListDatabasesRequest.getDefaultInstance(); + } + + @java.lang.Override + public com.google.cloud.bigquery.biglake.v1alpha1.ListDatabasesRequest build() { + com.google.cloud.bigquery.biglake.v1alpha1.ListDatabasesRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public com.google.cloud.bigquery.biglake.v1alpha1.ListDatabasesRequest buildPartial() { + com.google.cloud.bigquery.biglake.v1alpha1.ListDatabasesRequest result = + new com.google.cloud.bigquery.biglake.v1alpha1.ListDatabasesRequest(this); + if (bitField0_ != 0) { + buildPartial0(result); + } + onBuilt(); + return result; + } + + private void buildPartial0( + com.google.cloud.bigquery.biglake.v1alpha1.ListDatabasesRequest result) { + int from_bitField0_ = bitField0_; + if (((from_bitField0_ & 0x00000001) != 0)) { + result.parent_ = parent_; + } + if (((from_bitField0_ & 0x00000002) != 0)) { + result.pageSize_ = pageSize_; + } + if (((from_bitField0_ & 0x00000004) != 0)) { + result.pageToken_ = pageToken_; + } + } + + @java.lang.Override + public Builder clone() { + return super.clone(); + } + + @java.lang.Override + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { + return super.setField(field, value); + } + + @java.lang.Override + public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { + return super.clearField(field); + } + + @java.lang.Override + public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor 
oneof) { + return super.clearOneof(oneof); + } + + @java.lang.Override + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { + return super.setRepeatedField(field, index, value); + } + + @java.lang.Override + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { + return super.addRepeatedField(field, value); + } + + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof com.google.cloud.bigquery.biglake.v1alpha1.ListDatabasesRequest) { + return mergeFrom((com.google.cloud.bigquery.biglake.v1alpha1.ListDatabasesRequest) other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom( + com.google.cloud.bigquery.biglake.v1alpha1.ListDatabasesRequest other) { + if (other + == com.google.cloud.bigquery.biglake.v1alpha1.ListDatabasesRequest.getDefaultInstance()) + return this; + if (!other.getParent().isEmpty()) { + parent_ = other.parent_; + bitField0_ |= 0x00000001; + onChanged(); + } + if (other.getPageSize() != 0) { + setPageSize(other.getPageSize()); + } + if (!other.getPageToken().isEmpty()) { + pageToken_ = other.pageToken_; + bitField0_ |= 0x00000004; + onChanged(); + } + this.mergeUnknownFields(other.getUnknownFields()); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 10: + { + parent_ = input.readStringRequireUtf8(); + bitField0_ |= 0x00000001; + break; + } // case 10 + case 16: + { + pageSize_ = input.readInt32(); + bitField0_ |= 0x00000002; + break; + } // case 16 + case 26: + { + pageToken_ = input.readStringRequireUtf8(); + bitField0_ |= 0x00000004; + break; + } // case 26 + default: + { + if (!super.parseUnknownField(input, extensionRegistry, tag)) { + done = true; // was an endgroup tag + } + break; + } // default: + } // switch (tag) + } // while (!done) + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.unwrapIOException(); + } finally { + onChanged(); + } // finally + return this; + } + + private int bitField0_; + + private java.lang.Object parent_ = ""; + /** + * + * + *
+     * Required. The parent, which owns this collection of databases.
+     * Format:
+     * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}
+     * 
+ * + * + * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @return The parent. + */ + public java.lang.String getParent() { + java.lang.Object ref = parent_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + parent_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + * + * + *
+     * Required. The parent, which owns this collection of databases.
+     * Format:
+     * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}
+     * 
+ * + * + * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @return The bytes for parent. + */ + public com.google.protobuf.ByteString getParentBytes() { + java.lang.Object ref = parent_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); + parent_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * + * + *
+     * Required. The parent, which owns this collection of databases.
+     * Format:
+     * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}
+     * 
+ * + * + * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @param value The parent to set. + * @return This builder for chaining. + */ + public Builder setParent(java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + parent_ = value; + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + /** + * + * + *
+     * Required. The parent, which owns this collection of databases.
+     * Format:
+     * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}
+     * 
+ * + * + * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @return This builder for chaining. + */ + public Builder clearParent() { + parent_ = getDefaultInstance().getParent(); + bitField0_ = (bitField0_ & ~0x00000001); + onChanged(); + return this; + } + /** + * + * + *
+     * Required. The parent, which owns this collection of databases.
+     * Format:
+     * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}
+     * 
+ * + * + * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @param value The bytes for parent to set. + * @return This builder for chaining. + */ + public Builder setParentBytes(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + parent_ = value; + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + + private int pageSize_; + /** + * + * + *
+     * The maximum number of databases to return. The service may return fewer
+     * than this value.
+     * If unspecified, at most 50 databases will be returned.
+     * The maximum value is 1000; values above 1000 will be coerced to 1000.
+     * 
+ * + * int32 page_size = 2; + * + * @return The pageSize. + */ + @java.lang.Override + public int getPageSize() { + return pageSize_; + } + /** + * + * + *
+     * The maximum number of databases to return. The service may return fewer
+     * than this value.
+     * If unspecified, at most 50 databases will be returned.
+     * The maximum value is 1000; values above 1000 will be coerced to 1000.
+     * 
+ * + * int32 page_size = 2; + * + * @param value The pageSize to set. + * @return This builder for chaining. + */ + public Builder setPageSize(int value) { + + pageSize_ = value; + bitField0_ |= 0x00000002; + onChanged(); + return this; + } + /** + * + * + *
+     * The maximum number of databases to return. The service may return fewer
+     * than this value.
+     * If unspecified, at most 50 databases will be returned.
+     * The maximum value is 1000; values above 1000 will be coerced to 1000.
+     * 
+ * + * int32 page_size = 2; + * + * @return This builder for chaining. + */ + public Builder clearPageSize() { + bitField0_ = (bitField0_ & ~0x00000002); + pageSize_ = 0; + onChanged(); + return this; + } + + private java.lang.Object pageToken_ = ""; + /** + * + * + *
+     * A page token, received from a previous `ListDatabases` call.
+     * Provide this to retrieve the subsequent page.
+     * When paginating, all other parameters provided to `ListDatabases` must
+     * match the call that provided the page token.
+     * 
+ * + * string page_token = 3; + * + * @return The pageToken. + */ + public java.lang.String getPageToken() { + java.lang.Object ref = pageToken_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + pageToken_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + * + * + *
+     * A page token, received from a previous `ListDatabases` call.
+     * Provide this to retrieve the subsequent page.
+     * When paginating, all other parameters provided to `ListDatabases` must
+     * match the call that provided the page token.
+     * 
+ * + * string page_token = 3; + * + * @return The bytes for pageToken. + */ + public com.google.protobuf.ByteString getPageTokenBytes() { + java.lang.Object ref = pageToken_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); + pageToken_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * + * + *
+     * A page token, received from a previous `ListDatabases` call.
+     * Provide this to retrieve the subsequent page.
+     * When paginating, all other parameters provided to `ListDatabases` must
+     * match the call that provided the page token.
+     * 
+ * + * string page_token = 3; + * + * @param value The pageToken to set. + * @return This builder for chaining. + */ + public Builder setPageToken(java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + pageToken_ = value; + bitField0_ |= 0x00000004; + onChanged(); + return this; + } + /** + * + * + *
+     * A page token, received from a previous `ListDatabases` call.
+     * Provide this to retrieve the subsequent page.
+     * When paginating, all other parameters provided to `ListDatabases` must
+     * match the call that provided the page token.
+     * 
+ * + * string page_token = 3; + * + * @return This builder for chaining. + */ + public Builder clearPageToken() { + pageToken_ = getDefaultInstance().getPageToken(); + bitField0_ = (bitField0_ & ~0x00000004); + onChanged(); + return this; + } + /** + * + * + *
+     * A page token, received from a previous `ListDatabases` call.
+     * Provide this to retrieve the subsequent page.
+     * When paginating, all other parameters provided to `ListDatabases` must
+     * match the call that provided the page token.
+     * 
+ * + * string page_token = 3; + * + * @param value The bytes for pageToken to set. + * @return This builder for chaining. + */ + public Builder setPageTokenBytes(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + pageToken_ = value; + bitField0_ |= 0x00000004; + onChanged(); + return this; + } + + @java.lang.Override + public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + // @@protoc_insertion_point(builder_scope:google.cloud.bigquery.biglake.v1alpha1.ListDatabasesRequest) + } + + // @@protoc_insertion_point(class_scope:google.cloud.bigquery.biglake.v1alpha1.ListDatabasesRequest) + private static final com.google.cloud.bigquery.biglake.v1alpha1.ListDatabasesRequest + DEFAULT_INSTANCE; + + static { + DEFAULT_INSTANCE = new com.google.cloud.bigquery.biglake.v1alpha1.ListDatabasesRequest(); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.ListDatabasesRequest + getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + @java.lang.Override + public ListDatabasesRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + Builder builder = newBuilder(); + try { + builder.mergeFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(builder.buildPartial()); + } catch (com.google.protobuf.UninitializedMessageException e) { + throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException(e) + .setUnfinishedMessage(builder.buildPartial()); + } + return builder.buildPartial(); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public com.google.cloud.bigquery.biglake.v1alpha1.ListDatabasesRequest + getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } +} diff --git a/java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/ListDatabasesRequestOrBuilder.java b/java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/ListDatabasesRequestOrBuilder.java new file mode 100644 index 000000000000..c510296095d3 --- /dev/null +++ b/java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/ListDatabasesRequestOrBuilder.java @@ -0,0 +1,105 @@ +/* + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: google/cloud/bigquery/biglake/v1alpha1/metastore.proto + +package com.google.cloud.bigquery.biglake.v1alpha1; + +public interface ListDatabasesRequestOrBuilder + extends + // @@protoc_insertion_point(interface_extends:google.cloud.bigquery.biglake.v1alpha1.ListDatabasesRequest) + com.google.protobuf.MessageOrBuilder { + + /** + * + * + *
+   * Required. The parent, which owns this collection of databases.
+   * Format:
+   * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}
+   * 
+ * + * + * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @return The parent. + */ + java.lang.String getParent(); + /** + * + * + *
+   * Required. The parent, which owns this collection of databases.
+   * Format:
+   * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}
+   * 
+ * + * + * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @return The bytes for parent. + */ + com.google.protobuf.ByteString getParentBytes(); + + /** + * + * + *
+   * The maximum number of databases to return. The service may return fewer
+   * than this value.
+   * If unspecified, at most 50 databases will be returned.
+   * The maximum value is 1000; values above 1000 will be coerced to 1000.
+   * 
+ * + * int32 page_size = 2; + * + * @return The pageSize. + */ + int getPageSize(); + + /** + * + * + *
+   * A page token, received from a previous `ListDatabases` call.
+   * Provide this to retrieve the subsequent page.
+   * When paginating, all other parameters provided to `ListDatabases` must
+   * match the call that provided the page token.
+   * 
+ * + * string page_token = 3; + * + * @return The pageToken. + */ + java.lang.String getPageToken(); + /** + * + * + *
+   * A page token, received from a previous `ListDatabases` call.
+   * Provide this to retrieve the subsequent page.
+   * When paginating, all other parameters provided to `ListDatabases` must
+   * match the call that provided the page token.
+   * 
+ * + * string page_token = 3; + * + * @return The bytes for pageToken. + */ + com.google.protobuf.ByteString getPageTokenBytes(); +} diff --git a/java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/ListDatabasesResponse.java b/java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/ListDatabasesResponse.java new file mode 100644 index 000000000000..3d7f28342465 --- /dev/null +++ b/java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/ListDatabasesResponse.java @@ -0,0 +1,1148 @@ +/* + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: google/cloud/bigquery/biglake/v1alpha1/metastore.proto + +package com.google.cloud.bigquery.biglake.v1alpha1; + +/** + * + * + *
+ * Response message for the ListDatabases method.
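+ * A minimal consumption sketch (illustrative only; `response` denotes an instance of
+ * this class):
+ *
+ *   for (Database database : response.getDatabasesList()) {
+ *     // handle each returned database
+ *   }
+ *   String nextPageToken = response.getNextPageToken(); // empty when no pages remain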
+ * 
+ * + * Protobuf type {@code google.cloud.bigquery.biglake.v1alpha1.ListDatabasesResponse} + */ +public final class ListDatabasesResponse extends com.google.protobuf.GeneratedMessageV3 + implements + // @@protoc_insertion_point(message_implements:google.cloud.bigquery.biglake.v1alpha1.ListDatabasesResponse) + ListDatabasesResponseOrBuilder { + private static final long serialVersionUID = 0L; + // Use ListDatabasesResponse.newBuilder() to construct. + private ListDatabasesResponse(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + + private ListDatabasesResponse() { + databases_ = java.util.Collections.emptyList(); + nextPageToken_ = ""; + } + + @java.lang.Override + @SuppressWarnings({"unused"}) + protected java.lang.Object newInstance(UnusedPrivateParameter unused) { + return new ListDatabasesResponse(); + } + + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet getUnknownFields() { + return this.unknownFields; + } + + public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { + return com.google.cloud.bigquery.biglake.v1alpha1.MetastoreProto + .internal_static_google_cloud_bigquery_biglake_v1alpha1_ListDatabasesResponse_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return com.google.cloud.bigquery.biglake.v1alpha1.MetastoreProto + .internal_static_google_cloud_bigquery_biglake_v1alpha1_ListDatabasesResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + com.google.cloud.bigquery.biglake.v1alpha1.ListDatabasesResponse.class, + com.google.cloud.bigquery.biglake.v1alpha1.ListDatabasesResponse.Builder.class); + } + + public static final int DATABASES_FIELD_NUMBER = 1; + + @SuppressWarnings("serial") + private java.util.List databases_; + /** + * + * + *
+   * The databases from the specified catalog.
+   * 
+ * + * repeated .google.cloud.bigquery.biglake.v1alpha1.Database databases = 1; + */ + @java.lang.Override + public java.util.List getDatabasesList() { + return databases_; + } + /** + * + * + *
+   * The databases from the specified catalog.
+   * 
+ * + * repeated .google.cloud.bigquery.biglake.v1alpha1.Database databases = 1; + */ + @java.lang.Override + public java.util.List + getDatabasesOrBuilderList() { + return databases_; + } + /** + * + * + *
+   * The databases from the specified catalog.
+   * 
+ * + * repeated .google.cloud.bigquery.biglake.v1alpha1.Database databases = 1; + */ + @java.lang.Override + public int getDatabasesCount() { + return databases_.size(); + } + /** + * + * + *
+   * The databases from the specified catalog.
+   * 
+ * + * repeated .google.cloud.bigquery.biglake.v1alpha1.Database databases = 1; + */ + @java.lang.Override + public com.google.cloud.bigquery.biglake.v1alpha1.Database getDatabases(int index) { + return databases_.get(index); + } + /** + * + * + *
+   * The databases from the specified catalog.
+   * 
+ * + * repeated .google.cloud.bigquery.biglake.v1alpha1.Database databases = 1; + */ + @java.lang.Override + public com.google.cloud.bigquery.biglake.v1alpha1.DatabaseOrBuilder getDatabasesOrBuilder( + int index) { + return databases_.get(index); + } + + public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2; + + @SuppressWarnings("serial") + private volatile java.lang.Object nextPageToken_ = ""; + /** + * + * + *
+   * A token, which can be sent as `page_token` to retrieve the next page.
+   * If this field is omitted, there are no subsequent pages.
+   * 
+ * + * string next_page_token = 2; + * + * @return The nextPageToken. + */ + @java.lang.Override + public java.lang.String getNextPageToken() { + java.lang.Object ref = nextPageToken_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + nextPageToken_ = s; + return s; + } + } + /** + * + * + *
+   * A token, which can be sent as `page_token` to retrieve the next page.
+   * If this field is omitted, there are no subsequent pages.
+   * 
+ * + * string next_page_token = 2; + * + * @return The bytes for nextPageToken. + */ + @java.lang.Override + public com.google.protobuf.ByteString getNextPageTokenBytes() { + java.lang.Object ref = nextPageToken_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); + nextPageToken_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + private byte memoizedIsInitialized = -1; + + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { + for (int i = 0; i < databases_.size(); i++) { + output.writeMessage(1, databases_.get(i)); + } + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_); + } + getUnknownFields().writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + for (int i = 0; i < databases_.size(); i++) { + size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, databases_.get(i)); + } + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof com.google.cloud.bigquery.biglake.v1alpha1.ListDatabasesResponse)) { + return super.equals(obj); + } + com.google.cloud.bigquery.biglake.v1alpha1.ListDatabasesResponse other = + (com.google.cloud.bigquery.biglake.v1alpha1.ListDatabasesResponse) obj; + + if (!getDatabasesList().equals(other.getDatabasesList())) return false; + if (!getNextPageToken().equals(other.getNextPageToken())) return false; + if (!getUnknownFields().equals(other.getUnknownFields())) return false; + return true; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + if (getDatabasesCount() > 0) { + hash = (37 * hash) + DATABASES_FIELD_NUMBER; + hash = (53 * hash) + getDatabasesList().hashCode(); + } + hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER; + hash = (53 * hash) + getNextPageToken().hashCode(); + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.ListDatabasesResponse parseFrom( + java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.ListDatabasesResponse parseFrom( + java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.ListDatabasesResponse parseFrom( + com.google.protobuf.ByteString data) + throws 
com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.ListDatabasesResponse parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.ListDatabasesResponse parseFrom( + byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.ListDatabasesResponse parseFrom( + byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.ListDatabasesResponse parseFrom( + java.io.InputStream input) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.ListDatabasesResponse parseFrom( + java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException( + PARSER, input, extensionRegistry); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.ListDatabasesResponse parseDelimitedFrom( + java.io.InputStream input) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.ListDatabasesResponse parseDelimitedFrom( + java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( + PARSER, input, extensionRegistry); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.ListDatabasesResponse parseFrom( + com.google.protobuf.CodedInputStream input) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.ListDatabasesResponse parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException( + PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { + return newBuilder(); + } + + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + + public static Builder newBuilder( + com.google.cloud.bigquery.biglake.v1alpha1.ListDatabasesResponse prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * + * + *
+   * Response message for the ListDatabases method.
+   * 
+ * + * Protobuf type {@code google.cloud.bigquery.biglake.v1alpha1.ListDatabasesResponse} + */ + public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder + implements + // @@protoc_insertion_point(builder_implements:google.cloud.bigquery.biglake.v1alpha1.ListDatabasesResponse) + com.google.cloud.bigquery.biglake.v1alpha1.ListDatabasesResponseOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { + return com.google.cloud.bigquery.biglake.v1alpha1.MetastoreProto + .internal_static_google_cloud_bigquery_biglake_v1alpha1_ListDatabasesResponse_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return com.google.cloud.bigquery.biglake.v1alpha1.MetastoreProto + .internal_static_google_cloud_bigquery_biglake_v1alpha1_ListDatabasesResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + com.google.cloud.bigquery.biglake.v1alpha1.ListDatabasesResponse.class, + com.google.cloud.bigquery.biglake.v1alpha1.ListDatabasesResponse.Builder.class); + } + + // Construct using com.google.cloud.bigquery.biglake.v1alpha1.ListDatabasesResponse.newBuilder() + private Builder() {} + + private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + } + + @java.lang.Override + public Builder clear() { + super.clear(); + bitField0_ = 0; + if (databasesBuilder_ == null) { + databases_ = java.util.Collections.emptyList(); + } else { + databases_ = null; + databasesBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + nextPageToken_ = ""; + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { + return com.google.cloud.bigquery.biglake.v1alpha1.MetastoreProto + .internal_static_google_cloud_bigquery_biglake_v1alpha1_ListDatabasesResponse_descriptor; + } + + @java.lang.Override + public com.google.cloud.bigquery.biglake.v1alpha1.ListDatabasesResponse + getDefaultInstanceForType() { + return com.google.cloud.bigquery.biglake.v1alpha1.ListDatabasesResponse.getDefaultInstance(); + } + + @java.lang.Override + public com.google.cloud.bigquery.biglake.v1alpha1.ListDatabasesResponse build() { + com.google.cloud.bigquery.biglake.v1alpha1.ListDatabasesResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public com.google.cloud.bigquery.biglake.v1alpha1.ListDatabasesResponse buildPartial() { + com.google.cloud.bigquery.biglake.v1alpha1.ListDatabasesResponse result = + new com.google.cloud.bigquery.biglake.v1alpha1.ListDatabasesResponse(this); + buildPartialRepeatedFields(result); + if (bitField0_ != 0) { + buildPartial0(result); + } + onBuilt(); + return result; + } + + private void buildPartialRepeatedFields( + com.google.cloud.bigquery.biglake.v1alpha1.ListDatabasesResponse result) { + if (databasesBuilder_ == null) { + if (((bitField0_ & 0x00000001) != 0)) { + databases_ = java.util.Collections.unmodifiableList(databases_); + bitField0_ = (bitField0_ & ~0x00000001); + } + result.databases_ = databases_; + } else { + result.databases_ = databasesBuilder_.build(); + } + } + + private void buildPartial0( + com.google.cloud.bigquery.biglake.v1alpha1.ListDatabasesResponse result) { + int from_bitField0_ = bitField0_; + if (((from_bitField0_ & 0x00000002) != 0)) { + result.nextPageToken_ = nextPageToken_; + } + } + + @java.lang.Override 
+ public Builder clone() { + return super.clone(); + } + + @java.lang.Override + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { + return super.setField(field, value); + } + + @java.lang.Override + public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { + return super.clearField(field); + } + + @java.lang.Override + public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return super.clearOneof(oneof); + } + + @java.lang.Override + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { + return super.setRepeatedField(field, index, value); + } + + @java.lang.Override + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { + return super.addRepeatedField(field, value); + } + + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof com.google.cloud.bigquery.biglake.v1alpha1.ListDatabasesResponse) { + return mergeFrom((com.google.cloud.bigquery.biglake.v1alpha1.ListDatabasesResponse) other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom( + com.google.cloud.bigquery.biglake.v1alpha1.ListDatabasesResponse other) { + if (other + == com.google.cloud.bigquery.biglake.v1alpha1.ListDatabasesResponse.getDefaultInstance()) + return this; + if (databasesBuilder_ == null) { + if (!other.databases_.isEmpty()) { + if (databases_.isEmpty()) { + databases_ = other.databases_; + bitField0_ = (bitField0_ & ~0x00000001); + } else { + ensureDatabasesIsMutable(); + databases_.addAll(other.databases_); + } + onChanged(); + } + } else { + if (!other.databases_.isEmpty()) { + if (databasesBuilder_.isEmpty()) { + databasesBuilder_.dispose(); + databasesBuilder_ = null; + databases_ = other.databases_; + bitField0_ = (bitField0_ & ~0x00000001); + databasesBuilder_ = + com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders + ? 
getDatabasesFieldBuilder() + : null; + } else { + databasesBuilder_.addAllMessages(other.databases_); + } + } + } + if (!other.getNextPageToken().isEmpty()) { + nextPageToken_ = other.nextPageToken_; + bitField0_ |= 0x00000002; + onChanged(); + } + this.mergeUnknownFields(other.getUnknownFields()); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 10: + { + com.google.cloud.bigquery.biglake.v1alpha1.Database m = + input.readMessage( + com.google.cloud.bigquery.biglake.v1alpha1.Database.parser(), + extensionRegistry); + if (databasesBuilder_ == null) { + ensureDatabasesIsMutable(); + databases_.add(m); + } else { + databasesBuilder_.addMessage(m); + } + break; + } // case 10 + case 18: + { + nextPageToken_ = input.readStringRequireUtf8(); + bitField0_ |= 0x00000002; + break; + } // case 18 + default: + { + if (!super.parseUnknownField(input, extensionRegistry, tag)) { + done = true; // was an endgroup tag + } + break; + } // default: + } // switch (tag) + } // while (!done) + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.unwrapIOException(); + } finally { + onChanged(); + } // finally + return this; + } + + private int bitField0_; + + private java.util.List databases_ = + java.util.Collections.emptyList(); + + private void ensureDatabasesIsMutable() { + if (!((bitField0_ & 0x00000001) != 0)) { + databases_ = + new java.util.ArrayList( + databases_); + bitField0_ |= 0x00000001; + } + } + + private com.google.protobuf.RepeatedFieldBuilderV3< + com.google.cloud.bigquery.biglake.v1alpha1.Database, + com.google.cloud.bigquery.biglake.v1alpha1.Database.Builder, + com.google.cloud.bigquery.biglake.v1alpha1.DatabaseOrBuilder> + databasesBuilder_; + + /** + * + * + *
+     * The databases from the specified catalog.
+     * </pre>
+ * + * repeated .google.cloud.bigquery.biglake.v1alpha1.Database databases = 1; + */ + public java.util.List getDatabasesList() { + if (databasesBuilder_ == null) { + return java.util.Collections.unmodifiableList(databases_); + } else { + return databasesBuilder_.getMessageList(); + } + } + /** + * + * + *
+     * The databases from the specified catalog.
+     * </pre>
+ * + * repeated .google.cloud.bigquery.biglake.v1alpha1.Database databases = 1; + */ + public int getDatabasesCount() { + if (databasesBuilder_ == null) { + return databases_.size(); + } else { + return databasesBuilder_.getCount(); + } + } + /** + * + * + *
+     * The databases from the specified catalog.
+     * </pre>
+ * + * repeated .google.cloud.bigquery.biglake.v1alpha1.Database databases = 1; + */ + public com.google.cloud.bigquery.biglake.v1alpha1.Database getDatabases(int index) { + if (databasesBuilder_ == null) { + return databases_.get(index); + } else { + return databasesBuilder_.getMessage(index); + } + } + /** + * + * + *
+     * The databases from the specified catalog.
+     * </pre>
+ * + * repeated .google.cloud.bigquery.biglake.v1alpha1.Database databases = 1; + */ + public Builder setDatabases( + int index, com.google.cloud.bigquery.biglake.v1alpha1.Database value) { + if (databasesBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureDatabasesIsMutable(); + databases_.set(index, value); + onChanged(); + } else { + databasesBuilder_.setMessage(index, value); + } + return this; + } + /** + * + * + *
+     * The databases from the specified catalog.
+     * </pre>
+ * + * repeated .google.cloud.bigquery.biglake.v1alpha1.Database databases = 1; + */ + public Builder setDatabases( + int index, com.google.cloud.bigquery.biglake.v1alpha1.Database.Builder builderForValue) { + if (databasesBuilder_ == null) { + ensureDatabasesIsMutable(); + databases_.set(index, builderForValue.build()); + onChanged(); + } else { + databasesBuilder_.setMessage(index, builderForValue.build()); + } + return this; + } + /** + * + * + *
+     * The databases from the specified catalog.
+     * </pre>
+ * + * repeated .google.cloud.bigquery.biglake.v1alpha1.Database databases = 1; + */ + public Builder addDatabases(com.google.cloud.bigquery.biglake.v1alpha1.Database value) { + if (databasesBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureDatabasesIsMutable(); + databases_.add(value); + onChanged(); + } else { + databasesBuilder_.addMessage(value); + } + return this; + } + /** + * + * + *
+     * The databases from the specified catalog.
+     * </pre>
+ * + * repeated .google.cloud.bigquery.biglake.v1alpha1.Database databases = 1; + */ + public Builder addDatabases( + int index, com.google.cloud.bigquery.biglake.v1alpha1.Database value) { + if (databasesBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureDatabasesIsMutable(); + databases_.add(index, value); + onChanged(); + } else { + databasesBuilder_.addMessage(index, value); + } + return this; + } + /** + * + * + *
+     * The databases from the specified catalog.
+     * </pre>
+ * + * repeated .google.cloud.bigquery.biglake.v1alpha1.Database databases = 1; + */ + public Builder addDatabases( + com.google.cloud.bigquery.biglake.v1alpha1.Database.Builder builderForValue) { + if (databasesBuilder_ == null) { + ensureDatabasesIsMutable(); + databases_.add(builderForValue.build()); + onChanged(); + } else { + databasesBuilder_.addMessage(builderForValue.build()); + } + return this; + } + /** + * + * + *
+     * The databases from the specified catalog.
+     * </pre>
+ * + * repeated .google.cloud.bigquery.biglake.v1alpha1.Database databases = 1; + */ + public Builder addDatabases( + int index, com.google.cloud.bigquery.biglake.v1alpha1.Database.Builder builderForValue) { + if (databasesBuilder_ == null) { + ensureDatabasesIsMutable(); + databases_.add(index, builderForValue.build()); + onChanged(); + } else { + databasesBuilder_.addMessage(index, builderForValue.build()); + } + return this; + } + /** + * + * + *
+     * The databases from the specified catalog.
+     * </pre>
+ * + * repeated .google.cloud.bigquery.biglake.v1alpha1.Database databases = 1; + */ + public Builder addAllDatabases( + java.lang.Iterable values) { + if (databasesBuilder_ == null) { + ensureDatabasesIsMutable(); + com.google.protobuf.AbstractMessageLite.Builder.addAll(values, databases_); + onChanged(); + } else { + databasesBuilder_.addAllMessages(values); + } + return this; + } + /** + * + * + *
+     * The databases from the specified catalog.
+     * </pre>
+ * + * repeated .google.cloud.bigquery.biglake.v1alpha1.Database databases = 1; + */ + public Builder clearDatabases() { + if (databasesBuilder_ == null) { + databases_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000001); + onChanged(); + } else { + databasesBuilder_.clear(); + } + return this; + } + /** + * + * + *
+     * The databases from the specified catalog.
+     * </pre>
+ * + * repeated .google.cloud.bigquery.biglake.v1alpha1.Database databases = 1; + */ + public Builder removeDatabases(int index) { + if (databasesBuilder_ == null) { + ensureDatabasesIsMutable(); + databases_.remove(index); + onChanged(); + } else { + databasesBuilder_.remove(index); + } + return this; + } + /** + * + * + *
+     * The databases from the specified catalog.
+     * </pre>
+ * + * repeated .google.cloud.bigquery.biglake.v1alpha1.Database databases = 1; + */ + public com.google.cloud.bigquery.biglake.v1alpha1.Database.Builder getDatabasesBuilder( + int index) { + return getDatabasesFieldBuilder().getBuilder(index); + } + /** + * + * + *
+     * The databases from the specified catalog.
+     * </pre>
+ * + * repeated .google.cloud.bigquery.biglake.v1alpha1.Database databases = 1; + */ + public com.google.cloud.bigquery.biglake.v1alpha1.DatabaseOrBuilder getDatabasesOrBuilder( + int index) { + if (databasesBuilder_ == null) { + return databases_.get(index); + } else { + return databasesBuilder_.getMessageOrBuilder(index); + } + } + /** + * + * + *
+     * The databases from the specified catalog.
+     * </pre>
+ * + * repeated .google.cloud.bigquery.biglake.v1alpha1.Database databases = 1; + */ + public java.util.List + getDatabasesOrBuilderList() { + if (databasesBuilder_ != null) { + return databasesBuilder_.getMessageOrBuilderList(); + } else { + return java.util.Collections.unmodifiableList(databases_); + } + } + /** + * + * + *
+     * The databases from the specified catalog.
+     * </pre>
+ * + * repeated .google.cloud.bigquery.biglake.v1alpha1.Database databases = 1; + */ + public com.google.cloud.bigquery.biglake.v1alpha1.Database.Builder addDatabasesBuilder() { + return getDatabasesFieldBuilder() + .addBuilder(com.google.cloud.bigquery.biglake.v1alpha1.Database.getDefaultInstance()); + } + /** + * + * + *
+     * The databases from the specified catalog.
+     * </pre>
+ * + * repeated .google.cloud.bigquery.biglake.v1alpha1.Database databases = 1; + */ + public com.google.cloud.bigquery.biglake.v1alpha1.Database.Builder addDatabasesBuilder( + int index) { + return getDatabasesFieldBuilder() + .addBuilder( + index, com.google.cloud.bigquery.biglake.v1alpha1.Database.getDefaultInstance()); + } + /** + * + * + *
+     * The databases from the specified catalog.
+     * </pre>
+ * + * repeated .google.cloud.bigquery.biglake.v1alpha1.Database databases = 1; + */ + public java.util.List + getDatabasesBuilderList() { + return getDatabasesFieldBuilder().getBuilderList(); + } + + private com.google.protobuf.RepeatedFieldBuilderV3< + com.google.cloud.bigquery.biglake.v1alpha1.Database, + com.google.cloud.bigquery.biglake.v1alpha1.Database.Builder, + com.google.cloud.bigquery.biglake.v1alpha1.DatabaseOrBuilder> + getDatabasesFieldBuilder() { + if (databasesBuilder_ == null) { + databasesBuilder_ = + new com.google.protobuf.RepeatedFieldBuilderV3< + com.google.cloud.bigquery.biglake.v1alpha1.Database, + com.google.cloud.bigquery.biglake.v1alpha1.Database.Builder, + com.google.cloud.bigquery.biglake.v1alpha1.DatabaseOrBuilder>( + databases_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean()); + databases_ = null; + } + return databasesBuilder_; + } + + private java.lang.Object nextPageToken_ = ""; + /** + * + * + *
+     * A token, which can be sent as `page_token` to retrieve the next page.
+     * If this field is omitted, there are no subsequent pages.
+     * </pre>
+ * + * string next_page_token = 2; + * + * @return The nextPageToken. + */ + public java.lang.String getNextPageToken() { + java.lang.Object ref = nextPageToken_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + nextPageToken_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + * + * + *
+     * A token, which can be sent as `page_token` to retrieve the next page.
+     * If this field is omitted, there are no subsequent pages.
+     * </pre>
+ * + * string next_page_token = 2; + * + * @return The bytes for nextPageToken. + */ + public com.google.protobuf.ByteString getNextPageTokenBytes() { + java.lang.Object ref = nextPageToken_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); + nextPageToken_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * + * + *
+     * A token, which can be sent as `page_token` to retrieve the next page.
+     * If this field is omitted, there are no subsequent pages.
+     * </pre>
+ * + * string next_page_token = 2; + * + * @param value The nextPageToken to set. + * @return This builder for chaining. + */ + public Builder setNextPageToken(java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + nextPageToken_ = value; + bitField0_ |= 0x00000002; + onChanged(); + return this; + } + /** + * + * + *
+     * A token, which can be sent as `page_token` to retrieve the next page.
+     * If this field is omitted, there are no subsequent pages.
+     * </pre>
+ * + * string next_page_token = 2; + * + * @return This builder for chaining. + */ + public Builder clearNextPageToken() { + nextPageToken_ = getDefaultInstance().getNextPageToken(); + bitField0_ = (bitField0_ & ~0x00000002); + onChanged(); + return this; + } + /** + * + * + *
+     * A token, which can be sent as `page_token` to retrieve the next page.
+     * If this field is omitted, there are no subsequent pages.
+     * </pre>
+ * + * string next_page_token = 2; + * + * @param value The bytes for nextPageToken to set. + * @return This builder for chaining. + */ + public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + nextPageToken_ = value; + bitField0_ |= 0x00000002; + onChanged(); + return this; + } + + @java.lang.Override + public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + // @@protoc_insertion_point(builder_scope:google.cloud.bigquery.biglake.v1alpha1.ListDatabasesResponse) + } + + // @@protoc_insertion_point(class_scope:google.cloud.bigquery.biglake.v1alpha1.ListDatabasesResponse) + private static final com.google.cloud.bigquery.biglake.v1alpha1.ListDatabasesResponse + DEFAULT_INSTANCE; + + static { + DEFAULT_INSTANCE = new com.google.cloud.bigquery.biglake.v1alpha1.ListDatabasesResponse(); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.ListDatabasesResponse + getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + @java.lang.Override + public ListDatabasesResponse parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + Builder builder = newBuilder(); + try { + builder.mergeFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(builder.buildPartial()); + } catch (com.google.protobuf.UninitializedMessageException e) { + throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException(e) + .setUnfinishedMessage(builder.buildPartial()); + } + return builder.buildPartial(); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public com.google.cloud.bigquery.biglake.v1alpha1.ListDatabasesResponse + getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } +} diff --git a/java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/ListDatabasesResponseOrBuilder.java b/java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/ListDatabasesResponseOrBuilder.java new file mode 100644 index 000000000000..c58fcb88f8d3 --- /dev/null +++ b/java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/ListDatabasesResponseOrBuilder.java @@ -0,0 +1,104 @@ +/* + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: google/cloud/bigquery/biglake/v1alpha1/metastore.proto + +package com.google.cloud.bigquery.biglake.v1alpha1; + +public interface ListDatabasesResponseOrBuilder + extends + // @@protoc_insertion_point(interface_extends:google.cloud.bigquery.biglake.v1alpha1.ListDatabasesResponse) + com.google.protobuf.MessageOrBuilder { + + /** + * + * + *
+   * The databases from the specified catalog.
+   * </pre>
+ * + * repeated .google.cloud.bigquery.biglake.v1alpha1.Database databases = 1; + */ + java.util.List getDatabasesList(); + /** + * + * + *
+   * The databases from the specified catalog.
+   * </pre>
+ * + * repeated .google.cloud.bigquery.biglake.v1alpha1.Database databases = 1; + */ + com.google.cloud.bigquery.biglake.v1alpha1.Database getDatabases(int index); + /** + * + * + *
+   * The databases from the specified catalog.
+   * </pre>
+ * + * repeated .google.cloud.bigquery.biglake.v1alpha1.Database databases = 1; + */ + int getDatabasesCount(); + /** + * + * + *
+   * The databases from the specified catalog.
+   * </pre>
+ * + * repeated .google.cloud.bigquery.biglake.v1alpha1.Database databases = 1; + */ + java.util.List + getDatabasesOrBuilderList(); + /** + * + * + *
+   * The databases from the specified catalog.
+   * </pre>
+ * + * repeated .google.cloud.bigquery.biglake.v1alpha1.Database databases = 1; + */ + com.google.cloud.bigquery.biglake.v1alpha1.DatabaseOrBuilder getDatabasesOrBuilder(int index); + + /** + * + * + *
+   * A token, which can be sent as `page_token` to retrieve the next page.
+   * If this field is omitted, there are no subsequent pages.
+   * </pre>
+ * + * string next_page_token = 2; + * + * @return The nextPageToken. + */ + java.lang.String getNextPageToken(); + /** + * + * + *
+   * A token, which can be sent as `page_token` to retrieve the next page.
+   * If this field is omitted, there are no subsequent pages.
+   * </pre>
+ * + * string next_page_token = 2; + * + * @return The bytes for nextPageToken. + */ + com.google.protobuf.ByteString getNextPageTokenBytes(); +} diff --git a/java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/ListLocksRequest.java b/java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/ListLocksRequest.java new file mode 100644 index 000000000000..d4045ff840cc --- /dev/null +++ b/java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/ListLocksRequest.java @@ -0,0 +1,966 @@ +/* + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: google/cloud/bigquery/biglake/v1alpha1/metastore.proto + +package com.google.cloud.bigquery.biglake.v1alpha1; + +/** + * + * + *
+ * Request message for the ListLocks method.
+ * </pre>
+ * + * Protobuf type {@code google.cloud.bigquery.biglake.v1alpha1.ListLocksRequest} + */ +public final class ListLocksRequest extends com.google.protobuf.GeneratedMessageV3 + implements + // @@protoc_insertion_point(message_implements:google.cloud.bigquery.biglake.v1alpha1.ListLocksRequest) + ListLocksRequestOrBuilder { + private static final long serialVersionUID = 0L; + // Use ListLocksRequest.newBuilder() to construct. + private ListLocksRequest(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + + private ListLocksRequest() { + parent_ = ""; + pageToken_ = ""; + } + + @java.lang.Override + @SuppressWarnings({"unused"}) + protected java.lang.Object newInstance(UnusedPrivateParameter unused) { + return new ListLocksRequest(); + } + + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet getUnknownFields() { + return this.unknownFields; + } + + public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { + return com.google.cloud.bigquery.biglake.v1alpha1.MetastoreProto + .internal_static_google_cloud_bigquery_biglake_v1alpha1_ListLocksRequest_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return com.google.cloud.bigquery.biglake.v1alpha1.MetastoreProto + .internal_static_google_cloud_bigquery_biglake_v1alpha1_ListLocksRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + com.google.cloud.bigquery.biglake.v1alpha1.ListLocksRequest.class, + com.google.cloud.bigquery.biglake.v1alpha1.ListLocksRequest.Builder.class); + } + + public static final int PARENT_FIELD_NUMBER = 1; + + @SuppressWarnings("serial") + private volatile java.lang.Object parent_ = ""; + /** + * + * + *
+   * Required. The parent, which owns this collection of locks.
+   * Format:
+   * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}
+   * 
+ * + * + * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @return The parent. + */ + @java.lang.Override + public java.lang.String getParent() { + java.lang.Object ref = parent_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + parent_ = s; + return s; + } + } + /** + * + * + *
+   * Required. The parent, which owns this collection of locks.
+   * Format:
+   * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}
+   * 
+ * + * + * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @return The bytes for parent. + */ + @java.lang.Override + public com.google.protobuf.ByteString getParentBytes() { + java.lang.Object ref = parent_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); + parent_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int PAGE_SIZE_FIELD_NUMBER = 2; + private int pageSize_ = 0; + /** + * + * + *
+   * The maximum number of locks to return. The service may return fewer than
+   * this value.
+   * If unspecified, at most 50 locks will be returned.
+   * The maximum value is 1000; values above 1000 will be coerced to 1000.
+   * 
+ * + * int32 page_size = 2; + * + * @return The pageSize. + */ + @java.lang.Override + public int getPageSize() { + return pageSize_; + } + + public static final int PAGE_TOKEN_FIELD_NUMBER = 3; + + @SuppressWarnings("serial") + private volatile java.lang.Object pageToken_ = ""; + /** + * + * + *
+   * A page token, received from a previous `ListLocks` call.
+   * Provide this to retrieve the subsequent page.
+   * When paginating, all other parameters provided to `ListLocks` must match
+   * the call that provided the page token.
+   * 
+ * + * string page_token = 3; + * + * @return The pageToken. + */ + @java.lang.Override + public java.lang.String getPageToken() { + java.lang.Object ref = pageToken_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + pageToken_ = s; + return s; + } + } + /** + * + * + *
+   * A page token, received from a previous `ListLocks` call.
+   * Provide this to retrieve the subsequent page.
+   * When paginating, all other parameters provided to `ListLocks` must match
+   * the call that provided the page token.
+   * 
+ * + * string page_token = 3; + * + * @return The bytes for pageToken. + */ + @java.lang.Override + public com.google.protobuf.ByteString getPageTokenBytes() { + java.lang.Object ref = pageToken_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); + pageToken_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + private byte memoizedIsInitialized = -1; + + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_); + } + if (pageSize_ != 0) { + output.writeInt32(2, pageSize_); + } + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 3, pageToken_); + } + getUnknownFields().writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_); + } + if (pageSize_ != 0) { + size += com.google.protobuf.CodedOutputStream.computeInt32Size(2, pageSize_); + } + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, pageToken_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof com.google.cloud.bigquery.biglake.v1alpha1.ListLocksRequest)) { + return super.equals(obj); + } + com.google.cloud.bigquery.biglake.v1alpha1.ListLocksRequest other = + (com.google.cloud.bigquery.biglake.v1alpha1.ListLocksRequest) obj; + + if (!getParent().equals(other.getParent())) return false; + if (getPageSize() != other.getPageSize()) return false; + if (!getPageToken().equals(other.getPageToken())) return false; + if (!getUnknownFields().equals(other.getUnknownFields())) return false; + return true; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + hash = (37 * hash) + PARENT_FIELD_NUMBER; + hash = (53 * hash) + getParent().hashCode(); + hash = (37 * hash) + PAGE_SIZE_FIELD_NUMBER; + hash = (53 * hash) + getPageSize(); + hash = (37 * hash) + PAGE_TOKEN_FIELD_NUMBER; + hash = (53 * hash) + getPageToken().hashCode(); + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.ListLocksRequest parseFrom( + java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.ListLocksRequest parseFrom( + java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws 
com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.ListLocksRequest parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.ListLocksRequest parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.ListLocksRequest parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.ListLocksRequest parseFrom( + byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.ListLocksRequest parseFrom( + java.io.InputStream input) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.ListLocksRequest parseFrom( + java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException( + PARSER, input, extensionRegistry); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.ListLocksRequest parseDelimitedFrom( + java.io.InputStream input) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.ListLocksRequest parseDelimitedFrom( + java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( + PARSER, input, extensionRegistry); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.ListLocksRequest parseFrom( + com.google.protobuf.CodedInputStream input) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.ListLocksRequest parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException( + PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { + return newBuilder(); + } + + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + + public static Builder newBuilder( + com.google.cloud.bigquery.biglake.v1alpha1.ListLocksRequest prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * + * + *
+   * Request message for the ListLocks method.
+   * </pre>
+ * + * Protobuf type {@code google.cloud.bigquery.biglake.v1alpha1.ListLocksRequest} + */ + public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder + implements + // @@protoc_insertion_point(builder_implements:google.cloud.bigquery.biglake.v1alpha1.ListLocksRequest) + com.google.cloud.bigquery.biglake.v1alpha1.ListLocksRequestOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { + return com.google.cloud.bigquery.biglake.v1alpha1.MetastoreProto + .internal_static_google_cloud_bigquery_biglake_v1alpha1_ListLocksRequest_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return com.google.cloud.bigquery.biglake.v1alpha1.MetastoreProto + .internal_static_google_cloud_bigquery_biglake_v1alpha1_ListLocksRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + com.google.cloud.bigquery.biglake.v1alpha1.ListLocksRequest.class, + com.google.cloud.bigquery.biglake.v1alpha1.ListLocksRequest.Builder.class); + } + + // Construct using com.google.cloud.bigquery.biglake.v1alpha1.ListLocksRequest.newBuilder() + private Builder() {} + + private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + } + + @java.lang.Override + public Builder clear() { + super.clear(); + bitField0_ = 0; + parent_ = ""; + pageSize_ = 0; + pageToken_ = ""; + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { + return com.google.cloud.bigquery.biglake.v1alpha1.MetastoreProto + .internal_static_google_cloud_bigquery_biglake_v1alpha1_ListLocksRequest_descriptor; + } + + @java.lang.Override + public com.google.cloud.bigquery.biglake.v1alpha1.ListLocksRequest getDefaultInstanceForType() { + return com.google.cloud.bigquery.biglake.v1alpha1.ListLocksRequest.getDefaultInstance(); + } + + @java.lang.Override + public com.google.cloud.bigquery.biglake.v1alpha1.ListLocksRequest build() { + com.google.cloud.bigquery.biglake.v1alpha1.ListLocksRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public com.google.cloud.bigquery.biglake.v1alpha1.ListLocksRequest buildPartial() { + com.google.cloud.bigquery.biglake.v1alpha1.ListLocksRequest result = + new com.google.cloud.bigquery.biglake.v1alpha1.ListLocksRequest(this); + if (bitField0_ != 0) { + buildPartial0(result); + } + onBuilt(); + return result; + } + + private void buildPartial0(com.google.cloud.bigquery.biglake.v1alpha1.ListLocksRequest result) { + int from_bitField0_ = bitField0_; + if (((from_bitField0_ & 0x00000001) != 0)) { + result.parent_ = parent_; + } + if (((from_bitField0_ & 0x00000002) != 0)) { + result.pageSize_ = pageSize_; + } + if (((from_bitField0_ & 0x00000004) != 0)) { + result.pageToken_ = pageToken_; + } + } + + @java.lang.Override + public Builder clone() { + return super.clone(); + } + + @java.lang.Override + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { + return super.setField(field, value); + } + + @java.lang.Override + public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { + return super.clearField(field); + } + + @java.lang.Override + public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return super.clearOneof(oneof); + } + + @java.lang.Override + 
public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { + return super.setRepeatedField(field, index, value); + } + + @java.lang.Override + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { + return super.addRepeatedField(field, value); + } + + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof com.google.cloud.bigquery.biglake.v1alpha1.ListLocksRequest) { + return mergeFrom((com.google.cloud.bigquery.biglake.v1alpha1.ListLocksRequest) other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(com.google.cloud.bigquery.biglake.v1alpha1.ListLocksRequest other) { + if (other == com.google.cloud.bigquery.biglake.v1alpha1.ListLocksRequest.getDefaultInstance()) + return this; + if (!other.getParent().isEmpty()) { + parent_ = other.parent_; + bitField0_ |= 0x00000001; + onChanged(); + } + if (other.getPageSize() != 0) { + setPageSize(other.getPageSize()); + } + if (!other.getPageToken().isEmpty()) { + pageToken_ = other.pageToken_; + bitField0_ |= 0x00000004; + onChanged(); + } + this.mergeUnknownFields(other.getUnknownFields()); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 10: + { + parent_ = input.readStringRequireUtf8(); + bitField0_ |= 0x00000001; + break; + } // case 10 + case 16: + { + pageSize_ = input.readInt32(); + bitField0_ |= 0x00000002; + break; + } // case 16 + case 26: + { + pageToken_ = input.readStringRequireUtf8(); + bitField0_ |= 0x00000004; + break; + } // case 26 + default: + { + if (!super.parseUnknownField(input, extensionRegistry, tag)) { + done = true; // was an endgroup tag + } + break; + } // default: + } // switch (tag) + } // while (!done) + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.unwrapIOException(); + } finally { + onChanged(); + } // finally + return this; + } + + private int bitField0_; + + private java.lang.Object parent_ = ""; + /** + * + * + *
+     * Required. The parent, which owns this collection of locks.
+     * Format:
+     * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}
+     * 
+ * + * + * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @return The parent. + */ + public java.lang.String getParent() { + java.lang.Object ref = parent_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + parent_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + * + * + *
+     * Required. The parent, which owns this collection of locks.
+     * Format:
+     * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}
+     * 
+ * + * + * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @return The bytes for parent. + */ + public com.google.protobuf.ByteString getParentBytes() { + java.lang.Object ref = parent_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); + parent_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * + * + *
+     * Required. The parent, which owns this collection of locks.
+     * Format:
+     * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}
+     * 
+ * + * + * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @param value The parent to set. + * @return This builder for chaining. + */ + public Builder setParent(java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + parent_ = value; + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + /** + * + * + *
+     * Required. The parent, which owns this collection of locks.
+     * Format:
+     * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}
+     * 
+ * + * + * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @return This builder for chaining. + */ + public Builder clearParent() { + parent_ = getDefaultInstance().getParent(); + bitField0_ = (bitField0_ & ~0x00000001); + onChanged(); + return this; + } + /** + * + * + *
+     * Required. The parent, which owns this collection of locks.
+     * Format:
+     * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}
+     * 
+ * + * + * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @param value The bytes for parent to set. + * @return This builder for chaining. + */ + public Builder setParentBytes(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + parent_ = value; + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + + private int pageSize_; + /** + * + * + *
+     * The maximum number of locks to return. The service may return fewer than
+     * this value.
+     * If unspecified, at most 50 locks will be returned.
+     * The maximum value is 1000; values above 1000 will be coerced to 1000.
+     * 
+ * + * int32 page_size = 2; + * + * @return The pageSize. + */ + @java.lang.Override + public int getPageSize() { + return pageSize_; + } + /** + * + * + *
+     * The maximum number of locks to return. The service may return fewer than
+     * this value.
+     * If unspecified, at most 50 locks will be returned.
+     * The maximum value is 1000; values above 1000 will be coerced to 1000.
+     * 
+ * + * int32 page_size = 2; + * + * @param value The pageSize to set. + * @return This builder for chaining. + */ + public Builder setPageSize(int value) { + + pageSize_ = value; + bitField0_ |= 0x00000002; + onChanged(); + return this; + } + /** + * + * + *
+     * The maximum number of locks to return. The service may return fewer than
+     * this value.
+     * If unspecified, at most 50 locks will be returned.
+     * The maximum value is 1000; values above 1000 will be coerced to 1000.
+     * 
+ * + * int32 page_size = 2; + * + * @return This builder for chaining. + */ + public Builder clearPageSize() { + bitField0_ = (bitField0_ & ~0x00000002); + pageSize_ = 0; + onChanged(); + return this; + } + + private java.lang.Object pageToken_ = ""; + /** + * + * + *
+     * A page token, received from a previous `ListLocks` call.
+     * Provide this to retrieve the subsequent page.
+     * When paginating, all other parameters provided to `ListLocks` must match
+     * the call that provided the page token.
+     * 
+ * + * string page_token = 3; + * + * @return The pageToken. + */ + public java.lang.String getPageToken() { + java.lang.Object ref = pageToken_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + pageToken_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + * + * + *
+     * A page token, received from a previous `ListLocks` call.
+     * Provide this to retrieve the subsequent page.
+     * When paginating, all other parameters provided to `ListLocks` must match
+     * the call that provided the page token.
+     * 
+ * + * string page_token = 3; + * + * @return The bytes for pageToken. + */ + public com.google.protobuf.ByteString getPageTokenBytes() { + java.lang.Object ref = pageToken_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); + pageToken_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * + * + *
+     * A page token, received from a previous `ListLocks` call.
+     * Provide this to retrieve the subsequent page.
+     * When paginating, all other parameters provided to `ListLocks` must match
+     * the call that provided the page token.
+     * 
+ * + * string page_token = 3; + * + * @param value The pageToken to set. + * @return This builder for chaining. + */ + public Builder setPageToken(java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + pageToken_ = value; + bitField0_ |= 0x00000004; + onChanged(); + return this; + } + /** + * + * + *
+     * A page token, received from a previous `ListLocks` call.
+     * Provide this to retrieve the subsequent page.
+     * When paginating, all other parameters provided to `ListLocks` must match
+     * the call that provided the page token.
+     * 
+ * + * string page_token = 3; + * + * @return This builder for chaining. + */ + public Builder clearPageToken() { + pageToken_ = getDefaultInstance().getPageToken(); + bitField0_ = (bitField0_ & ~0x00000004); + onChanged(); + return this; + } + /** + * + * + *
+     * A page token, received from a previous `ListLocks` call.
+     * Provide this to retrieve the subsequent page.
+     * When paginating, all other parameters provided to `ListLocks` must match
+     * the call that provided the page token.
+     * 
+ * + * string page_token = 3; + * + * @param value The bytes for pageToken to set. + * @return This builder for chaining. + */ + public Builder setPageTokenBytes(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + pageToken_ = value; + bitField0_ |= 0x00000004; + onChanged(); + return this; + } + + @java.lang.Override + public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + // @@protoc_insertion_point(builder_scope:google.cloud.bigquery.biglake.v1alpha1.ListLocksRequest) + } + + // @@protoc_insertion_point(class_scope:google.cloud.bigquery.biglake.v1alpha1.ListLocksRequest) + private static final com.google.cloud.bigquery.biglake.v1alpha1.ListLocksRequest DEFAULT_INSTANCE; + + static { + DEFAULT_INSTANCE = new com.google.cloud.bigquery.biglake.v1alpha1.ListLocksRequest(); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.ListLocksRequest getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + @java.lang.Override + public ListLocksRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + Builder builder = newBuilder(); + try { + builder.mergeFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(builder.buildPartial()); + } catch (com.google.protobuf.UninitializedMessageException e) { + throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException(e) + .setUnfinishedMessage(builder.buildPartial()); + } + return builder.buildPartial(); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public com.google.cloud.bigquery.biglake.v1alpha1.ListLocksRequest getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } +} diff --git a/java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/ListLocksRequestOrBuilder.java b/java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/ListLocksRequestOrBuilder.java new file mode 100644 index 000000000000..aea8db30f3a2 --- /dev/null +++ b/java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/ListLocksRequestOrBuilder.java @@ -0,0 +1,105 @@ +/* + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: google/cloud/bigquery/biglake/v1alpha1/metastore.proto + +package com.google.cloud.bigquery.biglake.v1alpha1; + +public interface ListLocksRequestOrBuilder + extends + // @@protoc_insertion_point(interface_extends:google.cloud.bigquery.biglake.v1alpha1.ListLocksRequest) + com.google.protobuf.MessageOrBuilder { + + /** + * + * + *
+   * Required. The parent, which owns this collection of locks.
+   * Format:
+   * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}
+   * 
+ * + * + * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @return The parent. + */ + java.lang.String getParent(); + /** + * + * + *
+   * Required. The parent, which owns this collection of locks.
+   * Format:
+   * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}
+   * 
+ * + * + * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @return The bytes for parent. + */ + com.google.protobuf.ByteString getParentBytes(); + + /** + * + * + *
+   * The maximum number of locks to return. The service may return fewer than
+   * this value.
+   * If unspecified, at most 50 locks will be returned.
+   * The maximum value is 1000; values above 1000 will be coerced to 1000.
+   * 
+ * + * int32 page_size = 2; + * + * @return The pageSize. + */ + int getPageSize(); + + /** + * + * + *
+   * A page token, received from a previous `ListLocks` call.
+   * Provide this to retrieve the subsequent page.
+   * When paginating, all other parameters provided to `ListLocks` must match
+   * the call that provided the page token.
+   * 
+ * + * string page_token = 3; + * + * @return The pageToken. + */ + java.lang.String getPageToken(); + /** + * + * + *
+   * A page token, received from a previous `ListLocks` call.
+   * Provide this to retrieve the subsequent page.
+   * When paginating, all other parameters provided to `ListLocks` must match
+   * the call that provided the page token.
+   * 
+ * + * string page_token = 3; + * + * @return The bytes for pageToken. + */ + com.google.protobuf.ByteString getPageTokenBytes(); +} diff --git a/java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/ListLocksResponse.java b/java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/ListLocksResponse.java new file mode 100644 index 000000000000..25bc2eaeb063 --- /dev/null +++ b/java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/ListLocksResponse.java @@ -0,0 +1,1136 @@ +/* + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: google/cloud/bigquery/biglake/v1alpha1/metastore.proto + +package com.google.cloud.bigquery.biglake.v1alpha1; + +/** + * + * + *
+ * Response message for the ListLocks method.
+ * </pre>
+ * + * Protobuf type {@code google.cloud.bigquery.biglake.v1alpha1.ListLocksResponse} + */ +public final class ListLocksResponse extends com.google.protobuf.GeneratedMessageV3 + implements + // @@protoc_insertion_point(message_implements:google.cloud.bigquery.biglake.v1alpha1.ListLocksResponse) + ListLocksResponseOrBuilder { + private static final long serialVersionUID = 0L; + // Use ListLocksResponse.newBuilder() to construct. + private ListLocksResponse(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + + private ListLocksResponse() { + locks_ = java.util.Collections.emptyList(); + nextPageToken_ = ""; + } + + @java.lang.Override + @SuppressWarnings({"unused"}) + protected java.lang.Object newInstance(UnusedPrivateParameter unused) { + return new ListLocksResponse(); + } + + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet getUnknownFields() { + return this.unknownFields; + } + + public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { + return com.google.cloud.bigquery.biglake.v1alpha1.MetastoreProto + .internal_static_google_cloud_bigquery_biglake_v1alpha1_ListLocksResponse_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return com.google.cloud.bigquery.biglake.v1alpha1.MetastoreProto + .internal_static_google_cloud_bigquery_biglake_v1alpha1_ListLocksResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + com.google.cloud.bigquery.biglake.v1alpha1.ListLocksResponse.class, + com.google.cloud.bigquery.biglake.v1alpha1.ListLocksResponse.Builder.class); + } + + public static final int LOCKS_FIELD_NUMBER = 1; + + @SuppressWarnings("serial") + private java.util.List locks_; + /** + * + * + *
+   * The locks from the specified database.
+   * 
+ * + * repeated .google.cloud.bigquery.biglake.v1alpha1.Lock locks = 1; + */ + @java.lang.Override + public java.util.List getLocksList() { + return locks_; + } + /** + * + * + *
+   * The locks from the specified database.
+   * 
+ * + * repeated .google.cloud.bigquery.biglake.v1alpha1.Lock locks = 1; + */ + @java.lang.Override + public java.util.List + getLocksOrBuilderList() { + return locks_; + } + /** + * + * + *
+   * The locks from the specified database.
+   * 
+ * + * repeated .google.cloud.bigquery.biglake.v1alpha1.Lock locks = 1; + */ + @java.lang.Override + public int getLocksCount() { + return locks_.size(); + } + /** + * + * + *
+   * The locks from the specified database.
+   * 
+ * + * repeated .google.cloud.bigquery.biglake.v1alpha1.Lock locks = 1; + */ + @java.lang.Override + public com.google.cloud.bigquery.biglake.v1alpha1.Lock getLocks(int index) { + return locks_.get(index); + } + /** + * + * + *
+   * The locks from the specified database.
+   * 
+ * + * repeated .google.cloud.bigquery.biglake.v1alpha1.Lock locks = 1; + */ + @java.lang.Override + public com.google.cloud.bigquery.biglake.v1alpha1.LockOrBuilder getLocksOrBuilder(int index) { + return locks_.get(index); + } + + public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2; + + @SuppressWarnings("serial") + private volatile java.lang.Object nextPageToken_ = ""; + /** + * + * + *
+   * A token, which can be sent as `page_token` to retrieve the next page.
+   * If this field is omitted, there are no subsequent pages.
+   * 
+ * + * string next_page_token = 2; + * + * @return The nextPageToken. + */ + @java.lang.Override + public java.lang.String getNextPageToken() { + java.lang.Object ref = nextPageToken_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + nextPageToken_ = s; + return s; + } + } + /** + * + * + *
+   * A token, which can be sent as `page_token` to retrieve the next page.
+   * If this field is omitted, there are no subsequent pages.
+   * 
+ * + * string next_page_token = 2; + * + * @return The bytes for nextPageToken. + */ + @java.lang.Override + public com.google.protobuf.ByteString getNextPageTokenBytes() { + java.lang.Object ref = nextPageToken_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); + nextPageToken_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + private byte memoizedIsInitialized = -1; + + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { + for (int i = 0; i < locks_.size(); i++) { + output.writeMessage(1, locks_.get(i)); + } + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_); + } + getUnknownFields().writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + for (int i = 0; i < locks_.size(); i++) { + size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, locks_.get(i)); + } + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof com.google.cloud.bigquery.biglake.v1alpha1.ListLocksResponse)) { + return super.equals(obj); + } + com.google.cloud.bigquery.biglake.v1alpha1.ListLocksResponse other = + (com.google.cloud.bigquery.biglake.v1alpha1.ListLocksResponse) obj; + + if (!getLocksList().equals(other.getLocksList())) return false; + if (!getNextPageToken().equals(other.getNextPageToken())) return false; + if (!getUnknownFields().equals(other.getUnknownFields())) return false; + return true; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + if (getLocksCount() > 0) { + hash = (37 * hash) + LOCKS_FIELD_NUMBER; + hash = (53 * hash) + getLocksList().hashCode(); + } + hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER; + hash = (53 * hash) + getNextPageToken().hashCode(); + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.ListLocksResponse parseFrom( + java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.ListLocksResponse parseFrom( + java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.ListLocksResponse parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); 
+ } + + public static com.google.cloud.bigquery.biglake.v1alpha1.ListLocksResponse parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.ListLocksResponse parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.ListLocksResponse parseFrom( + byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.ListLocksResponse parseFrom( + java.io.InputStream input) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.ListLocksResponse parseFrom( + java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException( + PARSER, input, extensionRegistry); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.ListLocksResponse parseDelimitedFrom( + java.io.InputStream input) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.ListLocksResponse parseDelimitedFrom( + java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( + PARSER, input, extensionRegistry); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.ListLocksResponse parseFrom( + com.google.protobuf.CodedInputStream input) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.ListLocksResponse parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException( + PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { + return newBuilder(); + } + + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + + public static Builder newBuilder( + com.google.cloud.bigquery.biglake.v1alpha1.ListLocksResponse prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * + * + *
+   * Response message for the ListLocks method.
+   * 
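+   *
+   * A minimal paging sketch, not generated code: {@code fetchLocksPage} is a
+   * hypothetical helper standing in for whatever actually issues the ListLocks
+   * call; only {@code getLocksList()} and {@code getNextPageToken()} below are
+   * part of this message.
+   * <pre>{@code
+   * String pageToken = "";
+   * do {
+   *   ListLocksResponse response = fetchLocksPage(pageToken); // hypothetical RPC helper
+   *   for (Lock lock : response.getLocksList()) {
+   *     // inspect each returned lock
+   *   }
+   *   // an empty token means there are no subsequent pages
+   *   pageToken = response.getNextPageToken();
+   * } while (!pageToken.isEmpty());
+   * }</pre>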
+ * + * Protobuf type {@code google.cloud.bigquery.biglake.v1alpha1.ListLocksResponse} + */ + public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder + implements + // @@protoc_insertion_point(builder_implements:google.cloud.bigquery.biglake.v1alpha1.ListLocksResponse) + com.google.cloud.bigquery.biglake.v1alpha1.ListLocksResponseOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { + return com.google.cloud.bigquery.biglake.v1alpha1.MetastoreProto + .internal_static_google_cloud_bigquery_biglake_v1alpha1_ListLocksResponse_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return com.google.cloud.bigquery.biglake.v1alpha1.MetastoreProto + .internal_static_google_cloud_bigquery_biglake_v1alpha1_ListLocksResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + com.google.cloud.bigquery.biglake.v1alpha1.ListLocksResponse.class, + com.google.cloud.bigquery.biglake.v1alpha1.ListLocksResponse.Builder.class); + } + + // Construct using com.google.cloud.bigquery.biglake.v1alpha1.ListLocksResponse.newBuilder() + private Builder() {} + + private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + } + + @java.lang.Override + public Builder clear() { + super.clear(); + bitField0_ = 0; + if (locksBuilder_ == null) { + locks_ = java.util.Collections.emptyList(); + } else { + locks_ = null; + locksBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + nextPageToken_ = ""; + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { + return com.google.cloud.bigquery.biglake.v1alpha1.MetastoreProto + .internal_static_google_cloud_bigquery_biglake_v1alpha1_ListLocksResponse_descriptor; + } + + @java.lang.Override + public com.google.cloud.bigquery.biglake.v1alpha1.ListLocksResponse + getDefaultInstanceForType() { + return com.google.cloud.bigquery.biglake.v1alpha1.ListLocksResponse.getDefaultInstance(); + } + + @java.lang.Override + public com.google.cloud.bigquery.biglake.v1alpha1.ListLocksResponse build() { + com.google.cloud.bigquery.biglake.v1alpha1.ListLocksResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public com.google.cloud.bigquery.biglake.v1alpha1.ListLocksResponse buildPartial() { + com.google.cloud.bigquery.biglake.v1alpha1.ListLocksResponse result = + new com.google.cloud.bigquery.biglake.v1alpha1.ListLocksResponse(this); + buildPartialRepeatedFields(result); + if (bitField0_ != 0) { + buildPartial0(result); + } + onBuilt(); + return result; + } + + private void buildPartialRepeatedFields( + com.google.cloud.bigquery.biglake.v1alpha1.ListLocksResponse result) { + if (locksBuilder_ == null) { + if (((bitField0_ & 0x00000001) != 0)) { + locks_ = java.util.Collections.unmodifiableList(locks_); + bitField0_ = (bitField0_ & ~0x00000001); + } + result.locks_ = locks_; + } else { + result.locks_ = locksBuilder_.build(); + } + } + + private void buildPartial0( + com.google.cloud.bigquery.biglake.v1alpha1.ListLocksResponse result) { + int from_bitField0_ = bitField0_; + if (((from_bitField0_ & 0x00000002) != 0)) { + result.nextPageToken_ = nextPageToken_; + } + } + + @java.lang.Override + public Builder clone() { + return super.clone(); + } + + @java.lang.Override + public Builder setField( + 
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { + return super.setField(field, value); + } + + @java.lang.Override + public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { + return super.clearField(field); + } + + @java.lang.Override + public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return super.clearOneof(oneof); + } + + @java.lang.Override + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { + return super.setRepeatedField(field, index, value); + } + + @java.lang.Override + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { + return super.addRepeatedField(field, value); + } + + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof com.google.cloud.bigquery.biglake.v1alpha1.ListLocksResponse) { + return mergeFrom((com.google.cloud.bigquery.biglake.v1alpha1.ListLocksResponse) other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(com.google.cloud.bigquery.biglake.v1alpha1.ListLocksResponse other) { + if (other + == com.google.cloud.bigquery.biglake.v1alpha1.ListLocksResponse.getDefaultInstance()) + return this; + if (locksBuilder_ == null) { + if (!other.locks_.isEmpty()) { + if (locks_.isEmpty()) { + locks_ = other.locks_; + bitField0_ = (bitField0_ & ~0x00000001); + } else { + ensureLocksIsMutable(); + locks_.addAll(other.locks_); + } + onChanged(); + } + } else { + if (!other.locks_.isEmpty()) { + if (locksBuilder_.isEmpty()) { + locksBuilder_.dispose(); + locksBuilder_ = null; + locks_ = other.locks_; + bitField0_ = (bitField0_ & ~0x00000001); + locksBuilder_ = + com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders + ? 
getLocksFieldBuilder() + : null; + } else { + locksBuilder_.addAllMessages(other.locks_); + } + } + } + if (!other.getNextPageToken().isEmpty()) { + nextPageToken_ = other.nextPageToken_; + bitField0_ |= 0x00000002; + onChanged(); + } + this.mergeUnknownFields(other.getUnknownFields()); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 10: + { + com.google.cloud.bigquery.biglake.v1alpha1.Lock m = + input.readMessage( + com.google.cloud.bigquery.biglake.v1alpha1.Lock.parser(), + extensionRegistry); + if (locksBuilder_ == null) { + ensureLocksIsMutable(); + locks_.add(m); + } else { + locksBuilder_.addMessage(m); + } + break; + } // case 10 + case 18: + { + nextPageToken_ = input.readStringRequireUtf8(); + bitField0_ |= 0x00000002; + break; + } // case 18 + default: + { + if (!super.parseUnknownField(input, extensionRegistry, tag)) { + done = true; // was an endgroup tag + } + break; + } // default: + } // switch (tag) + } // while (!done) + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.unwrapIOException(); + } finally { + onChanged(); + } // finally + return this; + } + + private int bitField0_; + + private java.util.List locks_ = + java.util.Collections.emptyList(); + + private void ensureLocksIsMutable() { + if (!((bitField0_ & 0x00000001) != 0)) { + locks_ = new java.util.ArrayList(locks_); + bitField0_ |= 0x00000001; + } + } + + private com.google.protobuf.RepeatedFieldBuilderV3< + com.google.cloud.bigquery.biglake.v1alpha1.Lock, + com.google.cloud.bigquery.biglake.v1alpha1.Lock.Builder, + com.google.cloud.bigquery.biglake.v1alpha1.LockOrBuilder> + locksBuilder_; + + /** + * + * + *
+     * The locks from the specified database.
+     * 
+ * + * repeated .google.cloud.bigquery.biglake.v1alpha1.Lock locks = 1; + */ + public java.util.List getLocksList() { + if (locksBuilder_ == null) { + return java.util.Collections.unmodifiableList(locks_); + } else { + return locksBuilder_.getMessageList(); + } + } + /** + * + * + *
+     * The locks from the specified database.
+     * 
+ * + * repeated .google.cloud.bigquery.biglake.v1alpha1.Lock locks = 1; + */ + public int getLocksCount() { + if (locksBuilder_ == null) { + return locks_.size(); + } else { + return locksBuilder_.getCount(); + } + } + /** + * + * + *
+     * The locks from the specified database.
+     * 
+ * + * repeated .google.cloud.bigquery.biglake.v1alpha1.Lock locks = 1; + */ + public com.google.cloud.bigquery.biglake.v1alpha1.Lock getLocks(int index) { + if (locksBuilder_ == null) { + return locks_.get(index); + } else { + return locksBuilder_.getMessage(index); + } + } + /** + * + * + *
+     * The locks from the specified database.
+     * 
+ * + * repeated .google.cloud.bigquery.biglake.v1alpha1.Lock locks = 1; + */ + public Builder setLocks(int index, com.google.cloud.bigquery.biglake.v1alpha1.Lock value) { + if (locksBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureLocksIsMutable(); + locks_.set(index, value); + onChanged(); + } else { + locksBuilder_.setMessage(index, value); + } + return this; + } + /** + * + * + *
+     * The locks from the specified database.
+     * 
+ * + * repeated .google.cloud.bigquery.biglake.v1alpha1.Lock locks = 1; + */ + public Builder setLocks( + int index, com.google.cloud.bigquery.biglake.v1alpha1.Lock.Builder builderForValue) { + if (locksBuilder_ == null) { + ensureLocksIsMutable(); + locks_.set(index, builderForValue.build()); + onChanged(); + } else { + locksBuilder_.setMessage(index, builderForValue.build()); + } + return this; + } + /** + * + * + *
+     * The locks from the specified database.
+     * 
+ * + * repeated .google.cloud.bigquery.biglake.v1alpha1.Lock locks = 1; + */ + public Builder addLocks(com.google.cloud.bigquery.biglake.v1alpha1.Lock value) { + if (locksBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureLocksIsMutable(); + locks_.add(value); + onChanged(); + } else { + locksBuilder_.addMessage(value); + } + return this; + } + /** + * + * + *
+     * The locks from the specified database.
+     * 
+ * + * repeated .google.cloud.bigquery.biglake.v1alpha1.Lock locks = 1; + */ + public Builder addLocks(int index, com.google.cloud.bigquery.biglake.v1alpha1.Lock value) { + if (locksBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureLocksIsMutable(); + locks_.add(index, value); + onChanged(); + } else { + locksBuilder_.addMessage(index, value); + } + return this; + } + /** + * + * + *
+     * The locks from the specified database.
+     * 
+ * + * repeated .google.cloud.bigquery.biglake.v1alpha1.Lock locks = 1; + */ + public Builder addLocks( + com.google.cloud.bigquery.biglake.v1alpha1.Lock.Builder builderForValue) { + if (locksBuilder_ == null) { + ensureLocksIsMutable(); + locks_.add(builderForValue.build()); + onChanged(); + } else { + locksBuilder_.addMessage(builderForValue.build()); + } + return this; + } + /** + * + * + *
+     * The locks from the specified database.
+     * 
+ * + * repeated .google.cloud.bigquery.biglake.v1alpha1.Lock locks = 1; + */ + public Builder addLocks( + int index, com.google.cloud.bigquery.biglake.v1alpha1.Lock.Builder builderForValue) { + if (locksBuilder_ == null) { + ensureLocksIsMutable(); + locks_.add(index, builderForValue.build()); + onChanged(); + } else { + locksBuilder_.addMessage(index, builderForValue.build()); + } + return this; + } + /** + * + * + *
+     * The locks from the specified database.
+     * 
+ * + * repeated .google.cloud.bigquery.biglake.v1alpha1.Lock locks = 1; + */ + public Builder addAllLocks( + java.lang.Iterable values) { + if (locksBuilder_ == null) { + ensureLocksIsMutable(); + com.google.protobuf.AbstractMessageLite.Builder.addAll(values, locks_); + onChanged(); + } else { + locksBuilder_.addAllMessages(values); + } + return this; + } + /** + * + * + *
+     * The locks from the specified database.
+     * 
+ * + * repeated .google.cloud.bigquery.biglake.v1alpha1.Lock locks = 1; + */ + public Builder clearLocks() { + if (locksBuilder_ == null) { + locks_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000001); + onChanged(); + } else { + locksBuilder_.clear(); + } + return this; + } + /** + * + * + *
+     * The locks from the specified database.
+     * 
+ * + * repeated .google.cloud.bigquery.biglake.v1alpha1.Lock locks = 1; + */ + public Builder removeLocks(int index) { + if (locksBuilder_ == null) { + ensureLocksIsMutable(); + locks_.remove(index); + onChanged(); + } else { + locksBuilder_.remove(index); + } + return this; + } + /** + * + * + *
+     * The locks from the specified database.
+     * 
+ * + * repeated .google.cloud.bigquery.biglake.v1alpha1.Lock locks = 1; + */ + public com.google.cloud.bigquery.biglake.v1alpha1.Lock.Builder getLocksBuilder(int index) { + return getLocksFieldBuilder().getBuilder(index); + } + /** + * + * + *
+     * The locks from the specified database.
+     * 
+ * + * repeated .google.cloud.bigquery.biglake.v1alpha1.Lock locks = 1; + */ + public com.google.cloud.bigquery.biglake.v1alpha1.LockOrBuilder getLocksOrBuilder(int index) { + if (locksBuilder_ == null) { + return locks_.get(index); + } else { + return locksBuilder_.getMessageOrBuilder(index); + } + } + /** + * + * + *
+     * The locks from the specified database.
+     * 
+ * + * repeated .google.cloud.bigquery.biglake.v1alpha1.Lock locks = 1; + */ + public java.util.List + getLocksOrBuilderList() { + if (locksBuilder_ != null) { + return locksBuilder_.getMessageOrBuilderList(); + } else { + return java.util.Collections.unmodifiableList(locks_); + } + } + /** + * + * + *
+     * The locks from the specified database.
+     * 
+ * + * repeated .google.cloud.bigquery.biglake.v1alpha1.Lock locks = 1; + */ + public com.google.cloud.bigquery.biglake.v1alpha1.Lock.Builder addLocksBuilder() { + return getLocksFieldBuilder() + .addBuilder(com.google.cloud.bigquery.biglake.v1alpha1.Lock.getDefaultInstance()); + } + /** + * + * + *
+     * The locks from the specified database.
+     * 
+ * + * repeated .google.cloud.bigquery.biglake.v1alpha1.Lock locks = 1; + */ + public com.google.cloud.bigquery.biglake.v1alpha1.Lock.Builder addLocksBuilder(int index) { + return getLocksFieldBuilder() + .addBuilder(index, com.google.cloud.bigquery.biglake.v1alpha1.Lock.getDefaultInstance()); + } + /** + * + * + *
+     * The locks from the specified database.
+     * 
+ * + * repeated .google.cloud.bigquery.biglake.v1alpha1.Lock locks = 1; + */ + public java.util.List + getLocksBuilderList() { + return getLocksFieldBuilder().getBuilderList(); + } + + private com.google.protobuf.RepeatedFieldBuilderV3< + com.google.cloud.bigquery.biglake.v1alpha1.Lock, + com.google.cloud.bigquery.biglake.v1alpha1.Lock.Builder, + com.google.cloud.bigquery.biglake.v1alpha1.LockOrBuilder> + getLocksFieldBuilder() { + if (locksBuilder_ == null) { + locksBuilder_ = + new com.google.protobuf.RepeatedFieldBuilderV3< + com.google.cloud.bigquery.biglake.v1alpha1.Lock, + com.google.cloud.bigquery.biglake.v1alpha1.Lock.Builder, + com.google.cloud.bigquery.biglake.v1alpha1.LockOrBuilder>( + locks_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean()); + locks_ = null; + } + return locksBuilder_; + } + + private java.lang.Object nextPageToken_ = ""; + /** + * + * + *
+     * A token, which can be sent as `page_token` to retrieve the next page.
+     * If this field is omitted, there are no subsequent pages.
+     * 
+ * + * string next_page_token = 2; + * + * @return The nextPageToken. + */ + public java.lang.String getNextPageToken() { + java.lang.Object ref = nextPageToken_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + nextPageToken_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + * + * + *
+     * A token, which can be sent as `page_token` to retrieve the next page.
+     * If this field is omitted, there are no subsequent pages.
+     * 
+ * + * string next_page_token = 2; + * + * @return The bytes for nextPageToken. + */ + public com.google.protobuf.ByteString getNextPageTokenBytes() { + java.lang.Object ref = nextPageToken_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); + nextPageToken_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * + * + *
+     * A token, which can be sent as `page_token` to retrieve the next page.
+     * If this field is omitted, there are no subsequent pages.
+     * 
+ * + * string next_page_token = 2; + * + * @param value The nextPageToken to set. + * @return This builder for chaining. + */ + public Builder setNextPageToken(java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + nextPageToken_ = value; + bitField0_ |= 0x00000002; + onChanged(); + return this; + } + /** + * + * + *
+     * A token, which can be sent as `page_token` to retrieve the next page.
+     * If this field is omitted, there are no subsequent pages.
+     * 
+ * + * string next_page_token = 2; + * + * @return This builder for chaining. + */ + public Builder clearNextPageToken() { + nextPageToken_ = getDefaultInstance().getNextPageToken(); + bitField0_ = (bitField0_ & ~0x00000002); + onChanged(); + return this; + } + /** + * + * + *
+     * A token, which can be sent as `page_token` to retrieve the next page.
+     * If this field is omitted, there are no subsequent pages.
+     * 
+ * + * string next_page_token = 2; + * + * @param value The bytes for nextPageToken to set. + * @return This builder for chaining. + */ + public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + nextPageToken_ = value; + bitField0_ |= 0x00000002; + onChanged(); + return this; + } + + @java.lang.Override + public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + // @@protoc_insertion_point(builder_scope:google.cloud.bigquery.biglake.v1alpha1.ListLocksResponse) + } + + // @@protoc_insertion_point(class_scope:google.cloud.bigquery.biglake.v1alpha1.ListLocksResponse) + private static final com.google.cloud.bigquery.biglake.v1alpha1.ListLocksResponse + DEFAULT_INSTANCE; + + static { + DEFAULT_INSTANCE = new com.google.cloud.bigquery.biglake.v1alpha1.ListLocksResponse(); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.ListLocksResponse getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + @java.lang.Override + public ListLocksResponse parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + Builder builder = newBuilder(); + try { + builder.mergeFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(builder.buildPartial()); + } catch (com.google.protobuf.UninitializedMessageException e) { + throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException(e) + .setUnfinishedMessage(builder.buildPartial()); + } + return builder.buildPartial(); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public com.google.cloud.bigquery.biglake.v1alpha1.ListLocksResponse getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } +} diff --git a/java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/ListLocksResponseOrBuilder.java b/java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/ListLocksResponseOrBuilder.java new file mode 100644 index 000000000000..4c2d967a3a29 --- /dev/null +++ b/java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/ListLocksResponseOrBuilder.java @@ -0,0 +1,104 @@ +/* + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: google/cloud/bigquery/biglake/v1alpha1/metastore.proto + +package com.google.cloud.bigquery.biglake.v1alpha1; + +public interface ListLocksResponseOrBuilder + extends + // @@protoc_insertion_point(interface_extends:google.cloud.bigquery.biglake.v1alpha1.ListLocksResponse) + com.google.protobuf.MessageOrBuilder { + + /** + * + * + *
+   * The locks from the specified database.
+   * 
+ * + * repeated .google.cloud.bigquery.biglake.v1alpha1.Lock locks = 1; + */ + java.util.List getLocksList(); + /** + * + * + *
+   * The locks from the specified database.
+   * 
+ * + * repeated .google.cloud.bigquery.biglake.v1alpha1.Lock locks = 1; + */ + com.google.cloud.bigquery.biglake.v1alpha1.Lock getLocks(int index); + /** + * + * + *
+   * The locks from the specified database.
+   * 
+ * + * repeated .google.cloud.bigquery.biglake.v1alpha1.Lock locks = 1; + */ + int getLocksCount(); + /** + * + * + *
+   * The locks from the specified database.
+   * 
+ * + * repeated .google.cloud.bigquery.biglake.v1alpha1.Lock locks = 1; + */ + java.util.List + getLocksOrBuilderList(); + /** + * + * + *
+   * The locks from the specified database.
+   * 
+ * + * repeated .google.cloud.bigquery.biglake.v1alpha1.Lock locks = 1; + */ + com.google.cloud.bigquery.biglake.v1alpha1.LockOrBuilder getLocksOrBuilder(int index); + + /** + * + * + *
+   * A token, which can be sent as `page_token` to retrieve the next page.
+   * If this field is omitted, there are no subsequent pages.
+   * 
+ * + * string next_page_token = 2; + * + * @return The nextPageToken. + */ + java.lang.String getNextPageToken(); + /** + * + * + *
+   * A token, which can be sent as `page_token` to retrieve the next page.
+   * If this field is omitted, there are no subsequent pages.
+   * 
+ * + * string next_page_token = 2; + * + * @return The bytes for nextPageToken. + */ + com.google.protobuf.ByteString getNextPageTokenBytes(); +} diff --git a/java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/ListTablesRequest.java b/java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/ListTablesRequest.java new file mode 100644 index 000000000000..b00af8889f19 --- /dev/null +++ b/java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/ListTablesRequest.java @@ -0,0 +1,970 @@ +/* + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: google/cloud/bigquery/biglake/v1alpha1/metastore.proto + +package com.google.cloud.bigquery.biglake.v1alpha1; + +/** + * + * + *
+ * Request message for the ListTables method.
+ * 
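+ *
+ * A minimal construction sketch, not generated code; the resource names are
+ * placeholders and the call that would send this request is assumed and
+ * elided:
+ * <pre>{@code
+ * ListTablesRequest request =
+ *     ListTablesRequest.newBuilder()
+ *         .setParent(
+ *             "projects/my-project/locations/us/catalogs/my-catalog/databases/my-db")
+ *         .setPageSize(100) // optional; values above 1000 are coerced to 1000
+ *         .setPageToken("") // empty or unset for the first page
+ *         .build();
+ * }</pre>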
+ * + * Protobuf type {@code google.cloud.bigquery.biglake.v1alpha1.ListTablesRequest} + */ +public final class ListTablesRequest extends com.google.protobuf.GeneratedMessageV3 + implements + // @@protoc_insertion_point(message_implements:google.cloud.bigquery.biglake.v1alpha1.ListTablesRequest) + ListTablesRequestOrBuilder { + private static final long serialVersionUID = 0L; + // Use ListTablesRequest.newBuilder() to construct. + private ListTablesRequest(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + + private ListTablesRequest() { + parent_ = ""; + pageToken_ = ""; + } + + @java.lang.Override + @SuppressWarnings({"unused"}) + protected java.lang.Object newInstance(UnusedPrivateParameter unused) { + return new ListTablesRequest(); + } + + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet getUnknownFields() { + return this.unknownFields; + } + + public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { + return com.google.cloud.bigquery.biglake.v1alpha1.MetastoreProto + .internal_static_google_cloud_bigquery_biglake_v1alpha1_ListTablesRequest_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return com.google.cloud.bigquery.biglake.v1alpha1.MetastoreProto + .internal_static_google_cloud_bigquery_biglake_v1alpha1_ListTablesRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + com.google.cloud.bigquery.biglake.v1alpha1.ListTablesRequest.class, + com.google.cloud.bigquery.biglake.v1alpha1.ListTablesRequest.Builder.class); + } + + public static final int PARENT_FIELD_NUMBER = 1; + + @SuppressWarnings("serial") + private volatile java.lang.Object parent_ = ""; + /** + * + * + *
+   * Required. The parent, which owns this collection of tables.
+   * Format:
+   * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}
+   * 
+ * + * + * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @return The parent. + */ + @java.lang.Override + public java.lang.String getParent() { + java.lang.Object ref = parent_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + parent_ = s; + return s; + } + } + /** + * + * + *
+   * Required. The parent, which owns this collection of tables.
+   * Format:
+   * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}
+   * 
+ * + * + * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @return The bytes for parent. + */ + @java.lang.Override + public com.google.protobuf.ByteString getParentBytes() { + java.lang.Object ref = parent_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); + parent_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int PAGE_SIZE_FIELD_NUMBER = 2; + private int pageSize_ = 0; + /** + * + * + *
+   * The maximum number of tables to return. The service may return fewer than
+   * this value.
+   * If unspecified, at most 50 tables will be returned.
+   * The maximum value is 1000; values above 1000 will be coerced to 1000.
+   * 
+ * + * int32 page_size = 2; + * + * @return The pageSize. + */ + @java.lang.Override + public int getPageSize() { + return pageSize_; + } + + public static final int PAGE_TOKEN_FIELD_NUMBER = 3; + + @SuppressWarnings("serial") + private volatile java.lang.Object pageToken_ = ""; + /** + * + * + *
+   * A page token, received from a previous `ListTables` call.
+   * Provide this to retrieve the subsequent page.
+   * When paginating, all other parameters provided to `ListTables` must match
+   * the call that provided the page token.
+   * 
+ * + * string page_token = 3; + * + * @return The pageToken. + */ + @java.lang.Override + public java.lang.String getPageToken() { + java.lang.Object ref = pageToken_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + pageToken_ = s; + return s; + } + } + /** + * + * + *
+   * A page token, received from a previous `ListTables` call.
+   * Provide this to retrieve the subsequent page.
+   * When paginating, all other parameters provided to `ListTables` must match
+   * the call that provided the page token.
+   * 
+ * + * string page_token = 3; + * + * @return The bytes for pageToken. + */ + @java.lang.Override + public com.google.protobuf.ByteString getPageTokenBytes() { + java.lang.Object ref = pageToken_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); + pageToken_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + private byte memoizedIsInitialized = -1; + + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_); + } + if (pageSize_ != 0) { + output.writeInt32(2, pageSize_); + } + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 3, pageToken_); + } + getUnknownFields().writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_); + } + if (pageSize_ != 0) { + size += com.google.protobuf.CodedOutputStream.computeInt32Size(2, pageSize_); + } + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, pageToken_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof com.google.cloud.bigquery.biglake.v1alpha1.ListTablesRequest)) { + return super.equals(obj); + } + com.google.cloud.bigquery.biglake.v1alpha1.ListTablesRequest other = + (com.google.cloud.bigquery.biglake.v1alpha1.ListTablesRequest) obj; + + if (!getParent().equals(other.getParent())) return false; + if (getPageSize() != other.getPageSize()) return false; + if (!getPageToken().equals(other.getPageToken())) return false; + if (!getUnknownFields().equals(other.getUnknownFields())) return false; + return true; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + hash = (37 * hash) + PARENT_FIELD_NUMBER; + hash = (53 * hash) + getParent().hashCode(); + hash = (37 * hash) + PAGE_SIZE_FIELD_NUMBER; + hash = (53 * hash) + getPageSize(); + hash = (37 * hash) + PAGE_TOKEN_FIELD_NUMBER; + hash = (53 * hash) + getPageToken().hashCode(); + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.ListTablesRequest parseFrom( + java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.ListTablesRequest parseFrom( + java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws 
com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.ListTablesRequest parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.ListTablesRequest parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.ListTablesRequest parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.ListTablesRequest parseFrom( + byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.ListTablesRequest parseFrom( + java.io.InputStream input) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.ListTablesRequest parseFrom( + java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException( + PARSER, input, extensionRegistry); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.ListTablesRequest parseDelimitedFrom( + java.io.InputStream input) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.ListTablesRequest parseDelimitedFrom( + java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( + PARSER, input, extensionRegistry); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.ListTablesRequest parseFrom( + com.google.protobuf.CodedInputStream input) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.ListTablesRequest parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException( + PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { + return newBuilder(); + } + + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + + public static Builder newBuilder( + com.google.cloud.bigquery.biglake.v1alpha1.ListTablesRequest prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * + * + *
+   * Request message for the ListTables method.
+   * 
+ * + * Protobuf type {@code google.cloud.bigquery.biglake.v1alpha1.ListTablesRequest} + */ + public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder + implements + // @@protoc_insertion_point(builder_implements:google.cloud.bigquery.biglake.v1alpha1.ListTablesRequest) + com.google.cloud.bigquery.biglake.v1alpha1.ListTablesRequestOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { + return com.google.cloud.bigquery.biglake.v1alpha1.MetastoreProto + .internal_static_google_cloud_bigquery_biglake_v1alpha1_ListTablesRequest_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return com.google.cloud.bigquery.biglake.v1alpha1.MetastoreProto + .internal_static_google_cloud_bigquery_biglake_v1alpha1_ListTablesRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + com.google.cloud.bigquery.biglake.v1alpha1.ListTablesRequest.class, + com.google.cloud.bigquery.biglake.v1alpha1.ListTablesRequest.Builder.class); + } + + // Construct using com.google.cloud.bigquery.biglake.v1alpha1.ListTablesRequest.newBuilder() + private Builder() {} + + private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + } + + @java.lang.Override + public Builder clear() { + super.clear(); + bitField0_ = 0; + parent_ = ""; + pageSize_ = 0; + pageToken_ = ""; + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { + return com.google.cloud.bigquery.biglake.v1alpha1.MetastoreProto + .internal_static_google_cloud_bigquery_biglake_v1alpha1_ListTablesRequest_descriptor; + } + + @java.lang.Override + public com.google.cloud.bigquery.biglake.v1alpha1.ListTablesRequest + getDefaultInstanceForType() { + return com.google.cloud.bigquery.biglake.v1alpha1.ListTablesRequest.getDefaultInstance(); + } + + @java.lang.Override + public com.google.cloud.bigquery.biglake.v1alpha1.ListTablesRequest build() { + com.google.cloud.bigquery.biglake.v1alpha1.ListTablesRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public com.google.cloud.bigquery.biglake.v1alpha1.ListTablesRequest buildPartial() { + com.google.cloud.bigquery.biglake.v1alpha1.ListTablesRequest result = + new com.google.cloud.bigquery.biglake.v1alpha1.ListTablesRequest(this); + if (bitField0_ != 0) { + buildPartial0(result); + } + onBuilt(); + return result; + } + + private void buildPartial0( + com.google.cloud.bigquery.biglake.v1alpha1.ListTablesRequest result) { + int from_bitField0_ = bitField0_; + if (((from_bitField0_ & 0x00000001) != 0)) { + result.parent_ = parent_; + } + if (((from_bitField0_ & 0x00000002) != 0)) { + result.pageSize_ = pageSize_; + } + if (((from_bitField0_ & 0x00000004) != 0)) { + result.pageToken_ = pageToken_; + } + } + + @java.lang.Override + public Builder clone() { + return super.clone(); + } + + @java.lang.Override + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { + return super.setField(field, value); + } + + @java.lang.Override + public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { + return super.clearField(field); + } + + @java.lang.Override + public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return super.clearOneof(oneof); + } + + 
@java.lang.Override + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { + return super.setRepeatedField(field, index, value); + } + + @java.lang.Override + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { + return super.addRepeatedField(field, value); + } + + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof com.google.cloud.bigquery.biglake.v1alpha1.ListTablesRequest) { + return mergeFrom((com.google.cloud.bigquery.biglake.v1alpha1.ListTablesRequest) other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(com.google.cloud.bigquery.biglake.v1alpha1.ListTablesRequest other) { + if (other + == com.google.cloud.bigquery.biglake.v1alpha1.ListTablesRequest.getDefaultInstance()) + return this; + if (!other.getParent().isEmpty()) { + parent_ = other.parent_; + bitField0_ |= 0x00000001; + onChanged(); + } + if (other.getPageSize() != 0) { + setPageSize(other.getPageSize()); + } + if (!other.getPageToken().isEmpty()) { + pageToken_ = other.pageToken_; + bitField0_ |= 0x00000004; + onChanged(); + } + this.mergeUnknownFields(other.getUnknownFields()); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 10: + { + parent_ = input.readStringRequireUtf8(); + bitField0_ |= 0x00000001; + break; + } // case 10 + case 16: + { + pageSize_ = input.readInt32(); + bitField0_ |= 0x00000002; + break; + } // case 16 + case 26: + { + pageToken_ = input.readStringRequireUtf8(); + bitField0_ |= 0x00000004; + break; + } // case 26 + default: + { + if (!super.parseUnknownField(input, extensionRegistry, tag)) { + done = true; // was an endgroup tag + } + break; + } // default: + } // switch (tag) + } // while (!done) + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.unwrapIOException(); + } finally { + onChanged(); + } // finally + return this; + } + + private int bitField0_; + + private java.lang.Object parent_ = ""; + /** + * + * + *
+     * Required. The parent, which owns this collection of tables.
+     * Format:
+     * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}
+     * 
+ * + * + * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @return The parent. + */ + public java.lang.String getParent() { + java.lang.Object ref = parent_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + parent_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + * + * + *
+     * Required. The parent, which owns this collection of tables.
+     * Format:
+     * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}
+     * 
+ * + * + * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @return The bytes for parent. + */ + public com.google.protobuf.ByteString getParentBytes() { + java.lang.Object ref = parent_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); + parent_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * + * + *
+     * Required. The parent, which owns this collection of tables.
+     * Format:
+     * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}
+     * 
+ * + * + * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @param value The parent to set. + * @return This builder for chaining. + */ + public Builder setParent(java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + parent_ = value; + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + /** + * + * + *
+     * Required. The parent, which owns this collection of tables.
+     * Format:
+     * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}
+     * 
+ * + * + * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @return This builder for chaining. + */ + public Builder clearParent() { + parent_ = getDefaultInstance().getParent(); + bitField0_ = (bitField0_ & ~0x00000001); + onChanged(); + return this; + } + /** + * + * + *
+     * Required. The parent, which owns this collection of tables.
+     * Format:
+     * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}
+     * 
+ * + * + * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @param value The bytes for parent to set. + * @return This builder for chaining. + */ + public Builder setParentBytes(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + parent_ = value; + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + + private int pageSize_; + /** + * + * + *
+     * The maximum number of tables to return. The service may return fewer than
+     * this value.
+     * If unspecified, at most 50 tables will be returned.
+     * The maximum value is 1000; values above 1000 will be coerced to 1000.
+     * 
+ * + * int32 page_size = 2; + * + * @return The pageSize. + */ + @java.lang.Override + public int getPageSize() { + return pageSize_; + } + /** + * + * + *
+     * The maximum number of tables to return. The service may return fewer than
+     * this value.
+     * If unspecified, at most 50 tables will be returned.
+     * The maximum value is 1000; values above 1000 will be coerced to 1000.
+     * 
+ * + * int32 page_size = 2; + * + * @param value The pageSize to set. + * @return This builder for chaining. + */ + public Builder setPageSize(int value) { + + pageSize_ = value; + bitField0_ |= 0x00000002; + onChanged(); + return this; + } + /** + * + * + *
+     * The maximum number of tables to return. The service may return fewer than
+     * this value.
+     * If unspecified, at most 50 tables will be returned.
+     * The maximum value is 1000; values above 1000 will be coerced to 1000.
+     * 
+ * + * int32 page_size = 2; + * + * @return This builder for chaining. + */ + public Builder clearPageSize() { + bitField0_ = (bitField0_ & ~0x00000002); + pageSize_ = 0; + onChanged(); + return this; + } + + private java.lang.Object pageToken_ = ""; + /** + * + * + *
+     * A page token, received from a previous `ListTables` call.
+     * Provide this to retrieve the subsequent page.
+     * When paginating, all other parameters provided to `ListTables` must match
+     * the call that provided the page token.
+     * 
+ * + * string page_token = 3; + * + * @return The pageToken. + */ + public java.lang.String getPageToken() { + java.lang.Object ref = pageToken_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + pageToken_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + * + * + *
+     * A page token, received from a previous `ListTables` call.
+     * Provide this to retrieve the subsequent page.
+     * When paginating, all other parameters provided to `ListTables` must match
+     * the call that provided the page token.
+     * 
+ * + * string page_token = 3; + * + * @return The bytes for pageToken. + */ + public com.google.protobuf.ByteString getPageTokenBytes() { + java.lang.Object ref = pageToken_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); + pageToken_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * + * + *
+     * A page token, received from a previous `ListTables` call.
+     * Provide this to retrieve the subsequent page.
+     * When paginating, all other parameters provided to `ListTables` must match
+     * the call that provided the page token.
+     * 
+ * + * string page_token = 3; + * + * @param value The pageToken to set. + * @return This builder for chaining. + */ + public Builder setPageToken(java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + pageToken_ = value; + bitField0_ |= 0x00000004; + onChanged(); + return this; + } + /** + * + * + *
+     * A page token, received from a previous `ListTables` call.
+     * Provide this to retrieve the subsequent page.
+     * When paginating, all other parameters provided to `ListTables` must match
+     * the call that provided the page token.
+     * 
+ * + * string page_token = 3; + * + * @return This builder for chaining. + */ + public Builder clearPageToken() { + pageToken_ = getDefaultInstance().getPageToken(); + bitField0_ = (bitField0_ & ~0x00000004); + onChanged(); + return this; + } + /** + * + * + *
+     * A page token, received from a previous `ListTables` call.
+     * Provide this to retrieve the subsequent page.
+     * When paginating, all other parameters provided to `ListTables` must match
+     * the call that provided the page token.
+     * 
+ * + * string page_token = 3; + * + * @param value The bytes for pageToken to set. + * @return This builder for chaining. + */ + public Builder setPageTokenBytes(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + pageToken_ = value; + bitField0_ |= 0x00000004; + onChanged(); + return this; + } + + @java.lang.Override + public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + // @@protoc_insertion_point(builder_scope:google.cloud.bigquery.biglake.v1alpha1.ListTablesRequest) + } + + // @@protoc_insertion_point(class_scope:google.cloud.bigquery.biglake.v1alpha1.ListTablesRequest) + private static final com.google.cloud.bigquery.biglake.v1alpha1.ListTablesRequest + DEFAULT_INSTANCE; + + static { + DEFAULT_INSTANCE = new com.google.cloud.bigquery.biglake.v1alpha1.ListTablesRequest(); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.ListTablesRequest getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + @java.lang.Override + public ListTablesRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + Builder builder = newBuilder(); + try { + builder.mergeFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(builder.buildPartial()); + } catch (com.google.protobuf.UninitializedMessageException e) { + throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException(e) + .setUnfinishedMessage(builder.buildPartial()); + } + return builder.buildPartial(); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public com.google.cloud.bigquery.biglake.v1alpha1.ListTablesRequest getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } +} diff --git a/java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/ListTablesRequestOrBuilder.java b/java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/ListTablesRequestOrBuilder.java new file mode 100644 index 000000000000..744f1256e0b9 --- /dev/null +++ b/java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/ListTablesRequestOrBuilder.java @@ -0,0 +1,105 @@ +/* + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: google/cloud/bigquery/biglake/v1alpha1/metastore.proto + +package com.google.cloud.bigquery.biglake.v1alpha1; + +public interface ListTablesRequestOrBuilder + extends + // @@protoc_insertion_point(interface_extends:google.cloud.bigquery.biglake.v1alpha1.ListTablesRequest) + com.google.protobuf.MessageOrBuilder { + + /** + * + * + *
+   * Required. The parent, which owns this collection of tables.
+   * Format:
+   * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}
+   * 
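+   *
+   * <p>Illustrative only: a parent string in this format can be produced with the
+   * generated {@code DatabaseName} helper in this package (the project, location,
+   * catalog, and database IDs below are placeholders):
+   * <pre>{@code
+   * String parent =
+   *     DatabaseName.of("my-project", "us", "my-catalog", "my-database").toString();
+   * }</pre>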
+ * + * + * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @return The parent. + */ + java.lang.String getParent(); + /** + * + * + *
+   * Required. The parent, which owns this collection of tables.
+   * Format:
+   * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}
+   * 
+ * + * + * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } + * + * + * @return The bytes for parent. + */ + com.google.protobuf.ByteString getParentBytes(); + + /** + * + * + *
+   * The maximum number of tables to return. The service may return fewer than
+   * this value.
+   * If unspecified, at most 50 tables will be returned.
+   * The maximum value is 1000; values above 1000 will be coerced to 1000.
+   * 
+ * + * int32 page_size = 2; + * + * @return The pageSize. + */ + int getPageSize(); + + /** + * + * + *
+   * A page token, received from a previous `ListTables` call.
+   * Provide this to retrieve the subsequent page.
+   * When paginating, all other parameters provided to `ListTables` must match
+   * the call that provided the page token.
+   * 
+ * + * string page_token = 3; + * + * @return The pageToken. + */ + java.lang.String getPageToken(); + /** + * + * + *
+   * A page token, received from a previous `ListTables` call.
+   * Provide this to retrieve the subsequent page.
+   * When paginating, all other parameters provided to `ListTables` must match
+   * the call that provided the page token.
+   * 
+ * + * string page_token = 3; + * + * @return The bytes for pageToken. + */ + com.google.protobuf.ByteString getPageTokenBytes(); +} diff --git a/java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/ListTablesResponse.java b/java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/ListTablesResponse.java new file mode 100644 index 000000000000..3655f8a59ce3 --- /dev/null +++ b/java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/ListTablesResponse.java @@ -0,0 +1,1137 @@ +/* + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: google/cloud/bigquery/biglake/v1alpha1/metastore.proto + +package com.google.cloud.bigquery.biglake.v1alpha1; + +/** + * + * + *
+ * Response message for the ListTables method.
+ * 
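+ *
+ * <p>Illustrative sketch of the pagination contract documented on
+ * {@code ListTablesRequest} (the parent value is a placeholder, and the response is
+ * assumed to have been obtained from the BigLake metastore service, for example via
+ * the generated client in google-cloud-biglake):
+ * <pre>{@code
+ * ListTablesRequest request =
+ *     ListTablesRequest.newBuilder()
+ *         .setParent("projects/my-project/locations/us/catalogs/my-catalog/databases/my-db")
+ *         .setPageSize(100)
+ *         .build();
+ * ListTablesResponse response = ...; // returned by the service for the request above
+ * for (Table table : response.getTablesList()) {
+ *   // process each table
+ * }
+ * if (!response.getNextPageToken().isEmpty()) {
+ *   // A non-empty next_page_token means another page can be requested.
+ *   request = request.toBuilder().setPageToken(response.getNextPageToken()).build();
+ * }
+ * }</pre>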
+ * + * Protobuf type {@code google.cloud.bigquery.biglake.v1alpha1.ListTablesResponse} + */ +public final class ListTablesResponse extends com.google.protobuf.GeneratedMessageV3 + implements + // @@protoc_insertion_point(message_implements:google.cloud.bigquery.biglake.v1alpha1.ListTablesResponse) + ListTablesResponseOrBuilder { + private static final long serialVersionUID = 0L; + // Use ListTablesResponse.newBuilder() to construct. + private ListTablesResponse(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + + private ListTablesResponse() { + tables_ = java.util.Collections.emptyList(); + nextPageToken_ = ""; + } + + @java.lang.Override + @SuppressWarnings({"unused"}) + protected java.lang.Object newInstance(UnusedPrivateParameter unused) { + return new ListTablesResponse(); + } + + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet getUnknownFields() { + return this.unknownFields; + } + + public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { + return com.google.cloud.bigquery.biglake.v1alpha1.MetastoreProto + .internal_static_google_cloud_bigquery_biglake_v1alpha1_ListTablesResponse_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return com.google.cloud.bigquery.biglake.v1alpha1.MetastoreProto + .internal_static_google_cloud_bigquery_biglake_v1alpha1_ListTablesResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + com.google.cloud.bigquery.biglake.v1alpha1.ListTablesResponse.class, + com.google.cloud.bigquery.biglake.v1alpha1.ListTablesResponse.Builder.class); + } + + public static final int TABLES_FIELD_NUMBER = 1; + + @SuppressWarnings("serial") + private java.util.List tables_; + /** + * + * + *
+   * The tables from the specified database.
+   * 
+ * + * repeated .google.cloud.bigquery.biglake.v1alpha1.Table tables = 1; + */ + @java.lang.Override + public java.util.List getTablesList() { + return tables_; + } + /** + * + * + *
+   * The tables from the specified database.
+   * 
+ * + * repeated .google.cloud.bigquery.biglake.v1alpha1.Table tables = 1; + */ + @java.lang.Override + public java.util.List + getTablesOrBuilderList() { + return tables_; + } + /** + * + * + *
+   * The tables from the specified database.
+   * 
+ * + * repeated .google.cloud.bigquery.biglake.v1alpha1.Table tables = 1; + */ + @java.lang.Override + public int getTablesCount() { + return tables_.size(); + } + /** + * + * + *
+   * The tables from the specified database.
+   * 
+ * + * repeated .google.cloud.bigquery.biglake.v1alpha1.Table tables = 1; + */ + @java.lang.Override + public com.google.cloud.bigquery.biglake.v1alpha1.Table getTables(int index) { + return tables_.get(index); + } + /** + * + * + *
+   * The tables from the specified database.
+   * 
+ * + * repeated .google.cloud.bigquery.biglake.v1alpha1.Table tables = 1; + */ + @java.lang.Override + public com.google.cloud.bigquery.biglake.v1alpha1.TableOrBuilder getTablesOrBuilder(int index) { + return tables_.get(index); + } + + public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2; + + @SuppressWarnings("serial") + private volatile java.lang.Object nextPageToken_ = ""; + /** + * + * + *
+   * A token, which can be sent as `page_token` to retrieve the next page.
+   * If this field is omitted, there are no subsequent pages.
+   * 
+ * + * string next_page_token = 2; + * + * @return The nextPageToken. + */ + @java.lang.Override + public java.lang.String getNextPageToken() { + java.lang.Object ref = nextPageToken_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + nextPageToken_ = s; + return s; + } + } + /** + * + * + *
+   * A token, which can be sent as `page_token` to retrieve the next page.
+   * If this field is omitted, there are no subsequent pages.
+   * 
+ * + * string next_page_token = 2; + * + * @return The bytes for nextPageToken. + */ + @java.lang.Override + public com.google.protobuf.ByteString getNextPageTokenBytes() { + java.lang.Object ref = nextPageToken_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); + nextPageToken_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + private byte memoizedIsInitialized = -1; + + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { + for (int i = 0; i < tables_.size(); i++) { + output.writeMessage(1, tables_.get(i)); + } + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_); + } + getUnknownFields().writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + for (int i = 0; i < tables_.size(); i++) { + size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, tables_.get(i)); + } + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof com.google.cloud.bigquery.biglake.v1alpha1.ListTablesResponse)) { + return super.equals(obj); + } + com.google.cloud.bigquery.biglake.v1alpha1.ListTablesResponse other = + (com.google.cloud.bigquery.biglake.v1alpha1.ListTablesResponse) obj; + + if (!getTablesList().equals(other.getTablesList())) return false; + if (!getNextPageToken().equals(other.getNextPageToken())) return false; + if (!getUnknownFields().equals(other.getUnknownFields())) return false; + return true; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + if (getTablesCount() > 0) { + hash = (37 * hash) + TABLES_FIELD_NUMBER; + hash = (53 * hash) + getTablesList().hashCode(); + } + hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER; + hash = (53 * hash) + getNextPageToken().hashCode(); + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.ListTablesResponse parseFrom( + java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.ListTablesResponse parseFrom( + java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.ListTablesResponse parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return 
PARSER.parseFrom(data); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.ListTablesResponse parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.ListTablesResponse parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.ListTablesResponse parseFrom( + byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.ListTablesResponse parseFrom( + java.io.InputStream input) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.ListTablesResponse parseFrom( + java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException( + PARSER, input, extensionRegistry); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.ListTablesResponse parseDelimitedFrom( + java.io.InputStream input) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.ListTablesResponse parseDelimitedFrom( + java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( + PARSER, input, extensionRegistry); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.ListTablesResponse parseFrom( + com.google.protobuf.CodedInputStream input) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.ListTablesResponse parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException( + PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { + return newBuilder(); + } + + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + + public static Builder newBuilder( + com.google.cloud.bigquery.biglake.v1alpha1.ListTablesResponse prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * + * + *
+   * Response message for the ListTables method.
+   * 
+ * + * Protobuf type {@code google.cloud.bigquery.biglake.v1alpha1.ListTablesResponse} + */ + public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder + implements + // @@protoc_insertion_point(builder_implements:google.cloud.bigquery.biglake.v1alpha1.ListTablesResponse) + com.google.cloud.bigquery.biglake.v1alpha1.ListTablesResponseOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { + return com.google.cloud.bigquery.biglake.v1alpha1.MetastoreProto + .internal_static_google_cloud_bigquery_biglake_v1alpha1_ListTablesResponse_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return com.google.cloud.bigquery.biglake.v1alpha1.MetastoreProto + .internal_static_google_cloud_bigquery_biglake_v1alpha1_ListTablesResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + com.google.cloud.bigquery.biglake.v1alpha1.ListTablesResponse.class, + com.google.cloud.bigquery.biglake.v1alpha1.ListTablesResponse.Builder.class); + } + + // Construct using com.google.cloud.bigquery.biglake.v1alpha1.ListTablesResponse.newBuilder() + private Builder() {} + + private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + } + + @java.lang.Override + public Builder clear() { + super.clear(); + bitField0_ = 0; + if (tablesBuilder_ == null) { + tables_ = java.util.Collections.emptyList(); + } else { + tables_ = null; + tablesBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + nextPageToken_ = ""; + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { + return com.google.cloud.bigquery.biglake.v1alpha1.MetastoreProto + .internal_static_google_cloud_bigquery_biglake_v1alpha1_ListTablesResponse_descriptor; + } + + @java.lang.Override + public com.google.cloud.bigquery.biglake.v1alpha1.ListTablesResponse + getDefaultInstanceForType() { + return com.google.cloud.bigquery.biglake.v1alpha1.ListTablesResponse.getDefaultInstance(); + } + + @java.lang.Override + public com.google.cloud.bigquery.biglake.v1alpha1.ListTablesResponse build() { + com.google.cloud.bigquery.biglake.v1alpha1.ListTablesResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public com.google.cloud.bigquery.biglake.v1alpha1.ListTablesResponse buildPartial() { + com.google.cloud.bigquery.biglake.v1alpha1.ListTablesResponse result = + new com.google.cloud.bigquery.biglake.v1alpha1.ListTablesResponse(this); + buildPartialRepeatedFields(result); + if (bitField0_ != 0) { + buildPartial0(result); + } + onBuilt(); + return result; + } + + private void buildPartialRepeatedFields( + com.google.cloud.bigquery.biglake.v1alpha1.ListTablesResponse result) { + if (tablesBuilder_ == null) { + if (((bitField0_ & 0x00000001) != 0)) { + tables_ = java.util.Collections.unmodifiableList(tables_); + bitField0_ = (bitField0_ & ~0x00000001); + } + result.tables_ = tables_; + } else { + result.tables_ = tablesBuilder_.build(); + } + } + + private void buildPartial0( + com.google.cloud.bigquery.biglake.v1alpha1.ListTablesResponse result) { + int from_bitField0_ = bitField0_; + if (((from_bitField0_ & 0x00000002) != 0)) { + result.nextPageToken_ = nextPageToken_; + } + } + + @java.lang.Override + public Builder clone() { + return super.clone(); + } + + @java.lang.Override + 
public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { + return super.setField(field, value); + } + + @java.lang.Override + public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { + return super.clearField(field); + } + + @java.lang.Override + public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return super.clearOneof(oneof); + } + + @java.lang.Override + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { + return super.setRepeatedField(field, index, value); + } + + @java.lang.Override + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { + return super.addRepeatedField(field, value); + } + + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof com.google.cloud.bigquery.biglake.v1alpha1.ListTablesResponse) { + return mergeFrom((com.google.cloud.bigquery.biglake.v1alpha1.ListTablesResponse) other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(com.google.cloud.bigquery.biglake.v1alpha1.ListTablesResponse other) { + if (other + == com.google.cloud.bigquery.biglake.v1alpha1.ListTablesResponse.getDefaultInstance()) + return this; + if (tablesBuilder_ == null) { + if (!other.tables_.isEmpty()) { + if (tables_.isEmpty()) { + tables_ = other.tables_; + bitField0_ = (bitField0_ & ~0x00000001); + } else { + ensureTablesIsMutable(); + tables_.addAll(other.tables_); + } + onChanged(); + } + } else { + if (!other.tables_.isEmpty()) { + if (tablesBuilder_.isEmpty()) { + tablesBuilder_.dispose(); + tablesBuilder_ = null; + tables_ = other.tables_; + bitField0_ = (bitField0_ & ~0x00000001); + tablesBuilder_ = + com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders + ? 
getTablesFieldBuilder() + : null; + } else { + tablesBuilder_.addAllMessages(other.tables_); + } + } + } + if (!other.getNextPageToken().isEmpty()) { + nextPageToken_ = other.nextPageToken_; + bitField0_ |= 0x00000002; + onChanged(); + } + this.mergeUnknownFields(other.getUnknownFields()); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 10: + { + com.google.cloud.bigquery.biglake.v1alpha1.Table m = + input.readMessage( + com.google.cloud.bigquery.biglake.v1alpha1.Table.parser(), + extensionRegistry); + if (tablesBuilder_ == null) { + ensureTablesIsMutable(); + tables_.add(m); + } else { + tablesBuilder_.addMessage(m); + } + break; + } // case 10 + case 18: + { + nextPageToken_ = input.readStringRequireUtf8(); + bitField0_ |= 0x00000002; + break; + } // case 18 + default: + { + if (!super.parseUnknownField(input, extensionRegistry, tag)) { + done = true; // was an endgroup tag + } + break; + } // default: + } // switch (tag) + } // while (!done) + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.unwrapIOException(); + } finally { + onChanged(); + } // finally + return this; + } + + private int bitField0_; + + private java.util.List tables_ = + java.util.Collections.emptyList(); + + private void ensureTablesIsMutable() { + if (!((bitField0_ & 0x00000001) != 0)) { + tables_ = + new java.util.ArrayList(tables_); + bitField0_ |= 0x00000001; + } + } + + private com.google.protobuf.RepeatedFieldBuilderV3< + com.google.cloud.bigquery.biglake.v1alpha1.Table, + com.google.cloud.bigquery.biglake.v1alpha1.Table.Builder, + com.google.cloud.bigquery.biglake.v1alpha1.TableOrBuilder> + tablesBuilder_; + + /** + * + * + *
+     * The tables from the specified database.
+     * 
+ * + * repeated .google.cloud.bigquery.biglake.v1alpha1.Table tables = 1; + */ + public java.util.List getTablesList() { + if (tablesBuilder_ == null) { + return java.util.Collections.unmodifiableList(tables_); + } else { + return tablesBuilder_.getMessageList(); + } + } + /** + * + * + *
+     * The tables from the specified database.
+     * 
+ * + * repeated .google.cloud.bigquery.biglake.v1alpha1.Table tables = 1; + */ + public int getTablesCount() { + if (tablesBuilder_ == null) { + return tables_.size(); + } else { + return tablesBuilder_.getCount(); + } + } + /** + * + * + *
+     * The tables from the specified database.
+     * 
+ * + * repeated .google.cloud.bigquery.biglake.v1alpha1.Table tables = 1; + */ + public com.google.cloud.bigquery.biglake.v1alpha1.Table getTables(int index) { + if (tablesBuilder_ == null) { + return tables_.get(index); + } else { + return tablesBuilder_.getMessage(index); + } + } + /** + * + * + *
+     * The tables from the specified database.
+     * 
+ * + * repeated .google.cloud.bigquery.biglake.v1alpha1.Table tables = 1; + */ + public Builder setTables(int index, com.google.cloud.bigquery.biglake.v1alpha1.Table value) { + if (tablesBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureTablesIsMutable(); + tables_.set(index, value); + onChanged(); + } else { + tablesBuilder_.setMessage(index, value); + } + return this; + } + /** + * + * + *
+     * The tables from the specified database.
+     * 
+ * + * repeated .google.cloud.bigquery.biglake.v1alpha1.Table tables = 1; + */ + public Builder setTables( + int index, com.google.cloud.bigquery.biglake.v1alpha1.Table.Builder builderForValue) { + if (tablesBuilder_ == null) { + ensureTablesIsMutable(); + tables_.set(index, builderForValue.build()); + onChanged(); + } else { + tablesBuilder_.setMessage(index, builderForValue.build()); + } + return this; + } + /** + * + * + *
+     * The tables from the specified database.
+     * 
+ * + * repeated .google.cloud.bigquery.biglake.v1alpha1.Table tables = 1; + */ + public Builder addTables(com.google.cloud.bigquery.biglake.v1alpha1.Table value) { + if (tablesBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureTablesIsMutable(); + tables_.add(value); + onChanged(); + } else { + tablesBuilder_.addMessage(value); + } + return this; + } + /** + * + * + *
+     * The tables from the specified database.
+     * 
+ * + * repeated .google.cloud.bigquery.biglake.v1alpha1.Table tables = 1; + */ + public Builder addTables(int index, com.google.cloud.bigquery.biglake.v1alpha1.Table value) { + if (tablesBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureTablesIsMutable(); + tables_.add(index, value); + onChanged(); + } else { + tablesBuilder_.addMessage(index, value); + } + return this; + } + /** + * + * + *
+     * The tables from the specified database.
+     * 
+ * + * repeated .google.cloud.bigquery.biglake.v1alpha1.Table tables = 1; + */ + public Builder addTables( + com.google.cloud.bigquery.biglake.v1alpha1.Table.Builder builderForValue) { + if (tablesBuilder_ == null) { + ensureTablesIsMutable(); + tables_.add(builderForValue.build()); + onChanged(); + } else { + tablesBuilder_.addMessage(builderForValue.build()); + } + return this; + } + /** + * + * + *
+     * The tables from the specified database.
+     * 
+ * + * repeated .google.cloud.bigquery.biglake.v1alpha1.Table tables = 1; + */ + public Builder addTables( + int index, com.google.cloud.bigquery.biglake.v1alpha1.Table.Builder builderForValue) { + if (tablesBuilder_ == null) { + ensureTablesIsMutable(); + tables_.add(index, builderForValue.build()); + onChanged(); + } else { + tablesBuilder_.addMessage(index, builderForValue.build()); + } + return this; + } + /** + * + * + *
+     * The tables from the specified database.
+     * 
+ * + * repeated .google.cloud.bigquery.biglake.v1alpha1.Table tables = 1; + */ + public Builder addAllTables( + java.lang.Iterable values) { + if (tablesBuilder_ == null) { + ensureTablesIsMutable(); + com.google.protobuf.AbstractMessageLite.Builder.addAll(values, tables_); + onChanged(); + } else { + tablesBuilder_.addAllMessages(values); + } + return this; + } + /** + * + * + *
+     * The tables from the specified database.
+     * 
+ * + * repeated .google.cloud.bigquery.biglake.v1alpha1.Table tables = 1; + */ + public Builder clearTables() { + if (tablesBuilder_ == null) { + tables_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000001); + onChanged(); + } else { + tablesBuilder_.clear(); + } + return this; + } + /** + * + * + *
+     * The tables from the specified database.
+     * 
+ * + * repeated .google.cloud.bigquery.biglake.v1alpha1.Table tables = 1; + */ + public Builder removeTables(int index) { + if (tablesBuilder_ == null) { + ensureTablesIsMutable(); + tables_.remove(index); + onChanged(); + } else { + tablesBuilder_.remove(index); + } + return this; + } + /** + * + * + *
+     * The tables from the specified database.
+     * 
+ * + * repeated .google.cloud.bigquery.biglake.v1alpha1.Table tables = 1; + */ + public com.google.cloud.bigquery.biglake.v1alpha1.Table.Builder getTablesBuilder(int index) { + return getTablesFieldBuilder().getBuilder(index); + } + /** + * + * + *
+     * The tables from the specified database.
+     * 
+ * + * repeated .google.cloud.bigquery.biglake.v1alpha1.Table tables = 1; + */ + public com.google.cloud.bigquery.biglake.v1alpha1.TableOrBuilder getTablesOrBuilder(int index) { + if (tablesBuilder_ == null) { + return tables_.get(index); + } else { + return tablesBuilder_.getMessageOrBuilder(index); + } + } + /** + * + * + *
+     * The tables from the specified database.
+     * 
+ * + * repeated .google.cloud.bigquery.biglake.v1alpha1.Table tables = 1; + */ + public java.util.List + getTablesOrBuilderList() { + if (tablesBuilder_ != null) { + return tablesBuilder_.getMessageOrBuilderList(); + } else { + return java.util.Collections.unmodifiableList(tables_); + } + } + /** + * + * + *
+     * The tables from the specified database.
+     * 
+ * + * repeated .google.cloud.bigquery.biglake.v1alpha1.Table tables = 1; + */ + public com.google.cloud.bigquery.biglake.v1alpha1.Table.Builder addTablesBuilder() { + return getTablesFieldBuilder() + .addBuilder(com.google.cloud.bigquery.biglake.v1alpha1.Table.getDefaultInstance()); + } + /** + * + * + *
+     * The tables from the specified database.
+     * 
+ * + * repeated .google.cloud.bigquery.biglake.v1alpha1.Table tables = 1; + */ + public com.google.cloud.bigquery.biglake.v1alpha1.Table.Builder addTablesBuilder(int index) { + return getTablesFieldBuilder() + .addBuilder(index, com.google.cloud.bigquery.biglake.v1alpha1.Table.getDefaultInstance()); + } + /** + * + * + *
+     * The tables from the specified database.
+     * 
+ * + * repeated .google.cloud.bigquery.biglake.v1alpha1.Table tables = 1; + */ + public java.util.List + getTablesBuilderList() { + return getTablesFieldBuilder().getBuilderList(); + } + + private com.google.protobuf.RepeatedFieldBuilderV3< + com.google.cloud.bigquery.biglake.v1alpha1.Table, + com.google.cloud.bigquery.biglake.v1alpha1.Table.Builder, + com.google.cloud.bigquery.biglake.v1alpha1.TableOrBuilder> + getTablesFieldBuilder() { + if (tablesBuilder_ == null) { + tablesBuilder_ = + new com.google.protobuf.RepeatedFieldBuilderV3< + com.google.cloud.bigquery.biglake.v1alpha1.Table, + com.google.cloud.bigquery.biglake.v1alpha1.Table.Builder, + com.google.cloud.bigquery.biglake.v1alpha1.TableOrBuilder>( + tables_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean()); + tables_ = null; + } + return tablesBuilder_; + } + + private java.lang.Object nextPageToken_ = ""; + /** + * + * + *
+     * A token, which can be sent as `page_token` to retrieve the next page.
+     * If this field is omitted, there are no subsequent pages.
+     * 
+ * + * string next_page_token = 2; + * + * @return The nextPageToken. + */ + public java.lang.String getNextPageToken() { + java.lang.Object ref = nextPageToken_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + nextPageToken_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + * + * + *
+     * A token, which can be sent as `page_token` to retrieve the next page.
+     * If this field is omitted, there are no subsequent pages.
+     * 
+ * + * string next_page_token = 2; + * + * @return The bytes for nextPageToken. + */ + public com.google.protobuf.ByteString getNextPageTokenBytes() { + java.lang.Object ref = nextPageToken_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); + nextPageToken_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * + * + *
+     * A token, which can be sent as `page_token` to retrieve the next page.
+     * If this field is omitted, there are no subsequent pages.
+     * 
+ * + * string next_page_token = 2; + * + * @param value The nextPageToken to set. + * @return This builder for chaining. + */ + public Builder setNextPageToken(java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + nextPageToken_ = value; + bitField0_ |= 0x00000002; + onChanged(); + return this; + } + /** + * + * + *
+     * A token, which can be sent as `page_token` to retrieve the next page.
+     * If this field is omitted, there are no subsequent pages.
+     * 
+ * + * string next_page_token = 2; + * + * @return This builder for chaining. + */ + public Builder clearNextPageToken() { + nextPageToken_ = getDefaultInstance().getNextPageToken(); + bitField0_ = (bitField0_ & ~0x00000002); + onChanged(); + return this; + } + /** + * + * + *
+     * A token, which can be sent as `page_token` to retrieve the next page.
+     * If this field is omitted, there are no subsequent pages.
+     * 
+ * + * string next_page_token = 2; + * + * @param value The bytes for nextPageToken to set. + * @return This builder for chaining. + */ + public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + nextPageToken_ = value; + bitField0_ |= 0x00000002; + onChanged(); + return this; + } + + @java.lang.Override + public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + // @@protoc_insertion_point(builder_scope:google.cloud.bigquery.biglake.v1alpha1.ListTablesResponse) + } + + // @@protoc_insertion_point(class_scope:google.cloud.bigquery.biglake.v1alpha1.ListTablesResponse) + private static final com.google.cloud.bigquery.biglake.v1alpha1.ListTablesResponse + DEFAULT_INSTANCE; + + static { + DEFAULT_INSTANCE = new com.google.cloud.bigquery.biglake.v1alpha1.ListTablesResponse(); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.ListTablesResponse getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + @java.lang.Override + public ListTablesResponse parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + Builder builder = newBuilder(); + try { + builder.mergeFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(builder.buildPartial()); + } catch (com.google.protobuf.UninitializedMessageException e) { + throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException(e) + .setUnfinishedMessage(builder.buildPartial()); + } + return builder.buildPartial(); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public com.google.cloud.bigquery.biglake.v1alpha1.ListTablesResponse getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } +} diff --git a/java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/ListTablesResponseOrBuilder.java b/java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/ListTablesResponseOrBuilder.java new file mode 100644 index 000000000000..d7f3415e0aee --- /dev/null +++ b/java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/ListTablesResponseOrBuilder.java @@ -0,0 +1,104 @@ +/* + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: google/cloud/bigquery/biglake/v1alpha1/metastore.proto + +package com.google.cloud.bigquery.biglake.v1alpha1; + +public interface ListTablesResponseOrBuilder + extends + // @@protoc_insertion_point(interface_extends:google.cloud.bigquery.biglake.v1alpha1.ListTablesResponse) + com.google.protobuf.MessageOrBuilder { + + /** + * + * + *
+   * The tables from the specified database.
+   * 
+ * + * repeated .google.cloud.bigquery.biglake.v1alpha1.Table tables = 1; + */ + java.util.List getTablesList(); + /** + * + * + *
+   * The tables from the specified database.
+   * 
+ * + * repeated .google.cloud.bigquery.biglake.v1alpha1.Table tables = 1; + */ + com.google.cloud.bigquery.biglake.v1alpha1.Table getTables(int index); + /** + * + * + *
+   * The tables from the specified database.
+   * 
+ * + * repeated .google.cloud.bigquery.biglake.v1alpha1.Table tables = 1; + */ + int getTablesCount(); + /** + * + * + *
+   * The tables from the specified database.
+   * 
+ * + * repeated .google.cloud.bigquery.biglake.v1alpha1.Table tables = 1; + */ + java.util.List + getTablesOrBuilderList(); + /** + * + * + *
+   * The tables from the specified database.
+   * 
+ * + * repeated .google.cloud.bigquery.biglake.v1alpha1.Table tables = 1; + */ + com.google.cloud.bigquery.biglake.v1alpha1.TableOrBuilder getTablesOrBuilder(int index); + + /** + * + * + *
+   * A token, which can be sent as `page_token` to retrieve the next page.
+   * If this field is omitted, there are no subsequent pages.
+   * 
+ * + * string next_page_token = 2; + * + * @return The nextPageToken. + */ + java.lang.String getNextPageToken(); + /** + * + * + *
+   * A token, which can be sent as `page_token` to retrieve the next page.
+   * If this field is omitted, there are no subsequent pages.
+   * 
+ * + * string next_page_token = 2; + * + * @return The bytes for nextPageToken. + */ + com.google.protobuf.ByteString getNextPageTokenBytes(); +} diff --git a/java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/LocationName.java b/java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/LocationName.java new file mode 100644 index 000000000000..a13ec13530c8 --- /dev/null +++ b/java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/LocationName.java @@ -0,0 +1,192 @@ +/* + * Copyright 2022 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.biglake.v1alpha1; + +import com.google.api.pathtemplate.PathTemplate; +import com.google.api.resourcenames.ResourceName; +import com.google.common.base.Preconditions; +import com.google.common.collect.ImmutableMap; +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import javax.annotation.Generated; + +// AUTO-GENERATED DOCUMENTATION AND CLASS. +@Generated("by gapic-generator-java") +public class LocationName implements ResourceName { + private static final PathTemplate PROJECT_LOCATION = + PathTemplate.createWithoutUrlEncoding("projects/{project}/locations/{location}"); + private volatile Map fieldValuesMap; + private final String project; + private final String location; + + @Deprecated + protected LocationName() { + project = null; + location = null; + } + + private LocationName(Builder builder) { + project = Preconditions.checkNotNull(builder.getProject()); + location = Preconditions.checkNotNull(builder.getLocation()); + } + + public String getProject() { + return project; + } + + public String getLocation() { + return location; + } + + public static Builder newBuilder() { + return new Builder(); + } + + public Builder toBuilder() { + return new Builder(this); + } + + public static LocationName of(String project, String location) { + return newBuilder().setProject(project).setLocation(location).build(); + } + + public static String format(String project, String location) { + return newBuilder().setProject(project).setLocation(location).build().toString(); + } + + public static LocationName parse(String formattedString) { + if (formattedString.isEmpty()) { + return null; + } + Map matchMap = + PROJECT_LOCATION.validatedMatch( + formattedString, "LocationName.parse: formattedString not in valid format"); + return of(matchMap.get("project"), matchMap.get("location")); + } + + public static List parseList(List formattedStrings) { + List list = new ArrayList<>(formattedStrings.size()); + for (String formattedString : formattedStrings) { + list.add(parse(formattedString)); + } + return list; + } + + public static List toStringList(List values) { + List list = new ArrayList<>(values.size()); + for (LocationName value : values) { + if (value == null) { + list.add(""); + } else { + 
list.add(value.toString()); + } + } + return list; + } + + public static boolean isParsableFrom(String formattedString) { + return PROJECT_LOCATION.matches(formattedString); + } + + @Override + public Map getFieldValuesMap() { + if (fieldValuesMap == null) { + synchronized (this) { + if (fieldValuesMap == null) { + ImmutableMap.Builder fieldMapBuilder = ImmutableMap.builder(); + if (project != null) { + fieldMapBuilder.put("project", project); + } + if (location != null) { + fieldMapBuilder.put("location", location); + } + fieldValuesMap = fieldMapBuilder.build(); + } + } + } + return fieldValuesMap; + } + + public String getFieldValue(String fieldName) { + return getFieldValuesMap().get(fieldName); + } + + @Override + public String toString() { + return PROJECT_LOCATION.instantiate("project", project, "location", location); + } + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o != null || getClass() == o.getClass()) { + LocationName that = ((LocationName) o); + return Objects.equals(this.project, that.project) + && Objects.equals(this.location, that.location); + } + return false; + } + + @Override + public int hashCode() { + int h = 1; + h *= 1000003; + h ^= Objects.hashCode(project); + h *= 1000003; + h ^= Objects.hashCode(location); + return h; + } + + /** Builder for projects/{project}/locations/{location}. */ + public static class Builder { + private String project; + private String location; + + protected Builder() {} + + public String getProject() { + return project; + } + + public String getLocation() { + return location; + } + + public Builder setProject(String project) { + this.project = project; + return this; + } + + public Builder setLocation(String location) { + this.location = location; + return this; + } + + private Builder(LocationName locationName) { + this.project = locationName.project; + this.location = locationName.location; + } + + public LocationName build() { + return new LocationName(this); + } + } +} diff --git a/java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/Lock.java b/java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/Lock.java new file mode 100644 index 000000000000..557de4d9d237 --- /dev/null +++ b/java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/Lock.java @@ -0,0 +1,1875 @@ +/* + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: google/cloud/bigquery/biglake/v1alpha1/metastore.proto + +package com.google.cloud.bigquery.biglake.v1alpha1; + +/** + * + * + *
+ * Represents a lock.
+ * 
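+ *
+ * <p>Illustrative lifecycle sketch (resource names and IDs are placeholders; the calls
+ * assume the generated {@code MetastoreServiceClient} in the accompanying
+ * google-cloud-biglake module):
+ * <pre>{@code
+ * try (MetastoreServiceClient client = MetastoreServiceClient.create()) {
+ *   Lock lock =
+ *       client.createLock(
+ *           "projects/my-project/locations/us/catalogs/my-catalog/databases/my-db",
+ *           Lock.newBuilder().setTableId("my-table").build());
+ *   // A newly created lock may be WAITING; CheckLock reports when it is ACQUIRED.
+ *   Lock.State state = client.checkLock(lock.getName()).getState();
+ * }
+ * }</pre>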
+ * + * Protobuf type {@code google.cloud.bigquery.biglake.v1alpha1.Lock} + */ +public final class Lock extends com.google.protobuf.GeneratedMessageV3 + implements + // @@protoc_insertion_point(message_implements:google.cloud.bigquery.biglake.v1alpha1.Lock) + LockOrBuilder { + private static final long serialVersionUID = 0L; + // Use Lock.newBuilder() to construct. + private Lock(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + + private Lock() { + name_ = ""; + type_ = 0; + state_ = 0; + } + + @java.lang.Override + @SuppressWarnings({"unused"}) + protected java.lang.Object newInstance(UnusedPrivateParameter unused) { + return new Lock(); + } + + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet getUnknownFields() { + return this.unknownFields; + } + + public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { + return com.google.cloud.bigquery.biglake.v1alpha1.MetastoreProto + .internal_static_google_cloud_bigquery_biglake_v1alpha1_Lock_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return com.google.cloud.bigquery.biglake.v1alpha1.MetastoreProto + .internal_static_google_cloud_bigquery_biglake_v1alpha1_Lock_fieldAccessorTable + .ensureFieldAccessorsInitialized( + com.google.cloud.bigquery.biglake.v1alpha1.Lock.class, + com.google.cloud.bigquery.biglake.v1alpha1.Lock.Builder.class); + } + + /** + * + * + *
+   * The lock type.
+   * 
+ * + * Protobuf enum {@code google.cloud.bigquery.biglake.v1alpha1.Lock.Type} + */ + public enum Type implements com.google.protobuf.ProtocolMessageEnum { + /** + * + * + *
+     * The type is not specified.
+     * 
+ * + * TYPE_UNSPECIFIED = 0; + */ + TYPE_UNSPECIFIED(0), + /** + * + * + *
+     * An exclusive lock prevents another lock from being created on the same
+     * resource.
+     * 
+ * + * EXCLUSIVE = 1; + */ + EXCLUSIVE(1), + UNRECOGNIZED(-1), + ; + + /** + * + * + *
+     * The type is not specified.
+     * 
+ * + * TYPE_UNSPECIFIED = 0; + */ + public static final int TYPE_UNSPECIFIED_VALUE = 0; + /** + * + * + *
+     * An exclusive lock prevents another lock from being created on the same
+     * resource.
+     * 
+ * + * EXCLUSIVE = 1; + */ + public static final int EXCLUSIVE_VALUE = 1; + + public final int getNumber() { + if (this == UNRECOGNIZED) { + throw new java.lang.IllegalArgumentException( + "Can't get the number of an unknown enum value."); + } + return value; + } + + /** + * @param value The numeric wire value of the corresponding enum entry. + * @return The enum associated with the given numeric wire value. + * @deprecated Use {@link #forNumber(int)} instead. + */ + @java.lang.Deprecated + public static Type valueOf(int value) { + return forNumber(value); + } + + /** + * @param value The numeric wire value of the corresponding enum entry. + * @return The enum associated with the given numeric wire value. + */ + public static Type forNumber(int value) { + switch (value) { + case 0: + return TYPE_UNSPECIFIED; + case 1: + return EXCLUSIVE; + default: + return null; + } + } + + public static com.google.protobuf.Internal.EnumLiteMap internalGetValueMap() { + return internalValueMap; + } + + private static final com.google.protobuf.Internal.EnumLiteMap internalValueMap = + new com.google.protobuf.Internal.EnumLiteMap() { + public Type findValueByNumber(int number) { + return Type.forNumber(number); + } + }; + + public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() { + if (this == UNRECOGNIZED) { + throw new java.lang.IllegalStateException( + "Can't get the descriptor of an unrecognized enum value."); + } + return getDescriptor().getValues().get(ordinal()); + } + + public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() { + return getDescriptor(); + } + + public static final com.google.protobuf.Descriptors.EnumDescriptor getDescriptor() { + return com.google.cloud.bigquery.biglake.v1alpha1.Lock.getDescriptor().getEnumTypes().get(0); + } + + private static final Type[] VALUES = values(); + + public static Type valueOf(com.google.protobuf.Descriptors.EnumValueDescriptor desc) { + if (desc.getType() != getDescriptor()) { + throw new java.lang.IllegalArgumentException("EnumValueDescriptor is not for this type."); + } + if (desc.getIndex() == -1) { + return UNRECOGNIZED; + } + return VALUES[desc.getIndex()]; + } + + private final int value; + + private Type(int value) { + this.value = value; + } + + // @@protoc_insertion_point(enum_scope:google.cloud.bigquery.biglake.v1alpha1.Lock.Type) + } + + /** + * + * + *
+   * The lock state.
+   * 
+ * + * Protobuf enum {@code google.cloud.bigquery.biglake.v1alpha1.Lock.State} + */ + public enum State implements com.google.protobuf.ProtocolMessageEnum { + /** + * + * + *
+     * The state is not specified.
+     * 
+ * + * STATE_UNSPECIFIED = 0; + */ + STATE_UNSPECIFIED(0), + /** + * + * + *
+     * Waiting to acquire the lock.
+     * 
+ * + * WAITING = 1; + */ + WAITING(1), + /** + * + * + *
+     * The lock has been acquired.
+     * 
+ * + * ACQUIRED = 2; + */ + ACQUIRED(2), + UNRECOGNIZED(-1), + ; + + /** + * + * + *
+     * The state is not specified.
+     * 
+ * + * STATE_UNSPECIFIED = 0; + */ + public static final int STATE_UNSPECIFIED_VALUE = 0; + /** + * + * + *
+     * Waiting to acquire the lock.
+     * 
+ * + * WAITING = 1; + */ + public static final int WAITING_VALUE = 1; + /** + * + * + *
+     * The lock has been acquired.
+     * 
+ * + * ACQUIRED = 2; + */ + public static final int ACQUIRED_VALUE = 2; + + public final int getNumber() { + if (this == UNRECOGNIZED) { + throw new java.lang.IllegalArgumentException( + "Can't get the number of an unknown enum value."); + } + return value; + } + + /** + * @param value The numeric wire value of the corresponding enum entry. + * @return The enum associated with the given numeric wire value. + * @deprecated Use {@link #forNumber(int)} instead. + */ + @java.lang.Deprecated + public static State valueOf(int value) { + return forNumber(value); + } + + /** + * @param value The numeric wire value of the corresponding enum entry. + * @return The enum associated with the given numeric wire value. + */ + public static State forNumber(int value) { + switch (value) { + case 0: + return STATE_UNSPECIFIED; + case 1: + return WAITING; + case 2: + return ACQUIRED; + default: + return null; + } + } + + public static com.google.protobuf.Internal.EnumLiteMap internalGetValueMap() { + return internalValueMap; + } + + private static final com.google.protobuf.Internal.EnumLiteMap internalValueMap = + new com.google.protobuf.Internal.EnumLiteMap() { + public State findValueByNumber(int number) { + return State.forNumber(number); + } + }; + + public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() { + if (this == UNRECOGNIZED) { + throw new java.lang.IllegalStateException( + "Can't get the descriptor of an unrecognized enum value."); + } + return getDescriptor().getValues().get(ordinal()); + } + + public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() { + return getDescriptor(); + } + + public static final com.google.protobuf.Descriptors.EnumDescriptor getDescriptor() { + return com.google.cloud.bigquery.biglake.v1alpha1.Lock.getDescriptor().getEnumTypes().get(1); + } + + private static final State[] VALUES = values(); + + public static State valueOf(com.google.protobuf.Descriptors.EnumValueDescriptor desc) { + if (desc.getType() != getDescriptor()) { + throw new java.lang.IllegalArgumentException("EnumValueDescriptor is not for this type."); + } + if (desc.getIndex() == -1) { + return UNRECOGNIZED; + } + return VALUES[desc.getIndex()]; + } + + private final int value; + + private State(int value) { + this.value = value; + } + + // @@protoc_insertion_point(enum_scope:google.cloud.bigquery.biglake.v1alpha1.Lock.State) + } + + private int resourcesCase_ = 0; + private java.lang.Object resources_; + + public enum ResourcesCase + implements + com.google.protobuf.Internal.EnumLite, + com.google.protobuf.AbstractMessage.InternalOneOfEnum { + TABLE_ID(5), + RESOURCES_NOT_SET(0); + private final int value; + + private ResourcesCase(int value) { + this.value = value; + } + /** + * @param value The number of the enum to look for. + * @return The enum associated with the given number. + * @deprecated Use {@link #forNumber(int)} instead. + */ + @java.lang.Deprecated + public static ResourcesCase valueOf(int value) { + return forNumber(value); + } + + public static ResourcesCase forNumber(int value) { + switch (value) { + case 5: + return TABLE_ID; + case 0: + return RESOURCES_NOT_SET; + default: + return null; + } + } + + public int getNumber() { + return this.value; + } + }; + + public ResourcesCase getResourcesCase() { + return ResourcesCase.forNumber(resourcesCase_); + } + + public static final int TABLE_ID_FIELD_NUMBER = 5; + /** + * + * + *
+   * The table ID (not fully qualified name) in the same database that the
+   * lock will be created on. The table must exist.
+   * 
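+   *
+   * <p>Because {@code table_id} is part of the {@code resources} oneof, callers are
+   * expected to check {@code hasTableId()} before reading it, for example:
+   * <pre>{@code
+   * if (lock.hasTableId()) {
+   *   String tableId = lock.getTableId();
+   * }
+   * }</pre>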
+ * + * string table_id = 5; + * + * @return Whether the tableId field is set. + */ + public boolean hasTableId() { + return resourcesCase_ == 5; + } + /** + * + * + *
+   * The table ID (not fully qualified name) in the same database that the
+   * lock will be created on. The table must exist.
+   * 
+ * + * string table_id = 5; + * + * @return The tableId. + */ + public java.lang.String getTableId() { + java.lang.Object ref = ""; + if (resourcesCase_ == 5) { + ref = resources_; + } + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (resourcesCase_ == 5) { + resources_ = s; + } + return s; + } + } + /** + * + * + *
+   * The table ID (not fully qualified name) in the same database that the
+   * lock will be created on. The table must exist.
+   * 
+ * + * string table_id = 5; + * + * @return The bytes for tableId. + */ + public com.google.protobuf.ByteString getTableIdBytes() { + java.lang.Object ref = ""; + if (resourcesCase_ == 5) { + ref = resources_; + } + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); + if (resourcesCase_ == 5) { + resources_ = b; + } + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int NAME_FIELD_NUMBER = 1; + + @SuppressWarnings("serial") + private volatile java.lang.Object name_ = ""; + /** + * + * + *
+   * Output only. The resource name.
+   * Format:
+   * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}/locks/{lock_id}
+   * 
+ * + * + * string name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... } + * + * + * @return The name. + */ + @java.lang.Override + public java.lang.String getName() { + java.lang.Object ref = name_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + name_ = s; + return s; + } + } + /** + * + * + *
+   * Output only. The resource name.
+   * Format:
+   * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}/locks/{lock_id}
+   * 
+ * + * + * string name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... } + * + * + * @return The bytes for name. + */ + @java.lang.Override + public com.google.protobuf.ByteString getNameBytes() { + java.lang.Object ref = name_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); + name_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int CREATE_TIME_FIELD_NUMBER = 2; + private com.google.protobuf.Timestamp createTime_; + /** + * + * + *
+   * Output only. The creation time of the lock.
+   * 
+ * + * .google.protobuf.Timestamp create_time = 2 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + * + * @return Whether the createTime field is set. + */ + @java.lang.Override + public boolean hasCreateTime() { + return createTime_ != null; + } + /** + * + * + *
+   * Output only. The creation time of the lock.
+   * 
+ * + * .google.protobuf.Timestamp create_time = 2 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + * + * @return The createTime. + */ + @java.lang.Override + public com.google.protobuf.Timestamp getCreateTime() { + return createTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : createTime_; + } + /** + * + * + *
+   * Output only. The creation time of the lock.
+   * 
+ * + * .google.protobuf.Timestamp create_time = 2 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + @java.lang.Override + public com.google.protobuf.TimestampOrBuilder getCreateTimeOrBuilder() { + return createTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : createTime_; + } + + public static final int TYPE_FIELD_NUMBER = 3; + private int type_ = 0; + /** + * + * + *
+   * The lock type.
+   * 
+ * + * .google.cloud.bigquery.biglake.v1alpha1.Lock.Type type = 3; + * + * @return The enum numeric value on the wire for type. + */ + @java.lang.Override + public int getTypeValue() { + return type_; + } + /** + * + * + *
+   * The lock type.
+   * 
+ * + * .google.cloud.bigquery.biglake.v1alpha1.Lock.Type type = 3; + * + * @return The type. + */ + @java.lang.Override + public com.google.cloud.bigquery.biglake.v1alpha1.Lock.Type getType() { + com.google.cloud.bigquery.biglake.v1alpha1.Lock.Type result = + com.google.cloud.bigquery.biglake.v1alpha1.Lock.Type.forNumber(type_); + return result == null + ? com.google.cloud.bigquery.biglake.v1alpha1.Lock.Type.UNRECOGNIZED + : result; + } + + public static final int STATE_FIELD_NUMBER = 4; + private int state_ = 0; + /** + * + * + *
+   * Output only. The lock state.
+   * 
+ * + * + * .google.cloud.bigquery.biglake.v1alpha1.Lock.State state = 4 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + * + * @return The enum numeric value on the wire for state. + */ + @java.lang.Override + public int getStateValue() { + return state_; + } + /** + * + * + *
+   * Output only. The lock state.
+   * 
+ * + * + * .google.cloud.bigquery.biglake.v1alpha1.Lock.State state = 4 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + * + * @return The state. + */ + @java.lang.Override + public com.google.cloud.bigquery.biglake.v1alpha1.Lock.State getState() { + com.google.cloud.bigquery.biglake.v1alpha1.Lock.State result = + com.google.cloud.bigquery.biglake.v1alpha1.Lock.State.forNumber(state_); + return result == null + ? com.google.cloud.bigquery.biglake.v1alpha1.Lock.State.UNRECOGNIZED + : result; + } + + private byte memoizedIsInitialized = -1; + + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 1, name_); + } + if (createTime_ != null) { + output.writeMessage(2, getCreateTime()); + } + if (type_ + != com.google.cloud.bigquery.biglake.v1alpha1.Lock.Type.TYPE_UNSPECIFIED.getNumber()) { + output.writeEnum(3, type_); + } + if (state_ + != com.google.cloud.bigquery.biglake.v1alpha1.Lock.State.STATE_UNSPECIFIED.getNumber()) { + output.writeEnum(4, state_); + } + if (resourcesCase_ == 5) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 5, resources_); + } + getUnknownFields().writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, name_); + } + if (createTime_ != null) { + size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getCreateTime()); + } + if (type_ + != com.google.cloud.bigquery.biglake.v1alpha1.Lock.Type.TYPE_UNSPECIFIED.getNumber()) { + size += com.google.protobuf.CodedOutputStream.computeEnumSize(3, type_); + } + if (state_ + != com.google.cloud.bigquery.biglake.v1alpha1.Lock.State.STATE_UNSPECIFIED.getNumber()) { + size += com.google.protobuf.CodedOutputStream.computeEnumSize(4, state_); + } + if (resourcesCase_ == 5) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(5, resources_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof com.google.cloud.bigquery.biglake.v1alpha1.Lock)) { + return super.equals(obj); + } + com.google.cloud.bigquery.biglake.v1alpha1.Lock other = + (com.google.cloud.bigquery.biglake.v1alpha1.Lock) obj; + + if (!getName().equals(other.getName())) return false; + if (hasCreateTime() != other.hasCreateTime()) return false; + if (hasCreateTime()) { + if (!getCreateTime().equals(other.getCreateTime())) return false; + } + if (type_ != other.type_) return false; + if (state_ != other.state_) return false; + if (!getResourcesCase().equals(other.getResourcesCase())) return false; + switch (resourcesCase_) { + case 5: + if (!getTableId().equals(other.getTableId())) return false; + break; + case 0: + default: + } + if (!getUnknownFields().equals(other.getUnknownFields())) return false; + return true; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode 
!= 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + hash = (37 * hash) + NAME_FIELD_NUMBER; + hash = (53 * hash) + getName().hashCode(); + if (hasCreateTime()) { + hash = (37 * hash) + CREATE_TIME_FIELD_NUMBER; + hash = (53 * hash) + getCreateTime().hashCode(); + } + hash = (37 * hash) + TYPE_FIELD_NUMBER; + hash = (53 * hash) + type_; + hash = (37 * hash) + STATE_FIELD_NUMBER; + hash = (53 * hash) + state_; + switch (resourcesCase_) { + case 5: + hash = (37 * hash) + TABLE_ID_FIELD_NUMBER; + hash = (53 * hash) + getTableId().hashCode(); + break; + case 0: + default: + } + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.Lock parseFrom(java.nio.ByteBuffer data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.Lock parseFrom( + java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.Lock parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.Lock parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.Lock parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.Lock parseFrom( + byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.Lock parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.Lock parseFrom( + java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException( + PARSER, input, extensionRegistry); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.Lock parseDelimitedFrom( + java.io.InputStream input) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.Lock parseDelimitedFrom( + java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( + PARSER, input, extensionRegistry); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.Lock parseFrom( + com.google.protobuf.CodedInputStream input) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); + } + + public static 
com.google.cloud.bigquery.biglake.v1alpha1.Lock parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException( + PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { + return newBuilder(); + } + + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + + public static Builder newBuilder(com.google.cloud.bigquery.biglake.v1alpha1.Lock prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * + * + *
+   * Represents a lock.
+   * 
+ * + * Protobuf type {@code google.cloud.bigquery.biglake.v1alpha1.Lock} + */ + public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder + implements + // @@protoc_insertion_point(builder_implements:google.cloud.bigquery.biglake.v1alpha1.Lock) + com.google.cloud.bigquery.biglake.v1alpha1.LockOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { + return com.google.cloud.bigquery.biglake.v1alpha1.MetastoreProto + .internal_static_google_cloud_bigquery_biglake_v1alpha1_Lock_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return com.google.cloud.bigquery.biglake.v1alpha1.MetastoreProto + .internal_static_google_cloud_bigquery_biglake_v1alpha1_Lock_fieldAccessorTable + .ensureFieldAccessorsInitialized( + com.google.cloud.bigquery.biglake.v1alpha1.Lock.class, + com.google.cloud.bigquery.biglake.v1alpha1.Lock.Builder.class); + } + + // Construct using com.google.cloud.bigquery.biglake.v1alpha1.Lock.newBuilder() + private Builder() {} + + private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + } + + @java.lang.Override + public Builder clear() { + super.clear(); + bitField0_ = 0; + name_ = ""; + createTime_ = null; + if (createTimeBuilder_ != null) { + createTimeBuilder_.dispose(); + createTimeBuilder_ = null; + } + type_ = 0; + state_ = 0; + resourcesCase_ = 0; + resources_ = null; + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { + return com.google.cloud.bigquery.biglake.v1alpha1.MetastoreProto + .internal_static_google_cloud_bigquery_biglake_v1alpha1_Lock_descriptor; + } + + @java.lang.Override + public com.google.cloud.bigquery.biglake.v1alpha1.Lock getDefaultInstanceForType() { + return com.google.cloud.bigquery.biglake.v1alpha1.Lock.getDefaultInstance(); + } + + @java.lang.Override + public com.google.cloud.bigquery.biglake.v1alpha1.Lock build() { + com.google.cloud.bigquery.biglake.v1alpha1.Lock result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public com.google.cloud.bigquery.biglake.v1alpha1.Lock buildPartial() { + com.google.cloud.bigquery.biglake.v1alpha1.Lock result = + new com.google.cloud.bigquery.biglake.v1alpha1.Lock(this); + if (bitField0_ != 0) { + buildPartial0(result); + } + buildPartialOneofs(result); + onBuilt(); + return result; + } + + private void buildPartial0(com.google.cloud.bigquery.biglake.v1alpha1.Lock result) { + int from_bitField0_ = bitField0_; + if (((from_bitField0_ & 0x00000002) != 0)) { + result.name_ = name_; + } + if (((from_bitField0_ & 0x00000004) != 0)) { + result.createTime_ = createTimeBuilder_ == null ? 
createTime_ : createTimeBuilder_.build(); + } + if (((from_bitField0_ & 0x00000008) != 0)) { + result.type_ = type_; + } + if (((from_bitField0_ & 0x00000010) != 0)) { + result.state_ = state_; + } + } + + private void buildPartialOneofs(com.google.cloud.bigquery.biglake.v1alpha1.Lock result) { + result.resourcesCase_ = resourcesCase_; + result.resources_ = this.resources_; + } + + @java.lang.Override + public Builder clone() { + return super.clone(); + } + + @java.lang.Override + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { + return super.setField(field, value); + } + + @java.lang.Override + public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { + return super.clearField(field); + } + + @java.lang.Override + public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return super.clearOneof(oneof); + } + + @java.lang.Override + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { + return super.setRepeatedField(field, index, value); + } + + @java.lang.Override + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { + return super.addRepeatedField(field, value); + } + + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof com.google.cloud.bigquery.biglake.v1alpha1.Lock) { + return mergeFrom((com.google.cloud.bigquery.biglake.v1alpha1.Lock) other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(com.google.cloud.bigquery.biglake.v1alpha1.Lock other) { + if (other == com.google.cloud.bigquery.biglake.v1alpha1.Lock.getDefaultInstance()) + return this; + if (!other.getName().isEmpty()) { + name_ = other.name_; + bitField0_ |= 0x00000002; + onChanged(); + } + if (other.hasCreateTime()) { + mergeCreateTime(other.getCreateTime()); + } + if (other.type_ != 0) { + setTypeValue(other.getTypeValue()); + } + if (other.state_ != 0) { + setStateValue(other.getStateValue()); + } + switch (other.getResourcesCase()) { + case TABLE_ID: + { + resourcesCase_ = 5; + resources_ = other.resources_; + onChanged(); + break; + } + case RESOURCES_NOT_SET: + { + break; + } + } + this.mergeUnknownFields(other.getUnknownFields()); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 10: + { + name_ = input.readStringRequireUtf8(); + bitField0_ |= 0x00000002; + break; + } // case 10 + case 18: + { + input.readMessage(getCreateTimeFieldBuilder().getBuilder(), extensionRegistry); + bitField0_ |= 0x00000004; + break; + } // case 18 + case 24: + { + type_ = input.readEnum(); + bitField0_ |= 0x00000008; + break; + } // case 24 + case 32: + { + state_ = input.readEnum(); + bitField0_ |= 0x00000010; + break; + } // case 32 + case 42: + { + java.lang.String s = input.readStringRequireUtf8(); + resourcesCase_ = 5; + resources_ = s; + break; + } // case 42 + default: + { + if (!super.parseUnknownField(input, 
extensionRegistry, tag)) { + done = true; // was an endgroup tag + } + break; + } // default: + } // switch (tag) + } // while (!done) + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.unwrapIOException(); + } finally { + onChanged(); + } // finally + return this; + } + + private int resourcesCase_ = 0; + private java.lang.Object resources_; + + public ResourcesCase getResourcesCase() { + return ResourcesCase.forNumber(resourcesCase_); + } + + public Builder clearResources() { + resourcesCase_ = 0; + resources_ = null; + onChanged(); + return this; + } + + private int bitField0_; + + /** + * + * + *
+     * The table ID (not fully qualified name) in the same database that the
+     * lock will be created on. The table must exist.
+     * 
+ * + * string table_id = 5; + * + * @return Whether the tableId field is set. + */ + @java.lang.Override + public boolean hasTableId() { + return resourcesCase_ == 5; + } + /** + * + * + *
+     * The table ID (not fully qualified name) in the same database that the
+     * lock will be created on. The table must exist.
+     * 
+ * + * string table_id = 5; + * + * @return The tableId. + */ + @java.lang.Override + public java.lang.String getTableId() { + java.lang.Object ref = ""; + if (resourcesCase_ == 5) { + ref = resources_; + } + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (resourcesCase_ == 5) { + resources_ = s; + } + return s; + } else { + return (java.lang.String) ref; + } + } + /** + * + * + *
+     * The table ID (not fully qualified name) in the same database that the
+     * lock will be created on. The table must exist.
+     * 
+ * + * string table_id = 5; + * + * @return The bytes for tableId. + */ + @java.lang.Override + public com.google.protobuf.ByteString getTableIdBytes() { + java.lang.Object ref = ""; + if (resourcesCase_ == 5) { + ref = resources_; + } + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); + if (resourcesCase_ == 5) { + resources_ = b; + } + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * + * + *
+     * The table ID (not fully qualified name) in the same database that the
+     * lock will be created on. The table must exist.
+     * 
+ * + * string table_id = 5; + * + * @param value The tableId to set. + * @return This builder for chaining. + */ + public Builder setTableId(java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + resourcesCase_ = 5; + resources_ = value; + onChanged(); + return this; + } + /** + * + * + *
+     * The table ID (not fully qualified name) in the same database that the
+     * lock will be created on. The table must exist.
+     * 
+ * + * string table_id = 5; + * + * @return This builder for chaining. + */ + public Builder clearTableId() { + if (resourcesCase_ == 5) { + resourcesCase_ = 0; + resources_ = null; + onChanged(); + } + return this; + } + /** + * + * + *
+     * The table ID (not fully qualified name) in the same database that the
+     * lock will be created on. The table must exist.
+     * 
+ * + * string table_id = 5; + * + * @param value The bytes for tableId to set. + * @return This builder for chaining. + */ + public Builder setTableIdBytes(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + resourcesCase_ = 5; + resources_ = value; + onChanged(); + return this; + } + + private java.lang.Object name_ = ""; + /** + * + * + *
+     * Output only. The resource name.
+     * Format:
+     * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}/locks/{lock_id}
+     * 
+ * + * + * string name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... } + * + * + * @return The name. + */ + public java.lang.String getName() { + java.lang.Object ref = name_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + name_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + * + * + *
+     * Output only. The resource name.
+     * Format:
+     * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}/locks/{lock_id}
+     * 
+ * + * + * string name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... } + * + * + * @return The bytes for name. + */ + public com.google.protobuf.ByteString getNameBytes() { + java.lang.Object ref = name_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); + name_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * + * + *
+     * Output only. The resource name.
+     * Format:
+     * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}/locks/{lock_id}
+     * 
+ * + * + * string name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... } + * + * + * @param value The name to set. + * @return This builder for chaining. + */ + public Builder setName(java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + name_ = value; + bitField0_ |= 0x00000002; + onChanged(); + return this; + } + /** + * + * + *
+     * Output only. The resource name.
+     * Format:
+     * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}/locks/{lock_id}
+     * 
+ * + * + * string name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... } + * + * + * @return This builder for chaining. + */ + public Builder clearName() { + name_ = getDefaultInstance().getName(); + bitField0_ = (bitField0_ & ~0x00000002); + onChanged(); + return this; + } + /** + * + * + *
+     * Output only. The resource name.
+     * Format:
+     * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}/locks/{lock_id}
+     * 
+ * + * + * string name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... } + * + * + * @param value The bytes for name to set. + * @return This builder for chaining. + */ + public Builder setNameBytes(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + name_ = value; + bitField0_ |= 0x00000002; + onChanged(); + return this; + } + + private com.google.protobuf.Timestamp createTime_; + private com.google.protobuf.SingleFieldBuilderV3< + com.google.protobuf.Timestamp, + com.google.protobuf.Timestamp.Builder, + com.google.protobuf.TimestampOrBuilder> + createTimeBuilder_; + /** + * + * + *
+     * Output only. The creation time of the lock.
+     * 
+ * + * + * .google.protobuf.Timestamp create_time = 2 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + * + * @return Whether the createTime field is set. + */ + public boolean hasCreateTime() { + return ((bitField0_ & 0x00000004) != 0); + } + /** + * + * + *
+     * Output only. The creation time of the lock.
+     * 
+ * + * + * .google.protobuf.Timestamp create_time = 2 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + * + * @return The createTime. + */ + public com.google.protobuf.Timestamp getCreateTime() { + if (createTimeBuilder_ == null) { + return createTime_ == null + ? com.google.protobuf.Timestamp.getDefaultInstance() + : createTime_; + } else { + return createTimeBuilder_.getMessage(); + } + } + /** + * + * + *
+     * Output only. The creation time of the lock.
+     * 
+ * + * + * .google.protobuf.Timestamp create_time = 2 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + public Builder setCreateTime(com.google.protobuf.Timestamp value) { + if (createTimeBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + createTime_ = value; + } else { + createTimeBuilder_.setMessage(value); + } + bitField0_ |= 0x00000004; + onChanged(); + return this; + } + /** + * + * + *
+     * Output only. The creation time of the lock.
+     * 
+ * + * + * .google.protobuf.Timestamp create_time = 2 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + public Builder setCreateTime(com.google.protobuf.Timestamp.Builder builderForValue) { + if (createTimeBuilder_ == null) { + createTime_ = builderForValue.build(); + } else { + createTimeBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000004; + onChanged(); + return this; + } + /** + * + * + *
+     * Output only. The creation time of the lock.
+     * 
+ * + * + * .google.protobuf.Timestamp create_time = 2 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + public Builder mergeCreateTime(com.google.protobuf.Timestamp value) { + if (createTimeBuilder_ == null) { + if (((bitField0_ & 0x00000004) != 0) + && createTime_ != null + && createTime_ != com.google.protobuf.Timestamp.getDefaultInstance()) { + getCreateTimeBuilder().mergeFrom(value); + } else { + createTime_ = value; + } + } else { + createTimeBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000004; + onChanged(); + return this; + } + /** + * + * + *
+     * Output only. The creation time of the lock.
+     * 
+ * + * + * .google.protobuf.Timestamp create_time = 2 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + public Builder clearCreateTime() { + bitField0_ = (bitField0_ & ~0x00000004); + createTime_ = null; + if (createTimeBuilder_ != null) { + createTimeBuilder_.dispose(); + createTimeBuilder_ = null; + } + onChanged(); + return this; + } + /** + * + * + *
+     * Output only. The creation time of the lock.
+     * 
+ * + * + * .google.protobuf.Timestamp create_time = 2 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + public com.google.protobuf.Timestamp.Builder getCreateTimeBuilder() { + bitField0_ |= 0x00000004; + onChanged(); + return getCreateTimeFieldBuilder().getBuilder(); + } + /** + * + * + *
+     * Output only. The creation time of the lock.
+     * 
+ * + * + * .google.protobuf.Timestamp create_time = 2 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + public com.google.protobuf.TimestampOrBuilder getCreateTimeOrBuilder() { + if (createTimeBuilder_ != null) { + return createTimeBuilder_.getMessageOrBuilder(); + } else { + return createTime_ == null + ? com.google.protobuf.Timestamp.getDefaultInstance() + : createTime_; + } + } + /** + * + * + *
+     * Output only. The creation time of the lock.
+     * 
+ * + * + * .google.protobuf.Timestamp create_time = 2 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + private com.google.protobuf.SingleFieldBuilderV3< + com.google.protobuf.Timestamp, + com.google.protobuf.Timestamp.Builder, + com.google.protobuf.TimestampOrBuilder> + getCreateTimeFieldBuilder() { + if (createTimeBuilder_ == null) { + createTimeBuilder_ = + new com.google.protobuf.SingleFieldBuilderV3< + com.google.protobuf.Timestamp, + com.google.protobuf.Timestamp.Builder, + com.google.protobuf.TimestampOrBuilder>( + getCreateTime(), getParentForChildren(), isClean()); + createTime_ = null; + } + return createTimeBuilder_; + } + + private int type_ = 0; + /** + * + * + *
+     * The lock type.
+     * 
+ * + * .google.cloud.bigquery.biglake.v1alpha1.Lock.Type type = 3; + * + * @return The enum numeric value on the wire for type. + */ + @java.lang.Override + public int getTypeValue() { + return type_; + } + /** + * + * + *
+     * The lock type.
+     * 
+ * + * .google.cloud.bigquery.biglake.v1alpha1.Lock.Type type = 3; + * + * @param value The enum numeric value on the wire for type to set. + * @return This builder for chaining. + */ + public Builder setTypeValue(int value) { + type_ = value; + bitField0_ |= 0x00000008; + onChanged(); + return this; + } + /** + * + * + *
+     * The lock type.
+     * 
+ * + * .google.cloud.bigquery.biglake.v1alpha1.Lock.Type type = 3; + * + * @return The type. + */ + @java.lang.Override + public com.google.cloud.bigquery.biglake.v1alpha1.Lock.Type getType() { + com.google.cloud.bigquery.biglake.v1alpha1.Lock.Type result = + com.google.cloud.bigquery.biglake.v1alpha1.Lock.Type.forNumber(type_); + return result == null + ? com.google.cloud.bigquery.biglake.v1alpha1.Lock.Type.UNRECOGNIZED + : result; + } + /** + * + * + *
+     * The lock type.
+     * 
+ * + * .google.cloud.bigquery.biglake.v1alpha1.Lock.Type type = 3; + * + * @param value The type to set. + * @return This builder for chaining. + */ + public Builder setType(com.google.cloud.bigquery.biglake.v1alpha1.Lock.Type value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000008; + type_ = value.getNumber(); + onChanged(); + return this; + } + /** + * + * + *
+     * The lock type.
+     * 
+ * + * .google.cloud.bigquery.biglake.v1alpha1.Lock.Type type = 3; + * + * @return This builder for chaining. + */ + public Builder clearType() { + bitField0_ = (bitField0_ & ~0x00000008); + type_ = 0; + onChanged(); + return this; + } + + private int state_ = 0; + /** + * + * + *
+     * Output only. The lock state.
+     * 
+ * + * + * .google.cloud.bigquery.biglake.v1alpha1.Lock.State state = 4 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + * + * @return The enum numeric value on the wire for state. + */ + @java.lang.Override + public int getStateValue() { + return state_; + } + /** + * + * + *
+     * Output only. The lock state.
+     * 
+ * + * + * .google.cloud.bigquery.biglake.v1alpha1.Lock.State state = 4 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + * + * @param value The enum numeric value on the wire for state to set. + * @return This builder for chaining. + */ + public Builder setStateValue(int value) { + state_ = value; + bitField0_ |= 0x00000010; + onChanged(); + return this; + } + /** + * + * + *
+     * Output only. The lock state.
+     * 
+ * + * + * .google.cloud.bigquery.biglake.v1alpha1.Lock.State state = 4 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + * + * @return The state. + */ + @java.lang.Override + public com.google.cloud.bigquery.biglake.v1alpha1.Lock.State getState() { + com.google.cloud.bigquery.biglake.v1alpha1.Lock.State result = + com.google.cloud.bigquery.biglake.v1alpha1.Lock.State.forNumber(state_); + return result == null + ? com.google.cloud.bigquery.biglake.v1alpha1.Lock.State.UNRECOGNIZED + : result; + } + /** + * + * + *
+     * Output only. The lock state.
+     * 
+ * + * + * .google.cloud.bigquery.biglake.v1alpha1.Lock.State state = 4 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + * + * @param value The state to set. + * @return This builder for chaining. + */ + public Builder setState(com.google.cloud.bigquery.biglake.v1alpha1.Lock.State value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000010; + state_ = value.getNumber(); + onChanged(); + return this; + } + /** + * + * + *
+     * Output only. The lock state.
+     * 
+     *
+     *
+     * .google.cloud.bigquery.biglake.v1alpha1.Lock.State state = 4 [(.google.api.field_behavior) = OUTPUT_ONLY];
+     *
+     *
+     * @return This builder for chaining.
+     */
+    public Builder clearState() {
+      bitField0_ = (bitField0_ & ~0x00000010);
+      state_ = 0;
+      onChanged();
+      return this;
+    }
+
+    @java.lang.Override
+    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
+      return super.setUnknownFields(unknownFields);
+    }
+
+    @java.lang.Override
+    public final Builder mergeUnknownFields(
+        final com.google.protobuf.UnknownFieldSet unknownFields) {
+      return super.mergeUnknownFields(unknownFields);
+    }
+
+    // @@protoc_insertion_point(builder_scope:google.cloud.bigquery.biglake.v1alpha1.Lock)
+  }
+
+  // @@protoc_insertion_point(class_scope:google.cloud.bigquery.biglake.v1alpha1.Lock)
+  private static final com.google.cloud.bigquery.biglake.v1alpha1.Lock DEFAULT_INSTANCE;
+
+  static {
+    DEFAULT_INSTANCE = new com.google.cloud.bigquery.biglake.v1alpha1.Lock();
+  }
+
+  public static com.google.cloud.bigquery.biglake.v1alpha1.Lock getDefaultInstance() {
+    return DEFAULT_INSTANCE;
+  }
+
+  private static final com.google.protobuf.Parser<Lock> PARSER =
+      new com.google.protobuf.AbstractParser<Lock>() {
+        @java.lang.Override
+        public Lock parsePartialFrom(
+            com.google.protobuf.CodedInputStream input,
+            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+            throws com.google.protobuf.InvalidProtocolBufferException {
+          Builder builder = newBuilder();
+          try {
+            builder.mergeFrom(input, extensionRegistry);
+          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+            throw e.setUnfinishedMessage(builder.buildPartial());
+          } catch (com.google.protobuf.UninitializedMessageException e) {
+            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
+          } catch (java.io.IOException e) {
+            throw new com.google.protobuf.InvalidProtocolBufferException(e)
+                .setUnfinishedMessage(builder.buildPartial());
+          }
+          return builder.buildPartial();
+        }
+      };
+
+  public static com.google.protobuf.Parser<Lock> parser() {
+    return PARSER;
+  }
+
+  @java.lang.Override
+  public com.google.protobuf.Parser<Lock> getParserForType() {
+    return PARSER;
+  }
+
+  @java.lang.Override
+  public com.google.cloud.bigquery.biglake.v1alpha1.Lock getDefaultInstanceForType() {
+    return DEFAULT_INSTANCE;
+  }
+}
diff --git a/java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/LockName.java b/java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/LockName.java
new file mode 100644
index 000000000000..ee4eece3372c
--- /dev/null
+++ b/java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/LockName.java
@@ -0,0 +1,298 @@
+/*
+ * Copyright 2022 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */ + +package com.google.cloud.bigquery.biglake.v1alpha1; + +import com.google.api.pathtemplate.PathTemplate; +import com.google.api.resourcenames.ResourceName; +import com.google.common.base.Preconditions; +import com.google.common.collect.ImmutableMap; +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import javax.annotation.Generated; + +// AUTO-GENERATED DOCUMENTATION AND CLASS. +@Generated("by gapic-generator-java") +public class LockName implements ResourceName { + private static final PathTemplate PROJECT_LOCATION_CATALOG_DATABASE_LOCK = + PathTemplate.createWithoutUrlEncoding( + "projects/{project}/locations/{location}/catalogs/{catalog}/databases/{database}/locks/{lock}"); + private volatile Map fieldValuesMap; + private final String project; + private final String location; + private final String catalog; + private final String database; + private final String lock; + + @Deprecated + protected LockName() { + project = null; + location = null; + catalog = null; + database = null; + lock = null; + } + + private LockName(Builder builder) { + project = Preconditions.checkNotNull(builder.getProject()); + location = Preconditions.checkNotNull(builder.getLocation()); + catalog = Preconditions.checkNotNull(builder.getCatalog()); + database = Preconditions.checkNotNull(builder.getDatabase()); + lock = Preconditions.checkNotNull(builder.getLock()); + } + + public String getProject() { + return project; + } + + public String getLocation() { + return location; + } + + public String getCatalog() { + return catalog; + } + + public String getDatabase() { + return database; + } + + public String getLock() { + return lock; + } + + public static Builder newBuilder() { + return new Builder(); + } + + public Builder toBuilder() { + return new Builder(this); + } + + public static LockName of( + String project, String location, String catalog, String database, String lock) { + return newBuilder() + .setProject(project) + .setLocation(location) + .setCatalog(catalog) + .setDatabase(database) + .setLock(lock) + .build(); + } + + public static String format( + String project, String location, String catalog, String database, String lock) { + return newBuilder() + .setProject(project) + .setLocation(location) + .setCatalog(catalog) + .setDatabase(database) + .setLock(lock) + .build() + .toString(); + } + + public static LockName parse(String formattedString) { + if (formattedString.isEmpty()) { + return null; + } + Map matchMap = + PROJECT_LOCATION_CATALOG_DATABASE_LOCK.validatedMatch( + formattedString, "LockName.parse: formattedString not in valid format"); + return of( + matchMap.get("project"), + matchMap.get("location"), + matchMap.get("catalog"), + matchMap.get("database"), + matchMap.get("lock")); + } + + public static List parseList(List formattedStrings) { + List list = new ArrayList<>(formattedStrings.size()); + for (String formattedString : formattedStrings) { + list.add(parse(formattedString)); + } + return list; + } + + public static List toStringList(List values) { + List list = new ArrayList<>(values.size()); + for (LockName value : values) { + if (value == null) { + list.add(""); + } else { + list.add(value.toString()); + } + } + return list; + } + + public static boolean isParsableFrom(String formattedString) { + return PROJECT_LOCATION_CATALOG_DATABASE_LOCK.matches(formattedString); + } + + @Override + public Map getFieldValuesMap() { + if (fieldValuesMap == null) { + synchronized (this) { + if (fieldValuesMap == null) { + 
ImmutableMap.Builder fieldMapBuilder = ImmutableMap.builder(); + if (project != null) { + fieldMapBuilder.put("project", project); + } + if (location != null) { + fieldMapBuilder.put("location", location); + } + if (catalog != null) { + fieldMapBuilder.put("catalog", catalog); + } + if (database != null) { + fieldMapBuilder.put("database", database); + } + if (lock != null) { + fieldMapBuilder.put("lock", lock); + } + fieldValuesMap = fieldMapBuilder.build(); + } + } + } + return fieldValuesMap; + } + + public String getFieldValue(String fieldName) { + return getFieldValuesMap().get(fieldName); + } + + @Override + public String toString() { + return PROJECT_LOCATION_CATALOG_DATABASE_LOCK.instantiate( + "project", + project, + "location", + location, + "catalog", + catalog, + "database", + database, + "lock", + lock); + } + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o != null || getClass() == o.getClass()) { + LockName that = ((LockName) o); + return Objects.equals(this.project, that.project) + && Objects.equals(this.location, that.location) + && Objects.equals(this.catalog, that.catalog) + && Objects.equals(this.database, that.database) + && Objects.equals(this.lock, that.lock); + } + return false; + } + + @Override + public int hashCode() { + int h = 1; + h *= 1000003; + h ^= Objects.hashCode(project); + h *= 1000003; + h ^= Objects.hashCode(location); + h *= 1000003; + h ^= Objects.hashCode(catalog); + h *= 1000003; + h ^= Objects.hashCode(database); + h *= 1000003; + h ^= Objects.hashCode(lock); + return h; + } + + /** + * Builder for + * projects/{project}/locations/{location}/catalogs/{catalog}/databases/{database}/locks/{lock}. + */ + public static class Builder { + private String project; + private String location; + private String catalog; + private String database; + private String lock; + + protected Builder() {} + + public String getProject() { + return project; + } + + public String getLocation() { + return location; + } + + public String getCatalog() { + return catalog; + } + + public String getDatabase() { + return database; + } + + public String getLock() { + return lock; + } + + public Builder setProject(String project) { + this.project = project; + return this; + } + + public Builder setLocation(String location) { + this.location = location; + return this; + } + + public Builder setCatalog(String catalog) { + this.catalog = catalog; + return this; + } + + public Builder setDatabase(String database) { + this.database = database; + return this; + } + + public Builder setLock(String lock) { + this.lock = lock; + return this; + } + + private Builder(LockName lockName) { + this.project = lockName.project; + this.location = lockName.location; + this.catalog = lockName.catalog; + this.database = lockName.database; + this.lock = lockName.lock; + } + + public LockName build() { + return new LockName(this); + } + } +} diff --git a/java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/LockOrBuilder.java b/java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/LockOrBuilder.java new file mode 100644 index 000000000000..864b5b615d79 --- /dev/null +++ b/java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/LockOrBuilder.java @@ -0,0 +1,192 @@ +/* + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in 
compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: google/cloud/bigquery/biglake/v1alpha1/metastore.proto + +package com.google.cloud.bigquery.biglake.v1alpha1; + +public interface LockOrBuilder + extends + // @@protoc_insertion_point(interface_extends:google.cloud.bigquery.biglake.v1alpha1.Lock) + com.google.protobuf.MessageOrBuilder { + + /** + * + * + *
+   * The table ID (not fully qualified name) in the same database that the
+   * lock will be created on. The table must exist.
+   * 
+ * + * string table_id = 5; + * + * @return Whether the tableId field is set. + */ + boolean hasTableId(); + /** + * + * + *
+   * The table ID (not fully qualified name) in the same database that the
+   * lock will be created on. The table must exist.
+   * 
+ * + * string table_id = 5; + * + * @return The tableId. + */ + java.lang.String getTableId(); + /** + * + * + *
+   * The table ID (not fully qualified name) in the same database that the
+   * lock will be created on. The table must exist.
+   * 
+ * + * string table_id = 5; + * + * @return The bytes for tableId. + */ + com.google.protobuf.ByteString getTableIdBytes(); + + /** + * + * + *
+   * Output only. The resource name.
+   * Format:
+   * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}/locks/{lock_id}
+   * 
+ * + * + * string name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... } + * + * + * @return The name. + */ + java.lang.String getName(); + /** + * + * + *
+   * Output only. The resource name.
+   * Format:
+   * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}/locks/{lock_id}
+   * 
+ * + * + * string name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... } + * + * + * @return The bytes for name. + */ + com.google.protobuf.ByteString getNameBytes(); + + /** + * + * + *
+   * Output only. The creation time of the lock.
+   * 
+ * + * .google.protobuf.Timestamp create_time = 2 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + * + * @return Whether the createTime field is set. + */ + boolean hasCreateTime(); + /** + * + * + *
+   * Output only. The creation time of the lock.
+   * 
+ * + * .google.protobuf.Timestamp create_time = 2 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + * + * @return The createTime. + */ + com.google.protobuf.Timestamp getCreateTime(); + /** + * + * + *
+   * Output only. The creation time of the lock.
+   * 
+ * + * .google.protobuf.Timestamp create_time = 2 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + com.google.protobuf.TimestampOrBuilder getCreateTimeOrBuilder(); + + /** + * + * + *
+   * The lock type.
+   * 
+ * + * .google.cloud.bigquery.biglake.v1alpha1.Lock.Type type = 3; + * + * @return The enum numeric value on the wire for type. + */ + int getTypeValue(); + /** + * + * + *
+   * The lock type.
+   * 
+ * + * .google.cloud.bigquery.biglake.v1alpha1.Lock.Type type = 3; + * + * @return The type. + */ + com.google.cloud.bigquery.biglake.v1alpha1.Lock.Type getType(); + + /** + * + * + *
+   * Output only. The lock state.
+   * 
+ * + * + * .google.cloud.bigquery.biglake.v1alpha1.Lock.State state = 4 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + * + * @return The enum numeric value on the wire for state. + */ + int getStateValue(); + /** + * + * + *
+   * Output only. The lock state.
+   * 
+ * + * + * .google.cloud.bigquery.biglake.v1alpha1.Lock.State state = 4 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + * + * @return The state. + */ + com.google.cloud.bigquery.biglake.v1alpha1.Lock.State getState(); + + public com.google.cloud.bigquery.biglake.v1alpha1.Lock.ResourcesCase getResourcesCase(); +} diff --git a/java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/MetastoreProto.java b/java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/MetastoreProto.java new file mode 100644 index 000000000000..26d96a68f871 --- /dev/null +++ b/java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/MetastoreProto.java @@ -0,0 +1,724 @@ +/* + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: google/cloud/bigquery/biglake/v1alpha1/metastore.proto + +package com.google.cloud.bigquery.biglake.v1alpha1; + +public final class MetastoreProto { + private MetastoreProto() {} + + public static void registerAllExtensions(com.google.protobuf.ExtensionRegistryLite registry) {} + + public static void registerAllExtensions(com.google.protobuf.ExtensionRegistry registry) { + registerAllExtensions((com.google.protobuf.ExtensionRegistryLite) registry); + } + + static final com.google.protobuf.Descriptors.Descriptor + internal_static_google_cloud_bigquery_biglake_v1alpha1_Catalog_descriptor; + static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_google_cloud_bigquery_biglake_v1alpha1_Catalog_fieldAccessorTable; + static final com.google.protobuf.Descriptors.Descriptor + internal_static_google_cloud_bigquery_biglake_v1alpha1_Database_descriptor; + static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_google_cloud_bigquery_biglake_v1alpha1_Database_fieldAccessorTable; + static final com.google.protobuf.Descriptors.Descriptor + internal_static_google_cloud_bigquery_biglake_v1alpha1_Table_descriptor; + static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_google_cloud_bigquery_biglake_v1alpha1_Table_fieldAccessorTable; + static final com.google.protobuf.Descriptors.Descriptor + internal_static_google_cloud_bigquery_biglake_v1alpha1_Lock_descriptor; + static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_google_cloud_bigquery_biglake_v1alpha1_Lock_fieldAccessorTable; + static final com.google.protobuf.Descriptors.Descriptor + internal_static_google_cloud_bigquery_biglake_v1alpha1_CreateCatalogRequest_descriptor; + static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_google_cloud_bigquery_biglake_v1alpha1_CreateCatalogRequest_fieldAccessorTable; + static final com.google.protobuf.Descriptors.Descriptor + 
internal_static_google_cloud_bigquery_biglake_v1alpha1_DeleteCatalogRequest_descriptor; + static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_google_cloud_bigquery_biglake_v1alpha1_DeleteCatalogRequest_fieldAccessorTable; + static final com.google.protobuf.Descriptors.Descriptor + internal_static_google_cloud_bigquery_biglake_v1alpha1_GetCatalogRequest_descriptor; + static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_google_cloud_bigquery_biglake_v1alpha1_GetCatalogRequest_fieldAccessorTable; + static final com.google.protobuf.Descriptors.Descriptor + internal_static_google_cloud_bigquery_biglake_v1alpha1_ListCatalogsRequest_descriptor; + static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_google_cloud_bigquery_biglake_v1alpha1_ListCatalogsRequest_fieldAccessorTable; + static final com.google.protobuf.Descriptors.Descriptor + internal_static_google_cloud_bigquery_biglake_v1alpha1_ListCatalogsResponse_descriptor; + static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_google_cloud_bigquery_biglake_v1alpha1_ListCatalogsResponse_fieldAccessorTable; + static final com.google.protobuf.Descriptors.Descriptor + internal_static_google_cloud_bigquery_biglake_v1alpha1_CreateDatabaseRequest_descriptor; + static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_google_cloud_bigquery_biglake_v1alpha1_CreateDatabaseRequest_fieldAccessorTable; + static final com.google.protobuf.Descriptors.Descriptor + internal_static_google_cloud_bigquery_biglake_v1alpha1_DeleteDatabaseRequest_descriptor; + static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_google_cloud_bigquery_biglake_v1alpha1_DeleteDatabaseRequest_fieldAccessorTable; + static final com.google.protobuf.Descriptors.Descriptor + internal_static_google_cloud_bigquery_biglake_v1alpha1_UpdateDatabaseRequest_descriptor; + static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_google_cloud_bigquery_biglake_v1alpha1_UpdateDatabaseRequest_fieldAccessorTable; + static final com.google.protobuf.Descriptors.Descriptor + internal_static_google_cloud_bigquery_biglake_v1alpha1_GetDatabaseRequest_descriptor; + static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_google_cloud_bigquery_biglake_v1alpha1_GetDatabaseRequest_fieldAccessorTable; + static final com.google.protobuf.Descriptors.Descriptor + internal_static_google_cloud_bigquery_biglake_v1alpha1_ListDatabasesRequest_descriptor; + static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_google_cloud_bigquery_biglake_v1alpha1_ListDatabasesRequest_fieldAccessorTable; + static final com.google.protobuf.Descriptors.Descriptor + internal_static_google_cloud_bigquery_biglake_v1alpha1_ListDatabasesResponse_descriptor; + static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_google_cloud_bigquery_biglake_v1alpha1_ListDatabasesResponse_fieldAccessorTable; + static final com.google.protobuf.Descriptors.Descriptor + internal_static_google_cloud_bigquery_biglake_v1alpha1_CreateTableRequest_descriptor; + static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_google_cloud_bigquery_biglake_v1alpha1_CreateTableRequest_fieldAccessorTable; + static final com.google.protobuf.Descriptors.Descriptor + 
internal_static_google_cloud_bigquery_biglake_v1alpha1_DeleteTableRequest_descriptor; + static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_google_cloud_bigquery_biglake_v1alpha1_DeleteTableRequest_fieldAccessorTable; + static final com.google.protobuf.Descriptors.Descriptor + internal_static_google_cloud_bigquery_biglake_v1alpha1_UpdateTableRequest_descriptor; + static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_google_cloud_bigquery_biglake_v1alpha1_UpdateTableRequest_fieldAccessorTable; + static final com.google.protobuf.Descriptors.Descriptor + internal_static_google_cloud_bigquery_biglake_v1alpha1_GetTableRequest_descriptor; + static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_google_cloud_bigquery_biglake_v1alpha1_GetTableRequest_fieldAccessorTable; + static final com.google.protobuf.Descriptors.Descriptor + internal_static_google_cloud_bigquery_biglake_v1alpha1_ListTablesRequest_descriptor; + static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_google_cloud_bigquery_biglake_v1alpha1_ListTablesRequest_fieldAccessorTable; + static final com.google.protobuf.Descriptors.Descriptor + internal_static_google_cloud_bigquery_biglake_v1alpha1_ListTablesResponse_descriptor; + static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_google_cloud_bigquery_biglake_v1alpha1_ListTablesResponse_fieldAccessorTable; + static final com.google.protobuf.Descriptors.Descriptor + internal_static_google_cloud_bigquery_biglake_v1alpha1_CreateLockRequest_descriptor; + static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_google_cloud_bigquery_biglake_v1alpha1_CreateLockRequest_fieldAccessorTable; + static final com.google.protobuf.Descriptors.Descriptor + internal_static_google_cloud_bigquery_biglake_v1alpha1_DeleteLockRequest_descriptor; + static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_google_cloud_bigquery_biglake_v1alpha1_DeleteLockRequest_fieldAccessorTable; + static final com.google.protobuf.Descriptors.Descriptor + internal_static_google_cloud_bigquery_biglake_v1alpha1_CheckLockRequest_descriptor; + static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_google_cloud_bigquery_biglake_v1alpha1_CheckLockRequest_fieldAccessorTable; + static final com.google.protobuf.Descriptors.Descriptor + internal_static_google_cloud_bigquery_biglake_v1alpha1_ListLocksRequest_descriptor; + static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_google_cloud_bigquery_biglake_v1alpha1_ListLocksRequest_fieldAccessorTable; + static final com.google.protobuf.Descriptors.Descriptor + internal_static_google_cloud_bigquery_biglake_v1alpha1_ListLocksResponse_descriptor; + static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_google_cloud_bigquery_biglake_v1alpha1_ListLocksResponse_fieldAccessorTable; + static final com.google.protobuf.Descriptors.Descriptor + internal_static_google_cloud_bigquery_biglake_v1alpha1_HiveDatabaseOptions_descriptor; + static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_google_cloud_bigquery_biglake_v1alpha1_HiveDatabaseOptions_fieldAccessorTable; + static final com.google.protobuf.Descriptors.Descriptor + internal_static_google_cloud_bigquery_biglake_v1alpha1_HiveDatabaseOptions_ParametersEntry_descriptor; + 
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_google_cloud_bigquery_biglake_v1alpha1_HiveDatabaseOptions_ParametersEntry_fieldAccessorTable; + static final com.google.protobuf.Descriptors.Descriptor + internal_static_google_cloud_bigquery_biglake_v1alpha1_HiveTableOptions_descriptor; + static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_google_cloud_bigquery_biglake_v1alpha1_HiveTableOptions_fieldAccessorTable; + static final com.google.protobuf.Descriptors.Descriptor + internal_static_google_cloud_bigquery_biglake_v1alpha1_HiveTableOptions_SerDeInfo_descriptor; + static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_google_cloud_bigquery_biglake_v1alpha1_HiveTableOptions_SerDeInfo_fieldAccessorTable; + static final com.google.protobuf.Descriptors.Descriptor + internal_static_google_cloud_bigquery_biglake_v1alpha1_HiveTableOptions_StorageDescriptor_descriptor; + static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_google_cloud_bigquery_biglake_v1alpha1_HiveTableOptions_StorageDescriptor_fieldAccessorTable; + static final com.google.protobuf.Descriptors.Descriptor + internal_static_google_cloud_bigquery_biglake_v1alpha1_HiveTableOptions_ParametersEntry_descriptor; + static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_google_cloud_bigquery_biglake_v1alpha1_HiveTableOptions_ParametersEntry_fieldAccessorTable; + + public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { + return descriptor; + } + + private static com.google.protobuf.Descriptors.FileDescriptor descriptor; + + static { + java.lang.String[] descriptorData = { + "\n6google/cloud/bigquery/biglake/v1alpha1" + + "/metastore.proto\022&google.cloud.bigquery." + + "biglake.v1alpha1\032\034google/api/annotations" + + ".proto\032\027google/api/client.proto\032\037google/" + + "api/field_behavior.proto\032\031google/api/res" + + "ource.proto\032\033google/protobuf/empty.proto" + + "\032 google/protobuf/field_mask.proto\032\037goog" + + "le/protobuf/timestamp.proto\"\370\002\n\007Catalog\022" + + "4\n\004name\030\001 \001(\tB&\340A\003\372A \n\036biglake.googleapi" + + "s.com/Catalog\0224\n\013create_time\030\002 \001(\0132\032.goo" + + "gle.protobuf.TimestampB\003\340A\003\0224\n\013update_ti" + + "me\030\003 \001(\0132\032.google.protobuf.TimestampB\003\340A" + + "\003\0224\n\013delete_time\030\004 \001(\0132\032.google.protobuf" + + ".TimestampB\003\340A\003\0224\n\013expire_time\030\005 \001(\0132\032.g" + + "oogle.protobuf.TimestampB\003\340A\003:_\352A\\\n\036bigl" + + "ake.googleapis.com/Catalog\022:projects/{pr" + + "oject}/locations/{location}/catalogs/{ca" + + "talog}\"\335\004\n\010Database\022S\n\014hive_options\030\007 \001(" + + "\0132;.google.cloud.bigquery.biglake.v1alph" + + "a1.HiveDatabaseOptionsH\000\0225\n\004name\030\001 \001(\tB\'" + + "\340A\003\372A!\n\037biglake.googleapis.com/Database\022" + + "4\n\013create_time\030\002 \001(\0132\032.google.protobuf.T" + + "imestampB\003\340A\003\0224\n\013update_time\030\003 \001(\0132\032.goo" + + "gle.protobuf.TimestampB\003\340A\003\0224\n\013delete_ti" + + "me\030\004 \001(\0132\032.google.protobuf.TimestampB\003\340A" + + "\003\0224\n\013expire_time\030\005 \001(\0132\032.google.protobuf" + + ".TimestampB\003\340A\003\022C\n\004type\030\006 \001(\01625.google.c" + + "loud.bigquery.biglake.v1alpha1.Database." 
+ + "Type\"&\n\004Type\022\024\n\020TYPE_UNSPECIFIED\020\000\022\010\n\004HI" + + "VE\020\001:u\352Ar\n\037biglake.googleapis.com/Databa" + + "se\022Oprojects/{project}/locations/{locati" + + "on}/catalogs/{catalog}/databases/{databa" + + "se}B\t\n\007options\"\336\004\n\005Table\022P\n\014hive_options" + + "\030\007 \001(\01328.google.cloud.bigquery.biglake.v" + + "1alpha1.HiveTableOptionsH\000\0222\n\004name\030\001 \001(\t" + + "B$\340A\003\372A\036\n\034biglake.googleapis.com/Table\0224" + + "\n\013create_time\030\002 \001(\0132\032.google.protobuf.Ti" + + "mestampB\003\340A\003\0224\n\013update_time\030\003 \001(\0132\032.goog" + + "le.protobuf.TimestampB\003\340A\003\0224\n\013delete_tim" + + "e\030\004 \001(\0132\032.google.protobuf.TimestampB\003\340A\003" + + "\0224\n\013expire_time\030\005 \001(\0132\032.google.protobuf." + + "TimestampB\003\340A\003\022@\n\004type\030\006 \001(\01622.google.cl" + + "oud.bigquery.biglake.v1alpha1.Table.Type" + + "\"&\n\004Type\022\024\n\020TYPE_UNSPECIFIED\020\000\022\010\n\004HIVE\020\001" + + ":\201\001\352A~\n\034biglake.googleapis.com/Table\022^pr" + + "ojects/{project}/locations/{location}/ca" + + "talogs/{catalog}/databases/{database}/ta" + + "bles/{table}B\t\n\007options\"\201\004\n\004Lock\022\022\n\010tabl" + + "e_id\030\005 \001(\tH\000\0221\n\004name\030\001 \001(\tB#\340A\003\372A\035\n\033bigl" + + "ake.googleapis.com/Lock\0224\n\013create_time\030\002" + + " \001(\0132\032.google.protobuf.TimestampB\003\340A\003\022?\n" + + "\004type\030\003 \001(\01621.google.cloud.bigquery.bigl" + + "ake.v1alpha1.Lock.Type\022F\n\005state\030\004 \001(\01622." + + "google.cloud.bigquery.biglake.v1alpha1.L" + + "ock.StateB\003\340A\003\"+\n\004Type\022\024\n\020TYPE_UNSPECIFI" + + "ED\020\000\022\r\n\tEXCLUSIVE\020\001\"9\n\005State\022\025\n\021STATE_UN" + + "SPECIFIED\020\000\022\013\n\007WAITING\020\001\022\014\n\010ACQUIRED\020\002:~" + + "\352A{\n\033biglake.googleapis.com/Lock\022\\projec" + + "ts/{project}/locations/{location}/catalo" + + "gs/{catalog}/databases/{database}/locks/" + + "{lock}B\013\n\tresources\"\261\001\n\024CreateCatalogReq" + + "uest\0229\n\006parent\030\001 \001(\tB)\340A\002\372A#\n!locations." 
+ + "googleapis.com/Location\022E\n\007catalog\030\002 \001(\013" + + "2/.google.cloud.bigquery.biglake.v1alpha" + + "1.CatalogB\003\340A\002\022\027\n\ncatalog_id\030\003 \001(\tB\003\340A\002\"" + + "L\n\024DeleteCatalogRequest\0224\n\004name\030\001 \001(\tB&\340" + + "A\002\372A \n\036biglake.googleapis.com/Catalog\"I\n" + + "\021GetCatalogRequest\0224\n\004name\030\001 \001(\tB&\340A\002\372A " + + "\n\036biglake.googleapis.com/Catalog\"w\n\023List" + + "CatalogsRequest\0229\n\006parent\030\001 \001(\tB)\340A\002\372A#\n" + + "!locations.googleapis.com/Location\022\021\n\tpa" + + "ge_size\030\002 \001(\005\022\022\n\npage_token\030\003 \001(\t\"r\n\024Lis" + + "tCatalogsResponse\022A\n\010catalogs\030\001 \003(\0132/.go" + + "ogle.cloud.bigquery.biglake.v1alpha1.Cat" + + "alog\022\027\n\017next_page_token\030\002 \001(\t\"\262\001\n\025Create" + + "DatabaseRequest\0226\n\006parent\030\001 \001(\tB&\340A\002\372A \n" + + "\036biglake.googleapis.com/Catalog\022G\n\010datab" + + "ase\030\002 \001(\01320.google.cloud.bigquery.biglak" + + "e.v1alpha1.DatabaseB\003\340A\002\022\030\n\013database_id\030" + + "\003 \001(\tB\003\340A\002\"N\n\025DeleteDatabaseRequest\0225\n\004n" + + "ame\030\001 \001(\tB\'\340A\002\372A!\n\037biglake.googleapis.co" + + "m/Database\"\221\001\n\025UpdateDatabaseRequest\022G\n\010" + + "database\030\001 \001(\01320.google.cloud.bigquery.b" + + "iglake.v1alpha1.DatabaseB\003\340A\002\022/\n\013update_" + + "mask\030\002 \001(\0132\032.google.protobuf.FieldMask\"K" + + "\n\022GetDatabaseRequest\0225\n\004name\030\001 \001(\tB\'\340A\002\372" + + "A!\n\037biglake.googleapis.com/Database\"u\n\024L" + + "istDatabasesRequest\0226\n\006parent\030\001 \001(\tB&\340A\002" + + "\372A \n\036biglake.googleapis.com/Catalog\022\021\n\tp" + + "age_size\030\002 \001(\005\022\022\n\npage_token\030\003 \001(\t\"u\n\025Li" + + "stDatabasesResponse\022C\n\tdatabases\030\001 \003(\01320" + + ".google.cloud.bigquery.biglake.v1alpha1." + + "Database\022\027\n\017next_page_token\030\002 \001(\t\"\247\001\n\022Cr" + + "eateTableRequest\0227\n\006parent\030\001 \001(\tB\'\340A\002\372A!" + + "\n\037biglake.googleapis.com/Database\022A\n\005tab" + + "le\030\002 \001(\0132-.google.cloud.bigquery.biglake" + + ".v1alpha1.TableB\003\340A\002\022\025\n\010table_id\030\003 \001(\tB\003" + + "\340A\002\"H\n\022DeleteTableRequest\0222\n\004name\030\001 \001(\tB" + + "$\340A\002\372A\036\n\034biglake.googleapis.com/Table\"\210\001" + + "\n\022UpdateTableRequest\022A\n\005table\030\001 \001(\0132-.go" + + "ogle.cloud.bigquery.biglake.v1alpha1.Tab" + + "leB\003\340A\002\022/\n\013update_mask\030\002 \001(\0132\032.google.pr" + + "otobuf.FieldMask\"E\n\017GetTableRequest\0222\n\004n" + + "ame\030\001 \001(\tB$\340A\002\372A\036\n\034biglake.googleapis.co" + + "m/Table\"s\n\021ListTablesRequest\0227\n\006parent\030\001" + + " \001(\tB\'\340A\002\372A!\n\037biglake.googleapis.com/Dat" + + "abase\022\021\n\tpage_size\030\002 \001(\005\022\022\n\npage_token\030\003" + + " \001(\t\"l\n\022ListTablesResponse\022=\n\006tables\030\001 \003" + + "(\0132-.google.cloud.bigquery.biglake.v1alp" + + "ha1.Table\022\027\n\017next_page_token\030\002 \001(\t\"\215\001\n\021C" + + "reateLockRequest\0227\n\006parent\030\001 \001(\tB\'\340A\002\372A!" + + "\n\037biglake.googleapis.com/Database\022?\n\004loc" + + "k\030\002 \001(\0132,.google.cloud.bigquery.biglake." 
+ + "v1alpha1.LockB\003\340A\002\"F\n\021DeleteLockRequest\022" + + "1\n\004name\030\001 \001(\tB#\340A\002\372A\035\n\033biglake.googleapi" + + "s.com/Lock\"E\n\020CheckLockRequest\0221\n\004name\030\001" + + " \001(\tB#\340A\002\372A\035\n\033biglake.googleapis.com/Loc" + + "k\"r\n\020ListLocksRequest\0227\n\006parent\030\001 \001(\tB\'\340" + + "A\002\372A!\n\037biglake.googleapis.com/Database\022\021" + + "\n\tpage_size\030\002 \001(\005\022\022\n\npage_token\030\003 \001(\t\"i\n" + + "\021ListLocksResponse\022;\n\005locks\030\001 \003(\0132,.goog" + + "le.cloud.bigquery.biglake.v1alpha1.Lock\022" + + "\027\n\017next_page_token\030\002 \001(\t\"\277\001\n\023HiveDatabas" + + "eOptions\022\024\n\014location_uri\030\001 \001(\t\022_\n\nparame" + + "ters\030\002 \003(\0132K.google.cloud.bigquery.bigla" + + "ke.v1alpha1.HiveDatabaseOptions.Paramete" + + "rsEntry\0321\n\017ParametersEntry\022\013\n\003key\030\001 \001(\t\022" + + "\r\n\005value\030\002 \001(\t:\0028\001\"\370\003\n\020HiveTableOptions\022" + + "\\\n\nparameters\030\001 \003(\0132H.google.cloud.bigqu" + + "ery.biglake.v1alpha1.HiveTableOptions.Pa" + + "rametersEntry\022\022\n\ntable_type\030\002 \001(\t\022f\n\022sto" + + "rage_descriptor\030\003 \001(\0132J.google.cloud.big" + + "query.biglake.v1alpha1.HiveTableOptions." + + "StorageDescriptor\032&\n\tSerDeInfo\022\031\n\021serial" + + "ization_lib\030\001 \001(\t\032\256\001\n\021StorageDescriptor\022" + + "\024\n\014location_uri\030\001 \001(\t\022\024\n\014input_format\030\002 " + + "\001(\t\022\025\n\routput_format\030\003 \001(\t\022V\n\nserde_info" + + "\030\004 \001(\0132B.google.cloud.bigquery.biglake.v" + + "1alpha1.HiveTableOptions.SerDeInfo\0321\n\017Pa" + + "rametersEntry\022\013\n\003key\030\001 \001(\t\022\r\n\005value\030\002 \001(" + + "\t:\0028\0012\305\037\n\020MetastoreService\022\337\001\n\rCreateCat" + + "alog\022<.google.cloud.bigquery.biglake.v1a" + + "lpha1.CreateCatalogRequest\032/.google.clou" + + "d.bigquery.biglake.v1alpha1.Catalog\"_\202\323\344" + + "\223\002=\"2/v1alpha1/{parent=projects/*/locati" + + "ons/*}/catalogs:\007catalog\332A\031parent,catalo" + + "g,catalog_id\022\301\001\n\rDeleteCatalog\022<.google." + + "cloud.bigquery.biglake.v1alpha1.DeleteCa" + + "talogRequest\032/.google.cloud.bigquery.big" + + "lake.v1alpha1.Catalog\"A\202\323\344\223\0024*2/v1alpha1" + + "/{name=projects/*/locations/*/catalogs/*" + + "}\332A\004name\022\273\001\n\nGetCatalog\0229.google.cloud.b" + + "igquery.biglake.v1alpha1.GetCatalogReque" + + "st\032/.google.cloud.bigquery.biglake.v1alp" + + "ha1.Catalog\"A\202\323\344\223\0024\0222/v1alpha1/{name=pro" + + "jects/*/locations/*/catalogs/*}\332A\004name\022\316" + + "\001\n\014ListCatalogs\022;.google.cloud.bigquery." + + "biglake.v1alpha1.ListCatalogsRequest\032<.g" + + "oogle.cloud.bigquery.biglake.v1alpha1.Li" + + "stCatalogsResponse\"C\202\323\344\223\0024\0222/v1alpha1/{p" + + "arent=projects/*/locations/*}/catalogs\332A" + + "\006parent\022\361\001\n\016CreateDatabase\022=.google.clou" + + "d.bigquery.biglake.v1alpha1.CreateDataba" + + "seRequest\0320.google.cloud.bigquery.biglak" + + "e.v1alpha1.Database\"n\202\323\344\223\002J\">/v1alpha1/{" + + "parent=projects/*/locations/*/catalogs/*" + + "}/databases:\010database\332A\033parent,database," + + "database_id\022\320\001\n\016DeleteDatabase\022=.google." 
+ + "cloud.bigquery.biglake.v1alpha1.DeleteDa" + + "tabaseRequest\0320.google.cloud.bigquery.bi" + + "glake.v1alpha1.Database\"M\202\323\344\223\002@*>/v1alph" + + "a1/{name=projects/*/locations/*/catalogs" + + "/*/databases/*}\332A\004name\022\363\001\n\016UpdateDatabas" + + "e\022=.google.cloud.bigquery.biglake.v1alph" + + "a1.UpdateDatabaseRequest\0320.google.cloud." + + "bigquery.biglake.v1alpha1.Database\"p\202\323\344\223" + + "\002S2G/v1alpha1/{database.name=projects/*/" + + "locations/*/catalogs/*/databases/*}:\010dat" + + "abase\332A\024database,update_mask\022\312\001\n\013GetData" + + "base\022:.google.cloud.bigquery.biglake.v1a" + + "lpha1.GetDatabaseRequest\0320.google.cloud." + + "bigquery.biglake.v1alpha1.Database\"M\202\323\344\223" + + "\002@\022>/v1alpha1/{name=projects/*/locations" + + "/*/catalogs/*/databases/*}\332A\004name\022\335\001\n\rLi" + + "stDatabases\022<.google.cloud.bigquery.bigl" + + "ake.v1alpha1.ListDatabasesRequest\032=.goog" + + "le.cloud.bigquery.biglake.v1alpha1.ListD" + + "atabasesResponse\"O\202\323\344\223\002@\022>/v1alpha1/{par" + + "ent=projects/*/locations/*/catalogs/*}/d" + + "atabases\332A\006parent\022\350\001\n\013CreateTable\022:.goog" + + "le.cloud.bigquery.biglake.v1alpha1.Creat" + + "eTableRequest\032-.google.cloud.bigquery.bi" + + "glake.v1alpha1.Table\"n\202\323\344\223\002P\"G/v1alpha1/" + + "{parent=projects/*/locations/*/catalogs/" + + "*/databases/*}/tables:\005table\332A\025parent,ta" + + "ble,table_id\022\320\001\n\013DeleteTable\022:.google.cl" + + "oud.bigquery.biglake.v1alpha1.DeleteTabl" + + "eRequest\032-.google.cloud.bigquery.biglake" + + ".v1alpha1.Table\"V\202\323\344\223\002I*G/v1alpha1/{name" + + "=projects/*/locations/*/catalogs/*/datab" + + "ases/*/tables/*}\332A\004name\022\352\001\n\013UpdateTable\022" + + ":.google.cloud.bigquery.biglake.v1alpha1" + + ".UpdateTableRequest\032-.google.cloud.bigqu" + + "ery.biglake.v1alpha1.Table\"p\202\323\344\223\002V2M/v1a" + + "lpha1/{table.name=projects/*/locations/*" + + "/catalogs/*/databases/*/tables/*}:\005table" + + "\332A\021table,update_mask\022\312\001\n\010GetTable\0227.goog" + + "le.cloud.bigquery.biglake.v1alpha1.GetTa" + + "bleRequest\032-.google.cloud.bigquery.bigla" + + "ke.v1alpha1.Table\"V\202\323\344\223\002I\022G/v1alpha1/{na" + + "me=projects/*/locations/*/catalogs/*/dat" + + "abases/*/tables/*}\332A\004name\022\335\001\n\nListTables" + + "\0229.google.cloud.bigquery.biglake.v1alpha" + + "1.ListTablesRequest\032:.google.cloud.bigqu" + + "ery.biglake.v1alpha1.ListTablesResponse\"" + + "X\202\323\344\223\002I\022G/v1alpha1/{parent=projects/*/lo" + + "cations/*/catalogs/*/databases/*}/tables" + + "\332A\006parent\022\331\001\n\nCreateLock\0229.google.cloud." 
+ + "bigquery.biglake.v1alpha1.CreateLockRequ" + + "est\032,.google.cloud.bigquery.biglake.v1al" + + "pha1.Lock\"b\202\323\344\223\002N\"F/v1alpha1/{parent=pro" + + "jects/*/locations/*/catalogs/*/databases" + + "/*}/locks:\004lock\332A\013parent,lock\022\266\001\n\nDelete" + + "Lock\0229.google.cloud.bigquery.biglake.v1a" + + "lpha1.DeleteLockRequest\032\026.google.protobu" + + "f.Empty\"U\202\323\344\223\002H*F/v1alpha1/{name=project" + + "s/*/locations/*/catalogs/*/databases/*/l" + + "ocks/*}\332A\004name\022\323\001\n\tCheckLock\0228.google.cl" + + "oud.bigquery.biglake.v1alpha1.CheckLockR" + + "equest\032,.google.cloud.bigquery.biglake.v" + + "1alpha1.Lock\"^\202\323\344\223\002Q\"L/v1alpha1/{name=pr" + + "ojects/*/locations/*/catalogs/*/database" + + "s/*/locks/*}:check:\001*\332A\004name\022\331\001\n\tListLoc" + + "ks\0228.google.cloud.bigquery.biglake.v1alp" + + "ha1.ListLocksRequest\0329.google.cloud.bigq" + + "uery.biglake.v1alpha1.ListLocksResponse\"" + + "W\202\323\344\223\002H\022F/v1alpha1/{parent=projects/*/lo" + + "cations/*/catalogs/*/databases/*}/locks\332" + + "A\006parent\032s\312A\026biglake.googleapis.com\322AWht" + + "tps://www.googleapis.com/auth/bigquery,h" + + "ttps://www.googleapis.com/auth/cloud-pla" + + "tformB\204\001\n*com.google.cloud.bigquery.bigl" + + "ake.v1alpha1B\016MetastoreProtoP\001ZDcloud.go" + + "ogle.com/go/bigquery/biglake/apiv1alpha1" + + "/biglakepb;biglakepbb\006proto3" + }; + descriptor = + com.google.protobuf.Descriptors.FileDescriptor.internalBuildGeneratedFileFrom( + descriptorData, + new com.google.protobuf.Descriptors.FileDescriptor[] { + com.google.api.AnnotationsProto.getDescriptor(), + com.google.api.ClientProto.getDescriptor(), + com.google.api.FieldBehaviorProto.getDescriptor(), + com.google.api.ResourceProto.getDescriptor(), + com.google.protobuf.EmptyProto.getDescriptor(), + com.google.protobuf.FieldMaskProto.getDescriptor(), + com.google.protobuf.TimestampProto.getDescriptor(), + }); + internal_static_google_cloud_bigquery_biglake_v1alpha1_Catalog_descriptor = + getDescriptor().getMessageTypes().get(0); + internal_static_google_cloud_bigquery_biglake_v1alpha1_Catalog_fieldAccessorTable = + new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_google_cloud_bigquery_biglake_v1alpha1_Catalog_descriptor, + new java.lang.String[] { + "Name", "CreateTime", "UpdateTime", "DeleteTime", "ExpireTime", + }); + internal_static_google_cloud_bigquery_biglake_v1alpha1_Database_descriptor = + getDescriptor().getMessageTypes().get(1); + internal_static_google_cloud_bigquery_biglake_v1alpha1_Database_fieldAccessorTable = + new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_google_cloud_bigquery_biglake_v1alpha1_Database_descriptor, + new java.lang.String[] { + "HiveOptions", + "Name", + "CreateTime", + "UpdateTime", + "DeleteTime", + "ExpireTime", + "Type", + "Options", + }); + internal_static_google_cloud_bigquery_biglake_v1alpha1_Table_descriptor = + getDescriptor().getMessageTypes().get(2); + internal_static_google_cloud_bigquery_biglake_v1alpha1_Table_fieldAccessorTable = + new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_google_cloud_bigquery_biglake_v1alpha1_Table_descriptor, + new java.lang.String[] { + "HiveOptions", + "Name", + "CreateTime", + "UpdateTime", + "DeleteTime", + "ExpireTime", + "Type", + "Options", + }); + internal_static_google_cloud_bigquery_biglake_v1alpha1_Lock_descriptor = + getDescriptor().getMessageTypes().get(3); + 
internal_static_google_cloud_bigquery_biglake_v1alpha1_Lock_fieldAccessorTable = + new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_google_cloud_bigquery_biglake_v1alpha1_Lock_descriptor, + new java.lang.String[] { + "TableId", "Name", "CreateTime", "Type", "State", "Resources", + }); + internal_static_google_cloud_bigquery_biglake_v1alpha1_CreateCatalogRequest_descriptor = + getDescriptor().getMessageTypes().get(4); + internal_static_google_cloud_bigquery_biglake_v1alpha1_CreateCatalogRequest_fieldAccessorTable = + new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_google_cloud_bigquery_biglake_v1alpha1_CreateCatalogRequest_descriptor, + new java.lang.String[] { + "Parent", "Catalog", "CatalogId", + }); + internal_static_google_cloud_bigquery_biglake_v1alpha1_DeleteCatalogRequest_descriptor = + getDescriptor().getMessageTypes().get(5); + internal_static_google_cloud_bigquery_biglake_v1alpha1_DeleteCatalogRequest_fieldAccessorTable = + new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_google_cloud_bigquery_biglake_v1alpha1_DeleteCatalogRequest_descriptor, + new java.lang.String[] { + "Name", + }); + internal_static_google_cloud_bigquery_biglake_v1alpha1_GetCatalogRequest_descriptor = + getDescriptor().getMessageTypes().get(6); + internal_static_google_cloud_bigquery_biglake_v1alpha1_GetCatalogRequest_fieldAccessorTable = + new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_google_cloud_bigquery_biglake_v1alpha1_GetCatalogRequest_descriptor, + new java.lang.String[] { + "Name", + }); + internal_static_google_cloud_bigquery_biglake_v1alpha1_ListCatalogsRequest_descriptor = + getDescriptor().getMessageTypes().get(7); + internal_static_google_cloud_bigquery_biglake_v1alpha1_ListCatalogsRequest_fieldAccessorTable = + new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_google_cloud_bigquery_biglake_v1alpha1_ListCatalogsRequest_descriptor, + new java.lang.String[] { + "Parent", "PageSize", "PageToken", + }); + internal_static_google_cloud_bigquery_biglake_v1alpha1_ListCatalogsResponse_descriptor = + getDescriptor().getMessageTypes().get(8); + internal_static_google_cloud_bigquery_biglake_v1alpha1_ListCatalogsResponse_fieldAccessorTable = + new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_google_cloud_bigquery_biglake_v1alpha1_ListCatalogsResponse_descriptor, + new java.lang.String[] { + "Catalogs", "NextPageToken", + }); + internal_static_google_cloud_bigquery_biglake_v1alpha1_CreateDatabaseRequest_descriptor = + getDescriptor().getMessageTypes().get(9); + internal_static_google_cloud_bigquery_biglake_v1alpha1_CreateDatabaseRequest_fieldAccessorTable = + new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_google_cloud_bigquery_biglake_v1alpha1_CreateDatabaseRequest_descriptor, + new java.lang.String[] { + "Parent", "Database", "DatabaseId", + }); + internal_static_google_cloud_bigquery_biglake_v1alpha1_DeleteDatabaseRequest_descriptor = + getDescriptor().getMessageTypes().get(10); + internal_static_google_cloud_bigquery_biglake_v1alpha1_DeleteDatabaseRequest_fieldAccessorTable = + new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_google_cloud_bigquery_biglake_v1alpha1_DeleteDatabaseRequest_descriptor, + new java.lang.String[] { + "Name", + }); + internal_static_google_cloud_bigquery_biglake_v1alpha1_UpdateDatabaseRequest_descriptor = + 
getDescriptor().getMessageTypes().get(11); + internal_static_google_cloud_bigquery_biglake_v1alpha1_UpdateDatabaseRequest_fieldAccessorTable = + new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_google_cloud_bigquery_biglake_v1alpha1_UpdateDatabaseRequest_descriptor, + new java.lang.String[] { + "Database", "UpdateMask", + }); + internal_static_google_cloud_bigquery_biglake_v1alpha1_GetDatabaseRequest_descriptor = + getDescriptor().getMessageTypes().get(12); + internal_static_google_cloud_bigquery_biglake_v1alpha1_GetDatabaseRequest_fieldAccessorTable = + new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_google_cloud_bigquery_biglake_v1alpha1_GetDatabaseRequest_descriptor, + new java.lang.String[] { + "Name", + }); + internal_static_google_cloud_bigquery_biglake_v1alpha1_ListDatabasesRequest_descriptor = + getDescriptor().getMessageTypes().get(13); + internal_static_google_cloud_bigquery_biglake_v1alpha1_ListDatabasesRequest_fieldAccessorTable = + new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_google_cloud_bigquery_biglake_v1alpha1_ListDatabasesRequest_descriptor, + new java.lang.String[] { + "Parent", "PageSize", "PageToken", + }); + internal_static_google_cloud_bigquery_biglake_v1alpha1_ListDatabasesResponse_descriptor = + getDescriptor().getMessageTypes().get(14); + internal_static_google_cloud_bigquery_biglake_v1alpha1_ListDatabasesResponse_fieldAccessorTable = + new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_google_cloud_bigquery_biglake_v1alpha1_ListDatabasesResponse_descriptor, + new java.lang.String[] { + "Databases", "NextPageToken", + }); + internal_static_google_cloud_bigquery_biglake_v1alpha1_CreateTableRequest_descriptor = + getDescriptor().getMessageTypes().get(15); + internal_static_google_cloud_bigquery_biglake_v1alpha1_CreateTableRequest_fieldAccessorTable = + new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_google_cloud_bigquery_biglake_v1alpha1_CreateTableRequest_descriptor, + new java.lang.String[] { + "Parent", "Table", "TableId", + }); + internal_static_google_cloud_bigquery_biglake_v1alpha1_DeleteTableRequest_descriptor = + getDescriptor().getMessageTypes().get(16); + internal_static_google_cloud_bigquery_biglake_v1alpha1_DeleteTableRequest_fieldAccessorTable = + new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_google_cloud_bigquery_biglake_v1alpha1_DeleteTableRequest_descriptor, + new java.lang.String[] { + "Name", + }); + internal_static_google_cloud_bigquery_biglake_v1alpha1_UpdateTableRequest_descriptor = + getDescriptor().getMessageTypes().get(17); + internal_static_google_cloud_bigquery_biglake_v1alpha1_UpdateTableRequest_fieldAccessorTable = + new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_google_cloud_bigquery_biglake_v1alpha1_UpdateTableRequest_descriptor, + new java.lang.String[] { + "Table", "UpdateMask", + }); + internal_static_google_cloud_bigquery_biglake_v1alpha1_GetTableRequest_descriptor = + getDescriptor().getMessageTypes().get(18); + internal_static_google_cloud_bigquery_biglake_v1alpha1_GetTableRequest_fieldAccessorTable = + new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_google_cloud_bigquery_biglake_v1alpha1_GetTableRequest_descriptor, + new java.lang.String[] { + "Name", + }); + internal_static_google_cloud_bigquery_biglake_v1alpha1_ListTablesRequest_descriptor = + 
getDescriptor().getMessageTypes().get(19); + internal_static_google_cloud_bigquery_biglake_v1alpha1_ListTablesRequest_fieldAccessorTable = + new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_google_cloud_bigquery_biglake_v1alpha1_ListTablesRequest_descriptor, + new java.lang.String[] { + "Parent", "PageSize", "PageToken", + }); + internal_static_google_cloud_bigquery_biglake_v1alpha1_ListTablesResponse_descriptor = + getDescriptor().getMessageTypes().get(20); + internal_static_google_cloud_bigquery_biglake_v1alpha1_ListTablesResponse_fieldAccessorTable = + new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_google_cloud_bigquery_biglake_v1alpha1_ListTablesResponse_descriptor, + new java.lang.String[] { + "Tables", "NextPageToken", + }); + internal_static_google_cloud_bigquery_biglake_v1alpha1_CreateLockRequest_descriptor = + getDescriptor().getMessageTypes().get(21); + internal_static_google_cloud_bigquery_biglake_v1alpha1_CreateLockRequest_fieldAccessorTable = + new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_google_cloud_bigquery_biglake_v1alpha1_CreateLockRequest_descriptor, + new java.lang.String[] { + "Parent", "Lock", + }); + internal_static_google_cloud_bigquery_biglake_v1alpha1_DeleteLockRequest_descriptor = + getDescriptor().getMessageTypes().get(22); + internal_static_google_cloud_bigquery_biglake_v1alpha1_DeleteLockRequest_fieldAccessorTable = + new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_google_cloud_bigquery_biglake_v1alpha1_DeleteLockRequest_descriptor, + new java.lang.String[] { + "Name", + }); + internal_static_google_cloud_bigquery_biglake_v1alpha1_CheckLockRequest_descriptor = + getDescriptor().getMessageTypes().get(23); + internal_static_google_cloud_bigquery_biglake_v1alpha1_CheckLockRequest_fieldAccessorTable = + new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_google_cloud_bigquery_biglake_v1alpha1_CheckLockRequest_descriptor, + new java.lang.String[] { + "Name", + }); + internal_static_google_cloud_bigquery_biglake_v1alpha1_ListLocksRequest_descriptor = + getDescriptor().getMessageTypes().get(24); + internal_static_google_cloud_bigquery_biglake_v1alpha1_ListLocksRequest_fieldAccessorTable = + new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_google_cloud_bigquery_biglake_v1alpha1_ListLocksRequest_descriptor, + new java.lang.String[] { + "Parent", "PageSize", "PageToken", + }); + internal_static_google_cloud_bigquery_biglake_v1alpha1_ListLocksResponse_descriptor = + getDescriptor().getMessageTypes().get(25); + internal_static_google_cloud_bigquery_biglake_v1alpha1_ListLocksResponse_fieldAccessorTable = + new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_google_cloud_bigquery_biglake_v1alpha1_ListLocksResponse_descriptor, + new java.lang.String[] { + "Locks", "NextPageToken", + }); + internal_static_google_cloud_bigquery_biglake_v1alpha1_HiveDatabaseOptions_descriptor = + getDescriptor().getMessageTypes().get(26); + internal_static_google_cloud_bigquery_biglake_v1alpha1_HiveDatabaseOptions_fieldAccessorTable = + new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_google_cloud_bigquery_biglake_v1alpha1_HiveDatabaseOptions_descriptor, + new java.lang.String[] { + "LocationUri", "Parameters", + }); + internal_static_google_cloud_bigquery_biglake_v1alpha1_HiveDatabaseOptions_ParametersEntry_descriptor = + 
internal_static_google_cloud_bigquery_biglake_v1alpha1_HiveDatabaseOptions_descriptor + .getNestedTypes() + .get(0); + internal_static_google_cloud_bigquery_biglake_v1alpha1_HiveDatabaseOptions_ParametersEntry_fieldAccessorTable = + new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_google_cloud_bigquery_biglake_v1alpha1_HiveDatabaseOptions_ParametersEntry_descriptor, + new java.lang.String[] { + "Key", "Value", + }); + internal_static_google_cloud_bigquery_biglake_v1alpha1_HiveTableOptions_descriptor = + getDescriptor().getMessageTypes().get(27); + internal_static_google_cloud_bigquery_biglake_v1alpha1_HiveTableOptions_fieldAccessorTable = + new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_google_cloud_bigquery_biglake_v1alpha1_HiveTableOptions_descriptor, + new java.lang.String[] { + "Parameters", "TableType", "StorageDescriptor", + }); + internal_static_google_cloud_bigquery_biglake_v1alpha1_HiveTableOptions_SerDeInfo_descriptor = + internal_static_google_cloud_bigquery_biglake_v1alpha1_HiveTableOptions_descriptor + .getNestedTypes() + .get(0); + internal_static_google_cloud_bigquery_biglake_v1alpha1_HiveTableOptions_SerDeInfo_fieldAccessorTable = + new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_google_cloud_bigquery_biglake_v1alpha1_HiveTableOptions_SerDeInfo_descriptor, + new java.lang.String[] { + "SerializationLib", + }); + internal_static_google_cloud_bigquery_biglake_v1alpha1_HiveTableOptions_StorageDescriptor_descriptor = + internal_static_google_cloud_bigquery_biglake_v1alpha1_HiveTableOptions_descriptor + .getNestedTypes() + .get(1); + internal_static_google_cloud_bigquery_biglake_v1alpha1_HiveTableOptions_StorageDescriptor_fieldAccessorTable = + new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_google_cloud_bigquery_biglake_v1alpha1_HiveTableOptions_StorageDescriptor_descriptor, + new java.lang.String[] { + "LocationUri", "InputFormat", "OutputFormat", "SerdeInfo", + }); + internal_static_google_cloud_bigquery_biglake_v1alpha1_HiveTableOptions_ParametersEntry_descriptor = + internal_static_google_cloud_bigquery_biglake_v1alpha1_HiveTableOptions_descriptor + .getNestedTypes() + .get(2); + internal_static_google_cloud_bigquery_biglake_v1alpha1_HiveTableOptions_ParametersEntry_fieldAccessorTable = + new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_google_cloud_bigquery_biglake_v1alpha1_HiveTableOptions_ParametersEntry_descriptor, + new java.lang.String[] { + "Key", "Value", + }); + com.google.protobuf.ExtensionRegistry registry = + com.google.protobuf.ExtensionRegistry.newInstance(); + registry.add(com.google.api.ClientProto.defaultHost); + registry.add(com.google.api.FieldBehaviorProto.fieldBehavior); + registry.add(com.google.api.AnnotationsProto.http); + registry.add(com.google.api.ClientProto.methodSignature); + registry.add(com.google.api.ClientProto.oauthScopes); + registry.add(com.google.api.ResourceProto.resource); + registry.add(com.google.api.ResourceProto.resourceReference); + com.google.protobuf.Descriptors.FileDescriptor.internalUpdateFileDescriptor( + descriptor, registry); + com.google.api.AnnotationsProto.getDescriptor(); + com.google.api.ClientProto.getDescriptor(); + com.google.api.FieldBehaviorProto.getDescriptor(); + com.google.api.ResourceProto.getDescriptor(); + com.google.protobuf.EmptyProto.getDescriptor(); + com.google.protobuf.FieldMaskProto.getDescriptor(); + 
com.google.protobuf.TimestampProto.getDescriptor(); + } + + // @@protoc_insertion_point(outer_class_scope) +} diff --git a/java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/Table.java b/java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/Table.java new file mode 100644 index 000000000000..1af5d472be7c --- /dev/null +++ b/java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/Table.java @@ -0,0 +1,2476 @@ +/* + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: google/cloud/bigquery/biglake/v1alpha1/metastore.proto + +package com.google.cloud.bigquery.biglake.v1alpha1; + +/** + * + * + *
+ * Represents a table.
+ * 
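+ *
+ * A minimal construction sketch, assuming the standard protobuf-generated builder
+ * setters ({@code setType}, {@code setHiveOptions}) that follow the fields declared
+ * in this message:
+ * <pre>{@code
+ * Table table =
+ *     Table.newBuilder()
+ *         // name, create_time, etc. are OUTPUT_ONLY and populated by the service.
+ *         .setType(Table.Type.HIVE)
+ *         .setHiveOptions(HiveTableOptions.getDefaultInstance())
+ *         .build();
+ * }</pre>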
+ * + * Protobuf type {@code google.cloud.bigquery.biglake.v1alpha1.Table} + */ +public final class Table extends com.google.protobuf.GeneratedMessageV3 + implements + // @@protoc_insertion_point(message_implements:google.cloud.bigquery.biglake.v1alpha1.Table) + TableOrBuilder { + private static final long serialVersionUID = 0L; + // Use Table.newBuilder() to construct. + private Table(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + + private Table() { + name_ = ""; + type_ = 0; + } + + @java.lang.Override + @SuppressWarnings({"unused"}) + protected java.lang.Object newInstance(UnusedPrivateParameter unused) { + return new Table(); + } + + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet getUnknownFields() { + return this.unknownFields; + } + + public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { + return com.google.cloud.bigquery.biglake.v1alpha1.MetastoreProto + .internal_static_google_cloud_bigquery_biglake_v1alpha1_Table_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return com.google.cloud.bigquery.biglake.v1alpha1.MetastoreProto + .internal_static_google_cloud_bigquery_biglake_v1alpha1_Table_fieldAccessorTable + .ensureFieldAccessorsInitialized( + com.google.cloud.bigquery.biglake.v1alpha1.Table.class, + com.google.cloud.bigquery.biglake.v1alpha1.Table.Builder.class); + } + + /** + * + * + *
+   * The table type.
+   * 
+ * + * Protobuf enum {@code google.cloud.bigquery.biglake.v1alpha1.Table.Type} + */ + public enum Type implements com.google.protobuf.ProtocolMessageEnum { + /** + * + * + *
+     * The type is not specified.
+     * 
+ * + * TYPE_UNSPECIFIED = 0; + */ + TYPE_UNSPECIFIED(0), + /** + * + * + *
+     * Represents a table compatible with Hive Metastore tables.
+     * 
+ * + * HIVE = 1; + */ + HIVE(1), + UNRECOGNIZED(-1), + ; + + /** + * + * + *
+     * The type is not specified.
+     * 
+ * + * TYPE_UNSPECIFIED = 0; + */ + public static final int TYPE_UNSPECIFIED_VALUE = 0; + /** + * + * + *
+     * Represents a table compatible with Hive Metastore tables.
+     * 
+ * + * HIVE = 1; + */ + public static final int HIVE_VALUE = 1; + + public final int getNumber() { + if (this == UNRECOGNIZED) { + throw new java.lang.IllegalArgumentException( + "Can't get the number of an unknown enum value."); + } + return value; + } + + /** + * @param value The numeric wire value of the corresponding enum entry. + * @return The enum associated with the given numeric wire value. + * @deprecated Use {@link #forNumber(int)} instead. + */ + @java.lang.Deprecated + public static Type valueOf(int value) { + return forNumber(value); + } + + /** + * @param value The numeric wire value of the corresponding enum entry. + * @return The enum associated with the given numeric wire value. + */ + public static Type forNumber(int value) { + switch (value) { + case 0: + return TYPE_UNSPECIFIED; + case 1: + return HIVE; + default: + return null; + } + } + + public static com.google.protobuf.Internal.EnumLiteMap internalGetValueMap() { + return internalValueMap; + } + + private static final com.google.protobuf.Internal.EnumLiteMap internalValueMap = + new com.google.protobuf.Internal.EnumLiteMap() { + public Type findValueByNumber(int number) { + return Type.forNumber(number); + } + }; + + public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() { + if (this == UNRECOGNIZED) { + throw new java.lang.IllegalStateException( + "Can't get the descriptor of an unrecognized enum value."); + } + return getDescriptor().getValues().get(ordinal()); + } + + public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() { + return getDescriptor(); + } + + public static final com.google.protobuf.Descriptors.EnumDescriptor getDescriptor() { + return com.google.cloud.bigquery.biglake.v1alpha1.Table.getDescriptor().getEnumTypes().get(0); + } + + private static final Type[] VALUES = values(); + + public static Type valueOf(com.google.protobuf.Descriptors.EnumValueDescriptor desc) { + if (desc.getType() != getDescriptor()) { + throw new java.lang.IllegalArgumentException("EnumValueDescriptor is not for this type."); + } + if (desc.getIndex() == -1) { + return UNRECOGNIZED; + } + return VALUES[desc.getIndex()]; + } + + private final int value; + + private Type(int value) { + this.value = value; + } + + // @@protoc_insertion_point(enum_scope:google.cloud.bigquery.biglake.v1alpha1.Table.Type) + } + + private int optionsCase_ = 0; + private java.lang.Object options_; + + public enum OptionsCase + implements + com.google.protobuf.Internal.EnumLite, + com.google.protobuf.AbstractMessage.InternalOneOfEnum { + HIVE_OPTIONS(7), + OPTIONS_NOT_SET(0); + private final int value; + + private OptionsCase(int value) { + this.value = value; + } + /** + * @param value The number of the enum to look for. + * @return The enum associated with the given number. + * @deprecated Use {@link #forNumber(int)} instead. + */ + @java.lang.Deprecated + public static OptionsCase valueOf(int value) { + return forNumber(value); + } + + public static OptionsCase forNumber(int value) { + switch (value) { + case 7: + return HIVE_OPTIONS; + case 0: + return OPTIONS_NOT_SET; + default: + return null; + } + } + + public int getNumber() { + return this.value; + } + }; + + public OptionsCase getOptionsCase() { + return OptionsCase.forNumber(optionsCase_); + } + + public static final int HIVE_OPTIONS_FIELD_NUMBER = 7; + /** + * + * + *
+   * Options of a Hive table.
+   * 
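+   *
+   * A small read sketch for an existing {@code Table} instance, using the oneof
+   * accessors defined in this class:
+   * <pre>{@code
+   * if (table.getOptionsCase() == Table.OptionsCase.HIVE_OPTIONS) {
+   *   HiveTableOptions hiveOptions = table.getHiveOptions();
+   * }
+   * }</pre>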
+ * + * .google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions hive_options = 7; + * + * @return Whether the hiveOptions field is set. + */ + @java.lang.Override + public boolean hasHiveOptions() { + return optionsCase_ == 7; + } + /** + * + * + *
+   * Options of a Hive table.
+   * 
+ * + * .google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions hive_options = 7; + * + * @return The hiveOptions. + */ + @java.lang.Override + public com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions getHiveOptions() { + if (optionsCase_ == 7) { + return (com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions) options_; + } + return com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions.getDefaultInstance(); + } + /** + * + * + *
+   * Options of a Hive table.
+   * 
+ * + * .google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions hive_options = 7; + */ + @java.lang.Override + public com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptionsOrBuilder + getHiveOptionsOrBuilder() { + if (optionsCase_ == 7) { + return (com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions) options_; + } + return com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions.getDefaultInstance(); + } + + public static final int NAME_FIELD_NUMBER = 1; + + @SuppressWarnings("serial") + private volatile java.lang.Object name_ = ""; + /** + * + * + *
+   * Output only. The resource name.
+   * Format:
+   * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}/tables/{table_id}
+   * 
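+   *
+   * A hypothetical sketch of composing this resource name with the companion
+   * {@code TableName} helper; the factory signature is assumed from the standard
+   * resource-name codegen and is not shown in this hunk:
+   * <pre>{@code
+   * String name =
+   *     TableName.of("my-project", "us-central1", "my-catalog", "my-db", "my-table")
+   *         .toString();
+   * }</pre>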
+ * + * + * string name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... } + * + * + * @return The name. + */ + @java.lang.Override + public java.lang.String getName() { + java.lang.Object ref = name_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + name_ = s; + return s; + } + } + /** + * + * + *
+   * Output only. The resource name.
+   * Format:
+   * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}/tables/{table_id}
+   * 
+ * + * + * string name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... } + * + * + * @return The bytes for name. + */ + @java.lang.Override + public com.google.protobuf.ByteString getNameBytes() { + java.lang.Object ref = name_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); + name_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int CREATE_TIME_FIELD_NUMBER = 2; + private com.google.protobuf.Timestamp createTime_; + /** + * + * + *
+   * Output only. The creation time of the table.
+   * 
+ * + * .google.protobuf.Timestamp create_time = 2 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + * + * @return Whether the createTime field is set. + */ + @java.lang.Override + public boolean hasCreateTime() { + return createTime_ != null; + } + /** + * + * + *
+   * Output only. The creation time of the table.
+   * 
+ * + * .google.protobuf.Timestamp create_time = 2 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + * + * @return The createTime. + */ + @java.lang.Override + public com.google.protobuf.Timestamp getCreateTime() { + return createTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : createTime_; + } + /** + * + * + *
+   * Output only. The creation time of the table.
+   * 
+ * + * .google.protobuf.Timestamp create_time = 2 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + @java.lang.Override + public com.google.protobuf.TimestampOrBuilder getCreateTimeOrBuilder() { + return createTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : createTime_; + } + + public static final int UPDATE_TIME_FIELD_NUMBER = 3; + private com.google.protobuf.Timestamp updateTime_; + /** + * + * + *
+   * Output only. The last modification time of the table.
+   * 
+ * + * .google.protobuf.Timestamp update_time = 3 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + * + * @return Whether the updateTime field is set. + */ + @java.lang.Override + public boolean hasUpdateTime() { + return updateTime_ != null; + } + /** + * + * + *
+   * Output only. The last modification time of the table.
+   * 
+ * + * .google.protobuf.Timestamp update_time = 3 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + * + * @return The updateTime. + */ + @java.lang.Override + public com.google.protobuf.Timestamp getUpdateTime() { + return updateTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : updateTime_; + } + /** + * + * + *
+   * Output only. The last modification time of the table.
+   * 
+ * + * .google.protobuf.Timestamp update_time = 3 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + @java.lang.Override + public com.google.protobuf.TimestampOrBuilder getUpdateTimeOrBuilder() { + return updateTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : updateTime_; + } + + public static final int DELETE_TIME_FIELD_NUMBER = 4; + private com.google.protobuf.Timestamp deleteTime_; + /** + * + * + *
+   * Output only. The deletion time of the table. Only set after the table is
+   * deleted.
+   * 
+ * + * .google.protobuf.Timestamp delete_time = 4 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + * + * @return Whether the deleteTime field is set. + */ + @java.lang.Override + public boolean hasDeleteTime() { + return deleteTime_ != null; + } + /** + * + * + *
+   * Output only. The deletion time of the table. Only set after the table is
+   * deleted.
+   * 
+ * + * .google.protobuf.Timestamp delete_time = 4 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + * + * @return The deleteTime. + */ + @java.lang.Override + public com.google.protobuf.Timestamp getDeleteTime() { + return deleteTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : deleteTime_; + } + /** + * + * + *
+   * Output only. The deletion time of the table. Only set after the table is
+   * deleted.
+   * 
+ * + * .google.protobuf.Timestamp delete_time = 4 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + @java.lang.Override + public com.google.protobuf.TimestampOrBuilder getDeleteTimeOrBuilder() { + return deleteTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : deleteTime_; + } + + public static final int EXPIRE_TIME_FIELD_NUMBER = 5; + private com.google.protobuf.Timestamp expireTime_; + /** + * + * + *
+   * Output only. The time when this table is considered expired. Only set after
+   * the table is deleted.
+   * 
+ * + * .google.protobuf.Timestamp expire_time = 5 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + * + * @return Whether the expireTime field is set. + */ + @java.lang.Override + public boolean hasExpireTime() { + return expireTime_ != null; + } + /** + * + * + *
+   * Output only. The time when this table is considered expired. Only set after
+   * the table is deleted.
+   * 
+ * + * .google.protobuf.Timestamp expire_time = 5 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + * + * @return The expireTime. + */ + @java.lang.Override + public com.google.protobuf.Timestamp getExpireTime() { + return expireTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : expireTime_; + } + /** + * + * + *
+   * Output only. The time when this table is considered expired. Only set after
+   * the table is deleted.
+   * 
+ * + * .google.protobuf.Timestamp expire_time = 5 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + @java.lang.Override + public com.google.protobuf.TimestampOrBuilder getExpireTimeOrBuilder() { + return expireTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : expireTime_; + } + + public static final int TYPE_FIELD_NUMBER = 6; + private int type_ = 0; + /** + * + * + *
+   * The table type.
+   * 
+ * + * .google.cloud.bigquery.biglake.v1alpha1.Table.Type type = 6; + * + * @return The enum numeric value on the wire for type. + */ + @java.lang.Override + public int getTypeValue() { + return type_; + } + /** + * + * + *
+   * The table type.
+   * 
+ * + * .google.cloud.bigquery.biglake.v1alpha1.Table.Type type = 6; + * + * @return The type. + */ + @java.lang.Override + public com.google.cloud.bigquery.biglake.v1alpha1.Table.Type getType() { + com.google.cloud.bigquery.biglake.v1alpha1.Table.Type result = + com.google.cloud.bigquery.biglake.v1alpha1.Table.Type.forNumber(type_); + return result == null + ? com.google.cloud.bigquery.biglake.v1alpha1.Table.Type.UNRECOGNIZED + : result; + } + + private byte memoizedIsInitialized = -1; + + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 1, name_); + } + if (createTime_ != null) { + output.writeMessage(2, getCreateTime()); + } + if (updateTime_ != null) { + output.writeMessage(3, getUpdateTime()); + } + if (deleteTime_ != null) { + output.writeMessage(4, getDeleteTime()); + } + if (expireTime_ != null) { + output.writeMessage(5, getExpireTime()); + } + if (type_ + != com.google.cloud.bigquery.biglake.v1alpha1.Table.Type.TYPE_UNSPECIFIED.getNumber()) { + output.writeEnum(6, type_); + } + if (optionsCase_ == 7) { + output.writeMessage( + 7, (com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions) options_); + } + getUnknownFields().writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, name_); + } + if (createTime_ != null) { + size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getCreateTime()); + } + if (updateTime_ != null) { + size += com.google.protobuf.CodedOutputStream.computeMessageSize(3, getUpdateTime()); + } + if (deleteTime_ != null) { + size += com.google.protobuf.CodedOutputStream.computeMessageSize(4, getDeleteTime()); + } + if (expireTime_ != null) { + size += com.google.protobuf.CodedOutputStream.computeMessageSize(5, getExpireTime()); + } + if (type_ + != com.google.cloud.bigquery.biglake.v1alpha1.Table.Type.TYPE_UNSPECIFIED.getNumber()) { + size += com.google.protobuf.CodedOutputStream.computeEnumSize(6, type_); + } + if (optionsCase_ == 7) { + size += + com.google.protobuf.CodedOutputStream.computeMessageSize( + 7, (com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions) options_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof com.google.cloud.bigquery.biglake.v1alpha1.Table)) { + return super.equals(obj); + } + com.google.cloud.bigquery.biglake.v1alpha1.Table other = + (com.google.cloud.bigquery.biglake.v1alpha1.Table) obj; + + if (!getName().equals(other.getName())) return false; + if (hasCreateTime() != other.hasCreateTime()) return false; + if (hasCreateTime()) { + if (!getCreateTime().equals(other.getCreateTime())) return false; + } + if (hasUpdateTime() != other.hasUpdateTime()) return false; + if (hasUpdateTime()) { + if (!getUpdateTime().equals(other.getUpdateTime())) return false; + } + 
if (hasDeleteTime() != other.hasDeleteTime()) return false; + if (hasDeleteTime()) { + if (!getDeleteTime().equals(other.getDeleteTime())) return false; + } + if (hasExpireTime() != other.hasExpireTime()) return false; + if (hasExpireTime()) { + if (!getExpireTime().equals(other.getExpireTime())) return false; + } + if (type_ != other.type_) return false; + if (!getOptionsCase().equals(other.getOptionsCase())) return false; + switch (optionsCase_) { + case 7: + if (!getHiveOptions().equals(other.getHiveOptions())) return false; + break; + case 0: + default: + } + if (!getUnknownFields().equals(other.getUnknownFields())) return false; + return true; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + hash = (37 * hash) + NAME_FIELD_NUMBER; + hash = (53 * hash) + getName().hashCode(); + if (hasCreateTime()) { + hash = (37 * hash) + CREATE_TIME_FIELD_NUMBER; + hash = (53 * hash) + getCreateTime().hashCode(); + } + if (hasUpdateTime()) { + hash = (37 * hash) + UPDATE_TIME_FIELD_NUMBER; + hash = (53 * hash) + getUpdateTime().hashCode(); + } + if (hasDeleteTime()) { + hash = (37 * hash) + DELETE_TIME_FIELD_NUMBER; + hash = (53 * hash) + getDeleteTime().hashCode(); + } + if (hasExpireTime()) { + hash = (37 * hash) + EXPIRE_TIME_FIELD_NUMBER; + hash = (53 * hash) + getExpireTime().hashCode(); + } + hash = (37 * hash) + TYPE_FIELD_NUMBER; + hash = (53 * hash) + type_; + switch (optionsCase_) { + case 7: + hash = (37 * hash) + HIVE_OPTIONS_FIELD_NUMBER; + hash = (53 * hash) + getHiveOptions().hashCode(); + break; + case 0: + default: + } + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.Table parseFrom(java.nio.ByteBuffer data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.Table parseFrom( + java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.Table parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.Table parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.Table parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.Table parseFrom( + byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.Table parseFrom( + java.io.InputStream input) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.Table parseFrom( + 
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException( + PARSER, input, extensionRegistry); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.Table parseDelimitedFrom( + java.io.InputStream input) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.Table parseDelimitedFrom( + java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( + PARSER, input, extensionRegistry); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.Table parseFrom( + com.google.protobuf.CodedInputStream input) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.Table parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException( + PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { + return newBuilder(); + } + + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + + public static Builder newBuilder(com.google.cloud.bigquery.biglake.v1alpha1.Table prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * + * + *
+   * Represents a table.
+   * 
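The generated Builder follows the usual protobuf pattern: mutate a Builder, then freeze it with build(). Below is a minimal sketch (not part of the generated sources) that uses only methods visible in this diff and assumes the proto-google-cloud-biglake-v1alpha1 artifact is on the classpath.

import com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions;
import com.google.cloud.bigquery.biglake.v1alpha1.Table;

public class TableBuilderSketch {
  public static void main(String[] args) {
    // Build an immutable Table message; unset fields keep their proto3 defaults.
    Table table =
        Table.newBuilder()
            .setHiveOptions(HiveTableOptions.getDefaultInstance())
            .build();

    // toBuilder() copies the message back into a mutable Builder for follow-up edits.
    Table cleared = table.toBuilder().clearHiveOptions().build();

    System.out.println(table.getOptionsCase());    // HIVE_OPTIONS
    System.out.println(table.hasHiveOptions());    // true
    System.out.println(cleared.hasHiveOptions());  // false
  }
}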
+ * + * Protobuf type {@code google.cloud.bigquery.biglake.v1alpha1.Table} + */ + public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder + implements + // @@protoc_insertion_point(builder_implements:google.cloud.bigquery.biglake.v1alpha1.Table) + com.google.cloud.bigquery.biglake.v1alpha1.TableOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { + return com.google.cloud.bigquery.biglake.v1alpha1.MetastoreProto + .internal_static_google_cloud_bigquery_biglake_v1alpha1_Table_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return com.google.cloud.bigquery.biglake.v1alpha1.MetastoreProto + .internal_static_google_cloud_bigquery_biglake_v1alpha1_Table_fieldAccessorTable + .ensureFieldAccessorsInitialized( + com.google.cloud.bigquery.biglake.v1alpha1.Table.class, + com.google.cloud.bigquery.biglake.v1alpha1.Table.Builder.class); + } + + // Construct using com.google.cloud.bigquery.biglake.v1alpha1.Table.newBuilder() + private Builder() {} + + private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + } + + @java.lang.Override + public Builder clear() { + super.clear(); + bitField0_ = 0; + if (hiveOptionsBuilder_ != null) { + hiveOptionsBuilder_.clear(); + } + name_ = ""; + createTime_ = null; + if (createTimeBuilder_ != null) { + createTimeBuilder_.dispose(); + createTimeBuilder_ = null; + } + updateTime_ = null; + if (updateTimeBuilder_ != null) { + updateTimeBuilder_.dispose(); + updateTimeBuilder_ = null; + } + deleteTime_ = null; + if (deleteTimeBuilder_ != null) { + deleteTimeBuilder_.dispose(); + deleteTimeBuilder_ = null; + } + expireTime_ = null; + if (expireTimeBuilder_ != null) { + expireTimeBuilder_.dispose(); + expireTimeBuilder_ = null; + } + type_ = 0; + optionsCase_ = 0; + options_ = null; + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { + return com.google.cloud.bigquery.biglake.v1alpha1.MetastoreProto + .internal_static_google_cloud_bigquery_biglake_v1alpha1_Table_descriptor; + } + + @java.lang.Override + public com.google.cloud.bigquery.biglake.v1alpha1.Table getDefaultInstanceForType() { + return com.google.cloud.bigquery.biglake.v1alpha1.Table.getDefaultInstance(); + } + + @java.lang.Override + public com.google.cloud.bigquery.biglake.v1alpha1.Table build() { + com.google.cloud.bigquery.biglake.v1alpha1.Table result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public com.google.cloud.bigquery.biglake.v1alpha1.Table buildPartial() { + com.google.cloud.bigquery.biglake.v1alpha1.Table result = + new com.google.cloud.bigquery.biglake.v1alpha1.Table(this); + if (bitField0_ != 0) { + buildPartial0(result); + } + buildPartialOneofs(result); + onBuilt(); + return result; + } + + private void buildPartial0(com.google.cloud.bigquery.biglake.v1alpha1.Table result) { + int from_bitField0_ = bitField0_; + if (((from_bitField0_ & 0x00000002) != 0)) { + result.name_ = name_; + } + if (((from_bitField0_ & 0x00000004) != 0)) { + result.createTime_ = createTimeBuilder_ == null ? createTime_ : createTimeBuilder_.build(); + } + if (((from_bitField0_ & 0x00000008) != 0)) { + result.updateTime_ = updateTimeBuilder_ == null ? 
updateTime_ : updateTimeBuilder_.build(); + } + if (((from_bitField0_ & 0x00000010) != 0)) { + result.deleteTime_ = deleteTimeBuilder_ == null ? deleteTime_ : deleteTimeBuilder_.build(); + } + if (((from_bitField0_ & 0x00000020) != 0)) { + result.expireTime_ = expireTimeBuilder_ == null ? expireTime_ : expireTimeBuilder_.build(); + } + if (((from_bitField0_ & 0x00000040) != 0)) { + result.type_ = type_; + } + } + + private void buildPartialOneofs(com.google.cloud.bigquery.biglake.v1alpha1.Table result) { + result.optionsCase_ = optionsCase_; + result.options_ = this.options_; + if (optionsCase_ == 7 && hiveOptionsBuilder_ != null) { + result.options_ = hiveOptionsBuilder_.build(); + } + } + + @java.lang.Override + public Builder clone() { + return super.clone(); + } + + @java.lang.Override + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { + return super.setField(field, value); + } + + @java.lang.Override + public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { + return super.clearField(field); + } + + @java.lang.Override + public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return super.clearOneof(oneof); + } + + @java.lang.Override + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { + return super.setRepeatedField(field, index, value); + } + + @java.lang.Override + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { + return super.addRepeatedField(field, value); + } + + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof com.google.cloud.bigquery.biglake.v1alpha1.Table) { + return mergeFrom((com.google.cloud.bigquery.biglake.v1alpha1.Table) other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(com.google.cloud.bigquery.biglake.v1alpha1.Table other) { + if (other == com.google.cloud.bigquery.biglake.v1alpha1.Table.getDefaultInstance()) + return this; + if (!other.getName().isEmpty()) { + name_ = other.name_; + bitField0_ |= 0x00000002; + onChanged(); + } + if (other.hasCreateTime()) { + mergeCreateTime(other.getCreateTime()); + } + if (other.hasUpdateTime()) { + mergeUpdateTime(other.getUpdateTime()); + } + if (other.hasDeleteTime()) { + mergeDeleteTime(other.getDeleteTime()); + } + if (other.hasExpireTime()) { + mergeExpireTime(other.getExpireTime()); + } + if (other.type_ != 0) { + setTypeValue(other.getTypeValue()); + } + switch (other.getOptionsCase()) { + case HIVE_OPTIONS: + { + mergeHiveOptions(other.getHiveOptions()); + break; + } + case OPTIONS_NOT_SET: + { + break; + } + } + this.mergeUnknownFields(other.getUnknownFields()); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 10: + { + name_ = input.readStringRequireUtf8(); + bitField0_ |= 0x00000002; + break; + } // case 10 + case 18: + { + input.readMessage(getCreateTimeFieldBuilder().getBuilder(), 
extensionRegistry); + bitField0_ |= 0x00000004; + break; + } // case 18 + case 26: + { + input.readMessage(getUpdateTimeFieldBuilder().getBuilder(), extensionRegistry); + bitField0_ |= 0x00000008; + break; + } // case 26 + case 34: + { + input.readMessage(getDeleteTimeFieldBuilder().getBuilder(), extensionRegistry); + bitField0_ |= 0x00000010; + break; + } // case 34 + case 42: + { + input.readMessage(getExpireTimeFieldBuilder().getBuilder(), extensionRegistry); + bitField0_ |= 0x00000020; + break; + } // case 42 + case 48: + { + type_ = input.readEnum(); + bitField0_ |= 0x00000040; + break; + } // case 48 + case 58: + { + input.readMessage(getHiveOptionsFieldBuilder().getBuilder(), extensionRegistry); + optionsCase_ = 7; + break; + } // case 58 + default: + { + if (!super.parseUnknownField(input, extensionRegistry, tag)) { + done = true; // was an endgroup tag + } + break; + } // default: + } // switch (tag) + } // while (!done) + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.unwrapIOException(); + } finally { + onChanged(); + } // finally + return this; + } + + private int optionsCase_ = 0; + private java.lang.Object options_; + + public OptionsCase getOptionsCase() { + return OptionsCase.forNumber(optionsCase_); + } + + public Builder clearOptions() { + optionsCase_ = 0; + options_ = null; + onChanged(); + return this; + } + + private int bitField0_; + + private com.google.protobuf.SingleFieldBuilderV3< + com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions, + com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions.Builder, + com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptionsOrBuilder> + hiveOptionsBuilder_; + /** + * + * + *
+     * Options of a Hive table.
+     * 
+ * + * .google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions hive_options = 7; + * + * @return Whether the hiveOptions field is set. + */ + @java.lang.Override + public boolean hasHiveOptions() { + return optionsCase_ == 7; + } + /** + * + * + *
+     * Options of a Hive table.
+     * 
+ * + * .google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions hive_options = 7; + * + * @return The hiveOptions. + */ + @java.lang.Override + public com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions getHiveOptions() { + if (hiveOptionsBuilder_ == null) { + if (optionsCase_ == 7) { + return (com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions) options_; + } + return com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions.getDefaultInstance(); + } else { + if (optionsCase_ == 7) { + return hiveOptionsBuilder_.getMessage(); + } + return com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions.getDefaultInstance(); + } + } + /** + * + * + *
+     * Options of a Hive table.
+     * 
+ * + * .google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions hive_options = 7; + */ + public Builder setHiveOptions( + com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions value) { + if (hiveOptionsBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + options_ = value; + onChanged(); + } else { + hiveOptionsBuilder_.setMessage(value); + } + optionsCase_ = 7; + return this; + } + /** + * + * + *
+     * Options of a Hive table.
+     * 
+ * + * .google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions hive_options = 7; + */ + public Builder setHiveOptions( + com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions.Builder builderForValue) { + if (hiveOptionsBuilder_ == null) { + options_ = builderForValue.build(); + onChanged(); + } else { + hiveOptionsBuilder_.setMessage(builderForValue.build()); + } + optionsCase_ = 7; + return this; + } + /** + * + * + *
+     * Options of a Hive table.
+     * 
+ * + * .google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions hive_options = 7; + */ + public Builder mergeHiveOptions( + com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions value) { + if (hiveOptionsBuilder_ == null) { + if (optionsCase_ == 7 + && options_ + != com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions + .getDefaultInstance()) { + options_ = + com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions.newBuilder( + (com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions) options_) + .mergeFrom(value) + .buildPartial(); + } else { + options_ = value; + } + onChanged(); + } else { + if (optionsCase_ == 7) { + hiveOptionsBuilder_.mergeFrom(value); + } else { + hiveOptionsBuilder_.setMessage(value); + } + } + optionsCase_ = 7; + return this; + } + /** + * + * + *
+     * Options of a Hive table.
+     * 
+ * + * .google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions hive_options = 7; + */ + public Builder clearHiveOptions() { + if (hiveOptionsBuilder_ == null) { + if (optionsCase_ == 7) { + optionsCase_ = 0; + options_ = null; + onChanged(); + } + } else { + if (optionsCase_ == 7) { + optionsCase_ = 0; + options_ = null; + } + hiveOptionsBuilder_.clear(); + } + return this; + } + /** + * + * + *
+     * Options of a Hive table.
+     * 
+ * + * .google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions hive_options = 7; + */ + public com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions.Builder + getHiveOptionsBuilder() { + return getHiveOptionsFieldBuilder().getBuilder(); + } + /** + * + * + *
+     * Options of a Hive table.
+     * 
+ * + * .google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions hive_options = 7; + */ + @java.lang.Override + public com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptionsOrBuilder + getHiveOptionsOrBuilder() { + if ((optionsCase_ == 7) && (hiveOptionsBuilder_ != null)) { + return hiveOptionsBuilder_.getMessageOrBuilder(); + } else { + if (optionsCase_ == 7) { + return (com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions) options_; + } + return com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions.getDefaultInstance(); + } + } + /** + * + * + *
+     * Options of a Hive table.
+     * 
+ * + * .google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions hive_options = 7; + */ + private com.google.protobuf.SingleFieldBuilderV3< + com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions, + com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions.Builder, + com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptionsOrBuilder> + getHiveOptionsFieldBuilder() { + if (hiveOptionsBuilder_ == null) { + if (!(optionsCase_ == 7)) { + options_ = + com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions.getDefaultInstance(); + } + hiveOptionsBuilder_ = + new com.google.protobuf.SingleFieldBuilderV3< + com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions, + com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions.Builder, + com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptionsOrBuilder>( + (com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions) options_, + getParentForChildren(), + isClean()); + options_ = null; + } + optionsCase_ = 7; + onChanged(); + return hiveOptionsBuilder_; + } + + private java.lang.Object name_ = ""; + /** + * + * + *
+     * Output only. The resource name.
+     * Format:
+     * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}/tables/{table_id}
+     * 
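The name format documented here is exactly what the TableName helper (added later in this patch) produces and parses. A small sketch with placeholder project, location, catalog, database, and table IDs:

import com.google.cloud.bigquery.biglake.v1alpha1.TableName;

public class TableNameFormatSketch {
  public static void main(String[] args) {
    // Compose the documented resource name from its components.
    String name =
        TableName.format("my-project", "us", "my-catalog", "my-database", "my-table");
    // -> projects/my-project/locations/us/catalogs/my-catalog/databases/my-database/tables/my-table

    // Parse it back into a structured TableName.
    TableName parsed = TableName.parse(name);
    System.out.println(parsed.getDatabase());            // my-database
    System.out.println(TableName.isParsableFrom(name));  // true
  }
}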
+ * + * + * string name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... } + * + * + * @return The name. + */ + public java.lang.String getName() { + java.lang.Object ref = name_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + name_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + * + * + *
+     * Output only. The resource name.
+     * Format:
+     * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}/tables/{table_id}
+     * 
+ * + * + * string name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... } + * + * + * @return The bytes for name. + */ + public com.google.protobuf.ByteString getNameBytes() { + java.lang.Object ref = name_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); + name_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * + * + *
+     * Output only. The resource name.
+     * Format:
+     * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}/tables/{table_id}
+     * 
+ * + * + * string name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... } + * + * + * @param value The name to set. + * @return This builder for chaining. + */ + public Builder setName(java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + name_ = value; + bitField0_ |= 0x00000002; + onChanged(); + return this; + } + /** + * + * + *
+     * Output only. The resource name.
+     * Format:
+     * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}/tables/{table_id}
+     * 
+ * + * + * string name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... } + * + * + * @return This builder for chaining. + */ + public Builder clearName() { + name_ = getDefaultInstance().getName(); + bitField0_ = (bitField0_ & ~0x00000002); + onChanged(); + return this; + } + /** + * + * + *
+     * Output only. The resource name.
+     * Format:
+     * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}/tables/{table_id}
+     * 
+ * + * + * string name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... } + * + * + * @param value The bytes for name to set. + * @return This builder for chaining. + */ + public Builder setNameBytes(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + name_ = value; + bitField0_ |= 0x00000002; + onChanged(); + return this; + } + + private com.google.protobuf.Timestamp createTime_; + private com.google.protobuf.SingleFieldBuilderV3< + com.google.protobuf.Timestamp, + com.google.protobuf.Timestamp.Builder, + com.google.protobuf.TimestampOrBuilder> + createTimeBuilder_; + /** + * + * + *
+     * Output only. The creation time of the table.
+     * 
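Since create_time is populated by the service, locally built messages leave it unset and getCreateTime() returns the Timestamp default instance. A sketch of converting the protobuf Timestamp to java.time without any extra dependency:

import com.google.cloud.bigquery.biglake.v1alpha1.Table;
import com.google.protobuf.Timestamp;
import java.time.Instant;

public class CreateTimeSketch {
  // Convert the server-populated create_time into a java.time.Instant.
  static Instant createTimeOf(Table table) {
    Timestamp ts = table.getCreateTime(); // default instance when the field is unset
    return Instant.ofEpochSecond(ts.getSeconds(), ts.getNanos());
  }

  public static void main(String[] args) {
    // On a freshly built local message the field is unset, so this prints the epoch.
    System.out.println(createTimeOf(Table.getDefaultInstance())); // 1970-01-01T00:00:00Z
  }
}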
+ * + * + * .google.protobuf.Timestamp create_time = 2 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + * + * @return Whether the createTime field is set. + */ + public boolean hasCreateTime() { + return ((bitField0_ & 0x00000004) != 0); + } + /** + * + * + *
+     * Output only. The creation time of the table.
+     * 
+ * + * + * .google.protobuf.Timestamp create_time = 2 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + * + * @return The createTime. + */ + public com.google.protobuf.Timestamp getCreateTime() { + if (createTimeBuilder_ == null) { + return createTime_ == null + ? com.google.protobuf.Timestamp.getDefaultInstance() + : createTime_; + } else { + return createTimeBuilder_.getMessage(); + } + } + /** + * + * + *
+     * Output only. The creation time of the table.
+     * 
+ * + * + * .google.protobuf.Timestamp create_time = 2 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + public Builder setCreateTime(com.google.protobuf.Timestamp value) { + if (createTimeBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + createTime_ = value; + } else { + createTimeBuilder_.setMessage(value); + } + bitField0_ |= 0x00000004; + onChanged(); + return this; + } + /** + * + * + *
+     * Output only. The creation time of the table.
+     * 
+ * + * + * .google.protobuf.Timestamp create_time = 2 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + public Builder setCreateTime(com.google.protobuf.Timestamp.Builder builderForValue) { + if (createTimeBuilder_ == null) { + createTime_ = builderForValue.build(); + } else { + createTimeBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000004; + onChanged(); + return this; + } + /** + * + * + *
+     * Output only. The creation time of the table.
+     * 
+ * + * + * .google.protobuf.Timestamp create_time = 2 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + public Builder mergeCreateTime(com.google.protobuf.Timestamp value) { + if (createTimeBuilder_ == null) { + if (((bitField0_ & 0x00000004) != 0) + && createTime_ != null + && createTime_ != com.google.protobuf.Timestamp.getDefaultInstance()) { + getCreateTimeBuilder().mergeFrom(value); + } else { + createTime_ = value; + } + } else { + createTimeBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000004; + onChanged(); + return this; + } + /** + * + * + *
+     * Output only. The creation time of the table.
+     * 
+ * + * + * .google.protobuf.Timestamp create_time = 2 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + public Builder clearCreateTime() { + bitField0_ = (bitField0_ & ~0x00000004); + createTime_ = null; + if (createTimeBuilder_ != null) { + createTimeBuilder_.dispose(); + createTimeBuilder_ = null; + } + onChanged(); + return this; + } + /** + * + * + *
+     * Output only. The creation time of the table.
+     * 
+ * + * + * .google.protobuf.Timestamp create_time = 2 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + public com.google.protobuf.Timestamp.Builder getCreateTimeBuilder() { + bitField0_ |= 0x00000004; + onChanged(); + return getCreateTimeFieldBuilder().getBuilder(); + } + /** + * + * + *
+     * Output only. The creation time of the table.
+     * 
+ * + * + * .google.protobuf.Timestamp create_time = 2 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + public com.google.protobuf.TimestampOrBuilder getCreateTimeOrBuilder() { + if (createTimeBuilder_ != null) { + return createTimeBuilder_.getMessageOrBuilder(); + } else { + return createTime_ == null + ? com.google.protobuf.Timestamp.getDefaultInstance() + : createTime_; + } + } + /** + * + * + *
+     * Output only. The creation time of the table.
+     * 
+ * + * + * .google.protobuf.Timestamp create_time = 2 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + private com.google.protobuf.SingleFieldBuilderV3< + com.google.protobuf.Timestamp, + com.google.protobuf.Timestamp.Builder, + com.google.protobuf.TimestampOrBuilder> + getCreateTimeFieldBuilder() { + if (createTimeBuilder_ == null) { + createTimeBuilder_ = + new com.google.protobuf.SingleFieldBuilderV3< + com.google.protobuf.Timestamp, + com.google.protobuf.Timestamp.Builder, + com.google.protobuf.TimestampOrBuilder>( + getCreateTime(), getParentForChildren(), isClean()); + createTime_ = null; + } + return createTimeBuilder_; + } + + private com.google.protobuf.Timestamp updateTime_; + private com.google.protobuf.SingleFieldBuilderV3< + com.google.protobuf.Timestamp, + com.google.protobuf.Timestamp.Builder, + com.google.protobuf.TimestampOrBuilder> + updateTimeBuilder_; + /** + * + * + *
+     * Output only. The last modification time of the table.
+     * 
+ * + * + * .google.protobuf.Timestamp update_time = 3 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + * + * @return Whether the updateTime field is set. + */ + public boolean hasUpdateTime() { + return ((bitField0_ & 0x00000008) != 0); + } + /** + * + * + *
+     * Output only. The last modification time of the table.
+     * 
+ * + * + * .google.protobuf.Timestamp update_time = 3 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + * + * @return The updateTime. + */ + public com.google.protobuf.Timestamp getUpdateTime() { + if (updateTimeBuilder_ == null) { + return updateTime_ == null + ? com.google.protobuf.Timestamp.getDefaultInstance() + : updateTime_; + } else { + return updateTimeBuilder_.getMessage(); + } + } + /** + * + * + *
+     * Output only. The last modification time of the table.
+     * 
+ * + * + * .google.protobuf.Timestamp update_time = 3 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + public Builder setUpdateTime(com.google.protobuf.Timestamp value) { + if (updateTimeBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + updateTime_ = value; + } else { + updateTimeBuilder_.setMessage(value); + } + bitField0_ |= 0x00000008; + onChanged(); + return this; + } + /** + * + * + *
+     * Output only. The last modification time of the table.
+     * 
+ * + * + * .google.protobuf.Timestamp update_time = 3 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + public Builder setUpdateTime(com.google.protobuf.Timestamp.Builder builderForValue) { + if (updateTimeBuilder_ == null) { + updateTime_ = builderForValue.build(); + } else { + updateTimeBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000008; + onChanged(); + return this; + } + /** + * + * + *
+     * Output only. The last modification time of the table.
+     * 
+ * + * + * .google.protobuf.Timestamp update_time = 3 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + public Builder mergeUpdateTime(com.google.protobuf.Timestamp value) { + if (updateTimeBuilder_ == null) { + if (((bitField0_ & 0x00000008) != 0) + && updateTime_ != null + && updateTime_ != com.google.protobuf.Timestamp.getDefaultInstance()) { + getUpdateTimeBuilder().mergeFrom(value); + } else { + updateTime_ = value; + } + } else { + updateTimeBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000008; + onChanged(); + return this; + } + /** + * + * + *
+     * Output only. The last modification time of the table.
+     * 
+ * + * + * .google.protobuf.Timestamp update_time = 3 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + public Builder clearUpdateTime() { + bitField0_ = (bitField0_ & ~0x00000008); + updateTime_ = null; + if (updateTimeBuilder_ != null) { + updateTimeBuilder_.dispose(); + updateTimeBuilder_ = null; + } + onChanged(); + return this; + } + /** + * + * + *
+     * Output only. The last modification time of the table.
+     * 
+ * + * + * .google.protobuf.Timestamp update_time = 3 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + public com.google.protobuf.Timestamp.Builder getUpdateTimeBuilder() { + bitField0_ |= 0x00000008; + onChanged(); + return getUpdateTimeFieldBuilder().getBuilder(); + } + /** + * + * + *
+     * Output only. The last modification time of the table.
+     * 
+ * + * + * .google.protobuf.Timestamp update_time = 3 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + public com.google.protobuf.TimestampOrBuilder getUpdateTimeOrBuilder() { + if (updateTimeBuilder_ != null) { + return updateTimeBuilder_.getMessageOrBuilder(); + } else { + return updateTime_ == null + ? com.google.protobuf.Timestamp.getDefaultInstance() + : updateTime_; + } + } + /** + * + * + *
+     * Output only. The last modification time of the table.
+     * 
+ * + * + * .google.protobuf.Timestamp update_time = 3 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + private com.google.protobuf.SingleFieldBuilderV3< + com.google.protobuf.Timestamp, + com.google.protobuf.Timestamp.Builder, + com.google.protobuf.TimestampOrBuilder> + getUpdateTimeFieldBuilder() { + if (updateTimeBuilder_ == null) { + updateTimeBuilder_ = + new com.google.protobuf.SingleFieldBuilderV3< + com.google.protobuf.Timestamp, + com.google.protobuf.Timestamp.Builder, + com.google.protobuf.TimestampOrBuilder>( + getUpdateTime(), getParentForChildren(), isClean()); + updateTime_ = null; + } + return updateTimeBuilder_; + } + + private com.google.protobuf.Timestamp deleteTime_; + private com.google.protobuf.SingleFieldBuilderV3< + com.google.protobuf.Timestamp, + com.google.protobuf.Timestamp.Builder, + com.google.protobuf.TimestampOrBuilder> + deleteTimeBuilder_; + /** + * + * + *
+     * Output only. The deletion time of the table. Only set after the table is
+     * deleted.
+     * 
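Because delete_time (and expire_time below) are only set after the table has been deleted, their has* accessors double as a soft-delete check. A brief sketch, not part of the generated sources:

import com.google.cloud.bigquery.biglake.v1alpha1.Table;

public class SoftDeleteCheckSketch {
  // A table returned by the service has been soft-deleted once delete_time is set;
  // expire_time then records when the deleted table is considered expired.
  static boolean isDeleted(Table table) {
    return table.hasDeleteTime();
  }

  static boolean hasKnownExpiry(Table table) {
    return table.hasExpireTime();
  }

  public static void main(String[] args) {
    Table local = Table.getDefaultInstance();
    System.out.println(isDeleted(local)); // false: output-only fields are unset locally
  }
}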
+ * + * + * .google.protobuf.Timestamp delete_time = 4 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + * + * @return Whether the deleteTime field is set. + */ + public boolean hasDeleteTime() { + return ((bitField0_ & 0x00000010) != 0); + } + /** + * + * + *
+     * Output only. The deletion time of the table. Only set after the table is
+     * deleted.
+     * 
+ * + * + * .google.protobuf.Timestamp delete_time = 4 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + * + * @return The deleteTime. + */ + public com.google.protobuf.Timestamp getDeleteTime() { + if (deleteTimeBuilder_ == null) { + return deleteTime_ == null + ? com.google.protobuf.Timestamp.getDefaultInstance() + : deleteTime_; + } else { + return deleteTimeBuilder_.getMessage(); + } + } + /** + * + * + *
+     * Output only. The deletion time of the table. Only set after the table is
+     * deleted.
+     * 
+ * + * + * .google.protobuf.Timestamp delete_time = 4 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + public Builder setDeleteTime(com.google.protobuf.Timestamp value) { + if (deleteTimeBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + deleteTime_ = value; + } else { + deleteTimeBuilder_.setMessage(value); + } + bitField0_ |= 0x00000010; + onChanged(); + return this; + } + /** + * + * + *
+     * Output only. The deletion time of the table. Only set after the table is
+     * deleted.
+     * 
+ * + * + * .google.protobuf.Timestamp delete_time = 4 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + public Builder setDeleteTime(com.google.protobuf.Timestamp.Builder builderForValue) { + if (deleteTimeBuilder_ == null) { + deleteTime_ = builderForValue.build(); + } else { + deleteTimeBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000010; + onChanged(); + return this; + } + /** + * + * + *
+     * Output only. The deletion time of the table. Only set after the table is
+     * deleted.
+     * 
+ * + * + * .google.protobuf.Timestamp delete_time = 4 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + public Builder mergeDeleteTime(com.google.protobuf.Timestamp value) { + if (deleteTimeBuilder_ == null) { + if (((bitField0_ & 0x00000010) != 0) + && deleteTime_ != null + && deleteTime_ != com.google.protobuf.Timestamp.getDefaultInstance()) { + getDeleteTimeBuilder().mergeFrom(value); + } else { + deleteTime_ = value; + } + } else { + deleteTimeBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000010; + onChanged(); + return this; + } + /** + * + * + *
+     * Output only. The deletion time of the table. Only set after the table is
+     * deleted.
+     * 
+ * + * + * .google.protobuf.Timestamp delete_time = 4 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + public Builder clearDeleteTime() { + bitField0_ = (bitField0_ & ~0x00000010); + deleteTime_ = null; + if (deleteTimeBuilder_ != null) { + deleteTimeBuilder_.dispose(); + deleteTimeBuilder_ = null; + } + onChanged(); + return this; + } + /** + * + * + *
+     * Output only. The deletion time of the table. Only set after the table is
+     * deleted.
+     * 
+ * + * + * .google.protobuf.Timestamp delete_time = 4 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + public com.google.protobuf.Timestamp.Builder getDeleteTimeBuilder() { + bitField0_ |= 0x00000010; + onChanged(); + return getDeleteTimeFieldBuilder().getBuilder(); + } + /** + * + * + *
+     * Output only. The deletion time of the table. Only set after the table is
+     * deleted.
+     * 
+ * + * + * .google.protobuf.Timestamp delete_time = 4 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + public com.google.protobuf.TimestampOrBuilder getDeleteTimeOrBuilder() { + if (deleteTimeBuilder_ != null) { + return deleteTimeBuilder_.getMessageOrBuilder(); + } else { + return deleteTime_ == null + ? com.google.protobuf.Timestamp.getDefaultInstance() + : deleteTime_; + } + } + /** + * + * + *
+     * Output only. The deletion time of the table. Only set after the table is
+     * deleted.
+     * 
+ * + * + * .google.protobuf.Timestamp delete_time = 4 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + private com.google.protobuf.SingleFieldBuilderV3< + com.google.protobuf.Timestamp, + com.google.protobuf.Timestamp.Builder, + com.google.protobuf.TimestampOrBuilder> + getDeleteTimeFieldBuilder() { + if (deleteTimeBuilder_ == null) { + deleteTimeBuilder_ = + new com.google.protobuf.SingleFieldBuilderV3< + com.google.protobuf.Timestamp, + com.google.protobuf.Timestamp.Builder, + com.google.protobuf.TimestampOrBuilder>( + getDeleteTime(), getParentForChildren(), isClean()); + deleteTime_ = null; + } + return deleteTimeBuilder_; + } + + private com.google.protobuf.Timestamp expireTime_; + private com.google.protobuf.SingleFieldBuilderV3< + com.google.protobuf.Timestamp, + com.google.protobuf.Timestamp.Builder, + com.google.protobuf.TimestampOrBuilder> + expireTimeBuilder_; + /** + * + * + *
+     * Output only. The time when this table is considered expired. Only set after
+     * the table is deleted.
+     * 
+ * + * + * .google.protobuf.Timestamp expire_time = 5 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + * + * @return Whether the expireTime field is set. + */ + public boolean hasExpireTime() { + return ((bitField0_ & 0x00000020) != 0); + } + /** + * + * + *
+     * Output only. The time when this table is considered expired. Only set after
+     * the table is deleted.
+     * 
+ * + * + * .google.protobuf.Timestamp expire_time = 5 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + * + * @return The expireTime. + */ + public com.google.protobuf.Timestamp getExpireTime() { + if (expireTimeBuilder_ == null) { + return expireTime_ == null + ? com.google.protobuf.Timestamp.getDefaultInstance() + : expireTime_; + } else { + return expireTimeBuilder_.getMessage(); + } + } + /** + * + * + *
+     * Output only. The time when this table is considered expired. Only set after
+     * the table is deleted.
+     * 
+ * + * + * .google.protobuf.Timestamp expire_time = 5 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + public Builder setExpireTime(com.google.protobuf.Timestamp value) { + if (expireTimeBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + expireTime_ = value; + } else { + expireTimeBuilder_.setMessage(value); + } + bitField0_ |= 0x00000020; + onChanged(); + return this; + } + /** + * + * + *
+     * Output only. The time when this table is considered expired. Only set after
+     * the table is deleted.
+     * 
+ * + * + * .google.protobuf.Timestamp expire_time = 5 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + public Builder setExpireTime(com.google.protobuf.Timestamp.Builder builderForValue) { + if (expireTimeBuilder_ == null) { + expireTime_ = builderForValue.build(); + } else { + expireTimeBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000020; + onChanged(); + return this; + } + /** + * + * + *
+     * Output only. The time when this table is considered expired. Only set after
+     * the table is deleted.
+     * 
+ * + * + * .google.protobuf.Timestamp expire_time = 5 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + public Builder mergeExpireTime(com.google.protobuf.Timestamp value) { + if (expireTimeBuilder_ == null) { + if (((bitField0_ & 0x00000020) != 0) + && expireTime_ != null + && expireTime_ != com.google.protobuf.Timestamp.getDefaultInstance()) { + getExpireTimeBuilder().mergeFrom(value); + } else { + expireTime_ = value; + } + } else { + expireTimeBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000020; + onChanged(); + return this; + } + /** + * + * + *
+     * Output only. The time when this table is considered expired. Only set after
+     * the table is deleted.
+     * 
+ * + * + * .google.protobuf.Timestamp expire_time = 5 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + public Builder clearExpireTime() { + bitField0_ = (bitField0_ & ~0x00000020); + expireTime_ = null; + if (expireTimeBuilder_ != null) { + expireTimeBuilder_.dispose(); + expireTimeBuilder_ = null; + } + onChanged(); + return this; + } + /** + * + * + *
+     * Output only. The time when this table is considered expired. Only set after
+     * the table is deleted.
+     * 
+ * + * + * .google.protobuf.Timestamp expire_time = 5 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + public com.google.protobuf.Timestamp.Builder getExpireTimeBuilder() { + bitField0_ |= 0x00000020; + onChanged(); + return getExpireTimeFieldBuilder().getBuilder(); + } + /** + * + * + *
+     * Output only. The time when this table is considered expired. Only set after
+     * the table is deleted.
+     * 
+ * + * + * .google.protobuf.Timestamp expire_time = 5 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + public com.google.protobuf.TimestampOrBuilder getExpireTimeOrBuilder() { + if (expireTimeBuilder_ != null) { + return expireTimeBuilder_.getMessageOrBuilder(); + } else { + return expireTime_ == null + ? com.google.protobuf.Timestamp.getDefaultInstance() + : expireTime_; + } + } + /** + * + * + *
+     * Output only. The time when this table is considered expired. Only set after
+     * the table is deleted.
+     * 
+ * + * + * .google.protobuf.Timestamp expire_time = 5 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + private com.google.protobuf.SingleFieldBuilderV3< + com.google.protobuf.Timestamp, + com.google.protobuf.Timestamp.Builder, + com.google.protobuf.TimestampOrBuilder> + getExpireTimeFieldBuilder() { + if (expireTimeBuilder_ == null) { + expireTimeBuilder_ = + new com.google.protobuf.SingleFieldBuilderV3< + com.google.protobuf.Timestamp, + com.google.protobuf.Timestamp.Builder, + com.google.protobuf.TimestampOrBuilder>( + getExpireTime(), getParentForChildren(), isClean()); + expireTime_ = null; + } + return expireTimeBuilder_; + } + + private int type_ = 0; + /** + * + * + *
+     * The table type.
+     * 
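As with any proto3 enum, type values unknown to this generated client surface as UNRECOGNIZED, so forward-compatible callers should fall back to the raw wire value. A short sketch:

import com.google.cloud.bigquery.biglake.v1alpha1.Table;

public class TableTypeSketch {
  public static void main(String[] args) {
    Table table = Table.getDefaultInstance();

    // getType() maps the wire value onto the generated enum; values added to the
    // proto after this client was generated come back as UNRECOGNIZED.
    Table.Type type = table.getType();
    if (type == Table.Type.UNRECOGNIZED) {
      // Use the raw wire number rather than calling getNumber() on UNRECOGNIZED,
      // which throws IllegalArgumentException.
      System.out.println("unknown type value: " + table.getTypeValue());
    } else {
      System.out.println(type);
    }
  }
}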
+ * + * .google.cloud.bigquery.biglake.v1alpha1.Table.Type type = 6; + * + * @return The enum numeric value on the wire for type. + */ + @java.lang.Override + public int getTypeValue() { + return type_; + } + /** + * + * + *
+     * The table type.
+     * 
+ * + * .google.cloud.bigquery.biglake.v1alpha1.Table.Type type = 6; + * + * @param value The enum numeric value on the wire for type to set. + * @return This builder for chaining. + */ + public Builder setTypeValue(int value) { + type_ = value; + bitField0_ |= 0x00000040; + onChanged(); + return this; + } + /** + * + * + *
+     * The table type.
+     * 
+ * + * .google.cloud.bigquery.biglake.v1alpha1.Table.Type type = 6; + * + * @return The type. + */ + @java.lang.Override + public com.google.cloud.bigquery.biglake.v1alpha1.Table.Type getType() { + com.google.cloud.bigquery.biglake.v1alpha1.Table.Type result = + com.google.cloud.bigquery.biglake.v1alpha1.Table.Type.forNumber(type_); + return result == null + ? com.google.cloud.bigquery.biglake.v1alpha1.Table.Type.UNRECOGNIZED + : result; + } + /** + * + * + *
+     * The table type.
+     * 
+ * + * .google.cloud.bigquery.biglake.v1alpha1.Table.Type type = 6; + * + * @param value The type to set. + * @return This builder for chaining. + */ + public Builder setType(com.google.cloud.bigquery.biglake.v1alpha1.Table.Type value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000040; + type_ = value.getNumber(); + onChanged(); + return this; + } + /** + * + * + *
+     * The table type.
+     * 
+ * + * .google.cloud.bigquery.biglake.v1alpha1.Table.Type type = 6; + * + * @return This builder for chaining. + */ + public Builder clearType() { + bitField0_ = (bitField0_ & ~0x00000040); + type_ = 0; + onChanged(); + return this; + } + + @java.lang.Override + public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + // @@protoc_insertion_point(builder_scope:google.cloud.bigquery.biglake.v1alpha1.Table) + } + + // @@protoc_insertion_point(class_scope:google.cloud.bigquery.biglake.v1alpha1.Table) + private static final com.google.cloud.bigquery.biglake.v1alpha1.Table DEFAULT_INSTANCE; + + static { + DEFAULT_INSTANCE = new com.google.cloud.bigquery.biglake.v1alpha1.Table(); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.Table getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser
<Table> PARSER = + new com.google.protobuf.AbstractParser<Table>
() { + @java.lang.Override + public Table parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + Builder builder = newBuilder(); + try { + builder.mergeFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(builder.buildPartial()); + } catch (com.google.protobuf.UninitializedMessageException e) { + throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException(e) + .setUnfinishedMessage(builder.buildPartial()); + } + return builder.buildPartial(); + } + }; + + public static com.google.protobuf.Parser
<Table> parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser<Table>
getParserForType() { + return PARSER; + } + + @java.lang.Override + public com.google.cloud.bigquery.biglake.v1alpha1.Table getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } +} diff --git a/java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/TableName.java b/java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/TableName.java new file mode 100644 index 000000000000..dc3b40792364 --- /dev/null +++ b/java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/TableName.java @@ -0,0 +1,298 @@ +/* + * Copyright 2022 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.biglake.v1alpha1; + +import com.google.api.pathtemplate.PathTemplate; +import com.google.api.resourcenames.ResourceName; +import com.google.common.base.Preconditions; +import com.google.common.collect.ImmutableMap; +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import javax.annotation.Generated; + +// AUTO-GENERATED DOCUMENTATION AND CLASS. +@Generated("by gapic-generator-java") +public class TableName implements ResourceName { + private static final PathTemplate PROJECT_LOCATION_CATALOG_DATABASE_TABLE = + PathTemplate.createWithoutUrlEncoding( + "projects/{project}/locations/{location}/catalogs/{catalog}/databases/{database}/tables/{table}"); + private volatile Map fieldValuesMap; + private final String project; + private final String location; + private final String catalog; + private final String database; + private final String table; + + @Deprecated + protected TableName() { + project = null; + location = null; + catalog = null; + database = null; + table = null; + } + + private TableName(Builder builder) { + project = Preconditions.checkNotNull(builder.getProject()); + location = Preconditions.checkNotNull(builder.getLocation()); + catalog = Preconditions.checkNotNull(builder.getCatalog()); + database = Preconditions.checkNotNull(builder.getDatabase()); + table = Preconditions.checkNotNull(builder.getTable()); + } + + public String getProject() { + return project; + } + + public String getLocation() { + return location; + } + + public String getCatalog() { + return catalog; + } + + public String getDatabase() { + return database; + } + + public String getTable() { + return table; + } + + public static Builder newBuilder() { + return new Builder(); + } + + public Builder toBuilder() { + return new Builder(this); + } + + public static TableName of( + String project, String location, String catalog, String database, String table) { + return newBuilder() + .setProject(project) + .setLocation(location) + .setCatalog(catalog) + .setDatabase(database) + .setTable(table) + .build(); + } + + public static String format( + String project, String location, String catalog, String database, String table) { + return newBuilder() + .setProject(project) + 
.setLocation(location) + .setCatalog(catalog) + .setDatabase(database) + .setTable(table) + .build() + .toString(); + } + + public static TableName parse(String formattedString) { + if (formattedString.isEmpty()) { + return null; + } + Map<String, String> matchMap = + PROJECT_LOCATION_CATALOG_DATABASE_TABLE.validatedMatch( + formattedString, "TableName.parse: formattedString not in valid format"); + return of( + matchMap.get("project"), + matchMap.get("location"), + matchMap.get("catalog"), + matchMap.get("database"), + matchMap.get("table")); + } + + public static List<TableName> parseList(List<String> formattedStrings) { + List<TableName> list = new ArrayList<>(formattedStrings.size()); + for (String formattedString : formattedStrings) { + list.add(parse(formattedString)); + } + return list; + } + + public static List<String> toStringList(List<TableName> values) { + List<String> list = new ArrayList<>(values.size()); + for (TableName value : values) { + if (value == null) { + list.add(""); + } else { + list.add(value.toString()); + } + } + return list; + } + + public static boolean isParsableFrom(String formattedString) { + return PROJECT_LOCATION_CATALOG_DATABASE_TABLE.matches(formattedString); + } + + @Override + public Map<String, String> getFieldValuesMap() { + if (fieldValuesMap == null) { + synchronized (this) { + if (fieldValuesMap == null) { + ImmutableMap.Builder<String, String> fieldMapBuilder = ImmutableMap.builder(); + if (project != null) { + fieldMapBuilder.put("project", project); + } + if (location != null) { + fieldMapBuilder.put("location", location); + } + if (catalog != null) { + fieldMapBuilder.put("catalog", catalog); + } + if (database != null) { + fieldMapBuilder.put("database", database); + } + if (table != null) { + fieldMapBuilder.put("table", table); + } + fieldValuesMap = fieldMapBuilder.build(); + } + } + } + return fieldValuesMap; + } + + public String getFieldValue(String fieldName) { + return getFieldValuesMap().get(fieldName); + } + + @Override + public String toString() { + return PROJECT_LOCATION_CATALOG_DATABASE_TABLE.instantiate( + "project", + project, + "location", + location, + "catalog", + catalog, + "database", + database, + "table", + table); + } + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o != null && getClass() == o.getClass()) { + TableName that = ((TableName) o); + return Objects.equals(this.project, that.project) + && Objects.equals(this.location, that.location) + && Objects.equals(this.catalog, that.catalog) + && Objects.equals(this.database, that.database) + && Objects.equals(this.table, that.table); + } + return false; + } + + @Override + public int hashCode() { + int h = 1; + h *= 1000003; + h ^= Objects.hashCode(project); + h *= 1000003; + h ^= Objects.hashCode(location); + h *= 1000003; + h ^= Objects.hashCode(catalog); + h *= 1000003; + h ^= Objects.hashCode(database); + h *= 1000003; + h ^= Objects.hashCode(table); + return h; + } + + /** + * Builder for + * projects/{project}/locations/{location}/catalogs/{catalog}/databases/{database}/tables/{table}.
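The Builder declared below is interchangeable with TableName.of(...); a sketch with placeholder IDs, convenient when the name components are assembled incrementally:

import com.google.cloud.bigquery.biglake.v1alpha1.TableName;

public class TableNameBuilderSketch {
  public static void main(String[] args) {
    // Equivalent to TableName.of(...), but the components can be set one at a time.
    TableName name =
        TableName.newBuilder()
            .setProject("my-project")
            .setLocation("us")
            .setCatalog("my-catalog")
            .setDatabase("my-database")
            .setTable("my-table")
            .build();

    // toBuilder() copies an existing name so a single component can be swapped out.
    TableName sibling = name.toBuilder().setTable("other-table").build();
    System.out.println(sibling);
  }
}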
+ */ + public static class Builder { + private String project; + private String location; + private String catalog; + private String database; + private String table; + + protected Builder() {} + + public String getProject() { + return project; + } + + public String getLocation() { + return location; + } + + public String getCatalog() { + return catalog; + } + + public String getDatabase() { + return database; + } + + public String getTable() { + return table; + } + + public Builder setProject(String project) { + this.project = project; + return this; + } + + public Builder setLocation(String location) { + this.location = location; + return this; + } + + public Builder setCatalog(String catalog) { + this.catalog = catalog; + return this; + } + + public Builder setDatabase(String database) { + this.database = database; + return this; + } + + public Builder setTable(String table) { + this.table = table; + return this; + } + + private Builder(TableName tableName) { + this.project = tableName.project; + this.location = tableName.location; + this.catalog = tableName.catalog; + this.database = tableName.database; + this.table = tableName.table; + } + + public TableName build() { + return new TableName(this); + } + } +} diff --git a/java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/TableOrBuilder.java b/java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/TableOrBuilder.java new file mode 100644 index 000000000000..9ee530c121e6 --- /dev/null +++ b/java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/TableOrBuilder.java @@ -0,0 +1,278 @@ +/* + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: google/cloud/bigquery/biglake/v1alpha1/metastore.proto + +package com.google.cloud.bigquery.biglake.v1alpha1; + +public interface TableOrBuilder + extends + // @@protoc_insertion_point(interface_extends:google.cloud.bigquery.biglake.v1alpha1.Table) + com.google.protobuf.MessageOrBuilder { + + /** + * + * + *
+   * Options of a Hive table.
+   * 
+ * + * .google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions hive_options = 7; + * + * @return Whether the hiveOptions field is set. + */ + boolean hasHiveOptions(); + /** + * + * + *
+   * Options of a Hive table.
+   * 
+ * + * .google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions hive_options = 7; + * + * @return The hiveOptions. + */ + com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions getHiveOptions(); + /** + * + * + *
+   * Options of a Hive table.
+   * 
+ * + * .google.cloud.bigquery.biglake.v1alpha1.HiveTableOptions hive_options = 7; + */ + com.google.cloud.bigquery.biglake.v1alpha1.HiveTableOptionsOrBuilder getHiveOptionsOrBuilder(); + + /** + * + * + *
+   * Output only. The resource name.
+   * Format:
+   * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}/tables/{table_id}
+   * 
+ * + * + * string name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... } + * + * + * @return The name. + */ + java.lang.String getName(); + /** + * + * + *
+   * Output only. The resource name.
+   * Format:
+   * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}/tables/{table_id}
+   * 
+ * + * + * string name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... } + * + * + * @return The bytes for name. + */ + com.google.protobuf.ByteString getNameBytes(); + + /** + * + * + *
+   * Output only. The creation time of the table.
+   * 
+ * + * .google.protobuf.Timestamp create_time = 2 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + * + * @return Whether the createTime field is set. + */ + boolean hasCreateTime(); + /** + * + * + *
+   * Output only. The creation time of the table.
+   * 
+ * + * .google.protobuf.Timestamp create_time = 2 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + * + * @return The createTime. + */ + com.google.protobuf.Timestamp getCreateTime(); + /** + * + * + *
+   * Output only. The creation time of the table.
+   * 
+ * + * .google.protobuf.Timestamp create_time = 2 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + com.google.protobuf.TimestampOrBuilder getCreateTimeOrBuilder(); + + /** + * + * + *
+   * Output only. The last modification time of the table.
+   * 
+ * + * .google.protobuf.Timestamp update_time = 3 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + * + * @return Whether the updateTime field is set. + */ + boolean hasUpdateTime(); + /** + * + * + *
+   * Output only. The last modification time of the table.
+   * 
+ * + * .google.protobuf.Timestamp update_time = 3 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + * + * @return The updateTime. + */ + com.google.protobuf.Timestamp getUpdateTime(); + /** + * + * + *
+   * Output only. The last modification time of the table.
+   * 
+ * + * .google.protobuf.Timestamp update_time = 3 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + com.google.protobuf.TimestampOrBuilder getUpdateTimeOrBuilder(); + + /** + * + * + *
+   * Output only. The deletion time of the table. Only set after the table is
+   * deleted.
+   * 
+ * + * .google.protobuf.Timestamp delete_time = 4 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + * + * @return Whether the deleteTime field is set. + */ + boolean hasDeleteTime(); + /** + * + * + *
+   * Output only. The deletion time of the table. Only set after the table is
+   * deleted.
+   * 
+ * + * .google.protobuf.Timestamp delete_time = 4 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + * + * @return The deleteTime. + */ + com.google.protobuf.Timestamp getDeleteTime(); + /** + * + * + *
+   * Output only. The deletion time of the table. Only set after the table is
+   * deleted.
+   * 
+ * + * .google.protobuf.Timestamp delete_time = 4 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + com.google.protobuf.TimestampOrBuilder getDeleteTimeOrBuilder(); + + /** + * + * + *
+   * Output only. The time when this table is considered expired. Only set after
+   * the table is deleted.
+   * 
+ * + * .google.protobuf.Timestamp expire_time = 5 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + * + * @return Whether the expireTime field is set. + */ + boolean hasExpireTime(); + /** + * + * + *
+   * Output only. The time when this table is considered expired. Only set after
+   * the table is deleted.
+   * 
+ * + * .google.protobuf.Timestamp expire_time = 5 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + * + * @return The expireTime. + */ + com.google.protobuf.Timestamp getExpireTime(); + /** + * + * + *
+   * Output only. The time when this table is considered expired. Only set after
+   * the table is deleted.
+   * 
+ * + * .google.protobuf.Timestamp expire_time = 5 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + com.google.protobuf.TimestampOrBuilder getExpireTimeOrBuilder(); + + /** + * + * + *
+   * The table type.
+   * 
+ * + * .google.cloud.bigquery.biglake.v1alpha1.Table.Type type = 6; + * + * @return The enum numeric value on the wire for type. + */ + int getTypeValue(); + /** + * + * + *
+   * The table type.
+   * 
+ * + * .google.cloud.bigquery.biglake.v1alpha1.Table.Type type = 6; + * + * @return The type. + */ + com.google.cloud.bigquery.biglake.v1alpha1.Table.Type getType(); + + public com.google.cloud.bigquery.biglake.v1alpha1.Table.OptionsCase getOptionsCase(); +} diff --git a/java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/UpdateDatabaseRequest.java b/java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/UpdateDatabaseRequest.java new file mode 100644 index 000000000000..0f2aa839dd38 --- /dev/null +++ b/java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/UpdateDatabaseRequest.java @@ -0,0 +1,1076 @@ +/* + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: google/cloud/bigquery/biglake/v1alpha1/metastore.proto + +package com.google.cloud.bigquery.biglake.v1alpha1; + +/** + * + * + *
+ * Request message for the UpdateDatabase method.
+ * 
+ * + * Protobuf type {@code google.cloud.bigquery.biglake.v1alpha1.UpdateDatabaseRequest} + */ +public final class UpdateDatabaseRequest extends com.google.protobuf.GeneratedMessageV3 + implements + // @@protoc_insertion_point(message_implements:google.cloud.bigquery.biglake.v1alpha1.UpdateDatabaseRequest) + UpdateDatabaseRequestOrBuilder { + private static final long serialVersionUID = 0L; + // Use UpdateDatabaseRequest.newBuilder() to construct. + private UpdateDatabaseRequest(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + + private UpdateDatabaseRequest() {} + + @java.lang.Override + @SuppressWarnings({"unused"}) + protected java.lang.Object newInstance(UnusedPrivateParameter unused) { + return new UpdateDatabaseRequest(); + } + + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet getUnknownFields() { + return this.unknownFields; + } + + public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { + return com.google.cloud.bigquery.biglake.v1alpha1.MetastoreProto + .internal_static_google_cloud_bigquery_biglake_v1alpha1_UpdateDatabaseRequest_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return com.google.cloud.bigquery.biglake.v1alpha1.MetastoreProto + .internal_static_google_cloud_bigquery_biglake_v1alpha1_UpdateDatabaseRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + com.google.cloud.bigquery.biglake.v1alpha1.UpdateDatabaseRequest.class, + com.google.cloud.bigquery.biglake.v1alpha1.UpdateDatabaseRequest.Builder.class); + } + + public static final int DATABASE_FIELD_NUMBER = 1; + private com.google.cloud.bigquery.biglake.v1alpha1.Database database_; + /** + * + * + *
+   * Required. The database to update.
+   * The database's `name` field is used to identify the database to update.
+   * Format:
+   * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}
+   * 
+ * + * + * .google.cloud.bigquery.biglake.v1alpha1.Database database = 1 [(.google.api.field_behavior) = REQUIRED]; + * + * + * @return Whether the database field is set. + */ + @java.lang.Override + public boolean hasDatabase() { + return database_ != null; + } + /** + * + * + *
+   * Required. The database to update.
+   * The database's `name` field is used to identify the database to update.
+   * Format:
+   * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}
+   * 
+ * + * + * .google.cloud.bigquery.biglake.v1alpha1.Database database = 1 [(.google.api.field_behavior) = REQUIRED]; + * + * + * @return The database. + */ + @java.lang.Override + public com.google.cloud.bigquery.biglake.v1alpha1.Database getDatabase() { + return database_ == null + ? com.google.cloud.bigquery.biglake.v1alpha1.Database.getDefaultInstance() + : database_; + } + /** + * + * + *
+   * Required. The database to update.
+   * The database's `name` field is used to identify the database to update.
+   * Format:
+   * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}
+   * 
+ * + * + * .google.cloud.bigquery.biglake.v1alpha1.Database database = 1 [(.google.api.field_behavior) = REQUIRED]; + * + */ + @java.lang.Override + public com.google.cloud.bigquery.biglake.v1alpha1.DatabaseOrBuilder getDatabaseOrBuilder() { + return database_ == null + ? com.google.cloud.bigquery.biglake.v1alpha1.Database.getDefaultInstance() + : database_; + } + + public static final int UPDATE_MASK_FIELD_NUMBER = 2; + private com.google.protobuf.FieldMask updateMask_; + /** + * + * + *
+   * The list of fields to update.
+   * For the `FieldMask` definition, see
+   * https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask
+   * If not set, defaults to all of the fields that are allowed to be updated.
+   * 
+ * + * .google.protobuf.FieldMask update_mask = 2; + * + * @return Whether the updateMask field is set. + */ + @java.lang.Override + public boolean hasUpdateMask() { + return updateMask_ != null; + } + /** + * + * + *
+   * The list of fields to update.
+   * For the `FieldMask` definition, see
+   * https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask
+   * If not set, defaults to all of the fields that are allowed to be updated.
+   * 
+ * + * .google.protobuf.FieldMask update_mask = 2; + * + * @return The updateMask. + */ + @java.lang.Override + public com.google.protobuf.FieldMask getUpdateMask() { + return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; + } + /** + * + * + *
+   * The list of fields to update.
+   * For the `FieldMask` definition, see
+   * https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask
+   * If not set, defaults to all of the fields that are allowed to be updated.
+   * 
+ * + * .google.protobuf.FieldMask update_mask = 2; + */ + @java.lang.Override + public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() { + return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; + } + + private byte memoizedIsInitialized = -1; + + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { + if (database_ != null) { + output.writeMessage(1, getDatabase()); + } + if (updateMask_ != null) { + output.writeMessage(2, getUpdateMask()); + } + getUnknownFields().writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (database_ != null) { + size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getDatabase()); + } + if (updateMask_ != null) { + size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getUpdateMask()); + } + size += getUnknownFields().getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof com.google.cloud.bigquery.biglake.v1alpha1.UpdateDatabaseRequest)) { + return super.equals(obj); + } + com.google.cloud.bigquery.biglake.v1alpha1.UpdateDatabaseRequest other = + (com.google.cloud.bigquery.biglake.v1alpha1.UpdateDatabaseRequest) obj; + + if (hasDatabase() != other.hasDatabase()) return false; + if (hasDatabase()) { + if (!getDatabase().equals(other.getDatabase())) return false; + } + if (hasUpdateMask() != other.hasUpdateMask()) return false; + if (hasUpdateMask()) { + if (!getUpdateMask().equals(other.getUpdateMask())) return false; + } + if (!getUnknownFields().equals(other.getUnknownFields())) return false; + return true; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + if (hasDatabase()) { + hash = (37 * hash) + DATABASE_FIELD_NUMBER; + hash = (53 * hash) + getDatabase().hashCode(); + } + if (hasUpdateMask()) { + hash = (37 * hash) + UPDATE_MASK_FIELD_NUMBER; + hash = (53 * hash) + getUpdateMask().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.UpdateDatabaseRequest parseFrom( + java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.UpdateDatabaseRequest parseFrom( + java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.UpdateDatabaseRequest parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.UpdateDatabaseRequest parseFrom( + com.google.protobuf.ByteString data, + 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.UpdateDatabaseRequest parseFrom( + byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.UpdateDatabaseRequest parseFrom( + byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.UpdateDatabaseRequest parseFrom( + java.io.InputStream input) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.UpdateDatabaseRequest parseFrom( + java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException( + PARSER, input, extensionRegistry); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.UpdateDatabaseRequest parseDelimitedFrom( + java.io.InputStream input) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.UpdateDatabaseRequest parseDelimitedFrom( + java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( + PARSER, input, extensionRegistry); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.UpdateDatabaseRequest parseFrom( + com.google.protobuf.CodedInputStream input) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.UpdateDatabaseRequest parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException( + PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { + return newBuilder(); + } + + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + + public static Builder newBuilder( + com.google.cloud.bigquery.biglake.v1alpha1.UpdateDatabaseRequest prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * + * + *
+   * Request message for the UpdateDatabase method.
+   * 
+ * + * Protobuf type {@code google.cloud.bigquery.biglake.v1alpha1.UpdateDatabaseRequest} + */ + public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder + implements + // @@protoc_insertion_point(builder_implements:google.cloud.bigquery.biglake.v1alpha1.UpdateDatabaseRequest) + com.google.cloud.bigquery.biglake.v1alpha1.UpdateDatabaseRequestOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { + return com.google.cloud.bigquery.biglake.v1alpha1.MetastoreProto + .internal_static_google_cloud_bigquery_biglake_v1alpha1_UpdateDatabaseRequest_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return com.google.cloud.bigquery.biglake.v1alpha1.MetastoreProto + .internal_static_google_cloud_bigquery_biglake_v1alpha1_UpdateDatabaseRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + com.google.cloud.bigquery.biglake.v1alpha1.UpdateDatabaseRequest.class, + com.google.cloud.bigquery.biglake.v1alpha1.UpdateDatabaseRequest.Builder.class); + } + + // Construct using com.google.cloud.bigquery.biglake.v1alpha1.UpdateDatabaseRequest.newBuilder() + private Builder() {} + + private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + } + + @java.lang.Override + public Builder clear() { + super.clear(); + bitField0_ = 0; + database_ = null; + if (databaseBuilder_ != null) { + databaseBuilder_.dispose(); + databaseBuilder_ = null; + } + updateMask_ = null; + if (updateMaskBuilder_ != null) { + updateMaskBuilder_.dispose(); + updateMaskBuilder_ = null; + } + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { + return com.google.cloud.bigquery.biglake.v1alpha1.MetastoreProto + .internal_static_google_cloud_bigquery_biglake_v1alpha1_UpdateDatabaseRequest_descriptor; + } + + @java.lang.Override + public com.google.cloud.bigquery.biglake.v1alpha1.UpdateDatabaseRequest + getDefaultInstanceForType() { + return com.google.cloud.bigquery.biglake.v1alpha1.UpdateDatabaseRequest.getDefaultInstance(); + } + + @java.lang.Override + public com.google.cloud.bigquery.biglake.v1alpha1.UpdateDatabaseRequest build() { + com.google.cloud.bigquery.biglake.v1alpha1.UpdateDatabaseRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public com.google.cloud.bigquery.biglake.v1alpha1.UpdateDatabaseRequest buildPartial() { + com.google.cloud.bigquery.biglake.v1alpha1.UpdateDatabaseRequest result = + new com.google.cloud.bigquery.biglake.v1alpha1.UpdateDatabaseRequest(this); + if (bitField0_ != 0) { + buildPartial0(result); + } + onBuilt(); + return result; + } + + private void buildPartial0( + com.google.cloud.bigquery.biglake.v1alpha1.UpdateDatabaseRequest result) { + int from_bitField0_ = bitField0_; + if (((from_bitField0_ & 0x00000001) != 0)) { + result.database_ = databaseBuilder_ == null ? database_ : databaseBuilder_.build(); + } + if (((from_bitField0_ & 0x00000002) != 0)) { + result.updateMask_ = updateMaskBuilder_ == null ? 
updateMask_ : updateMaskBuilder_.build(); + } + } + + @java.lang.Override + public Builder clone() { + return super.clone(); + } + + @java.lang.Override + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { + return super.setField(field, value); + } + + @java.lang.Override + public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { + return super.clearField(field); + } + + @java.lang.Override + public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return super.clearOneof(oneof); + } + + @java.lang.Override + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { + return super.setRepeatedField(field, index, value); + } + + @java.lang.Override + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { + return super.addRepeatedField(field, value); + } + + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof com.google.cloud.bigquery.biglake.v1alpha1.UpdateDatabaseRequest) { + return mergeFrom((com.google.cloud.bigquery.biglake.v1alpha1.UpdateDatabaseRequest) other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom( + com.google.cloud.bigquery.biglake.v1alpha1.UpdateDatabaseRequest other) { + if (other + == com.google.cloud.bigquery.biglake.v1alpha1.UpdateDatabaseRequest.getDefaultInstance()) + return this; + if (other.hasDatabase()) { + mergeDatabase(other.getDatabase()); + } + if (other.hasUpdateMask()) { + mergeUpdateMask(other.getUpdateMask()); + } + this.mergeUnknownFields(other.getUnknownFields()); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 10: + { + input.readMessage(getDatabaseFieldBuilder().getBuilder(), extensionRegistry); + bitField0_ |= 0x00000001; + break; + } // case 10 + case 18: + { + input.readMessage(getUpdateMaskFieldBuilder().getBuilder(), extensionRegistry); + bitField0_ |= 0x00000002; + break; + } // case 18 + default: + { + if (!super.parseUnknownField(input, extensionRegistry, tag)) { + done = true; // was an endgroup tag + } + break; + } // default: + } // switch (tag) + } // while (!done) + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.unwrapIOException(); + } finally { + onChanged(); + } // finally + return this; + } + + private int bitField0_; + + private com.google.cloud.bigquery.biglake.v1alpha1.Database database_; + private com.google.protobuf.SingleFieldBuilderV3< + com.google.cloud.bigquery.biglake.v1alpha1.Database, + com.google.cloud.bigquery.biglake.v1alpha1.Database.Builder, + com.google.cloud.bigquery.biglake.v1alpha1.DatabaseOrBuilder> + databaseBuilder_; + /** + * + * + *
+     * Required. The database to update.
+     * The database's `name` field is used to identify the database to update.
+     * Format:
+     * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}
+     * 
+ * + * + * .google.cloud.bigquery.biglake.v1alpha1.Database database = 1 [(.google.api.field_behavior) = REQUIRED]; + * + * + * @return Whether the database field is set. + */ + public boolean hasDatabase() { + return ((bitField0_ & 0x00000001) != 0); + } + /** + * + * + *
+     * Required. The database to update.
+     * The database's `name` field is used to identify the database to update.
+     * Format:
+     * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}
+     * 
+ * + * + * .google.cloud.bigquery.biglake.v1alpha1.Database database = 1 [(.google.api.field_behavior) = REQUIRED]; + * + * + * @return The database. + */ + public com.google.cloud.bigquery.biglake.v1alpha1.Database getDatabase() { + if (databaseBuilder_ == null) { + return database_ == null + ? com.google.cloud.bigquery.biglake.v1alpha1.Database.getDefaultInstance() + : database_; + } else { + return databaseBuilder_.getMessage(); + } + } + /** + * + * + *
+     * Required. The database to update.
+     * The database's `name` field is used to identify the database to update.
+     * Format:
+     * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}
+     * 
+ * + * + * .google.cloud.bigquery.biglake.v1alpha1.Database database = 1 [(.google.api.field_behavior) = REQUIRED]; + * + */ + public Builder setDatabase(com.google.cloud.bigquery.biglake.v1alpha1.Database value) { + if (databaseBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + database_ = value; + } else { + databaseBuilder_.setMessage(value); + } + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + /** + * + * + *
+     * Required. The database to update.
+     * The database's `name` field is used to identify the database to update.
+     * Format:
+     * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}
+     * 
+ * + * + * .google.cloud.bigquery.biglake.v1alpha1.Database database = 1 [(.google.api.field_behavior) = REQUIRED]; + * + */ + public Builder setDatabase( + com.google.cloud.bigquery.biglake.v1alpha1.Database.Builder builderForValue) { + if (databaseBuilder_ == null) { + database_ = builderForValue.build(); + } else { + databaseBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + /** + * + * + *
+     * Required. The database to update.
+     * The database's `name` field is used to identify the database to update.
+     * Format:
+     * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}
+     * 
+ * + * + * .google.cloud.bigquery.biglake.v1alpha1.Database database = 1 [(.google.api.field_behavior) = REQUIRED]; + * + */ + public Builder mergeDatabase(com.google.cloud.bigquery.biglake.v1alpha1.Database value) { + if (databaseBuilder_ == null) { + if (((bitField0_ & 0x00000001) != 0) + && database_ != null + && database_ + != com.google.cloud.bigquery.biglake.v1alpha1.Database.getDefaultInstance()) { + getDatabaseBuilder().mergeFrom(value); + } else { + database_ = value; + } + } else { + databaseBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + /** + * + * + *
+     * Required. The database to update.
+     * The database's `name` field is used to identify the database to update.
+     * Format:
+     * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}
+     * 
+ * + * + * .google.cloud.bigquery.biglake.v1alpha1.Database database = 1 [(.google.api.field_behavior) = REQUIRED]; + * + */ + public Builder clearDatabase() { + bitField0_ = (bitField0_ & ~0x00000001); + database_ = null; + if (databaseBuilder_ != null) { + databaseBuilder_.dispose(); + databaseBuilder_ = null; + } + onChanged(); + return this; + } + /** + * + * + *
+     * Required. The database to update.
+     * The database's `name` field is used to identify the database to update.
+     * Format:
+     * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}
+     * 
+ * + * + * .google.cloud.bigquery.biglake.v1alpha1.Database database = 1 [(.google.api.field_behavior) = REQUIRED]; + * + */ + public com.google.cloud.bigquery.biglake.v1alpha1.Database.Builder getDatabaseBuilder() { + bitField0_ |= 0x00000001; + onChanged(); + return getDatabaseFieldBuilder().getBuilder(); + } + /** + * + * + *
+     * Required. The database to update.
+     * The database's `name` field is used to identify the database to update.
+     * Format:
+     * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}
+     * 
+ * + * + * .google.cloud.bigquery.biglake.v1alpha1.Database database = 1 [(.google.api.field_behavior) = REQUIRED]; + * + */ + public com.google.cloud.bigquery.biglake.v1alpha1.DatabaseOrBuilder getDatabaseOrBuilder() { + if (databaseBuilder_ != null) { + return databaseBuilder_.getMessageOrBuilder(); + } else { + return database_ == null + ? com.google.cloud.bigquery.biglake.v1alpha1.Database.getDefaultInstance() + : database_; + } + } + /** + * + * + *
+     * Required. The database to update.
+     * The database's `name` field is used to identify the database to update.
+     * Format:
+     * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}
+     * 
+ * + * + * .google.cloud.bigquery.biglake.v1alpha1.Database database = 1 [(.google.api.field_behavior) = REQUIRED]; + * + */ + private com.google.protobuf.SingleFieldBuilderV3< + com.google.cloud.bigquery.biglake.v1alpha1.Database, + com.google.cloud.bigquery.biglake.v1alpha1.Database.Builder, + com.google.cloud.bigquery.biglake.v1alpha1.DatabaseOrBuilder> + getDatabaseFieldBuilder() { + if (databaseBuilder_ == null) { + databaseBuilder_ = + new com.google.protobuf.SingleFieldBuilderV3< + com.google.cloud.bigquery.biglake.v1alpha1.Database, + com.google.cloud.bigquery.biglake.v1alpha1.Database.Builder, + com.google.cloud.bigquery.biglake.v1alpha1.DatabaseOrBuilder>( + getDatabase(), getParentForChildren(), isClean()); + database_ = null; + } + return databaseBuilder_; + } + + private com.google.protobuf.FieldMask updateMask_; + private com.google.protobuf.SingleFieldBuilderV3< + com.google.protobuf.FieldMask, + com.google.protobuf.FieldMask.Builder, + com.google.protobuf.FieldMaskOrBuilder> + updateMaskBuilder_; + /** + * + * + *
+     * The list of fields to update.
+     * For the `FieldMask` definition, see
+     * https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask
+     * If not set, defaults to all of the fields that are allowed to be updated.
+     * 
+ * + * .google.protobuf.FieldMask update_mask = 2; + * + * @return Whether the updateMask field is set. + */ + public boolean hasUpdateMask() { + return ((bitField0_ & 0x00000002) != 0); + } + /** + * + * + *
+     * The list of fields to update.
+     * For the `FieldMask` definition, see
+     * https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask
+     * If not set, defaults to all of the fields that are allowed to be updated.
+     * 
+ * + * .google.protobuf.FieldMask update_mask = 2; + * + * @return The updateMask. + */ + public com.google.protobuf.FieldMask getUpdateMask() { + if (updateMaskBuilder_ == null) { + return updateMask_ == null + ? com.google.protobuf.FieldMask.getDefaultInstance() + : updateMask_; + } else { + return updateMaskBuilder_.getMessage(); + } + } + /** + * + * + *
+     * The list of fields to update.
+     * For the `FieldMask` definition, see
+     * https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask
+     * If not set, defaults to all of the fields that are allowed to be updated.
+     * 
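+     *
+     * <p>Illustrative only (the path names below are assumptions, not a list of the fields
+     * that accept updates):
+     *
+     * <pre>{@code
+     * builder.setUpdateMask(
+     *     com.google.protobuf.FieldMask.newBuilder()
+     *         .addPaths("hive_options.location_uri")
+     *         .addPaths("hive_options.parameters")
+     *         .build());
+     * }</pre>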
+ * + * .google.protobuf.FieldMask update_mask = 2; + */ + public Builder setUpdateMask(com.google.protobuf.FieldMask value) { + if (updateMaskBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + updateMask_ = value; + } else { + updateMaskBuilder_.setMessage(value); + } + bitField0_ |= 0x00000002; + onChanged(); + return this; + } + /** + * + * + *
+     * The list of fields to update.
+     * For the `FieldMask` definition, see
+     * https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask
+     * If not set, defaults to all of the fields that are allowed to be updated.
+     * 
+ * + * .google.protobuf.FieldMask update_mask = 2; + */ + public Builder setUpdateMask(com.google.protobuf.FieldMask.Builder builderForValue) { + if (updateMaskBuilder_ == null) { + updateMask_ = builderForValue.build(); + } else { + updateMaskBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000002; + onChanged(); + return this; + } + /** + * + * + *
+     * The list of fields to update.
+     * For the `FieldMask` definition, see
+     * https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask
+     * If not set, defaults to all of the fields that are allowed to be updated.
+     * 
+ * + * .google.protobuf.FieldMask update_mask = 2; + */ + public Builder mergeUpdateMask(com.google.protobuf.FieldMask value) { + if (updateMaskBuilder_ == null) { + if (((bitField0_ & 0x00000002) != 0) + && updateMask_ != null + && updateMask_ != com.google.protobuf.FieldMask.getDefaultInstance()) { + getUpdateMaskBuilder().mergeFrom(value); + } else { + updateMask_ = value; + } + } else { + updateMaskBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000002; + onChanged(); + return this; + } + /** + * + * + *
+     * The list of fields to update.
+     * For the `FieldMask` definition, see
+     * https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask
+     * If not set, defaults to all of the fields that are allowed to be updated.
+     * 
+ * + * .google.protobuf.FieldMask update_mask = 2; + */ + public Builder clearUpdateMask() { + bitField0_ = (bitField0_ & ~0x00000002); + updateMask_ = null; + if (updateMaskBuilder_ != null) { + updateMaskBuilder_.dispose(); + updateMaskBuilder_ = null; + } + onChanged(); + return this; + } + /** + * + * + *
+     * The list of fields to update.
+     * For the `FieldMask` definition, see
+     * https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask
+     * If not set, defaults to all of the fields that are allowed to be updated.
+     * 
+ * + * .google.protobuf.FieldMask update_mask = 2; + */ + public com.google.protobuf.FieldMask.Builder getUpdateMaskBuilder() { + bitField0_ |= 0x00000002; + onChanged(); + return getUpdateMaskFieldBuilder().getBuilder(); + } + /** + * + * + *
+     * The list of fields to update.
+     * For the `FieldMask` definition, see
+     * https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask
+     * If not set, defaults to all of the fields that are allowed to be updated.
+     * 
+ * + * .google.protobuf.FieldMask update_mask = 2; + */ + public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() { + if (updateMaskBuilder_ != null) { + return updateMaskBuilder_.getMessageOrBuilder(); + } else { + return updateMask_ == null + ? com.google.protobuf.FieldMask.getDefaultInstance() + : updateMask_; + } + } + /** + * + * + *
+     * The list of fields to update.
+     * For the `FieldMask` definition, see
+     * https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask
+     * If not set, defaults to all of the fields that are allowed to be updated.
+     * 
+ * + * .google.protobuf.FieldMask update_mask = 2; + */ + private com.google.protobuf.SingleFieldBuilderV3< + com.google.protobuf.FieldMask, + com.google.protobuf.FieldMask.Builder, + com.google.protobuf.FieldMaskOrBuilder> + getUpdateMaskFieldBuilder() { + if (updateMaskBuilder_ == null) { + updateMaskBuilder_ = + new com.google.protobuf.SingleFieldBuilderV3< + com.google.protobuf.FieldMask, + com.google.protobuf.FieldMask.Builder, + com.google.protobuf.FieldMaskOrBuilder>( + getUpdateMask(), getParentForChildren(), isClean()); + updateMask_ = null; + } + return updateMaskBuilder_; + } + + @java.lang.Override + public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + // @@protoc_insertion_point(builder_scope:google.cloud.bigquery.biglake.v1alpha1.UpdateDatabaseRequest) + } + + // @@protoc_insertion_point(class_scope:google.cloud.bigquery.biglake.v1alpha1.UpdateDatabaseRequest) + private static final com.google.cloud.bigquery.biglake.v1alpha1.UpdateDatabaseRequest + DEFAULT_INSTANCE; + + static { + DEFAULT_INSTANCE = new com.google.cloud.bigquery.biglake.v1alpha1.UpdateDatabaseRequest(); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.UpdateDatabaseRequest + getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + @java.lang.Override + public UpdateDatabaseRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + Builder builder = newBuilder(); + try { + builder.mergeFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(builder.buildPartial()); + } catch (com.google.protobuf.UninitializedMessageException e) { + throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException(e) + .setUnfinishedMessage(builder.buildPartial()); + } + return builder.buildPartial(); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public com.google.cloud.bigquery.biglake.v1alpha1.UpdateDatabaseRequest + getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } +} diff --git a/java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/UpdateDatabaseRequestOrBuilder.java b/java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/UpdateDatabaseRequestOrBuilder.java new file mode 100644 index 000000000000..a52fe5df8e33 --- /dev/null +++ b/java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/UpdateDatabaseRequestOrBuilder.java @@ -0,0 +1,119 @@ +/* + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: google/cloud/bigquery/biglake/v1alpha1/metastore.proto + +package com.google.cloud.bigquery.biglake.v1alpha1; + +public interface UpdateDatabaseRequestOrBuilder + extends + // @@protoc_insertion_point(interface_extends:google.cloud.bigquery.biglake.v1alpha1.UpdateDatabaseRequest) + com.google.protobuf.MessageOrBuilder { + + /** + * + * + *
+   * Required. The database to update.
+   * The database's `name` field is used to identify the database to update.
+   * Format:
+   * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}
+   * 
+ * + * + * .google.cloud.bigquery.biglake.v1alpha1.Database database = 1 [(.google.api.field_behavior) = REQUIRED]; + * + * + * @return Whether the database field is set. + */ + boolean hasDatabase(); + /** + * + * + *
+   * Required. The database to update.
+   * The database's `name` field is used to identify the database to update.
+   * Format:
+   * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}
+   * 
+ * + * + * .google.cloud.bigquery.biglake.v1alpha1.Database database = 1 [(.google.api.field_behavior) = REQUIRED]; + * + * + * @return The database. + */ + com.google.cloud.bigquery.biglake.v1alpha1.Database getDatabase(); + /** + * + * + *
+   * Required. The database to update.
+   * The database's `name` field is used to identify the database to update.
+   * Format:
+   * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}
+   * 
+ * + * + * .google.cloud.bigquery.biglake.v1alpha1.Database database = 1 [(.google.api.field_behavior) = REQUIRED]; + * + */ + com.google.cloud.bigquery.biglake.v1alpha1.DatabaseOrBuilder getDatabaseOrBuilder(); + + /** + * + * + *
+   * The list of fields to update.
+   * For the `FieldMask` definition, see
+   * https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask
+   * If not set, defaults to all of the fields that are allowed to be updated.
+   * 
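+   *
+   * <p>Read-side sketch (illustrative; {@code request} stands for any
+   * {@code UpdateDatabaseRequestOrBuilder}):
+   *
+   * <pre>{@code
+   * if (request.hasUpdateMask()) {
+   *   // Only the listed paths should be applied.
+   *   java.util.List<String> paths = request.getUpdateMask().getPathsList();
+   * } else {
+   *   // No mask: treat every updatable field as subject to the update.
+   * }
+   * }</pre>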
+ * + * .google.protobuf.FieldMask update_mask = 2; + * + * @return Whether the updateMask field is set. + */ + boolean hasUpdateMask(); + /** + * + * + *
+   * The list of fields to update.
+   * For the `FieldMask` definition, see
+   * https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask
+   * If not set, defaults to all of the fields that are allowed to be updated.
+   * 
+ * + * .google.protobuf.FieldMask update_mask = 2; + * + * @return The updateMask. + */ + com.google.protobuf.FieldMask getUpdateMask(); + /** + * + * + *
+   * The list of fields to update.
+   * For the `FieldMask` definition, see
+   * https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask
+   * If not set, defaults to all of the fields that are allowed to be updated.
+   * 
+ * + * .google.protobuf.FieldMask update_mask = 2; + */ + com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder(); +} diff --git a/java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/UpdateTableRequest.java b/java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/UpdateTableRequest.java new file mode 100644 index 000000000000..e231af627d2e --- /dev/null +++ b/java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/UpdateTableRequest.java @@ -0,0 +1,1072 @@ +/* + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: google/cloud/bigquery/biglake/v1alpha1/metastore.proto + +package com.google.cloud.bigquery.biglake.v1alpha1; + +/** + * + * + *
+ * Request message for the UpdateTable method.
+ * 
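+ *
+ * <p>A minimal construction sketch (illustrative only, not part of the generated code; the
+ * resource name is made up and the {@code "hive_options"} mask path is an assumed field name).
+ * Class names are left unqualified for brevity; they resolve to this package and to
+ * {@code com.google.protobuf.FieldMask}.
+ *
+ * <pre>{@code
+ * UpdateTableRequest request =
+ *     UpdateTableRequest.newBuilder()
+ *         // The table's name identifies which table to update.
+ *         .setTable(
+ *             Table.newBuilder()
+ *                 .setName(
+ *                     "projects/my-project/locations/us/catalogs/my-catalog/databases/my-database/tables/my-table")
+ *                 .build())
+ *         // Restrict the update to a single field; leaving the mask unset updates all
+ *         // updatable fields.
+ *         .setUpdateMask(FieldMask.newBuilder().addPaths("hive_options").build())
+ *         .build();
+ * }</pre>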
+ * + * Protobuf type {@code google.cloud.bigquery.biglake.v1alpha1.UpdateTableRequest} + */ +public final class UpdateTableRequest extends com.google.protobuf.GeneratedMessageV3 + implements + // @@protoc_insertion_point(message_implements:google.cloud.bigquery.biglake.v1alpha1.UpdateTableRequest) + UpdateTableRequestOrBuilder { + private static final long serialVersionUID = 0L; + // Use UpdateTableRequest.newBuilder() to construct. + private UpdateTableRequest(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + + private UpdateTableRequest() {} + + @java.lang.Override + @SuppressWarnings({"unused"}) + protected java.lang.Object newInstance(UnusedPrivateParameter unused) { + return new UpdateTableRequest(); + } + + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet getUnknownFields() { + return this.unknownFields; + } + + public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { + return com.google.cloud.bigquery.biglake.v1alpha1.MetastoreProto + .internal_static_google_cloud_bigquery_biglake_v1alpha1_UpdateTableRequest_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return com.google.cloud.bigquery.biglake.v1alpha1.MetastoreProto + .internal_static_google_cloud_bigquery_biglake_v1alpha1_UpdateTableRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + com.google.cloud.bigquery.biglake.v1alpha1.UpdateTableRequest.class, + com.google.cloud.bigquery.biglake.v1alpha1.UpdateTableRequest.Builder.class); + } + + public static final int TABLE_FIELD_NUMBER = 1; + private com.google.cloud.bigquery.biglake.v1alpha1.Table table_; + /** + * + * + *
+   * Required. The table to update.
+   * The table's `name` field is used to identify the table to update.
+   * Format:
+   * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}/tables/{table_id}
+   * 
+ * + * + * .google.cloud.bigquery.biglake.v1alpha1.Table table = 1 [(.google.api.field_behavior) = REQUIRED]; + * + * + * @return Whether the table field is set. + */ + @java.lang.Override + public boolean hasTable() { + return table_ != null; + } + /** + * + * + *
+   * Required. The table to update.
+   * The table's `name` field is used to identify the table to update.
+   * Format:
+   * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}/tables/{table_id}
+   * 
+ * + * + * .google.cloud.bigquery.biglake.v1alpha1.Table table = 1 [(.google.api.field_behavior) = REQUIRED]; + * + * + * @return The table. + */ + @java.lang.Override + public com.google.cloud.bigquery.biglake.v1alpha1.Table getTable() { + return table_ == null + ? com.google.cloud.bigquery.biglake.v1alpha1.Table.getDefaultInstance() + : table_; + } + /** + * + * + *
+   * Required. The table to update.
+   * The table's `name` field is used to identify the table to update.
+   * Format:
+   * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}/tables/{table_id}
+   * 
+ * + * + * .google.cloud.bigquery.biglake.v1alpha1.Table table = 1 [(.google.api.field_behavior) = REQUIRED]; + * + */ + @java.lang.Override + public com.google.cloud.bigquery.biglake.v1alpha1.TableOrBuilder getTableOrBuilder() { + return table_ == null + ? com.google.cloud.bigquery.biglake.v1alpha1.Table.getDefaultInstance() + : table_; + } + + public static final int UPDATE_MASK_FIELD_NUMBER = 2; + private com.google.protobuf.FieldMask updateMask_; + /** + * + * + *
+   * The list of fields to update.
+   * For the `FieldMask` definition, see
+   * https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask
+   * If not set, defaults to all of the fields that are allowed to be updated.
+   * 
+ * + * .google.protobuf.FieldMask update_mask = 2; + * + * @return Whether the updateMask field is set. + */ + @java.lang.Override + public boolean hasUpdateMask() { + return updateMask_ != null; + } + /** + * + * + *
+   * The list of fields to update.
+   * For the `FieldMask` definition, see
+   * https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask
+   * If not set, defaults to all of the fields that are allowed to be updated.
+   * 
+ * + * .google.protobuf.FieldMask update_mask = 2; + * + * @return The updateMask. + */ + @java.lang.Override + public com.google.protobuf.FieldMask getUpdateMask() { + return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; + } + /** + * + * + *
+   * The list of fields to update.
+   * For the `FieldMask` definition, see
+   * https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask
+   * If not set, defaults to all of the fields that are allowed to be updated.
+   * 
+ * + * .google.protobuf.FieldMask update_mask = 2; + */ + @java.lang.Override + public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() { + return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; + } + + private byte memoizedIsInitialized = -1; + + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { + if (table_ != null) { + output.writeMessage(1, getTable()); + } + if (updateMask_ != null) { + output.writeMessage(2, getUpdateMask()); + } + getUnknownFields().writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (table_ != null) { + size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getTable()); + } + if (updateMask_ != null) { + size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getUpdateMask()); + } + size += getUnknownFields().getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof com.google.cloud.bigquery.biglake.v1alpha1.UpdateTableRequest)) { + return super.equals(obj); + } + com.google.cloud.bigquery.biglake.v1alpha1.UpdateTableRequest other = + (com.google.cloud.bigquery.biglake.v1alpha1.UpdateTableRequest) obj; + + if (hasTable() != other.hasTable()) return false; + if (hasTable()) { + if (!getTable().equals(other.getTable())) return false; + } + if (hasUpdateMask() != other.hasUpdateMask()) return false; + if (hasUpdateMask()) { + if (!getUpdateMask().equals(other.getUpdateMask())) return false; + } + if (!getUnknownFields().equals(other.getUnknownFields())) return false; + return true; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + if (hasTable()) { + hash = (37 * hash) + TABLE_FIELD_NUMBER; + hash = (53 * hash) + getTable().hashCode(); + } + if (hasUpdateMask()) { + hash = (37 * hash) + UPDATE_MASK_FIELD_NUMBER; + hash = (53 * hash) + getUpdateMask().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.UpdateTableRequest parseFrom( + java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.UpdateTableRequest parseFrom( + java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.UpdateTableRequest parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.UpdateTableRequest parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws 
com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.UpdateTableRequest parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.UpdateTableRequest parseFrom( + byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.UpdateTableRequest parseFrom( + java.io.InputStream input) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.UpdateTableRequest parseFrom( + java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException( + PARSER, input, extensionRegistry); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.UpdateTableRequest parseDelimitedFrom( + java.io.InputStream input) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.UpdateTableRequest parseDelimitedFrom( + java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( + PARSER, input, extensionRegistry); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.UpdateTableRequest parseFrom( + com.google.protobuf.CodedInputStream input) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.UpdateTableRequest parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException( + PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { + return newBuilder(); + } + + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + + public static Builder newBuilder( + com.google.cloud.bigquery.biglake.v1alpha1.UpdateTableRequest prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * + * + *
+   * Request message for the UpdateTable method.
+   * 
+ * + * Protobuf type {@code google.cloud.bigquery.biglake.v1alpha1.UpdateTableRequest} + */ + public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder + implements + // @@protoc_insertion_point(builder_implements:google.cloud.bigquery.biglake.v1alpha1.UpdateTableRequest) + com.google.cloud.bigquery.biglake.v1alpha1.UpdateTableRequestOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { + return com.google.cloud.bigquery.biglake.v1alpha1.MetastoreProto + .internal_static_google_cloud_bigquery_biglake_v1alpha1_UpdateTableRequest_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return com.google.cloud.bigquery.biglake.v1alpha1.MetastoreProto + .internal_static_google_cloud_bigquery_biglake_v1alpha1_UpdateTableRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + com.google.cloud.bigquery.biglake.v1alpha1.UpdateTableRequest.class, + com.google.cloud.bigquery.biglake.v1alpha1.UpdateTableRequest.Builder.class); + } + + // Construct using com.google.cloud.bigquery.biglake.v1alpha1.UpdateTableRequest.newBuilder() + private Builder() {} + + private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + } + + @java.lang.Override + public Builder clear() { + super.clear(); + bitField0_ = 0; + table_ = null; + if (tableBuilder_ != null) { + tableBuilder_.dispose(); + tableBuilder_ = null; + } + updateMask_ = null; + if (updateMaskBuilder_ != null) { + updateMaskBuilder_.dispose(); + updateMaskBuilder_ = null; + } + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { + return com.google.cloud.bigquery.biglake.v1alpha1.MetastoreProto + .internal_static_google_cloud_bigquery_biglake_v1alpha1_UpdateTableRequest_descriptor; + } + + @java.lang.Override + public com.google.cloud.bigquery.biglake.v1alpha1.UpdateTableRequest + getDefaultInstanceForType() { + return com.google.cloud.bigquery.biglake.v1alpha1.UpdateTableRequest.getDefaultInstance(); + } + + @java.lang.Override + public com.google.cloud.bigquery.biglake.v1alpha1.UpdateTableRequest build() { + com.google.cloud.bigquery.biglake.v1alpha1.UpdateTableRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public com.google.cloud.bigquery.biglake.v1alpha1.UpdateTableRequest buildPartial() { + com.google.cloud.bigquery.biglake.v1alpha1.UpdateTableRequest result = + new com.google.cloud.bigquery.biglake.v1alpha1.UpdateTableRequest(this); + if (bitField0_ != 0) { + buildPartial0(result); + } + onBuilt(); + return result; + } + + private void buildPartial0( + com.google.cloud.bigquery.biglake.v1alpha1.UpdateTableRequest result) { + int from_bitField0_ = bitField0_; + if (((from_bitField0_ & 0x00000001) != 0)) { + result.table_ = tableBuilder_ == null ? table_ : tableBuilder_.build(); + } + if (((from_bitField0_ & 0x00000002) != 0)) { + result.updateMask_ = updateMaskBuilder_ == null ? 
updateMask_ : updateMaskBuilder_.build(); + } + } + + @java.lang.Override + public Builder clone() { + return super.clone(); + } + + @java.lang.Override + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { + return super.setField(field, value); + } + + @java.lang.Override + public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { + return super.clearField(field); + } + + @java.lang.Override + public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return super.clearOneof(oneof); + } + + @java.lang.Override + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { + return super.setRepeatedField(field, index, value); + } + + @java.lang.Override + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { + return super.addRepeatedField(field, value); + } + + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof com.google.cloud.bigquery.biglake.v1alpha1.UpdateTableRequest) { + return mergeFrom((com.google.cloud.bigquery.biglake.v1alpha1.UpdateTableRequest) other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(com.google.cloud.bigquery.biglake.v1alpha1.UpdateTableRequest other) { + if (other + == com.google.cloud.bigquery.biglake.v1alpha1.UpdateTableRequest.getDefaultInstance()) + return this; + if (other.hasTable()) { + mergeTable(other.getTable()); + } + if (other.hasUpdateMask()) { + mergeUpdateMask(other.getUpdateMask()); + } + this.mergeUnknownFields(other.getUnknownFields()); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 10: + { + input.readMessage(getTableFieldBuilder().getBuilder(), extensionRegistry); + bitField0_ |= 0x00000001; + break; + } // case 10 + case 18: + { + input.readMessage(getUpdateMaskFieldBuilder().getBuilder(), extensionRegistry); + bitField0_ |= 0x00000002; + break; + } // case 18 + default: + { + if (!super.parseUnknownField(input, extensionRegistry, tag)) { + done = true; // was an endgroup tag + } + break; + } // default: + } // switch (tag) + } // while (!done) + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.unwrapIOException(); + } finally { + onChanged(); + } // finally + return this; + } + + private int bitField0_; + + private com.google.cloud.bigquery.biglake.v1alpha1.Table table_; + private com.google.protobuf.SingleFieldBuilderV3< + com.google.cloud.bigquery.biglake.v1alpha1.Table, + com.google.cloud.bigquery.biglake.v1alpha1.Table.Builder, + com.google.cloud.bigquery.biglake.v1alpha1.TableOrBuilder> + tableBuilder_; + /** + * + * + *
+     * Required. The table to update.
+     * The table's `name` field is used to identify the table to update.
+     * Format:
+     * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}/tables/{table_id}
+     * 
+ * + * + * .google.cloud.bigquery.biglake.v1alpha1.Table table = 1 [(.google.api.field_behavior) = REQUIRED]; + * + * + * @return Whether the table field is set. + */ + public boolean hasTable() { + return ((bitField0_ & 0x00000001) != 0); + } + /** + * + * + *
+     * Required. The table to update.
+     * The table's `name` field is used to identify the table to update.
+     * Format:
+     * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}/tables/{table_id}
+     * 
+ * + * + * .google.cloud.bigquery.biglake.v1alpha1.Table table = 1 [(.google.api.field_behavior) = REQUIRED]; + * + * + * @return The table. + */ + public com.google.cloud.bigquery.biglake.v1alpha1.Table getTable() { + if (tableBuilder_ == null) { + return table_ == null + ? com.google.cloud.bigquery.biglake.v1alpha1.Table.getDefaultInstance() + : table_; + } else { + return tableBuilder_.getMessage(); + } + } + /** + * + * + *
+     * Required. The table to update.
+     * The table's `name` field is used to identify the table to update.
+     * Format:
+     * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}/tables/{table_id}
+     * 
+ * + * + * .google.cloud.bigquery.biglake.v1alpha1.Table table = 1 [(.google.api.field_behavior) = REQUIRED]; + * + */ + public Builder setTable(com.google.cloud.bigquery.biglake.v1alpha1.Table value) { + if (tableBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + table_ = value; + } else { + tableBuilder_.setMessage(value); + } + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + /** + * + * + *
+     * Required. The table to update.
+     * The table's `name` field is used to identify the table to update.
+     * Format:
+     * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}/tables/{table_id}
+     * 
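+     *
+     * <p>A minimal usage sketch, not taken from the generated docs: the bracketed identifiers are
+     * placeholders to replace with real resource IDs, and the request shown only illustrates how
+     * this setter is typically combined with the table's resource name.
+     * <pre>{@code
+     * Table table =
+     *     Table.newBuilder()
+     *         .setName(
+     *             TableName.of("[PROJECT]", "[LOCATION]", "[CATALOG]", "[DATABASE]", "[TABLE]")
+     *                 .toString())
+     *         .build();
+     * UpdateTableRequest request = UpdateTableRequest.newBuilder().setTable(table).build();
+     * }</pre>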
+ * + * + * .google.cloud.bigquery.biglake.v1alpha1.Table table = 1 [(.google.api.field_behavior) = REQUIRED]; + * + */ + public Builder setTable( + com.google.cloud.bigquery.biglake.v1alpha1.Table.Builder builderForValue) { + if (tableBuilder_ == null) { + table_ = builderForValue.build(); + } else { + tableBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + /** + * + * + *
+     * Required. The table to update.
+     * The table's `name` field is used to identify the table to update.
+     * Format:
+     * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}/tables/{table_id}
+     * 
+ * + * + * .google.cloud.bigquery.biglake.v1alpha1.Table table = 1 [(.google.api.field_behavior) = REQUIRED]; + * + */ + public Builder mergeTable(com.google.cloud.bigquery.biglake.v1alpha1.Table value) { + if (tableBuilder_ == null) { + if (((bitField0_ & 0x00000001) != 0) + && table_ != null + && table_ != com.google.cloud.bigquery.biglake.v1alpha1.Table.getDefaultInstance()) { + getTableBuilder().mergeFrom(value); + } else { + table_ = value; + } + } else { + tableBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + /** + * + * + *
+     * Required. The table to update.
+     * The table's `name` field is used to identify the table to update.
+     * Format:
+     * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}/tables/{table_id}
+     * 
+ * + * + * .google.cloud.bigquery.biglake.v1alpha1.Table table = 1 [(.google.api.field_behavior) = REQUIRED]; + * + */ + public Builder clearTable() { + bitField0_ = (bitField0_ & ~0x00000001); + table_ = null; + if (tableBuilder_ != null) { + tableBuilder_.dispose(); + tableBuilder_ = null; + } + onChanged(); + return this; + } + /** + * + * + *
+     * Required. The table to update.
+     * The table's `name` field is used to identify the table to update.
+     * Format:
+     * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}/tables/{table_id}
+     * 
+ * + * + * .google.cloud.bigquery.biglake.v1alpha1.Table table = 1 [(.google.api.field_behavior) = REQUIRED]; + * + */ + public com.google.cloud.bigquery.biglake.v1alpha1.Table.Builder getTableBuilder() { + bitField0_ |= 0x00000001; + onChanged(); + return getTableFieldBuilder().getBuilder(); + } + /** + * + * + *
+     * Required. The table to update.
+     * The table's `name` field is used to identify the table to update.
+     * Format:
+     * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}/tables/{table_id}
+     * 
+ * + * + * .google.cloud.bigquery.biglake.v1alpha1.Table table = 1 [(.google.api.field_behavior) = REQUIRED]; + * + */ + public com.google.cloud.bigquery.biglake.v1alpha1.TableOrBuilder getTableOrBuilder() { + if (tableBuilder_ != null) { + return tableBuilder_.getMessageOrBuilder(); + } else { + return table_ == null + ? com.google.cloud.bigquery.biglake.v1alpha1.Table.getDefaultInstance() + : table_; + } + } + /** + * + * + *
+     * Required. The table to update.
+     * The table's `name` field is used to identify the table to update.
+     * Format:
+     * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}/tables/{table_id}
+     * 
+ * + * + * .google.cloud.bigquery.biglake.v1alpha1.Table table = 1 [(.google.api.field_behavior) = REQUIRED]; + * + */ + private com.google.protobuf.SingleFieldBuilderV3< + com.google.cloud.bigquery.biglake.v1alpha1.Table, + com.google.cloud.bigquery.biglake.v1alpha1.Table.Builder, + com.google.cloud.bigquery.biglake.v1alpha1.TableOrBuilder> + getTableFieldBuilder() { + if (tableBuilder_ == null) { + tableBuilder_ = + new com.google.protobuf.SingleFieldBuilderV3< + com.google.cloud.bigquery.biglake.v1alpha1.Table, + com.google.cloud.bigquery.biglake.v1alpha1.Table.Builder, + com.google.cloud.bigquery.biglake.v1alpha1.TableOrBuilder>( + getTable(), getParentForChildren(), isClean()); + table_ = null; + } + return tableBuilder_; + } + + private com.google.protobuf.FieldMask updateMask_; + private com.google.protobuf.SingleFieldBuilderV3< + com.google.protobuf.FieldMask, + com.google.protobuf.FieldMask.Builder, + com.google.protobuf.FieldMaskOrBuilder> + updateMaskBuilder_; + /** + * + * + *
+     * The list of fields to update.
+     * For the `FieldMask` definition, see
+     * https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask
+     * If not set, defaults to all of the fields that are allowed to be updated.
+     * 
+ * + * .google.protobuf.FieldMask update_mask = 2; + * + * @return Whether the updateMask field is set. + */ + public boolean hasUpdateMask() { + return ((bitField0_ & 0x00000002) != 0); + } + /** + * + * + *
+     * The list of fields to update.
+     * For the `FieldMask` definition, see
+     * https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask
+     * If not set, defaults to all of the fields that are allowed to be updated.
+     * 
+ * + * .google.protobuf.FieldMask update_mask = 2; + * + * @return The updateMask. + */ + public com.google.protobuf.FieldMask getUpdateMask() { + if (updateMaskBuilder_ == null) { + return updateMask_ == null + ? com.google.protobuf.FieldMask.getDefaultInstance() + : updateMask_; + } else { + return updateMaskBuilder_.getMessage(); + } + } + /** + * + * + *
+     * The list of fields to update.
+     * For the `FieldMask` definition, see
+     * https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask
+     * If not set, defaults to all of the fields that are allowed to be updated.
+     * 
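+     *
+     * <p>A minimal usage sketch, not part of the generated docs: it assumes a {@code table} has
+     * already been built, and the "hive_options" path is only an illustration drawn from the
+     * Table message; check which paths the service actually accepts for updates.
+     * <pre>{@code
+     * FieldMask updateMask = FieldMask.newBuilder().addPaths("hive_options").build();
+     * UpdateTableRequest request =
+     *     UpdateTableRequest.newBuilder().setTable(table).setUpdateMask(updateMask).build();
+     * }</pre>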
+ * + * .google.protobuf.FieldMask update_mask = 2; + */ + public Builder setUpdateMask(com.google.protobuf.FieldMask value) { + if (updateMaskBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + updateMask_ = value; + } else { + updateMaskBuilder_.setMessage(value); + } + bitField0_ |= 0x00000002; + onChanged(); + return this; + } + /** + * + * + *
+     * The list of fields to update.
+     * For the `FieldMask` definition, see
+     * https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask
+     * If not set, defaults to all of the fields that are allowed to be updated.
+     * 
+ * + * .google.protobuf.FieldMask update_mask = 2; + */ + public Builder setUpdateMask(com.google.protobuf.FieldMask.Builder builderForValue) { + if (updateMaskBuilder_ == null) { + updateMask_ = builderForValue.build(); + } else { + updateMaskBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000002; + onChanged(); + return this; + } + /** + * + * + *
+     * The list of fields to update.
+     * For the `FieldMask` definition, see
+     * https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask
+     * If not set, defaults to all of the fields that are allowed to be updated.
+     * 
+ * + * .google.protobuf.FieldMask update_mask = 2; + */ + public Builder mergeUpdateMask(com.google.protobuf.FieldMask value) { + if (updateMaskBuilder_ == null) { + if (((bitField0_ & 0x00000002) != 0) + && updateMask_ != null + && updateMask_ != com.google.protobuf.FieldMask.getDefaultInstance()) { + getUpdateMaskBuilder().mergeFrom(value); + } else { + updateMask_ = value; + } + } else { + updateMaskBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000002; + onChanged(); + return this; + } + /** + * + * + *
+     * The list of fields to update.
+     * For the `FieldMask` definition, see
+     * https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask
+     * If not set, defaults to all of the fields that are allowed to be updated.
+     * 
+ * + * .google.protobuf.FieldMask update_mask = 2; + */ + public Builder clearUpdateMask() { + bitField0_ = (bitField0_ & ~0x00000002); + updateMask_ = null; + if (updateMaskBuilder_ != null) { + updateMaskBuilder_.dispose(); + updateMaskBuilder_ = null; + } + onChanged(); + return this; + } + /** + * + * + *
+     * The list of fields to update.
+     * For the `FieldMask` definition, see
+     * https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask
+     * If not set, defaults to all of the fields that are allowed to be updated.
+     * 
+ * + * .google.protobuf.FieldMask update_mask = 2; + */ + public com.google.protobuf.FieldMask.Builder getUpdateMaskBuilder() { + bitField0_ |= 0x00000002; + onChanged(); + return getUpdateMaskFieldBuilder().getBuilder(); + } + /** + * + * + *
+     * The list of fields to update.
+     * For the `FieldMask` definition, see
+     * https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask
+     * If not set, defaults to all of the fields that are allowed to be updated.
+     * 
+ * + * .google.protobuf.FieldMask update_mask = 2; + */ + public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() { + if (updateMaskBuilder_ != null) { + return updateMaskBuilder_.getMessageOrBuilder(); + } else { + return updateMask_ == null + ? com.google.protobuf.FieldMask.getDefaultInstance() + : updateMask_; + } + } + /** + * + * + *
+     * The list of fields to update.
+     * For the `FieldMask` definition, see
+     * https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask
+     * If not set, defaults to all of the fields that are allowed to be updated.
+     * 
+ * + * .google.protobuf.FieldMask update_mask = 2; + */ + private com.google.protobuf.SingleFieldBuilderV3< + com.google.protobuf.FieldMask, + com.google.protobuf.FieldMask.Builder, + com.google.protobuf.FieldMaskOrBuilder> + getUpdateMaskFieldBuilder() { + if (updateMaskBuilder_ == null) { + updateMaskBuilder_ = + new com.google.protobuf.SingleFieldBuilderV3< + com.google.protobuf.FieldMask, + com.google.protobuf.FieldMask.Builder, + com.google.protobuf.FieldMaskOrBuilder>( + getUpdateMask(), getParentForChildren(), isClean()); + updateMask_ = null; + } + return updateMaskBuilder_; + } + + @java.lang.Override + public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + // @@protoc_insertion_point(builder_scope:google.cloud.bigquery.biglake.v1alpha1.UpdateTableRequest) + } + + // @@protoc_insertion_point(class_scope:google.cloud.bigquery.biglake.v1alpha1.UpdateTableRequest) + private static final com.google.cloud.bigquery.biglake.v1alpha1.UpdateTableRequest + DEFAULT_INSTANCE; + + static { + DEFAULT_INSTANCE = new com.google.cloud.bigquery.biglake.v1alpha1.UpdateTableRequest(); + } + + public static com.google.cloud.bigquery.biglake.v1alpha1.UpdateTableRequest getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + @java.lang.Override + public UpdateTableRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + Builder builder = newBuilder(); + try { + builder.mergeFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(builder.buildPartial()); + } catch (com.google.protobuf.UninitializedMessageException e) { + throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException(e) + .setUnfinishedMessage(builder.buildPartial()); + } + return builder.buildPartial(); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public com.google.cloud.bigquery.biglake.v1alpha1.UpdateTableRequest getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } +} diff --git a/java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/UpdateTableRequestOrBuilder.java b/java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/UpdateTableRequestOrBuilder.java new file mode 100644 index 000000000000..f73f9bb3df7d --- /dev/null +++ b/java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/UpdateTableRequestOrBuilder.java @@ -0,0 +1,119 @@ +/* + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: google/cloud/bigquery/biglake/v1alpha1/metastore.proto + +package com.google.cloud.bigquery.biglake.v1alpha1; + +public interface UpdateTableRequestOrBuilder + extends + // @@protoc_insertion_point(interface_extends:google.cloud.bigquery.biglake.v1alpha1.UpdateTableRequest) + com.google.protobuf.MessageOrBuilder { + + /** + * + * + *
+   * Required. The table to update.
+   * The table's `name` field is used to identify the table to update.
+   * Format:
+   * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}/tables/{table_id}
+   * 
+ * + * + * .google.cloud.bigquery.biglake.v1alpha1.Table table = 1 [(.google.api.field_behavior) = REQUIRED]; + * + * + * @return Whether the table field is set. + */ + boolean hasTable(); + /** + * + * + *
+   * Required. The table to update.
+   * The table's `name` field is used to identify the table to update.
+   * Format:
+   * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}/tables/{table_id}
+   * 
+ * + * + * .google.cloud.bigquery.biglake.v1alpha1.Table table = 1 [(.google.api.field_behavior) = REQUIRED]; + * + * + * @return The table. + */ + com.google.cloud.bigquery.biglake.v1alpha1.Table getTable(); + /** + * + * + *
+   * Required. The table to update.
+   * The table's `name` field is used to identify the table to update.
+   * Format:
+   * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}/tables/{table_id}
+   * 
+ * + * + * .google.cloud.bigquery.biglake.v1alpha1.Table table = 1 [(.google.api.field_behavior) = REQUIRED]; + * + */ + com.google.cloud.bigquery.biglake.v1alpha1.TableOrBuilder getTableOrBuilder(); + + /** + * + * + *
+   * The list of fields to update.
+   * For the `FieldMask` definition, see
+   * https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask
+   * If not set, defaults to all of the fields that are allowed to be updated.
+   * 
+ * + * .google.protobuf.FieldMask update_mask = 2; + * + * @return Whether the updateMask field is set. + */ + boolean hasUpdateMask(); + /** + * + * + *
+   * The list of fields to update.
+   * For the `FieldMask` definition, see
+   * https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask
+   * If not set, defaults to all of the fields that are allowed to be updated.
+   * 
+ * + * .google.protobuf.FieldMask update_mask = 2; + * + * @return The updateMask. + */ + com.google.protobuf.FieldMask getUpdateMask(); + /** + * + * + *
+   * The list of fields to update.
+   * For the `FieldMask` definition, see
+   * https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask
+   * If not set, defaults to all of the fields that are allowed to be updated.
+   * 
+ * + * .google.protobuf.FieldMask update_mask = 2; + */ + com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder(); +} diff --git a/java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/proto/google/cloud/bigquery/biglake/v1alpha1/metastore.proto b/java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/proto/google/cloud/bigquery/biglake/v1alpha1/metastore.proto new file mode 100644 index 000000000000..7f6b67596bec --- /dev/null +++ b/java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/proto/google/cloud/bigquery/biglake/v1alpha1/metastore.proto @@ -0,0 +1,782 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.cloud.bigquery.biglake.v1alpha1; + +import "google/api/annotations.proto"; +import "google/api/client.proto"; +import "google/api/field_behavior.proto"; +import "google/api/resource.proto"; +import "google/protobuf/empty.proto"; +import "google/protobuf/field_mask.proto"; +import "google/protobuf/timestamp.proto"; + +option go_package = "cloud.google.com/go/bigquery/biglake/apiv1alpha1/biglakepb;biglakepb"; +option java_multiple_files = true; +option java_outer_classname = "MetastoreProto"; +option java_package = "com.google.cloud.bigquery.biglake.v1alpha1"; + +// BigLake Metastore is a serverless, highly available, multi-tenant runtime +// metastore for Google Cloud Data Analytics products. +// +// The BigLake Metastore API defines the following resource model: +// +// * A collection of Google Cloud projects: `/projects/*` +// * Each project has a collection of available locations: `/locations/*` +// * Each location has a collection of catalogs: `/catalogs/*` +// * Each catalog has a collection of databases: `/databases/*` +// * Each database has a collection of tables: `/tables/*` +service MetastoreService { + option (google.api.default_host) = "biglake.googleapis.com"; + option (google.api.oauth_scopes) = + "https://www.googleapis.com/auth/bigquery," + "https://www.googleapis.com/auth/cloud-platform"; + + // Creates a new catalog. + rpc CreateCatalog(CreateCatalogRequest) returns (Catalog) { + option (google.api.http) = { + post: "/v1alpha1/{parent=projects/*/locations/*}/catalogs" + body: "catalog" + }; + option (google.api.method_signature) = "parent,catalog,catalog_id"; + } + + // Deletes an existing catalog specified by the catalog ID. + rpc DeleteCatalog(DeleteCatalogRequest) returns (Catalog) { + option (google.api.http) = { + delete: "/v1alpha1/{name=projects/*/locations/*/catalogs/*}" + }; + option (google.api.method_signature) = "name"; + } + + // Gets the catalog specified by the resource name. + rpc GetCatalog(GetCatalogRequest) returns (Catalog) { + option (google.api.http) = { + get: "/v1alpha1/{name=projects/*/locations/*/catalogs/*}" + }; + option (google.api.method_signature) = "name"; + } + + // List all catalogs in a specified project. 
+ rpc ListCatalogs(ListCatalogsRequest) returns (ListCatalogsResponse) { + option (google.api.http) = { + get: "/v1alpha1/{parent=projects/*/locations/*}/catalogs" + }; + option (google.api.method_signature) = "parent"; + } + + // Creates a new database. + rpc CreateDatabase(CreateDatabaseRequest) returns (Database) { + option (google.api.http) = { + post: "/v1alpha1/{parent=projects/*/locations/*/catalogs/*}/databases" + body: "database" + }; + option (google.api.method_signature) = "parent,database,database_id"; + } + + // Deletes an existing database specified by the database ID. + rpc DeleteDatabase(DeleteDatabaseRequest) returns (Database) { + option (google.api.http) = { + delete: "/v1alpha1/{name=projects/*/locations/*/catalogs/*/databases/*}" + }; + option (google.api.method_signature) = "name"; + } + + // Updates an existing database specified by the database ID. + rpc UpdateDatabase(UpdateDatabaseRequest) returns (Database) { + option (google.api.http) = { + patch: "/v1alpha1/{database.name=projects/*/locations/*/catalogs/*/databases/*}" + body: "database" + }; + option (google.api.method_signature) = "database,update_mask"; + } + + // Gets the database specified by the resource name. + rpc GetDatabase(GetDatabaseRequest) returns (Database) { + option (google.api.http) = { + get: "/v1alpha1/{name=projects/*/locations/*/catalogs/*/databases/*}" + }; + option (google.api.method_signature) = "name"; + } + + // List all databases in a specified catalog. + rpc ListDatabases(ListDatabasesRequest) returns (ListDatabasesResponse) { + option (google.api.http) = { + get: "/v1alpha1/{parent=projects/*/locations/*/catalogs/*}/databases" + }; + option (google.api.method_signature) = "parent"; + } + + // Creates a new table. + rpc CreateTable(CreateTableRequest) returns (Table) { + option (google.api.http) = { + post: "/v1alpha1/{parent=projects/*/locations/*/catalogs/*/databases/*}/tables" + body: "table" + }; + option (google.api.method_signature) = "parent,table,table_id"; + } + + // Deletes an existing table specified by the table ID. + rpc DeleteTable(DeleteTableRequest) returns (Table) { + option (google.api.http) = { + delete: "/v1alpha1/{name=projects/*/locations/*/catalogs/*/databases/*/tables/*}" + }; + option (google.api.method_signature) = "name"; + } + + // Updates an existing table specified by the table ID. + rpc UpdateTable(UpdateTableRequest) returns (Table) { + option (google.api.http) = { + patch: "/v1alpha1/{table.name=projects/*/locations/*/catalogs/*/databases/*/tables/*}" + body: "table" + }; + option (google.api.method_signature) = "table,update_mask"; + } + + // Gets the table specified by the resource name. + rpc GetTable(GetTableRequest) returns (Table) { + option (google.api.http) = { + get: "/v1alpha1/{name=projects/*/locations/*/catalogs/*/databases/*/tables/*}" + }; + option (google.api.method_signature) = "name"; + } + + // List all tables in a specified database. + rpc ListTables(ListTablesRequest) returns (ListTablesResponse) { + option (google.api.http) = { + get: "/v1alpha1/{parent=projects/*/locations/*/catalogs/*/databases/*}/tables" + }; + option (google.api.method_signature) = "parent"; + } + + // Creates a new lock. + rpc CreateLock(CreateLockRequest) returns (Lock) { + option (google.api.http) = { + post: "/v1alpha1/{parent=projects/*/locations/*/catalogs/*/databases/*}/locks" + body: "lock" + }; + option (google.api.method_signature) = "parent,lock"; + } + + // Deletes an existing lock specified by the lock ID. 
+ rpc DeleteLock(DeleteLockRequest) returns (google.protobuf.Empty) { + option (google.api.http) = { + delete: "/v1alpha1/{name=projects/*/locations/*/catalogs/*/databases/*/locks/*}" + }; + option (google.api.method_signature) = "name"; + } + + // Checks the state of a lock specified by the lock ID. + rpc CheckLock(CheckLockRequest) returns (Lock) { + option (google.api.http) = { + post: "/v1alpha1/{name=projects/*/locations/*/catalogs/*/databases/*/locks/*}:check" + body: "*" + }; + option (google.api.method_signature) = "name"; + } + + // List all locks in a specified database. + rpc ListLocks(ListLocksRequest) returns (ListLocksResponse) { + option (google.api.http) = { + get: "/v1alpha1/{parent=projects/*/locations/*/catalogs/*/databases/*}/locks" + }; + option (google.api.method_signature) = "parent"; + } +} + +// Catalog is the container of databases. +message Catalog { + option (google.api.resource) = { + type: "biglake.googleapis.com/Catalog" + pattern: "projects/{project}/locations/{location}/catalogs/{catalog}" + }; + + // Output only. The resource name. + // Format: + // projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id} + string name = 1 [ + (google.api.field_behavior) = OUTPUT_ONLY, + (google.api.resource_reference) = { type: "biglake.googleapis.com/Catalog" } + ]; + + // Output only. The creation time of the catalog. + google.protobuf.Timestamp create_time = 2 + [(google.api.field_behavior) = OUTPUT_ONLY]; + + // Output only. The last modification time of the catalog. + google.protobuf.Timestamp update_time = 3 + [(google.api.field_behavior) = OUTPUT_ONLY]; + + // Output only. The deletion time of the catalog. Only set after the catalog + // is deleted. + google.protobuf.Timestamp delete_time = 4 + [(google.api.field_behavior) = OUTPUT_ONLY]; + + // Output only. The time when this catalog is considered expired. Only set + // after the catalog is deleted. + google.protobuf.Timestamp expire_time = 5 + [(google.api.field_behavior) = OUTPUT_ONLY]; +} + +// Database is the container of tables. +message Database { + option (google.api.resource) = { + type: "biglake.googleapis.com/Database" + pattern: "projects/{project}/locations/{location}/catalogs/{catalog}/databases/{database}" + }; + + // The database type. + enum Type { + // The type is not specified. + TYPE_UNSPECIFIED = 0; + + // Represents a database storing tables compatible with Hive Metastore + // tables. + HIVE = 1; + } + + // Options specified for the database type. + oneof options { + // Options of a Hive database. + HiveDatabaseOptions hive_options = 7; + } + + // Output only. The resource name. + // Format: + // projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id} + string name = 1 [ + (google.api.field_behavior) = OUTPUT_ONLY, + (google.api.resource_reference) = { + type: "biglake.googleapis.com/Database" + } + ]; + + // Output only. The creation time of the database. + google.protobuf.Timestamp create_time = 2 + [(google.api.field_behavior) = OUTPUT_ONLY]; + + // Output only. The last modification time of the database. + google.protobuf.Timestamp update_time = 3 + [(google.api.field_behavior) = OUTPUT_ONLY]; + + // Output only. The deletion time of the database. Only set after the database + // is deleted. + google.protobuf.Timestamp delete_time = 4 + [(google.api.field_behavior) = OUTPUT_ONLY]; + + // Output only. The time when this database is considered expired. Only set + // after the database is deleted. 
+ google.protobuf.Timestamp expire_time = 5 + [(google.api.field_behavior) = OUTPUT_ONLY]; + + // The database type. + Type type = 6; +} + +// Represents a table. +message Table { + option (google.api.resource) = { + type: "biglake.googleapis.com/Table" + pattern: "projects/{project}/locations/{location}/catalogs/{catalog}/databases/{database}/tables/{table}" + }; + + // The table type. + enum Type { + // The type is not specified. + TYPE_UNSPECIFIED = 0; + + // Represents a table compatible with Hive Metastore tables. + HIVE = 1; + } + + // Options specified for the table type. + oneof options { + // Options of a Hive table. + HiveTableOptions hive_options = 7; + } + + // Output only. The resource name. + // Format: + // projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}/tables/{table_id} + string name = 1 [ + (google.api.field_behavior) = OUTPUT_ONLY, + (google.api.resource_reference) = { type: "biglake.googleapis.com/Table" } + ]; + + // Output only. The creation time of the table. + google.protobuf.Timestamp create_time = 2 + [(google.api.field_behavior) = OUTPUT_ONLY]; + + // Output only. The last modification time of the table. + google.protobuf.Timestamp update_time = 3 + [(google.api.field_behavior) = OUTPUT_ONLY]; + + // Output only. The deletion time of the table. Only set after the table is + // deleted. + google.protobuf.Timestamp delete_time = 4 + [(google.api.field_behavior) = OUTPUT_ONLY]; + + // Output only. The time when this table is considered expired. Only set after + // the table is deleted. + google.protobuf.Timestamp expire_time = 5 + [(google.api.field_behavior) = OUTPUT_ONLY]; + + // The table type. + Type type = 6; +} + +// Represents a lock. +message Lock { + option (google.api.resource) = { + type: "biglake.googleapis.com/Lock" + pattern: "projects/{project}/locations/{location}/catalogs/{catalog}/databases/{database}/locks/{lock}" + }; + + // The lock type. + enum Type { + // The type is not specified. + TYPE_UNSPECIFIED = 0; + + // An exclusive lock prevents another lock from being created on the same + // resource. + EXCLUSIVE = 1; + } + + // The lock state. + enum State { + // The state is not specified. + STATE_UNSPECIFIED = 0; + + // Waiting to acquire the lock. + WAITING = 1; + + // The lock has been acquired. + ACQUIRED = 2; + } + + // The resource that the lock will be created on. + oneof resources { + // The table ID (not fully qualified name) in the same database that the + // lock will be created on. The table must exist. + string table_id = 5; + } + + // Output only. The resource name. + // Format: + // projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}/locks/{lock_id} + string name = 1 [ + (google.api.field_behavior) = OUTPUT_ONLY, + (google.api.resource_reference) = { type: "biglake.googleapis.com/Lock" } + ]; + + // Output only. The creation time of the lock. + google.protobuf.Timestamp create_time = 2 + [(google.api.field_behavior) = OUTPUT_ONLY]; + + // The lock type. + Type type = 3; + + // Output only. The lock state. + State state = 4 [(google.api.field_behavior) = OUTPUT_ONLY]; +} + +// Request message for the CreateCatalog method. +message CreateCatalogRequest { + // Required. The parent resource where this catalog will be created. 
+ // Format: projects/{project_id_or_number}/locations/{location_id} + string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "locations.googleapis.com/Location" + } + ]; + + // Required. The catalog to create. + // The `name` field does not need to be provided. + Catalog catalog = 2 [(google.api.field_behavior) = REQUIRED]; + + // Required. The ID to use for the catalog, which will become the final + // component of the catalog's resource name. + string catalog_id = 3 [(google.api.field_behavior) = REQUIRED]; +} + +// Request message for the DeleteCatalog method. +message DeleteCatalogRequest { + // Required. The name of the catalog to delete. + // Format: + // projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id} + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { type: "biglake.googleapis.com/Catalog" } + ]; +} + +// Request message for the GetCatalog method. +message GetCatalogRequest { + // Required. The name of the catalog to retrieve. + // Format: + // projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id} + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { type: "biglake.googleapis.com/Catalog" } + ]; +} + +// Request message for the ListCatalogs method. +message ListCatalogsRequest { + // Required. The parent, which owns this collection of catalogs. + // Format: projects/{project_id_or_number}/locations/{location_id} + string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "locations.googleapis.com/Location" + } + ]; + + // The maximum number of catalogs to return. The service may return fewer than + // this value. + // If unspecified, at most 50 catalogs will be returned. + // The maximum value is 1000; values above 1000 will be coerced to 1000. + int32 page_size = 2; + + // A page token, received from a previous `ListCatalogs` call. + // Provide this to retrieve the subsequent page. + // + // When paginating, all other parameters provided to `ListCatalogs` must match + // the call that provided the page token. + string page_token = 3; +} + +// Response message for the ListCatalogs method. +message ListCatalogsResponse { + // The catalogs from the specified project. + repeated Catalog catalogs = 1; + + // A token, which can be sent as `page_token` to retrieve the next page. + // If this field is omitted, there are no subsequent pages. + string next_page_token = 2; +} + +// Request message for the CreateDatabase method. +message CreateDatabaseRequest { + // Required. The parent resource where this database will be created. + // Format: + // projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id} + string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { type: "biglake.googleapis.com/Catalog" } + ]; + + // Required. The database to create. + // The `name` field does not need to be provided. + Database database = 2 [(google.api.field_behavior) = REQUIRED]; + + // Required. The ID to use for the database, which will become the final + // component of the database's resource name. + string database_id = 3 [(google.api.field_behavior) = REQUIRED]; +} + +// Request message for the DeleteDatabase method. +message DeleteDatabaseRequest { + // Required. The name of the database to delete. 
+ // Format: + // projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id} + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "biglake.googleapis.com/Database" + } + ]; +} + +// Request message for the UpdateDatabase method. +message UpdateDatabaseRequest { + // Required. The database to update. + // + // The database's `name` field is used to identify the database to update. + // Format: + // projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id} + Database database = 1 [(google.api.field_behavior) = REQUIRED]; + + // The list of fields to update. + // + // For the `FieldMask` definition, see + // https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask + // If not set, defaults to all of the fields that are allowed to update. + google.protobuf.FieldMask update_mask = 2; +} + +// Request message for the GetDatabase method. +message GetDatabaseRequest { + // Required. The name of the database to retrieve. + // Format: + // projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id} + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "biglake.googleapis.com/Database" + } + ]; +} + +// Request message for the ListDatabases method. +message ListDatabasesRequest { + // Required. The parent, which owns this collection of databases. + // Format: + // projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id} + string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { type: "biglake.googleapis.com/Catalog" } + ]; + + // The maximum number of databases to return. The service may return fewer + // than this value. + // If unspecified, at most 50 databases will be returned. + // The maximum value is 1000; values above 1000 will be coerced to 1000. + int32 page_size = 2; + + // A page token, received from a previous `ListDatabases` call. + // Provide this to retrieve the subsequent page. + // + // When paginating, all other parameters provided to `ListDatabases` must + // match the call that provided the page token. + string page_token = 3; +} + +// Response message for the ListDatabases method. +message ListDatabasesResponse { + // The databases from the specified catalog. + repeated Database databases = 1; + + // A token, which can be sent as `page_token` to retrieve the next page. + // If this field is omitted, there are no subsequent pages. + string next_page_token = 2; +} + +// Request message for the CreateTable method. +message CreateTableRequest { + // Required. The parent resource where this table will be created. + // Format: + // projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id} + string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "biglake.googleapis.com/Database" + } + ]; + + // Required. The table to create. The `name` field does not need to be + // provided for the table creation. + Table table = 2 [(google.api.field_behavior) = REQUIRED]; + + // Required. The ID to use for the table, which will become the final + // component of the table's resource name. + string table_id = 3 [(google.api.field_behavior) = REQUIRED]; +} + +// Request message for the DeleteTable method. +message DeleteTableRequest { + // Required. The name of the table to delete. 
+ // Format: + // projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}/tables/{table_id} + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { type: "biglake.googleapis.com/Table" } + ]; +} + +// Request message for the UpdateTable method. +message UpdateTableRequest { + // Required. The table to update. + // + // The table's `name` field is used to identify the database to update. + // Format: + // projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}/tables/{table_id} + Table table = 1 [(google.api.field_behavior) = REQUIRED]; + + // The list of fields to update. + // + // For the `FieldMask` definition, see + // https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask + // If not set, defaults to all of the fields that are allowed to update. + google.protobuf.FieldMask update_mask = 2; +} + +// Request message for the GetTable method. +message GetTableRequest { + // Required. The name of the table to retrieve. + // Format: + // projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}/tables/{table_id} + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { type: "biglake.googleapis.com/Table" } + ]; +} + +// Request message for the ListTables method. +message ListTablesRequest { + // Required. The parent, which owns this collection of tables. + // Format: + // projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id} + string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "biglake.googleapis.com/Database" + } + ]; + + // The maximum number of tables to return. The service may return fewer than + // this value. + // If unspecified, at most 50 tables will be returned. + // The maximum value is 1000; values above 1000 will be coerced to 1000. + int32 page_size = 2; + + // A page token, received from a previous `ListTables` call. + // Provide this to retrieve the subsequent page. + // + // When paginating, all other parameters provided to `ListTables` must match + // the call that provided the page token. + string page_token = 3; +} + +// Response message for the ListTables method. +message ListTablesResponse { + // The tables from the specified database. + repeated Table tables = 1; + + // A token, which can be sent as `page_token` to retrieve the next page. + // If this field is omitted, there are no subsequent pages. + string next_page_token = 2; +} + +// Request message for the CreateLock method. +message CreateLockRequest { + // Required. The parent resource where this lock will be created. + // Format: + // projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id} + string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "biglake.googleapis.com/Database" + } + ]; + + // Required. The lock to create. The `name` field does not need to be provided + // for the lock creation. + Lock lock = 2 [(google.api.field_behavior) = REQUIRED]; +} + +// Request message for the DeleteLock method. +message DeleteLockRequest { + // Required. The name of the lock to delete. 
+ // Format: + // projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}/locks/{lock_id} + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { type: "biglake.googleapis.com/Lock" } + ]; +} + +// Request message for the CheckLock method. +message CheckLockRequest { + // Required. The name of the lock to check. + // Format: + // projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}/locks/{lock_id} + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { type: "biglake.googleapis.com/Lock" } + ]; +} + +// Request message for the ListLocks method. +message ListLocksRequest { + // Required. The parent, which owns this collection of locks. + // Format: + // projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id} + string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "biglake.googleapis.com/Database" + } + ]; + + // The maximum number of locks to return. The service may return fewer than + // this value. + // If unspecified, at most 50 locks will be returned. + // The maximum value is 1000; values above 1000 will be coerced to 1000. + int32 page_size = 2; + + // A page token, received from a previous `ListLocks` call. + // Provide this to retrieve the subsequent page. + // + // When paginating, all other parameters provided to `ListLocks` must match + // the call that provided the page token. + string page_token = 3; +} + +// Response message for the ListLocks method. +message ListLocksResponse { + // The locks from the specified database. + repeated Lock locks = 1; + + // A token, which can be sent as `page_token` to retrieve the next page. + // If this field is omitted, there are no subsequent pages. + string next_page_token = 2; +} + +// Options of a Hive database. +message HiveDatabaseOptions { + // Cloud Storage folder URI where the database data is stored, starting with + // "gs://". + string location_uri = 1; + + // Stores user supplied Hive database parameters. + map parameters = 2; +} + +// Options of a Hive table. +message HiveTableOptions { + // Serializer and deserializer information. + message SerDeInfo { + // The fully qualified Java class name of the serialization library. + string serialization_lib = 1; + } + + // Stores physical storage information of the data. + message StorageDescriptor { + // Cloud Storage folder URI where the table data is stored, starting with + // "gs://". + string location_uri = 1; + + // The fully qualified Java class name of the input format. + string input_format = 2; + + // The fully qualified Java class name of the output format. + string output_format = 3; + + // Serializer and deserializer information. + SerDeInfo serde_info = 4; + } + + // Stores user supplied Hive table parameters. + map parameters = 1; + + // Hive table type. For example, MANAGED_TABLE, EXTERNAL_TABLE. + string table_type = 2; + + // Stores physical storage information of the data. 
+ StorageDescriptor storage_descriptor = 3; +} diff --git a/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/checklock/AsyncCheckLock.java b/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/checklock/AsyncCheckLock.java new file mode 100644 index 000000000000..5199be04138c --- /dev/null +++ b/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/checklock/AsyncCheckLock.java @@ -0,0 +1,51 @@ +/* + * Copyright 2022 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.biglake.v1alpha1.samples; + +// [START biglake_v1alpha1_generated_MetastoreService_CheckLock_async] +import com.google.api.core.ApiFuture; +import com.google.cloud.bigquery.biglake.v1alpha1.CheckLockRequest; +import com.google.cloud.bigquery.biglake.v1alpha1.Lock; +import com.google.cloud.bigquery.biglake.v1alpha1.LockName; +import com.google.cloud.bigquery.biglake.v1alpha1.MetastoreServiceClient; + +public class AsyncCheckLock { + + public static void main(String[] args) throws Exception { + asyncCheckLock(); + } + + public static void asyncCheckLock() throws Exception { + // This snippet has been automatically generated and should be regarded as a code template only. + // It will require modifications to work: + // - It may require correct/in-range values for request initialization. + // - It may require specifying regional endpoints when creating the service client as shown in + // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library + try (MetastoreServiceClient metastoreServiceClient = MetastoreServiceClient.create()) { + CheckLockRequest request = + CheckLockRequest.newBuilder() + .setName( + LockName.of("[PROJECT]", "[LOCATION]", "[CATALOG]", "[DATABASE]", "[LOCK]") + .toString()) + .build(); + ApiFuture future = metastoreServiceClient.checkLockCallable().futureCall(request); + // Do something. + Lock response = future.get(); + } + } +} +// [END biglake_v1alpha1_generated_MetastoreService_CheckLock_async] diff --git a/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/checklock/SyncCheckLock.java b/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/checklock/SyncCheckLock.java new file mode 100644 index 000000000000..bae4da9c167a --- /dev/null +++ b/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/checklock/SyncCheckLock.java @@ -0,0 +1,48 @@ +/* + * Copyright 2022 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.biglake.v1alpha1.samples; + +// [START biglake_v1alpha1_generated_MetastoreService_CheckLock_sync] +import com.google.cloud.bigquery.biglake.v1alpha1.CheckLockRequest; +import com.google.cloud.bigquery.biglake.v1alpha1.Lock; +import com.google.cloud.bigquery.biglake.v1alpha1.LockName; +import com.google.cloud.bigquery.biglake.v1alpha1.MetastoreServiceClient; + +public class SyncCheckLock { + + public static void main(String[] args) throws Exception { + syncCheckLock(); + } + + public static void syncCheckLock() throws Exception { + // This snippet has been automatically generated and should be regarded as a code template only. + // It will require modifications to work: + // - It may require correct/in-range values for request initialization. + // - It may require specifying regional endpoints when creating the service client as shown in + // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library + try (MetastoreServiceClient metastoreServiceClient = MetastoreServiceClient.create()) { + CheckLockRequest request = + CheckLockRequest.newBuilder() + .setName( + LockName.of("[PROJECT]", "[LOCATION]", "[CATALOG]", "[DATABASE]", "[LOCK]") + .toString()) + .build(); + Lock response = metastoreServiceClient.checkLock(request); + } + } +} +// [END biglake_v1alpha1_generated_MetastoreService_CheckLock_sync] diff --git a/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/checklock/SyncCheckLockLockname.java b/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/checklock/SyncCheckLockLockname.java new file mode 100644 index 000000000000..0d2f126d62d9 --- /dev/null +++ b/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/checklock/SyncCheckLockLockname.java @@ -0,0 +1,42 @@ +/* + * Copyright 2022 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.cloud.bigquery.biglake.v1alpha1.samples; + +// [START biglake_v1alpha1_generated_MetastoreService_CheckLock_Lockname_sync] +import com.google.cloud.bigquery.biglake.v1alpha1.Lock; +import com.google.cloud.bigquery.biglake.v1alpha1.LockName; +import com.google.cloud.bigquery.biglake.v1alpha1.MetastoreServiceClient; + +public class SyncCheckLockLockname { + + public static void main(String[] args) throws Exception { + syncCheckLockLockname(); + } + + public static void syncCheckLockLockname() throws Exception { + // This snippet has been automatically generated and should be regarded as a code template only. + // It will require modifications to work: + // - It may require correct/in-range values for request initialization. + // - It may require specifying regional endpoints when creating the service client as shown in + // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library + try (MetastoreServiceClient metastoreServiceClient = MetastoreServiceClient.create()) { + LockName name = LockName.of("[PROJECT]", "[LOCATION]", "[CATALOG]", "[DATABASE]", "[LOCK]"); + Lock response = metastoreServiceClient.checkLock(name); + } + } +} +// [END biglake_v1alpha1_generated_MetastoreService_CheckLock_Lockname_sync] diff --git a/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/checklock/SyncCheckLockString.java b/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/checklock/SyncCheckLockString.java new file mode 100644 index 000000000000..94afd25e805a --- /dev/null +++ b/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/checklock/SyncCheckLockString.java @@ -0,0 +1,43 @@ +/* + * Copyright 2022 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.biglake.v1alpha1.samples; + +// [START biglake_v1alpha1_generated_MetastoreService_CheckLock_String_sync] +import com.google.cloud.bigquery.biglake.v1alpha1.Lock; +import com.google.cloud.bigquery.biglake.v1alpha1.LockName; +import com.google.cloud.bigquery.biglake.v1alpha1.MetastoreServiceClient; + +public class SyncCheckLockString { + + public static void main(String[] args) throws Exception { + syncCheckLockString(); + } + + public static void syncCheckLockString() throws Exception { + // This snippet has been automatically generated and should be regarded as a code template only. + // It will require modifications to work: + // - It may require correct/in-range values for request initialization. 
+ // - It may require specifying regional endpoints when creating the service client as shown in + // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library + try (MetastoreServiceClient metastoreServiceClient = MetastoreServiceClient.create()) { + String name = + LockName.of("[PROJECT]", "[LOCATION]", "[CATALOG]", "[DATABASE]", "[LOCK]").toString(); + Lock response = metastoreServiceClient.checkLock(name); + } + } +} +// [END biglake_v1alpha1_generated_MetastoreService_CheckLock_String_sync] diff --git a/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/create/SyncCreateSetCredentialsProvider.java b/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/create/SyncCreateSetCredentialsProvider.java new file mode 100644 index 000000000000..b2aa0ba87e4e --- /dev/null +++ b/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/create/SyncCreateSetCredentialsProvider.java @@ -0,0 +1,45 @@ +/* + * Copyright 2022 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.biglake.v1alpha1.samples; + +// [START biglake_v1alpha1_generated_MetastoreService_Create_SetCredentialsProvider_sync] +import com.google.api.gax.core.FixedCredentialsProvider; +import com.google.cloud.bigquery.biglake.v1alpha1.MetastoreServiceClient; +import com.google.cloud.bigquery.biglake.v1alpha1.MetastoreServiceSettings; +import com.google.cloud.bigquery.biglake.v1alpha1.myCredentials; + +public class SyncCreateSetCredentialsProvider { + + public static void main(String[] args) throws Exception { + syncCreateSetCredentialsProvider(); + } + + public static void syncCreateSetCredentialsProvider() throws Exception { + // This snippet has been automatically generated and should be regarded as a code template only. + // It will require modifications to work: + // - It may require correct/in-range values for request initialization. 
+ // - It may require specifying regional endpoints when creating the service client as shown in + // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library + MetastoreServiceSettings metastoreServiceSettings = + MetastoreServiceSettings.newBuilder() + .setCredentialsProvider(FixedCredentialsProvider.create(myCredentials)) + .build(); + MetastoreServiceClient metastoreServiceClient = + MetastoreServiceClient.create(metastoreServiceSettings); + } +} +// [END biglake_v1alpha1_generated_MetastoreService_Create_SetCredentialsProvider_sync] diff --git a/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/create/SyncCreateSetCredentialsProvider1.java b/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/create/SyncCreateSetCredentialsProvider1.java new file mode 100644 index 000000000000..5aa03d510c33 --- /dev/null +++ b/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/create/SyncCreateSetCredentialsProvider1.java @@ -0,0 +1,41 @@ +/* + * Copyright 2022 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.biglake.v1alpha1.samples; + +// [START biglake_v1alpha1_generated_MetastoreService_Create_SetCredentialsProvider1_sync] +import com.google.cloud.bigquery.biglake.v1alpha1.MetastoreServiceClient; +import com.google.cloud.bigquery.biglake.v1alpha1.MetastoreServiceSettings; + +public class SyncCreateSetCredentialsProvider1 { + + public static void main(String[] args) throws Exception { + syncCreateSetCredentialsProvider1(); + } + + public static void syncCreateSetCredentialsProvider1() throws Exception { + // This snippet has been automatically generated and should be regarded as a code template only. + // It will require modifications to work: + // - It may require correct/in-range values for request initialization. 
+ // - It may require specifying regional endpoints when creating the service client as shown in + // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library + MetastoreServiceSettings metastoreServiceSettings = + MetastoreServiceSettings.newHttpJsonBuilder().build(); + MetastoreServiceClient metastoreServiceClient = + MetastoreServiceClient.create(metastoreServiceSettings); + } +} +// [END biglake_v1alpha1_generated_MetastoreService_Create_SetCredentialsProvider1_sync] diff --git a/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/create/SyncCreateSetEndpoint.java b/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/create/SyncCreateSetEndpoint.java new file mode 100644 index 000000000000..c3bfcdab3286 --- /dev/null +++ b/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/create/SyncCreateSetEndpoint.java @@ -0,0 +1,42 @@ +/* + * Copyright 2022 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.biglake.v1alpha1.samples; + +// [START biglake_v1alpha1_generated_MetastoreService_Create_SetEndpoint_sync] +import com.google.cloud.bigquery.biglake.v1alpha1.MetastoreServiceClient; +import com.google.cloud.bigquery.biglake.v1alpha1.MetastoreServiceSettings; +import com.google.cloud.bigquery.biglake.v1alpha1.myEndpoint; + +public class SyncCreateSetEndpoint { + + public static void main(String[] args) throws Exception { + syncCreateSetEndpoint(); + } + + public static void syncCreateSetEndpoint() throws Exception { + // This snippet has been automatically generated and should be regarded as a code template only. + // It will require modifications to work: + // - It may require correct/in-range values for request initialization. 
+    //  - It may require specifying regional endpoints when creating the service client as shown in
+    //      https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
+    MetastoreServiceSettings metastoreServiceSettings =
+        MetastoreServiceSettings.newBuilder().setEndpoint(myEndpoint).build();
+    MetastoreServiceClient metastoreServiceClient =
+        MetastoreServiceClient.create(metastoreServiceSettings);
+  }
+}
+// [END biglake_v1alpha1_generated_MetastoreService_Create_SetEndpoint_sync]
diff --git a/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/createcatalog/AsyncCreateCatalog.java b/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/createcatalog/AsyncCreateCatalog.java
new file mode 100644
index 000000000000..b43fa9c99c46
--- /dev/null
+++ b/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/createcatalog/AsyncCreateCatalog.java
@@ -0,0 +1,52 @@
+/*
+ * Copyright 2022 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.google.cloud.bigquery.biglake.v1alpha1.samples;
+
+// [START biglake_v1alpha1_generated_MetastoreService_CreateCatalog_async]
+import com.google.api.core.ApiFuture;
+import com.google.cloud.bigquery.biglake.v1alpha1.Catalog;
+import com.google.cloud.bigquery.biglake.v1alpha1.CreateCatalogRequest;
+import com.google.cloud.bigquery.biglake.v1alpha1.LocationName;
+import com.google.cloud.bigquery.biglake.v1alpha1.MetastoreServiceClient;
+
+public class AsyncCreateCatalog {
+
+  public static void main(String[] args) throws Exception {
+    asyncCreateCatalog();
+  }
+
+  public static void asyncCreateCatalog() throws Exception {
+    // This snippet has been automatically generated and should be regarded as a code template only.
+    // It will require modifications to work:
+    //  - It may require correct/in-range values for request initialization.
+    //  - It may require specifying regional endpoints when creating the service client as shown in
+    //      https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
+    try (MetastoreServiceClient metastoreServiceClient = MetastoreServiceClient.create()) {
+      CreateCatalogRequest request =
+          CreateCatalogRequest.newBuilder()
+              .setParent(LocationName.of("[PROJECT]", "[LOCATION]").toString())
+              .setCatalog(Catalog.newBuilder().build())
+              .setCatalogId("catalogId1455933204")
+              .build();
+      ApiFuture<Catalog> future =
+          metastoreServiceClient.createCatalogCallable().futureCall(request);
+      // Do something.
+ Catalog response = future.get(); + } + } +} +// [END biglake_v1alpha1_generated_MetastoreService_CreateCatalog_async] diff --git a/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/createcatalog/SyncCreateCatalog.java b/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/createcatalog/SyncCreateCatalog.java new file mode 100644 index 000000000000..74a4637c044b --- /dev/null +++ b/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/createcatalog/SyncCreateCatalog.java @@ -0,0 +1,48 @@ +/* + * Copyright 2022 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.biglake.v1alpha1.samples; + +// [START biglake_v1alpha1_generated_MetastoreService_CreateCatalog_sync] +import com.google.cloud.bigquery.biglake.v1alpha1.Catalog; +import com.google.cloud.bigquery.biglake.v1alpha1.CreateCatalogRequest; +import com.google.cloud.bigquery.biglake.v1alpha1.LocationName; +import com.google.cloud.bigquery.biglake.v1alpha1.MetastoreServiceClient; + +public class SyncCreateCatalog { + + public static void main(String[] args) throws Exception { + syncCreateCatalog(); + } + + public static void syncCreateCatalog() throws Exception { + // This snippet has been automatically generated and should be regarded as a code template only. + // It will require modifications to work: + // - It may require correct/in-range values for request initialization. + // - It may require specifying regional endpoints when creating the service client as shown in + // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library + try (MetastoreServiceClient metastoreServiceClient = MetastoreServiceClient.create()) { + CreateCatalogRequest request = + CreateCatalogRequest.newBuilder() + .setParent(LocationName.of("[PROJECT]", "[LOCATION]").toString()) + .setCatalog(Catalog.newBuilder().build()) + .setCatalogId("catalogId1455933204") + .build(); + Catalog response = metastoreServiceClient.createCatalog(request); + } + } +} +// [END biglake_v1alpha1_generated_MetastoreService_CreateCatalog_sync] diff --git a/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/createcatalog/SyncCreateCatalogLocationnameCatalogString.java b/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/createcatalog/SyncCreateCatalogLocationnameCatalogString.java new file mode 100644 index 000000000000..28c7759a297d --- /dev/null +++ b/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/createcatalog/SyncCreateCatalogLocationnameCatalogString.java @@ -0,0 +1,44 @@ +/* + * Copyright 2022 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.biglake.v1alpha1.samples; + +// [START biglake_v1alpha1_generated_MetastoreService_CreateCatalog_LocationnameCatalogString_sync] +import com.google.cloud.bigquery.biglake.v1alpha1.Catalog; +import com.google.cloud.bigquery.biglake.v1alpha1.LocationName; +import com.google.cloud.bigquery.biglake.v1alpha1.MetastoreServiceClient; + +public class SyncCreateCatalogLocationnameCatalogString { + + public static void main(String[] args) throws Exception { + syncCreateCatalogLocationnameCatalogString(); + } + + public static void syncCreateCatalogLocationnameCatalogString() throws Exception { + // This snippet has been automatically generated and should be regarded as a code template only. + // It will require modifications to work: + // - It may require correct/in-range values for request initialization. + // - It may require specifying regional endpoints when creating the service client as shown in + // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library + try (MetastoreServiceClient metastoreServiceClient = MetastoreServiceClient.create()) { + LocationName parent = LocationName.of("[PROJECT]", "[LOCATION]"); + Catalog catalog = Catalog.newBuilder().build(); + String catalogId = "catalogId1455933204"; + Catalog response = metastoreServiceClient.createCatalog(parent, catalog, catalogId); + } + } +} +// [END biglake_v1alpha1_generated_MetastoreService_CreateCatalog_LocationnameCatalogString_sync] diff --git a/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/createcatalog/SyncCreateCatalogStringCatalogString.java b/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/createcatalog/SyncCreateCatalogStringCatalogString.java new file mode 100644 index 000000000000..f23cb7439b0d --- /dev/null +++ b/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/createcatalog/SyncCreateCatalogStringCatalogString.java @@ -0,0 +1,44 @@ +/* + * Copyright 2022 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.cloud.bigquery.biglake.v1alpha1.samples; + +// [START biglake_v1alpha1_generated_MetastoreService_CreateCatalog_StringCatalogString_sync] +import com.google.cloud.bigquery.biglake.v1alpha1.Catalog; +import com.google.cloud.bigquery.biglake.v1alpha1.LocationName; +import com.google.cloud.bigquery.biglake.v1alpha1.MetastoreServiceClient; + +public class SyncCreateCatalogStringCatalogString { + + public static void main(String[] args) throws Exception { + syncCreateCatalogStringCatalogString(); + } + + public static void syncCreateCatalogStringCatalogString() throws Exception { + // This snippet has been automatically generated and should be regarded as a code template only. + // It will require modifications to work: + // - It may require correct/in-range values for request initialization. + // - It may require specifying regional endpoints when creating the service client as shown in + // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library + try (MetastoreServiceClient metastoreServiceClient = MetastoreServiceClient.create()) { + String parent = LocationName.of("[PROJECT]", "[LOCATION]").toString(); + Catalog catalog = Catalog.newBuilder().build(); + String catalogId = "catalogId1455933204"; + Catalog response = metastoreServiceClient.createCatalog(parent, catalog, catalogId); + } + } +} +// [END biglake_v1alpha1_generated_MetastoreService_CreateCatalog_StringCatalogString_sync] diff --git a/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/createdatabase/AsyncCreateDatabase.java b/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/createdatabase/AsyncCreateDatabase.java new file mode 100644 index 000000000000..a5da62c350d3 --- /dev/null +++ b/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/createdatabase/AsyncCreateDatabase.java @@ -0,0 +1,52 @@ +/* + * Copyright 2022 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.biglake.v1alpha1.samples; + +// [START biglake_v1alpha1_generated_MetastoreService_CreateDatabase_async] +import com.google.api.core.ApiFuture; +import com.google.cloud.bigquery.biglake.v1alpha1.CatalogName; +import com.google.cloud.bigquery.biglake.v1alpha1.CreateDatabaseRequest; +import com.google.cloud.bigquery.biglake.v1alpha1.Database; +import com.google.cloud.bigquery.biglake.v1alpha1.MetastoreServiceClient; + +public class AsyncCreateDatabase { + + public static void main(String[] args) throws Exception { + asyncCreateDatabase(); + } + + public static void asyncCreateDatabase() throws Exception { + // This snippet has been automatically generated and should be regarded as a code template only. + // It will require modifications to work: + // - It may require correct/in-range values for request initialization. 
+    //  - It may require specifying regional endpoints when creating the service client as shown in
+    //      https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
+    try (MetastoreServiceClient metastoreServiceClient = MetastoreServiceClient.create()) {
+      CreateDatabaseRequest request =
+          CreateDatabaseRequest.newBuilder()
+              .setParent(CatalogName.of("[PROJECT]", "[LOCATION]", "[CATALOG]").toString())
+              .setDatabase(Database.newBuilder().build())
+              .setDatabaseId("databaseId1688905718")
+              .build();
+      ApiFuture<Database> future =
+          metastoreServiceClient.createDatabaseCallable().futureCall(request);
+      // Do something.
+      Database response = future.get();
+    }
+  }
+}
+// [END biglake_v1alpha1_generated_MetastoreService_CreateDatabase_async]
diff --git a/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/createdatabase/SyncCreateDatabase.java b/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/createdatabase/SyncCreateDatabase.java
new file mode 100644
index 000000000000..199293daa4de
--- /dev/null
+++ b/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/createdatabase/SyncCreateDatabase.java
@@ -0,0 +1,48 @@
+/*
+ * Copyright 2022 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.google.cloud.bigquery.biglake.v1alpha1.samples;
+
+// [START biglake_v1alpha1_generated_MetastoreService_CreateDatabase_sync]
+import com.google.cloud.bigquery.biglake.v1alpha1.CatalogName;
+import com.google.cloud.bigquery.biglake.v1alpha1.CreateDatabaseRequest;
+import com.google.cloud.bigquery.biglake.v1alpha1.Database;
+import com.google.cloud.bigquery.biglake.v1alpha1.MetastoreServiceClient;
+
+public class SyncCreateDatabase {
+
+  public static void main(String[] args) throws Exception {
+    syncCreateDatabase();
+  }
+
+  public static void syncCreateDatabase() throws Exception {
+    // This snippet has been automatically generated and should be regarded as a code template only.
+    // It will require modifications to work:
+    //  - It may require correct/in-range values for request initialization.
+ // - It may require specifying regional endpoints when creating the service client as shown in + // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library + try (MetastoreServiceClient metastoreServiceClient = MetastoreServiceClient.create()) { + CreateDatabaseRequest request = + CreateDatabaseRequest.newBuilder() + .setParent(CatalogName.of("[PROJECT]", "[LOCATION]", "[CATALOG]").toString()) + .setDatabase(Database.newBuilder().build()) + .setDatabaseId("databaseId1688905718") + .build(); + Database response = metastoreServiceClient.createDatabase(request); + } + } +} +// [END biglake_v1alpha1_generated_MetastoreService_CreateDatabase_sync] diff --git a/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/createdatabase/SyncCreateDatabaseCatalognameDatabaseString.java b/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/createdatabase/SyncCreateDatabaseCatalognameDatabaseString.java new file mode 100644 index 000000000000..d448665ebd86 --- /dev/null +++ b/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/createdatabase/SyncCreateDatabaseCatalognameDatabaseString.java @@ -0,0 +1,44 @@ +/* + * Copyright 2022 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.biglake.v1alpha1.samples; + +// [START biglake_v1alpha1_generated_MetastoreService_CreateDatabase_CatalognameDatabaseString_sync] +import com.google.cloud.bigquery.biglake.v1alpha1.CatalogName; +import com.google.cloud.bigquery.biglake.v1alpha1.Database; +import com.google.cloud.bigquery.biglake.v1alpha1.MetastoreServiceClient; + +public class SyncCreateDatabaseCatalognameDatabaseString { + + public static void main(String[] args) throws Exception { + syncCreateDatabaseCatalognameDatabaseString(); + } + + public static void syncCreateDatabaseCatalognameDatabaseString() throws Exception { + // This snippet has been automatically generated and should be regarded as a code template only. + // It will require modifications to work: + // - It may require correct/in-range values for request initialization. 
+ // - It may require specifying regional endpoints when creating the service client as shown in + // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library + try (MetastoreServiceClient metastoreServiceClient = MetastoreServiceClient.create()) { + CatalogName parent = CatalogName.of("[PROJECT]", "[LOCATION]", "[CATALOG]"); + Database database = Database.newBuilder().build(); + String databaseId = "databaseId1688905718"; + Database response = metastoreServiceClient.createDatabase(parent, database, databaseId); + } + } +} +// [END biglake_v1alpha1_generated_MetastoreService_CreateDatabase_CatalognameDatabaseString_sync] diff --git a/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/createdatabase/SyncCreateDatabaseStringDatabaseString.java b/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/createdatabase/SyncCreateDatabaseStringDatabaseString.java new file mode 100644 index 000000000000..b70c5b7cde62 --- /dev/null +++ b/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/createdatabase/SyncCreateDatabaseStringDatabaseString.java @@ -0,0 +1,44 @@ +/* + * Copyright 2022 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.biglake.v1alpha1.samples; + +// [START biglake_v1alpha1_generated_MetastoreService_CreateDatabase_StringDatabaseString_sync] +import com.google.cloud.bigquery.biglake.v1alpha1.CatalogName; +import com.google.cloud.bigquery.biglake.v1alpha1.Database; +import com.google.cloud.bigquery.biglake.v1alpha1.MetastoreServiceClient; + +public class SyncCreateDatabaseStringDatabaseString { + + public static void main(String[] args) throws Exception { + syncCreateDatabaseStringDatabaseString(); + } + + public static void syncCreateDatabaseStringDatabaseString() throws Exception { + // This snippet has been automatically generated and should be regarded as a code template only. + // It will require modifications to work: + // - It may require correct/in-range values for request initialization. 
+    //  - It may require specifying regional endpoints when creating the service client as shown in
+    //      https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
+    try (MetastoreServiceClient metastoreServiceClient = MetastoreServiceClient.create()) {
+      String parent = CatalogName.of("[PROJECT]", "[LOCATION]", "[CATALOG]").toString();
+      Database database = Database.newBuilder().build();
+      String databaseId = "databaseId1688905718";
+      Database response = metastoreServiceClient.createDatabase(parent, database, databaseId);
+    }
+  }
+}
+// [END biglake_v1alpha1_generated_MetastoreService_CreateDatabase_StringDatabaseString_sync]
diff --git a/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/createlock/AsyncCreateLock.java b/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/createlock/AsyncCreateLock.java
new file mode 100644
index 000000000000..a339aceb86b7
--- /dev/null
+++ b/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/createlock/AsyncCreateLock.java
@@ -0,0 +1,51 @@
+/*
+ * Copyright 2022 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.google.cloud.bigquery.biglake.v1alpha1.samples;
+
+// [START biglake_v1alpha1_generated_MetastoreService_CreateLock_async]
+import com.google.api.core.ApiFuture;
+import com.google.cloud.bigquery.biglake.v1alpha1.CreateLockRequest;
+import com.google.cloud.bigquery.biglake.v1alpha1.DatabaseName;
+import com.google.cloud.bigquery.biglake.v1alpha1.Lock;
+import com.google.cloud.bigquery.biglake.v1alpha1.MetastoreServiceClient;
+
+public class AsyncCreateLock {
+
+  public static void main(String[] args) throws Exception {
+    asyncCreateLock();
+  }
+
+  public static void asyncCreateLock() throws Exception {
+    // This snippet has been automatically generated and should be regarded as a code template only.
+    // It will require modifications to work:
+    //  - It may require correct/in-range values for request initialization.
+    //  - It may require specifying regional endpoints when creating the service client as shown in
+    //      https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
+    try (MetastoreServiceClient metastoreServiceClient = MetastoreServiceClient.create()) {
+      CreateLockRequest request =
+          CreateLockRequest.newBuilder()
+              .setParent(
+                  DatabaseName.of("[PROJECT]", "[LOCATION]", "[CATALOG]", "[DATABASE]").toString())
+              .setLock(Lock.newBuilder().build())
+              .build();
+      ApiFuture<Lock> future = metastoreServiceClient.createLockCallable().futureCall(request);
+      // Do something.
+ Lock response = future.get(); + } + } +} +// [END biglake_v1alpha1_generated_MetastoreService_CreateLock_async] diff --git a/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/createlock/SyncCreateLock.java b/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/createlock/SyncCreateLock.java new file mode 100644 index 000000000000..26ca305ae582 --- /dev/null +++ b/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/createlock/SyncCreateLock.java @@ -0,0 +1,48 @@ +/* + * Copyright 2022 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.biglake.v1alpha1.samples; + +// [START biglake_v1alpha1_generated_MetastoreService_CreateLock_sync] +import com.google.cloud.bigquery.biglake.v1alpha1.CreateLockRequest; +import com.google.cloud.bigquery.biglake.v1alpha1.DatabaseName; +import com.google.cloud.bigquery.biglake.v1alpha1.Lock; +import com.google.cloud.bigquery.biglake.v1alpha1.MetastoreServiceClient; + +public class SyncCreateLock { + + public static void main(String[] args) throws Exception { + syncCreateLock(); + } + + public static void syncCreateLock() throws Exception { + // This snippet has been automatically generated and should be regarded as a code template only. + // It will require modifications to work: + // - It may require correct/in-range values for request initialization. + // - It may require specifying regional endpoints when creating the service client as shown in + // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library + try (MetastoreServiceClient metastoreServiceClient = MetastoreServiceClient.create()) { + CreateLockRequest request = + CreateLockRequest.newBuilder() + .setParent( + DatabaseName.of("[PROJECT]", "[LOCATION]", "[CATALOG]", "[DATABASE]").toString()) + .setLock(Lock.newBuilder().build()) + .build(); + Lock response = metastoreServiceClient.createLock(request); + } + } +} +// [END biglake_v1alpha1_generated_MetastoreService_CreateLock_sync] diff --git a/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/createlock/SyncCreateLockDatabasenameLock.java b/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/createlock/SyncCreateLockDatabasenameLock.java new file mode 100644 index 000000000000..417a4c648246 --- /dev/null +++ b/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/createlock/SyncCreateLockDatabasenameLock.java @@ -0,0 +1,43 @@ +/* + * Copyright 2022 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.biglake.v1alpha1.samples; + +// [START biglake_v1alpha1_generated_MetastoreService_CreateLock_DatabasenameLock_sync] +import com.google.cloud.bigquery.biglake.v1alpha1.DatabaseName; +import com.google.cloud.bigquery.biglake.v1alpha1.Lock; +import com.google.cloud.bigquery.biglake.v1alpha1.MetastoreServiceClient; + +public class SyncCreateLockDatabasenameLock { + + public static void main(String[] args) throws Exception { + syncCreateLockDatabasenameLock(); + } + + public static void syncCreateLockDatabasenameLock() throws Exception { + // This snippet has been automatically generated and should be regarded as a code template only. + // It will require modifications to work: + // - It may require correct/in-range values for request initialization. + // - It may require specifying regional endpoints when creating the service client as shown in + // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library + try (MetastoreServiceClient metastoreServiceClient = MetastoreServiceClient.create()) { + DatabaseName parent = DatabaseName.of("[PROJECT]", "[LOCATION]", "[CATALOG]", "[DATABASE]"); + Lock lock = Lock.newBuilder().build(); + Lock response = metastoreServiceClient.createLock(parent, lock); + } + } +} +// [END biglake_v1alpha1_generated_MetastoreService_CreateLock_DatabasenameLock_sync] diff --git a/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/createlock/SyncCreateLockStringLock.java b/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/createlock/SyncCreateLockStringLock.java new file mode 100644 index 000000000000..8ec2b808a961 --- /dev/null +++ b/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/createlock/SyncCreateLockStringLock.java @@ -0,0 +1,44 @@ +/* + * Copyright 2022 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.cloud.bigquery.biglake.v1alpha1.samples; + +// [START biglake_v1alpha1_generated_MetastoreService_CreateLock_StringLock_sync] +import com.google.cloud.bigquery.biglake.v1alpha1.DatabaseName; +import com.google.cloud.bigquery.biglake.v1alpha1.Lock; +import com.google.cloud.bigquery.biglake.v1alpha1.MetastoreServiceClient; + +public class SyncCreateLockStringLock { + + public static void main(String[] args) throws Exception { + syncCreateLockStringLock(); + } + + public static void syncCreateLockStringLock() throws Exception { + // This snippet has been automatically generated and should be regarded as a code template only. + // It will require modifications to work: + // - It may require correct/in-range values for request initialization. + // - It may require specifying regional endpoints when creating the service client as shown in + // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library + try (MetastoreServiceClient metastoreServiceClient = MetastoreServiceClient.create()) { + String parent = + DatabaseName.of("[PROJECT]", "[LOCATION]", "[CATALOG]", "[DATABASE]").toString(); + Lock lock = Lock.newBuilder().build(); + Lock response = metastoreServiceClient.createLock(parent, lock); + } + } +} +// [END biglake_v1alpha1_generated_MetastoreService_CreateLock_StringLock_sync] diff --git a/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/createtable/AsyncCreateTable.java b/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/createtable/AsyncCreateTable.java new file mode 100644 index 000000000000..d7cf448af7aa --- /dev/null +++ b/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/createtable/AsyncCreateTable.java @@ -0,0 +1,52 @@ +/* + * Copyright 2022 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.biglake.v1alpha1.samples; + +// [START biglake_v1alpha1_generated_MetastoreService_CreateTable_async] +import com.google.api.core.ApiFuture; +import com.google.cloud.bigquery.biglake.v1alpha1.CreateTableRequest; +import com.google.cloud.bigquery.biglake.v1alpha1.DatabaseName; +import com.google.cloud.bigquery.biglake.v1alpha1.MetastoreServiceClient; +import com.google.cloud.bigquery.biglake.v1alpha1.Table; + +public class AsyncCreateTable { + + public static void main(String[] args) throws Exception { + asyncCreateTable(); + } + + public static void asyncCreateTable() throws Exception { + // This snippet has been automatically generated and should be regarded as a code template only. + // It will require modifications to work: + // - It may require correct/in-range values for request initialization. 
+    //  - It may require specifying regional endpoints when creating the service client as shown in
+    //      https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
+    try (MetastoreServiceClient metastoreServiceClient = MetastoreServiceClient.create()) {
+      CreateTableRequest request =
+          CreateTableRequest.newBuilder()
+              .setParent(
+                  DatabaseName.of("[PROJECT]", "[LOCATION]", "[CATALOG]", "[DATABASE]").toString())
+              .setTable(Table.newBuilder().build())
+              .setTableId("tableId-1552905847")
+              .build();
+      ApiFuture<Table> future = metastoreServiceClient.createTableCallable().futureCall(request);
+      // Do something.
+      Table response = future.get();
+    }
+  }
+}
+// [END biglake_v1alpha1_generated_MetastoreService_CreateTable_async]
diff --git a/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/createtable/SyncCreateTable.java b/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/createtable/SyncCreateTable.java
new file mode 100644
index 000000000000..6c06449b669f
--- /dev/null
+++ b/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/createtable/SyncCreateTable.java
@@ -0,0 +1,49 @@
+/*
+ * Copyright 2022 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.google.cloud.bigquery.biglake.v1alpha1.samples;
+
+// [START biglake_v1alpha1_generated_MetastoreService_CreateTable_sync]
+import com.google.cloud.bigquery.biglake.v1alpha1.CreateTableRequest;
+import com.google.cloud.bigquery.biglake.v1alpha1.DatabaseName;
+import com.google.cloud.bigquery.biglake.v1alpha1.MetastoreServiceClient;
+import com.google.cloud.bigquery.biglake.v1alpha1.Table;
+
+public class SyncCreateTable {
+
+  public static void main(String[] args) throws Exception {
+    syncCreateTable();
+  }
+
+  public static void syncCreateTable() throws Exception {
+    // This snippet has been automatically generated and should be regarded as a code template only.
+    // It will require modifications to work:
+    //  - It may require correct/in-range values for request initialization.
+    //  - It may require specifying regional endpoints when creating the service client as shown in
+    //      https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
+    try (MetastoreServiceClient metastoreServiceClient = MetastoreServiceClient.create()) {
+      CreateTableRequest request =
+          CreateTableRequest.newBuilder()
+              .setParent(
+                  DatabaseName.of("[PROJECT]", "[LOCATION]", "[CATALOG]", "[DATABASE]").toString())
+              .setTable(Table.newBuilder().build())
+              .setTableId("tableId-1552905847")
+              .build();
+      Table response = metastoreServiceClient.createTable(request);
+    }
+  }
+}
+// [END biglake_v1alpha1_generated_MetastoreService_CreateTable_sync]
diff --git a/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/createtable/SyncCreateTableDatabasenameTableString.java b/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/createtable/SyncCreateTableDatabasenameTableString.java
new file mode 100644
index 000000000000..65979a0dadc5
--- /dev/null
+++ b/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/createtable/SyncCreateTableDatabasenameTableString.java
@@ -0,0 +1,44 @@
+/*
+ * Copyright 2022 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.biglake.v1alpha1.samples; + +// [START biglake_v1alpha1_generated_MetastoreService_CreateTable_DatabasenameTableString_sync] +import com.google.cloud.bigquery.biglake.v1alpha1.DatabaseName; +import com.google.cloud.bigquery.biglake.v1alpha1.MetastoreServiceClient; +import com.google.cloud.bigquery.biglake.v1alpha1.Table; + +public class SyncCreateTableDatabasenameTableString { + + public static void main(String[] args) throws Exception { + syncCreateTableDatabasenameTableString(); + } + + public static void syncCreateTableDatabasenameTableString() throws Exception { + // This snippet has been automatically generated and should be regarded as a code template only. + // It will require modifications to work: + // - It may require correct/in-range values for request initialization. + // - It may require specifying regional endpoints when creating the service client as shown in + // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library + try (MetastoreServiceClient metastoreServiceClient = MetastoreServiceClient.create()) { + DatabaseName parent = DatabaseName.of("[PROJECT]", "[LOCATION]", "[CATALOG]", "[DATABASE]"); + Table table = Table.newBuilder().build(); + String tableId = "tableId-1552905847"; + Table response = metastoreServiceClient.createTable(parent, table, tableId); + } + } +} +// [END biglake_v1alpha1_generated_MetastoreService_CreateTable_DatabasenameTableString_sync] diff --git a/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/createtable/SyncCreateTableStringTableString.java b/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/createtable/SyncCreateTableStringTableString.java new file mode 100644 index 000000000000..dd60f83d3bf0 --- /dev/null +++ b/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/createtable/SyncCreateTableStringTableString.java @@ -0,0 +1,45 @@ +/* + * Copyright 2022 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.cloud.bigquery.biglake.v1alpha1.samples; + +// [START biglake_v1alpha1_generated_MetastoreService_CreateTable_StringTableString_sync] +import com.google.cloud.bigquery.biglake.v1alpha1.DatabaseName; +import com.google.cloud.bigquery.biglake.v1alpha1.MetastoreServiceClient; +import com.google.cloud.bigquery.biglake.v1alpha1.Table; + +public class SyncCreateTableStringTableString { + + public static void main(String[] args) throws Exception { + syncCreateTableStringTableString(); + } + + public static void syncCreateTableStringTableString() throws Exception { + // This snippet has been automatically generated and should be regarded as a code template only. + // It will require modifications to work: + // - It may require correct/in-range values for request initialization. + // - It may require specifying regional endpoints when creating the service client as shown in + // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library + try (MetastoreServiceClient metastoreServiceClient = MetastoreServiceClient.create()) { + String parent = + DatabaseName.of("[PROJECT]", "[LOCATION]", "[CATALOG]", "[DATABASE]").toString(); + Table table = Table.newBuilder().build(); + String tableId = "tableId-1552905847"; + Table response = metastoreServiceClient.createTable(parent, table, tableId); + } + } +} +// [END biglake_v1alpha1_generated_MetastoreService_CreateTable_StringTableString_sync] diff --git a/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/deletecatalog/AsyncDeleteCatalog.java b/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/deletecatalog/AsyncDeleteCatalog.java new file mode 100644 index 000000000000..0889a7728cbc --- /dev/null +++ b/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/deletecatalog/AsyncDeleteCatalog.java @@ -0,0 +1,50 @@ +/* + * Copyright 2022 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.biglake.v1alpha1.samples; + +// [START biglake_v1alpha1_generated_MetastoreService_DeleteCatalog_async] +import com.google.api.core.ApiFuture; +import com.google.cloud.bigquery.biglake.v1alpha1.Catalog; +import com.google.cloud.bigquery.biglake.v1alpha1.CatalogName; +import com.google.cloud.bigquery.biglake.v1alpha1.DeleteCatalogRequest; +import com.google.cloud.bigquery.biglake.v1alpha1.MetastoreServiceClient; + +public class AsyncDeleteCatalog { + + public static void main(String[] args) throws Exception { + asyncDeleteCatalog(); + } + + public static void asyncDeleteCatalog() throws Exception { + // This snippet has been automatically generated and should be regarded as a code template only. + // It will require modifications to work: + // - It may require correct/in-range values for request initialization. 
+    //  - It may require specifying regional endpoints when creating the service client as shown in
+    //      https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
+    try (MetastoreServiceClient metastoreServiceClient = MetastoreServiceClient.create()) {
+      DeleteCatalogRequest request =
+          DeleteCatalogRequest.newBuilder()
+              .setName(CatalogName.of("[PROJECT]", "[LOCATION]", "[CATALOG]").toString())
+              .build();
+      ApiFuture<Catalog> future =
+          metastoreServiceClient.deleteCatalogCallable().futureCall(request);
+      // Do something.
+      Catalog response = future.get();
+    }
+  }
+}
+// [END biglake_v1alpha1_generated_MetastoreService_DeleteCatalog_async]
diff --git a/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/deletecatalog/SyncDeleteCatalog.java b/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/deletecatalog/SyncDeleteCatalog.java
new file mode 100644
index 000000000000..7eeee0c542fd
--- /dev/null
+++ b/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/deletecatalog/SyncDeleteCatalog.java
@@ -0,0 +1,46 @@
+/*
+ * Copyright 2022 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.google.cloud.bigquery.biglake.v1alpha1.samples;
+
+// [START biglake_v1alpha1_generated_MetastoreService_DeleteCatalog_sync]
+import com.google.cloud.bigquery.biglake.v1alpha1.Catalog;
+import com.google.cloud.bigquery.biglake.v1alpha1.CatalogName;
+import com.google.cloud.bigquery.biglake.v1alpha1.DeleteCatalogRequest;
+import com.google.cloud.bigquery.biglake.v1alpha1.MetastoreServiceClient;
+
+public class SyncDeleteCatalog {
+
+  public static void main(String[] args) throws Exception {
+    syncDeleteCatalog();
+  }
+
+  public static void syncDeleteCatalog() throws Exception {
+    // This snippet has been automatically generated and should be regarded as a code template only.
+    // It will require modifications to work:
+    //  - It may require correct/in-range values for request initialization.
+ // - It may require specifying regional endpoints when creating the service client as shown in + // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library + try (MetastoreServiceClient metastoreServiceClient = MetastoreServiceClient.create()) { + DeleteCatalogRequest request = + DeleteCatalogRequest.newBuilder() + .setName(CatalogName.of("[PROJECT]", "[LOCATION]", "[CATALOG]").toString()) + .build(); + Catalog response = metastoreServiceClient.deleteCatalog(request); + } + } +} +// [END biglake_v1alpha1_generated_MetastoreService_DeleteCatalog_sync] diff --git a/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/deletecatalog/SyncDeleteCatalogCatalogname.java b/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/deletecatalog/SyncDeleteCatalogCatalogname.java new file mode 100644 index 000000000000..45c5c6589255 --- /dev/null +++ b/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/deletecatalog/SyncDeleteCatalogCatalogname.java @@ -0,0 +1,42 @@ +/* + * Copyright 2022 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.biglake.v1alpha1.samples; + +// [START biglake_v1alpha1_generated_MetastoreService_DeleteCatalog_Catalogname_sync] +import com.google.cloud.bigquery.biglake.v1alpha1.Catalog; +import com.google.cloud.bigquery.biglake.v1alpha1.CatalogName; +import com.google.cloud.bigquery.biglake.v1alpha1.MetastoreServiceClient; + +public class SyncDeleteCatalogCatalogname { + + public static void main(String[] args) throws Exception { + syncDeleteCatalogCatalogname(); + } + + public static void syncDeleteCatalogCatalogname() throws Exception { + // This snippet has been automatically generated and should be regarded as a code template only. + // It will require modifications to work: + // - It may require correct/in-range values for request initialization. 
+ // - It may require specifying regional endpoints when creating the service client as shown in + // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library + try (MetastoreServiceClient metastoreServiceClient = MetastoreServiceClient.create()) { + CatalogName name = CatalogName.of("[PROJECT]", "[LOCATION]", "[CATALOG]"); + Catalog response = metastoreServiceClient.deleteCatalog(name); + } + } +} +// [END biglake_v1alpha1_generated_MetastoreService_DeleteCatalog_Catalogname_sync] diff --git a/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/deletecatalog/SyncDeleteCatalogString.java b/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/deletecatalog/SyncDeleteCatalogString.java new file mode 100644 index 000000000000..7d0638afbfd4 --- /dev/null +++ b/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/deletecatalog/SyncDeleteCatalogString.java @@ -0,0 +1,42 @@ +/* + * Copyright 2022 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.biglake.v1alpha1.samples; + +// [START biglake_v1alpha1_generated_MetastoreService_DeleteCatalog_String_sync] +import com.google.cloud.bigquery.biglake.v1alpha1.Catalog; +import com.google.cloud.bigquery.biglake.v1alpha1.CatalogName; +import com.google.cloud.bigquery.biglake.v1alpha1.MetastoreServiceClient; + +public class SyncDeleteCatalogString { + + public static void main(String[] args) throws Exception { + syncDeleteCatalogString(); + } + + public static void syncDeleteCatalogString() throws Exception { + // This snippet has been automatically generated and should be regarded as a code template only. + // It will require modifications to work: + // - It may require correct/in-range values for request initialization. 
+    //  - It may require specifying regional endpoints when creating the service client as shown in
+    //      https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
+    try (MetastoreServiceClient metastoreServiceClient = MetastoreServiceClient.create()) {
+      String name = CatalogName.of("[PROJECT]", "[LOCATION]", "[CATALOG]").toString();
+      Catalog response = metastoreServiceClient.deleteCatalog(name);
+    }
+  }
+}
+// [END biglake_v1alpha1_generated_MetastoreService_DeleteCatalog_String_sync]
diff --git a/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/deletedatabase/AsyncDeleteDatabase.java b/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/deletedatabase/AsyncDeleteDatabase.java
new file mode 100644
index 000000000000..440793d6cce0
--- /dev/null
+++ b/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/deletedatabase/AsyncDeleteDatabase.java
@@ -0,0 +1,51 @@
+/*
+ * Copyright 2022 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.google.cloud.bigquery.biglake.v1alpha1.samples;
+
+// [START biglake_v1alpha1_generated_MetastoreService_DeleteDatabase_async]
+import com.google.api.core.ApiFuture;
+import com.google.cloud.bigquery.biglake.v1alpha1.Database;
+import com.google.cloud.bigquery.biglake.v1alpha1.DatabaseName;
+import com.google.cloud.bigquery.biglake.v1alpha1.DeleteDatabaseRequest;
+import com.google.cloud.bigquery.biglake.v1alpha1.MetastoreServiceClient;
+
+public class AsyncDeleteDatabase {
+
+  public static void main(String[] args) throws Exception {
+    asyncDeleteDatabase();
+  }
+
+  public static void asyncDeleteDatabase() throws Exception {
+    // This snippet has been automatically generated and should be regarded as a code template only.
+    // It will require modifications to work:
+    //  - It may require correct/in-range values for request initialization.
+    //  - It may require specifying regional endpoints when creating the service client as shown in
+    //      https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
+    try (MetastoreServiceClient metastoreServiceClient = MetastoreServiceClient.create()) {
+      DeleteDatabaseRequest request =
+          DeleteDatabaseRequest.newBuilder()
+              .setName(
+                  DatabaseName.of("[PROJECT]", "[LOCATION]", "[CATALOG]", "[DATABASE]").toString())
+              .build();
+      ApiFuture<Database> future =
+          metastoreServiceClient.deleteDatabaseCallable().futureCall(request);
+      // Do something.
+ Database response = future.get(); + } + } +} +// [END biglake_v1alpha1_generated_MetastoreService_DeleteDatabase_async] diff --git a/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/deletedatabase/SyncDeleteDatabase.java b/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/deletedatabase/SyncDeleteDatabase.java new file mode 100644 index 000000000000..af79aedbfa4c --- /dev/null +++ b/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/deletedatabase/SyncDeleteDatabase.java @@ -0,0 +1,47 @@ +/* + * Copyright 2022 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.biglake.v1alpha1.samples; + +// [START biglake_v1alpha1_generated_MetastoreService_DeleteDatabase_sync] +import com.google.cloud.bigquery.biglake.v1alpha1.Database; +import com.google.cloud.bigquery.biglake.v1alpha1.DatabaseName; +import com.google.cloud.bigquery.biglake.v1alpha1.DeleteDatabaseRequest; +import com.google.cloud.bigquery.biglake.v1alpha1.MetastoreServiceClient; + +public class SyncDeleteDatabase { + + public static void main(String[] args) throws Exception { + syncDeleteDatabase(); + } + + public static void syncDeleteDatabase() throws Exception { + // This snippet has been automatically generated and should be regarded as a code template only. + // It will require modifications to work: + // - It may require correct/in-range values for request initialization. + // - It may require specifying regional endpoints when creating the service client as shown in + // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library + try (MetastoreServiceClient metastoreServiceClient = MetastoreServiceClient.create()) { + DeleteDatabaseRequest request = + DeleteDatabaseRequest.newBuilder() + .setName( + DatabaseName.of("[PROJECT]", "[LOCATION]", "[CATALOG]", "[DATABASE]").toString()) + .build(); + Database response = metastoreServiceClient.deleteDatabase(request); + } + } +} +// [END biglake_v1alpha1_generated_MetastoreService_DeleteDatabase_sync] diff --git a/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/deletedatabase/SyncDeleteDatabaseDatabasename.java b/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/deletedatabase/SyncDeleteDatabaseDatabasename.java new file mode 100644 index 000000000000..fcc50ba8cebb --- /dev/null +++ b/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/deletedatabase/SyncDeleteDatabaseDatabasename.java @@ -0,0 +1,42 @@ +/* + * Copyright 2022 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.biglake.v1alpha1.samples; + +// [START biglake_v1alpha1_generated_MetastoreService_DeleteDatabase_Databasename_sync] +import com.google.cloud.bigquery.biglake.v1alpha1.Database; +import com.google.cloud.bigquery.biglake.v1alpha1.DatabaseName; +import com.google.cloud.bigquery.biglake.v1alpha1.MetastoreServiceClient; + +public class SyncDeleteDatabaseDatabasename { + + public static void main(String[] args) throws Exception { + syncDeleteDatabaseDatabasename(); + } + + public static void syncDeleteDatabaseDatabasename() throws Exception { + // This snippet has been automatically generated and should be regarded as a code template only. + // It will require modifications to work: + // - It may require correct/in-range values for request initialization. + // - It may require specifying regional endpoints when creating the service client as shown in + // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library + try (MetastoreServiceClient metastoreServiceClient = MetastoreServiceClient.create()) { + DatabaseName name = DatabaseName.of("[PROJECT]", "[LOCATION]", "[CATALOG]", "[DATABASE]"); + Database response = metastoreServiceClient.deleteDatabase(name); + } + } +} +// [END biglake_v1alpha1_generated_MetastoreService_DeleteDatabase_Databasename_sync] diff --git a/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/deletedatabase/SyncDeleteDatabaseString.java b/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/deletedatabase/SyncDeleteDatabaseString.java new file mode 100644 index 000000000000..5ecee74225bd --- /dev/null +++ b/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/deletedatabase/SyncDeleteDatabaseString.java @@ -0,0 +1,43 @@ +/* + * Copyright 2022 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.cloud.bigquery.biglake.v1alpha1.samples; + +// [START biglake_v1alpha1_generated_MetastoreService_DeleteDatabase_String_sync] +import com.google.cloud.bigquery.biglake.v1alpha1.Database; +import com.google.cloud.bigquery.biglake.v1alpha1.DatabaseName; +import com.google.cloud.bigquery.biglake.v1alpha1.MetastoreServiceClient; + +public class SyncDeleteDatabaseString { + + public static void main(String[] args) throws Exception { + syncDeleteDatabaseString(); + } + + public static void syncDeleteDatabaseString() throws Exception { + // This snippet has been automatically generated and should be regarded as a code template only. + // It will require modifications to work: + // - It may require correct/in-range values for request initialization. + // - It may require specifying regional endpoints when creating the service client as shown in + // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library + try (MetastoreServiceClient metastoreServiceClient = MetastoreServiceClient.create()) { + String name = + DatabaseName.of("[PROJECT]", "[LOCATION]", "[CATALOG]", "[DATABASE]").toString(); + Database response = metastoreServiceClient.deleteDatabase(name); + } + } +} +// [END biglake_v1alpha1_generated_MetastoreService_DeleteDatabase_String_sync] diff --git a/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/deletelock/AsyncDeleteLock.java b/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/deletelock/AsyncDeleteLock.java new file mode 100644 index 000000000000..55f8c0c4cbd5 --- /dev/null +++ b/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/deletelock/AsyncDeleteLock.java @@ -0,0 +1,51 @@ +/* + * Copyright 2022 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.biglake.v1alpha1.samples; + +// [START biglake_v1alpha1_generated_MetastoreService_DeleteLock_async] +import com.google.api.core.ApiFuture; +import com.google.cloud.bigquery.biglake.v1alpha1.DeleteLockRequest; +import com.google.cloud.bigquery.biglake.v1alpha1.LockName; +import com.google.cloud.bigquery.biglake.v1alpha1.MetastoreServiceClient; +import com.google.protobuf.Empty; + +public class AsyncDeleteLock { + + public static void main(String[] args) throws Exception { + asyncDeleteLock(); + } + + public static void asyncDeleteLock() throws Exception { + // This snippet has been automatically generated and should be regarded as a code template only. + // It will require modifications to work: + // - It may require correct/in-range values for request initialization. 
+ // - It may require specifying regional endpoints when creating the service client as shown in + // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library + try (MetastoreServiceClient metastoreServiceClient = MetastoreServiceClient.create()) { + DeleteLockRequest request = + DeleteLockRequest.newBuilder() + .setName( + LockName.of("[PROJECT]", "[LOCATION]", "[CATALOG]", "[DATABASE]", "[LOCK]") + .toString()) + .build(); + ApiFuture<Empty> future = metastoreServiceClient.deleteLockCallable().futureCall(request); + // Do something. + future.get(); + } + } +} +// [END biglake_v1alpha1_generated_MetastoreService_DeleteLock_async] diff --git a/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/deletelock/SyncDeleteLock.java b/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/deletelock/SyncDeleteLock.java new file mode 100644 index 000000000000..f589ce0ff075 --- /dev/null +++ b/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/deletelock/SyncDeleteLock.java @@ -0,0 +1,48 @@ +/* + * Copyright 2022 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.biglake.v1alpha1.samples; + +// [START biglake_v1alpha1_generated_MetastoreService_DeleteLock_sync] +import com.google.cloud.bigquery.biglake.v1alpha1.DeleteLockRequest; +import com.google.cloud.bigquery.biglake.v1alpha1.LockName; +import com.google.cloud.bigquery.biglake.v1alpha1.MetastoreServiceClient; +import com.google.protobuf.Empty; + +public class SyncDeleteLock { + + public static void main(String[] args) throws Exception { + syncDeleteLock(); + } + + public static void syncDeleteLock() throws Exception { + // This snippet has been automatically generated and should be regarded as a code template only. + // It will require modifications to work: + // - It may require correct/in-range values for request initialization.
+ // - It may require specifying regional endpoints when creating the service client as shown in + // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library + try (MetastoreServiceClient metastoreServiceClient = MetastoreServiceClient.create()) { + DeleteLockRequest request = + DeleteLockRequest.newBuilder() + .setName( + LockName.of("[PROJECT]", "[LOCATION]", "[CATALOG]", "[DATABASE]", "[LOCK]") + .toString()) + .build(); + metastoreServiceClient.deleteLock(request); + } + } +} +// [END biglake_v1alpha1_generated_MetastoreService_DeleteLock_sync] diff --git a/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/deletelock/SyncDeleteLockLockname.java b/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/deletelock/SyncDeleteLockLockname.java new file mode 100644 index 000000000000..927b4d454dbc --- /dev/null +++ b/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/deletelock/SyncDeleteLockLockname.java @@ -0,0 +1,42 @@ +/* + * Copyright 2022 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.biglake.v1alpha1.samples; + +// [START biglake_v1alpha1_generated_MetastoreService_DeleteLock_Lockname_sync] +import com.google.cloud.bigquery.biglake.v1alpha1.LockName; +import com.google.cloud.bigquery.biglake.v1alpha1.MetastoreServiceClient; +import com.google.protobuf.Empty; + +public class SyncDeleteLockLockname { + + public static void main(String[] args) throws Exception { + syncDeleteLockLockname(); + } + + public static void syncDeleteLockLockname() throws Exception { + // This snippet has been automatically generated and should be regarded as a code template only. + // It will require modifications to work: + // - It may require correct/in-range values for request initialization. 
+ // - It may require specifying regional endpoints when creating the service client as shown in + // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library + try (MetastoreServiceClient metastoreServiceClient = MetastoreServiceClient.create()) { + LockName name = LockName.of("[PROJECT]", "[LOCATION]", "[CATALOG]", "[DATABASE]", "[LOCK]"); + metastoreServiceClient.deleteLock(name); + } + } +} +// [END biglake_v1alpha1_generated_MetastoreService_DeleteLock_Lockname_sync] diff --git a/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/deletelock/SyncDeleteLockString.java b/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/deletelock/SyncDeleteLockString.java new file mode 100644 index 000000000000..f17c34b32bed --- /dev/null +++ b/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/deletelock/SyncDeleteLockString.java @@ -0,0 +1,43 @@ +/* + * Copyright 2022 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.biglake.v1alpha1.samples; + +// [START biglake_v1alpha1_generated_MetastoreService_DeleteLock_String_sync] +import com.google.cloud.bigquery.biglake.v1alpha1.LockName; +import com.google.cloud.bigquery.biglake.v1alpha1.MetastoreServiceClient; +import com.google.protobuf.Empty; + +public class SyncDeleteLockString { + + public static void main(String[] args) throws Exception { + syncDeleteLockString(); + } + + public static void syncDeleteLockString() throws Exception { + // This snippet has been automatically generated and should be regarded as a code template only. + // It will require modifications to work: + // - It may require correct/in-range values for request initialization. + // - It may require specifying regional endpoints when creating the service client as shown in + // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library + try (MetastoreServiceClient metastoreServiceClient = MetastoreServiceClient.create()) { + String name = + LockName.of("[PROJECT]", "[LOCATION]", "[CATALOG]", "[DATABASE]", "[LOCK]").toString(); + metastoreServiceClient.deleteLock(name); + } + } +} +// [END biglake_v1alpha1_generated_MetastoreService_DeleteLock_String_sync] diff --git a/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/deletetable/AsyncDeleteTable.java b/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/deletetable/AsyncDeleteTable.java new file mode 100644 index 000000000000..2e666e2c6cd5 --- /dev/null +++ b/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/deletetable/AsyncDeleteTable.java @@ -0,0 +1,51 @@ +/* + * Copyright 2022 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.biglake.v1alpha1.samples; + +// [START biglake_v1alpha1_generated_MetastoreService_DeleteTable_async] +import com.google.api.core.ApiFuture; +import com.google.cloud.bigquery.biglake.v1alpha1.DeleteTableRequest; +import com.google.cloud.bigquery.biglake.v1alpha1.MetastoreServiceClient; +import com.google.cloud.bigquery.biglake.v1alpha1.Table; +import com.google.cloud.bigquery.biglake.v1alpha1.TableName; + +public class AsyncDeleteTable { + + public static void main(String[] args) throws Exception { + asyncDeleteTable(); + } + + public static void asyncDeleteTable() throws Exception { + // This snippet has been automatically generated and should be regarded as a code template only. + // It will require modifications to work: + // - It may require correct/in-range values for request initialization. + // - It may require specifying regional endpoints when creating the service client as shown in + // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library + try (MetastoreServiceClient metastoreServiceClient = MetastoreServiceClient.create()) { + DeleteTableRequest request = + DeleteTableRequest.newBuilder() + .setName( + TableName.of("[PROJECT]", "[LOCATION]", "[CATALOG]", "[DATABASE]", "[TABLE]") + .toString()) + .build(); + ApiFuture
<Table> future = metastoreServiceClient.deleteTableCallable().futureCall(request); + // Do something. + Table response = future.get(); + } + } +} +// [END biglake_v1alpha1_generated_MetastoreService_DeleteTable_async] diff --git a/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/deletetable/SyncDeleteTable.java b/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/deletetable/SyncDeleteTable.java new file mode 100644 index 000000000000..dbc678a28794 --- /dev/null +++ b/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/deletetable/SyncDeleteTable.java @@ -0,0 +1,48 @@ +/* + * Copyright 2022 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.biglake.v1alpha1.samples; + +// [START biglake_v1alpha1_generated_MetastoreService_DeleteTable_sync] +import com.google.cloud.bigquery.biglake.v1alpha1.DeleteTableRequest; +import com.google.cloud.bigquery.biglake.v1alpha1.MetastoreServiceClient; +import com.google.cloud.bigquery.biglake.v1alpha1.Table; +import com.google.cloud.bigquery.biglake.v1alpha1.TableName; + +public class SyncDeleteTable { + + public static void main(String[] args) throws Exception { + syncDeleteTable(); + } + + public static void syncDeleteTable() throws Exception { + // This snippet has been automatically generated and should be regarded as a code template only. + // It will require modifications to work: + // - It may require correct/in-range values for request initialization. + // - It may require specifying regional endpoints when creating the service client as shown in + // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library + try (MetastoreServiceClient metastoreServiceClient = MetastoreServiceClient.create()) { + DeleteTableRequest request = + DeleteTableRequest.newBuilder() + .setName( + TableName.of("[PROJECT]", "[LOCATION]", "[CATALOG]", "[DATABASE]", "[TABLE]") + .toString()) + .build(); + Table response = metastoreServiceClient.deleteTable(request); + } + } +} +// [END biglake_v1alpha1_generated_MetastoreService_DeleteTable_sync] diff --git a/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/deletetable/SyncDeleteTableString.java b/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/deletetable/SyncDeleteTableString.java new file mode 100644 index 000000000000..4d33e35a2d42 --- /dev/null +++ b/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/deletetable/SyncDeleteTableString.java @@ -0,0 +1,43 @@ +/* + * Copyright 2022 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.biglake.v1alpha1.samples; + +// [START biglake_v1alpha1_generated_MetastoreService_DeleteTable_String_sync] +import com.google.cloud.bigquery.biglake.v1alpha1.MetastoreServiceClient; +import com.google.cloud.bigquery.biglake.v1alpha1.Table; +import com.google.cloud.bigquery.biglake.v1alpha1.TableName; + +public class SyncDeleteTableString { + + public static void main(String[] args) throws Exception { + syncDeleteTableString(); + } + + public static void syncDeleteTableString() throws Exception { + // This snippet has been automatically generated and should be regarded as a code template only. + // It will require modifications to work: + // - It may require correct/in-range values for request initialization. + // - It may require specifying regional endpoints when creating the service client as shown in + // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library + try (MetastoreServiceClient metastoreServiceClient = MetastoreServiceClient.create()) { + String name = + TableName.of("[PROJECT]", "[LOCATION]", "[CATALOG]", "[DATABASE]", "[TABLE]").toString(); + Table response = metastoreServiceClient.deleteTable(name); + } + } +} +// [END biglake_v1alpha1_generated_MetastoreService_DeleteTable_String_sync] diff --git a/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/deletetable/SyncDeleteTableTablename.java b/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/deletetable/SyncDeleteTableTablename.java new file mode 100644 index 000000000000..28cddff8266b --- /dev/null +++ b/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/deletetable/SyncDeleteTableTablename.java @@ -0,0 +1,43 @@ +/* + * Copyright 2022 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.biglake.v1alpha1.samples; + +// [START biglake_v1alpha1_generated_MetastoreService_DeleteTable_Tablename_sync] +import com.google.cloud.bigquery.biglake.v1alpha1.MetastoreServiceClient; +import com.google.cloud.bigquery.biglake.v1alpha1.Table; +import com.google.cloud.bigquery.biglake.v1alpha1.TableName; + +public class SyncDeleteTableTablename { + + public static void main(String[] args) throws Exception { + syncDeleteTableTablename(); + } + + public static void syncDeleteTableTablename() throws Exception { + // This snippet has been automatically generated and should be regarded as a code template only. 
+ // It will require modifications to work: + // - It may require correct/in-range values for request initialization. + // - It may require specifying regional endpoints when creating the service client as shown in + // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library + try (MetastoreServiceClient metastoreServiceClient = MetastoreServiceClient.create()) { + TableName name = + TableName.of("[PROJECT]", "[LOCATION]", "[CATALOG]", "[DATABASE]", "[TABLE]"); + Table response = metastoreServiceClient.deleteTable(name); + } + } +} +// [END biglake_v1alpha1_generated_MetastoreService_DeleteTable_Tablename_sync] diff --git a/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/getcatalog/AsyncGetCatalog.java b/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/getcatalog/AsyncGetCatalog.java new file mode 100644 index 000000000000..3fcf6c55e2cc --- /dev/null +++ b/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/getcatalog/AsyncGetCatalog.java @@ -0,0 +1,49 @@ +/* + * Copyright 2022 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.biglake.v1alpha1.samples; + +// [START biglake_v1alpha1_generated_MetastoreService_GetCatalog_async] +import com.google.api.core.ApiFuture; +import com.google.cloud.bigquery.biglake.v1alpha1.Catalog; +import com.google.cloud.bigquery.biglake.v1alpha1.CatalogName; +import com.google.cloud.bigquery.biglake.v1alpha1.GetCatalogRequest; +import com.google.cloud.bigquery.biglake.v1alpha1.MetastoreServiceClient; + +public class AsyncGetCatalog { + + public static void main(String[] args) throws Exception { + asyncGetCatalog(); + } + + public static void asyncGetCatalog() throws Exception { + // This snippet has been automatically generated and should be regarded as a code template only. + // It will require modifications to work: + // - It may require correct/in-range values for request initialization. + // - It may require specifying regional endpoints when creating the service client as shown in + // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library + try (MetastoreServiceClient metastoreServiceClient = MetastoreServiceClient.create()) { + GetCatalogRequest request = + GetCatalogRequest.newBuilder() + .setName(CatalogName.of("[PROJECT]", "[LOCATION]", "[CATALOG]").toString()) + .build(); + ApiFuture<Catalog> future = metastoreServiceClient.getCatalogCallable().futureCall(request); + // Do something.
+ Catalog response = future.get(); + } + } +} +// [END biglake_v1alpha1_generated_MetastoreService_GetCatalog_async] diff --git a/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/getcatalog/SyncGetCatalog.java b/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/getcatalog/SyncGetCatalog.java new file mode 100644 index 000000000000..b7e77e3e4861 --- /dev/null +++ b/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/getcatalog/SyncGetCatalog.java @@ -0,0 +1,46 @@ +/* + * Copyright 2022 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.biglake.v1alpha1.samples; + +// [START biglake_v1alpha1_generated_MetastoreService_GetCatalog_sync] +import com.google.cloud.bigquery.biglake.v1alpha1.Catalog; +import com.google.cloud.bigquery.biglake.v1alpha1.CatalogName; +import com.google.cloud.bigquery.biglake.v1alpha1.GetCatalogRequest; +import com.google.cloud.bigquery.biglake.v1alpha1.MetastoreServiceClient; + +public class SyncGetCatalog { + + public static void main(String[] args) throws Exception { + syncGetCatalog(); + } + + public static void syncGetCatalog() throws Exception { + // This snippet has been automatically generated and should be regarded as a code template only. + // It will require modifications to work: + // - It may require correct/in-range values for request initialization. + // - It may require specifying regional endpoints when creating the service client as shown in + // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library + try (MetastoreServiceClient metastoreServiceClient = MetastoreServiceClient.create()) { + GetCatalogRequest request = + GetCatalogRequest.newBuilder() + .setName(CatalogName.of("[PROJECT]", "[LOCATION]", "[CATALOG]").toString()) + .build(); + Catalog response = metastoreServiceClient.getCatalog(request); + } + } +} +// [END biglake_v1alpha1_generated_MetastoreService_GetCatalog_sync] diff --git a/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/getcatalog/SyncGetCatalogCatalogname.java b/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/getcatalog/SyncGetCatalogCatalogname.java new file mode 100644 index 000000000000..2757fa2d7267 --- /dev/null +++ b/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/getcatalog/SyncGetCatalogCatalogname.java @@ -0,0 +1,42 @@ +/* + * Copyright 2022 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.biglake.v1alpha1.samples; + +// [START biglake_v1alpha1_generated_MetastoreService_GetCatalog_Catalogname_sync] +import com.google.cloud.bigquery.biglake.v1alpha1.Catalog; +import com.google.cloud.bigquery.biglake.v1alpha1.CatalogName; +import com.google.cloud.bigquery.biglake.v1alpha1.MetastoreServiceClient; + +public class SyncGetCatalogCatalogname { + + public static void main(String[] args) throws Exception { + syncGetCatalogCatalogname(); + } + + public static void syncGetCatalogCatalogname() throws Exception { + // This snippet has been automatically generated and should be regarded as a code template only. + // It will require modifications to work: + // - It may require correct/in-range values for request initialization. + // - It may require specifying regional endpoints when creating the service client as shown in + // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library + try (MetastoreServiceClient metastoreServiceClient = MetastoreServiceClient.create()) { + CatalogName name = CatalogName.of("[PROJECT]", "[LOCATION]", "[CATALOG]"); + Catalog response = metastoreServiceClient.getCatalog(name); + } + } +} +// [END biglake_v1alpha1_generated_MetastoreService_GetCatalog_Catalogname_sync] diff --git a/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/getcatalog/SyncGetCatalogString.java b/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/getcatalog/SyncGetCatalogString.java new file mode 100644 index 000000000000..08cef4aa9b0d --- /dev/null +++ b/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/getcatalog/SyncGetCatalogString.java @@ -0,0 +1,42 @@ +/* + * Copyright 2022 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.biglake.v1alpha1.samples; + +// [START biglake_v1alpha1_generated_MetastoreService_GetCatalog_String_sync] +import com.google.cloud.bigquery.biglake.v1alpha1.Catalog; +import com.google.cloud.bigquery.biglake.v1alpha1.CatalogName; +import com.google.cloud.bigquery.biglake.v1alpha1.MetastoreServiceClient; + +public class SyncGetCatalogString { + + public static void main(String[] args) throws Exception { + syncGetCatalogString(); + } + + public static void syncGetCatalogString() throws Exception { + // This snippet has been automatically generated and should be regarded as a code template only. 
+ // It will require modifications to work: + // - It may require correct/in-range values for request initialization. + // - It may require specifying regional endpoints when creating the service client as shown in + // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library + try (MetastoreServiceClient metastoreServiceClient = MetastoreServiceClient.create()) { + String name = CatalogName.of("[PROJECT]", "[LOCATION]", "[CATALOG]").toString(); + Catalog response = metastoreServiceClient.getCatalog(name); + } + } +} +// [END biglake_v1alpha1_generated_MetastoreService_GetCatalog_String_sync] diff --git a/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/getdatabase/AsyncGetDatabase.java b/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/getdatabase/AsyncGetDatabase.java new file mode 100644 index 000000000000..d80a38c7a5b2 --- /dev/null +++ b/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/getdatabase/AsyncGetDatabase.java @@ -0,0 +1,50 @@ +/* + * Copyright 2022 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.biglake.v1alpha1.samples; + +// [START biglake_v1alpha1_generated_MetastoreService_GetDatabase_async] +import com.google.api.core.ApiFuture; +import com.google.cloud.bigquery.biglake.v1alpha1.Database; +import com.google.cloud.bigquery.biglake.v1alpha1.DatabaseName; +import com.google.cloud.bigquery.biglake.v1alpha1.GetDatabaseRequest; +import com.google.cloud.bigquery.biglake.v1alpha1.MetastoreServiceClient; + +public class AsyncGetDatabase { + + public static void main(String[] args) throws Exception { + asyncGetDatabase(); + } + + public static void asyncGetDatabase() throws Exception { + // This snippet has been automatically generated and should be regarded as a code template only. + // It will require modifications to work: + // - It may require correct/in-range values for request initialization. + // - It may require specifying regional endpoints when creating the service client as shown in + // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library + try (MetastoreServiceClient metastoreServiceClient = MetastoreServiceClient.create()) { + GetDatabaseRequest request = + GetDatabaseRequest.newBuilder() + .setName( + DatabaseName.of("[PROJECT]", "[LOCATION]", "[CATALOG]", "[DATABASE]").toString()) + .build(); + ApiFuture<Database> future = metastoreServiceClient.getDatabaseCallable().futureCall(request); + // Do something.
+ Database response = future.get(); + } + } +} +// [END biglake_v1alpha1_generated_MetastoreService_GetDatabase_async] diff --git a/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/getdatabase/SyncGetDatabase.java b/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/getdatabase/SyncGetDatabase.java new file mode 100644 index 000000000000..5ffcf0141dce --- /dev/null +++ b/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/getdatabase/SyncGetDatabase.java @@ -0,0 +1,47 @@ +/* + * Copyright 2022 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.biglake.v1alpha1.samples; + +// [START biglake_v1alpha1_generated_MetastoreService_GetDatabase_sync] +import com.google.cloud.bigquery.biglake.v1alpha1.Database; +import com.google.cloud.bigquery.biglake.v1alpha1.DatabaseName; +import com.google.cloud.bigquery.biglake.v1alpha1.GetDatabaseRequest; +import com.google.cloud.bigquery.biglake.v1alpha1.MetastoreServiceClient; + +public class SyncGetDatabase { + + public static void main(String[] args) throws Exception { + syncGetDatabase(); + } + + public static void syncGetDatabase() throws Exception { + // This snippet has been automatically generated and should be regarded as a code template only. + // It will require modifications to work: + // - It may require correct/in-range values for request initialization. + // - It may require specifying regional endpoints when creating the service client as shown in + // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library + try (MetastoreServiceClient metastoreServiceClient = MetastoreServiceClient.create()) { + GetDatabaseRequest request = + GetDatabaseRequest.newBuilder() + .setName( + DatabaseName.of("[PROJECT]", "[LOCATION]", "[CATALOG]", "[DATABASE]").toString()) + .build(); + Database response = metastoreServiceClient.getDatabase(request); + } + } +} +// [END biglake_v1alpha1_generated_MetastoreService_GetDatabase_sync] diff --git a/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/getdatabase/SyncGetDatabaseDatabasename.java b/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/getdatabase/SyncGetDatabaseDatabasename.java new file mode 100644 index 000000000000..a7ffff337b4c --- /dev/null +++ b/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/getdatabase/SyncGetDatabaseDatabasename.java @@ -0,0 +1,42 @@ +/* + * Copyright 2022 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.biglake.v1alpha1.samples; + +// [START biglake_v1alpha1_generated_MetastoreService_GetDatabase_Databasename_sync] +import com.google.cloud.bigquery.biglake.v1alpha1.Database; +import com.google.cloud.bigquery.biglake.v1alpha1.DatabaseName; +import com.google.cloud.bigquery.biglake.v1alpha1.MetastoreServiceClient; + +public class SyncGetDatabaseDatabasename { + + public static void main(String[] args) throws Exception { + syncGetDatabaseDatabasename(); + } + + public static void syncGetDatabaseDatabasename() throws Exception { + // This snippet has been automatically generated and should be regarded as a code template only. + // It will require modifications to work: + // - It may require correct/in-range values for request initialization. + // - It may require specifying regional endpoints when creating the service client as shown in + // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library + try (MetastoreServiceClient metastoreServiceClient = MetastoreServiceClient.create()) { + DatabaseName name = DatabaseName.of("[PROJECT]", "[LOCATION]", "[CATALOG]", "[DATABASE]"); + Database response = metastoreServiceClient.getDatabase(name); + } + } +} +// [END biglake_v1alpha1_generated_MetastoreService_GetDatabase_Databasename_sync] diff --git a/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/getdatabase/SyncGetDatabaseString.java b/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/getdatabase/SyncGetDatabaseString.java new file mode 100644 index 000000000000..e4b369e51526 --- /dev/null +++ b/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/getdatabase/SyncGetDatabaseString.java @@ -0,0 +1,43 @@ +/* + * Copyright 2022 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.biglake.v1alpha1.samples; + +// [START biglake_v1alpha1_generated_MetastoreService_GetDatabase_String_sync] +import com.google.cloud.bigquery.biglake.v1alpha1.Database; +import com.google.cloud.bigquery.biglake.v1alpha1.DatabaseName; +import com.google.cloud.bigquery.biglake.v1alpha1.MetastoreServiceClient; + +public class SyncGetDatabaseString { + + public static void main(String[] args) throws Exception { + syncGetDatabaseString(); + } + + public static void syncGetDatabaseString() throws Exception { + // This snippet has been automatically generated and should be regarded as a code template only. 
+ // It will require modifications to work: + // - It may require correct/in-range values for request initialization. + // - It may require specifying regional endpoints when creating the service client as shown in + // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library + try (MetastoreServiceClient metastoreServiceClient = MetastoreServiceClient.create()) { + String name = + DatabaseName.of("[PROJECT]", "[LOCATION]", "[CATALOG]", "[DATABASE]").toString(); + Database response = metastoreServiceClient.getDatabase(name); + } + } +} +// [END biglake_v1alpha1_generated_MetastoreService_GetDatabase_String_sync] diff --git a/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/gettable/AsyncGetTable.java b/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/gettable/AsyncGetTable.java new file mode 100644 index 000000000000..7b7d09cb5d59 --- /dev/null +++ b/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/gettable/AsyncGetTable.java @@ -0,0 +1,51 @@ +/* + * Copyright 2022 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.biglake.v1alpha1.samples; + +// [START biglake_v1alpha1_generated_MetastoreService_GetTable_async] +import com.google.api.core.ApiFuture; +import com.google.cloud.bigquery.biglake.v1alpha1.GetTableRequest; +import com.google.cloud.bigquery.biglake.v1alpha1.MetastoreServiceClient; +import com.google.cloud.bigquery.biglake.v1alpha1.Table; +import com.google.cloud.bigquery.biglake.v1alpha1.TableName; + +public class AsyncGetTable { + + public static void main(String[] args) throws Exception { + asyncGetTable(); + } + + public static void asyncGetTable() throws Exception { + // This snippet has been automatically generated and should be regarded as a code template only. + // It will require modifications to work: + // - It may require correct/in-range values for request initialization. + // - It may require specifying regional endpoints when creating the service client as shown in + // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library + try (MetastoreServiceClient metastoreServiceClient = MetastoreServiceClient.create()) { + GetTableRequest request = + GetTableRequest.newBuilder() + .setName( + TableName.of("[PROJECT]", "[LOCATION]", "[CATALOG]", "[DATABASE]", "[TABLE]") + .toString()) + .build(); + ApiFuture
<Table> future = metastoreServiceClient.getTableCallable().futureCall(request); + // Do something. + Table response = future.get(); + } + } +} +// [END biglake_v1alpha1_generated_MetastoreService_GetTable_async] diff --git a/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/gettable/SyncGetTable.java b/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/gettable/SyncGetTable.java new file mode 100644 index 000000000000..a669911f7835 --- /dev/null +++ b/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/gettable/SyncGetTable.java @@ -0,0 +1,48 @@ +/* + * Copyright 2022 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.biglake.v1alpha1.samples; + +// [START biglake_v1alpha1_generated_MetastoreService_GetTable_sync] +import com.google.cloud.bigquery.biglake.v1alpha1.GetTableRequest; +import com.google.cloud.bigquery.biglake.v1alpha1.MetastoreServiceClient; +import com.google.cloud.bigquery.biglake.v1alpha1.Table; +import com.google.cloud.bigquery.biglake.v1alpha1.TableName; + +public class SyncGetTable { + + public static void main(String[] args) throws Exception { + syncGetTable(); + } + + public static void syncGetTable() throws Exception { + // This snippet has been automatically generated and should be regarded as a code template only. + // It will require modifications to work: + // - It may require correct/in-range values for request initialization. + // - It may require specifying regional endpoints when creating the service client as shown in + // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library + try (MetastoreServiceClient metastoreServiceClient = MetastoreServiceClient.create()) { + GetTableRequest request = + GetTableRequest.newBuilder() + .setName( + TableName.of("[PROJECT]", "[LOCATION]", "[CATALOG]", "[DATABASE]", "[TABLE]") + .toString()) + .build(); + Table response = metastoreServiceClient.getTable(request); + } + } +} +// [END biglake_v1alpha1_generated_MetastoreService_GetTable_sync] diff --git a/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/gettable/SyncGetTableString.java b/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/gettable/SyncGetTableString.java new file mode 100644 index 000000000000..1c9b19928a28 --- /dev/null +++ b/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/gettable/SyncGetTableString.java @@ -0,0 +1,43 @@ +/* + * Copyright 2022 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.biglake.v1alpha1.samples; + +// [START biglake_v1alpha1_generated_MetastoreService_GetTable_String_sync] +import com.google.cloud.bigquery.biglake.v1alpha1.MetastoreServiceClient; +import com.google.cloud.bigquery.biglake.v1alpha1.Table; +import com.google.cloud.bigquery.biglake.v1alpha1.TableName; + +public class SyncGetTableString { + + public static void main(String[] args) throws Exception { + syncGetTableString(); + } + + public static void syncGetTableString() throws Exception { + // This snippet has been automatically generated and should be regarded as a code template only. + // It will require modifications to work: + // - It may require correct/in-range values for request initialization. + // - It may require specifying regional endpoints when creating the service client as shown in + // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library + try (MetastoreServiceClient metastoreServiceClient = MetastoreServiceClient.create()) { + String name = + TableName.of("[PROJECT]", "[LOCATION]", "[CATALOG]", "[DATABASE]", "[TABLE]").toString(); + Table response = metastoreServiceClient.getTable(name); + } + } +} +// [END biglake_v1alpha1_generated_MetastoreService_GetTable_String_sync] diff --git a/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/gettable/SyncGetTableTablename.java b/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/gettable/SyncGetTableTablename.java new file mode 100644 index 000000000000..a1b83431ae8c --- /dev/null +++ b/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/gettable/SyncGetTableTablename.java @@ -0,0 +1,43 @@ +/* + * Copyright 2022 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.biglake.v1alpha1.samples; + +// [START biglake_v1alpha1_generated_MetastoreService_GetTable_Tablename_sync] +import com.google.cloud.bigquery.biglake.v1alpha1.MetastoreServiceClient; +import com.google.cloud.bigquery.biglake.v1alpha1.Table; +import com.google.cloud.bigquery.biglake.v1alpha1.TableName; + +public class SyncGetTableTablename { + + public static void main(String[] args) throws Exception { + syncGetTableTablename(); + } + + public static void syncGetTableTablename() throws Exception { + // This snippet has been automatically generated and should be regarded as a code template only. 
+ // It will require modifications to work: + // - It may require correct/in-range values for request initialization. + // - It may require specifying regional endpoints when creating the service client as shown in + // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library + try (MetastoreServiceClient metastoreServiceClient = MetastoreServiceClient.create()) { + TableName name = + TableName.of("[PROJECT]", "[LOCATION]", "[CATALOG]", "[DATABASE]", "[TABLE]"); + Table response = metastoreServiceClient.getTable(name); + } + } +} +// [END biglake_v1alpha1_generated_MetastoreService_GetTable_Tablename_sync] diff --git a/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/listcatalogs/AsyncListCatalogs.java b/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/listcatalogs/AsyncListCatalogs.java new file mode 100644 index 000000000000..be055fb4d319 --- /dev/null +++ b/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/listcatalogs/AsyncListCatalogs.java @@ -0,0 +1,54 @@ +/* + * Copyright 2022 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.biglake.v1alpha1.samples; + +// [START biglake_v1alpha1_generated_MetastoreService_ListCatalogs_async] +import com.google.api.core.ApiFuture; +import com.google.cloud.bigquery.biglake.v1alpha1.Catalog; +import com.google.cloud.bigquery.biglake.v1alpha1.ListCatalogsRequest; +import com.google.cloud.bigquery.biglake.v1alpha1.LocationName; +import com.google.cloud.bigquery.biglake.v1alpha1.MetastoreServiceClient; + +public class AsyncListCatalogs { + + public static void main(String[] args) throws Exception { + asyncListCatalogs(); + } + + public static void asyncListCatalogs() throws Exception { + // This snippet has been automatically generated and should be regarded as a code template only. + // It will require modifications to work: + // - It may require correct/in-range values for request initialization. + // - It may require specifying regional endpoints when creating the service client as shown in + // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library + try (MetastoreServiceClient metastoreServiceClient = MetastoreServiceClient.create()) { + ListCatalogsRequest request = + ListCatalogsRequest.newBuilder() + .setParent(LocationName.of("[PROJECT]", "[LOCATION]").toString()) + .setPageSize(883849137) + .setPageToken("pageToken873572522") + .build(); + ApiFuture<Catalog> future = + metastoreServiceClient.listCatalogsPagedCallable().futureCall(request); + // Do something.
+ for (Catalog element : future.get().iterateAll()) { + // doThingsWith(element); + } + } + } +} +// [END biglake_v1alpha1_generated_MetastoreService_ListCatalogs_async] diff --git a/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/listcatalogs/AsyncListCatalogsPaged.java b/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/listcatalogs/AsyncListCatalogsPaged.java new file mode 100644 index 000000000000..56ebaa0098da --- /dev/null +++ b/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/listcatalogs/AsyncListCatalogsPaged.java @@ -0,0 +1,61 @@ +/* + * Copyright 2022 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.biglake.v1alpha1.samples; + +// [START biglake_v1alpha1_generated_MetastoreService_ListCatalogs_Paged_async] +import com.google.cloud.bigquery.biglake.v1alpha1.Catalog; +import com.google.cloud.bigquery.biglake.v1alpha1.ListCatalogsRequest; +import com.google.cloud.bigquery.biglake.v1alpha1.ListCatalogsResponse; +import com.google.cloud.bigquery.biglake.v1alpha1.LocationName; +import com.google.cloud.bigquery.biglake.v1alpha1.MetastoreServiceClient; +import com.google.common.base.Strings; + +public class AsyncListCatalogsPaged { + + public static void main(String[] args) throws Exception { + asyncListCatalogsPaged(); + } + + public static void asyncListCatalogsPaged() throws Exception { + // This snippet has been automatically generated and should be regarded as a code template only. + // It will require modifications to work: + // - It may require correct/in-range values for request initialization. 
+ // - It may require specifying regional endpoints when creating the service client as shown in + // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library + try (MetastoreServiceClient metastoreServiceClient = MetastoreServiceClient.create()) { + ListCatalogsRequest request = + ListCatalogsRequest.newBuilder() + .setParent(LocationName.of("[PROJECT]", "[LOCATION]").toString()) + .setPageSize(883849137) + .setPageToken("pageToken873572522") + .build(); + while (true) { + ListCatalogsResponse response = metastoreServiceClient.listCatalogsCallable().call(request); + for (Catalog element : response.getCatalogsList()) { + // doThingsWith(element); + } + String nextPageToken = response.getNextPageToken(); + if (!Strings.isNullOrEmpty(nextPageToken)) { + request = request.toBuilder().setPageToken(nextPageToken).build(); + } else { + break; + } + } + } + } +} +// [END biglake_v1alpha1_generated_MetastoreService_ListCatalogs_Paged_async] diff --git a/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/listcatalogs/SyncListCatalogs.java b/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/listcatalogs/SyncListCatalogs.java new file mode 100644 index 000000000000..dd6b6edf8483 --- /dev/null +++ b/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/listcatalogs/SyncListCatalogs.java @@ -0,0 +1,50 @@ +/* + * Copyright 2022 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.biglake.v1alpha1.samples; + +// [START biglake_v1alpha1_generated_MetastoreService_ListCatalogs_sync] +import com.google.cloud.bigquery.biglake.v1alpha1.Catalog; +import com.google.cloud.bigquery.biglake.v1alpha1.ListCatalogsRequest; +import com.google.cloud.bigquery.biglake.v1alpha1.LocationName; +import com.google.cloud.bigquery.biglake.v1alpha1.MetastoreServiceClient; + +public class SyncListCatalogs { + + public static void main(String[] args) throws Exception { + syncListCatalogs(); + } + + public static void syncListCatalogs() throws Exception { + // This snippet has been automatically generated and should be regarded as a code template only. + // It will require modifications to work: + // - It may require correct/in-range values for request initialization. 
+ // - It may require specifying regional endpoints when creating the service client as shown in + // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library + try (MetastoreServiceClient metastoreServiceClient = MetastoreServiceClient.create()) { + ListCatalogsRequest request = + ListCatalogsRequest.newBuilder() + .setParent(LocationName.of("[PROJECT]", "[LOCATION]").toString()) + .setPageSize(883849137) + .setPageToken("pageToken873572522") + .build(); + for (Catalog element : metastoreServiceClient.listCatalogs(request).iterateAll()) { + // doThingsWith(element); + } + } + } +} +// [END biglake_v1alpha1_generated_MetastoreService_ListCatalogs_sync] diff --git a/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/listcatalogs/SyncListCatalogsLocationname.java b/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/listcatalogs/SyncListCatalogsLocationname.java new file mode 100644 index 000000000000..0ae0f3f4bf3f --- /dev/null +++ b/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/listcatalogs/SyncListCatalogsLocationname.java @@ -0,0 +1,44 @@ +/* + * Copyright 2022 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.biglake.v1alpha1.samples; + +// [START biglake_v1alpha1_generated_MetastoreService_ListCatalogs_Locationname_sync] +import com.google.cloud.bigquery.biglake.v1alpha1.Catalog; +import com.google.cloud.bigquery.biglake.v1alpha1.LocationName; +import com.google.cloud.bigquery.biglake.v1alpha1.MetastoreServiceClient; + +public class SyncListCatalogsLocationname { + + public static void main(String[] args) throws Exception { + syncListCatalogsLocationname(); + } + + public static void syncListCatalogsLocationname() throws Exception { + // This snippet has been automatically generated and should be regarded as a code template only. + // It will require modifications to work: + // - It may require correct/in-range values for request initialization. 
+ // - It may require specifying regional endpoints when creating the service client as shown in + // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library + try (MetastoreServiceClient metastoreServiceClient = MetastoreServiceClient.create()) { + LocationName parent = LocationName.of("[PROJECT]", "[LOCATION]"); + for (Catalog element : metastoreServiceClient.listCatalogs(parent).iterateAll()) { + // doThingsWith(element); + } + } + } +} +// [END biglake_v1alpha1_generated_MetastoreService_ListCatalogs_Locationname_sync] diff --git a/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/listcatalogs/SyncListCatalogsString.java b/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/listcatalogs/SyncListCatalogsString.java new file mode 100644 index 000000000000..66fbd670341e --- /dev/null +++ b/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/listcatalogs/SyncListCatalogsString.java @@ -0,0 +1,44 @@ +/* + * Copyright 2022 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.biglake.v1alpha1.samples; + +// [START biglake_v1alpha1_generated_MetastoreService_ListCatalogs_String_sync] +import com.google.cloud.bigquery.biglake.v1alpha1.Catalog; +import com.google.cloud.bigquery.biglake.v1alpha1.LocationName; +import com.google.cloud.bigquery.biglake.v1alpha1.MetastoreServiceClient; + +public class SyncListCatalogsString { + + public static void main(String[] args) throws Exception { + syncListCatalogsString(); + } + + public static void syncListCatalogsString() throws Exception { + // This snippet has been automatically generated and should be regarded as a code template only. + // It will require modifications to work: + // - It may require correct/in-range values for request initialization. 
+ // - It may require specifying regional endpoints when creating the service client as shown in + // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library + try (MetastoreServiceClient metastoreServiceClient = MetastoreServiceClient.create()) { + String parent = LocationName.of("[PROJECT]", "[LOCATION]").toString(); + for (Catalog element : metastoreServiceClient.listCatalogs(parent).iterateAll()) { + // doThingsWith(element); + } + } + } +} +// [END biglake_v1alpha1_generated_MetastoreService_ListCatalogs_String_sync] diff --git a/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/listdatabases/AsyncListDatabases.java b/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/listdatabases/AsyncListDatabases.java new file mode 100644 index 000000000000..61ababf83e05 --- /dev/null +++ b/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/listdatabases/AsyncListDatabases.java @@ -0,0 +1,54 @@ +/* + * Copyright 2022 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.biglake.v1alpha1.samples; + +// [START biglake_v1alpha1_generated_MetastoreService_ListDatabases_async] +import com.google.api.core.ApiFuture; +import com.google.cloud.bigquery.biglake.v1alpha1.CatalogName; +import com.google.cloud.bigquery.biglake.v1alpha1.Database; +import com.google.cloud.bigquery.biglake.v1alpha1.ListDatabasesRequest; +import com.google.cloud.bigquery.biglake.v1alpha1.MetastoreServiceClient; + +public class AsyncListDatabases { + + public static void main(String[] args) throws Exception { + asyncListDatabases(); + } + + public static void asyncListDatabases() throws Exception { + // This snippet has been automatically generated and should be regarded as a code template only. + // It will require modifications to work: + // - It may require correct/in-range values for request initialization. + // - It may require specifying regional endpoints when creating the service client as shown in + // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library + try (MetastoreServiceClient metastoreServiceClient = MetastoreServiceClient.create()) { + ListDatabasesRequest request = + ListDatabasesRequest.newBuilder() + .setParent(CatalogName.of("[PROJECT]", "[LOCATION]", "[CATALOG]").toString()) + .setPageSize(883849137) + .setPageToken("pageToken873572522") + .build(); + ApiFuture<Database> future = + metastoreServiceClient.listDatabasesPagedCallable().futureCall(request); + // Do something.
+ for (Database element : future.get().iterateAll()) { + // doThingsWith(element); + } + } + } +} +// [END biglake_v1alpha1_generated_MetastoreService_ListDatabases_async] diff --git a/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/listdatabases/AsyncListDatabasesPaged.java b/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/listdatabases/AsyncListDatabasesPaged.java new file mode 100644 index 000000000000..1c580d962394 --- /dev/null +++ b/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/listdatabases/AsyncListDatabasesPaged.java @@ -0,0 +1,62 @@ +/* + * Copyright 2022 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.biglake.v1alpha1.samples; + +// [START biglake_v1alpha1_generated_MetastoreService_ListDatabases_Paged_async] +import com.google.cloud.bigquery.biglake.v1alpha1.CatalogName; +import com.google.cloud.bigquery.biglake.v1alpha1.Database; +import com.google.cloud.bigquery.biglake.v1alpha1.ListDatabasesRequest; +import com.google.cloud.bigquery.biglake.v1alpha1.ListDatabasesResponse; +import com.google.cloud.bigquery.biglake.v1alpha1.MetastoreServiceClient; +import com.google.common.base.Strings; + +public class AsyncListDatabasesPaged { + + public static void main(String[] args) throws Exception { + asyncListDatabasesPaged(); + } + + public static void asyncListDatabasesPaged() throws Exception { + // This snippet has been automatically generated and should be regarded as a code template only. + // It will require modifications to work: + // - It may require correct/in-range values for request initialization. 
+ // - It may require specifying regional endpoints when creating the service client as shown in + // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library + try (MetastoreServiceClient metastoreServiceClient = MetastoreServiceClient.create()) { + ListDatabasesRequest request = + ListDatabasesRequest.newBuilder() + .setParent(CatalogName.of("[PROJECT]", "[LOCATION]", "[CATALOG]").toString()) + .setPageSize(883849137) + .setPageToken("pageToken873572522") + .build(); + while (true) { + ListDatabasesResponse response = + metastoreServiceClient.listDatabasesCallable().call(request); + for (Database element : response.getDatabasesList()) { + // doThingsWith(element); + } + String nextPageToken = response.getNextPageToken(); + if (!Strings.isNullOrEmpty(nextPageToken)) { + request = request.toBuilder().setPageToken(nextPageToken).build(); + } else { + break; + } + } + } + } +} +// [END biglake_v1alpha1_generated_MetastoreService_ListDatabases_Paged_async] diff --git a/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/listdatabases/SyncListDatabases.java b/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/listdatabases/SyncListDatabases.java new file mode 100644 index 000000000000..f98198423258 --- /dev/null +++ b/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/listdatabases/SyncListDatabases.java @@ -0,0 +1,50 @@ +/* + * Copyright 2022 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.biglake.v1alpha1.samples; + +// [START biglake_v1alpha1_generated_MetastoreService_ListDatabases_sync] +import com.google.cloud.bigquery.biglake.v1alpha1.CatalogName; +import com.google.cloud.bigquery.biglake.v1alpha1.Database; +import com.google.cloud.bigquery.biglake.v1alpha1.ListDatabasesRequest; +import com.google.cloud.bigquery.biglake.v1alpha1.MetastoreServiceClient; + +public class SyncListDatabases { + + public static void main(String[] args) throws Exception { + syncListDatabases(); + } + + public static void syncListDatabases() throws Exception { + // This snippet has been automatically generated and should be regarded as a code template only. + // It will require modifications to work: + // - It may require correct/in-range values for request initialization. 
+ // - It may require specifying regional endpoints when creating the service client as shown in + // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library + try (MetastoreServiceClient metastoreServiceClient = MetastoreServiceClient.create()) { + ListDatabasesRequest request = + ListDatabasesRequest.newBuilder() + .setParent(CatalogName.of("[PROJECT]", "[LOCATION]", "[CATALOG]").toString()) + .setPageSize(883849137) + .setPageToken("pageToken873572522") + .build(); + for (Database element : metastoreServiceClient.listDatabases(request).iterateAll()) { + // doThingsWith(element); + } + } + } +} +// [END biglake_v1alpha1_generated_MetastoreService_ListDatabases_sync] diff --git a/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/listdatabases/SyncListDatabasesCatalogname.java b/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/listdatabases/SyncListDatabasesCatalogname.java new file mode 100644 index 000000000000..5698cfd5fc16 --- /dev/null +++ b/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/listdatabases/SyncListDatabasesCatalogname.java @@ -0,0 +1,44 @@ +/* + * Copyright 2022 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.biglake.v1alpha1.samples; + +// [START biglake_v1alpha1_generated_MetastoreService_ListDatabases_Catalogname_sync] +import com.google.cloud.bigquery.biglake.v1alpha1.CatalogName; +import com.google.cloud.bigquery.biglake.v1alpha1.Database; +import com.google.cloud.bigquery.biglake.v1alpha1.MetastoreServiceClient; + +public class SyncListDatabasesCatalogname { + + public static void main(String[] args) throws Exception { + syncListDatabasesCatalogname(); + } + + public static void syncListDatabasesCatalogname() throws Exception { + // This snippet has been automatically generated and should be regarded as a code template only. + // It will require modifications to work: + // - It may require correct/in-range values for request initialization. 
+ // - It may require specifying regional endpoints when creating the service client as shown in + // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library + try (MetastoreServiceClient metastoreServiceClient = MetastoreServiceClient.create()) { + CatalogName parent = CatalogName.of("[PROJECT]", "[LOCATION]", "[CATALOG]"); + for (Database element : metastoreServiceClient.listDatabases(parent).iterateAll()) { + // doThingsWith(element); + } + } + } +} +// [END biglake_v1alpha1_generated_MetastoreService_ListDatabases_Catalogname_sync] diff --git a/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/listdatabases/SyncListDatabasesString.java b/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/listdatabases/SyncListDatabasesString.java new file mode 100644 index 000000000000..4e414938e7f3 --- /dev/null +++ b/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/listdatabases/SyncListDatabasesString.java @@ -0,0 +1,44 @@ +/* + * Copyright 2022 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.biglake.v1alpha1.samples; + +// [START biglake_v1alpha1_generated_MetastoreService_ListDatabases_String_sync] +import com.google.cloud.bigquery.biglake.v1alpha1.CatalogName; +import com.google.cloud.bigquery.biglake.v1alpha1.Database; +import com.google.cloud.bigquery.biglake.v1alpha1.MetastoreServiceClient; + +public class SyncListDatabasesString { + + public static void main(String[] args) throws Exception { + syncListDatabasesString(); + } + + public static void syncListDatabasesString() throws Exception { + // This snippet has been automatically generated and should be regarded as a code template only. + // It will require modifications to work: + // - It may require correct/in-range values for request initialization. 
+ // - It may require specifying regional endpoints when creating the service client as shown in + // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library + try (MetastoreServiceClient metastoreServiceClient = MetastoreServiceClient.create()) { + String parent = CatalogName.of("[PROJECT]", "[LOCATION]", "[CATALOG]").toString(); + for (Database element : metastoreServiceClient.listDatabases(parent).iterateAll()) { + // doThingsWith(element); + } + } + } +} +// [END biglake_v1alpha1_generated_MetastoreService_ListDatabases_String_sync] diff --git a/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/listlocks/AsyncListLocks.java b/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/listlocks/AsyncListLocks.java new file mode 100644 index 000000000000..7e26e635cc79 --- /dev/null +++ b/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/listlocks/AsyncListLocks.java @@ -0,0 +1,54 @@ +/* + * Copyright 2022 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.biglake.v1alpha1.samples; + +// [START biglake_v1alpha1_generated_MetastoreService_ListLocks_async] +import com.google.api.core.ApiFuture; +import com.google.cloud.bigquery.biglake.v1alpha1.DatabaseName; +import com.google.cloud.bigquery.biglake.v1alpha1.ListLocksRequest; +import com.google.cloud.bigquery.biglake.v1alpha1.Lock; +import com.google.cloud.bigquery.biglake.v1alpha1.MetastoreServiceClient; + +public class AsyncListLocks { + + public static void main(String[] args) throws Exception { + asyncListLocks(); + } + + public static void asyncListLocks() throws Exception { + // This snippet has been automatically generated and should be regarded as a code template only. + // It will require modifications to work: + // - It may require correct/in-range values for request initialization. + // - It may require specifying regional endpoints when creating the service client as shown in + // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library + try (MetastoreServiceClient metastoreServiceClient = MetastoreServiceClient.create()) { + ListLocksRequest request = + ListLocksRequest.newBuilder() + .setParent( + DatabaseName.of("[PROJECT]", "[LOCATION]", "[CATALOG]", "[DATABASE]").toString()) + .setPageSize(883849137) + .setPageToken("pageToken873572522") + .build(); + ApiFuture<Lock> future = metastoreServiceClient.listLocksPagedCallable().futureCall(request); + // Do something.
+ for (Lock element : future.get().iterateAll()) { + // doThingsWith(element); + } + } + } +} +// [END biglake_v1alpha1_generated_MetastoreService_ListLocks_async] diff --git a/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/listlocks/AsyncListLocksPaged.java b/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/listlocks/AsyncListLocksPaged.java new file mode 100644 index 000000000000..8747a8e1c503 --- /dev/null +++ b/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/listlocks/AsyncListLocksPaged.java @@ -0,0 +1,62 @@ +/* + * Copyright 2022 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.biglake.v1alpha1.samples; + +// [START biglake_v1alpha1_generated_MetastoreService_ListLocks_Paged_async] +import com.google.cloud.bigquery.biglake.v1alpha1.DatabaseName; +import com.google.cloud.bigquery.biglake.v1alpha1.ListLocksRequest; +import com.google.cloud.bigquery.biglake.v1alpha1.ListLocksResponse; +import com.google.cloud.bigquery.biglake.v1alpha1.Lock; +import com.google.cloud.bigquery.biglake.v1alpha1.MetastoreServiceClient; +import com.google.common.base.Strings; + +public class AsyncListLocksPaged { + + public static void main(String[] args) throws Exception { + asyncListLocksPaged(); + } + + public static void asyncListLocksPaged() throws Exception { + // This snippet has been automatically generated and should be regarded as a code template only. + // It will require modifications to work: + // - It may require correct/in-range values for request initialization. 
+ // - It may require specifying regional endpoints when creating the service client as shown in + // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library + try (MetastoreServiceClient metastoreServiceClient = MetastoreServiceClient.create()) { + ListLocksRequest request = + ListLocksRequest.newBuilder() + .setParent( + DatabaseName.of("[PROJECT]", "[LOCATION]", "[CATALOG]", "[DATABASE]").toString()) + .setPageSize(883849137) + .setPageToken("pageToken873572522") + .build(); + while (true) { + ListLocksResponse response = metastoreServiceClient.listLocksCallable().call(request); + for (Lock element : response.getLocksList()) { + // doThingsWith(element); + } + String nextPageToken = response.getNextPageToken(); + if (!Strings.isNullOrEmpty(nextPageToken)) { + request = request.toBuilder().setPageToken(nextPageToken).build(); + } else { + break; + } + } + } + } +} +// [END biglake_v1alpha1_generated_MetastoreService_ListLocks_Paged_async] diff --git a/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/listlocks/SyncListLocks.java b/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/listlocks/SyncListLocks.java new file mode 100644 index 000000000000..3a74932c2983 --- /dev/null +++ b/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/listlocks/SyncListLocks.java @@ -0,0 +1,51 @@ +/* + * Copyright 2022 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.biglake.v1alpha1.samples; + +// [START biglake_v1alpha1_generated_MetastoreService_ListLocks_sync] +import com.google.cloud.bigquery.biglake.v1alpha1.DatabaseName; +import com.google.cloud.bigquery.biglake.v1alpha1.ListLocksRequest; +import com.google.cloud.bigquery.biglake.v1alpha1.Lock; +import com.google.cloud.bigquery.biglake.v1alpha1.MetastoreServiceClient; + +public class SyncListLocks { + + public static void main(String[] args) throws Exception { + syncListLocks(); + } + + public static void syncListLocks() throws Exception { + // This snippet has been automatically generated and should be regarded as a code template only. + // It will require modifications to work: + // - It may require correct/in-range values for request initialization. 
+ // - It may require specifying regional endpoints when creating the service client as shown in + // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library + try (MetastoreServiceClient metastoreServiceClient = MetastoreServiceClient.create()) { + ListLocksRequest request = + ListLocksRequest.newBuilder() + .setParent( + DatabaseName.of("[PROJECT]", "[LOCATION]", "[CATALOG]", "[DATABASE]").toString()) + .setPageSize(883849137) + .setPageToken("pageToken873572522") + .build(); + for (Lock element : metastoreServiceClient.listLocks(request).iterateAll()) { + // doThingsWith(element); + } + } + } +} +// [END biglake_v1alpha1_generated_MetastoreService_ListLocks_sync] diff --git a/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/listlocks/SyncListLocksDatabasename.java b/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/listlocks/SyncListLocksDatabasename.java new file mode 100644 index 000000000000..b837985ef956 --- /dev/null +++ b/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/listlocks/SyncListLocksDatabasename.java @@ -0,0 +1,44 @@ +/* + * Copyright 2022 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.biglake.v1alpha1.samples; + +// [START biglake_v1alpha1_generated_MetastoreService_ListLocks_Databasename_sync] +import com.google.cloud.bigquery.biglake.v1alpha1.DatabaseName; +import com.google.cloud.bigquery.biglake.v1alpha1.Lock; +import com.google.cloud.bigquery.biglake.v1alpha1.MetastoreServiceClient; + +public class SyncListLocksDatabasename { + + public static void main(String[] args) throws Exception { + syncListLocksDatabasename(); + } + + public static void syncListLocksDatabasename() throws Exception { + // This snippet has been automatically generated and should be regarded as a code template only. + // It will require modifications to work: + // - It may require correct/in-range values for request initialization. 
+ // - It may require specifying regional endpoints when creating the service client as shown in + // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library + try (MetastoreServiceClient metastoreServiceClient = MetastoreServiceClient.create()) { + DatabaseName parent = DatabaseName.of("[PROJECT]", "[LOCATION]", "[CATALOG]", "[DATABASE]"); + for (Lock element : metastoreServiceClient.listLocks(parent).iterateAll()) { + // doThingsWith(element); + } + } + } +} +// [END biglake_v1alpha1_generated_MetastoreService_ListLocks_Databasename_sync] diff --git a/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/listlocks/SyncListLocksString.java b/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/listlocks/SyncListLocksString.java new file mode 100644 index 000000000000..9eb10dc495ca --- /dev/null +++ b/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/listlocks/SyncListLocksString.java @@ -0,0 +1,45 @@ +/* + * Copyright 2022 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.biglake.v1alpha1.samples; + +// [START biglake_v1alpha1_generated_MetastoreService_ListLocks_String_sync] +import com.google.cloud.bigquery.biglake.v1alpha1.DatabaseName; +import com.google.cloud.bigquery.biglake.v1alpha1.Lock; +import com.google.cloud.bigquery.biglake.v1alpha1.MetastoreServiceClient; + +public class SyncListLocksString { + + public static void main(String[] args) throws Exception { + syncListLocksString(); + } + + public static void syncListLocksString() throws Exception { + // This snippet has been automatically generated and should be regarded as a code template only. + // It will require modifications to work: + // - It may require correct/in-range values for request initialization. 
+ // - It may require specifying regional endpoints when creating the service client as shown in + // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library + try (MetastoreServiceClient metastoreServiceClient = MetastoreServiceClient.create()) { + String parent = + DatabaseName.of("[PROJECT]", "[LOCATION]", "[CATALOG]", "[DATABASE]").toString(); + for (Lock element : metastoreServiceClient.listLocks(parent).iterateAll()) { + // doThingsWith(element); + } + } + } +} +// [END biglake_v1alpha1_generated_MetastoreService_ListLocks_String_sync] diff --git a/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/listtables/AsyncListTables.java b/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/listtables/AsyncListTables.java new file mode 100644 index 000000000000..96074f9bb7e2 --- /dev/null +++ b/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/listtables/AsyncListTables.java @@ -0,0 +1,55 @@ +/* + * Copyright 2022 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.biglake.v1alpha1.samples; + +// [START biglake_v1alpha1_generated_MetastoreService_ListTables_async] +import com.google.api.core.ApiFuture; +import com.google.cloud.bigquery.biglake.v1alpha1.DatabaseName; +import com.google.cloud.bigquery.biglake.v1alpha1.ListTablesRequest; +import com.google.cloud.bigquery.biglake.v1alpha1.MetastoreServiceClient; +import com.google.cloud.bigquery.biglake.v1alpha1.Table; + +public class AsyncListTables { + + public static void main(String[] args) throws Exception { + asyncListTables(); + } + + public static void asyncListTables() throws Exception { + // This snippet has been automatically generated and should be regarded as a code template only. + // It will require modifications to work: + // - It may require correct/in-range values for request initialization. + // - It may require specifying regional endpoints when creating the service client as shown in + // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library + try (MetastoreServiceClient metastoreServiceClient = MetastoreServiceClient.create()) { + ListTablesRequest request = + ListTablesRequest.newBuilder() + .setParent( + DatabaseName.of("[PROJECT]", "[LOCATION]", "[CATALOG]", "[DATABASE]").toString()) + .setPageSize(883849137) + .setPageToken("pageToken873572522") + .build(); + ApiFuture
<Table> future = + metastoreServiceClient.listTablesPagedCallable().futureCall(request); + // Do something. + for (Table element : future.get().iterateAll()) { + // doThingsWith(element); + } + } + } +} +// [END biglake_v1alpha1_generated_MetastoreService_ListTables_async] diff --git a/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/listtables/AsyncListTablesPaged.java b/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/listtables/AsyncListTablesPaged.java new file mode 100644 index 000000000000..50b2fdb87c79 --- /dev/null +++ b/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/listtables/AsyncListTablesPaged.java @@ -0,0 +1,62 @@ +/* + * Copyright 2022 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.biglake.v1alpha1.samples; + +// [START biglake_v1alpha1_generated_MetastoreService_ListTables_Paged_async] +import com.google.cloud.bigquery.biglake.v1alpha1.DatabaseName; +import com.google.cloud.bigquery.biglake.v1alpha1.ListTablesRequest; +import com.google.cloud.bigquery.biglake.v1alpha1.ListTablesResponse; +import com.google.cloud.bigquery.biglake.v1alpha1.MetastoreServiceClient; +import com.google.cloud.bigquery.biglake.v1alpha1.Table; +import com.google.common.base.Strings; + +public class AsyncListTablesPaged { + + public static void main(String[] args) throws Exception { + asyncListTablesPaged(); + } + + public static void asyncListTablesPaged() throws Exception { + // This snippet has been automatically generated and should be regarded as a code template only. + // It will require modifications to work: + // - It may require correct/in-range values for request initialization.
+ // - It may require specifying regional endpoints when creating the service client as shown in + // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library + try (MetastoreServiceClient metastoreServiceClient = MetastoreServiceClient.create()) { + ListTablesRequest request = + ListTablesRequest.newBuilder() + .setParent( + DatabaseName.of("[PROJECT]", "[LOCATION]", "[CATALOG]", "[DATABASE]").toString()) + .setPageSize(883849137) + .setPageToken("pageToken873572522") + .build(); + while (true) { + ListTablesResponse response = metastoreServiceClient.listTablesCallable().call(request); + for (Table element : response.getTablesList()) { + // doThingsWith(element); + } + String nextPageToken = response.getNextPageToken(); + if (!Strings.isNullOrEmpty(nextPageToken)) { + request = request.toBuilder().setPageToken(nextPageToken).build(); + } else { + break; + } + } + } + } +} +// [END biglake_v1alpha1_generated_MetastoreService_ListTables_Paged_async] diff --git a/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/listtables/SyncListTables.java b/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/listtables/SyncListTables.java new file mode 100644 index 000000000000..6ff4c61cb98f --- /dev/null +++ b/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/listtables/SyncListTables.java @@ -0,0 +1,51 @@ +/* + * Copyright 2022 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.biglake.v1alpha1.samples; + +// [START biglake_v1alpha1_generated_MetastoreService_ListTables_sync] +import com.google.cloud.bigquery.biglake.v1alpha1.DatabaseName; +import com.google.cloud.bigquery.biglake.v1alpha1.ListTablesRequest; +import com.google.cloud.bigquery.biglake.v1alpha1.MetastoreServiceClient; +import com.google.cloud.bigquery.biglake.v1alpha1.Table; + +public class SyncListTables { + + public static void main(String[] args) throws Exception { + syncListTables(); + } + + public static void syncListTables() throws Exception { + // This snippet has been automatically generated and should be regarded as a code template only. + // It will require modifications to work: + // - It may require correct/in-range values for request initialization. 
+ // - It may require specifying regional endpoints when creating the service client as shown in + // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library + try (MetastoreServiceClient metastoreServiceClient = MetastoreServiceClient.create()) { + ListTablesRequest request = + ListTablesRequest.newBuilder() + .setParent( + DatabaseName.of("[PROJECT]", "[LOCATION]", "[CATALOG]", "[DATABASE]").toString()) + .setPageSize(883849137) + .setPageToken("pageToken873572522") + .build(); + for (Table element : metastoreServiceClient.listTables(request).iterateAll()) { + // doThingsWith(element); + } + } + } +} +// [END biglake_v1alpha1_generated_MetastoreService_ListTables_sync] diff --git a/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/listtables/SyncListTablesDatabasename.java b/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/listtables/SyncListTablesDatabasename.java new file mode 100644 index 000000000000..d5a652e9c91b --- /dev/null +++ b/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/listtables/SyncListTablesDatabasename.java @@ -0,0 +1,44 @@ +/* + * Copyright 2022 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.biglake.v1alpha1.samples; + +// [START biglake_v1alpha1_generated_MetastoreService_ListTables_Databasename_sync] +import com.google.cloud.bigquery.biglake.v1alpha1.DatabaseName; +import com.google.cloud.bigquery.biglake.v1alpha1.MetastoreServiceClient; +import com.google.cloud.bigquery.biglake.v1alpha1.Table; + +public class SyncListTablesDatabasename { + + public static void main(String[] args) throws Exception { + syncListTablesDatabasename(); + } + + public static void syncListTablesDatabasename() throws Exception { + // This snippet has been automatically generated and should be regarded as a code template only. + // It will require modifications to work: + // - It may require correct/in-range values for request initialization. 
+ // - It may require specifying regional endpoints when creating the service client as shown in + // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library + try (MetastoreServiceClient metastoreServiceClient = MetastoreServiceClient.create()) { + DatabaseName parent = DatabaseName.of("[PROJECT]", "[LOCATION]", "[CATALOG]", "[DATABASE]"); + for (Table element : metastoreServiceClient.listTables(parent).iterateAll()) { + // doThingsWith(element); + } + } + } +} +// [END biglake_v1alpha1_generated_MetastoreService_ListTables_Databasename_sync] diff --git a/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/listtables/SyncListTablesString.java b/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/listtables/SyncListTablesString.java new file mode 100644 index 000000000000..832e174dd3e7 --- /dev/null +++ b/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/listtables/SyncListTablesString.java @@ -0,0 +1,45 @@ +/* + * Copyright 2022 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.biglake.v1alpha1.samples; + +// [START biglake_v1alpha1_generated_MetastoreService_ListTables_String_sync] +import com.google.cloud.bigquery.biglake.v1alpha1.DatabaseName; +import com.google.cloud.bigquery.biglake.v1alpha1.MetastoreServiceClient; +import com.google.cloud.bigquery.biglake.v1alpha1.Table; + +public class SyncListTablesString { + + public static void main(String[] args) throws Exception { + syncListTablesString(); + } + + public static void syncListTablesString() throws Exception { + // This snippet has been automatically generated and should be regarded as a code template only. + // It will require modifications to work: + // - It may require correct/in-range values for request initialization. 
+ // - It may require specifying regional endpoints when creating the service client as shown in + // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library + try (MetastoreServiceClient metastoreServiceClient = MetastoreServiceClient.create()) { + String parent = + DatabaseName.of("[PROJECT]", "[LOCATION]", "[CATALOG]", "[DATABASE]").toString(); + for (Table element : metastoreServiceClient.listTables(parent).iterateAll()) { + // doThingsWith(element); + } + } + } +} +// [END biglake_v1alpha1_generated_MetastoreService_ListTables_String_sync] diff --git a/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/updatedatabase/AsyncUpdateDatabase.java b/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/updatedatabase/AsyncUpdateDatabase.java new file mode 100644 index 000000000000..42fc19ac4fc7 --- /dev/null +++ b/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/updatedatabase/AsyncUpdateDatabase.java @@ -0,0 +1,51 @@ +/* + * Copyright 2022 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.biglake.v1alpha1.samples; + +// [START biglake_v1alpha1_generated_MetastoreService_UpdateDatabase_async] +import com.google.api.core.ApiFuture; +import com.google.cloud.bigquery.biglake.v1alpha1.Database; +import com.google.cloud.bigquery.biglake.v1alpha1.MetastoreServiceClient; +import com.google.cloud.bigquery.biglake.v1alpha1.UpdateDatabaseRequest; +import com.google.protobuf.FieldMask; + +public class AsyncUpdateDatabase { + + public static void main(String[] args) throws Exception { + asyncUpdateDatabase(); + } + + public static void asyncUpdateDatabase() throws Exception { + // This snippet has been automatically generated and should be regarded as a code template only. + // It will require modifications to work: + // - It may require correct/in-range values for request initialization. + // - It may require specifying regional endpoints when creating the service client as shown in + // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library + try (MetastoreServiceClient metastoreServiceClient = MetastoreServiceClient.create()) { + UpdateDatabaseRequest request = + UpdateDatabaseRequest.newBuilder() + .setDatabase(Database.newBuilder().build()) + .setUpdateMask(FieldMask.newBuilder().build()) + .build(); + ApiFuture<Database> future = + metastoreServiceClient.updateDatabaseCallable().futureCall(request); + // Do something.
+ Database response = future.get(); + } + } +} +// [END biglake_v1alpha1_generated_MetastoreService_UpdateDatabase_async] diff --git a/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/updatedatabase/SyncUpdateDatabase.java b/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/updatedatabase/SyncUpdateDatabase.java new file mode 100644 index 000000000000..1f02d7cb4850 --- /dev/null +++ b/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/updatedatabase/SyncUpdateDatabase.java @@ -0,0 +1,47 @@ +/* + * Copyright 2022 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.biglake.v1alpha1.samples; + +// [START biglake_v1alpha1_generated_MetastoreService_UpdateDatabase_sync] +import com.google.cloud.bigquery.biglake.v1alpha1.Database; +import com.google.cloud.bigquery.biglake.v1alpha1.MetastoreServiceClient; +import com.google.cloud.bigquery.biglake.v1alpha1.UpdateDatabaseRequest; +import com.google.protobuf.FieldMask; + +public class SyncUpdateDatabase { + + public static void main(String[] args) throws Exception { + syncUpdateDatabase(); + } + + public static void syncUpdateDatabase() throws Exception { + // This snippet has been automatically generated and should be regarded as a code template only. + // It will require modifications to work: + // - It may require correct/in-range values for request initialization. + // - It may require specifying regional endpoints when creating the service client as shown in + // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library + try (MetastoreServiceClient metastoreServiceClient = MetastoreServiceClient.create()) { + UpdateDatabaseRequest request = + UpdateDatabaseRequest.newBuilder() + .setDatabase(Database.newBuilder().build()) + .setUpdateMask(FieldMask.newBuilder().build()) + .build(); + Database response = metastoreServiceClient.updateDatabase(request); + } + } +} +// [END biglake_v1alpha1_generated_MetastoreService_UpdateDatabase_sync] diff --git a/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/updatedatabase/SyncUpdateDatabaseDatabaseFieldmask.java b/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/updatedatabase/SyncUpdateDatabaseDatabaseFieldmask.java new file mode 100644 index 000000000000..1575d6b8a064 --- /dev/null +++ b/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/updatedatabase/SyncUpdateDatabaseDatabaseFieldmask.java @@ -0,0 +1,43 @@ +/* + * Copyright 2022 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.biglake.v1alpha1.samples; + +// [START biglake_v1alpha1_generated_MetastoreService_UpdateDatabase_DatabaseFieldmask_sync] +import com.google.cloud.bigquery.biglake.v1alpha1.Database; +import com.google.cloud.bigquery.biglake.v1alpha1.MetastoreServiceClient; +import com.google.protobuf.FieldMask; + +public class SyncUpdateDatabaseDatabaseFieldmask { + + public static void main(String[] args) throws Exception { + syncUpdateDatabaseDatabaseFieldmask(); + } + + public static void syncUpdateDatabaseDatabaseFieldmask() throws Exception { + // This snippet has been automatically generated and should be regarded as a code template only. + // It will require modifications to work: + // - It may require correct/in-range values for request initialization. + // - It may require specifying regional endpoints when creating the service client as shown in + // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library + try (MetastoreServiceClient metastoreServiceClient = MetastoreServiceClient.create()) { + Database database = Database.newBuilder().build(); + FieldMask updateMask = FieldMask.newBuilder().build(); + Database response = metastoreServiceClient.updateDatabase(database, updateMask); + } + } +} +// [END biglake_v1alpha1_generated_MetastoreService_UpdateDatabase_DatabaseFieldmask_sync] diff --git a/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/updatetable/AsyncUpdateTable.java b/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/updatetable/AsyncUpdateTable.java new file mode 100644 index 000000000000..f0420329d6b3 --- /dev/null +++ b/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/updatetable/AsyncUpdateTable.java @@ -0,0 +1,50 @@ +/* + * Copyright 2022 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.cloud.bigquery.biglake.v1alpha1.samples; + +// [START biglake_v1alpha1_generated_MetastoreService_UpdateTable_async] +import com.google.api.core.ApiFuture; +import com.google.cloud.bigquery.biglake.v1alpha1.MetastoreServiceClient; +import com.google.cloud.bigquery.biglake.v1alpha1.Table; +import com.google.cloud.bigquery.biglake.v1alpha1.UpdateTableRequest; +import com.google.protobuf.FieldMask; + +public class AsyncUpdateTable { + + public static void main(String[] args) throws Exception { + asyncUpdateTable(); + } + + public static void asyncUpdateTable() throws Exception { + // This snippet has been automatically generated and should be regarded as a code template only. + // It will require modifications to work: + // - It may require correct/in-range values for request initialization. + // - It may require specifying regional endpoints when creating the service client as shown in + // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library + try (MetastoreServiceClient metastoreServiceClient = MetastoreServiceClient.create()) { + UpdateTableRequest request = + UpdateTableRequest.newBuilder() + .setTable(Table.newBuilder().build()) + .setUpdateMask(FieldMask.newBuilder().build()) + .build(); + ApiFuture<Table>
future = metastoreServiceClient.updateTableCallable().futureCall(request); + // Do something. + Table response = future.get(); + } + } +} +// [END biglake_v1alpha1_generated_MetastoreService_UpdateTable_async] diff --git a/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/updatetable/SyncUpdateTable.java b/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/updatetable/SyncUpdateTable.java new file mode 100644 index 000000000000..26e4fb4c6ad0 --- /dev/null +++ b/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/updatetable/SyncUpdateTable.java @@ -0,0 +1,47 @@ +/* + * Copyright 2022 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.biglake.v1alpha1.samples; + +// [START biglake_v1alpha1_generated_MetastoreService_UpdateTable_sync] +import com.google.cloud.bigquery.biglake.v1alpha1.MetastoreServiceClient; +import com.google.cloud.bigquery.biglake.v1alpha1.Table; +import com.google.cloud.bigquery.biglake.v1alpha1.UpdateTableRequest; +import com.google.protobuf.FieldMask; + +public class SyncUpdateTable { + + public static void main(String[] args) throws Exception { + syncUpdateTable(); + } + + public static void syncUpdateTable() throws Exception { + // This snippet has been automatically generated and should be regarded as a code template only. + // It will require modifications to work: + // - It may require correct/in-range values for request initialization. + // - It may require specifying regional endpoints when creating the service client as shown in + // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library + try (MetastoreServiceClient metastoreServiceClient = MetastoreServiceClient.create()) { + UpdateTableRequest request = + UpdateTableRequest.newBuilder() + .setTable(Table.newBuilder().build()) + .setUpdateMask(FieldMask.newBuilder().build()) + .build(); + Table response = metastoreServiceClient.updateTable(request); + } + } +} +// [END biglake_v1alpha1_generated_MetastoreService_UpdateTable_sync] diff --git a/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/updatetable/SyncUpdateTableTableFieldmask.java b/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/updatetable/SyncUpdateTableTableFieldmask.java new file mode 100644 index 000000000000..e0e3ca50b5b3 --- /dev/null +++ b/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservice/updatetable/SyncUpdateTableTableFieldmask.java @@ -0,0 +1,43 @@ +/* + * Copyright 2022 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.biglake.v1alpha1.samples; + +// [START biglake_v1alpha1_generated_MetastoreService_UpdateTable_TableFieldmask_sync] +import com.google.cloud.bigquery.biglake.v1alpha1.MetastoreServiceClient; +import com.google.cloud.bigquery.biglake.v1alpha1.Table; +import com.google.protobuf.FieldMask; + +public class SyncUpdateTableTableFieldmask { + + public static void main(String[] args) throws Exception { + syncUpdateTableTableFieldmask(); + } + + public static void syncUpdateTableTableFieldmask() throws Exception { + // This snippet has been automatically generated and should be regarded as a code template only. + // It will require modifications to work: + // - It may require correct/in-range values for request initialization. + // - It may require specifying regional endpoints when creating the service client as shown in + // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library + try (MetastoreServiceClient metastoreServiceClient = MetastoreServiceClient.create()) { + Table table = Table.newBuilder().build(); + FieldMask updateMask = FieldMask.newBuilder().build(); + Table response = metastoreServiceClient.updateTable(table, updateMask); + } + } +} +// [END biglake_v1alpha1_generated_MetastoreService_UpdateTable_TableFieldmask_sync] diff --git a/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservicesettings/createcatalog/SyncCreateCatalog.java b/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservicesettings/createcatalog/SyncCreateCatalog.java new file mode 100644 index 000000000000..c8434a365731 --- /dev/null +++ b/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/metastoreservicesettings/createcatalog/SyncCreateCatalog.java @@ -0,0 +1,49 @@ +/* + * Copyright 2022 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.biglake.v1alpha1.samples; + +// [START biglake_v1alpha1_generated_MetastoreServiceSettings_CreateCatalog_sync] +import com.google.cloud.bigquery.biglake.v1alpha1.MetastoreServiceSettings; +import java.time.Duration; + +public class SyncCreateCatalog { + + public static void main(String[] args) throws Exception { + syncCreateCatalog(); + } + + public static void syncCreateCatalog() throws Exception { + // This snippet has been automatically generated and should be regarded as a code template only. + // It will require modifications to work: + // - It may require correct/in-range values for request initialization. 
+ // - It may require specifying regional endpoints when creating the service client as shown in + // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library + MetastoreServiceSettings.Builder metastoreServiceSettingsBuilder = + MetastoreServiceSettings.newBuilder(); + metastoreServiceSettingsBuilder + .createCatalogSettings() + .setRetrySettings( + metastoreServiceSettingsBuilder + .createCatalogSettings() + .getRetrySettings() + .toBuilder() + .setTotalTimeout(Duration.ofSeconds(30)) + .build()); + MetastoreServiceSettings metastoreServiceSettings = metastoreServiceSettingsBuilder.build(); + } +} +// [END biglake_v1alpha1_generated_MetastoreServiceSettings_CreateCatalog_sync] diff --git a/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/stub/metastoreservicestubsettings/createcatalog/SyncCreateCatalog.java b/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/stub/metastoreservicestubsettings/createcatalog/SyncCreateCatalog.java new file mode 100644 index 000000000000..9423d0e1683c --- /dev/null +++ b/java-biglake/samples/snippets/generated/com/google/cloud/bigquery/biglake/v1alpha1/stub/metastoreservicestubsettings/createcatalog/SyncCreateCatalog.java @@ -0,0 +1,49 @@ +/* + * Copyright 2022 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.biglake.v1alpha1.stub.samples; + +// [START biglake_v1alpha1_generated_MetastoreServiceStubSettings_CreateCatalog_sync] +import com.google.cloud.bigquery.biglake.v1alpha1.stub.MetastoreServiceStubSettings; +import java.time.Duration; + +public class SyncCreateCatalog { + + public static void main(String[] args) throws Exception { + syncCreateCatalog(); + } + + public static void syncCreateCatalog() throws Exception { + // This snippet has been automatically generated and should be regarded as a code template only. + // It will require modifications to work: + // - It may require correct/in-range values for request initialization. 
+ // - It may require specifying regional endpoints when creating the service client as shown in + // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library + MetastoreServiceStubSettings.Builder metastoreServiceSettingsBuilder = + MetastoreServiceStubSettings.newBuilder(); + metastoreServiceSettingsBuilder + .createCatalogSettings() + .setRetrySettings( + metastoreServiceSettingsBuilder + .createCatalogSettings() + .getRetrySettings() + .toBuilder() + .setTotalTimeout(Duration.ofSeconds(30)) + .build()); + MetastoreServiceStubSettings metastoreServiceSettings = metastoreServiceSettingsBuilder.build(); + } +} +// [END biglake_v1alpha1_generated_MetastoreServiceStubSettings_CreateCatalog_sync] diff --git a/java-bigqueryconnection/README.md b/java-bigqueryconnection/README.md index 058000eb5486..c7d73320979d 100644 --- a/java-bigqueryconnection/README.md +++ b/java-bigqueryconnection/README.md @@ -14,6 +14,7 @@ Java idiomatic client for [Cloud BigQuery Connection][product-docs]. If you are using Maven, add this to your pom.xml file: + ```xml @@ -34,6 +35,7 @@ If you are using SBT, add this to your dependencies: ```Scala libraryDependencies += "com.google.cloud" % "google-cloud-bigqueryconnection" % "2.14.0" ``` + ## Authentication diff --git a/java-bigquerydatatransfer/README.md b/java-bigquerydatatransfer/README.md index c094787d4ef9..14703900bcfa 100644 --- a/java-bigquerydatatransfer/README.md +++ b/java-bigquerydatatransfer/README.md @@ -14,6 +14,7 @@ Java idiomatic client for [BigQuery Data Transfer Service][product-docs]. If you are using Maven, add this to your pom.xml file: + ```xml @@ -34,6 +35,7 @@ If you are using SBT, add this to your dependencies: ```Scala libraryDependencies += "com.google.cloud" % "google-cloud-bigquerydatatransfer" % "2.12.0" ``` + ## Authentication diff --git a/java-bigquerymigration/README.md b/java-bigquerymigration/README.md index 6478cb9c836e..2d5e0cf651bf 100644 --- a/java-bigquerymigration/README.md +++ b/java-bigquerymigration/README.md @@ -17,6 +17,7 @@ Java idiomatic client for [BigQuery Migration][product-docs]. If you are using Maven, add this to your pom.xml file: + ```xml @@ -37,6 +38,7 @@ If you are using SBT, add this to your dependencies: ```Scala libraryDependencies += "com.google.cloud" % "google-cloud-bigquerymigration" % "0.15.0" ``` + ## Authentication diff --git a/java-billing/README.md b/java-billing/README.md index 384a18541196..040f01b274c3 100644 --- a/java-billing/README.md +++ b/java-billing/README.md @@ -14,6 +14,7 @@ Java idiomatic client for [Cloud Billing][product-docs]. If you are using Maven, add this to your pom.xml file: + ```xml @@ -34,6 +35,7 @@ If you are using SBT, add this to your dependencies: ```Scala libraryDependencies += "com.google.cloud" % "google-cloud-billing" % "2.12.0" ``` + ## Authentication diff --git a/java-billingbudgets/README.md b/java-billingbudgets/README.md index fd72ceefb448..233c0dc5d659 100644 --- a/java-billingbudgets/README.md +++ b/java-billingbudgets/README.md @@ -14,6 +14,7 @@ Java idiomatic client for [Cloud Billing Budgets][product-docs]. 
If you are using Maven, add this to your pom.xml file: + ```xml @@ -34,6 +35,7 @@ If you are using SBT, add this to your dependencies: ```Scala libraryDependencies += "com.google.cloud" % "google-cloud-billingbudgets" % "2.12.0" ``` + ## Authentication diff --git a/java-binary-authorization/README.md b/java-binary-authorization/README.md index 5cca3764adcd..03f97974f531 100644 --- a/java-binary-authorization/README.md +++ b/java-binary-authorization/README.md @@ -14,6 +14,7 @@ Java idiomatic client for [Binary Authorization][product-docs]. If you are using Maven, add this to your pom.xml file: + ```xml @@ -34,6 +35,7 @@ If you are using SBT, add this to your dependencies: ```Scala libraryDependencies += "com.google.cloud" % "google-cloud-binary-authorization" % "1.11.0" ``` + ## Authentication diff --git a/java-certificate-manager/README.md b/java-certificate-manager/README.md index 227e8818fd8f..7f57ceba0239 100644 --- a/java-certificate-manager/README.md +++ b/java-certificate-manager/README.md @@ -17,6 +17,7 @@ Java idiomatic client for [Certificate Manager][product-docs]. If you are using Maven, add this to your pom.xml file: + ```xml @@ -37,6 +38,7 @@ If you are using SBT, add this to your dependencies: ```Scala libraryDependencies += "com.google.cloud" % "google-cloud-certificate-manager" % "0.15.0" ``` + ## Authentication diff --git a/java-channel/README.md b/java-channel/README.md index 4cb5eecf69e3..eabd47ec268d 100644 --- a/java-channel/README.md +++ b/java-channel/README.md @@ -14,6 +14,7 @@ Java idiomatic client for [Channel Services][product-docs]. If you are using Maven, add this to your pom.xml file: + ```xml @@ -34,6 +35,7 @@ If you are using SBT, add this to your dependencies: ```Scala libraryDependencies += "com.google.cloud" % "google-cloud-channel" % "3.16.0" ``` + ## Authentication diff --git a/java-cloudbuild/README.md b/java-cloudbuild/README.md index 044d55a10ef8..61d25647e028 100644 --- a/java-cloudbuild/README.md +++ b/java-cloudbuild/README.md @@ -14,6 +14,7 @@ Java idiomatic client for [Cloud Build][product-docs]. If you are using Maven, add this to your pom.xml file: + ```xml @@ -34,6 +35,7 @@ If you are using SBT, add this to your dependencies: ```Scala libraryDependencies += "com.google.cloud" % "google-cloud-build" % "3.14.0" ``` + ## Authentication diff --git a/java-cloudcommerceconsumerprocurement/README.md b/java-cloudcommerceconsumerprocurement/README.md index 2b179dce56b7..7c78287fe507 100644 --- a/java-cloudcommerceconsumerprocurement/README.md +++ b/java-cloudcommerceconsumerprocurement/README.md @@ -17,6 +17,7 @@ Java idiomatic client for [Cloud Commerce Consumer Procurement][product-docs]. If you are using Maven, add this to your pom.xml file: + ```xml @@ -37,6 +38,7 @@ If you are using SBT, add this to your dependencies: ```Scala libraryDependencies += "com.google.cloud" % "google-cloud-cloudcommerceconsumerprocurement" % "0.10.0" ``` + ## Authentication diff --git a/java-contact-center-insights/README.md b/java-contact-center-insights/README.md index a1a23b875947..53aa060d86e0 100644 --- a/java-contact-center-insights/README.md +++ b/java-contact-center-insights/README.md @@ -14,6 +14,7 @@ Java idiomatic client for [CCAI Insights][product-docs]. 
If you are using Maven, add this to your pom.xml file: + ```xml @@ -34,6 +35,7 @@ If you are using SBT, add this to your dependencies: ```Scala libraryDependencies += "com.google.cloud" % "google-cloud-contact-center-insights" % "2.12.0" ``` + ## Authentication diff --git a/java-container/README.md b/java-container/README.md index 7a85e4b66d76..e71c5c1d6c09 100644 --- a/java-container/README.md +++ b/java-container/README.md @@ -14,6 +14,7 @@ Java idiomatic client for [Kubernetes Engine][product-docs]. If you are using Maven, add this to your pom.xml file: + ```xml @@ -34,6 +35,7 @@ If you are using SBT, add this to your dependencies: ```Scala libraryDependencies += "com.google.cloud" % "google-cloud-container" % "2.15.0" ``` + ## Authentication diff --git a/java-data-fusion/README.md b/java-data-fusion/README.md index bddff11b115d..48e4832a03e5 100644 --- a/java-data-fusion/README.md +++ b/java-data-fusion/README.md @@ -14,6 +14,7 @@ Java idiomatic client for [Cloud Data Fusion][product-docs]. If you are using Maven, add this to your pom.xml file: + ```xml @@ -34,6 +35,7 @@ If you are using SBT, add this to your dependencies: ```Scala libraryDependencies += "com.google.cloud" % "google-cloud-data-fusion" % "1.12.0" ``` + ## Authentication diff --git a/java-datacatalog/README.md b/java-datacatalog/README.md index cf26344d6ff9..8a1cec379813 100644 --- a/java-datacatalog/README.md +++ b/java-datacatalog/README.md @@ -14,6 +14,7 @@ Java idiomatic client for [Data Catalog][product-docs]. If you are using Maven, add this to your pom.xml file: + ```xml @@ -34,6 +35,7 @@ If you are using SBT, add this to your dependencies: ```Scala libraryDependencies += "com.google.cloud" % "google-cloud-datacatalog" % "1.18.0" ``` + ## Authentication diff --git a/java-datalabeling/README.md b/java-datalabeling/README.md index c8b34c1d8474..35779657804f 100644 --- a/java-datalabeling/README.md +++ b/java-datalabeling/README.md @@ -17,6 +17,7 @@ Java idiomatic client for [Data Labeling][product-docs]. If you are using Maven, add this to your pom.xml file: + ```xml @@ -37,6 +38,7 @@ If you are using SBT, add this to your dependencies: ```Scala libraryDependencies += "com.google.cloud" % "google-cloud-datalabeling" % "0.132.0" ``` + ## Authentication diff --git a/java-datalineage/README.md b/java-datalineage/README.md index 60b831198860..113033a2fddd 100644 --- a/java-datalineage/README.md +++ b/java-datalineage/README.md @@ -17,6 +17,7 @@ Java idiomatic client for [Data Lineage][product-docs]. If you are using Maven, add this to your pom.xml file: + ```xml @@ -37,6 +38,7 @@ If you are using SBT, add this to your dependencies: ```Scala libraryDependencies += "com.google.cloud" % "google-cloud-datalineage" % "0.4.0" ``` + ## Authentication diff --git a/java-dataplex/README.md b/java-dataplex/README.md index eb15b7997275..88a5daed8c6e 100644 --- a/java-dataplex/README.md +++ b/java-dataplex/README.md @@ -14,6 +14,7 @@ Java idiomatic client for [Cloud Dataplex][product-docs]. If you are using Maven, add this to your pom.xml file: + ```xml @@ -34,6 +35,7 @@ If you are using SBT, add this to your dependencies: ```Scala libraryDependencies += "com.google.cloud" % "google-cloud-dataplex" % "1.10.0" ``` + ## Authentication diff --git a/java-dataproc-metastore/README.md b/java-dataproc-metastore/README.md index cde231c6a9e6..1e34ed126632 100644 --- a/java-dataproc-metastore/README.md +++ b/java-dataproc-metastore/README.md @@ -14,6 +14,7 @@ Java idiomatic client for [Dataproc Metastore][product-docs]. 
If you are using Maven, add this to your pom.xml file: + ```xml @@ -34,6 +35,7 @@ If you are using SBT, add this to your dependencies: ```Scala libraryDependencies += "com.google.cloud" % "google-cloud-dataproc-metastore" % "2.13.0" ``` + ## Authentication diff --git a/java-dataproc/README.md b/java-dataproc/README.md index b5e41e9b360c..b8b445d6b650 100644 --- a/java-dataproc/README.md +++ b/java-dataproc/README.md @@ -14,6 +14,7 @@ Java idiomatic client for [Dataproc][product-docs]. If you are using Maven, add this to your pom.xml file: + ```xml @@ -34,6 +35,7 @@ If you are using SBT, add this to your dependencies: ```Scala libraryDependencies += "com.google.cloud" % "google-cloud-dataproc" % "4.9.0" ``` + ## Authentication diff --git a/java-datastream/README.md b/java-datastream/README.md index 3434e64a113c..0a40a64d8777 100644 --- a/java-datastream/README.md +++ b/java-datastream/README.md @@ -14,6 +14,7 @@ Java idiomatic client for [Datastream][product-docs]. If you are using Maven, add this to your pom.xml file: + ```xml @@ -34,6 +35,7 @@ If you are using SBT, add this to your dependencies: ```Scala libraryDependencies += "com.google.cloud" % "google-cloud-datastream" % "1.11.0" ``` + ## Authentication diff --git a/java-debugger-client/README.md b/java-debugger-client/README.md index d34258b8d7ec..8d40d7fa0821 100644 --- a/java-debugger-client/README.md +++ b/java-debugger-client/README.md @@ -14,6 +14,7 @@ Java idiomatic client for [Cloud Debugger][product-docs]. If you are using Maven, add this to your pom.xml file: + ```xml @@ -34,6 +35,7 @@ If you are using SBT, add this to your dependencies: ```Scala libraryDependencies += "com.google.cloud" % "google-cloud-debugger-client" % "1.12.0" ``` + ## Authentication diff --git a/java-dialogflow-cx/README.md b/java-dialogflow-cx/README.md index 225a897aa555..d276340eb748 100644 --- a/java-dialogflow-cx/README.md +++ b/java-dialogflow-cx/README.md @@ -17,6 +17,7 @@ Java idiomatic client for [Dialogflow CX][product-docs]. If you are using Maven, add this to your pom.xml file: + ```xml @@ -37,6 +38,7 @@ If you are using SBT, add this to your dependencies: ```Scala libraryDependencies += "com.google.cloud" % "google-cloud-dialogflow-cx" % "0.23.0" ``` + ## Authentication diff --git a/java-dialogflow/README.md b/java-dialogflow/README.md index b36b6838ef2f..29eb85bd1bc1 100644 --- a/java-dialogflow/README.md +++ b/java-dialogflow/README.md @@ -14,6 +14,7 @@ Java idiomatic client for [Dialogflow API][product-docs]. If you are using Maven, add this to your pom.xml file: + ```xml @@ -34,6 +35,7 @@ If you are using SBT, add this to your dependencies: ```Scala libraryDependencies += "com.google.cloud" % "google-cloud-dialogflow" % "4.18.0" ``` + ## Authentication diff --git a/java-distributedcloudedge/README.md b/java-distributedcloudedge/README.md index bc5acca3fe2b..9ada77c07d4d 100644 --- a/java-distributedcloudedge/README.md +++ b/java-distributedcloudedge/README.md @@ -17,6 +17,7 @@ Java idiomatic client for [Google Distributed Cloud Edge][product-docs]. If you are using Maven, add this to your pom.xml file: + ```xml @@ -37,6 +38,7 @@ If you are using SBT, add this to your dependencies: ```Scala libraryDependencies += "com.google.cloud" % "google-cloud-distributedcloudedge" % "0.9.0" ``` + ## Authentication diff --git a/java-dlp/README.md b/java-dlp/README.md index 67651c790779..495079b5e95c 100644 --- a/java-dlp/README.md +++ b/java-dlp/README.md @@ -14,6 +14,7 @@ Java idiomatic client for [Cloud Data Loss Prevention][product-docs]. 
If you are using Maven, add this to your pom.xml file: + ```xml @@ -34,6 +35,7 @@ If you are using SBT, add this to your dependencies: ```Scala libraryDependencies += "com.google.cloud" % "google-cloud-dlp" % "3.16.0" ``` + ## Authentication diff --git a/java-dms/README.md b/java-dms/README.md index aadb53e6b982..309bb57cbd67 100644 --- a/java-dms/README.md +++ b/java-dms/README.md @@ -14,6 +14,7 @@ Java idiomatic client for [Database Migration Service][product-docs]. If you are using Maven, add this to your pom.xml file: + ```xml @@ -34,6 +35,7 @@ If you are using SBT, add this to your dependencies: ```Scala libraryDependencies += "com.google.cloud" % "google-cloud-dms" % "2.11.0" ``` + ## Authentication diff --git a/java-document-ai/README.md b/java-document-ai/README.md index e24ea1c862f7..10fce848de99 100644 --- a/java-document-ai/README.md +++ b/java-document-ai/README.md @@ -14,6 +14,7 @@ Java idiomatic client for [Document AI][product-docs]. If you are using Maven, add this to your pom.xml file: + ```xml @@ -34,6 +35,7 @@ If you are using SBT, add this to your dependencies: ```Scala libraryDependencies += "com.google.cloud" % "google-cloud-document-ai" % "2.16.0" ``` + ## Authentication diff --git a/java-errorreporting/README.md b/java-errorreporting/README.md index ad0a2b41eaab..1a639d830f4a 100644 --- a/java-errorreporting/README.md +++ b/java-errorreporting/README.md @@ -17,6 +17,7 @@ Java idiomatic client for [Error Reporting][product-docs]. If you are using Maven, add this to your pom.xml file: + ```xml @@ -37,6 +38,7 @@ If you are using SBT, add this to your dependencies: ```Scala libraryDependencies += "com.google.cloud" % "google-cloud-errorreporting" % "0.133.0-beta" ``` + ## Authentication diff --git a/java-essential-contacts/README.md b/java-essential-contacts/README.md index 452b4678d5ac..06d6ad6d04ef 100644 --- a/java-essential-contacts/README.md +++ b/java-essential-contacts/README.md @@ -14,6 +14,7 @@ Java idiomatic client for [Essential Contacts API][product-docs]. If you are using Maven, add this to your pom.xml file: + ```xml @@ -34,6 +35,7 @@ If you are using SBT, add this to your dependencies: ```Scala libraryDependencies += "com.google.cloud" % "google-cloud-essential-contacts" % "2.12.0" ``` + ## Authentication diff --git a/java-filestore/README.md b/java-filestore/README.md index 2aef63307110..0c35d26e7fac 100644 --- a/java-filestore/README.md +++ b/java-filestore/README.md @@ -14,6 +14,7 @@ Java idiomatic client for [Cloud Filestore API][product-docs]. If you are using Maven, add this to your pom.xml file: + ```xml @@ -34,6 +35,7 @@ If you are using SBT, add this to your dependencies: ```Scala libraryDependencies += "com.google.cloud" % "google-cloud-filestore" % "1.13.0" ``` + ## Authentication diff --git a/java-game-servers/README.md b/java-game-servers/README.md index 63f6eb08c2ac..996cda3a4b36 100644 --- a/java-game-servers/README.md +++ b/java-game-servers/README.md @@ -14,6 +14,7 @@ Java idiomatic client for [Cloud Gaming][product-docs]. 
If you are using Maven, add this to your pom.xml file: + ```xml @@ -34,6 +35,7 @@ If you are using SBT, add this to your dependencies: ```Scala libraryDependencies += "com.google.cloud" % "google-cloud-game-servers" % "2.12.0" ``` + ## Authentication diff --git a/java-gke-connect-gateway/README.md b/java-gke-connect-gateway/README.md index 0d22277b9f84..409127b1292c 100644 --- a/java-gke-connect-gateway/README.md +++ b/java-gke-connect-gateway/README.md @@ -17,6 +17,7 @@ Java idiomatic client for [Connect Gateway API][product-docs]. If you are using Maven, add this to your pom.xml file: + ```xml @@ -37,6 +38,7 @@ If you are using SBT, add this to your dependencies: ```Scala libraryDependencies += "com.google.cloud" % "google-cloud-gke-connect-gateway" % "0.13.0" ``` + ## Authentication diff --git a/java-gkehub/README.md b/java-gkehub/README.md index 1a2b0c46fc84..31bbf3bd7ab8 100644 --- a/java-gkehub/README.md +++ b/java-gkehub/README.md @@ -14,6 +14,7 @@ Java idiomatic client for [GKE Hub API][product-docs]. If you are using Maven, add this to your pom.xml file: + ```xml @@ -34,6 +35,7 @@ If you are using SBT, add this to your dependencies: ```Scala libraryDependencies += "com.google.cloud" % "google-cloud-gkehub" % "1.12.0" ``` + ## Authentication diff --git a/java-grafeas/README.md b/java-grafeas/README.md index 94a24aaa8ec0..b2258b7124e4 100644 --- a/java-grafeas/README.md +++ b/java-grafeas/README.md @@ -14,6 +14,7 @@ Java idiomatic client for [Grafeas][product-docs]. If you are using Maven, add this to your pom.xml file: + ```xml @@ -34,6 +35,7 @@ If you are using SBT, add this to your dependencies: ```Scala libraryDependencies += "io.grafeas" % "grafeas" % "2.13.0" ``` + ## Authentication diff --git a/java-gsuite-addons/README.md b/java-gsuite-addons/README.md index 01d848c79ffd..70550228a1c8 100644 --- a/java-gsuite-addons/README.md +++ b/java-gsuite-addons/README.md @@ -14,6 +14,7 @@ Java idiomatic client for [Google Workspace Add-ons API][product-docs]. If you are using Maven, add this to your pom.xml file: + ```xml @@ -34,6 +35,7 @@ If you are using SBT, add this to your dependencies: ```Scala libraryDependencies += "com.google.cloud" % "google-cloud-gsuite-addons" % "2.12.0" ``` + ## Authentication diff --git a/java-iamcredentials/README.md b/java-iamcredentials/README.md index e41cc33d66f6..aee981599e6c 100644 --- a/java-iamcredentials/README.md +++ b/java-iamcredentials/README.md @@ -14,6 +14,7 @@ Java idiomatic client for [IAM Service Account Credentials API][product-docs]. If you are using Maven, add this to your pom.xml file: + ```xml @@ -34,6 +35,7 @@ If you are using SBT, add this to your dependencies: ```Scala libraryDependencies += "com.google.cloud" % "google-cloud-iamcredentials" % "2.12.0" ``` + ## Authentication diff --git a/java-iot/README.md b/java-iot/README.md index da6a456c58d7..9347eaabc96c 100644 --- a/java-iot/README.md +++ b/java-iot/README.md @@ -14,6 +14,7 @@ Java idiomatic client for [Cloud Internet of Things (IoT) Core][product-docs]. If you are using Maven, add this to your pom.xml file: + ```xml @@ -34,6 +35,7 @@ If you are using SBT, add this to your dependencies: ```Scala libraryDependencies += "com.google.cloud" % "google-cloud-iot" % "2.12.0" ``` + ## Authentication diff --git a/java-kms/README.md b/java-kms/README.md index 96ccdc36e9dd..c78db1c72484 100644 --- a/java-kms/README.md +++ b/java-kms/README.md @@ -14,6 +14,7 @@ Java idiomatic client for [Cloud Key Management Service][product-docs]. 
If you are using Maven, add this to your pom.xml file: + ```xml @@ -34,6 +35,7 @@ If you are using SBT, add this to your dependencies: ```Scala libraryDependencies += "com.google.cloud" % "google-cloud-kms" % "2.15.0" ``` + ## Authentication diff --git a/java-life-sciences/README.md b/java-life-sciences/README.md index 3f1d56a3c795..cf699aa75421 100644 --- a/java-life-sciences/README.md +++ b/java-life-sciences/README.md @@ -17,6 +17,7 @@ Java idiomatic client for [Cloud Life Sciences][product-docs]. If you are using Maven, add this to your pom.xml file: + ```xml @@ -37,6 +38,7 @@ If you are using SBT, add this to your dependencies: ```Scala libraryDependencies += "com.google.cloud" % "google-cloud-life-sciences" % "0.14.0" ``` + ## Authentication diff --git a/java-managed-identities/README.md b/java-managed-identities/README.md index 4c1c57f0d02f..a20a469da50a 100644 --- a/java-managed-identities/README.md +++ b/java-managed-identities/README.md @@ -14,6 +14,7 @@ Java idiomatic client for [Managed Service for Microsoft Active Directory][produ If you are using Maven, add this to your pom.xml file: + ```xml @@ -34,6 +35,7 @@ If you are using SBT, add this to your dependencies: ```Scala libraryDependencies += "com.google.cloud" % "google-cloud-managed-identities" % "1.10.0" ``` + ## Authentication diff --git a/java-maps-routing/README.md b/java-maps-routing/README.md index 504c82ddf11e..28638868ea70 100644 --- a/java-maps-routing/README.md +++ b/java-maps-routing/README.md @@ -17,6 +17,7 @@ Java idiomatic client for [Routes API][product-docs]. If you are using Maven, add this to your pom.xml file: + ```xml @@ -37,6 +38,7 @@ If you are using SBT, add this to your dependencies: ```Scala libraryDependencies += "com.google.maps" % "google-maps-routing" % "0.6.0" ``` + ## Authentication diff --git a/java-mediatranslation/README.md b/java-mediatranslation/README.md index 888e6237371f..22f662722bb2 100644 --- a/java-mediatranslation/README.md +++ b/java-mediatranslation/README.md @@ -17,6 +17,7 @@ Java idiomatic client for [Media Translation API][product-docs]. If you are using Maven, add this to your pom.xml file: + ```xml @@ -37,6 +38,7 @@ If you are using SBT, add this to your dependencies: ```Scala libraryDependencies += "com.google.cloud" % "google-cloud-mediatranslation" % "0.18.0" ``` + ## Authentication diff --git a/java-memcache/README.md b/java-memcache/README.md index 4532a814ea50..9e538543d364 100644 --- a/java-memcache/README.md +++ b/java-memcache/README.md @@ -14,6 +14,7 @@ Java idiomatic client for [Cloud Memcache][product-docs]. If you are using Maven, add this to your pom.xml file: + ```xml @@ -34,6 +35,7 @@ If you are using SBT, add this to your dependencies: ```Scala libraryDependencies += "com.google.cloud" % "google-cloud-memcache" % "2.12.0" ``` + ## Authentication diff --git a/java-network-management/README.md b/java-network-management/README.md index d7c63c933686..402ff0855be5 100644 --- a/java-network-management/README.md +++ b/java-network-management/README.md @@ -14,6 +14,7 @@ Java idiomatic client for [Network Management API][product-docs]. 
If you are using Maven, add this to your pom.xml file: + ```xml @@ -34,6 +35,7 @@ If you are using SBT, add this to your dependencies: ```Scala libraryDependencies += "com.google.cloud" % "google-cloud-network-management" % "1.13.0" ``` + ## Authentication diff --git a/java-network-security/README.md b/java-network-security/README.md index f7de484d3889..9c0a24bf06db 100644 --- a/java-network-security/README.md +++ b/java-network-security/README.md @@ -17,6 +17,7 @@ Java idiomatic client for [Network Security API][product-docs]. If you are using Maven, add this to your pom.xml file: + ```xml @@ -37,6 +38,7 @@ If you are using SBT, add this to your dependencies: ```Scala libraryDependencies += "com.google.cloud" % "google-cloud-network-security" % "0.15.0" ``` + ## Authentication diff --git a/java-networkconnectivity/README.md b/java-networkconnectivity/README.md index 6eb66df7cdaf..fa8bb17cc96d 100644 --- a/java-networkconnectivity/README.md +++ b/java-networkconnectivity/README.md @@ -14,6 +14,7 @@ Java idiomatic client for [Network Connectivity Center][product-docs]. If you are using Maven, add this to your pom.xml file: + ```xml @@ -34,6 +35,7 @@ If you are using SBT, add this to your dependencies: ```Scala libraryDependencies += "com.google.cloud" % "google-cloud-networkconnectivity" % "1.11.0" ``` + ## Authentication diff --git a/java-notebooks/README.md b/java-notebooks/README.md index 16083259b4ba..901cbcf972bd 100644 --- a/java-notebooks/README.md +++ b/java-notebooks/README.md @@ -14,6 +14,7 @@ Java idiomatic client for [AI Platform Notebooks][product-docs]. If you are using Maven, add this to your pom.xml file: + ```xml @@ -34,6 +35,7 @@ If you are using SBT, add this to your dependencies: ```Scala libraryDependencies += "com.google.cloud" % "google-cloud-notebooks" % "1.10.0" ``` + ## Authentication diff --git a/java-optimization/README.md b/java-optimization/README.md index 12c5a5944673..fb80aabdbb3a 100644 --- a/java-optimization/README.md +++ b/java-optimization/README.md @@ -14,6 +14,7 @@ Java idiomatic client for [Cloud Fleet Routing][product-docs]. If you are using Maven, add this to your pom.xml file: + ```xml @@ -34,6 +35,7 @@ If you are using SBT, add this to your dependencies: ```Scala libraryDependencies += "com.google.cloud" % "google-cloud-optimization" % "1.10.0" ``` + ## Authentication diff --git a/java-orchestration-airflow/README.md b/java-orchestration-airflow/README.md index 3775479a037c..a39962266309 100644 --- a/java-orchestration-airflow/README.md +++ b/java-orchestration-airflow/README.md @@ -14,6 +14,7 @@ Java idiomatic client for [Cloud Composer][product-docs]. If you are using Maven, add this to your pom.xml file: + ```xml @@ -34,6 +35,7 @@ If you are using SBT, add this to your dependencies: ```Scala libraryDependencies += "com.google.cloud" % "google-cloud-orchestration-airflow" % "1.12.0" ``` + ## Authentication diff --git a/java-os-config/README.md b/java-os-config/README.md index f3fb9da5c2f8..54fe5b62a89e 100644 --- a/java-os-config/README.md +++ b/java-os-config/README.md @@ -14,6 +14,7 @@ Java idiomatic client for [OS Config API][product-docs]. 
If you are using Maven, add this to your pom.xml file: + ```xml @@ -34,6 +35,7 @@ If you are using SBT, add this to your dependencies: ```Scala libraryDependencies += "com.google.cloud" % "google-cloud-os-config" % "2.14.0" ``` + ## Authentication diff --git a/java-os-login/README.md b/java-os-login/README.md index eb662523e8e6..cc32a2a69e4c 100644 --- a/java-os-login/README.md +++ b/java-os-login/README.md @@ -14,6 +14,7 @@ Java idiomatic client for [Cloud OS Login][product-docs]. If you are using Maven, add this to your pom.xml file: + ```xml @@ -34,6 +35,7 @@ If you are using SBT, add this to your dependencies: ```Scala libraryDependencies += "com.google.cloud" % "google-cloud-os-login" % "2.11.0" ``` + ## Authentication diff --git a/java-phishingprotection/README.md b/java-phishingprotection/README.md index 85200df67fbd..7fc9ecdbdc4c 100644 --- a/java-phishingprotection/README.md +++ b/java-phishingprotection/README.md @@ -17,6 +17,7 @@ Java idiomatic client for [Phishing Protection][product-docs]. If you are using Maven, add this to your pom.xml file: + ```xml @@ -37,6 +38,7 @@ If you are using SBT, add this to your dependencies: ```Scala libraryDependencies += "com.google.cloud" % "google-cloud-phishingprotection" % "0.43.0" ``` + ## Authentication diff --git a/java-policy-troubleshooter/README.md b/java-policy-troubleshooter/README.md index 80ee95872f15..fe5a33f83dbe 100644 --- a/java-policy-troubleshooter/README.md +++ b/java-policy-troubleshooter/README.md @@ -14,6 +14,7 @@ Java idiomatic client for [IAM Policy Troubleshooter API][product-docs]. If you are using Maven, add this to your pom.xml file: + ```xml @@ -34,6 +35,7 @@ If you are using SBT, add this to your dependencies: ```Scala libraryDependencies += "com.google.cloud" % "google-cloud-policy-troubleshooter" % "1.11.0" ``` + ## Authentication diff --git a/java-private-catalog/README.md b/java-private-catalog/README.md index 7af16648233c..fb3bc7971b73 100644 --- a/java-private-catalog/README.md +++ b/java-private-catalog/README.md @@ -17,6 +17,7 @@ Java idiomatic client for [Private Catalog][product-docs]. If you are using Maven, add this to your pom.xml file: + ```xml @@ -37,6 +38,7 @@ If you are using SBT, add this to your dependencies: ```Scala libraryDependencies += "com.google.cloud" % "google-cloud-private-catalog" % "0.14.0" ``` + ## Authentication diff --git a/java-profiler/README.md b/java-profiler/README.md index 5448c554f058..d81c60d97d7a 100644 --- a/java-profiler/README.md +++ b/java-profiler/README.md @@ -14,6 +14,7 @@ Java idiomatic client for [Cloud Profiler][product-docs]. If you are using Maven, add this to your pom.xml file: + ```xml @@ -34,6 +35,7 @@ If you are using SBT, add this to your dependencies: ```Scala libraryDependencies += "com.google.cloud" % "google-cloud-profiler" % "2.12.0" ``` + ## Authentication diff --git a/java-publicca/README.md b/java-publicca/README.md index 3239069eb575..a7a8f5a6b486 100644 --- a/java-publicca/README.md +++ b/java-publicca/README.md @@ -17,6 +17,7 @@ Java idiomatic client for [Public Certificate Authority][product-docs]. 
If you are using Maven, add this to your pom.xml file: + ```xml @@ -37,6 +38,7 @@ If you are using SBT, add this to your dependencies: ```Scala libraryDependencies += "com.google.cloud" % "google-cloud-publicca" % "0.9.0" ``` + ## Authentication diff --git a/java-recaptchaenterprise/README.md b/java-recaptchaenterprise/README.md index e5198229f0ad..f83738527550 100644 --- a/java-recaptchaenterprise/README.md +++ b/java-recaptchaenterprise/README.md @@ -14,6 +14,7 @@ Java idiomatic client for [reCAPTCHA Enterprise][product-docs]. If you are using Maven, add this to your pom.xml file: + ```xml @@ -34,6 +35,7 @@ If you are using SBT, add this to your dependencies: ```Scala libraryDependencies += "com.google.cloud" % "google-cloud-recaptchaenterprise" % "3.9.0" ``` + ## Authentication diff --git a/java-recommendations-ai/README.md b/java-recommendations-ai/README.md index 66dfb8a2e0a8..2113b23cceeb 100644 --- a/java-recommendations-ai/README.md +++ b/java-recommendations-ai/README.md @@ -18,6 +18,7 @@ Java idiomatic client for [Recommendations AI][product-docs]. If you are using Maven, add this to your pom.xml file: + ```xml @@ -38,6 +39,7 @@ If you are using SBT, add this to your dependencies: ```Scala libraryDependencies += "com.google.cloud" % "google-cloud-recommendations-ai" % "0.19.0" ``` + ## Authentication diff --git a/java-redis/README.md b/java-redis/README.md index ad19315865fb..c7a1becb1fce 100644 --- a/java-redis/README.md +++ b/java-redis/README.md @@ -14,6 +14,7 @@ Java idiomatic client for [Cloud Redis][product-docs]. If you are using Maven, add this to your pom.xml file: + ```xml @@ -34,6 +35,7 @@ If you are using SBT, add this to your dependencies: ```Scala libraryDependencies += "com.google.cloud" % "google-cloud-redis" % "2.15.0" ``` + ## Authentication diff --git a/java-resource-settings/README.md b/java-resource-settings/README.md index f10263ff1571..5565bbbd2851 100644 --- a/java-resource-settings/README.md +++ b/java-resource-settings/README.md @@ -14,6 +14,7 @@ Java idiomatic client for [Resource Settings API][product-docs]. If you are using Maven, add this to your pom.xml file: + ```xml @@ -34,6 +35,7 @@ If you are using SBT, add this to your dependencies: ```Scala libraryDependencies += "com.google.cloud" % "google-cloud-resource-settings" % "1.12.0" ``` + ## Authentication diff --git a/java-resourcemanager/README.md b/java-resourcemanager/README.md index f6de4af99421..adc8d5c74767 100644 --- a/java-resourcemanager/README.md +++ b/java-resourcemanager/README.md @@ -14,6 +14,7 @@ Java idiomatic client for [Resource Manager API][product-docs]. If you are using Maven, add this to your pom.xml file: + ```xml @@ -34,6 +35,7 @@ If you are using SBT, add this to your dependencies: ```Scala libraryDependencies += "com.google.cloud" % "google-cloud-resourcemanager" % "1.14.0" ``` + ## Authentication diff --git a/java-retail/README.md b/java-retail/README.md index c37f690eab43..802f30ac6092 100644 --- a/java-retail/README.md +++ b/java-retail/README.md @@ -14,6 +14,7 @@ Java idiomatic client for [Cloud Retail][product-docs]. If you are using Maven, add this to your pom.xml file: + ```xml @@ -34,6 +35,7 @@ If you are using SBT, add this to your dependencies: ```Scala libraryDependencies += "com.google.cloud" % "google-cloud-retail" % "2.14.0" ``` + ## Authentication diff --git a/java-run/README.md b/java-run/README.md index db69b52b252b..267ce3d79b95 100644 --- a/java-run/README.md +++ b/java-run/README.md @@ -17,6 +17,7 @@ Java idiomatic client for [Cloud Run][product-docs]. 
If you are using Maven, add this to your pom.xml file: + ```xml @@ -37,6 +38,7 @@ If you are using SBT, add this to your dependencies: ```Scala libraryDependencies += "com.google.cloud" % "google-cloud-run" % "0.12.0" ``` + ## Authentication diff --git a/java-scheduler/README.md b/java-scheduler/README.md index 013fc43fe6ef..94b21859e1ee 100644 --- a/java-scheduler/README.md +++ b/java-scheduler/README.md @@ -14,6 +14,7 @@ Java idiomatic client for [Google Cloud Scheduler][product-docs]. If you are using Maven, add this to your pom.xml file: + ```xml @@ -34,6 +35,7 @@ If you are using SBT, add this to your dependencies: ```Scala libraryDependencies += "com.google.cloud" % "google-cloud-scheduler" % "2.12.0" ``` + ## Authentication diff --git a/java-secretmanager/README.md b/java-secretmanager/README.md index 5b16edc02c47..33b7544d9c67 100644 --- a/java-secretmanager/README.md +++ b/java-secretmanager/README.md @@ -14,6 +14,7 @@ Java idiomatic client for [Secret Management][product-docs]. If you are using Maven, add this to your pom.xml file: + ```xml @@ -34,6 +35,7 @@ If you are using SBT, add this to your dependencies: ```Scala libraryDependencies += "com.google.cloud" % "google-cloud-secretmanager" % "2.12.0" ``` + ## Authentication diff --git a/java-security-private-ca/README.md b/java-security-private-ca/README.md index e31545c8baea..c274453cf810 100644 --- a/java-security-private-ca/README.md +++ b/java-security-private-ca/README.md @@ -14,6 +14,7 @@ Java idiomatic client for [Certificate Authority Service][product-docs]. If you are using Maven, add this to your pom.xml file: + ```xml @@ -34,6 +35,7 @@ If you are using SBT, add this to your dependencies: ```Scala libraryDependencies += "com.google.cloud" % "google-cloud-security-private-ca" % "2.14.0" ``` + ## Authentication diff --git a/java-securitycenter-settings/README.md b/java-securitycenter-settings/README.md index 6c0fe08d4d16..ce671f4e91c2 100644 --- a/java-securitycenter-settings/README.md +++ b/java-securitycenter-settings/README.md @@ -17,6 +17,7 @@ Java idiomatic client for [Security Command Center Settings API][product-docs]. If you are using Maven, add this to your pom.xml file: + ```xml @@ -37,6 +38,7 @@ If you are using SBT, add this to your dependencies: ```Scala libraryDependencies += "com.google.cloud" % "google-cloud-securitycenter-settings" % "0.15.0" ``` + ## Authentication diff --git a/java-securitycenter/README.md b/java-securitycenter/README.md index 5a9b37173729..7566db8256fb 100644 --- a/java-securitycenter/README.md +++ b/java-securitycenter/README.md @@ -14,6 +14,7 @@ Java idiomatic client for [Security Command Center][product-docs]. If you are using Maven, add this to your pom.xml file: + ```xml @@ -34,6 +35,7 @@ If you are using SBT, add this to your dependencies: ```Scala libraryDependencies += "com.google.cloud" % "google-cloud-securitycenter" % "2.20.0" ``` + ## Authentication diff --git a/java-service-control/README.md b/java-service-control/README.md index 8816e876a24d..548c8d8e0307 100644 --- a/java-service-control/README.md +++ b/java-service-control/README.md @@ -14,6 +14,7 @@ Java idiomatic client for [Service Control API][product-docs]. 
If you are using Maven, add this to your pom.xml file: + ```xml @@ -34,6 +35,7 @@ If you are using SBT, add this to your dependencies: ```Scala libraryDependencies += "com.google.cloud" % "google-cloud-service-control" % "1.12.0" ``` + ## Authentication diff --git a/java-service-management/README.md b/java-service-management/README.md index 7046033a5626..37e013a857d5 100644 --- a/java-service-management/README.md +++ b/java-service-management/README.md @@ -14,6 +14,7 @@ Java idiomatic client for [Service Management API][product-docs]. If you are using Maven, add this to your pom.xml file: + ```xml @@ -34,6 +35,7 @@ If you are using SBT, add this to your dependencies: ```Scala libraryDependencies += "com.google.cloud" % "google-cloud-service-management" % "3.10.0" ``` + ## Authentication diff --git a/java-service-usage/README.md b/java-service-usage/README.md index c635ff180d13..55f8f3f0ffb4 100644 --- a/java-service-usage/README.md +++ b/java-service-usage/README.md @@ -14,6 +14,7 @@ Java idiomatic client for [Service Usage][product-docs]. If you are using Maven, add this to your pom.xml file: + ```xml @@ -34,6 +35,7 @@ If you are using SBT, add this to your dependencies: ```Scala libraryDependencies += "com.google.cloud" % "google-cloud-service-usage" % "2.12.0" ``` + ## Authentication diff --git a/java-servicedirectory/README.md b/java-servicedirectory/README.md index 0df1f90e276b..8a0088b9b188 100644 --- a/java-servicedirectory/README.md +++ b/java-servicedirectory/README.md @@ -14,6 +14,7 @@ Java idiomatic client for [Service Directory][product-docs]. If you are using Maven, add this to your pom.xml file: + ```xml @@ -34,6 +35,7 @@ If you are using SBT, add this to your dependencies: ```Scala libraryDependencies += "com.google.cloud" % "google-cloud-servicedirectory" % "2.13.0" ``` + ## Authentication diff --git a/java-shell/README.md b/java-shell/README.md index a8a17fcdae67..51dd05340993 100644 --- a/java-shell/README.md +++ b/java-shell/README.md @@ -14,6 +14,7 @@ Java idiomatic client for [Cloud Shell][product-docs]. If you are using Maven, add this to your pom.xml file: + ```xml @@ -34,6 +35,7 @@ If you are using SBT, add this to your dependencies: ```Scala libraryDependencies += "com.google.cloud" % "google-cloud-shell" % "2.11.0" ``` + ## Authentication diff --git a/java-speech/README.md b/java-speech/README.md index ab504e652438..03494ae044f8 100644 --- a/java-speech/README.md +++ b/java-speech/README.md @@ -14,6 +14,7 @@ Java idiomatic client for [Cloud Speech][product-docs]. If you are using Maven, add this to your pom.xml file: + ```xml @@ -34,6 +35,7 @@ If you are using SBT, add this to your dependencies: ```Scala libraryDependencies += "com.google.cloud" % "google-cloud-speech" % "4.7.0" ``` + ## Authentication diff --git a/java-storage-transfer/README.md b/java-storage-transfer/README.md index f53af17929aa..d33dba3e5a07 100644 --- a/java-storage-transfer/README.md +++ b/java-storage-transfer/README.md @@ -14,6 +14,7 @@ Java idiomatic client for [Storage Transfer Service][product-docs]. If you are using Maven, add this to your pom.xml file: + ```xml @@ -34,6 +35,7 @@ If you are using SBT, add this to your dependencies: ```Scala libraryDependencies += "com.google.cloud" % "google-cloud-storage-transfer" % "1.12.0" ``` + ## Authentication diff --git a/java-talent/README.md b/java-talent/README.md index 47647e1190d7..43573eec371e 100644 --- a/java-talent/README.md +++ b/java-talent/README.md @@ -14,6 +14,7 @@ Java idiomatic client for [Talent Solution][product-docs]. 
If you are using Maven, add this to your pom.xml file: + ```xml @@ -34,6 +35,7 @@ If you are using SBT, add this to your dependencies: ```Scala libraryDependencies += "com.google.cloud" % "google-cloud-talent" % "2.13.0" ``` + ## Authentication diff --git a/java-tasks/README.md b/java-tasks/README.md index 0edc152cc38c..40a0f9178e35 100644 --- a/java-tasks/README.md +++ b/java-tasks/README.md @@ -14,6 +14,7 @@ Java idiomatic client for [Cloud Tasks][product-docs]. If you are using Maven, add this to your pom.xml file: + ```xml @@ -34,6 +35,7 @@ If you are using SBT, add this to your dependencies: ```Scala libraryDependencies += "com.google.cloud" % "google-cloud-tasks" % "2.12.0" ``` + ## Authentication diff --git a/java-tpu/README.md b/java-tpu/README.md index 37ef967b0f0a..3a88c6264fa6 100644 --- a/java-tpu/README.md +++ b/java-tpu/README.md @@ -14,6 +14,7 @@ Java idiomatic client for [Cloud TPU][product-docs]. If you are using Maven, add this to your pom.xml file: + ```xml @@ -34,6 +35,7 @@ If you are using SBT, add this to your dependencies: ```Scala libraryDependencies += "com.google.cloud" % "google-cloud-tpu" % "2.13.0" ``` + ## Authentication diff --git a/java-trace/README.md b/java-trace/README.md index 8ebfbf27252b..1537241fb4cf 100644 --- a/java-trace/README.md +++ b/java-trace/README.md @@ -14,6 +14,7 @@ Java idiomatic client for [Stackdriver Trace][product-docs]. If you are using Maven, add this to your pom.xml file: + ```xml @@ -34,6 +35,7 @@ If you are using SBT, add this to your dependencies: ```Scala libraryDependencies += "com.google.cloud" % "google-cloud-trace" % "2.12.0" ``` + ## Authentication diff --git a/java-translate/README.md b/java-translate/README.md index aeb197517770..1891a1535706 100644 --- a/java-translate/README.md +++ b/java-translate/README.md @@ -14,6 +14,7 @@ Java idiomatic client for [Cloud Translation][product-docs]. If you are using Maven, add this to your pom.xml file: + ```xml @@ -34,6 +35,7 @@ If you are using SBT, add this to your dependencies: ```Scala libraryDependencies += "com.google.cloud" % "google-cloud-translate" % "2.12.0" ``` + ## Authentication diff --git a/java-video-intelligence/README.md b/java-video-intelligence/README.md index 0ff5d4edf7b5..365b60f46d18 100644 --- a/java-video-intelligence/README.md +++ b/java-video-intelligence/README.md @@ -14,6 +14,7 @@ Java idiomatic client for [Cloud Video Intelligence][product-docs]. If you are using Maven, add this to your pom.xml file: + ```xml @@ -34,6 +35,7 @@ If you are using SBT, add this to your dependencies: ```Scala libraryDependencies += "com.google.cloud" % "google-cloud-video-intelligence" % "2.11.0" ``` + ## Authentication diff --git a/java-video-live-stream/README.md b/java-video-live-stream/README.md index e9d8e3bf0b92..f75377958c8e 100644 --- a/java-video-live-stream/README.md +++ b/java-video-live-stream/README.md @@ -17,6 +17,7 @@ Java idiomatic client for [Live Stream API][product-docs]. If you are using Maven, add this to your pom.xml file: + ```xml @@ -37,6 +38,7 @@ If you are using SBT, add this to your dependencies: ```Scala libraryDependencies += "com.google.cloud" % "google-cloud-live-stream" % "0.14.0" ``` + ## Authentication diff --git a/java-video-transcoder/README.md b/java-video-transcoder/README.md index 65deacca78cf..258736cf61c3 100644 --- a/java-video-transcoder/README.md +++ b/java-video-transcoder/README.md @@ -14,6 +14,7 @@ Java idiomatic client for [Video Transcoder][product-docs]. 
If you are using Maven, add this to your pom.xml file: + ```xml @@ -34,6 +35,7 @@ If you are using SBT, add this to your dependencies: ```Scala libraryDependencies += "com.google.cloud" % "google-cloud-video-transcoder" % "1.11.0" ``` + ## Authentication diff --git a/java-vision/README.md b/java-vision/README.md index a27fc88d1d92..3a3186f8d574 100644 --- a/java-vision/README.md +++ b/java-vision/README.md @@ -14,6 +14,7 @@ Java idiomatic client for [Cloud Vision][product-docs]. If you are using Maven, add this to your pom.xml file: + ```xml @@ -34,6 +35,7 @@ If you are using SBT, add this to your dependencies: ```Scala libraryDependencies += "com.google.cloud" % "google-cloud-vision" % "3.10.0" ``` + ## Authentication diff --git a/java-vmmigration/README.md b/java-vmmigration/README.md index 225d9c165dbe..20b9717c62c9 100644 --- a/java-vmmigration/README.md +++ b/java-vmmigration/README.md @@ -14,6 +14,7 @@ Java idiomatic client for [VM Migration][product-docs]. If you are using Maven, add this to your pom.xml file: + ```xml @@ -34,6 +35,7 @@ If you are using SBT, add this to your dependencies: ```Scala libraryDependencies += "com.google.cloud" % "google-cloud-vmmigration" % "1.12.0" ``` + ## Authentication diff --git a/java-vmwareengine/README.md b/java-vmwareengine/README.md index e917af50ffc5..8cab51774aae 100644 --- a/java-vmwareengine/README.md +++ b/java-vmwareengine/README.md @@ -17,6 +17,7 @@ Java idiomatic client for [Google Cloud VMware Engine][product-docs]. If you are using Maven, add this to your pom.xml file: + ```xml @@ -37,6 +38,7 @@ If you are using SBT, add this to your dependencies: ```Scala libraryDependencies += "com.google.cloud" % "google-cloud-vmwareengine" % "0.6.0" ``` + ## Authentication diff --git a/java-vpcaccess/README.md b/java-vpcaccess/README.md index f46a86f3d0cf..9c2e5da5a270 100644 --- a/java-vpcaccess/README.md +++ b/java-vpcaccess/README.md @@ -14,6 +14,7 @@ Java idiomatic client for [Serverless VPC Access][product-docs]. If you are using Maven, add this to your pom.xml file: + ```xml @@ -34,6 +35,7 @@ If you are using SBT, add this to your dependencies: ```Scala libraryDependencies += "com.google.cloud" % "google-cloud-vpcaccess" % "2.13.0" ``` + ## Authentication diff --git a/java-websecurityscanner/README.md b/java-websecurityscanner/README.md index 88639a737496..790134e0f685 100644 --- a/java-websecurityscanner/README.md +++ b/java-websecurityscanner/README.md @@ -14,6 +14,7 @@ Java idiomatic client for [Cloud Security Scanner][product-docs]. If you are using Maven, add this to your pom.xml file: + ```xml @@ -34,6 +35,7 @@ If you are using SBT, add this to your dependencies: ```Scala libraryDependencies += "com.google.cloud" % "google-cloud-websecurityscanner" % "2.12.0" ``` + ## Authentication diff --git a/java-workflow-executions/README.md b/java-workflow-executions/README.md index fc01dc0fc427..a31eb25a4062 100644 --- a/java-workflow-executions/README.md +++ b/java-workflow-executions/README.md @@ -14,6 +14,7 @@ Java idiomatic client for [Cloud Workflow Executions][product-docs]. 
If you are using Maven, add this to your pom.xml file: + ```xml @@ -34,6 +35,7 @@ If you are using SBT, add this to your dependencies: ```Scala libraryDependencies += "com.google.cloud" % "google-cloud-workflow-executions" % "2.12.0" ``` + ## Authentication diff --git a/java-workflows/README.md b/java-workflows/README.md index 11e2a79c2f3a..1f7cb9523834 100644 --- a/java-workflows/README.md +++ b/java-workflows/README.md @@ -14,6 +14,7 @@ Java idiomatic client for [Cloud Workflows][product-docs]. If you are using Maven, add this to your pom.xml file: + ```xml @@ -34,6 +35,7 @@ If you are using SBT, add this to your dependencies: ```Scala libraryDependencies += "com.google.cloud" % "google-cloud-workflows" % "2.12.0" ``` + ## Authentication diff --git a/pom.xml b/pom.xml index 0d32610ff6eb..3d2142264c08 100644 --- a/pom.xml +++ b/pom.xml @@ -39,6 +39,7 @@ java-beyondcorp-appgateways java-beyondcorp-clientconnectorservices java-beyondcorp-clientgateways + java-biglake java-bigqueryconnection java-bigquery-data-exchange java-bigquerydatapolicy diff --git a/versions.txt b/versions.txt index 16bd1ce7573a..9c6a2f7cfbf1 100644 --- a/versions.txt +++ b/versions.txt @@ -624,3 +624,6 @@ proto-google-cloud-alloydb-v1alpha:0.1.0:0.2.0-SNAPSHOT grpc-google-cloud-alloydb-v1beta:0.1.0:0.2.0-SNAPSHOT grpc-google-cloud-alloydb-v1:0.1.0:0.2.0-SNAPSHOT grpc-google-cloud-alloydb-v1alpha:0.1.0:0.2.0-SNAPSHOT +google-cloud-biglake:0.0.0:0.0.1-SNAPSHOT +proto-google-cloud-biglake-v1alpha1:0.0.0:0.0.1-SNAPSHOT +grpc-google-cloud-biglake-v1alpha1:0.0.0:0.0.1-SNAPSHOT From cbc9a9b9a7196fef7cdc27d2005ee24d0a714954 Mon Sep 17 00:00:00 2001 From: Owl Bot Date: Thu, 16 Mar 2023 17:16:40 +0000 Subject: [PATCH 2/2] =?UTF-8?q?=F0=9F=A6=89=20Updates=20from=20OwlBot=20po?= =?UTF-8?q?st-processor?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --- java-biglake/README.md | 2 -- java-biglake/google-cloud-biglake-bom/pom.xml | 2 +- java-biglake/google-cloud-biglake/pom.xml | 4 ++-- java-biglake/pom.xml | 3 ++- 4 files changed, 5 insertions(+), 6 deletions(-) diff --git a/java-biglake/README.md b/java-biglake/README.md index 8efc9dbf8cba..3d3a4f6597da 100644 --- a/java-biglake/README.md +++ b/java-biglake/README.md @@ -17,7 +17,6 @@ Java idiomatic client for [BigLake][product-docs]. If you are using Maven, add this to your pom.xml file: - ```xml @@ -38,7 +37,6 @@ If you are using SBT, add this to your dependencies: ```Scala libraryDependencies += "com.google.cloud" % "google-cloud-biglake" % "0.0.0" ``` - ## Authentication diff --git a/java-biglake/google-cloud-biglake-bom/pom.xml b/java-biglake/google-cloud-biglake-bom/pom.xml index f74616604375..c9f3f3705e3e 100644 --- a/java-biglake/google-cloud-biglake-bom/pom.xml +++ b/java-biglake/google-cloud-biglake-bom/pom.xml @@ -1,4 +1,4 @@ - + 4.0.0 com.google.cloud diff --git a/java-biglake/google-cloud-biglake/pom.xml b/java-biglake/google-cloud-biglake/pom.xml index 35dec0603886..d8cbc21f82b2 100644 --- a/java-biglake/google-cloud-biglake/pom.xml +++ b/java-biglake/google-cloud-biglake/pom.xml @@ -1,4 +1,4 @@ - + 4.0.0 com.google.cloud @@ -110,4 +110,4 @@ test - \ No newline at end of file + diff --git a/java-biglake/pom.xml b/java-biglake/pom.xml index 13cdc2ede00c..d7a71aa5f66a 100644 --- a/java-biglake/pom.xml +++ b/java-biglake/pom.xml @@ -50,4 +50,5 @@ proto-google-cloud-biglake-v1alpha1 google-cloud-biglake-bom - + +
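The generated update snippets in this patch build empty `Database`/`Table` messages and empty field masks, so they compile but do not change anything. The sketch below is hand-written, not part of the generated output, and shows one way the same v1alpha1 surface could be combined: list the tables of a database and update each one with an explicit field mask. The project/location/catalog/database IDs and the `hive_options` mask path are assumptions; substitute values that exist in your metastore.

```java
package com.example.biglake;

import com.google.cloud.bigquery.biglake.v1alpha1.DatabaseName;
import com.google.cloud.bigquery.biglake.v1alpha1.MetastoreServiceClient;
import com.google.cloud.bigquery.biglake.v1alpha1.Table;
import com.google.protobuf.FieldMask;

/** Hand-written sketch; resource IDs and the mask path are placeholders, not real values. */
public class UpdateAllTablesSketch {

  public static void main(String[] args) throws Exception {
    // Placeholder IDs -- replace with a real project, location, catalog, and database.
    String parent = DatabaseName.of("my-project", "us", "my-catalog", "my-db").toString();

    try (MetastoreServiceClient client = MetastoreServiceClient.create()) {
      for (Table table : client.listTables(parent).iterateAll()) {
        // Only fields named in the mask are written; "hive_options" is assumed to be the
        // Table field backing HiveTableOptions -- verify against the generated proto.
        FieldMask updateMask = FieldMask.newBuilder().addPaths("hive_options").build();
        Table updated = client.updateTable(table, updateMask);
        System.out.println(updated);
      }
    }
  }
}
```

This mirrors the `listTables(String)` and `updateTable(Table, FieldMask)` overloads exercised by the generated samples above; only the resource IDs and mask contents are invented for illustration.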