133796: drtprod: add create and destroy yamls for pcr and restore clusters r=nameisbhaskar a=vidit-bhat

This PR adds the YAML configurations for the scale-test PCR and restore clusters.

Fixes: #77376
Release note: None
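
For orientation, a minimal sketch of how these configs would typically be driven. The drtprod subcommand shown here is an assumption used for illustration only and is not verified by this commit; the config paths are the ones added below.

    # Assumed invocation pattern: hand each config to the drtprod binary (subcommand name not verified here).
    drtprod execute pkg/cmd/drtprod/configs/drt_scale_pcr.yaml              # provision and configure the PCR cluster
    drtprod execute pkg/cmd/drtprod/configs/drt_scale_pcr_destroy.yaml      # tear the same cluster back down
    drtprod execute pkg/cmd/drtprod/configs/drt_scale_restore.yaml          # restore cluster plus its workload cluster
    drtprod execute pkg/cmd/drtprod/configs/drt_scale_restore_destroy.yaml  # destroy both restore clusters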

133850: sql/catalog: fix panic inside sequence expression parsing r=fqazi a=fqazi

Previously, if the correct overloads were not found for sequence builtins, the server could panic. This could happen when rewriting a CREATE TABLE expression that contained an invalid sequence builtin call. To address this, this patch updates the sequence logic to return the error instead of panicking.

Fixes: #133399

Release note (bug fix): Fixed a panic inside CREATE TABLE AS when sequence builtin expressions had invalid function overloads.
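
As a concrete illustration, drawn from the regression test added in this commit, the following now fails with a pgcode 42804 error ("could not find matching function overload for given arguments") instead of panicking, because nextval has no overload taking three string arguments:

    CREATE TABLE v_133399 (c01 INT);

    -- Overload resolution for nextval('abc', 'abc', 'abc') fails during the
    -- CREATE TABLE ... AS rewrite; with this fix the error is returned to the
    -- client rather than crashing the node.
    CREATE TABLE t_133399 AS (
      SELECT * FROM v_133399
      WINDOW window_name AS (ROWS c01 BETWEEN nextval('abc', 'abc', 'abc') AND c01 PRECEDING)
    );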

Co-authored-by: Vidit Bhat <[email protected]>
Co-authored-by: Faizan Qazi <[email protected]>
3 people committed Oct 30, 2024
3 parents 6ddc9e8 + 8256ac3 + ad1f8ab commit d4f2a65
Showing 7 changed files with 254 additions and 6 deletions.
4 changes: 2 additions & 2 deletions pkg/cmd/drtprod/configs/drt_scale.yaml
@@ -1,5 +1,5 @@
-# Yaml for creating and configuring the drt-scale cluster. This also configures the datadog.
-# Build the roachprod and roachtest binaries (using --cross) before running this script and delete any certs folder
+# Yaml for creating and configuring the drt-scale cluster. This also configures Datadog.
+# Build the roachprod and roachtest binaries (using --cross) before running this script
 environment:
   ROACHPROD_GCE_DEFAULT_SERVICE_ACCOUNT: [email protected]
   ROACHPROD_DNS: drt.crdb.io
58 changes: 58 additions & 0 deletions pkg/cmd/drtprod/configs/drt_scale_pcr.yaml
@@ -0,0 +1,58 @@
# Yaml for creating and configuring the drt-scale-pcr cluster. This also configures Datadog.
# Build the roachprod and roachtest binaries (using --cross) before running this script
environment:
  ROACHPROD_GCE_DEFAULT_SERVICE_ACCOUNT: [email protected]
  ROACHPROD_DNS: drt.crdb.io
  ROACHPROD_GCE_DNS_DOMAIN: drt.crdb.io
  ROACHPROD_GCE_DNS_ZONE: drt
  ROACHPROD_GCE_DEFAULT_PROJECT: cockroach-drt
  CLUSTER: drt-scale-pcr
  CLUSTER_NODES: 150

targets:
  # crdb cluster specs
  - target_name: $CLUSTER
    steps:
      - command: create
        args:
          - $CLUSTER
        flags:
          clouds: gce
          gce-managed: true
          gce-enable-multiple-stores: true
          gce-zones: "us-central1-a"
          nodes: $CLUSTER_NODES
          gce-machine-type: n2-standard-16
          local-ssd: false
          gce-pd-volume-size: 375
          gce-pd-volume-type: pd-ssd
          gce-pd-volume-count: 4
          os-volume-size: 100
          username: drt
          lifetime: 8760h
          gce-image: "ubuntu-2204-jammy-v20240319"
      - command: sync
        flags:
          clouds: gce
      - command: stage
        args:
          - $CLUSTER
          - cockroach
      - script: "pkg/cmd/drtprod/scripts/setup_datadog_cluster"
      - command: start
        args:
          - $CLUSTER
          - "--binary"
          - "./cockroach"
        flags:
          # add flag to set provisioned throughput on each store according to their cloud provider limits
          enable-fluent-sink: true
          store-count: 4
          args: --wal-failover=among-stores
          restart: false
          sql-port: 26257
      - command: run
        args:
          - $CLUSTER
          - --
          - "sudo systemctl unmask cron.service ; sudo systemctl enable cron.service ; echo \"crontab -l ; echo '@reboot sleep 100 && ~/cockroach.sh' | crontab -\" > t.sh ; sh t.sh ; rm t.sh"
15 changes: 15 additions & 0 deletions pkg/cmd/drtprod/configs/drt_scale_pcr_destroy.yaml
@@ -0,0 +1,15 @@
# Yaml for destroying the drt-scale-pcr cluster.
environment:
  ROACHPROD_GCE_DEFAULT_SERVICE_ACCOUNT: [email protected]
  ROACHPROD_DNS: drt.crdb.io
  ROACHPROD_GCE_DNS_DOMAIN: drt.crdb.io
  ROACHPROD_GCE_DNS_ZONE: drt
  ROACHPROD_GCE_DEFAULT_PROJECT: cockroach-drt
  CLUSTER: drt-scale-pcr

targets:
  - target_name: $CLUSTER
    steps:
      - command: destroy
        args:
          - $CLUSTER
141 changes: 141 additions & 0 deletions pkg/cmd/drtprod/configs/drt_scale_restore.yaml
@@ -0,0 +1,141 @@
# Yaml for creating and configuring the drt-scale-restore cluster. This also configures Datadog.
# Build the roachprod and roachtest binaries (using --cross) before running this script
environment:
  ROACHPROD_GCE_DEFAULT_SERVICE_ACCOUNT: [email protected]
  ROACHPROD_DNS: drt.crdb.io
  ROACHPROD_GCE_DNS_DOMAIN: drt.crdb.io
  ROACHPROD_GCE_DNS_ZONE: drt
  ROACHPROD_GCE_DEFAULT_PROJECT: cockroach-drt
  CLUSTER: drt-scale-restore
  WORKLOAD_CLUSTER: workload-scale-restore
  CLUSTER_NODES: 60
  WORKLOAD_NODES: 1

targets:
  # crdb cluster specs
  - target_name: $CLUSTER
    steps:
      - command: create
        args:
          - $CLUSTER
        flags:
          clouds: gce
          gce-managed: true
          gce-enable-multiple-stores: true
          gce-zones: "us-central1-a"
          nodes: $CLUSTER_NODES
          gce-machine-type: n2-standard-16
          local-ssd: false
          gce-pd-volume-size: 375
          gce-pd-volume-type: pd-ssd
          gce-pd-volume-count: 4
          os-volume-size: 100
          username: drt
          lifetime: 8760h
          gce-image: "ubuntu-2204-jammy-v20240319"
      - command: sync
        flags:
          clouds: gce
      - command: stage
        args:
          - $CLUSTER
          - cockroach
      - script: "pkg/cmd/drtprod/scripts/setup_datadog_cluster"
      - command: start
        args:
          - $CLUSTER
          - "--binary"
          - "./cockroach"
        flags:
          # add flag to set provisioned throughput on each store according to their cloud provider limits
          enable-fluent-sink: true
          store-count: 4
          args: --wal-failover=among-stores
          restart: false
          sql-port: 26257
      - command: run
        args:
          - $CLUSTER
          - --
          - "sudo systemctl unmask cron.service ; sudo systemctl enable cron.service ; echo \"crontab -l ; echo '@reboot sleep 100 && ~/cockroach.sh' | crontab -\" > t.sh ; sh t.sh ; rm t.sh"
  # workload cluster specs
  - target_name: $WORKLOAD_CLUSTER
    steps:
      - command: create
        args:
          - $WORKLOAD_CLUSTER
        flags:
          clouds: gce
          gce-zones: "us-central1-a"
          nodes: $WORKLOAD_NODES
          gce-machine-type: n2-standard-8
          os-volume-size: 100
          username: workload
          lifetime: 8760h
        on_rollback:
          - command: destroy
            args:
              - $WORKLOAD_CLUSTER
      - command: sync
        flags:
          clouds: gce
      - command: stage
        args:
          - $WORKLOAD_CLUSTER
          - cockroach
      - command: stage
        args:
          - $WORKLOAD_CLUSTER
          - workload
      - script: "pkg/cmd/drtprod/scripts/setup_datadog_workload"
  - target_name: post_tasks
    dependent_targets:
      - $CLUSTER
      - $WORKLOAD_CLUSTER
    steps:
      - script: rm
        args:
          - -rf
          - certs-$CLUSTER
      - command: get
        args:
          - $CLUSTER:1
          - certs
          - certs-$CLUSTER
      - command: put
        args:
          - $WORKLOAD_CLUSTER
          - certs-$CLUSTER
          - certs
      - command: ssh
        args:
          - $WORKLOAD_CLUSTER
          - --
          - chmod
          - 600
          - './certs/*'
      - command: put
        args:
          - $WORKLOAD_CLUSTER
          - artifacts/roachprod
          - roachprod
      - script: "pkg/cmd/drtprod/scripts/tpcc_init.sh"
        args:
          - cct_tpcc_3k # suffix added to script name tpcc_init_cct_tpcc_3k.sh
          - false # determines whether to execute the script immediately on workload node
        flags:
          warehouses: 3000
          db: cct_tpcc
      - script: "pkg/cmd/drtprod/scripts/generate_tpcc_run.sh"
        args:
          - cct_tpcc_3k # suffix added to script name tpcc_run_3k.sh
          - false # determines whether to execute the script immediately on workload node
        flags:
          db: cct_tpcc
          warehouses: 3000
          max-rate: 100
          workers: 50
          conns: 50
          duration: 12h
          ramp: 10m
          wait: 0
21 changes: 21 additions & 0 deletions pkg/cmd/drtprod/configs/drt_scale_restore_destroy.yaml
@@ -0,0 +1,21 @@
# Yaml for destroying the drt-scale-restore cluster.
environment:
  ROACHPROD_GCE_DEFAULT_SERVICE_ACCOUNT: [email protected]
  ROACHPROD_DNS: drt.crdb.io
  ROACHPROD_GCE_DNS_DOMAIN: drt.crdb.io
  ROACHPROD_GCE_DNS_ZONE: drt
  ROACHPROD_GCE_DEFAULT_PROJECT: cockroach-drt
  CLUSTER: drt-scale-restore
  WORKLOAD_CLUSTER: workload-scale-restore

targets:
  - target_name: $CLUSTER
    steps:
      - command: destroy
        args:
          - $CLUSTER
  - target_name: $WORKLOAD_CLUSTER
    steps:
      - command: destroy
        args:
          - $WORKLOAD_CLUSTER
8 changes: 4 additions & 4 deletions pkg/sql/catalog/seqexpr/sequence.go
@@ -80,10 +80,10 @@ func GetSequenceFromFunc(funcExpr *tree.FuncExpr) (*SeqIdentifier, error) {
         if len(funcExpr.Exprs) == overload.Types.Length() {
             paramTypes, ok := overload.Types.(tree.ParamTypes)
             if !ok {
-                panic(pgerror.Newf(
+                return nil, pgerror.Newf(
                     pgcode.InvalidFunctionDefinition,
                     "%s has invalid argument types", funcExpr.Func.String(),
-                ))
+                )
             }
             found = true
             for i := 0; i < len(paramTypes); i++ {
@@ -98,10 +98,10 @@
             }
         }
         if !found {
-            panic(pgerror.New(
+            return nil, pgerror.New(
                 pgcode.DatatypeMismatch,
                 "could not find matching function overload for given arguments",
-            ))
+            )
         }
     }
     return nil, nil
13 changes: 13 additions & 0 deletions pkg/sql/logictest/testdata/logic_test/create_table
@@ -1097,3 +1097,16 @@ statement ok
SET inject_retry_errors_enabled=false

subtest end


# Addresses a bug where parsing nextval expressions with extra values could
# end up with a panic when rewriting sequence expressions.
subtest 133399

statement ok
CREATE TABLE v_133399 (c01 INT);

statement error pgcode 42804 could not find matching function overload for given arguments
CREATE TABLE t_133399 AS (SELECT * FROM v_133399 WINDOW window_name AS (ROWS c01 BETWEEN nextval ('abc', 'abc', 'abc') AND c01 PRECEDING));

subtest end
