Skip to content

Commit

Permalink
merged master to branch
Browse files Browse the repository at this point in the history
  • Loading branch information
leyasalazar committed Aug 29, 2023
2 parents 7c998c1 + 8cee583 commit 720a4e2
Show file tree
Hide file tree
Showing 36 changed files with 2,774 additions and 3,925 deletions.
1 change: 1 addition & 0 deletions .github/workflows/labeler.yml
Original file line number Diff line number Diff line change
Expand Up @@ -42,3 +42,4 @@ jobs:
"go.mod"
"go.sum"
".yarn/releases/*"
"devtools/pgdump-lite/pgd/*"
4 changes: 2 additions & 2 deletions Makefile
Original file line number Diff line number Diff line change
Expand Up @@ -250,14 +250,14 @@ smoketest:
test-migrations: bin/goalert
(cd test/smoke && go test -run TestMigrations)

db-schema: bin/goalert bin/psql-lite
db-schema: bin/goalert bin/psql-lite bin/pgdump-lite
./bin/psql-lite -d "$(DB_URL)" -c 'DROP DATABASE IF EXISTS mk_dump_schema; CREATE DATABASE mk_dump_schema;'
./bin/goalert migrate --db-url "$(dir $(DB_URL))mk_dump_schema"
echo '-- This file is auto-generated by "make db-schema"; DO NOT EDIT' > migrate/schema.sql
echo "-- DATA=$(shell $(SHA_CMD) migrate/migrations/* | sort | $(SHA_CMD))" >> migrate/schema.sql
echo "-- DISK=$(shell ls migrate/migrations | sort | $(SHA_CMD))" >> migrate/schema.sql
echo "-- PSQL=$$(psql -d '$(dir $(DB_URL))mk_dump_schema' -XqAtc 'select id from gorp_migrations order by id' | sort | $(SHA_CMD))" >> migrate/schema.sql
pg_dump -d "$(dir $(DB_URL))mk_dump_schema" -sO >> migrate/schema.sql
./bin/pgdump-lite -d "$(dir $(DB_URL))mk_dump_schema" -s >> migrate/schema.sql
./bin/psql-lite -d "$(DB_URL)" -c 'DROP DATABASE IF EXISTS mk_dump_schema;'

tools:
Expand Down
26 changes: 19 additions & 7 deletions devtools/pgdump-lite/cmd/pgdump-lite/main.go
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,7 @@ func main() {
file := flag.String("f", "", "Output file (default is stdout).")
db := flag.String("d", os.Getenv("DBURL"), "DB URL") // use same env var as pg_dump
dataOnly := flag.Bool("a", false, "dump only the data, not the schema")
schemaOnly := flag.Bool("s", false, "dump only the schema, no data")
skip := flag.String("T", "", "skip tables")
flag.Parse()

Expand All @@ -42,14 +43,25 @@ func main() {
}
defer conn.Close(ctx)

err = pgdump.DumpData(ctx, conn, out, strings.Split(*skip, ","))
if err != nil {
log.Fatalln("ERROR: dump data:", err)
if !*dataOnly {
s, err := pgdump.DumpSchema(ctx, conn)
if err != nil {
log.Fatalln("ERROR: dump data:", err)
}
_, err = out.WriteString("--\n-- pgdump-lite database dump\n--\n\n")
if err != nil {
log.Fatalln("ERROR: write header:", err)
}
_, err = out.WriteString(s.String())
if err != nil {
log.Fatalln("ERROR: write schema:", err)
}
}

if *dataOnly {
return
if !*schemaOnly {
err = pgdump.DumpData(ctx, conn, out, strings.Split(*skip, ","))
if err != nil {
log.Fatalln("ERROR: dump data:", err)
}
}

// TODO: dump schema
}
336 changes: 336 additions & 0 deletions devtools/pgdump-lite/dumpschema.go
Original file line number Diff line number Diff line change
@@ -0,0 +1,336 @@
package pgdump

import (
"context"
"fmt"
"strings"

"github.com/jackc/pgx/v4"
"github.com/target/goalert/devtools/pgdump-lite/pgd"
)

// Schema is an in-memory representation of a database schema: every
// object kind pgdump-lite knows how to dump, grouped by kind.
type Schema struct {
	Extensions []Extension
	Functions  []Function
	Tables     []Table
	Enums      []Enum
	Sequences  []Sequence
}

// String renders the schema as SQL, one commented section per object
// kind, in the order: extensions, enums, functions, tables, sequences.
// Each object is followed by a blank line.
func (s Schema) String() string {
	var b strings.Builder

	// section writes a header followed by each item's SQL text.
	section := func(header string, n int, item func(int) string) {
		b.WriteString(header)
		for i := 0; i < n; i++ {
			b.WriteString(item(i))
			b.WriteString("\n\n")
		}
	}

	section("-- Extensions\n\n", len(s.Extensions), func(i int) string { return s.Extensions[i].String() })
	section("-- Enums\n\n", len(s.Enums), func(i int) string { return s.Enums[i].String() })
	section("-- Functions\n\n", len(s.Functions), func(i int) string { return s.Functions[i].String() })
	section("-- Tables\n\n", len(s.Tables), func(i int) string { return s.Tables[i].String() })
	section("-- Sequences\n\n", len(s.Sequences), func(i int) string { return s.Sequences[i].String() })

	return b.String()
}

// Index is a single table index, captured as the complete index
// definition statement reported by the database.
type Index struct {
	Name string
	Def  string
}

// String renders the index definition as executable SQL, adding the
// terminating semicolon.
func (idx Index) String() string {
	return idx.Def + ";"
}

// Trigger is a single table trigger, captured as the complete trigger
// definition statement reported by the database.
type Trigger struct {
	Name string
	Def  string
}

// String renders the trigger definition as executable SQL, adding the
// terminating semicolon.
func (t Trigger) String() string {
	return t.Def + ";"
}

// Sequence describes a database sequence and its generation parameters.
type Sequence struct {
	Name       string
	StartValue int64
	Increment  int64
	MinValue   int64
	MaxValue   int64
	Cache      int64

	// OwnedBy, when non-empty, is the "table.column" that owns this
	// sequence; it is rendered as an OWNED BY clause.
	OwnedBy string
}

// String renders the sequence as a CREATE SEQUENCE statement. If the
// sequence is owned by a column, an OWNED BY clause is appended.
func (s Sequence) String() string {
	def := fmt.Sprintf("CREATE SEQUENCE %s\n\tSTART WITH %d\n\tINCREMENT BY %d\n\tMINVALUE %d\n\tMAXVALUE %d\n\tCACHE %d",
		s.Name, s.StartValue, s.Increment, s.MinValue, s.MaxValue, s.Cache)

	if s.OwnedBy == "" {
		return def + ";"
	}

	// BUG FIX: the format previously read "OWNED BY%s", which glued the
	// keyword to the owner (e.g. "OWNED BYfoo.bar") and produced invalid SQL.
	return fmt.Sprintf("%s\n\tOWNED BY %s;",
		def,
		s.OwnedBy,
	)
}

// Extension is an installed database extension, identified by name.
type Extension struct {
	Name string
}

// String renders the extension as an idempotent CREATE EXTENSION
// statement.
func (e Extension) String() string {
	return "CREATE EXTENSION IF NOT EXISTS " + e.Name + ";"
}

// Function is a database function, captured as the complete function
// definition statement reported by the database.
type Function struct {
	Name string
	Def  string
}

// String renders the function definition as executable SQL, adding the
// terminating semicolon.
func (f Function) String() string {
	return f.Def + ";"
}

// Enum is a user-defined enumerated type and its ordered labels.
type Enum struct {
	Name   string
	Values []string
}

// String renders the enum as a CREATE TYPE ... AS ENUM statement with
// one quoted label per line.
func (e Enum) String() string {
	var b strings.Builder
	b.WriteString("CREATE TYPE ")
	b.WriteString(e.Name)
	b.WriteString(" AS ENUM (\n\t'")
	b.WriteString(strings.Join(e.Values, "',\n\t'"))
	b.WriteString("'\n);")
	return b.String()
}

// Table is a single database table together with everything attached to
// it: columns, constraints, indexes, triggers, and owned sequences.
type Table struct {
	Name string

	Columns     []Column
	Constraints []Constraint
	Indexes     []Index
	Triggers    []Trigger
	Sequences   []Sequence
}

// String renders the table as a CREATE TABLE statement (columns first,
// then constraints), followed by its indexes and triggers, each group
// preceded by a blank line when non-empty.
func (t Table) String() string {
	defs := make([]string, 0, len(t.Columns)+len(t.Constraints))
	for _, col := range t.Columns {
		defs = append(defs, col.String())
	}
	for _, con := range t.Constraints {
		defs = append(defs, con.String())
	}

	var b strings.Builder
	b.WriteString("CREATE TABLE ")
	b.WriteString(t.Name)
	b.WriteString(" (\n\t")
	b.WriteString(strings.Join(defs, ",\n\t"))
	b.WriteString("\n);\n")

	if len(t.Indexes) > 0 {
		b.WriteString("\n")
	}
	for _, idx := range t.Indexes {
		b.WriteString(idx.String())
		b.WriteString("\n")
	}

	if len(t.Triggers) > 0 {
		b.WriteString("\n")
	}
	for _, trg := range t.Triggers {
		b.WriteString(trg.String())
		b.WriteString("\n")
	}
	return b.String()
}

// Constraint is a named table constraint and its definition body.
type Constraint struct {
	Name string
	Def  string
}

// String renders the constraint as a table-level CONSTRAINT clause
// (no trailing semicolon; it is joined into a CREATE TABLE body).
func (c Constraint) String() string {
	return "CONSTRAINT " + c.Name + " " + c.Def
}

// Column is a single table column: its name, SQL type, nullability, and
// optional default expression.
type Column struct {
	Name         string
	Type         string
	NotNull      bool
	DefaultValue string
}

// String renders the column as a CREATE TABLE column clause, appending
// DEFAULT (when set) before NOT NULL (when set).
func (c Column) String() string {
	var b strings.Builder
	b.WriteString(c.Name)
	b.WriteString(" ")
	b.WriteString(c.Type)
	if c.DefaultValue != "" {
		b.WriteString(" DEFAULT ")
		b.WriteString(c.DefaultValue)
	}
	if c.NotNull {
		b.WriteString(" NOT NULL")
	}
	return b.String()
}

// DumpSchema queries the connected database's catalogs (via the generated
// pgd query helpers) and assembles a Schema describing its extensions,
// enums, functions, sequences, and tables.
//
// Sequences owned by a table column are attached to that table's
// Sequences list; only free-standing sequences appear at the top level.
func DumpSchema(ctx context.Context, conn *pgx.Conn) (*Schema, error) {
	db := pgd.New(conn)

	var s Schema

	// list extensions
	exts, err := db.ListExtensions(ctx)
	if err != nil {
		return nil, fmt.Errorf("list extensions: %w", err)
	}
	for _, e := range exts {
		s.Extensions = append(s.Extensions, Extension{Name: e.ExtName})
	}

	// list enums
	enums, err := db.ListEnums(ctx)
	if err != nil {
		// NOTE(review): wrap message says "list types" — consider "list
		// enums" for consistency with the other messages here.
		return nil, fmt.Errorf("list types: %w", err)
	}
	for _, e := range enums {
		s.Enums = append(s.Enums, Enum{
			Name: e.EnumName,
			// EnumValues arrives as one comma-joined string; assumes enum
			// labels never contain commas — TODO confirm against the query.
			Values: strings.Split(string(e.EnumValues), ","),
		})
	}

	// list functions
	funcs, err := db.ListFunctions(ctx)
	if err != nil {
		return nil, fmt.Errorf("list functions: %w", err)
	}
	for _, f := range funcs {
		s.Functions = append(s.Functions, Function{
			Name: f.FunctionName,
			Def:  f.FuncDef,
		})
	}

	// List sequences once; free-standing sequences (no owning table) are
	// recorded here, table-owned ones are attached to their table below.
	seqs, err := db.ListSequences(ctx)
	if err != nil {
		return nil, fmt.Errorf("list sequences: %w", err)
	}
	for _, seq := range seqs {
		if seq.TableName != "" {
			continue
		}
		s.Sequences = append(s.Sequences, Sequence{
			Name:       seq.SequenceName,
			StartValue: seq.StartValue.Int64,
			Increment:  seq.Increment.Int64,
			MinValue:   seq.MinValue.Int64,
			MaxValue:   seq.MaxValue.Int64,
			Cache:      seq.Cache.Int64,
		})
	}

	cols, err := db.ListColumns(ctx)
	if err != nil {
		return nil, fmt.Errorf("list columns: %w", err)
	}

	// Derive the distinct table names from the column result set.
	var tables []string
	for _, c := range cols {
		// these are always sorted by schema, then table
		if len(tables) == 0 || tables[len(tables)-1] != c.TableName {
			tables = append(tables, c.TableName)
		}
	}

	cstr, err := db.ListConstraints(ctx)
	if err != nil {
		return nil, fmt.Errorf("list constraints: %w", err)
	}
	idxs, err := db.ListIndexes(ctx)
	if err != nil {
		return nil, fmt.Errorf("list indexes: %w", err)
	}
	trgs, err := db.ListTriggers(ctx)
	if err != nil {
		return nil, fmt.Errorf("list triggers: %w", err)
	}

	// Assemble each table by filtering the flat result sets. Quadratic in
	// tables × rows, which is fine for schema-sized inputs.
	for _, tbl := range tables {
		t := Table{Name: tbl}
		for _, c := range cols {
			if c.TableName != tbl {
				continue
			}
			t.Columns = append(t.Columns, Column{
				Name:         c.ColumnName,
				Type:         c.ColumnType,
				NotNull:      c.NotNull,
				DefaultValue: c.ColumnDefault,
			})
		}

		for _, c := range cstr {
			if c.TableName != tbl {
				continue
			}
			if strings.HasPrefix(c.ConstraintDefinition, "TRIGGER") {
				// skip triggers
				continue
			}

			t.Constraints = append(t.Constraints, Constraint{
				Name: c.ConstraintName,
				Def:  c.ConstraintDefinition,
			})
		}

		for _, idx := range idxs {
			if idx.TableName != tbl {
				continue
			}
			t.Indexes = append(t.Indexes, Index{
				Name: idx.IndexName,
				Def:  idx.IndexDefinition,
			})
		}

		for _, trg := range trgs {
			if trg.TableName != tbl {
				continue
			}
			t.Triggers = append(t.Triggers, Trigger{
				Name: trg.TriggerName,
				Def:  trg.TriggerDefinition,
			})
		}

		// Attach the sequences owned by this table (skipped in the
		// top-level sequence loop above).
		for _, seq := range seqs {
			if seq.TableName != tbl {
				continue
			}

			t.Sequences = append(t.Sequences, Sequence{
				Name:       seq.SequenceName,
				StartValue: seq.StartValue.Int64,
				Increment:  seq.Increment.Int64,
				MinValue:   seq.MinValue.Int64,
				MaxValue:   seq.MaxValue.Int64,
				Cache:      seq.Cache.Int64,
				OwnedBy:    seq.TableName + "." + seq.ColumnName,
			})
		}

		s.Tables = append(s.Tables, t)
	}

	return &s, nil
}
Loading

0 comments on commit 720a4e2

Please sign in to comment.