Skip to content

Commit

Permalink
Merge pull request #1006 from fishtown-analytics/fix/bigquery-underscores
Browse files Browse the repository at this point in the history

Include datasets with underscores when listing BigQuery datasets
  • Loading branch information
beckjake authored Sep 18, 2018
2 parents 5fc97bc + c11cd92 commit 2a8f0b8
Show file tree
Hide file tree
Showing 3 changed files with 37 additions and 11 deletions.
5 changes: 3 additions & 2 deletions dbt/adapters/bigquery/impl.py
Original file line number Diff line number Diff line change
Expand Up @@ -495,7 +495,7 @@ def get_existing_schemas(cls, config, model_name=None):
client = conn.handle

with cls.exception_handler(config, 'list dataset', model_name):
all_datasets = client.list_datasets()
all_datasets = client.list_datasets(include_all=True)
return [ds.dataset_id for ds in all_datasets]

@classmethod
Expand Down Expand Up @@ -535,9 +535,10 @@ def get_dbt_columns_from_bq_table(cls, table):
@classmethod
def check_schema_exists(cls, config, schema, model_name=None):
conn = cls.get_connection(config, model_name)
client = conn.handle

with cls.exception_handler(config, 'get dataset', model_name):
all_datasets = conn.handle.list_datasets()
all_datasets = client.list_datasets(include_all=True)
return any([ds.dataset_id == schema for ds in all_datasets])

@classmethod
Expand Down
7 changes: 0 additions & 7 deletions dbt/node_runners.py
Original file line number Diff line number Diff line change
Expand Up @@ -280,13 +280,6 @@ def call_already_exists(schema, table):
"already_exists": call_already_exists,
}

@classmethod
def create_schemas(cls, config, adapter, manifest):
required_schemas = cls.get_model_schemas(manifest)
existing_schemas = set(adapter.get_existing_schemas(config))
for schema in (required_schemas - existing_schemas):
adapter.create_schema(config, schema)


class ModelRunner(CompileRunner):

Expand Down
36 changes: 34 additions & 2 deletions test/integration/022_bigquery_test/test_simple_bigquery_view.py
Original file line number Diff line number Diff line change
@@ -1,7 +1,8 @@
from test.integration.base import DBTIntegrationTest, FakeArgs, use_profile
import random
import time


class TestSimpleBigQueryRun(DBTIntegrationTest):
class TestBaseBigQueryRun(DBTIntegrationTest):

@property
def schema(self):
Expand All @@ -22,6 +23,9 @@ def project_config(self):
def profile_config(self):
return self.bigquery_profile()


class TestSimpleBigQueryRun(TestBaseBigQueryRun):

@use_profile('bigquery')
def test__bigquery_simple_run(self):
# make sure seed works twice. Full-refresh is a no-op
Expand Down Expand Up @@ -68,3 +72,31 @@ def test__bigquery_exists_non_destructive(self):
self.assertFalse(result.skipped)
# status = # of failing rows
self.assertEqual(result.status, 0)


class TestUnderscoreBigQueryRun(TestBaseBigQueryRun):
    # A dataset name beginning with an underscore exercises the
    # include_all=True listing path in the BigQuery adapter.
    prefix = "_test{}{:04}".format(int(time.time()), random.randint(0, 9999))

    @use_profile('bigquery')
    def test_bigquery_run_twice(self):
        """Seed, run twice (second run is a no-op refresh), then test."""
        self.run_dbt(['seed'])
        for _ in range(2):
            run_results = self.run_dbt()
            self.assertEqual(len(run_results), 4)

        # Only the 'dupe' model's test is expected to fail.
        test_results = self.run_dbt(['test'], expect_pass=False)

        for result in test_results:
            # Nothing should error or be skipped either way.
            self.assertFalse(result.errored)
            self.assertFalse(result.skipped)
            if 'dupe' in result.node.get('name'):
                # status = # of failing rows, so 'dupe' must report > 0
                self.assertTrue(result.status > 0)
            else:
                # status = # of failing rows
                self.assertEqual(result.status, 0)

0 comments on commit 2a8f0b8

Please sign in to comment.