Skip to content

Commit

Permalink
Merge branch 'master' into add_database_to_hive_urn
Browse files Browse the repository at this point in the history
  • Loading branch information
treff7es authored Oct 18, 2022
2 parents 1f25262 + a3db265 commit 15694ef
Show file tree
Hide file tree
Showing 5 changed files with 166 additions and 70 deletions.
2 changes: 1 addition & 1 deletion docs/what/mxe.md
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@ DataHub makes use of a few important Kafka events for operation. The most notable o
2. Metadata Change Log (Versioned + Timeseries)
3. Platform Event

Each event is originally authored using [PDL]( https://linkedin.github.io/rest.li/DATA-Data-Schema-and-Templates), a modeling language developed by LinkedIn, and
Each event is originally authored using [PDL](https://linkedin.github.io/rest.li/pdl_schema), a modeling language developed by LinkedIn, and
then converted into their Avro equivalents, which are used when writing and reading the events to Kafka.

In this document, we'll describe each of these events in detail - including notes about their structure & semantics.
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@
import logging
import os
import re
import traceback
from collections import defaultdict
from datetime import datetime, timedelta
from typing import Dict, Iterable, List, Optional, Tuple, Type, Union, cast
Expand Down Expand Up @@ -549,6 +550,8 @@ def _process_project(
try:
yield from self._process_schema(conn, project_id, bigquery_dataset)
except Exception as e:
trace = traceback.format_exc()
logger.error(trace)
logger.error(
f"Unable to get tables for dataset {bigquery_dataset.name} in project {project_id}, skipping. The error was: {e}"
)
Expand Down Expand Up @@ -624,7 +627,9 @@ def _process_table(
conn, table_identifier, self.config.column_limit
)
if not table.columns:
logger.warning(f"Unable to get columns for table: {table_identifier}")
logger.warning(
f"Table doesn't have any column or unable to get columns for table: {table_identifier}"
)

lineage_info: Optional[Tuple[UpstreamLineage, Dict[str, str]]] = None

Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,16 @@
// Smoke test: a logged-in user can open a container page and see its
// child entities plus the pagination summary.
describe("containers", () => {
  it("can see elements inside the container", () => {
    cy.login();

    // Navigate directly to the jaffle_shop container by URN.
    const containerUrl =
      "http://localhost:9002/container/urn:li:container:348c96555971d3f5c1ffd7dd2e7446cb";
    cy.visit(containerUrl);

    // Container title, each expected child entity, and the paging label.
    const expectedTexts = [
      "jaffle_shop",
      "customers",
      "customers_source",
      "orders",
      "raw_orders",
      "1 - 9 of 9",
    ];
    expectedTexts.forEach((text) => {
      cy.contains(text);
    });
  });
});
12 changes: 12 additions & 0 deletions smoke-test/tests/cypress/cypress/integration/domains/domains.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,12 @@
// Smoke test: a logged-in user can open a domain's Entities tab and see
// the domain name, its single entity, and the pagination summary.
describe("domains", () => {
  it("can see elements inside the domain", () => {
    cy.login();

    // Navigate directly to the marketing domain's Entities view by URN.
    const domainUrl =
      "http://localhost:9002/domain/urn:li:domain:marketing/Entities?is_lineage_mode=false";
    cy.visit(domainUrl);

    // Domain display name, its one contained dataset, and the paging label.
    const expectedTexts = ["Marketing", "SampleCypressKafkaDataset", "1 - 1 of 1"];
    expectedTexts.forEach((text) => {
      cy.contains(text);
    });
  });
});
Loading

0 comments on commit 15694ef

Please sign in to comment.