Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Update fields in source-connectors specifications: kustomer-singer, lemlist, linnworks, looker, marketo, mixpanel #8578

Merged
merged 20 commits into from
Dec 25, 2021
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@
"sourceDefinitionId": "00405b19-9768-4e0c-b1ae-9fc2ee2b2a8c",
"name": "Looker",
"dockerRepository": "airbyte/source-looker",
"dockerImageTag": "0.2.5",
"dockerImageTag": "0.2.6",
"documentationUrl": "https://docs.airbyte.io/integrations/sources/looker",
"icon": "looker.svg"
}
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@
"sourceDefinitionId": "12928b32-bf0a-4f1e-964f-07e12e37153a",
"name": "Mixpanel",
"dockerRepository": "airbyte/source-mixpanel",
"dockerImageTag": "0.1.7",
"dockerImageTag": "0.1.8.1",
bazarnov marked this conversation as resolved.
Show resolved Hide resolved
"documentationUrl": "https://docs.airbyte.io/integrations/sources/mixpanel",
"icon": "mixpanel.svg"
}
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@
"sourceDefinitionId": "7b86879e-26c5-4ef6-a5ce-2be5c7b46d1e",
"name": "Linnworks",
"dockerRepository": "airbyte/source-linnworks",
"dockerImageTag": "0.1.0",
"dockerImageTag": "0.1.5",
"documentationUrl": "https://docs.airbyte.io/integrations/sources/linnworks",
"icon": "linnworks.svg"
}
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@
"sourceDefinitionId": "9e0556f4-69df-4522-a3fb-03264d36b348",
"name": "Marketo",
"dockerRepository": "airbyte/source-marketo",
"dockerImageTag": "0.1.1",
"dockerImageTag": "0.1.3",
"documentationUrl": "https://docs.airbyte.io/integrations/sources/marketo",
"icon": "marketo.svg"
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,7 @@
{
"sourceDefinitionId": "cd06e646-31bf-4dc8-af48-cbc6530fcad3",
"name": "Kustomer",
"dockerRepository": "airbyte/source-kustomer-singer",
"dockerImageTag": "0.1.2",
"documentationUrl": "https://docs.airbyte.io/integrations/sources/kustomer"
}
Original file line number Diff line number Diff line change
Expand Up @@ -363,14 +363,14 @@
- name: Linnworks
sourceDefinitionId: 7b86879e-26c5-4ef6-a5ce-2be5c7b46d1e
dockerRepository: airbyte/source-linnworks
dockerImageTag: 0.1.4
dockerImageTag: 0.1.5
documentationUrl: https://docs.airbyte.io/integrations/sources/linnworks
icon: linnworks.svg
sourceType: api
- name: Looker
sourceDefinitionId: 00405b19-9768-4e0c-b1ae-9fc2ee2b2a8c
dockerRepository: airbyte/source-looker
dockerImageTag: 0.2.5
dockerImageTag: 0.2.6
documentationUrl: https://docs.airbyte.io/integrations/sources/looker
icon: looker.svg
sourceType: api
Expand All @@ -391,7 +391,7 @@
- name: Marketo
sourceDefinitionId: 9e0556f4-69df-4522-a3fb-03264d36b348
dockerRepository: airbyte/source-marketo
dockerImageTag: 0.1.2
dockerImageTag: 0.1.3
documentationUrl: https://docs.airbyte.io/integrations/sources/marketo
icon: marketo.svg
sourceType: api
Expand All @@ -412,7 +412,7 @@
- name: Mixpanel
sourceDefinitionId: 12928b32-bf0a-4f1e-964f-07e12e37153a
dockerRepository: airbyte/source-mixpanel
dockerImageTag: 0.1.7
dockerImageTag: 0.1.8.1
documentationUrl: https://docs.airbyte.io/integrations/sources/mixpanel
icon: mixpanel.svg
sourceType: api
Expand Down Expand Up @@ -797,3 +797,9 @@
documentationUrl: https://docs.airbyte.io/integrations/sources/zuora
icon: zuora.svg
sourceType: api
- name: Kustomer
sourceDefinitionId: cd06e646-31bf-4dc8-af48-cbc6530fcad3
dockerRepository: airbyte/source-kustomer-singer
dockerImageTag: 0.1.2
documentationUrl: https://docs.airbyte.io/integrations/sources/kustomer
sourceType: api
72 changes: 52 additions & 20 deletions airbyte-config/init/src/main/resources/seed/source_specs.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -3647,7 +3647,7 @@
- - "client_secret"
oauthFlowOutputParameters:
- - "refresh_token"
- dockerImage: "airbyte/source-linnworks:0.1.4"
- dockerImage: "airbyte/source-linnworks:0.1.5"
spec:
documentationUrl: "https://docs.airbyte.io/integrations/sources/linnworks"
connectionSpecification:
Expand All @@ -3662,23 +3662,27 @@
additionalProperties: false
properties:
application_id:
title: "Application ID"
title: "Application ID"
description: "Linnworks Application ID"
type: "string"
application_secret:
title: "Application secret"
title: "Application Secret"
description: "Linnworks Application Secret"
type: "string"
airbyte_secret: true
token:
title: "Token"
title: "API Token"
type: "string"
start_date:
title: "Start date"
title: "Start Date"
description: "UTC date and time in the format 2017-01-25T00:00:00Z. Any\
\ data before this date will not be replicated."
type: "string"
format: "date-time"
supportsNormalization: false
supportsDBT: false
supported_destination_sync_modes: []
- dockerImage: "airbyte/source-looker:0.2.5"
- dockerImage: "airbyte/source-looker:0.2.6"
spec:
documentationUrl: "https://docs.airbyte.io/integrations/sources/looker"
connectionSpecification:
Expand All @@ -3693,6 +3697,7 @@
properties:
domain:
type: "string"
title: "Domain"
examples:
- "domainname.looker.com"
- "looker.clientname.com"
Expand Down Expand Up @@ -3777,7 +3782,7 @@
supportsNormalization: false
supportsDBT: false
supported_destination_sync_modes: []
- dockerImage: "airbyte/source-marketo:0.1.2"
- dockerImage: "airbyte/source-marketo:0.1.3"
spec:
documentationUrl: "https://docs.airbyte.io/integrations/sources/marketo"
connectionSpecification:
Expand All @@ -3803,23 +3808,25 @@
client_id:
title: "Client ID"
type: "string"
description: "The Client ID of your Marketo developer application. See <a\
\ href=\"https://docs.airbyte.io/integrations/sources/marketo\"> the docs\
\ </a> for info on how to obtain this."
order: 0
description: "Your Marketo client_id. See <a href=\"https://docs.airbyte.io/integrations/sources/marketo\"\
> the docs </a> for info on how to obtain this."
airbyte_secret: true
client_secret:
title: "Client Secret"
type: "string"
description: "The Client Secret of your Marketo developer application. See\
\ <a href=\"https://docs.airbyte.io/integrations/sources/marketo\"> the\
\ docs </a> for info on how to obtain this."
order: 1
description: "Your Marketo client secret. See <a href=\"https://docs.airbyte.io/integrations/sources/marketo\"\
> the docs </a> for info on how to obtain this."
airbyte_secret: true
start_date:
title: "Start Date"
type: "string"
order: 2
description: "Data generated in Marketo after this date will be replicated.\
\ This date must be specified in the format YYYY-MM-DDT00:00:00Z."
description: "UTC date and time in the format 2017-01-25T00:00:00Z. Any\
\ data before this date will not be replicated."
examples:
- "2020-09-25T00:00:00Z"
pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$"
Expand Down Expand Up @@ -4176,7 +4183,7 @@
path_in_connector_config:
- "credentials"
- "client_secret"
- dockerImage: "airbyte/source-mixpanel:0.1.7"
- dockerImage: "airbyte/source-mixpanel:0.1.8.1"
spec:
documentationUrl: "https://docs.airbyte.io/integrations/sources/mixpanel"
connectionSpecification:
Expand All @@ -4196,9 +4203,9 @@
attribution_window:
title: "Attribution Window"
type: "integer"
description: "Latency minimum number of days to look-back to account for\
\ delays in attributing accurate results. Default attribution window is\
\ 5 days."
description: "A period of time for attributing results to ads and the lookback\
\ period after those actions occur during which ad results are counted. Default\
\ attribution window is 5 days."
default: 5
project_timezone:
title: "Project Timezone"
Expand All @@ -4219,9 +4226,9 @@
start_date:
title: "Start Date"
type: "string"
description: "The default value to use if no bookmark exists for an endpoint.\
\ If this option is not set, the connector will replicate data from up\
\ to one year ago by default."
description: "UTC date and time in the format 2017-01-25T00:00:00Z. Any\
\ data before this date will not be replicated. If this option is not\
\ set, the connector will replicate data from up to one year ago by default."
examples:
- "2021-11-16"
pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}(T[0-9]{2}:[0-9]{2}:[0-9]{2}Z)?$"
Expand Down Expand Up @@ -7724,3 +7731,28 @@
supportsNormalization: false
supportsDBT: false
supported_destination_sync_modes: []
- dockerImage: "airbyte/source-kustomer-singer:0.1.2"
spec:
documentationUrl: "https://docs.airbyte.io/integrations/sources/kustomer"
connectionSpecification:
$schema: "http://json-schema.org/draft-07/schema#"
title: "Source Kustomer Singer Spec"
type: "object"
required:
- "api_token"
- "start_date"
additionalProperties: true
properties:
api_token:
type: "string"
description: "Kustomer API Token. See the <a href=\"https://developer.kustomer.com/kustomer-api-docs/reference/authentication\"\
>docs</a> on how to obtain this"
airbyte_secret: true
start_date:
type: "string"
description: "The date from which you'd like to replicate the data"
examples:
- "2019-01-01T00:00:00Z"
supportsNormalization: false
supportsDBT: false
supported_destination_sync_modes: []
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,8 @@ public class StagingFilenameGenerator {
private int currentFileSuffixPartCount = 0;

// This variable is responsible for setting the chunk size (in MB). After chunks are created in
// S3 or GCS they will be uploaded to Snowflake or Redshift. These service have some limitations for the uploading file.
// S3 or GCS they will be uploaded to Snowflake or Redshift. These services have some limitations
// for the uploaded file.
// So we make the calculation to determine how many parts we can put to the single chunk file.
private final long iterations;

Expand Down
Original file line number Diff line number Diff line change
@@ -1,13 +1,20 @@
/*
* Copyright (c) 2021 Airbyte, Inc., all rights reserved.
*/

package io.airbyte.integrations.destination.jdbc.constants;

import io.aesy.datasize.ByteUnit.IEC;
import io.aesy.datasize.DataSize;

public interface GlobalDataSizeConstants {

/** 256 MB to BYTES as comparison will be done in BYTES */
int DEFAULT_MAX_BATCH_SIZE_BYTES = DataSize.of(256L, IEC.MEBIBYTE).toUnit(IEC.BYTE).getValue().intValue();
/** This constant determines the max possible size of file(e.g. 1 GB / 256 megabytes ≈ 4 chunks of file)
see StagingFilenameGenerator.java:28
*/
/**
* This constant determines the max possible size of file(e.g. 1 GB / 256 megabytes ≈ 4 chunks of
* file) see StagingFilenameGenerator.java:28
*/
long MAX_FILE_SIZE = DataSize.of(1L, IEC.GIBIBYTE).toUnit(IEC.BYTE).getValue().longValue();

}
Original file line number Diff line number Diff line change
Expand Up @@ -6,16 +6,15 @@

import static org.junit.jupiter.api.Assertions.*;

import io.aesy.datasize.ByteUnit.IEC;
import io.aesy.datasize.DataSize;
import io.airbyte.integrations.destination.jdbc.constants.GlobalDataSizeConstants;
import org.apache.commons.lang3.RandomStringUtils;
import org.junit.jupiter.api.Test;

class StagingFilenameGeneratorTest {

private static final String STREAM_NAME = RandomStringUtils.randomAlphabetic(5).toLowerCase();
// Equal to GlobalDataSizeConstants.MAX_BYTE_PARTS_PER_FILE / GlobalDataSizeConstants.DEFAULT_MAX_BATCH_SIZE_BYTES
// Equal to GlobalDataSizeConstants.MAX_BYTE_PARTS_PER_FILE /
// GlobalDataSizeConstants.DEFAULT_MAX_BATCH_SIZE_BYTES
// because MAX_FILE_SIZE (1 GiB) divided by DEFAULT_MAX_BATCH_SIZE_BYTES (256 MiB) yields 4 chunks per file
private static final int EXPECTED_ITERATIONS_WITH_STANDARD_BYTE_BUFFER = 4;
private static final StagingFilenameGenerator FILENAME_GENERATOR =
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -22,6 +22,7 @@ public class SnowflakeDatabase {
private static final Duration NETWORK_TIMEOUT = Duration.ofMinutes(1);
private static final Duration QUERY_TIMEOUT = Duration.ofHours(3);
private static final SnowflakeSQLNameTransformer nameTransformer = new SnowflakeSQLNameTransformer();

public static Connection getConnection(final JsonNode config) throws SQLException {
final String connectUrl = String.format("jdbc:snowflake://%s", config.get("host").asText());

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -5,8 +5,6 @@
package io.airbyte.integrations.destination.snowflake;

import com.google.cloud.storage.Storage;
import io.aesy.datasize.ByteUnit.IEC;
import io.aesy.datasize.DataSize;
import io.airbyte.db.jdbc.JdbcDatabase;
import io.airbyte.integrations.destination.ExtendedNameTransformer;
import io.airbyte.integrations.destination.jdbc.SqlOperations;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -12,8 +12,6 @@
import io.airbyte.integrations.destination.s3.S3DestinationConfig;
import io.airbyte.protocol.models.DestinationSyncMode;
import java.sql.SQLException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class SnowflakeS3StreamCopier extends LegacyS3StreamCopier {

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -174,9 +174,7 @@
"part_size": {
"type": "integer",
"default": 5,
"examples": [
5
],
"examples": [5],
"description": "Optional. Increase this if syncing tables larger than 100GB. Only relevant for COPY. Files are streamed to S3 in parts. This determines the size of each part, in MBs. As S3 has a limit of 10,000 parts per file, part size affects the table size. This is 10MB by default, resulting in a default limit of 100GB tables. Note, a larger part size will result in larger memory requirements. A rule of thumb is to multiply the part size by 10 to get the memory requirement. Modify this with care.",
"title": "Stream Part Size",
"order": 5
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -4,13 +4,13 @@

package io.airbyte.integrations.destination.snowflake;

import static org.junit.jupiter.api.Assertions.*;

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.node.ObjectNode;
import io.airbyte.commons.json.Jsons;
import io.airbyte.commons.string.Strings;
import io.airbyte.protocol.models.AirbyteConnectionStatus;
import org.junit.jupiter.api.Test;

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Paths;
Expand All @@ -19,10 +19,10 @@
import java.sql.SQLException;
import java.sql.Statement;
import java.util.Properties;

import static org.junit.jupiter.api.Assertions.*;
import org.junit.jupiter.api.Test;

class SnowflakeDestinationIntegrationTest {

private final SnowflakeSQLNameTransformer namingResolver = new SnowflakeSQLNameTransformer();

@Test
Expand All @@ -46,11 +46,11 @@ public void testInvalidSchemaName() {
public void syncWithNamingResolver() throws IOException, SQLException {
final JsonNode config = getConfig();
final String createSchemaQuery = String.format("CREATE SCHEMA %s", namingResolver.getIdentifier(config.get("schema").asText()));
Connection connection =null;
Connection connection = null;
try {
connection = SnowflakeDatabase.getConnection(config);
connection.createStatement().execute(createSchemaQuery);
}finally {
} finally {
if (connection != null) {
final String dropSchemaQuery = String.format("DROP SCHEMA IF EXISTS %s", namingResolver.getIdentifier(config.get("schema").asText()));
connection.createStatement().execute(dropSchemaQuery);
Expand Down Expand Up @@ -82,8 +82,8 @@ public Connection getConnection(JsonNode config, boolean useNameTransformer) thr
properties.put("database", config.get("database").asText());
properties.put("role", config.get("role").asText());
properties.put("schema", useNameTransformer
? namingResolver.getIdentifier(config.get("schema").asText())
: config.get("schema").asText());
? namingResolver.getIdentifier(config.get("schema").asText())
: config.get("schema").asText());

properties.put("JDBC_QUERY_RESULT_FORMAT", "JSON");

Expand All @@ -96,4 +96,5 @@ private JsonNode getConfig() throws IOException {
((ObjectNode) config).put("schema", schemaName);
return config;
}

}
Loading