
Commit

Merge pull request #6 from datahub-project/master
Upstream
leifker authored Mar 16, 2022
2 parents 175cad1 + 9025bfb commit 0b58f4c
Showing 201 changed files with 3,803 additions and 1,256 deletions.
2 changes: 2 additions & 0 deletions build.gradle
@@ -99,6 +99,7 @@ project.ext.externalDependency = [
'neo4jJavaDriver': 'org.neo4j.driver:neo4j-java-driver:4.0.1',
'opentelemetryApi': 'io.opentelemetry:opentelemetry-api:1.0.0',
'opentelemetryAnnotations': 'io.opentelemetry:opentelemetry-extension-annotations:1.0.0',
'opentracingJdbc':'io.opentracing.contrib:opentracing-jdbc:0.2.15',
'parseqTest': 'com.linkedin.parseq:parseq:3.0.7:test',
'parquet': 'org.apache.parquet:parquet-avro:1.12.0',
'picocli': 'info.picocli:picocli:4.5.0',
@@ -142,6 +143,7 @@ project.ext.externalDependency = [
'typesafeConfig':'com.typesafe:config:1.4.1',
'wiremock':'com.github.tomakehurst:wiremock:2.10.0',
'zookeeper': 'org.apache.zookeeper:zookeeper:3.4.14'

]

allprojects {
14 changes: 8 additions & 6 deletions datahub-frontend/app/auth/AuthModule.java
@@ -13,9 +13,8 @@
import com.datahub.authentication.Authentication;
import java.nio.charset.StandardCharsets;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.util.Arrays;
import java.util.Collections;
import org.apache.commons.codec.digest.DigestUtils;
import org.pac4j.core.client.Client;
import org.pac4j.core.client.Clients;
import org.pac4j.core.config.Config;
@@ -63,12 +62,15 @@ public AuthModule(final Environment environment, final com.typesafe.config.Confi
protected void configure() {
PlayCookieSessionStore playCacheCookieStore;
try {
// To generate a valid encryption key from an input value, we first
// hash the input to generate a fixed-length string. Then, we convert
// it to hex and slice the first 16 bytes, because AES key length must strictly
// have a specific length.
final String aesKeyBase = _configs.getString(PAC4J_AES_KEY_BASE_CONF);
MessageDigest sha = MessageDigest.getInstance("SHA-1");
byte[] key = sha.digest(aesKeyBase.getBytes(StandardCharsets.UTF_8));
key = Arrays.copyOf(key, 16);
final String aesKeyHash = DigestUtils.sha1Hex(aesKeyBase.getBytes(StandardCharsets.UTF_8));
final String aesEncryptionKey = aesKeyHash.substring(0, 16);
playCacheCookieStore = new PlayCookieSessionStore(
new ShiroAesDataEncrypter(new String(key)));
new ShiroAesDataEncrypter(aesEncryptionKey));
} catch (Exception e) {
throw new RuntimeException("Failed to instantiate Pac4j cookie session store!", e);
}
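For context, a minimal standalone sketch (not part of this commit) contrasting the two key derivations. Both start from the configured PAC4J_AES_KEY_BASE_CONF value; the sample input below is a placeholder. The old code took the first 16 raw bytes of the SHA-1 digest and passed them through new String(key), which depends on the platform charset and can mangle non-printable bytes; the new code takes the first 16 hex characters of the digest, which is always 16 printable ASCII bytes.

// Sketch only; assumes commons-codec (DigestUtils) is on the classpath, as in the diff above.
import java.nio.charset.StandardCharsets;
import java.security.MessageDigest;
import java.util.Arrays;
import org.apache.commons.codec.digest.DigestUtils;

public class AesKeyDerivationSketch {
  public static void main(String[] args) throws Exception {
    String aesKeyBase = "example-key-base"; // placeholder, not a real config value

    // Old approach: first 16 raw bytes of the SHA-1 digest.
    MessageDigest sha = MessageDigest.getInstance("SHA-1");
    byte[] rawKey = Arrays.copyOf(sha.digest(aesKeyBase.getBytes(StandardCharsets.UTF_8)), 16);

    // New approach: first 16 hex characters of the SHA-1 digest, i.e. 16 ASCII bytes
    // from [0-9a-f], handed to ShiroAesDataEncrypter as a plain String.
    String hexKey = DigestUtils.sha1Hex(aesKeyBase.getBytes(StandardCharsets.UTF_8)).substring(0, 16);

    System.out.println(Arrays.toString(rawKey));
    System.out.println(hexKey);
  }
}

Note that the two derivations produce different key material for the same key base.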
18 changes: 16 additions & 2 deletions datahub-frontend/app/auth/sso/oidc/OidcCallbackLogic.java
@@ -245,8 +245,22 @@ private List<CorpGroupSnapshot> extractGroups(CommonProfile profile) {
if (profile.containsAttribute(groupsClaimName)) {
try {
final List<CorpGroupSnapshot> groupSnapshots = new ArrayList<>();
// We found some groups. Note that we assume it is an array of strings!
final Collection<String> groupNames = (Collection<String>) profile.getAttribute(groupsClaimName, Collection.class);
final Collection<String> groupNames;
final Object groupAttribute = profile.getAttribute(groupsClaimName);
if (groupAttribute instanceof Collection) {
// List of group names
groupNames = (Collection<String>) profile.getAttribute(groupsClaimName, Collection.class);
} else if (groupAttribute instanceof String) {
// Single group name
groupNames = Collections.singleton(profile.getAttribute(groupsClaimName, String.class));
} else {
log.error(String.format("Failed to parse OIDC group claim with name %s. Unknown type %s provided.",
groupsClaimName,
groupAttribute.getClass()));
// Return empty list. Do not throw.
return Collections.emptyList();
}

for (String groupName : groupNames) {
// Create a basic CorpGroupSnapshot from the information.
try {
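For illustration, a minimal sketch (not part of this commit; class and method names are hypothetical) of the shape-tolerant handling introduced above: identity providers may send the groups claim as an array of strings or as a single string, and any other shape now degrades to an empty group list instead of failing the whole callback.

import java.util.Collection;
import java.util.Collections;

final class GroupClaimSketch {
  @SuppressWarnings("unchecked")
  static Collection<String> groupNames(Object groupAttribute) {
    if (groupAttribute instanceof Collection) {
      // e.g. "groups": ["engineering", "data-platform"]
      return (Collection<String>) groupAttribute;
    }
    if (groupAttribute instanceof String) {
      // e.g. "groups": "engineering"
      return Collections.singleton((String) groupAttribute);
    }
    // Unknown shape: treat as no groups rather than throwing (the resolver above also logs).
    return Collections.emptyList();
  }
}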
(changes to another file; path not shown)
@@ -12,6 +12,8 @@
import com.linkedin.datahub.graphql.generated.AggregationMetadata;
import com.linkedin.datahub.graphql.generated.Aspect;
import com.linkedin.datahub.graphql.generated.Assertion;
import com.linkedin.datahub.graphql.generated.AutoCompleteResultForEntity;
import com.linkedin.datahub.graphql.generated.AutoCompleteResults;
import com.linkedin.datahub.graphql.generated.BrowseResults;
import com.linkedin.datahub.graphql.generated.Chart;
import com.linkedin.datahub.graphql.generated.ChartInfo;
@@ -514,6 +516,10 @@ private void configureContainerResolvers(final RuntimeWiring.Builder builder) {
.type("Container", typeWiring -> typeWiring
.dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient))
.dataFetcher("entities", new ContainerEntitiesResolver(entityClient))
.dataFetcher("domain", new LoadableTypeResolver<>(domainType, (env) -> {
final Container container = env.getSource();
return container.getDomain() != null ? container.getDomain().getUrn() : null;
}))
.dataFetcher("platform",
new LoadableTypeResolver<>(dataPlatformType,
(env) -> ((Container) env.getSource()).getPlatform().getUrn()))
@@ -730,7 +736,20 @@ private void configureGenericEntityResolvers(final RuntimeWiring.Builder builder
(env) -> ((ListDomainsResult) env.getSource()).getDomains().stream()
.map(Domain::getUrn)
.collect(Collectors.toList())))
)
.type("AutoCompleteResults", typeWiring -> typeWiring
.dataFetcher("entities",
new EntityTypeBatchResolver(
new ArrayList<>(entityTypes),
(env) -> ((AutoCompleteResults) env.getSource()).getEntities()))
)
.type("AutoCompleteResultForEntity", typeWiring -> typeWiring
.dataFetcher("entities",
new EntityTypeBatchResolver(
new ArrayList<>(entityTypes),
(env) -> ((AutoCompleteResultForEntity) env.getSource()).getEntities()))
);
;
}

/**
(changes to another file; path not shown)
@@ -7,6 +7,7 @@
import com.linkedin.datahub.graphql.exception.AuthorizationException;
import com.linkedin.datahub.graphql.generated.AddOwnerInput;
import com.linkedin.datahub.graphql.generated.OwnerEntityType;
import com.linkedin.datahub.graphql.generated.OwnershipType;
import com.linkedin.datahub.graphql.resolvers.mutate.util.OwnerUtils;
import com.linkedin.metadata.entity.EntityService;
import graphql.schema.DataFetcher;
@@ -30,6 +31,7 @@ public CompletableFuture<Boolean> get(DataFetchingEnvironment environment) throw

Urn ownerUrn = Urn.createFromString(input.getOwnerUrn());
OwnerEntityType ownerEntityType = input.getOwnerEntityType();
OwnershipType type = input.getType() == null ? OwnershipType.NONE : input.getType();
Urn targetUrn = Urn.createFromString(input.getResourceUrn());

if (!OwnerUtils.isAuthorizedToUpdateOwners(environment.getContext(), targetUrn)) {
@@ -50,6 +52,8 @@ public CompletableFuture<Boolean> get(DataFetchingEnvironment environment) throw
Urn actor = CorpuserUrn.createFromString(((QueryContext) environment.getContext()).getActorUrn());
OwnerUtils.addOwner(
ownerUrn,
// Assumption Alert: Assumes that GraphQL ownership type === GMS ownership type
com.linkedin.common.OwnershipType.valueOf(type.name()),
targetUrn,
actor,
_entityService
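The "Assumption Alert" comment above leans on the GraphQL and GMS OwnershipType enums keeping identical constant names. A hedged sketch of that bridge and its failure mode (the helper class is hypothetical):

final class OwnershipTypeBridgeSketch {
  // valueOf throws IllegalArgumentException at request time if the enums ever diverge,
  // e.g. a constant added to the GraphQL schema without a matching GMS constant.
  static com.linkedin.common.OwnershipType toGms(
      com.linkedin.datahub.graphql.generated.OwnershipType graphqlType) {
    return com.linkedin.common.OwnershipType.valueOf(graphqlType.name());
  }
}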
(changes to another file; path not shown)
@@ -18,6 +18,7 @@
import com.linkedin.metadata.Constants;
import com.linkedin.metadata.authorization.PoliciesConfig;
import com.linkedin.metadata.entity.EntityService;
import java.util.stream.Collectors;
import javax.annotation.Nonnull;
import lombok.extern.slf4j.Slf4j;

@@ -34,6 +35,7 @@ private OwnerUtils() { }

public static void addOwner(
Urn ownerUrn,
OwnershipType type,
Urn resourceUrn,
Urn actor,
EntityService entityService
@@ -43,7 +45,7 @@ public static void addOwner(
Constants.OWNERSHIP_ASPECT_NAME,
entityService,
new Ownership());
addOwner(ownershipAspect, ownerUrn);
addOwner(ownershipAspect, ownerUrn, type);
persistAspect(resourceUrn, Constants.OWNERSHIP_ASPECT_NAME, ownershipAspect, actor, entityService);
}

@@ -63,23 +65,22 @@ public static void removeOwner(
persistAspect(resourceUrn, Constants.OWNERSHIP_ASPECT_NAME, ownershipAspect, actor, entityService);
}

private static void addOwner(Ownership ownershipAspect, Urn ownerUrn) {
private static void addOwner(Ownership ownershipAspect, Urn ownerUrn, OwnershipType type) {
if (!ownershipAspect.hasOwners()) {
ownershipAspect.setOwners(new OwnerArray());
}

OwnerArray ownerArray = ownershipAspect.getOwners();

// if owner exists, do not add it again
if (ownerArray.stream().anyMatch(association -> association.getOwner().equals(ownerUrn))) {
return;
}
final OwnerArray ownerArray = new OwnerArray(ownershipAspect.getOwners()
.stream()
.filter(owner -> !owner.getOwner().equals(ownerUrn))
.collect(Collectors.toList()));

Owner newOwner = new Owner();
newOwner.setType(OwnershipType.DATAOWNER);
newOwner.setType(type);
newOwner.setSource(new OwnershipSource().setType(OwnershipSourceType.MANUAL));
newOwner.setOwner(ownerUrn);
ownerArray.add(newOwner);
ownershipAspect.setOwners(ownerArray);
}

private static void removeOwner(Ownership ownership, Urn ownerUrn) {
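One behavioral note on the hunk above: the private addOwner used to return early when the owner was already present, so a second call was a no-op; it now filters out any existing entry for that owner and appends a fresh one, which means repeated calls can update the ownership type. A generic sketch of that upsert pattern (names are illustrative, not from the codebase):

import java.util.ArrayList;
import java.util.List;
import java.util.function.BiPredicate;
import java.util.stream.Collectors;

final class UpsertSketch {
  // Drop any entry that matches the incoming one's key, then append the incoming entry,
  // so calling this repeatedly updates the entry instead of silently keeping the old one.
  static <T> List<T> upsert(List<T> existing, T incoming, BiPredicate<T, T> sameKey) {
    List<T> result = existing.stream()
        .filter(e -> !sameKey.test(e, incoming))
        .collect(Collectors.toCollection(ArrayList::new));
    result.add(incoming);
    return result;
  }
}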
(changes to another file; path not shown)
@@ -18,6 +18,7 @@
import org.slf4j.LoggerFactory;

import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument;
import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.*;
import static org.apache.commons.lang3.StringUtils.isBlank;

/**
@@ -27,11 +28,9 @@ public class AutoCompleteForMultipleResolver implements DataFetcher<CompletableF

private static final Logger _logger = LoggerFactory.getLogger(AutoCompleteForMultipleResolver.class.getName());

private final List<SearchableEntityType<?>> _searchableEntities;
private final Map<EntityType, SearchableEntityType<?>> _typeToEntity;

public AutoCompleteForMultipleResolver(@Nonnull final List<SearchableEntityType<?>> searchableEntities) {
_searchableEntities = searchableEntities;
_typeToEntity = searchableEntities.stream().collect(Collectors.toMap(
SearchableEntityType::type,
entity -> entity
@@ -51,10 +50,18 @@ public CompletableFuture<AutoCompleteMultipleResults> get(DataFetchingEnvironmen

List<EntityType> types = input.getTypes();
if (types != null && types.size() > 0) {
return AutocompleteUtils.batchGetAutocompleteResults(types.stream().map(type -> _typeToEntity.get(type)).collect(
Collectors.toList()), sanitizedQuery, input, environment);
return AutocompleteUtils.batchGetAutocompleteResults(
types.stream().map(_typeToEntity::get).collect(Collectors.toList()),
sanitizedQuery,
input,
environment);
}

return AutocompleteUtils.batchGetAutocompleteResults(_searchableEntities, sanitizedQuery, input, environment);
// By default, autocomplete only against the set of Searchable Entity Types.
return AutocompleteUtils.batchGetAutocompleteResults(
AUTO_COMPLETE_ENTITY_TYPES.stream().map(_typeToEntity::get).collect(Collectors.toList()),
sanitizedQuery,
input,
environment);
}
}
(changes to another file; path not shown)
@@ -7,7 +7,7 @@
import com.linkedin.datahub.graphql.types.SearchableEntityType;
import graphql.schema.DataFetchingEnvironment;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.CompletableFuture;
import java.util.stream.Collectors;
@@ -30,35 +30,35 @@ public static CompletableFuture<AutoCompleteMultipleResults> batchGetAutocomplet
) {
final int limit = input.getLimit() != null ? input.getLimit() : DEFAULT_LIMIT;

final CompletableFuture<AutoCompleteResultForEntity>[] autoCompletesFuture = entities.stream().map(entity -> {
return CompletableFuture.supplyAsync(() -> {
try {
final AutoCompleteResults searchResult = entity.autoComplete(
sanitizedQuery,
input.getField(),
input.getFilters(),
limit,
environment.getContext()
);
final AutoCompleteResultForEntity autoCompleteResultForEntity =
new AutoCompleteResultForEntity(entity.type(), searchResult.getSuggestions());
return autoCompleteResultForEntity;
} catch (Exception e) {
_logger.error("Failed to execute autocomplete all: "
+ String.format("field %s, query %s, filters: %s, limit: %s",
input.getField(),
input.getQuery(),
input.getFilters(),
input.getLimit()) + " "
+ e.getMessage());
return new AutoCompleteResultForEntity(entity.type(), new ArrayList<>());
}
});
}).toArray(CompletableFuture[]::new);
return CompletableFuture.allOf(autoCompletesFuture)
final List<CompletableFuture<AutoCompleteResultForEntity>> autoCompletesFuture = entities.stream().map(entity -> CompletableFuture.supplyAsync(() -> {
try {
final AutoCompleteResults searchResult = entity.autoComplete(
sanitizedQuery,
input.getField(),
input.getFilters(),
limit,
environment.getContext()
);
return new AutoCompleteResultForEntity(
entity.type(),
searchResult.getSuggestions(),
searchResult.getEntities()
);
} catch (Exception e) {
_logger.error("Failed to execute autocomplete all: "
+ String.format("field %s, query %s, filters: %s, limit: %s",
input.getField(),
input.getQuery(),
input.getFilters(),
input.getLimit()) + " "
+ e.getMessage());
return new AutoCompleteResultForEntity(entity.type(), Collections.emptyList(), Collections.emptyList());
}
})).collect(Collectors.toList());
return CompletableFuture.allOf(autoCompletesFuture.toArray(new CompletableFuture[0]))
.thenApplyAsync((res) -> {
AutoCompleteMultipleResults result = new AutoCompleteMultipleResults(sanitizedQuery, new ArrayList<>());
result.setSuggestions(Arrays.stream(autoCompletesFuture)
result.setSuggestions(autoCompletesFuture.stream()
.map(CompletableFuture::join)
.filter(
autoCompleteResultForEntity ->
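The refactor above keeps the same fan-out/fan-in structure while switching from a raw CompletableFuture[] to a List: one future per entity type, allOf to await them all, then join (which does not block once allOf has completed), with per-entity failures already converted to empty results inside supplyAsync. A generic sketch of the pattern (names are illustrative):

import java.util.List;
import java.util.concurrent.CompletableFuture;
import java.util.stream.Collectors;

final class FanInSketch {
  // Wait for every future, then collect the results in their original order.
  static <T> CompletableFuture<List<T>> all(List<CompletableFuture<T>> futures) {
    return CompletableFuture.allOf(futures.toArray(new CompletableFuture[0]))
        .thenApply(done -> futures.stream()
            .map(CompletableFuture::join) // safe here: allOf guarantees completion
            .collect(Collectors.toList()));
  }
}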
(changes to another file; path not shown)
@@ -9,9 +9,41 @@ public class SearchUtils {
private SearchUtils() {
}

/**
* Entities that are searched by default in Search Across Entities
*/
public static final List<EntityType> SEARCHABLE_ENTITY_TYPES =
ImmutableList.of(EntityType.DATASET, EntityType.DASHBOARD, EntityType.CHART, EntityType.MLMODEL,
EntityType.MLMODEL_GROUP, EntityType.MLFEATURE_TABLE, EntityType.DATA_FLOW, EntityType.DATA_JOB,
EntityType.GLOSSARY_TERM, EntityType.TAG, EntityType.CORP_USER, EntityType.CORP_GROUP, EntityType.CONTAINER,
ImmutableList.of(
EntityType.DATASET,
EntityType.DASHBOARD,
EntityType.CHART,
EntityType.MLMODEL,
EntityType.MLMODEL_GROUP,
EntityType.MLFEATURE_TABLE,
EntityType.DATA_FLOW,
EntityType.DATA_JOB,
EntityType.GLOSSARY_TERM,
EntityType.TAG,
EntityType.CORP_USER,
EntityType.CORP_GROUP,
EntityType.CONTAINER,
EntityType.DOMAIN);

/**
* Entities that are part of autocomplete by default in Auto Complete Across Entities
*/
public static final List<EntityType> AUTO_COMPLETE_ENTITY_TYPES =
ImmutableList.of(
EntityType.DATASET,
EntityType.DASHBOARD,
EntityType.CHART,
EntityType.MLMODEL,
EntityType.MLMODEL_GROUP,
EntityType.MLFEATURE_TABLE,
EntityType.DATA_FLOW,
EntityType.DATA_JOB,
EntityType.GLOSSARY_TERM,
EntityType.TAG,
EntityType.CORP_USER,
EntityType.CORP_GROUP);
}
(changes to another file; path not shown)
@@ -1,8 +1,10 @@
package com.linkedin.datahub.graphql.types.mappers;

import com.linkedin.datahub.graphql.generated.AutoCompleteResults;
import com.linkedin.datahub.graphql.types.common.mappers.UrnToEntityMapper;
import com.linkedin.metadata.query.AutoCompleteResult;

import java.util.stream.Collectors;
import javax.annotation.Nonnull;


@@ -19,6 +21,8 @@ public AutoCompleteResults apply(@Nonnull final AutoCompleteResult input) {
final AutoCompleteResults result = new AutoCompleteResults();
result.setQuery(input.getQuery());
result.setSuggestions(input.getSuggestions());
result.setEntities(input.getEntities().stream().map(entity -> UrnToEntityMapper.map(entity.getUrn())).collect(
Collectors.toList()));
return result;
}
}
(changes to another file; path not shown)
@@ -47,7 +47,7 @@ public Collection<MetadataChangeProposal> apply(
final Ownership ownership = new Ownership();
final Owner owner = new Owner();
owner.setOwner(actor);
owner.setType(OwnershipType.DATAOWNER);
owner.setType(OwnershipType.NONE);
owner.setSource(new OwnershipSource().setType(OwnershipSourceType.SERVICE));
ownership.setOwners(new OwnerArray(owner));
ownership.setLastModified(auditStamp);
(diffs for the remaining changed files are not loaded in this view)
