Skip to content

Commit

Permalink
feat(search): search access controls part 1
Browse files Browse the repository at this point in the history
TODO:
* ownership migration upgrade step
* complete unit tests for access controls
* comprehensive api coverage: graphql, openapi, restli
* restricted entity hydration and graphql response
  • Loading branch information
david-leifker committed Feb 22, 2024
1 parent 4a44be8 commit 718813a
Show file tree
Hide file tree
Showing 67 changed files with 688 additions and 303 deletions.
6 changes: 3 additions & 3 deletions build.gradle
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,7 @@ buildscript {

ext.javaClassVersion = { p ->
// If Spring 6 is present, hard dependency on jdk17
if (p.configurations.any { it.getDependencies().any{
if (p.configurations.any { it.getDependencies().any {
(it.getGroup().equals("org.springframework") && it.getVersion().startsWith("6."))
|| (it.getGroup().equals("org.springframework.boot") && it.getVersion().startsWith("3.") && !it.getName().equals("spring-boot-starter-test"))
}}) {
Expand All @@ -43,7 +43,7 @@ buildscript {
ext.elasticsearchVersion = '2.9.0' // ES 7.10, Opensearch 1.x, 2.x
ext.jacksonVersion = '2.15.3'
ext.jettyVersion = '11.0.19'
ext.playVersion = '2.8.18'
ext.playVersion = '2.8.21'
ext.log4jVersion = '2.19.0'
ext.slf4jVersion = '1.7.36'
ext.logbackClassic = '1.4.14'
Expand Down Expand Up @@ -132,7 +132,7 @@ project.ext.externalDependency = [
'graphqlJavaScalars': 'com.graphql-java:graphql-java-extended-scalars:21.0',
'gson': 'com.google.code.gson:gson:2.8.9',
'guice': 'com.google.inject:guice:7.0.0',
'guice4': 'com.google.inject:guice:4.2.3', // Used for frontend while still on old Play version
'guicePlay': 'com.google.inject:guice:5.0.1', // Used for frontend while still on old Play version
'guava': 'com.google.guava:guava:32.1.2-jre',
'h2': 'com.h2database:h2:2.2.224',
'hadoopCommon':'org.apache.hadoop:hadoop-common:2.7.2',
Expand Down
43 changes: 41 additions & 2 deletions datahub-frontend/app/auth/AuthModule.java
Original file line number Diff line number Diff line change
Expand Up @@ -8,18 +8,28 @@
import com.datahub.authentication.Actor;
import com.datahub.authentication.ActorType;
import com.datahub.authentication.Authentication;
import com.datahub.plugins.auth.authorization.Authorizer;
import com.google.inject.AbstractModule;
import com.google.inject.Provides;
import com.google.inject.Singleton;
import com.google.inject.name.Named;
import com.linkedin.entity.client.SystemEntityClient;
import com.linkedin.entity.client.SystemRestliEntityClient;
import com.linkedin.metadata.restli.DefaultRestliClientFactory;
import com.linkedin.metadata.utils.elasticsearch.IndexConventionImpl;
import com.linkedin.parseq.retry.backoff.ExponentialBackoff;
import com.linkedin.util.Configuration;
import config.ConfigurationProvider;
import controllers.SsoCallbackController;
import java.nio.charset.StandardCharsets;
import java.util.Collections;

import io.datahubproject.metadata.context.ActorContext;
import io.datahubproject.metadata.context.AuthorizerContext;
import io.datahubproject.metadata.context.EntityRegistryContext;
import io.datahubproject.metadata.context.OperationContext;
import io.datahubproject.metadata.context.OperationContextConfig;
import io.datahubproject.metadata.context.SearchContext;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.codec.digest.DigestUtils;
import org.apache.http.impl.client.CloseableHttpClient;
Expand All @@ -32,11 +42,15 @@
import org.pac4j.play.store.PlayCookieSessionStore;
import org.pac4j.play.store.PlaySessionStore;
import org.pac4j.play.store.ShiroAesDataEncrypter;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.context.annotation.AnnotationConfigApplicationContext;
import org.springframework.context.annotation.Bean;
import play.Environment;
import play.cache.SyncCacheApi;
import utils.ConfigUtil;

import javax.annotation.Nonnull;

/** Responsible for configuring, validating, and providing authentication related components. */
@Slf4j
public class AuthModule extends AbstractModule {
Expand Down Expand Up @@ -152,6 +166,31 @@ protected Authentication provideSystemAuthentication() {
Collections.emptyMap());
}

/**
 * Provides the singleton system {@link OperationContext} used for internal operations that run
 * under the system identity.
 *
 * @param systemAuthentication the system identity's authentication
 * @param configurationProvider supplies the search authorization configuration
 * @return an {@link OperationContext} configured to allow system authentication
 */
@Provides
@Singleton
@Named("systemOperationContext")
protected OperationContext provideOperationContext(final Authentication systemAuthentication,
    final ConfigurationProvider configurationProvider) {
  // Context configuration: enable system authentication and carry the search
  // authorization settings from application configuration.
  final OperationContextConfig contextConfig =
      OperationContextConfig.builder()
          .searchAuthorizationConfiguration(configurationProvider.getAuthorization().getSearch())
          .allowSystemAuthentication(true)
          .build();

  // Actor context marked as the system actor.
  final ActorContext actorContext =
      ActorContext.builder()
          .systemAuthentication(true)
          .authentication(systemAuthentication)
          .build();

  return OperationContext.builder()
      .operationContextConfig(contextConfig)
      .systemActorContext(actorContext)
      .searchContext(SearchContext.EMPTY)
      .entityRegistryContext(EntityRegistryContext.EMPTY)
      // Authorizer.EMPTY doesn't actually apply to system auth
      .authorizerContext(AuthorizerContext.builder().authorizer(Authorizer.EMPTY).build())
      .build(systemAuthentication);
}

@Provides
@Singleton
protected ConfigurationProvider provideConfigurationProvider() {
Expand All @@ -163,13 +202,13 @@ protected ConfigurationProvider provideConfigurationProvider() {
@Provides
@Singleton
protected SystemEntityClient provideEntityClient(
final Authentication systemAuthentication,
@Named("systemOperationContext") final OperationContext systemOperationContext,
final ConfigurationProvider configurationProvider) {
return new SystemRestliEntityClient(
systemOperationContext,
buildRestliClient(),
new ExponentialBackoff(_configs.getInt(ENTITY_CLIENT_RETRY_INTERVAL)),
_configs.getInt(ENTITY_CLIENT_NUM_RETRIES),
systemAuthentication,
configurationProvider.getCache().getClient().getEntityClient());
}

Expand Down
4 changes: 4 additions & 0 deletions datahub-frontend/app/config/ConfigurationProvider.java
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
package config;

import com.datahub.authorization.AuthorizationConfiguration;
import com.linkedin.metadata.config.VisualConfiguration;
import com.linkedin.metadata.config.cache.CacheConfiguration;
import com.linkedin.metadata.config.kafka.KafkaConfiguration;
Expand All @@ -26,4 +27,7 @@ public class ConfigurationProvider {

/** Configuration for the view layer */
private VisualConfiguration visualConfig;

/** Configuration for authorization */
private AuthorizationConfiguration authorization;
}
4 changes: 2 additions & 2 deletions datahub-frontend/play.gradle
Original file line number Diff line number Diff line change
Expand Up @@ -76,7 +76,7 @@ dependencies {

implementation externalDependency.slf4jApi
compileOnly externalDependency.lombok
runtimeOnly externalDependency.guice4
runtimeOnly externalDependency.guicePlay
runtimeOnly (externalDependency.playDocs) {
exclude group: 'com.typesafe.akka', module: 'akka-http-core_2.12'
}
Expand All @@ -90,7 +90,7 @@ dependencies {

play {
platform {
playVersion = '2.8.18'
playVersion = '2.8.21'
scalaVersion = '2.12'
javaVersion = JavaVersion.VERSION_11
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -138,7 +138,7 @@ private Health computeIncidentsHealthForAsset(
final Filter filter = buildIncidentsEntityFilter(entityUrn, IncidentState.ACTIVE.toString());
final SearchResult searchResult =
_entityClient.filter(
Constants.INCIDENT_ENTITY_NAME, filter, null, 0, 1, context.getAuthentication());
context.getOperationContext(), Constants.INCIDENT_ENTITY_NAME, filter, null, 0, 1);
final Integer activeIncidentCount = searchResult.getNumEntities();
if (activeIncidentCount > 0) {
// There are active incidents.
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -62,12 +62,12 @@ public CompletableFuture<EntityIncidentsResult> get(DataFetchingEnvironment envi
final SortCriterion sortCriterion = buildIncidentsSortCriterion();
final SearchResult searchResult =
_entityClient.filter(
context.getOperationContext(),
Constants.INCIDENT_ENTITY_NAME,
filter,
sortCriterion,
start,
count,
context.getAuthentication());
count);

final List<Urn> incidentUrns =
searchResult.getEntities().stream()
Expand Down
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
package com.linkedin.datahub.graphql.resolvers.incident;

import static com.linkedin.datahub.graphql.resolvers.incident.EntityIncidentsResolver.*;
import static org.mockito.Mockito.mock;
import static org.testng.Assert.*;

import com.datahub.authentication.Authentication;
Expand Down Expand Up @@ -34,6 +35,7 @@
import com.linkedin.metadata.search.SearchResult;
import com.linkedin.metadata.search.utils.QueryUtils;
import graphql.schema.DataFetchingEnvironment;
import io.datahubproject.metadata.context.OperationContext;
import java.util.HashMap;
import java.util.Map;
import org.mockito.Mockito;
Expand Down Expand Up @@ -86,12 +88,12 @@ public void testGetSuccess() throws Exception {

Mockito.when(
mockClient.filter(
Mockito.any(OperationContext.class),
Mockito.eq(Constants.INCIDENT_ENTITY_NAME),
Mockito.eq(expectedFilter),
Mockito.eq(expectedSort),
Mockito.eq(0),
Mockito.eq(10),
Mockito.any(Authentication.class)))
Mockito.eq(10)))
.thenReturn(
new SearchResult()
.setFrom(0)
Expand Down Expand Up @@ -120,6 +122,7 @@ public void testGetSuccess() throws Exception {
// Execute resolver
QueryContext mockContext = Mockito.mock(QueryContext.class);
Mockito.when(mockContext.getAuthentication()).thenReturn(Mockito.mock(Authentication.class));
Mockito.when(mockContext.getOperationContext()).thenReturn(mock(OperationContext.class));
DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class);

Mockito.when(mockEnv.getArgumentOrDefault(Mockito.eq("start"), Mockito.eq(0))).thenReturn(0);
Expand Down
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
package com.linkedin.metadata.aspect.batch;

import com.linkedin.data.DataMap;
import com.linkedin.data.template.RecordTemplate;
import com.linkedin.metadata.aspect.SystemAspect;
import java.lang.reflect.InvocationTargetException;
import javax.annotation.Nonnull;
Expand All @@ -23,6 +24,14 @@ public interface ChangeMCP extends MCPItem {

void setNextAspectVersion(long nextAspectVersion);

/**
 * Returns the record template of the previous version of this aspect, if one exists.
 *
 * @return the previous aspect's {@link RecordTemplate}, or {@code null} when there is no
 *     previous system aspect
 */
@Nullable
default RecordTemplate getPreviousRecordTemplate() {
  // Hoist the accessor so the null check and the dereference act on the same instance,
  // avoiding a redundant second call to getPreviousSystemAspect().
  final SystemAspect previous = getPreviousSystemAspect();
  return previous != null ? previous.getRecordTemplate() : null;
}

default <T> T getPreviousAspect(Class<T> clazz) {
if (getPreviousSystemAspect() != null) {
try {
Expand All @@ -35,8 +44,7 @@ default <T> T getPreviousAspect(Class<T> clazz) {
| NoSuchMethodException e) {
throw new RuntimeException(e);
}
} else {
return null;
}
return null;
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -3,24 +3,25 @@
import static com.linkedin.metadata.Constants.DEFAULT_OWNERSHIP_TYPE_URN;
import static com.linkedin.metadata.Constants.OWNERSHIP_ASPECT_NAME;

import com.linkedin.common.AuditStamp;
import com.linkedin.common.Owner;
import com.linkedin.common.Ownership;
import com.linkedin.common.UrnArray;
import com.linkedin.common.UrnArrayMap;
import com.linkedin.common.urn.Urn;
import com.linkedin.data.template.RecordTemplate;
import com.linkedin.events.metadata.ChangeType;
import com.linkedin.metadata.aspect.AspectRetriever;
import com.linkedin.metadata.aspect.batch.ChangeMCP;
import com.linkedin.metadata.aspect.plugins.config.AspectPluginConfig;
import com.linkedin.metadata.aspect.plugins.hooks.MutationHook;
import com.linkedin.metadata.aspect.plugins.validation.AspectRetriever;
import com.linkedin.metadata.models.AspectSpec;
import com.linkedin.metadata.models.EntitySpec;
import com.linkedin.mxe.SystemMetadata;
import com.linkedin.util.Pair;
import java.util.Collection;
import java.util.Collections;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;

Expand All @@ -31,42 +32,70 @@ public OwnerTypeMap(AspectPluginConfig aspectPluginConfig) {
}

@Override
protected void mutate(
@Nonnull ChangeType changeType,
@Nonnull EntitySpec entitySpec,
@Nonnull AspectSpec aspectSpec,
@Nullable RecordTemplate oldAspectValue,
@Nullable RecordTemplate newAspectValue,
@Nullable SystemMetadata oldSystemMetadata,
@Nullable SystemMetadata newSystemMetadata,
@Nonnull AuditStamp auditStamp,
@Nonnull AspectRetriever aspectRetriever) {
if (OWNERSHIP_ASPECT_NAME.equals(aspectSpec.getName()) && newAspectValue != null) {
Ownership ownership = new Ownership(newAspectValue.data());
if (!ownership.getOwners().isEmpty()) {
protected Stream<Pair<ChangeMCP, Boolean>> writeMutation(
@Nonnull Collection<ChangeMCP> changeMCPS, @Nonnull AspectRetriever aspectRetriever) {

List<Pair<ChangeMCP, Boolean>> results = new LinkedList<>();

for (ChangeMCP item : changeMCPS) {
if (OWNERSHIP_ASPECT_NAME.equals(item.getAspectName()) && item.getRecordTemplate() != null) {
final Map<Urn, Set<Owner>> oldOwnerTypes = groupByOwner(item.getPreviousRecordTemplate());
final Map<Urn, Set<Owner>> newOwnerTypes = groupByOwner(item.getRecordTemplate());

Set<Urn> removed =
oldOwnerTypes.keySet().stream()
.filter(owner -> !newOwnerTypes.containsKey(owner))
.collect(Collectors.toSet());

Set<Urn> updated = newOwnerTypes.keySet();

Map<String, UrnArray> ownerTypes =
Stream.concat(removed.stream(), updated.stream())
.map(
ownerUrn -> {
final String ownerFieldName = encodeFieldName(ownerUrn.toString());
if (removed.contains(ownerUrn)) {
// removed
return Pair.of(ownerFieldName, new UrnArray());
}
// updated
return Pair.of(
ownerFieldName,
new UrnArray(
newOwnerTypes.getOrDefault(ownerUrn, Collections.emptySet()).stream()
.map(
owner ->
owner.getTypeUrn() != null
? owner.getTypeUrn()
: DEFAULT_OWNERSHIP_TYPE_URN)
.collect(Collectors.toSet())));
})
.collect(Collectors.toMap(Pair::getFirst, Pair::getSecond));

Map<Urn, Set<Owner>> ownerTypes =
ownership.getOwners().stream()
.collect(Collectors.groupingBy(Owner::getOwner, Collectors.toSet()));
if (!ownerTypes.isEmpty()) {
item.getAspect(Ownership.class).setOwnerTypes(new UrnArrayMap(ownerTypes));
results.add(Pair.of(item, true));
continue;
}
}

// no op
results.add(Pair.of(item, false));
}

ownership.setOwnerTypes(
new UrnArrayMap(
ownerTypes.entrySet().stream()
.map(
entry ->
Pair.of(
encodeFieldName(entry.getKey().toString()),
new UrnArray(
entry.getValue().stream()
.map(
owner ->
owner.getTypeUrn() != null
? owner.getTypeUrn()
: DEFAULT_OWNERSHIP_TYPE_URN)
.collect(Collectors.toSet()))))
.collect(Collectors.toMap(Pair::getKey, Pair::getValue))));
return results.stream();
}

/**
 * Groups the owners of an ownership aspect by their owner urn.
 *
 * @param ownershipRecordTemplate the ownership aspect's record template, possibly null
 * @return owners keyed by owner urn; an empty map when the template is null or has no owners
 */
private static Map<Urn, Set<Owner>> groupByOwner(
    @Nullable RecordTemplate ownershipRecordTemplate) {
  // Guard: no aspect value at all.
  if (ownershipRecordTemplate == null) {
    return Collections.emptyMap();
  }
  final Ownership ownership = new Ownership(ownershipRecordTemplate.data());
  // Guard: aspect present but owner list empty.
  if (ownership.getOwners().isEmpty()) {
    return Collections.emptyMap();
  }
  return ownership.getOwners().stream()
      .collect(Collectors.groupingBy(Owner::getOwner, Collectors.toSet()));
}

public static String encodeFieldName(String value) {
Expand Down
Loading

0 comments on commit 718813a

Please sign in to comment.