wip access controls
TODO:
* cache keys - need to be context aware to prevent incorrect results (see the sketch after this list)
* ownership migration upgrade step
* complete unit tests for access controls
* restricted entity hydration and graphql response (chris)
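A minimal sketch of the context-aware cache key idea from the first TODO item (the class and field names are hypothetical and not part of this commit):

```java
import java.util.Objects;

// Hypothetical sketch: include the caller's identity in the cache key so that two
// actors with different visibility never share a cached search or entity result.
public final class ContextAwareCacheKey {
  private final String actorUrn;          // who is asking (assumption: actor URN captures visibility)
  private final String queryFingerprint;  // identifier/hash of the query being cached

  public ContextAwareCacheKey(String actorUrn, String queryFingerprint) {
    this.actorUrn = Objects.requireNonNull(actorUrn);
    this.queryFingerprint = Objects.requireNonNull(queryFingerprint);
  }

  @Override
  public boolean equals(Object o) {
    if (this == o) {
      return true;
    }
    if (!(o instanceof ContextAwareCacheKey)) {
      return false;
    }
    ContextAwareCacheKey that = (ContextAwareCacheKey) o;
    return actorUrn.equals(that.actorUrn) && queryFingerprint.equals(that.queryFingerprint);
  }

  @Override
  public int hashCode() {
    return Objects.hash(actorUrn, queryFingerprint);
  }
}
```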
david-leifker committed Feb 22, 2024
1 parent 4a44be8 commit ef56331
Showing 37 changed files with 334 additions and 136 deletions.
@@ -138,7 +138,7 @@ private Health computeIncidentsHealthForAsset(
final Filter filter = buildIncidentsEntityFilter(entityUrn, IncidentState.ACTIVE.toString());
final SearchResult searchResult =
_entityClient.filter(
Constants.INCIDENT_ENTITY_NAME, filter, null, 0, 1, context.getAuthentication());
context.getOperationContext(), Constants.INCIDENT_ENTITY_NAME, filter, null, 0, 1);
final Integer activeIncidentCount = searchResult.getNumEntities();
if (activeIncidentCount > 0) {
// There are active incidents.
@@ -62,12 +62,12 @@ public CompletableFuture<EntityIncidentsResult> get(DataFetchingEnvironment envi
final SortCriterion sortCriterion = buildIncidentsSortCriterion();
final SearchResult searchResult =
_entityClient.filter(
context.getOperationContext(),
Constants.INCIDENT_ENTITY_NAME,
filter,
sortCriterion,
start,
count,
context.getAuthentication());
count);

final List<Urn> incidentUrns =
searchResult.getEntities().stream()
@@ -1,6 +1,7 @@
package com.linkedin.datahub.graphql.resolvers.incident;

import static com.linkedin.datahub.graphql.resolvers.incident.EntityIncidentsResolver.*;
import static org.mockito.Mockito.mock;
import static org.testng.Assert.*;

import com.datahub.authentication.Authentication;
@@ -34,6 +35,7 @@
import com.linkedin.metadata.search.SearchResult;
import com.linkedin.metadata.search.utils.QueryUtils;
import graphql.schema.DataFetchingEnvironment;
import io.datahubproject.metadata.context.OperationContext;
import java.util.HashMap;
import java.util.Map;
import org.mockito.Mockito;
@@ -86,12 +88,12 @@ public void testGetSuccess() throws Exception {

Mockito.when(
mockClient.filter(
Mockito.any(OperationContext.class),
Mockito.eq(Constants.INCIDENT_ENTITY_NAME),
Mockito.eq(expectedFilter),
Mockito.eq(expectedSort),
Mockito.eq(0),
Mockito.eq(10),
Mockito.any(Authentication.class)))
Mockito.eq(10)))
.thenReturn(
new SearchResult()
.setFrom(0)
@@ -120,6 +122,7 @@ public void testGetSuccess() throws Exception {
// Execute resolver
QueryContext mockContext = Mockito.mock(QueryContext.class);
Mockito.when(mockContext.getAuthentication()).thenReturn(Mockito.mock(Authentication.class));
Mockito.when(mockContext.getOperationContext()).thenReturn(mock(OperationContext.class));
DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class);

Mockito.when(mockEnv.getArgumentOrDefault(Mockito.eq("start"), Mockito.eq(0))).thenReturn(0);
@@ -1,6 +1,7 @@
package com.linkedin.metadata.aspect.batch;

import com.linkedin.data.DataMap;
import com.linkedin.data.template.RecordTemplate;
import com.linkedin.metadata.aspect.SystemAspect;
import java.lang.reflect.InvocationTargetException;
import javax.annotation.Nonnull;
@@ -23,6 +24,14 @@ public interface ChangeMCP extends MCPItem {

void setNextAspectVersion(long nextAspectVersion);

@Nullable
default RecordTemplate getPreviousRecordTemplate() {
if (getPreviousSystemAspect() != null) {
return getPreviousSystemAspect().getRecordTemplate();
}
return null;
}

default <T> T getPreviousAspect(Class<T> clazz) {
if (getPreviousSystemAspect() != null) {
try {
@@ -35,8 +44,7 @@ default <T> T getPreviousAspect(Class<T> clazz) {
| NoSuchMethodException e) {
throw new RuntimeException(e);
}
} else {
return null;
}
return null;
}
}
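The new getPreviousRecordTemplate()/getPreviousAspect(...) defaults give write hooks a convenient view of the aspect being replaced. A rough usage sketch (the helper class is hypothetical; ChangeMCP, Ownership and Owner are the real types used elsewhere in this diff):

```java
import com.linkedin.common.Owner;
import com.linkedin.common.Ownership;
import com.linkedin.common.urn.Urn;
import com.linkedin.metadata.aspect.batch.ChangeMCP;
import java.util.Collections;
import java.util.Set;
import java.util.stream.Collectors;

public final class OwnershipDiffExample {
  /** Owner URNs present in the incoming Ownership aspect but absent from the previous one. */
  public static Set<Urn> addedOwners(ChangeMCP item) {
    Ownership previous = item.getPreviousAspect(Ownership.class); // null on first write
    Ownership incoming = item.getAspect(Ownership.class);
    Set<Urn> before =
        previous == null
            ? Collections.<Urn>emptySet()
            : previous.getOwners().stream().map(Owner::getOwner).collect(Collectors.toSet());
    return incoming.getOwners().stream()
        .map(Owner::getOwner)
        .filter(urn -> !before.contains(urn))
        .collect(Collectors.toSet());
  }
}
```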
@@ -3,24 +3,25 @@
import static com.linkedin.metadata.Constants.DEFAULT_OWNERSHIP_TYPE_URN;
import static com.linkedin.metadata.Constants.OWNERSHIP_ASPECT_NAME;

import com.linkedin.common.AuditStamp;
import com.linkedin.common.Owner;
import com.linkedin.common.Ownership;
import com.linkedin.common.UrnArray;
import com.linkedin.common.UrnArrayMap;
import com.linkedin.common.urn.Urn;
import com.linkedin.data.template.RecordTemplate;
import com.linkedin.events.metadata.ChangeType;
import com.linkedin.metadata.aspect.AspectRetriever;
import com.linkedin.metadata.aspect.batch.ChangeMCP;
import com.linkedin.metadata.aspect.plugins.config.AspectPluginConfig;
import com.linkedin.metadata.aspect.plugins.hooks.MutationHook;
import com.linkedin.metadata.aspect.plugins.validation.AspectRetriever;
import com.linkedin.metadata.models.AspectSpec;
import com.linkedin.metadata.models.EntitySpec;
import com.linkedin.mxe.SystemMetadata;
import com.linkedin.util.Pair;
import java.util.Collection;
import java.util.Collections;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;

@@ -31,42 +32,70 @@ public OwnerTypeMap(AspectPluginConfig aspectPluginConfig) {
}

@Override
protected void mutate(
@Nonnull ChangeType changeType,
@Nonnull EntitySpec entitySpec,
@Nonnull AspectSpec aspectSpec,
@Nullable RecordTemplate oldAspectValue,
@Nullable RecordTemplate newAspectValue,
@Nullable SystemMetadata oldSystemMetadata,
@Nullable SystemMetadata newSystemMetadata,
@Nonnull AuditStamp auditStamp,
@Nonnull AspectRetriever aspectRetriever) {
if (OWNERSHIP_ASPECT_NAME.equals(aspectSpec.getName()) && newAspectValue != null) {
Ownership ownership = new Ownership(newAspectValue.data());
if (!ownership.getOwners().isEmpty()) {
protected Stream<Pair<ChangeMCP, Boolean>> writeMutation(
@Nonnull Collection<ChangeMCP> changeMCPS, @Nonnull AspectRetriever aspectRetriever) {

List<Pair<ChangeMCP, Boolean>> results = new LinkedList<>();

for (ChangeMCP item : changeMCPS) {
if (OWNERSHIP_ASPECT_NAME.equals(item.getAspectName()) && item.getRecordTemplate() != null) {
final Map<Urn, Set<Owner>> oldOwnerTypes = groupByOwner(item.getPreviousRecordTemplate());
final Map<Urn, Set<Owner>> newOwnerTypes = groupByOwner(item.getRecordTemplate());

Set<Urn> removed =
oldOwnerTypes.keySet().stream()
.filter(owner -> !newOwnerTypes.containsKey(owner))
.collect(Collectors.toSet());

Set<Urn> updated = newOwnerTypes.keySet();

Map<String, UrnArray> ownerTypes =
Stream.concat(removed.stream(), updated.stream())
.map(
ownerUrn -> {
final String ownerFieldName = encodeFieldName(ownerUrn.toString());
if (removed.contains(ownerUrn)) {
// removed
return Pair.of(ownerFieldName, new UrnArray());
}
// updated
return Pair.of(
ownerFieldName,
new UrnArray(
newOwnerTypes.getOrDefault(ownerUrn, Collections.emptySet()).stream()
.map(
owner ->
owner.getTypeUrn() != null
? owner.getTypeUrn()
: DEFAULT_OWNERSHIP_TYPE_URN)
.collect(Collectors.toSet())));
})
.collect(Collectors.toMap(Pair::getFirst, Pair::getSecond));

Map<Urn, Set<Owner>> ownerTypes =
ownership.getOwners().stream()
.collect(Collectors.groupingBy(Owner::getOwner, Collectors.toSet()));
if (!ownerTypes.isEmpty()) {
item.getAspect(Ownership.class).setOwnerTypes(new UrnArrayMap(ownerTypes));
results.add(Pair.of(item, true));
continue;
}
}

// no op
results.add(Pair.of(item, false));
}

ownership.setOwnerTypes(
new UrnArrayMap(
ownerTypes.entrySet().stream()
.map(
entry ->
Pair.of(
encodeFieldName(entry.getKey().toString()),
new UrnArray(
entry.getValue().stream()
.map(
owner ->
owner.getTypeUrn() != null
? owner.getTypeUrn()
: DEFAULT_OWNERSHIP_TYPE_URN)
.collect(Collectors.toSet()))))
.collect(Collectors.toMap(Pair::getKey, Pair::getValue))));
return results.stream();
}

private static Map<Urn, Set<Owner>> groupByOwner(
@Nullable RecordTemplate ownershipRecordTemplate) {
if (ownershipRecordTemplate != null) {
Ownership ownership = new Ownership(ownershipRecordTemplate.data());
if (!ownership.getOwners().isEmpty()) {
return ownership.getOwners().stream()
.collect(Collectors.groupingBy(Owner::getOwner, Collectors.toSet()));
}
}
return Collections.emptyMap();
}

public static String encodeFieldName(String value) {
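For reference, the core grouping-with-default logic that the writeMutation above applies can be shown in isolation. This standalone sketch (made-up class name) skips the encodeFieldName step and the emptying of removed owners' entries:

```java
import com.linkedin.common.Owner;
import com.linkedin.common.Ownership;
import com.linkedin.common.UrnArray;
import com.linkedin.common.urn.Urn;
import java.util.Map;
import java.util.stream.Collectors;

public final class OwnerTypeGroupingSketch {
  /** Groups owners by owner URN, falling back to a default type when no typeUrn is set. */
  public static Map<String, UrnArray> ownerTypes(Ownership ownership, Urn defaultType) {
    return ownership.getOwners().stream()
        .collect(
            Collectors.groupingBy(
                owner -> owner.getOwner().toString(),
                Collectors.collectingAndThen(
                    Collectors.mapping(
                        (Owner owner) ->
                            owner.getTypeUrn() != null ? owner.getTypeUrn() : defaultType,
                        Collectors.toSet()),
                    UrnArray::new)));
  }
}
```

With two owners, one typed and one untyped, each owner URN maps to a single-element UrnArray, the untyped one pointing at the default type (the hook itself uses Constants.DEFAULT_OWNERSHIP_TYPE_URN).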
@@ -20,6 +20,8 @@ public class SearchableAnnotation {

public static final String FIELD_NAME_ALIASES = "fieldNameAliases";
public static final String ANNOTATION_NAME = "Searchable";
public static final Set<FieldType> OBJECT_FIELD_TYPES =
ImmutableSet.of(FieldType.OBJECT, FieldType.MAP_ARRAY);
private static final Set<FieldType> DEFAULT_QUERY_FIELD_TYPES =
ImmutableSet.of(
FieldType.TEXT,
@@ -71,7 +73,8 @@ public enum FieldType {
OBJECT,
BROWSE_PATH_V2,
WORD_GRAM,
DOUBLE
DOUBLE,
MAP_ARRAY
}

@Nonnull
@@ -1,10 +1,15 @@
package com.datahub.authorization.config;

import lombok.AccessLevel;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;

@Builder
@Builder(toBuilder = true)
@Data
@AllArgsConstructor(access = AccessLevel.PACKAGE)
@NoArgsConstructor(access = AccessLevel.PACKAGE)
public class SearchAuthorizationConfiguration {
private boolean enabled;
}
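Since @Builder(toBuilder = true) generates a toBuilder() method, an existing configuration can be copied with individual fields overridden rather than rebuilt from scratch. A small usage sketch (the surrounding class and method are illustrative only):

```java
import com.datahub.authorization.config.SearchAuthorizationConfiguration;

class SearchAuthzConfigExample {
  // Copy an existing config and flip the flag without mutating the original instance.
  static SearchAuthorizationConfiguration enableSearchAuthorization(
      SearchAuthorizationConfiguration base) {
    return base.toBuilder().enabled(true).build();
  }
}
```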
@@ -224,7 +224,7 @@ def get_resource_owners_work_unit(

if not current_ownership:
# If we want to overwrite or there are no existing tags, create a new GlobalTags object
current_ownership = OwnershipClass(owners, get_audit_stamp())
current_ownership = OwnershipClass(owners, lastModified=get_audit_stamp())
else:
current_owner_urns: Set[str] = set(
[owner.owner for owner in current_ownership.owners]
2 changes: 1 addition & 1 deletion metadata-ingestion/tests/unit/test_rest_sink.py
@@ -235,7 +235,7 @@
"changeType": "UPSERT",
"aspectName": "ownership",
"aspect": {
"value": '{"owners": [{"owner": "urn:li:corpuser:fbar", "type": "DATAOWNER"}], "lastModified": {"time": 0, "actor": "urn:li:corpuser:fbar"}}',
"value": '{"owners": [{"owner": "urn:li:corpuser:fbar", "type": "DATAOWNER"}], "ownerTypes": {}, "lastModified": {"time": 0, "actor": "urn:li:corpuser:fbar"}}',
"contentType": "application/json",
},
}
@@ -63,6 +63,9 @@ public Pair<Map<String, Set<String>>, List<ChangeMCP>> toUpsertBatchItems(
upsertItem = patchBatchItem.applyPatch(currentValue, aspectRetriever);
}

// Populate old aspect for write hooks
upsertItem.setPreviousSystemAspect(latest);

return upsertItem;
})
.collect(Collectors.toCollection(LinkedList::new));
@@ -190,16 +190,18 @@ public SearchResult filter(
@Nonnull OperationContext opContext,
@Nonnull String entityName,
@Nullable Filter filters,
@Nonnull SearchFlags searchFlags,
@Nullable SearchFlags searchFlags,
@Nullable SortCriterion sortCriterion,
int from,
int size) {
log.debug(
String.format(
"Filtering Search documents entityName: %s, filters: %s, sortCriterion: %s, from: %s, size: %s",
entityName, filters, sortCriterion, from, size));
SearchFlags finalSearchFlags =
applyDefaultSearchFlags(searchFlags, null, DEFAULT_SERVICE_SEARCH_FLAGS);
return esSearchDAO.filter(
opContext, entityName, filters, searchFlags, sortCriterion, from, size);
opContext, entityName, filters, finalSearchFlags, sortCriterion, from, size);
}

@Nonnull
@@ -317,10 +319,19 @@ public ScrollResult fullTextScroll(
String.format(
"Scrolling Structured Search documents entities: %s, input: %s, postFilters: %s, sortCriterion: %s, scrollId: %s, size: %s",
entities, input, postFilters, sortCriterion, scrollId, size));
SearchFlags flags = Optional.ofNullable(searchFlags).orElse(new SearchFlags());
flags.setFulltext(true);
SearchFlags finalSearchFlags =
applyDefaultSearchFlags(searchFlags, null, DEFAULT_SERVICE_SEARCH_FLAGS);
finalSearchFlags.setFulltext(true);
return esSearchDAO.scroll(
opContext, entities, input, postFilters, sortCriterion, scrollId, keepAlive, size, flags);
opContext,
entities,
input,
postFilters,
sortCriterion,
scrollId,
keepAlive,
size,
finalSearchFlags);
}

@Nonnull
@@ -339,10 +350,19 @@ public ScrollResult structuredScroll(
String.format(
"Scrolling FullText Search documents entities: %s, input: %s, postFilters: %s, sortCriterion: %s, scrollId: %s, size: %s",
entities, input, postFilters, sortCriterion, scrollId, size));
SearchFlags flags = Optional.ofNullable(searchFlags).orElse(new SearchFlags());
flags.setFulltext(false);
SearchFlags finalSearchFlags =
applyDefaultSearchFlags(searchFlags, null, DEFAULT_SERVICE_SEARCH_FLAGS);
finalSearchFlags.setFulltext(false);
return esSearchDAO.scroll(
opContext, entities, input, postFilters, sortCriterion, scrollId, keepAlive, size, flags);
opContext,
entities,
input,
postFilters,
sortCriterion,
scrollId,
keepAlive,
size,
finalSearchFlags);
}

public Optional<SearchResponse> raw(@Nonnull String indexName, @Nullable String jsonQuery) {
@@ -356,6 +376,7 @@ public int maxResultSize() {

@Override
public ExplainResponse explain(
@Nonnull OperationContext opContext,
@Nonnull String query,
@Nonnull String documentId,
@Nonnull String entityName,
@@ -366,13 +387,17 @@ public ExplainResponse explain(
@Nullable String keepAlive,
int size,
@Nullable List<String> facets) {
SearchFlags finalSearchFlags =
applyDefaultSearchFlags(searchFlags, null, DEFAULT_SERVICE_SEARCH_FLAGS);

return esSearchDAO.explain(
opContext,
query,
documentId,
entityName,
postFilters,
sortCriterion,
searchFlags,
finalSearchFlags,
scrollId,
keepAlive,
size,
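The filter/scroll/explain changes above all normalize a possibly-null SearchFlags before handing it to the DAO, then force the fulltext bit for the scroll variants. A minimal sketch of that defaulting-then-override pattern (a standalone helper, not the project's applyDefaultSearchFlags; the fallback here is simply new SearchFlags()):

```java
import com.linkedin.metadata.query.SearchFlags;
import javax.annotation.Nullable;

final class ScrollFlagsSketch {
  // Fall back to default flags when the caller passed none, then pin the fulltext
  // setting for the chosen scroll flavor (true for fullTextScroll, false for structuredScroll).
  static SearchFlags resolveScrollFlags(@Nullable SearchFlags requested, boolean fulltext) {
    SearchFlags flags = requested != null ? requested : new SearchFlags();
    flags.setFulltext(fulltext);
    return flags;
  }
}
```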