Skip to content

Commit

Permalink
Remove per user metadata caching from CachingHiveMetastore
Browse files — browse the repository at this point in the history
  • Branch information
dain committed Feb 7, 2022
1 parent a738573 commit f4a2844
Show file tree
Hide file tree
Showing 17 changed files with 122 additions and 257 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,7 @@
import io.airlift.json.JsonCodec;
import io.airlift.units.Duration;
import io.trino.plugin.base.CatalogName;
import io.trino.plugin.hive.authentication.HiveIdentity;
import io.trino.plugin.hive.metastore.HiveMetastoreFactory;
import io.trino.plugin.hive.metastore.MetastoreConfig;
import io.trino.plugin.hive.metastore.SemiTransactionalHiveMetastore;
Expand Down Expand Up @@ -192,7 +193,7 @@ public HiveMetadataFactory(
public TransactionalMetadata create(ConnectorIdentity identity, boolean autoCommit)
{
HiveMetastoreClosure hiveMetastoreClosure = new HiveMetastoreClosure(
memoizeMetastore(metastoreFactory.createMetastore(Optional.of(identity)), perTransactionCacheMaximumSize)); // per-transaction cache
memoizeMetastore(metastoreFactory.createMetastore(Optional.of(identity)), new HiveIdentity(identity), perTransactionCacheMaximumSize)); // per-transaction cache

SemiTransactionalHiveMetastore metastore = new SemiTransactionalHiveMetastore(
hdfsEnvironment,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -301,11 +301,6 @@ public Set<HivePrivilegeInfo> listTablePrivileges(String databaseName, String ta
return delegate.listTablePrivileges(databaseName, tableName, tableOwner, principal);
}

public boolean isImpersonationEnabled()
{
return delegate.isImpersonationEnabled();
}

public long openTransaction(HiveIdentity identity)
{
return delegate.openTransaction(identity);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -157,7 +157,7 @@ private ConnectorPageSink createPageSink(HiveWritableTableHandle handle, boolean
session.getQueryId(),
new HivePageSinkMetadataProvider(
handle.getPageSinkMetadata(),
new HiveMetastoreClosure(memoizeMetastore(metastoreFactory.createMetastore(Optional.of(session.getIdentity())), perTransactionMetastoreCacheMaximumSize)),
new HiveMetastoreClosure(memoizeMetastore(metastoreFactory.createMetastore(Optional.of(session.getIdentity())), new HiveIdentity(session), perTransactionMetastoreCacheMaximumSize)),
new HiveIdentity(session)),
typeManager,
hdfsEnvironment,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -356,12 +356,6 @@ public Set<HivePrivilegeInfo> listTablePrivileges(String databaseName,
return delegate.listTablePrivileges(databaseName, tableName, tableOwner, principal);
}

@Override
public boolean isImpersonationEnabled()
{
return delegate.isImpersonationEnabled();
}

@Override
public long openTransaction(HiveIdentity identity)
{
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -143,8 +143,6 @@ default void updatePartitionStatistics(HiveIdentity identity, Table table, Strin
*/
Set<HivePrivilegeInfo> listTablePrivileges(String databaseName, String tableName, Optional<String> tableOwner, Optional<HivePrincipal> principal);

boolean isImpersonationEnabled();

default long openTransaction(HiveIdentity identity)
{
throw new UnsupportedOperationException();
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -479,10 +479,4 @@ public Set<HivePrivilegeInfo> listTablePrivileges(String databaseName, String ta
{
throw new TrinoException(NOT_SUPPORTED, "listTablePrivileges");
}

@Override
public boolean isImpersonationEnabled()
{
return false;
}
}

Large diffs are not rendered by default.

Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,7 @@
import com.google.common.util.concurrent.UncheckedExecutionException;
import io.airlift.units.Duration;
import io.trino.plugin.base.CatalogName;
import io.trino.plugin.hive.authentication.HiveIdentity;
import io.trino.plugin.hive.metastore.HiveMetastore;
import io.trino.plugin.hive.metastore.HiveMetastoreFactory;
import io.trino.spi.NodeManager;
Expand Down Expand Up @@ -126,6 +127,7 @@ public HiveMetastoreFactory createCachingHiveMetastoreFactory(HiveMetastoreFacto
// In case there are no empty executor slots, such operation would deadlock. Therefore, a reentrant executor needs to be
// used.
metastoreFactory.createMetastore(Optional.empty()),
HiveIdentity.none(),
new ReentrantBoundedExecutor(executorService, maxMetastoreRefreshThreads),
metastoreCacheTtl,
metastoreRefreshInterval,
Expand Down Expand Up @@ -200,8 +202,10 @@ public HiveMetastore createMetastore(Optional<ConnectorIdentity> identity)

private HiveMetastore createUserCachingMetastore(String user)
{
ConnectorIdentity identity = ConnectorIdentity.ofUser(user);
return cachingHiveMetastore(
metastoreFactory.createMetastore(Optional.of(ConnectorIdentity.ofUser(user))),
metastoreFactory.createMetastore(Optional.of(identity)),
new HiveIdentity(identity),
new ReentrantBoundedExecutor(executorService, maxMetastoreRefreshThreads),
metastoreCacheTtl,
metastoreRefreshInterval,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -1146,12 +1146,6 @@ public synchronized void revokeTablePrivileges(String databaseName, String table
setTablePrivileges(grantee, databaseName, tableName, Sets.difference(currentPrivileges, privilegesToRemove));
}

@Override
public boolean isImpersonationEnabled()
{
return false;
}

private synchronized void setTablePrivileges(
HivePrincipal grantee,
String databaseName,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -1128,10 +1128,4 @@ public Set<HivePrivilegeInfo> listTablePrivileges(String databaseName, String ta
{
return ImmutableSet.of();
}

@Override
public boolean isImpersonationEnabled()
{
return false;
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -356,12 +356,6 @@ public Set<RoleGrant> listRoleGrants(HivePrincipal principal)
() -> delegate.listRoleGrants(principal));
}

@Override
public boolean isImpersonationEnabled()
{
return delegate.isImpersonationEnabled();
}

private void verifyRecordingMode()
{
if (recording.isReplay()) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -463,12 +463,6 @@ public Set<HivePrivilegeInfo> listTablePrivileges(String databaseName, String ta
return delegate.listTablePrivileges(databaseName, tableName, tableOwner, principal);
}

@Override
public boolean isImpersonationEnabled()
{
return delegate.isImpersonationEnabled();
}

@Override
public Optional<String> getConfigValue(String name)
{
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -791,6 +791,7 @@ protected final void setup(String host, int port, String databaseName, String ti
new ThriftMetastoreConfig(),
hdfsEnvironment,
false)),
HiveIdentity.none(),
executor,
new Duration(1, MINUTES),
Optional.of(new Duration(15, SECONDS)),
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -277,10 +277,4 @@ public Set<RoleGrant> listRoleGrants(HivePrincipal principal)
{
throw new UnsupportedOperationException();
}

@Override
public boolean isImpersonationEnabled()
{
throw new UnsupportedOperationException();
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -125,6 +125,7 @@ public void setUp()
executor = listeningDecorator(newCachedThreadPool(daemonThreadsNamed(getClass().getSimpleName() + "-%s")));
metastore = cachingHiveMetastore(
new BridgingHiveMetastore(thriftHiveMetastore),
IDENTITY,
executor,
new Duration(5, TimeUnit.MINUTES),
Optional.of(new Duration(1, TimeUnit.MINUTES)),
Expand Down Expand Up @@ -480,6 +481,7 @@ public void testCachingHiveMetastoreCreationViaMemoize()
ThriftHiveMetastore thriftHiveMetastore = createThriftHiveMetastore();
metastore = (CachingHiveMetastore) memoizeMetastore(
new BridgingHiveMetastore(thriftHiveMetastore),
IDENTITY,
1000);

assertEquals(mockClient.getAccessCount(), 0);
Expand Down Expand Up @@ -566,17 +568,12 @@ public Map<String, Optional<Partition>> getPartitionsByNames(HiveIdentity identi

return result;
}

@Override
public boolean isImpersonationEnabled()
{
return false;
}
};

// Caching metastore
metastore = cachingHiveMetastore(
mockMetastore,
IDENTITY,
executor,
new Duration(5, TimeUnit.MINUTES),
Optional.of(new Duration(1, TimeUnit.MINUTES)),
Expand Down Expand Up @@ -670,6 +667,7 @@ private CachingHiveMetastore createMetastoreWithDirectExecutor(CachingHiveMetast
{
return (CachingHiveMetastore) cachingHiveMetastore(
new BridgingHiveMetastore(createThriftHiveMetastore()),
IDENTITY,
directExecutor(),
config.getMetastoreCacheTtl(),
config.getMetastoreRefreshInterval(),
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,7 @@
import io.trino.plugin.hive.HdfsEnvironment;
import io.trino.plugin.hive.HiveConfig;
import io.trino.plugin.hive.NodeVersion;
import io.trino.plugin.hive.authentication.HiveIdentity;
import io.trino.plugin.hive.metastore.HiveMetastoreFactory;
import io.trino.plugin.iceberg.catalog.IcebergTableOperationsProvider;
import io.trino.spi.TrinoException;
Expand Down Expand Up @@ -77,7 +78,7 @@ public TrinoCatalog create(ConnectorIdentity identity)
case HIVE_METASTORE:
return new TrinoHiveCatalog(
catalogName,
memoizeMetastore(metastoreFactory.createMetastore(Optional.of(identity)), 1000),
memoizeMetastore(metastoreFactory.createMetastore(Optional.of(identity)), new HiveIdentity(identity), 1000),
hdfsEnvironment,
typeManager,
tableOperationsProvider,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -299,13 +299,6 @@ public Set<HivePrivilegeInfo> listTablePrivileges(String databaseName, String ta
throw new UnsupportedOperationException();
}

@Override
public boolean isImpersonationEnabled()
{
// Local operation, doesn't need to be included in methodInvocations
return delegate.isImpersonationEnabled();
}

@Override
public PartitionStatistics getTableStatistics(HiveIdentity identity, Table table)
{
Expand Down

0 comments on commit f4a2844

Please sign in to comment.