From 00ed819419a4959a6d62da2fc5477621c046eff0 Mon Sep 17 00:00:00 2001 From: Michael Date: Wed, 29 May 2019 07:20:28 -0700 Subject: [PATCH] feat: feature/@key (#1463) * Initial @key implementation * Adding support for GSI/LSIs with @key. * Adding support for create, update, delete, & get with updated index structures. Still need to update list as well as implement the new query resolver using existing parts. The last change is to handle the partial key update scenario in update operations. * Adding support for listX. * Adding custom-index test steps and fixing lint issue * Updating testing steps * Update testing-custom-indexes.md * Updating instructions * Adding @auth support for @key queryFields and adding more tests * pulling testing instruction updates * Rearranging for logical consistency * Fixing compilation errors * fixing compilation errors * Fixing TSC errors * Moving to new sort key structure and fixing tests * Fixing circle ci issue * Fixing test * Fixing circleci build error * Adding updated auth snapshots * Adding update mutation validation to secondary keys to protect composite sort key integrity * Fixing list with filter operations * Updating snapshots * Addressing PR comments --- .../lib/transform-graphql-schema.js | 6 +- .../package.json | 1 + .../src/ModelAuthTransformer.ts | 30 +- .../OperationsArgument.test.ts.snap | 220 ++++-- .../src/ModelConnectionTransformer.ts | 16 +- .../src/resources.ts | 83 +- .../src/DynamoDBModelTransformer.ts | 6 +- .../src/definitions.ts | 61 -- .../src/resources.ts | 186 +++-- .../src/__tests__/FunctionTransformer.test.ts | 10 +- packages/graphql-key-transformer/package.json | 42 + .../src/KeyTransformer.ts | 743 ++++++++++++++++++ .../src/__tests__/KeyTransformer.test.ts | 71 ++ packages/graphql-key-transformer/src/index.ts | 2 + .../graphql-key-transformer/tsconfig.json | 18 + packages/graphql-key-transformer/tslint.json | 6 + .../graphql-mapping-template/src/dynamodb.ts | 57 +- .../src/ModelResourceIDs.ts | 17 +- .../src/ResolverResourceIDs.ts | 5 +- .../src/ResourceConstants.ts | 4 + .../src/connectionUtils.ts | 15 + .../src/definition.ts | 15 +- .../src/dynamodbUtils.ts | 497 ++++++++++++ .../graphql-transformer-common/src/index.ts | 2 + .../graphql-transformer-core/src/errors.ts | 5 + .../package.json | 1 + .../src/__tests__/KeyTransformer.e2e.test.ts | 478 +++++++++++ .../__tests__/KeyTransformerLocal.e2e.test.ts | 325 ++++++++ .../src/__tests__/KeyWithAuth.e2e.test.ts | 356 +++++++++ .../src/testUtil.ts | 56 ++ testing-custom-indexes.md | 163 ++++ 31 files changed, 3211 insertions(+), 286 deletions(-) create mode 100644 packages/graphql-key-transformer/package.json create mode 100644 packages/graphql-key-transformer/src/KeyTransformer.ts create mode 100644 packages/graphql-key-transformer/src/__tests__/KeyTransformer.test.ts create mode 100644 packages/graphql-key-transformer/src/index.ts create mode 100644 packages/graphql-key-transformer/tsconfig.json create mode 100644 packages/graphql-key-transformer/tslint.json create mode 100644 packages/graphql-transformer-common/src/connectionUtils.ts create mode 100644 packages/graphql-transformer-common/src/dynamodbUtils.ts create mode 100644 packages/graphql-transformers-e2e-tests/src/__tests__/KeyTransformer.e2e.test.ts create mode 100644 packages/graphql-transformers-e2e-tests/src/__tests__/KeyTransformerLocal.e2e.test.ts create mode 100644 packages/graphql-transformers-e2e-tests/src/__tests__/KeyWithAuth.e2e.test.ts create mode 100644 
packages/graphql-transformers-e2e-tests/src/testUtil.ts create mode 100644 testing-custom-indexes.md diff --git a/packages/amplify-provider-awscloudformation/lib/transform-graphql-schema.js b/packages/amplify-provider-awscloudformation/lib/transform-graphql-schema.js index ea373de70ee..034d05c5dea 100644 --- a/packages/amplify-provider-awscloudformation/lib/transform-graphql-schema.js +++ b/packages/amplify-provider-awscloudformation/lib/transform-graphql-schema.js @@ -9,6 +9,7 @@ const SearchableModelTransformer = require('graphql-elasticsearch-transformer'). const VersionedModelTransformer = require('graphql-versioned-transformer').default; const FunctionTransformer = require('graphql-function-transformer').default; const HTTPTransformer = require('graphql-http-transformer').default; +const KeyTransformer = require('graphql-key-transformer').default; const providerName = require('./constants').ProviderName; const TransformPackage = require('graphql-transformer-core'); @@ -211,10 +212,13 @@ async function transformGraphQLSchema(context, options) { const transformerList = [ new DynamoDBModelTransformer(getModelConfig(project)), new ModelConnectionTransformer(), - new ModelAuthTransformer({ authMode }), new VersionedModelTransformer(), new FunctionTransformer(), new HTTPTransformer(), + new KeyTransformer(), + // TODO: Build dependency mechanism into transformers. Auth runs last + // so any resolvers that need to be protected will already be created. + new ModelAuthTransformer({ authMode }), ]; if (usedDirectives.includes('searchable')) { diff --git a/packages/amplify-provider-awscloudformation/package.json b/packages/amplify-provider-awscloudformation/package.json index 3e30e04e856..f807f481fe3 100755 --- a/packages/amplify-provider-awscloudformation/package.json +++ b/packages/amplify-provider-awscloudformation/package.json @@ -23,6 +23,7 @@ "graphql-dynamodb-transformer": "3.7.0", "graphql-elasticsearch-transformer": "3.6.0", "graphql-function-transformer": "1.0.2", + "graphql-key-transformer": "^1.0.0", "graphql-http-transformer": "3.4.6", "graphql-transformer-common": "3.7.0", "graphql-transformer-core": "3.6.3", diff --git a/packages/graphql-auth-transformer/src/ModelAuthTransformer.ts b/packages/graphql-auth-transformer/src/ModelAuthTransformer.ts index dc9b6daf5e9..49462ab3dae 100644 --- a/packages/graphql-auth-transformer/src/ModelAuthTransformer.ts +++ b/packages/graphql-auth-transformer/src/ModelAuthTransformer.ts @@ -1,4 +1,4 @@ -import { Transformer, TransformerContext, InvalidDirectiveError, gql } from 'graphql-transformer-core' +import { Transformer, TransformerContext, InvalidDirectiveError, gql, getDirectiveArguments } from 'graphql-transformer-core' import GraphQLAPI from 'cloudform-types/types/appSync/graphQlApi' import { ResourceFactory } from './resources' import { AuthRule, ModelQuery, ModelMutation, ModelOperation } from './AuthRule' @@ -186,6 +186,7 @@ export class ModelAuthTransformer extends Transformer { this.protectGetQuery(ctx, ResolverResourceIDs.DynamoDBGetResolverResourceID(def.name.value), queryRules.get) this.protectListQuery(ctx, ResolverResourceIDs.DynamoDBListResolverResourceID(def.name.value), queryRules.list) this.protectConnections(ctx, def, operationRules.read) + this.protectQueries(ctx, def, operationRules.read) } public field = ( @@ -834,12 +835,11 @@ All @auth directives used on field definitions are performed when the field is r */ private protectConnections(ctx: TransformerContext, def: ObjectTypeDefinitionNode, rules: AuthRule[]) { const 
thisModelName = def.name.value; - const connectionResolvers = {}; for (const inputDef of ctx.inputDocument.definitions) { if (inputDef.kind === Kind.OBJECT_TYPE_DEFINITION) { for (const field of inputDef.fields) { const returnTypeName = getBaseType(field.type) - if (hasDirective(field, 'connection') && returnTypeName === thisModelName) { + if (fieldHasDirective(field, 'connection') && returnTypeName === thisModelName) { const resolverResourceId = ResolverResourceIDs.ResolverResourceID(inputDef.name.value, field.name.value) if (isListType(field.type)) { this.protectListQuery(ctx, resolverResourceId, rules) @@ -852,6 +852,26 @@ All @auth directives used on field definitions are performed when the field is r } } + /** + * When read operations are protected via @auth, all secondary @key query resolvers will be protected. + * Find the directives & update their resolvers with auth logic + */ + private protectQueries(ctx: TransformerContext, def: ObjectTypeDefinitionNode, rules: AuthRule[]) { + const secondaryKeyDirectivesWithQueries = (def.directives || []).filter(d => { + const isKey = d.name.value === 'key'; + const args = getDirectiveArguments(d); + // @key with a name is a secondary key. + const isSecondaryKey = Boolean(args.name); + const hasQueryField = Boolean(args.queryField); + return isKey && isSecondaryKey && hasQueryField; + }); + for (const keyWithQuery of secondaryKeyDirectivesWithQueries) { + const args = getDirectiveArguments(keyWithQuery); + const resolverResourceId = ResolverResourceIDs.ResolverResourceID(ctx.getQueryTypeName(), args.queryField); + this.protectListQuery(ctx, resolverResourceId, rules) + } + } + private getOwnerRules(rules: AuthRule[]): AuthRule[] { return rules.filter(rule => rule.allow === 'owner'); } @@ -866,12 +886,14 @@ All @auth directives used on field definitions are performed when the field is r } -function hasDirective(field: FieldDefinitionNode, directiveName: string): boolean { +function fieldHasDirective(field: FieldDefinitionNode, directiveName: string): boolean { return field.directives && field.directives.length && Boolean(field.directives.find( (d: DirectiveNode) => d.name.value === directiveName )) } + + function isTruthyOrNull(obj: any): boolean { return obj || obj === null; } diff --git a/packages/graphql-auth-transformer/src/__tests__/__snapshots__/OperationsArgument.test.ts.snap b/packages/graphql-auth-transformer/src/__tests__/__snapshots__/OperationsArgument.test.ts.snap index 2d056022c4a..afdfc040a1e 100644 --- a/packages/graphql-auth-transformer/src/__tests__/__snapshots__/OperationsArgument.test.ts.snap +++ b/packages/graphql-auth-transformer/src/__tests__/__snapshots__/OperationsArgument.test.ts.snap @@ -33,15 +33,15 @@ exports[`Test "create", "update", "delete" auth operations 3`] = ` ## [End] Throw if unauthorized ** ## [Start] Prepare DynamoDB PutItem Request. 
** -$util.qr($context.args.input.put(\\"createdAt\\", $util.time.nowISO8601())) -$util.qr($context.args.input.put(\\"updatedAt\\", $util.time.nowISO8601())) +$util.qr($context.args.input.put(\\"createdAt\\", $util.defaultIfNull($ctx.args.input.createdAt, $util.time.nowISO8601()))) +$util.qr($context.args.input.put(\\"updatedAt\\", $util.defaultIfNull($ctx.args.input.updatedAt, $util.time.nowISO8601()))) $util.qr($context.args.input.put(\\"__typename\\", \\"Post\\")) { \\"version\\": \\"2017-02-28\\", \\"operation\\": \\"PutItem\\", - \\"key\\": { - \\"id\\": $util.dynamodb.toDynamoDBJson($util.defaultIfNullOrBlank($ctx.args.input.id, $util.autoId())) - }, + \\"key\\": #if( $modelObjectKey ) $util.toJson($modelObjectKey) #else { + \\"id\\": $util.dynamodb.toDynamoDBJson($util.defaultIfNullOrBlank($ctx.args.input.id, $util.autoId())) +} #end, \\"attributeValues\\": $util.dynamodb.toMapValuesJson($context.args.input), \\"condition\\": { \\"expression\\": \\"attribute_not_exists(#id)\\", @@ -132,19 +132,42 @@ exports[`Test "create", "update", "delete" auth operations 4`] = ` #if( $authCondition && $authCondition.expression != \\"\\" ) #set( $condition = $authCondition ) - $util.qr($condition.put(\\"expression\\", \\"$condition.expression AND attribute_exists(#id)\\")) - $util.qr($condition.expressionNames.put(\\"#id\\", \\"id\\")) + #if( $modelObjectKey ) + #foreach( $entry in $modelObjectKey.entrySet() ) + $util.qr($condition.put(\\"expression\\", \\"$condition.expression AND attribute_exists(#keyCondition$velocityCount)\\")) + $util.qr($condition.expressionNames.put(\\"#keyCondition$velocityCount\\", \\"$entry.key\\")) + #end + #else + $util.qr($condition.put(\\"expression\\", \\"$condition.expression AND attribute_exists(#id)\\")) + $util.qr($condition.expressionNames.put(\\"#id\\", \\"id\\")) + #end #else - #set( $condition = { + #if( $modelObjectKey ) + #set( $condition = { + \\"expression\\": \\"\\", + \\"expressionNames\\": {}, + \\"expressionValues\\": {} +} ) + #foreach( $entry in $modelObjectKey.entrySet() ) + #if( $velocityCount == 1 ) + $util.qr($condition.put(\\"expression\\", \\"attribute_exists(#keyCondition$velocityCount)\\")) + #else + $util.qr($condition.put(\\"expression\\", \\"$condition.expression AND attribute_exists(#keyCondition$velocityCount)\\")) + #end + $util.qr($condition.expressionNames.put(\\"#keyCondition$velocityCount\\", \\"$entry.key\\")) + #end + #else + #set( $condition = { \\"expression\\": \\"attribute_exists(#id)\\", \\"expressionNames\\": { \\"#id\\": \\"id\\" }, \\"expressionValues\\": {} } ) + #end #end ## Automatically set the updatedAt timestamp. 
** -$util.qr($context.args.input.put(\\"updatedAt\\", $util.time.nowISO8601())) +$util.qr($context.args.input.put(\\"updatedAt\\", $util.defaultIfNull($ctx.args.input.updatedAt, $util.time.nowISO8601()))) $util.qr($context.args.input.put(\\"__typename\\", \\"Post\\")) ## Update condition if type is @versioned ** #if( $versionedCondition ) @@ -157,14 +180,27 @@ $util.qr($context.args.input.put(\\"__typename\\", \\"Post\\")) #set( $expSet = {} ) #set( $expAdd = {} ) #set( $expRemove = [] ) -#foreach( $entry in $util.map.copyAndRemoveAllKeys($context.args.input, [\\"id\\"]).entrySet() ) +#if( $modelObjectKey ) + #set( $keyFields = [] ) + #foreach( $entry in $modelObjectKey.entrySet() ) + $util.qr($keyFields.add(\\"$entry.key\\")) + #end +#else + #set( $keyFields = [\\"id\\"] ) +#end +#foreach( $entry in $util.map.copyAndRemoveAllKeys($context.args.input, $keyFields).entrySet() ) + #if( !$util.isNull($dynamodbNameOverrideMap) && $dynamodbNameOverrideMap.containsKey(\\"$entry.key\\") ) + #set( $entryKeyAttributeName = $dynamodbNameOverrideMap.get(\\"$entry.key\\") ) + #else + #set( $entryKeyAttributeName = $entry.key ) + #end #if( $util.isNull($entry.value) ) - #set( $discard = $expRemove.add(\\"#$entry.key\\") ) - $util.qr($expNames.put(\\"#$entry.key\\", \\"$entry.key\\")) + #set( $discard = $expRemove.add(\\"#$entryKeyAttributeName\\") ) + $util.qr($expNames.put(\\"#$entryKeyAttributeName\\", \\"$entry.key\\")) #else - $util.qr($expSet.put(\\"#$entry.key\\", \\":$entry.key\\")) - $util.qr($expNames.put(\\"#$entry.key\\", \\"$entry.key\\")) - $util.qr($expValues.put(\\":$entry.key\\", $util.dynamodb.toDynamoDB($entry.value))) + $util.qr($expSet.put(\\"#$entryKeyAttributeName\\", \\":$entryKeyAttributeName\\")) + $util.qr($expNames.put(\\"#$entryKeyAttributeName\\", \\"$entry.key\\")) + $util.qr($expValues.put(\\":$entryKeyAttributeName\\", $util.dynamodb.toDynamoDB($entry.value))) #end #end #set( $expression = \\"\\" ) @@ -206,11 +242,11 @@ $util.qr($update.put(\\"expression\\", \\"$expression\\")) { \\"version\\": \\"2017-02-28\\", \\"operation\\": \\"UpdateItem\\", - \\"key\\": { - \\"id\\": { - \\"S\\": \\"$context.args.input.id\\" - } - }, + \\"key\\": #if( $modelObjectKey ) $util.toJson($modelObjectKey) #else { + \\"id\\": { + \\"S\\": \\"$context.args.input.id\\" + } +} #end, \\"update\\": $util.toJson($update), \\"condition\\": $util.toJson($condition) }" @@ -295,15 +331,37 @@ exports[`Test "create", "update", "delete" auth operations 5`] = ` #if( $authCondition ) #set( $condition = $authCondition ) - $util.qr($condition.put(\\"expression\\", \\"$condition.expression AND attribute_exists(#id)\\")) - $util.qr($condition.expressionNames.put(\\"#id\\", \\"id\\")) + #if( $modelObjectKey ) + #foreach( $entry in $modelObjectKey.entrySet() ) + $util.qr($condition.put(\\"expression\\", \\"$condition.expression AND attribute_exists(#keyCondition$velocityCount)\\")) + $util.qr($condition.expressionNames.put(\\"#keyCondition$velocityCount\\", \\"$entry.key\\")) + #end + #else + $util.qr($condition.put(\\"expression\\", \\"$condition.expression AND attribute_exists(#id)\\")) + $util.qr($condition.expressionNames.put(\\"#id\\", \\"id\\")) + #end #else - #set( $condition = { + #if( $modelObjectKey ) + #set( $condition = { + \\"expression\\": \\"\\", + \\"expressionNames\\": {} +} ) + #foreach( $entry in $modelObjectKey.entrySet() ) + #if( $velocityCount == 1 ) + $util.qr($condition.put(\\"expression\\", \\"attribute_exists(#keyCondition$velocityCount)\\")) + #else + 
$util.qr($condition.put(\\"expression\\", \\"$condition.expression AND attribute_exists(#keyCondition$velocityCount)\\")) + #end + $util.qr($condition.expressionNames.put(\\"#keyCondition$velocityCount\\", \\"$entry.key\\")) + #end + #else + #set( $condition = { \\"expression\\": \\"attribute_exists(#id)\\", \\"expressionNames\\": { \\"#id\\": \\"id\\" } } ) + #end #end #if( $versionedCondition ) $util.qr($condition.put(\\"expression\\", \\"($condition.expression) AND $versionedCondition.expression\\")) @@ -315,9 +373,9 @@ exports[`Test "create", "update", "delete" auth operations 5`] = ` { \\"version\\": \\"2017-02-28\\", \\"operation\\": \\"DeleteItem\\", - \\"key\\": { - \\"id\\": $util.dynamodb.toDynamoDBJson($ctx.args.input.id) - }, + \\"key\\": #if( $modelObjectKey ) $util.toJson($modelObjectKey) #else { + \\"id\\": $util.dynamodb.toDynamoDBJson($ctx.args.input.id) +} #end, \\"condition\\": $util.toJson($condition) }" `; @@ -355,15 +413,15 @@ exports[`Test that operation overwrites queries in auth operations 3`] = ` ## [End] Throw if unauthorized ** ## [Start] Prepare DynamoDB PutItem Request. ** -$util.qr($context.args.input.put(\\"createdAt\\", $util.time.nowISO8601())) -$util.qr($context.args.input.put(\\"updatedAt\\", $util.time.nowISO8601())) +$util.qr($context.args.input.put(\\"createdAt\\", $util.defaultIfNull($ctx.args.input.createdAt, $util.time.nowISO8601()))) +$util.qr($context.args.input.put(\\"updatedAt\\", $util.defaultIfNull($ctx.args.input.updatedAt, $util.time.nowISO8601()))) $util.qr($context.args.input.put(\\"__typename\\", \\"Post\\")) { \\"version\\": \\"2017-02-28\\", \\"operation\\": \\"PutItem\\", - \\"key\\": { - \\"id\\": $util.dynamodb.toDynamoDBJson($util.defaultIfNullOrBlank($ctx.args.input.id, $util.autoId())) - }, + \\"key\\": #if( $modelObjectKey ) $util.toJson($modelObjectKey) #else { + \\"id\\": $util.dynamodb.toDynamoDBJson($util.defaultIfNullOrBlank($ctx.args.input.id, $util.autoId())) +} #end, \\"attributeValues\\": $util.dynamodb.toMapValuesJson($context.args.input), \\"condition\\": { \\"expression\\": \\"attribute_not_exists(#id)\\", @@ -454,19 +512,42 @@ exports[`Test that operation overwrites queries in auth operations 4`] = ` #if( $authCondition && $authCondition.expression != \\"\\" ) #set( $condition = $authCondition ) - $util.qr($condition.put(\\"expression\\", \\"$condition.expression AND attribute_exists(#id)\\")) - $util.qr($condition.expressionNames.put(\\"#id\\", \\"id\\")) + #if( $modelObjectKey ) + #foreach( $entry in $modelObjectKey.entrySet() ) + $util.qr($condition.put(\\"expression\\", \\"$condition.expression AND attribute_exists(#keyCondition$velocityCount)\\")) + $util.qr($condition.expressionNames.put(\\"#keyCondition$velocityCount\\", \\"$entry.key\\")) + #end + #else + $util.qr($condition.put(\\"expression\\", \\"$condition.expression AND attribute_exists(#id)\\")) + $util.qr($condition.expressionNames.put(\\"#id\\", \\"id\\")) + #end #else - #set( $condition = { + #if( $modelObjectKey ) + #set( $condition = { + \\"expression\\": \\"\\", + \\"expressionNames\\": {}, + \\"expressionValues\\": {} +} ) + #foreach( $entry in $modelObjectKey.entrySet() ) + #if( $velocityCount == 1 ) + $util.qr($condition.put(\\"expression\\", \\"attribute_exists(#keyCondition$velocityCount)\\")) + #else + $util.qr($condition.put(\\"expression\\", \\"$condition.expression AND attribute_exists(#keyCondition$velocityCount)\\")) + #end + $util.qr($condition.expressionNames.put(\\"#keyCondition$velocityCount\\", \\"$entry.key\\")) + #end + #else + 
#set( $condition = { \\"expression\\": \\"attribute_exists(#id)\\", \\"expressionNames\\": { \\"#id\\": \\"id\\" }, \\"expressionValues\\": {} } ) + #end #end ## Automatically set the updatedAt timestamp. ** -$util.qr($context.args.input.put(\\"updatedAt\\", $util.time.nowISO8601())) +$util.qr($context.args.input.put(\\"updatedAt\\", $util.defaultIfNull($ctx.args.input.updatedAt, $util.time.nowISO8601()))) $util.qr($context.args.input.put(\\"__typename\\", \\"Post\\")) ## Update condition if type is @versioned ** #if( $versionedCondition ) @@ -479,14 +560,27 @@ $util.qr($context.args.input.put(\\"__typename\\", \\"Post\\")) #set( $expSet = {} ) #set( $expAdd = {} ) #set( $expRemove = [] ) -#foreach( $entry in $util.map.copyAndRemoveAllKeys($context.args.input, [\\"id\\"]).entrySet() ) +#if( $modelObjectKey ) + #set( $keyFields = [] ) + #foreach( $entry in $modelObjectKey.entrySet() ) + $util.qr($keyFields.add(\\"$entry.key\\")) + #end +#else + #set( $keyFields = [\\"id\\"] ) +#end +#foreach( $entry in $util.map.copyAndRemoveAllKeys($context.args.input, $keyFields).entrySet() ) + #if( !$util.isNull($dynamodbNameOverrideMap) && $dynamodbNameOverrideMap.containsKey(\\"$entry.key\\") ) + #set( $entryKeyAttributeName = $dynamodbNameOverrideMap.get(\\"$entry.key\\") ) + #else + #set( $entryKeyAttributeName = $entry.key ) + #end #if( $util.isNull($entry.value) ) - #set( $discard = $expRemove.add(\\"#$entry.key\\") ) - $util.qr($expNames.put(\\"#$entry.key\\", \\"$entry.key\\")) + #set( $discard = $expRemove.add(\\"#$entryKeyAttributeName\\") ) + $util.qr($expNames.put(\\"#$entryKeyAttributeName\\", \\"$entry.key\\")) #else - $util.qr($expSet.put(\\"#$entry.key\\", \\":$entry.key\\")) - $util.qr($expNames.put(\\"#$entry.key\\", \\"$entry.key\\")) - $util.qr($expValues.put(\\":$entry.key\\", $util.dynamodb.toDynamoDB($entry.value))) + $util.qr($expSet.put(\\"#$entryKeyAttributeName\\", \\":$entryKeyAttributeName\\")) + $util.qr($expNames.put(\\"#$entryKeyAttributeName\\", \\"$entry.key\\")) + $util.qr($expValues.put(\\":$entryKeyAttributeName\\", $util.dynamodb.toDynamoDB($entry.value))) #end #end #set( $expression = \\"\\" ) @@ -528,11 +622,11 @@ $util.qr($update.put(\\"expression\\", \\"$expression\\")) { \\"version\\": \\"2017-02-28\\", \\"operation\\": \\"UpdateItem\\", - \\"key\\": { - \\"id\\": { - \\"S\\": \\"$context.args.input.id\\" - } - }, + \\"key\\": #if( $modelObjectKey ) $util.toJson($modelObjectKey) #else { + \\"id\\": { + \\"S\\": \\"$context.args.input.id\\" + } +} #end, \\"update\\": $util.toJson($update), \\"condition\\": $util.toJson($condition) }" @@ -617,15 +711,37 @@ exports[`Test that operation overwrites queries in auth operations 5`] = ` #if( $authCondition ) #set( $condition = $authCondition ) - $util.qr($condition.put(\\"expression\\", \\"$condition.expression AND attribute_exists(#id)\\")) - $util.qr($condition.expressionNames.put(\\"#id\\", \\"id\\")) + #if( $modelObjectKey ) + #foreach( $entry in $modelObjectKey.entrySet() ) + $util.qr($condition.put(\\"expression\\", \\"$condition.expression AND attribute_exists(#keyCondition$velocityCount)\\")) + $util.qr($condition.expressionNames.put(\\"#keyCondition$velocityCount\\", \\"$entry.key\\")) + #end + #else + $util.qr($condition.put(\\"expression\\", \\"$condition.expression AND attribute_exists(#id)\\")) + $util.qr($condition.expressionNames.put(\\"#id\\", \\"id\\")) + #end #else - #set( $condition = { + #if( $modelObjectKey ) + #set( $condition = { + \\"expression\\": \\"\\", + \\"expressionNames\\": {} +} ) + 
#foreach( $entry in $modelObjectKey.entrySet() ) + #if( $velocityCount == 1 ) + $util.qr($condition.put(\\"expression\\", \\"attribute_exists(#keyCondition$velocityCount)\\")) + #else + $util.qr($condition.put(\\"expression\\", \\"$condition.expression AND attribute_exists(#keyCondition$velocityCount)\\")) + #end + $util.qr($condition.expressionNames.put(\\"#keyCondition$velocityCount\\", \\"$entry.key\\")) + #end + #else + #set( $condition = { \\"expression\\": \\"attribute_exists(#id)\\", \\"expressionNames\\": { \\"#id\\": \\"id\\" } } ) + #end #end #if( $versionedCondition ) $util.qr($condition.put(\\"expression\\", \\"($condition.expression) AND $versionedCondition.expression\\")) @@ -637,9 +753,9 @@ exports[`Test that operation overwrites queries in auth operations 5`] = ` { \\"version\\": \\"2017-02-28\\", \\"operation\\": \\"DeleteItem\\", - \\"key\\": { - \\"id\\": $util.dynamodb.toDynamoDBJson($ctx.args.input.id) - }, + \\"key\\": #if( $modelObjectKey ) $util.toJson($modelObjectKey) #else { + \\"id\\": $util.dynamodb.toDynamoDBJson($ctx.args.input.id) +} #end, \\"condition\\": $util.toJson($condition) }" `; diff --git a/packages/graphql-connection-transformer/src/ModelConnectionTransformer.ts b/packages/graphql-connection-transformer/src/ModelConnectionTransformer.ts index a92de3a6676..10a5dfda3c9 100644 --- a/packages/graphql-connection-transformer/src/ModelConnectionTransformer.ts +++ b/packages/graphql-connection-transformer/src/ModelConnectionTransformer.ts @@ -10,14 +10,15 @@ import { makeModelConnectionType, makeModelConnectionField, makeScalarFilterInputs, - makeScalarKeyConditionInputs, makeModelXFilterInputObject, makeModelSortDirectionEnumObject, } from 'graphql-dynamodb-transformer' import { getBaseType, isListType, getDirectiveArgument, blankObject, isScalar, STANDARD_SCALARS, - toCamelCase, isNonNullType, attributeTypeFromScalar + toCamelCase, isNonNullType, attributeTypeFromScalar, + makeScalarKeyConditionInputs, makeScalarKeyConditionForType, + makeNamedType, } from 'graphql-transformer-common' import { ResolverResourceIDs, ModelResourceIDs } from 'graphql-transformer-common' import { updateCreateInputWithConnectionField, updateUpdateInputWithConnectionField } from './definitions'; @@ -405,14 +406,9 @@ export class ModelConnectionTransformer extends Transformer { // Create sort key condition inputs for valid sort key types // We only create the KeyConditionInput if it is being used. 
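// Illustrative note (hypothetical field, not from this patch): a connection sorted on a String field now reuses a single
// ModelStringKeyConditionInput (eq, le, lt, ge, gt, between, beginsWith) instead of generating every scalar key condition input up front.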
if (sortKeyInfo) { - const sortKeyConditionInputs = makeScalarKeyConditionInputs() - for (const keyCondition of sortKeyConditionInputs) { - if ( - keyCondition.name.value === ModelResourceIDs.ModelKeyConditionInputTypeName(sortKeyInfo.typeName) && - !this.typeExist(keyCondition.name.value, ctx) - ) { - ctx.addInput(keyCondition) - } + const sortKeyConditionInput = makeScalarKeyConditionForType(makeNamedType(sortKeyInfo.typeName)) + if (!this.typeExist(sortKeyConditionInput.name.value, ctx)) { + ctx.addInput(sortKeyConditionInput); } } } diff --git a/packages/graphql-connection-transformer/src/resources.ts b/packages/graphql-connection-transformer/src/resources.ts index 47b66ccc8be..a1f04afff7a 100644 --- a/packages/graphql-connection-transformer/src/resources.ts +++ b/packages/graphql-connection-transformer/src/resources.ts @@ -7,7 +7,7 @@ import { ref, obj, set, nul, ifElse, compoundExpression, bool, equals, iff, raw, comment, qref, Expression, block } from 'graphql-mapping-template' -import { ResourceConstants, ModelResourceIDs, DEFAULT_SCALARS, NONE_VALUE } from 'graphql-transformer-common' +import { ResourceConstants, ModelResourceIDs, DEFAULT_SCALARS, NONE_VALUE, applyKeyConditionExpression } from 'graphql-transformer-common' import { InvalidDirectiveError } from 'graphql-transformer-core'; export class ResourceFactory { @@ -145,7 +145,7 @@ export class ResourceFactory { })) ]; if (sortKeyInfo) { - setup.push(this.applyKeyConditionExpression(sortKeyInfo.fieldName, sortKeyInfo.attributeType, 'query')); + setup.push(applyKeyConditionExpression(sortKeyInfo.fieldName, sortKeyInfo.attributeType, 'query')); } return new Resolver({ ApiId: Fn.GetAtt(ResourceConstants.RESOURCES.GraphQLAPILogicalID, 'ApiId'), @@ -189,83 +189,4 @@ export class ResourceFactory { ) }).dependsOn(ResourceConstants.RESOURCES.GraphQLSchemaLogicalID) } - - /** - * Key conditions materialize as instances of ModelXKeyConditionInput passed via $ctx.args. - * If the arguments with the given sortKey name exists, create a DynamoDB expression that - * implements its logic. Possible operators: eq, le, lt, ge, gt, beginsWith, and between. - * @param argName The name of the argument containing the sort key condition object. - */ - private applyKeyConditionExpression(argName: string, attributeType: 'S' | 'N' | 'B' = 'S', queryExprReference: string = 'query') { - return block("Applying Key Condition", [ - iff( - raw(`!$util.isNull($ctx.args.${argName}) && !$util.isNull($ctx.args.${argName}.beginsWith)`), - compoundExpression([ - set(ref('query.expression'), raw(`"$${queryExprReference}.expression AND begins_with(#${argName}, :${argName})"`)), - qref(`$${queryExprReference}.expressionNames.put("#${argName}", "${argName}")`), - // TODO: Handle N & B. - qref(`$${queryExprReference}.expressionValues.put(":${argName}", { "${attributeType}": "$ctx.args.${argName}.beginsWith" })`) - ]) - ), - iff( - raw(`!$util.isNull($ctx.args.${argName}) && !$util.isNull($ctx.args.${argName}.between)`), - compoundExpression([ - iff( - raw(`$ctx.args.${argName}.between.size() != 2`), - raw(`$util.error("Argument ${argName}.between expects exactly 2 elements.")`) - ), - set(ref('query.expression'), raw(`"$${queryExprReference}.expression AND #${argName} BETWEEN :${argName}0 AND :${argName}1"`)), - qref(`$${queryExprReference}.expressionNames.put("#${argName}", "${argName}")`), - // TODO: Handle N & B. 
- qref(`$${queryExprReference}.expressionValues.put(":${argName}0", { "${attributeType}": "$ctx.args.${argName}.between[0]" })`), - qref(`$${queryExprReference}.expressionValues.put(":${argName}1", { "${attributeType}": "$ctx.args.${argName}.between[1]" })`) - ]) - ), - iff( - raw(`!$util.isNull($ctx.args.${argName}) && !$util.isNull($ctx.args.${argName}.eq)`), - compoundExpression([ - set(ref('query.expression'), raw(`"$${queryExprReference}.expression AND #${argName} = :${argName}"`)), - qref(`$${queryExprReference}.expressionNames.put("#${argName}", "${argName}")`), - // TODO: Handle N & B. - qref(`$${queryExprReference}.expressionValues.put(":${argName}", { "${attributeType}": "$ctx.args.${argName}.eq" })`) - ]) - ), - iff( - raw(`!$util.isNull($ctx.args.${argName}) && !$util.isNull($ctx.args.${argName}.lt)`), - compoundExpression([ - set(ref('query.expression'), raw(`"$${queryExprReference}.expression AND #${argName} < :${argName}"`)), - qref(`$${queryExprReference}.expressionNames.put("#${argName}", "${argName}")`), - // TODO: Handle N & B. - qref(`$${queryExprReference}.expressionValues.put(":${argName}", { "${attributeType}": "$ctx.args.${argName}.lt" })`) - ]) - ), - iff( - raw(`!$util.isNull($ctx.args.${argName}) && !$util.isNull($ctx.args.${argName}.le)`), - compoundExpression([ - set(ref('query.expression'), raw(`"$${queryExprReference}.expression AND #${argName} <= :${argName}"`)), - qref(`$${queryExprReference}.expressionNames.put("#${argName}", "${argName}")`), - // TODO: Handle N & B. - qref(`$${queryExprReference}.expressionValues.put(":${argName}", { "${attributeType}": "$ctx.args.${argName}.le" })`) - ]) - ), - iff( - raw(`!$util.isNull($ctx.args.${argName}) && !$util.isNull($ctx.args.${argName}.gt)`), - compoundExpression([ - set(ref('query.expression'), raw(`"$${queryExprReference}.expression AND #${argName} > :${argName}"`)), - qref(`$${queryExprReference}.expressionNames.put("#${argName}", "${argName}")`), - // TODO: Handle N & B. - qref(`$${queryExprReference}.expressionValues.put(":${argName}", { "${attributeType}": "$ctx.args.${argName}.gt" })`) - ]) - ), - iff( - raw(`!$util.isNull($ctx.args.${argName}) && !$util.isNull($ctx.args.${argName}.ge)`), - compoundExpression([ - set(ref('query.expression'), raw(`"$${queryExprReference}.expression AND #${argName} >= :${argName}"`)), - qref(`$${queryExprReference}.expressionNames.put("#${argName}", "${argName}")`), - // TODO: Handle N & B. 
- qref(`$${queryExprReference}.expressionValues.put(":${argName}", { "${attributeType}": "$ctx.args.${argName}.ge" })`) - ]) - ) - ]); - } } diff --git a/packages/graphql-dynamodb-transformer/src/DynamoDBModelTransformer.ts b/packages/graphql-dynamodb-transformer/src/DynamoDBModelTransformer.ts index 27488cd44a6..04dd27ed238 100644 --- a/packages/graphql-dynamodb-transformer/src/DynamoDBModelTransformer.ts +++ b/packages/graphql-dynamodb-transformer/src/DynamoDBModelTransformer.ts @@ -6,14 +6,14 @@ import { ResourceFactory } from './resources' import { makeCreateInputObject, makeUpdateInputObject, makeDeleteInputObject, makeModelXFilterInputObject, makeModelSortDirectionEnumObject, makeModelConnectionType, - makeScalarFilterInputs, makeModelScanField, makeSubscriptionField, getNonModelObjectArray, + makeScalarFilterInputs, makeSubscriptionField, getNonModelObjectArray, makeNonModelInputObject, makeEnumFilterInputObjects } from './definitions' import { blankObject, makeField, makeInputValueDefinition, makeNamedType, makeNonNullType } from 'graphql-transformer-common' -import { ResolverResourceIDs, ModelResourceIDs } from 'graphql-transformer-common' +import { ResolverResourceIDs, ModelResourceIDs, makeConnectionField } from 'graphql-transformer-common' interface QueryNameMap { get?: string; @@ -313,7 +313,7 @@ export class DynamoDBModelTransformer extends Transformer { const listResolver = this.resources.makeListResolver(def.name.value, listFieldNameOverride, ctx.getQueryTypeName()) ctx.setResource(ResolverResourceIDs.DynamoDBListResolverResourceID(typeName), listResolver) - queryFields.push(makeModelScanField(listResolver.Properties.FieldName, def.name.value)) + queryFields.push(makeConnectionField(listResolver.Properties.FieldName, def.name.value)) } this.generateFilterInputs(ctx, def) diff --git a/packages/graphql-dynamodb-transformer/src/definitions.ts b/packages/graphql-dynamodb-transformer/src/definitions.ts index 2998009c1d6..0565ebd196c 100644 --- a/packages/graphql-dynamodb-transformer/src/definitions.ts +++ b/packages/graphql-dynamodb-transformer/src/definitions.ts @@ -23,12 +23,6 @@ const INT_CONDITIONS = ['ne', 'eq', 'le', 'lt', 'ge', 'gt', 'contains', 'notCont const FLOAT_CONDITIONS = ['ne', 'eq', 'le', 'lt', 'ge', 'gt', 'contains', 'notContains', 'between'] const BOOLEAN_CONDITIONS = ['ne', 'eq'] -// Key conditions -const STRING_KEY_CONDITIONS = ['eq', 'le', 'lt', 'ge', 'gt', 'between', 'beginsWith'] -const ID_KEY_CONDITIONS = ['eq', 'le', 'lt', 'ge', 'gt', 'between', 'beginsWith'] -const INT_KEY_CONDITIONS = ['eq', 'le', 'lt', 'ge', 'gt', 'between'] -const FLOAT_KEY_CONDITIONS = ['eq', 'le', 'lt', 'ge', 'gt', 'between'] - export function getNonModelObjectArray( obj: ObjectTypeDefinitionNode, ctx: TransformerContext, @@ -531,18 +525,6 @@ export function makeSubscriptionField(fieldName: string, returnTypeName: string, ) } -export function makeModelScanField(fieldName: string, returnTypeName: string): FieldDefinitionNode { - return makeField( - fieldName, - [ - makeInputValueDefinition('filter', makeNamedType(ModelResourceIDs.ModelFilterInputTypeName(returnTypeName))), - makeInputValueDefinition('limit', makeNamedType('Int')), - makeInputValueDefinition('nextToken', makeNamedType('String')) - ], - makeNamedType(ModelResourceIDs.ModelConnectionTypeName(returnTypeName)) - ) -} - export interface SortKeyFieldInfo { // The name of the sort key field. 
fieldName: string; @@ -577,46 +559,3 @@ export function makeScalarFilterInputs(): InputObjectTypeDefinitionNode[] { makeModelScalarFilterInputObject('Boolean') ]; } - -function getScalarKeyConditions(type: string): string[] { - switch (type) { - case 'String': - return STRING_KEY_CONDITIONS - case 'ID': - return ID_KEY_CONDITIONS - case 'Int': - return INT_KEY_CONDITIONS - case 'Float': - return FLOAT_KEY_CONDITIONS - default: - throw 'Valid types are String, ID, Int, Float, Boolean' - } -} -export function makeModelStringKeyConditionInputObject(type: string): InputObjectTypeDefinitionNode { - const name = ModelResourceIDs.ModelKeyConditionInputTypeName(type) - const conditions = getScalarKeyConditions(type) - const fields: InputValueDefinitionNode[] = conditions - .map((condition: string) => ({ - kind: Kind.INPUT_VALUE_DEFINITION, - name: { kind: "Name" as "Name", value: condition }, - type: condition === 'between' ? makeListType(makeNamedType(type)) : makeNamedType(type), - directives: [] - })) - return { - kind: Kind.INPUT_OBJECT_TYPE_DEFINITION, - name: { - kind: 'Name', - value: name - }, - fields, - directives: [] - } -} -export function makeScalarKeyConditionInputs(): InputObjectTypeDefinitionNode[] { - return [ - makeModelStringKeyConditionInputObject('String'), - makeModelStringKeyConditionInputObject('ID'), - makeModelStringKeyConditionInputObject('Int'), - makeModelStringKeyConditionInputObject('Float') - ]; -} \ No newline at end of file diff --git a/packages/graphql-dynamodb-transformer/src/resources.ts b/packages/graphql-dynamodb-transformer/src/resources.ts index 05504f1dfbe..0f5d4f214aa 100644 --- a/packages/graphql-dynamodb-transformer/src/resources.ts +++ b/packages/graphql-dynamodb-transformer/src/resources.ts @@ -3,7 +3,7 @@ import Output from 'cloudform-types/types/output'; import { DynamoDBMappingTemplate, printBlock, str, print, ref, obj, set, nul, - ifElse, compoundExpression, qref, bool, equals, iff, raw, comment + ifElse, compoundExpression, qref, bool, equals, iff, raw, comment, forEach, list } from 'graphql-mapping-template' import { ResourceConstants, plurality, graphqlName, toUpper, ModelResourceIDs } from 'graphql-transformer-common' @@ -313,13 +313,18 @@ export class ResourceFactory { TypeName: mutationTypeName, RequestMappingTemplate: printBlock('Prepare DynamoDB PutItem Request')( compoundExpression([ - qref('$context.args.input.put("createdAt", $util.time.nowISO8601())'), - qref('$context.args.input.put("updatedAt", $util.time.nowISO8601())'), + qref('$context.args.input.put("createdAt", $util.defaultIfNull($ctx.args.input.createdAt, $util.time.nowISO8601()))'), + qref('$context.args.input.put("updatedAt", $util.defaultIfNull($ctx.args.input.updatedAt, $util.time.nowISO8601()))'), qref(`$context.args.input.put("__typename", "${type}")`), DynamoDBMappingTemplate.putItem({ - key: obj({ - id: raw(`$util.dynamodb.toDynamoDBJson($util.defaultIfNullOrBlank($ctx.args.input.id, $util.autoId()))`) - }), + key: ifElse( + ref(ResourceConstants.SNIPPETS.ModelObjectKey), + raw(`$util.toJson(\$${ResourceConstants.SNIPPETS.ModelObjectKey})`), + obj({ + id: raw(`$util.dynamodb.toDynamoDBJson($util.defaultIfNullOrBlank($ctx.args.input.id, $util.autoId()))`) + }), + true + ), attributeValues: ref('util.dynamodb.toMapValuesJson($context.args.input)'), condition: obj({ expression: str(`attribute_not_exists(#id)`), @@ -349,19 +354,46 @@ export class ResourceFactory { raw(`$${ResourceConstants.SNIPPETS.AuthCondition} && $${ResourceConstants.SNIPPETS.AuthCondition}.expression 
!= ""`), compoundExpression([ set(ref('condition'), ref(ResourceConstants.SNIPPETS.AuthCondition)), - qref('$condition.put("expression", "$condition.expression AND attribute_exists(#id)")'), - qref('$condition.expressionNames.put("#id", "id")') + ifElse( + ref(ResourceConstants.SNIPPETS.ModelObjectKey), + forEach(ref('entry'), ref(`${ResourceConstants.SNIPPETS.ModelObjectKey}.entrySet()`),[ + qref('$condition.put("expression", "$condition.expression AND attribute_exists(#keyCondition$velocityCount)")'), + qref('$condition.expressionNames.put("#keyCondition$velocityCount", "$entry.key")') + ]), + compoundExpression([ + qref('$condition.put("expression", "$condition.expression AND attribute_exists(#id)")'), + qref('$condition.expressionNames.put("#id", "id")') + ]) + ) ]), - set(ref('condition'), obj({ - expression: str("attribute_exists(#id)"), - expressionNames: obj({ - "#id": str("id") - }), - expressionValues: obj({}), - })) + ifElse( + ref(ResourceConstants.SNIPPETS.ModelObjectKey), + compoundExpression([ + set(ref('condition'), obj({ + expression: str(""), + expressionNames: obj({}), + expressionValues: obj({}), + })), + forEach(ref('entry'), ref(`${ResourceConstants.SNIPPETS.ModelObjectKey}.entrySet()`), [ + ifElse( + raw('$velocityCount == 1'), + qref('$condition.put("expression", "attribute_exists(#keyCondition$velocityCount)")'), + qref('$condition.put("expression", "$condition.expression AND attribute_exists(#keyCondition$velocityCount)")'), + ), + qref('$condition.expressionNames.put("#keyCondition$velocityCount", "$entry.key")') + ]) + ]), + set(ref('condition'), obj({ + expression: str("attribute_exists(#id)"), + expressionNames: obj({ + "#id": str("id") + }), + expressionValues: obj({}), + })) + ) ), comment('Automatically set the updatedAt timestamp.'), - qref('$context.args.input.put("updatedAt", $util.time.nowISO8601())'), + qref('$context.args.input.put("updatedAt", $util.defaultIfNull($ctx.args.input.updatedAt, $util.time.nowISO8601()))'), qref(`$context.args.input.put("__typename", "${type}")`), comment('Update condition if type is @versioned'), iff( @@ -374,10 +406,17 @@ export class ResourceFactory { ]) ), DynamoDBMappingTemplate.updateItem({ - key: obj({ - id: obj({ S: str('$context.args.input.id') }) - }), - condition: ref('util.toJson($condition)') + key: ifElse( + ref(ResourceConstants.SNIPPETS.ModelObjectKey), + raw(`$util.toJson(\$${ResourceConstants.SNIPPETS.ModelObjectKey})`), + obj({ + id: obj({ S: str('$context.args.input.id') }) + }), + true + ), + condition: ref('util.toJson($condition)'), + objectKeyVariable: ResourceConstants.SNIPPETS.ModelObjectKey, + nameOverrideMap: ResourceConstants.SNIPPETS.DynamoDBNameOverrideMap }) ]) ), @@ -400,9 +439,14 @@ export class ResourceFactory { TypeName: queryTypeName, RequestMappingTemplate: print( DynamoDBMappingTemplate.getItem({ - key: obj({ - id: ref('util.dynamodb.toDynamoDBJson($ctx.args.id)') - }) + key: ifElse( + ref(ResourceConstants.SNIPPETS.ModelObjectKey), + raw(`$util.toJson(\$${ResourceConstants.SNIPPETS.ModelObjectKey})`), + obj({ + id: ref('util.dynamodb.toDynamoDBJson($ctx.args.id)') + }), + true + ) }) ), ResponseMappingTemplate: print( @@ -479,7 +523,7 @@ export class ResourceFactory { public makeListResolver(type: string, nameOverride?: string, queryTypeName: string = 'Query') { const fieldName = nameOverride ? 
nameOverride : graphqlName('list' + plurality(toUpper(type))) const defaultPageLimit = 10 - + const requestVariable = 'ListRequest'; return new AppSync.Resolver({ ApiId: Fn.GetAtt(ResourceConstants.RESOURCES.GraphQLAPILogicalID, 'ApiId'), DataSourceName: Fn.GetAtt(ModelResourceIDs.ModelTableDataSourceID(type), 'Name'), @@ -488,19 +532,36 @@ export class ResourceFactory { RequestMappingTemplate: print( compoundExpression([ set(ref('limit'), ref(`util.defaultIfNull($context.args.limit, ${defaultPageLimit})`)), - DynamoDBMappingTemplate.listItem({ - filter: ifElse( - ref('context.args.filter'), - ref('util.transform.toDynamoDBFilterExpression($ctx.args.filter)'), - nul() - ), - limit: ref('limit'), - nextToken: ifElse( - ref('context.args.nextToken'), - str('$context.args.nextToken'), - nul() + set( + ref(requestVariable), + obj({ + version: str('2017-02-28'), + limit: ref('limit') + }) + ), + iff( + ref('context.args.nextToken'), + set( + ref(`${requestVariable}.nextToken`), + str('$context.args.nextToken') ) - }) + ), + iff( + ref('context.args.filter'), + set( + ref(`${requestVariable}.filter`), + ref('util.parseJson("$util.transform.toDynamoDBFilterExpression($ctx.args.filter)")') + ), + ), + ifElse( + raw(`!$util.isNull($${ResourceConstants.SNIPPETS.ModelQueryExpression}) && !$util.isNullOrEmpty($${ResourceConstants.SNIPPETS.ModelQueryExpression}.expression)`), + compoundExpression([ + qref(`$${requestVariable}.put("operation", "Query")`), + qref(`$${requestVariable}.put("query", $${ResourceConstants.SNIPPETS.ModelQueryExpression})`) + ]), + qref(`$${requestVariable}.put("operation", "Scan")`) + ), + raw(`$util.toJson($${requestVariable})`) ]) ), ResponseMappingTemplate: print( @@ -527,15 +588,41 @@ export class ResourceFactory { ref(ResourceConstants.SNIPPETS.AuthCondition), compoundExpression([ set(ref('condition'), ref(ResourceConstants.SNIPPETS.AuthCondition)), - qref('$condition.put("expression", "$condition.expression AND attribute_exists(#id)")'), - qref('$condition.expressionNames.put("#id", "id")') + ifElse( + ref(ResourceConstants.SNIPPETS.ModelObjectKey), + forEach(ref('entry'), ref(`${ResourceConstants.SNIPPETS.ModelObjectKey}.entrySet()`),[ + qref('$condition.put("expression", "$condition.expression AND attribute_exists(#keyCondition$velocityCount)")'), + qref('$condition.expressionNames.put("#keyCondition$velocityCount", "$entry.key")') + ]), + compoundExpression([ + qref('$condition.put("expression", "$condition.expression AND attribute_exists(#id)")'), + qref('$condition.expressionNames.put("#id", "id")') + ]) + ) ]), - set(ref('condition'), obj({ - expression: str("attribute_exists(#id)"), - expressionNames: obj({ - "#id": str("id") - }) - })) + ifElse( + ref(ResourceConstants.SNIPPETS.ModelObjectKey), + compoundExpression([ + set(ref('condition'), obj({ + expression: str(""), + expressionNames: obj({}), + })), + forEach(ref('entry'), ref(`${ResourceConstants.SNIPPETS.ModelObjectKey}.entrySet()`), [ + ifElse( + raw('$velocityCount == 1'), + qref('$condition.put("expression", "attribute_exists(#keyCondition$velocityCount)")'), + qref('$condition.put("expression", "$condition.expression AND attribute_exists(#keyCondition$velocityCount)")'), + ), + qref('$condition.expressionNames.put("#keyCondition$velocityCount", "$entry.key")') + ]) + ]), + set(ref('condition'), obj({ + expression: str("attribute_exists(#id)"), + expressionNames: obj({ + "#id": str("id") + }) + })) + ) ), iff( ref(ResourceConstants.SNIPPETS.VersionedCondition), @@ -549,9 +636,14 @@ export class 
ResourceFactory { ]) ), DynamoDBMappingTemplate.deleteItem({ - key: obj({ - id: ref('util.dynamodb.toDynamoDBJson($ctx.args.input.id)') - }), + key: ifElse( + ref(ResourceConstants.SNIPPETS.ModelObjectKey), + raw(`$util.toJson(\$${ResourceConstants.SNIPPETS.ModelObjectKey})`), + obj({ + id: ref('util.dynamodb.toDynamoDBJson($ctx.args.input.id)') + }), + true + ), condition: ref('util.toJson($condition)') }) ]) diff --git a/packages/graphql-function-transformer/src/__tests__/FunctionTransformer.test.ts b/packages/graphql-function-transformer/src/__tests__/FunctionTransformer.test.ts index 3c98b8820c4..c749e3cb826 100644 --- a/packages/graphql-function-transformer/src/__tests__/FunctionTransformer.test.ts +++ b/packages/graphql-function-transformer/src/__tests__/FunctionTransformer.test.ts @@ -24,7 +24,7 @@ test('FunctionTransformer should add a datasource, IAM role and a resolver resou let datasourceResource = out.stacks.FunctionDirectiveStack.Resources.EchofunctionLambdaDataSource expect(datasourceResource).toBeDefined() expect( - datasourceResource.Properties.LambdaConfig.LambdaFunctionArn['Fn::Sub'][0], + datasourceResource.Properties.LambdaConfig.LambdaFunctionArn['Fn::If'][1]['Fn::Sub'][0], ).toEqual(expectedLambdaArn) // IAM role @@ -40,7 +40,7 @@ test('FunctionTransformer should add a datasource, IAM role and a resolver resou iamRoleResource.Properties.Policies[0].PolicyDocument.Statement[0].Action[0] ).toEqual('lambda:InvokeFunction') expect( - iamRoleResource.Properties.Policies[0].PolicyDocument.Statement[0].Resource[0]['Fn::Sub'][0] + iamRoleResource.Properties.Policies[0].PolicyDocument.Statement[0].Resource['Fn::If'][1]['Fn::Sub'][0] ).toEqual(expectedLambdaArn) // Resolver @@ -94,10 +94,10 @@ test('two @function directives for the same field should be valid', () => { expect(resolverResource.Properties.TypeName).toEqual("Query") expect(resolverResource.Properties.PipelineConfig.Functions.length).toEqual(2) const otherFunctionIamResource = out.stacks.FunctionDirectiveStack.Resources.OtherfunctionLambdaDataSourceRole; - expect(otherFunctionIamResource.Properties.Policies[0].PolicyDocument.Statement[0].Resource[0]["Fn::Sub"][0]).toEqual('arn:aws:lambda:${AWS::Region}:${AWS::AccountId}:function:otherfunction'); + expect(otherFunctionIamResource.Properties.Policies[0].PolicyDocument.Statement[0].Resource['Fn::If'][1]["Fn::Sub"][0]).toEqual('arn:aws:lambda:${AWS::Region}:${AWS::AccountId}:function:otherfunction'); const echoFunctionIamResource = out.stacks.FunctionDirectiveStack.Resources.EchofunctionLambdaDataSourceRole; - expect(echoFunctionIamResource.Properties.Policies[0].PolicyDocument.Statement[0].Resource[0]["Fn::Sub"][0]).toEqual('arn:aws:lambda:${AWS::Region}:${AWS::AccountId}:function:echofunction-${env}'); - expect(echoFunctionIamResource.Properties.Policies[0].PolicyDocument.Statement[0].Resource[0]["Fn::Sub"][1].env.Ref).toEqual('env'); + expect(echoFunctionIamResource.Properties.Policies[0].PolicyDocument.Statement[0].Resource['Fn::If'][1]["Fn::Sub"][0]).toEqual('arn:aws:lambda:${AWS::Region}:${AWS::AccountId}:function:echofunction-${env}'); + expect(echoFunctionIamResource.Properties.Policies[0].PolicyDocument.Statement[0].Resource['Fn::If'][1]["Fn::Sub"][1].env.Ref).toEqual('env'); }) test('@function directive applied to Object should throw SchemaValidationError', () => { diff --git a/packages/graphql-key-transformer/package.json b/packages/graphql-key-transformer/package.json new file mode 100644 index 00000000000..c450058029c --- /dev/null +++ 
b/packages/graphql-key-transformer/package.json @@ -0,0 +1,42 @@ +{ + "name": "graphql-key-transformer", + "version": "1.0.0", + "description": "Implements the @key directive.", + "main": "lib/index.js", + "author": "Michael Paris", + "license": "MIT", + "scripts": { + "test": "jest", + "build": "tsc", + "clean": "rm -rf ./lib" + }, + "dependencies": { + "cloudform": "^3.5.0", + "cloudform-types": "^3.7.0", + "graphql": "^0.13.2", + "graphql-mapping-template": "^3.0.6", + "graphql-transformer-common": "^3.6.1", + "graphql-transformer-core": "^3.6.1" + }, + "devDependencies": { + "@types/jest": "23.1.1", + "jest": "^23.1.0", + "ts-jest": "^22.4.6", + "tslint-config-airbnb": "^5.11.1" + }, + "jest": { + "transform": { + "^.+\\.tsx?$": "ts-jest" + }, + "testURL": "http://localhost", + "testRegex": "(src/__tests__/.*.test.*)$", + "moduleFileExtensions": [ + "ts", + "tsx", + "js", + "jsx", + "json", + "node" + ] + } +} diff --git a/packages/graphql-key-transformer/src/KeyTransformer.ts b/packages/graphql-key-transformer/src/KeyTransformer.ts new file mode 100644 index 00000000000..a53f82a1116 --- /dev/null +++ b/packages/graphql-key-transformer/src/KeyTransformer.ts @@ -0,0 +1,743 @@ +import { + Transformer, gql, TransformerContext, getDirectiveArguments, TransformerContractError, InvalidDirectiveError +} from 'graphql-transformer-core'; +import { + obj, str, ref, printBlock, compoundExpression, newline, raw, qref, set, Expression, print, + ifElse, iff, block, bool, forEach, list +} from 'graphql-mapping-template'; +import { + ResolverResourceIDs, ResourceConstants, isNonNullType, + attributeTypeFromScalar, ModelResourceIDs, makeInputValueDefinition, + makeNonNullType, makeNamedType, getBaseType, + makeConnectionField, + makeScalarKeyConditionForType, applyKeyExpressionForCompositeKey, + makeCompositeKeyConditionInputForKey, makeCompositeKeyInputForKey, toCamelCase, graphqlName +} from 'graphql-transformer-common'; +import { + ObjectTypeDefinitionNode, FieldDefinitionNode, DirectiveNode, + InputObjectTypeDefinitionNode, TypeNode, Kind, InputValueDefinitionNode +} from 'graphql'; +import { AppSync, IAM, Fn, DynamoDB, Refs } from 'cloudform-types' +import { Projection, GlobalSecondaryIndex, LocalSecondaryIndex } from 'cloudform-types/types/dynamoDb/table'; + +interface KeyArguments { + name?: string; + fields: string[]; + queryField?: string; +} + +export default class FunctionTransformer extends Transformer { + + constructor() { + super( + 'KeyTransformer', + gql`directive @key(name: String, fields: [String!]!, queryField: String) on OBJECT` + ) + } + + /** + * Augment the table key structures based on the @key. + */ + object = (definition: ObjectTypeDefinitionNode, directive: DirectiveNode, ctx: TransformerContext) => { + this.validate(definition, directive, ctx); + this.updateIndexStructures(definition, directive, ctx); + this.updateSchema(definition, directive, ctx); + this.updateResolvers(definition, directive, ctx); + this.addKeyConditionInputs(definition, directive, ctx); + }; + + /** + * Update the existing @model table's index structures. Includes primary key, GSI, and LSIs. + * @param definition The object type definition node. + * @param directive The @key directive + * @param ctx The transformer context + */ + private updateIndexStructures = (definition: ObjectTypeDefinitionNode, directive: DirectiveNode, ctx: TransformerContext) => { + if (this.isPrimaryKey(directive)) { + // Set the table's primary key using the @key definition. 
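+            // Illustrative example (hypothetical schema, not part of this patch): `type Order @model @key(fields: ["customerEmail", "createdAt"])`
+            // makes customerEmail the table's partition key and createdAt its sort key in place of the default `id` key.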
+ this.replacePrimaryKey(definition, directive, ctx); + } else { + // Append a GSI/LSI to the table configuration. + this.appendSecondaryIndex(definition, directive, ctx); + } + } + + /** + * Update the structural components of the schema that are relevant to the new index structures. + * + * Updates: + * 1. getX with new primary key information. + * 2. listX with new primary key information. + * + * Creates: + * 1. A query field for each secondary index. + */ + private updateSchema = (definition: ObjectTypeDefinitionNode, directive: DirectiveNode, ctx: TransformerContext) => { + this.updateQueryFields(definition, directive, ctx); + this.updateInputObjects(definition, directive, ctx); + } + + /** + * Update the get, list, create, update, and delete resolvers with updated key information. + */ + private updateResolvers = (definition: ObjectTypeDefinitionNode, directive: DirectiveNode, ctx: TransformerContext) => { + const directiveArgs: KeyArguments = getDirectiveArguments(directive); + const getResolver = ctx.getResource(ResolverResourceIDs.DynamoDBGetResolverResourceID(definition.name.value)); + const listResolver = ctx.getResource(ResolverResourceIDs.DynamoDBListResolverResourceID(definition.name.value)); + const createResolver = ctx.getResource(ResolverResourceIDs.DynamoDBCreateResolverResourceID(definition.name.value)); + const updateResolver = ctx.getResource(ResolverResourceIDs.DynamoDBUpdateResolverResourceID(definition.name.value)); + const deleteResolver = ctx.getResource(ResolverResourceIDs.DynamoDBDeleteResolverResourceID(definition.name.value)); + if (this.isPrimaryKey(directive)) { + // When looking at a primary key we update the primary paths for writing/reading data. + // and ensure any composite sort keys for the primary index. + if (getResolver) { + getResolver.Properties.RequestMappingTemplate = joinSnippets([ + this.setKeySnippet(directive), + getResolver.Properties.RequestMappingTemplate + ]); + } + if (listResolver) { + listResolver.Properties.RequestMappingTemplate = joinSnippets([ + print(setQuerySnippet(definition, directive, ctx)), + listResolver.Properties.RequestMappingTemplate + ]); + } + if (createResolver) { + createResolver.Properties.RequestMappingTemplate = joinSnippets([ + this.setKeySnippet(directive, true), + ensureCompositeKeySnippet(directive), + createResolver.Properties.RequestMappingTemplate + ]); + } + if (updateResolver) { + updateResolver.Properties.RequestMappingTemplate = joinSnippets([ + this.setKeySnippet(directive, true), + ensureCompositeKeySnippet(directive), + updateResolver.Properties.RequestMappingTemplate + ]); + } + if (deleteResolver) { + deleteResolver.Properties.RequestMappingTemplate = joinSnippets([ + this.setKeySnippet(directive, true), + deleteResolver.Properties.RequestMappingTemplate + ]); + } + } else { + // When looking at a secondary key we need to ensure any composite sort key values + // and validate update operations to protect the integrity of composite sort keys. 
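+            // Illustrative example (hypothetical names): for @key(name: "byCustomer", fields: ["customerEmail", "status", "createdAt"], queryField: "ordersByCustomer"),
+            // status and createdAt are condensed into a single composite sort key attribute on the index, which is why partial updates
+            // that touch only one member of the composite key must be rejected.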
+ if (createResolver) { + createResolver.Properties.RequestMappingTemplate = joinSnippets([ + ensureCompositeKeySnippet(directive), + createResolver.Properties.RequestMappingTemplate + ]); + } + if (updateResolver) { + updateResolver.Properties.RequestMappingTemplate = joinSnippets([ + this.validateKeyUpdateArgumentsSnippet(directive), + ensureCompositeKeySnippet(directive), + updateResolver.Properties.RequestMappingTemplate + ]); + } + if (deleteResolver) { + deleteResolver.Properties.RequestMappingTemplate = joinSnippets([ + ensureCompositeKeySnippet(directive), + deleteResolver.Properties.RequestMappingTemplate + ]); + } + if (directiveArgs.queryField) { + const queryTypeName = ctx.getQueryTypeName(); + const queryResolverId = ResolverResourceIDs.ResolverResourceID(queryTypeName, directiveArgs.queryField); + const queryResolver = makeQueryResolver(definition, directive, ctx); + ctx.addToStackMapping(definition.name.value, `^${queryResolverId}$`); + ctx.setResource(queryResolverId, queryResolver); + } + } + } + + private addKeyConditionInputs = (definition: ObjectTypeDefinitionNode, directive: DirectiveNode, ctx: TransformerContext) => { + const args: KeyArguments = getDirectiveArguments(directive); + if (args.fields.length > 2) { + const compositeKeyFieldNames = args.fields.slice(1); + const compositeKeyFields = definition.fields.filter(field => Boolean(compositeKeyFieldNames.find(k => k === field.name.value))); + const keyName = args.name || 'Primary'; + const keyConditionInput = makeCompositeKeyConditionInputForKey(definition.name.value, keyName, compositeKeyFields); + if (!ctx.getType(keyConditionInput.name.value)) { + ctx.addInput(keyConditionInput); + } + const compositeKeyInput = makeCompositeKeyInputForKey(definition.name.value, keyName, compositeKeyFields); + if (!ctx.getType(compositeKeyInput.name.value)) { + ctx.addInput(compositeKeyInput); + } + } else if (args.fields.length === 2) { + const finalSortKeyFieldName = args.fields[1]; + const finalSortKeyField = definition.fields.find(f => f.name.value === finalSortKeyFieldName); + const sortKeyConditionInput = makeScalarKeyConditionForType(finalSortKeyField.type); + if (!ctx.getType(sortKeyConditionInput.name.value)) { + ctx.addInput(sortKeyConditionInput); + } + } + } + + /** + * Updates query fields to include any arguments required by the key structures. + * @param definition The object type definition node. + * @param directive The @key directive + * @param ctx The transformer context + */ + private updateQueryFields = (definition: ObjectTypeDefinitionNode, directive: DirectiveNode, ctx: TransformerContext) => { + this.updateGetField(definition, directive, ctx); + this.updateListField(definition, directive, ctx); + this.ensureQueryField(definition, directive, ctx); + } + + // If the get field exists, update its arguments with primary key information. + private updateGetField = (definition: ObjectTypeDefinitionNode, directive: DirectiveNode, ctx: TransformerContext) => { + let query = ctx.getQuery(); + const getResourceID = ResolverResourceIDs.DynamoDBGetResolverResourceID(definition.name.value); + const getResolverResource = ctx.getResource(getResourceID); + if (getResolverResource && this.isPrimaryKey(directive)) { + // By default takes a single argument named 'id'. Replace it with the updated primary key structure. 
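+            // e.g. (hypothetical schema): with @key(fields: ["customerEmail", "createdAt"]) where both fields are Strings,
+            // getOrder(id: ID!) becomes getOrder(customerEmail: String!, createdAt: String!).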
+ let getField: FieldDefinitionNode = query.fields.find(field => field.name.value === getResolverResource.Properties.FieldName) as FieldDefinitionNode; + const args: KeyArguments = getDirectiveArguments(directive); + const getArguments = args.fields.map(keyAttributeName => { + const keyField = definition.fields.find(field => field.name.value === keyAttributeName); + const keyArgument = makeInputValueDefinition(keyAttributeName, makeNonNullType(makeNamedType(getBaseType(keyField.type)))); + return keyArgument; + }) + getField = { ...getField, arguments: getArguments }; + query = { ...query, fields: query.fields.map(field => field.name.value === getField.name.value ? getField : field)} + ctx.putType(query); + } + } + + // If the list field exists, update its arguments with primary key information. + private updateListField = (definition: ObjectTypeDefinitionNode, directive: DirectiveNode, ctx: TransformerContext) => { + const listResourceID = ResolverResourceIDs.DynamoDBListResolverResourceID(definition.name.value); + const listResolverResource = ctx.getResource(listResourceID); + if (listResolverResource && this.isPrimaryKey(directive)) { + // By default takes a single argument named 'id'. Replace it with the updated primary key structure. + let query = ctx.getQuery(); + let listField: FieldDefinitionNode = query.fields.find(field => field.name.value === listResolverResource.Properties.FieldName) as FieldDefinitionNode; + let listArguments: InputValueDefinitionNode[] = [ ...listField.arguments ]; + const args: KeyArguments = getDirectiveArguments(directive); + if (args.fields.length > 2) { + listArguments = addCompositeSortKey(definition, args, listArguments); + listArguments = addHashField(definition, args, listArguments); + } else if (args.fields.length === 2) { + listArguments = addSimpleSortKey(definition, args, listArguments); + listArguments = addHashField(definition, args, listArguments); + } else { + listArguments = addHashField(definition, args, listArguments); + } + listField = { ...listField, arguments: listArguments }; + query = { ...query, fields: query.fields.map(field => field.name.value === listField.name.value ? listField : field)} + ctx.putType(query); + } + } + + // If this is a secondary key and a queryField has been provided, create the query field. + private ensureQueryField = (definition: ObjectTypeDefinitionNode, directive: DirectiveNode, ctx: TransformerContext) => { + const args: KeyArguments = getDirectiveArguments(directive); + if (args.queryField && !this.isPrimaryKey(directive)) { + let queryType = ctx.getQuery(); + let queryArguments = []; + if (args.fields.length > 2) { + queryArguments = addCompositeSortKey(definition, args, queryArguments); + queryArguments = addHashField(definition, args, queryArguments); + } else if (args.fields.length === 2) { + queryArguments = addSimpleSortKey(definition, args, queryArguments); + queryArguments = addHashField(definition, args, queryArguments); + } else { + queryArguments = addHashField(definition, args, queryArguments); + } + const queryField = makeConnectionField(args.queryField, definition.name.value, queryArguments); + queryType = { + ...queryType, + fields: [...queryType.fields, queryField] + }; + ctx.putType(queryType); + } + } + + // Update the create, update, and delete input objects to account for any changes to the primary key. 
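+ // For example (hypothetical model), a primary @key(fields: ["customerEmail", "orderId"]) makes both key fields
+ // non-null in the create and update inputs, and the delete input is rebuilt to contain exactly the key fields
+ // (customerEmail: String!, orderId: ID!).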
+ private updateInputObjects = (definition: ObjectTypeDefinitionNode, directive: DirectiveNode, ctx: TransformerContext) => { + if (this.isPrimaryKey(directive)) { + console.log(`Updating input structures for key: ${JSON.stringify(getDirectiveArguments(directive))}`); + const directiveArgs: KeyArguments = getDirectiveArguments(directive); + const createInput = ctx.getType(ModelResourceIDs.ModelCreateInputObjectName(definition.name.value)) as InputObjectTypeDefinitionNode; + if (createInput) { + ctx.putType(replaceCreateInput(definition, createInput, directiveArgs.fields)); + } + const updateInput = ctx.getType(ModelResourceIDs.ModelUpdateInputObjectName(definition.name.value)) as InputObjectTypeDefinitionNode; + if (updateInput) { + ctx.putType(replaceUpdateInput(definition, updateInput, directiveArgs.fields)); + } + const deleteInput = ctx.getType(ModelResourceIDs.ModelDeleteInputObjectName(definition.name.value)) as InputObjectTypeDefinitionNode; + if (deleteInput) { + ctx.putType(replaceDeleteInput(definition, deleteInput, directiveArgs.fields)); + } + } + } + + // Return a VTL snippet that sets the key for key for get, update, and delete operations. + private setKeySnippet = (directive: DirectiveNode, isMutation: boolean = false) => { + const directiveArgs = getDirectiveArguments(directive); + const cmds: Expression[] = [set( + ref(ResourceConstants.SNIPPETS.ModelObjectKey), + modelObjectKey(directiveArgs, isMutation) + )]; + return printBlock(`Set the primary @key`)(compoundExpression(cmds)); + } + + // When issuing an update mutation that changes one part of a composite sort key, + // you must supply the entire key so that the underlying composite key can be resaved + // in the update operation. We only need to update for composite sort keys on secondary indexes. + private validateKeyUpdateArgumentsSnippet = (directive: DirectiveNode): string => { + const directiveArgs: KeyArguments = getDirectiveArguments(directive); + if (!this.isPrimaryKey(directive) && directiveArgs.fields.length > 2) { + const sortKeyFields = directiveArgs.fields.slice(1); + return printBlock(`Validate update mutation for @key '${directiveArgs.name}'`)(compoundExpression([ + set(ref('hasSeenSomeKeyArg'), bool(false)), + set(ref('keyFieldNames'), list(sortKeyFields.map(f => str(f)))), + forEach(ref('keyFieldName'), ref('keyFieldNames'), [ + iff( + raw(`$ctx.args.input.containsKey("$keyFieldName")`), + set(ref('hasSeenSomeKeyArg'), bool(true)), + true + ) + ]), + forEach(ref('keyFieldName'), ref('keyFieldNames'), [ + iff( + raw(`$hasSeenSomeKeyArg && !$ctx.args.input.containsKey("$keyFieldName")`), + raw(`$util.error("When updating any part of the composite sort key for @key '${directiveArgs.name}',` + + ` you must provide all fields for the key. Missing key: '$keyFieldName'.")`) + ) + ]) + ])); + } + return ''; + } + + /** + * Validates the directive usage is semantically valid. + * + * 1. There may only be 1 @key without a name (specifying the primary key) + * 2. There may only be 1 @key with a given name. + * 3. @key must only reference existing scalar fields that map to DynamoDB S, N, or B. + * 4. A primary key must not include a 'queryField'. + * @param definition The object type definition node. + * @param directive The @key directive + * @param ctx The transformer context + */ + private validate = (definition: ObjectTypeDefinitionNode, directive: DirectiveNode, ctx: TransformerContext) => { + const directiveArgs = getDirectiveArguments(directive); + if (!directiveArgs.name) { + // 1. 
Make sure there are no more directives without a name. + for (const otherDirective of definition.directives.filter(d => d.name.value === 'key')) { + const otherArgs = getDirectiveArguments(otherDirective); + if (otherDirective !== directive && !otherArgs.name) { + throw new InvalidDirectiveError(`You may only supply one primary @key on type '${definition.name.value}'.`); + } + } + // 4. Make sure that a 'queryField' is not included on a primary @key. + if (directiveArgs.queryField) { + throw new InvalidDirectiveError(`You cannot pass 'queryField' to the primary @key on type '${definition.name.value}'.`); + } + } else { + // 2. Make sure there are no more directives with the same name. + for (const otherDirective of definition.directives.filter(d => d.name.value === 'key')) { + const otherArgs = getDirectiveArguments(otherDirective); + if (otherDirective !== directive && otherArgs.name === directiveArgs.name) { + throw new InvalidDirectiveError(`You may only supply one @key with the name '${directiveArgs.name}' on type '${definition.name.value}'.`); + } + } + } + // 3. Check that fields exist and are valid key types. + const fieldMap = new Map(); + for (const field of definition.fields) { + fieldMap.set(field.name.value, field); + } + for (const fieldName of directiveArgs.fields) { + if (!fieldMap.has(fieldName)) { + throw new InvalidDirectiveError(`You cannot specify a non-existent field '${fieldName}' in @key '${directiveArgs.name}' on type '${definition.name.value}'.`); + } else { + const existingField = fieldMap.get(fieldName); + const ddbKeyType = attributeTypeFromType(existingField.type, ctx); + if (this.isPrimaryKey(directive) && !isNonNullType(existingField.type)) { + throw new InvalidDirectiveError(`The primary @key on type '${definition.name.value}' must reference non-null fields.`); + } else if (ddbKeyType !== 'S' && ddbKeyType !== 'N' && ddbKeyType !== 'B') { + throw new InvalidDirectiveError(`A @key on type '${definition.name.value}' cannot reference non-scalar field ${fieldName}.`); + } + } + } + } + + /** + * Returns true if the directive specifies a primary key. + * @param directive The directive node. + */ + isPrimaryKey = (directive: DirectiveNode) => { + const directiveArgs = getDirectiveArguments(directive); + return !Boolean(directiveArgs.name); + } + + /** + * Replace the primary key schema with one defined by a @key. + * @param definition The object type definition node. + * @param directive The @key directive + * @param ctx The transformer context + */ + replacePrimaryKey = (definition: ObjectTypeDefinitionNode, directive: DirectiveNode, ctx: TransformerContext) => { + const args: KeyArguments = getDirectiveArguments(directive); + const ks = keySchema(args); + const attrDefs = attributeDefinitions(args, definition, ctx); + const tableLogicalID = ModelResourceIDs.ModelTableResourceID(definition.name.value); + const tableResource = ctx.getResource(tableLogicalID); + if (!tableResource) { + throw new InvalidDirectiveError(`The @key directive may only be added to object definitions annotated with @model.`); + } else { + // First remove any attribute definitions in the current primary key. 
+ const existingAttrDefSet = new Set(tableResource.Properties.AttributeDefinitions.map(ad => ad.AttributeName)); + for (const existingKey of tableResource.Properties.KeySchema) { + if (existingAttrDefSet.has(existingKey.AttributeName)) { + tableResource.Properties.AttributeDefinitions = tableResource.Properties.AttributeDefinitions.filter(ad => ad.AttributeName !== existingKey.AttributeName); + existingAttrDefSet.delete(existingKey.AttributeName); + } + } + // Then replace the KeySchema and add any new attribute definitions back. + tableResource.Properties.KeySchema = ks; + for (const attr of attrDefs) { + if (!existingAttrDefSet.has(attr.AttributeName)) { + tableResource.Properties.AttributeDefinitions.push(attr); + } + } + } + } + + /** + * Add a LSI or GSI to the table as defined by a @key. + * @param definition The object type definition node. + * @param directive The @key directive + * @param ctx The transformer context + */ + appendSecondaryIndex = (definition: ObjectTypeDefinitionNode, directive: DirectiveNode, ctx: TransformerContext) => { + const args: KeyArguments = getDirectiveArguments(directive); + const ks = keySchema(args); + const attrDefs = attributeDefinitions(args, definition, ctx); + const tableLogicalID = ModelResourceIDs.ModelTableResourceID(definition.name.value); + const tableResource = ctx.getResource(tableLogicalID); + const primaryKeyDirective = getPrimaryKey(definition); + const primaryPartitionKeyName = primaryKeyDirective ? getDirectiveArguments(primaryKeyDirective).fields[0] : 'id'; + if (!tableResource) { + throw new InvalidDirectiveError(`The @key directive may only be added to object definitions annotated with @model.`); + } else { + const baseIndexProperties = { + IndexName: args.name, + KeySchema: ks, + Projection: new Projection({ + ProjectionType: 'ALL' + }) + }; + if (primaryPartitionKeyName === ks[0].AttributeName) { + // This is an LSI. + // Add the new secondary index and update the table's attribute definitions. + tableResource.Properties.LocalSecondaryIndexes = append( + tableResource.Properties.LocalSecondaryIndexes, + new LocalSecondaryIndex(baseIndexProperties) + ) + } else { + // This is a GSI. + // Add the new secondary index and update the table's attribute definitions. + tableResource.Properties.GlobalSecondaryIndexes = append( + tableResource.Properties.GlobalSecondaryIndexes, + new GlobalSecondaryIndex({ + ...baseIndexProperties, + ProvisionedThroughput: Fn.If( + ResourceConstants.CONDITIONS.ShouldUsePayPerRequestBilling, + Refs.NoValue, + { + ReadCapacityUnits: Fn.Ref(ResourceConstants.PARAMETERS.DynamoDBModelTableReadIOPS), + WriteCapacityUnits: Fn.Ref(ResourceConstants.PARAMETERS.DynamoDBModelTableWriteIOPS) + } + ) as any, + }) + ) + } + const existingAttrDefSet = new Set(tableResource.Properties.AttributeDefinitions.map(ad => ad.AttributeName)); + for (const attr of attrDefs) { + if (!existingAttrDefSet.has(attr.AttributeName)) { + tableResource.Properties.AttributeDefinitions.push(attr); + } + } + } + } +} + +/** + * Return a key schema given @key directive arguments. + * @param args The arguments of the @key directive. 
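+ *
+ * For example (hypothetical arguments), fields: ["email", "status", "date"] yields
+ *   [{ AttributeName: 'email', KeyType: 'HASH' }, { AttributeName: 'status#date', KeyType: 'RANGE' }]
+ * because every field after the first is condensed into a single '#'-separated range key.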
+ */ +function keySchema(args: KeyArguments) { + if (args.fields.length > 1) { + const condensedSortKey = condenseRangeKey(args.fields.slice(1)); + return [ + { AttributeName: args.fields[0], KeyType: 'HASH' }, + { AttributeName: condensedSortKey, KeyType: 'RANGE' }, + ]; + } else { + return [{ AttributeName: args.fields[0], KeyType: 'HASH' }]; + } +} + +function attributeTypeFromType(type: TypeNode, ctx: TransformerContext) { + const baseTypeName = getBaseType(type); + const ofType = ctx.getType(baseTypeName); + if (ofType && ofType.kind === Kind.ENUM_TYPE_DEFINITION) { + return 'S'; + } + return attributeTypeFromScalar(type); +} + +/** + * Return a list of attribute definitions given a @key directive arguments and an object definition. + * @param args The arguments passed to @key. + * @param def The object type definition containing the @key. + */ +function attributeDefinitions(args: KeyArguments, def: ObjectTypeDefinitionNode, ctx: TransformerContext) { + const fieldMap = new Map(); + for (const field of def.fields) { + fieldMap.set(field.name.value, field); + } + if (args.fields.length > 2) { + const hashName = args.fields[0]; + const condensedSortKey = condenseRangeKey(args.fields.slice(1)); + return [ + { AttributeName: hashName, AttributeType: attributeTypeFromType(fieldMap.get(hashName).type, ctx) }, + { AttributeName: condensedSortKey, AttributeType: 'S' }, + ]; + } else if (args.fields.length === 2) { + const hashName = args.fields[0]; + const sortName = args.fields[1]; + return [ + { AttributeName: hashName, AttributeType: attributeTypeFromType(fieldMap.get(hashName).type, ctx) }, + { AttributeName: sortName, AttributeType: attributeTypeFromType(fieldMap.get(sortName).type, ctx) }, + ]; + } else { + const fieldName = args.fields[0]; + return [{ AttributeName: fieldName, AttributeType: attributeTypeFromType(fieldMap.get(fieldName).type, ctx) }]; + } +} + +function append(maybeList: T[] | undefined, item: T) { + if (maybeList) { + return [...maybeList, item]; + } + return [item]; +} + +function getPrimaryKey(obj: ObjectTypeDefinitionNode): DirectiveNode | undefined { + for (const directive of obj.directives) { + if (directive.name.value === 'key' && !getDirectiveArguments(directive).name) { + return directive; + } + } +} + +function primaryIdFields(definition: ObjectTypeDefinitionNode, keyFields: string[]): InputValueDefinitionNode[] { + return keyFields.map(keyFieldName => { + const keyField: FieldDefinitionNode = definition.fields.find(field => field.name.value === keyFieldName); + return makeInputValueDefinition(keyFieldName, makeNonNullType(makeNamedType(getBaseType(keyField.type)))); + }) +} + +// Key fields are non-nullable, non-key fields follow what their @model declaration makes. +function replaceCreateInput(definition: ObjectTypeDefinitionNode, input: InputObjectTypeDefinitionNode, keyFields: string[]): InputObjectTypeDefinitionNode { + return { + ...input, + fields: input.fields.reduce((acc, f) => { + // If the field is a key, make it non-null. + if (keyFields.find(k => k === f.name.value)) { + return [...acc, makeInputValueDefinition(f.name.value, makeNonNullType(makeNamedType(getBaseType(f.type))))]; + } else { + // If the field is not a key, use whatever the model type defines. 
+ const existingField = definition.fields.find(field => field.name.value === f.name.value); + if (existingField && isNonNullType(existingField.type)) { + return [...acc, makeInputValueDefinition(f.name.value, makeNonNullType(makeNamedType(getBaseType(f.type))))]; + } else if (existingField) { + return [...acc, makeInputValueDefinition(f.name.value, makeNamedType(getBaseType(f.type)))]; + } + } + return acc; + }, []) + }; +}; + +// Key fields are non-nullable, non-key fields are not non-nullable. +function replaceUpdateInput(definition: ObjectTypeDefinitionNode, input: InputObjectTypeDefinitionNode, keyFields: string[]): InputObjectTypeDefinitionNode { + return { + ...input, + fields: input.fields.map( + f => { + if (keyFields.find(k => k === f.name.value)) { + return makeInputValueDefinition(f.name.value, makeNonNullType(makeNamedType(getBaseType(f.type)))); + } else { + return makeInputValueDefinition(f.name.value, makeNamedType(getBaseType(f.type))); + } + } + ) + }; +}; + +// Key fields are non-nullable, non-key fields are not non-nullable. +function replaceDeleteInput(definition: ObjectTypeDefinitionNode, input: InputObjectTypeDefinitionNode, keyFields: string[]): InputObjectTypeDefinitionNode { + return { + ...input, + fields: primaryIdFields(definition, keyFields) + }; +}; + +/** + * Return a VTL object containing the compressed key information. + * @param args The arguments of the @key directive. + */ +function modelObjectKey(args: KeyArguments, isMutation: boolean) { + const argsPrefix = isMutation ? + 'ctx.args.input' : + 'ctx.args'; + if (args.fields.length > 2) { + const rangeKeyFields = args.fields.slice(1); + const condensedSortKey = condenseRangeKey(rangeKeyFields); + const condensedSortKeyValue = condenseRangeKey( + rangeKeyFields.map(keyField => `\${${argsPrefix}.${keyField}}`) + ); + return obj({ + [args.fields[0]]: ref(`util.dynamodb.toDynamoDB($${argsPrefix}.${args.fields[0]})`), + [condensedSortKey]: ref(`util.dynamodb.toDynamoDB("${condensedSortKeyValue}")`) + }); + } else if (args.fields.length === 2) { + return obj({ + [args.fields[0]]: ref(`util.dynamodb.toDynamoDB($${argsPrefix}.${args.fields[0]})`), + [args.fields[1]]: ref(`util.dynamodb.toDynamoDB($${argsPrefix}.${args.fields[1]})`) + }); + } else if (args.fields.length === 1) { + return obj({ + [args.fields[0]]: ref(`util.dynamodb.toDynamoDB($${argsPrefix}.${args.fields[0]})`), + }); + } + throw new InvalidDirectiveError('@key directives must include at least one field.'); +} + +function ensureCompositeKeySnippet(dir: DirectiveNode): string { + const args: KeyArguments = getDirectiveArguments(dir); + const argsPrefix = 'ctx.args.input'; + if (args.fields.length > 2) { + const rangeKeyFields = args.fields.slice(1); + const condensedSortKey = condenseRangeKey(rangeKeyFields); + const dynamoDBFriendlySortKeyName = toCamelCase(rangeKeyFields.map(f => graphqlName(f))); + const condensedSortKeyValue = condenseRangeKey( + rangeKeyFields.map(keyField => `\${${argsPrefix}.${keyField}}`) + ); + return print(compoundExpression([ + ifElse( + raw(`$util.isNull($${ResourceConstants.SNIPPETS.DynamoDBNameOverrideMap})`), + set(ref(ResourceConstants.SNIPPETS.DynamoDBNameOverrideMap), obj({ + [condensedSortKey]: str(dynamoDBFriendlySortKeyName) + })), + qref(`$${ResourceConstants.SNIPPETS.DynamoDBNameOverrideMap}.put("${condensedSortKey}", "${dynamoDBFriendlySortKeyName}")`) + ), + qref(`$ctx.args.input.put("${condensedSortKey}","${condensedSortKeyValue}")`) + ])); + } + return ''; +} + +function condenseRangeKey(fields: 
string[]) { + return fields.join(ModelResourceIDs.ModelCompositeKeySeparator()); +} + +function makeQueryResolver(definition: ObjectTypeDefinitionNode, directive: DirectiveNode, ctx: TransformerContext) { + const type = definition.name.value; + const directiveArgs: KeyArguments = getDirectiveArguments(directive); + const index = directiveArgs.name; + const fieldName = directiveArgs.queryField; + const queryTypeName = ctx.getQueryTypeName(); + const defaultPageLimit = 10 + const requestVariable = 'QueryRequest'; + return new AppSync.Resolver({ + ApiId: Fn.GetAtt(ResourceConstants.RESOURCES.GraphQLAPILogicalID, 'ApiId'), + DataSourceName: Fn.GetAtt(ModelResourceIDs.ModelTableDataSourceID(type), 'Name'), + FieldName: fieldName, + TypeName: queryTypeName, + RequestMappingTemplate: print( + compoundExpression([ + setQuerySnippet(definition, directive, ctx), + set(ref('limit'), ref(`util.defaultIfNull($context.args.limit, ${defaultPageLimit})`)), + set( + ref(requestVariable), + obj({ + version: str('2017-02-28'), + operation: str('Query'), + limit: ref('limit'), + query: ref(ResourceConstants.SNIPPETS.ModelQueryExpression), + index: str(index) + }) + ), + iff( + ref('context.args.nextToken'), + set( + ref(`${requestVariable}.nextToken`), + str('$context.args.nextToken') + ), + true + ), + iff( + ref('context.args.filter'), + set( + ref(`${requestVariable}.filter`), + ref('util.transform.toDynamoDBFilterExpression($ctx.args.filter)') + ), + true + ), + raw(`$util.toJson($${requestVariable})`) + ]) + ), + ResponseMappingTemplate: print( + raw('$util.toJson($ctx.result)') + ) + }) +} + +function setQuerySnippet(definition: ObjectTypeDefinitionNode, directive: DirectiveNode, ctx: TransformerContext) { + const args: KeyArguments = getDirectiveArguments(directive); + const keys = args.fields; + const keyTypes = keys.map(k => { + const field = definition.fields.find(f => f.name.value === k); + return attributeTypeFromType(field.type, ctx); + }) + return block(`Set query expression for @key`, [ + set(ref(ResourceConstants.SNIPPETS.ModelQueryExpression), obj({})), + applyKeyExpressionForCompositeKey(keys, keyTypes, ResourceConstants.SNIPPETS.ModelQueryExpression) + ]) +} + +function addHashField(definition: ObjectTypeDefinitionNode, args: KeyArguments, elems: InputValueDefinitionNode[]): InputValueDefinitionNode[] { + let hashFieldName = args.fields[0]; + const hashField = definition.fields.find(field => field.name.value === hashFieldName); + const hashKey = makeInputValueDefinition(hashFieldName, makeNamedType(getBaseType(hashField.type))); + return [hashKey, ...elems]; +} +function addSimpleSortKey(definition: ObjectTypeDefinitionNode, args: KeyArguments, elems: InputValueDefinitionNode[]): InputValueDefinitionNode[] { + let sortKeyName = args.fields[1]; + const sortField = definition.fields.find(field => field.name.value === sortKeyName); + const hashKey = makeInputValueDefinition(sortKeyName, makeNamedType(ModelResourceIDs.ModelKeyConditionInputTypeName(getBaseType(sortField.type)))); + return [hashKey, ...elems]; +} +function addCompositeSortKey(definition: ObjectTypeDefinitionNode, args: KeyArguments, elems: InputValueDefinitionNode[]): InputValueDefinitionNode[] { + let sortKeyNames = args.fields.slice(1); + const compositeSortKeyName = toCamelCase(sortKeyNames); + const hashKey = makeInputValueDefinition(compositeSortKeyName, makeNamedType(ModelResourceIDs.ModelCompositeKeyConditionInputTypeName(definition.name.value, args.name || 'Primary'))); + return [hashKey, ...elems]; +} +function 
joinSnippets(lines: string[]): string { + return lines.join('\n'); +} \ No newline at end of file diff --git a/packages/graphql-key-transformer/src/__tests__/KeyTransformer.test.ts b/packages/graphql-key-transformer/src/__tests__/KeyTransformer.test.ts new file mode 100644 index 00000000000..6f2bae623c4 --- /dev/null +++ b/packages/graphql-key-transformer/src/__tests__/KeyTransformer.test.ts @@ -0,0 +1,71 @@ +import GraphQLTransform, { Transformer, InvalidDirectiveError } from 'graphql-transformer-core' +import KeyTransformer from '../KeyTransformer' + +test('KeyTransformer should fail if more than 1 @key is provided without a name.', () => { + const validSchema = ` + type Test @key(fields: ["id"]) @key(fields: ["email"]) { + id: ID! + email: String + } + ` + + const transformer = new GraphQLTransform({ + transformers: [ + new KeyTransformer() + ] + }) + + expect(() => transformer.transform(validSchema)).toThrowError(InvalidDirectiveError); +}) + +test('KeyTransformer should fail if more than 1 @key is provided with the same name.', () => { + const validSchema = ` + type Test @key(name: "Test", fields: ["id"]) @key(name: "Test", fields: ["email"]) { + id: ID! + email: String + } + ` + + const transformer = new GraphQLTransform({ + transformers: [ + new KeyTransformer() + ] + }) + + expect(() => transformer.transform(validSchema)).toThrowError(InvalidDirectiveError); +}) + + +test('KeyTransformer should fail if referencing a field that does not exist.', () => { + const validSchema = ` + type Test @key(fields: ["someWeirdId"]) { + id: ID! + email: String + } + ` + + const transformer = new GraphQLTransform({ + transformers: [ + new KeyTransformer() + ] + }) + + expect(() => transformer.transform(validSchema)).toThrowError(InvalidDirectiveError); +}) + +test('Test that a primary @key fails if pointing to nullable fields.', () => { + const validSchema = ` + type Test @key(fields: ["email"]) { + id: ID! 
+ email: String + } + ` + + const transformer = new GraphQLTransform({ + transformers: [ + new KeyTransformer() + ] + }) + + expect(() => transformer.transform(validSchema)).toThrowError(InvalidDirectiveError); +}) diff --git a/packages/graphql-key-transformer/src/index.ts b/packages/graphql-key-transformer/src/index.ts new file mode 100644 index 00000000000..732333baf0f --- /dev/null +++ b/packages/graphql-key-transformer/src/index.ts @@ -0,0 +1,2 @@ +import KeyTransformer from './KeyTransformer'; +export default KeyTransformer; \ No newline at end of file diff --git a/packages/graphql-key-transformer/tsconfig.json b/packages/graphql-key-transformer/tsconfig.json new file mode 100644 index 00000000000..78d733e565b --- /dev/null +++ b/packages/graphql-key-transformer/tsconfig.json @@ -0,0 +1,18 @@ +{ + "compilerOptions": { + "target": "es5", + "module": "commonjs", + "sourceMap": true, + "outDir": "lib", + "lib": [ + "es2015", + "es2016.array.include", + "esnext.asynciterable", + "dom" + ] + }, + "exclude": [ + "node_modules", + "lib" + ] +} \ No newline at end of file diff --git a/packages/graphql-key-transformer/tslint.json b/packages/graphql-key-transformer/tslint.json new file mode 100644 index 00000000000..721e3d3b6c5 --- /dev/null +++ b/packages/graphql-key-transformer/tslint.json @@ -0,0 +1,6 @@ +{ + "extends": "tslint-config-airbnb", + "rules": { + "semicolon": true + } +} \ No newline at end of file diff --git a/packages/graphql-mapping-template/src/dynamodb.ts b/packages/graphql-mapping-template/src/dynamodb.ts index 1e2e3ee7442..ff1432bb71a 100644 --- a/packages/graphql-mapping-template/src/dynamodb.ts +++ b/packages/graphql-mapping-template/src/dynamodb.ts @@ -11,7 +11,7 @@ export class DynamoDBMappingTemplate { * @param keys A list of strings pointing to the key value locations. E.G. ctx.args.x (note no $) */ public static putItem({ key, attributeValues, condition }: { - key: ObjectNode, + key: ObjectNode | Expression, attributeValues: Expression, condition?: ObjectNode }): ObjectNode { @@ -29,7 +29,7 @@ export class DynamoDBMappingTemplate { * @param key A list of strings pointing to the key value locations. E.G. ctx.args.x (note no $) */ public static getItem({ key }: { - key: ObjectNode + key: ObjectNode | Expression }): ObjectNode { return obj({ version: str('2017-02-28'), @@ -66,17 +66,23 @@ export class DynamoDBMappingTemplate { * Create a list item resolver template. * @param key A list of strings pointing to the key value locations. E.G. ctx.args.x (note no $) */ - public static listItem({ filter, limit, nextToken }: { + public static listItem({ filter, limit, nextToken, scanIndexForward, query, index }: { filter: ObjectNode | Expression, limit: Expression, - nextToken?: Expression + nextToken?: Expression, + scanIndexForward?: Expression; + query?: ObjectNode | Expression, + index?: StringNode, }): ObjectNode { return obj({ version: str('2017-02-28'), operation: str('Scan'), filter, limit, - nextToken + nextToken, + query, + index, + scanIndexForward, }) } @@ -85,7 +91,7 @@ export class DynamoDBMappingTemplate { * @param key A list of strings pointing to the key value locations. E.G. ctx.args.x (note no $) */ public static deleteItem({ key, condition }: { - key: ObjectNode, + key: ObjectNode | Expression, condition: ObjectNode | ReferenceNode }): ObjectNode { return obj({ @@ -100,33 +106,52 @@ export class DynamoDBMappingTemplate { * Create an update item resolver template. 
* @param key */ - public static updateItem({ key, condition }: { - key: ObjectNode, - condition: ObjectNode | ReferenceNode + public static updateItem({ key, condition, objectKeyVariable, nameOverrideMap }: { + key: ObjectNode | Expression, + condition: ObjectNode | ReferenceNode, + objectKeyVariable: string, + nameOverrideMap?: string }): CompoundExpressionNode { - const keyNames = key.attributes.map((attr: [string, Expression]) => attr[0]) + // const keyFields = key.attributes.map((attr: [string, Expression]) => attr[0]) // Auto timestamp // qref('$input.put("updatedAt", "$util.time.nowISO8601()")'), + const entryKeyAttributeNameVar = 'entryKeyAttributeName'; + const handleRename = (keyVar: string) => ifElse( + raw(`!$util.isNull($${nameOverrideMap}) && $${nameOverrideMap}.containsKey("${keyVar}")`), + set(ref(entryKeyAttributeNameVar), raw(`$${nameOverrideMap}.get("${keyVar}")`)), + set(ref(entryKeyAttributeNameVar), raw(keyVar)), + ); return compoundExpression([ set(ref('expNames'), obj({})), set(ref('expValues'), obj({})), set(ref('expSet'), obj({})), set(ref('expAdd'), obj({})), set(ref('expRemove'), list([])), + ifElse( + ref(objectKeyVariable), + compoundExpression([ + set(ref('keyFields'), list([])), + forEach(ref('entry'), ref(`${objectKeyVariable}.entrySet()`),[ + qref('$keyFields.add("$entry.key")') + ]), + ]), + set(ref('keyFields'), list([str('id')])), + ), forEach( ref('entry'), - ref(`util.map.copyAndRemoveAllKeys($context.args.input, [${keyNames.map(k => `"${k}"`).join(', ')}]).entrySet()`), + ref(`util.map.copyAndRemoveAllKeys($context.args.input, $keyFields).entrySet()`), [ + handleRename('$entry.key'), ifElse( ref('util.isNull($entry.value)'), compoundExpression([ - set(ref('discard'), ref('expRemove.add("#$entry.key")')), - qref('$expNames.put("#$entry.key", "$entry.key")') + set(ref('discard'), ref(`expRemove.add("#$${entryKeyAttributeNameVar}")`)), + qref(`$expNames.put("#$${entryKeyAttributeNameVar}", "$entry.key")`) ]), compoundExpression([ - qref('$expSet.put("#$entry.key", ":$entry.key")'), - qref('$expNames.put("#$entry.key", "$entry.key")'), - qref('$expValues.put(":$entry.key", $util.dynamodb.toDynamoDB($entry.value))') + qref(`$expSet.put("#$${entryKeyAttributeNameVar}", ":$${entryKeyAttributeNameVar}")`), + qref(`$expNames.put("#$${entryKeyAttributeNameVar}", "$entry.key")`), + qref(`$expValues.put(":$${entryKeyAttributeNameVar}", $util.dynamodb.toDynamoDB($entry.value))`) ]) ) ] diff --git a/packages/graphql-transformer-common/src/ModelResourceIDs.ts b/packages/graphql-transformer-common/src/ModelResourceIDs.ts index 3367492c77b..ad66ca9c5df 100644 --- a/packages/graphql-transformer-common/src/ModelResourceIDs.ts +++ b/packages/graphql-transformer-common/src/ModelResourceIDs.ts @@ -1,4 +1,4 @@ -import { graphqlName, toUpper } from './util' +import { graphqlName, toUpper, toCamelCase, simplifyName } from './util' import { DEFAULT_SCALARS } from './definition' export class ModelResourceIDs { @@ -29,6 +29,21 @@ export class ModelResourceIDs { } return `Model${name}KeyConditionInput` } + static ModelCompositeKeyArgumentName(keyFieldNames: string[]) { + return toCamelCase(keyFieldNames.map(n => graphqlName(n))); + } + static ModelCompositeKeySeparator() { + return '#'; + } + static ModelCompositeAttributeName(keyFieldNames: string[]) { + return keyFieldNames.join(ModelResourceIDs.ModelCompositeKeySeparator()); + } + static ModelCompositeKeyConditionInputTypeName(modelName: string, keyName: string): string { + return 
`Model${modelName}${keyName}CompositeKeyConditionInput` + } + static ModelCompositeKeyInputTypeName(modelName: string, keyName: string): string { + return `Model${modelName}${keyName}CompositeKeyInput` + } static ModelFilterListInputTypeName(name: string): string { const nameOverride = DEFAULT_SCALARS[name] if (nameOverride) { diff --git a/packages/graphql-transformer-common/src/ResolverResourceIDs.ts b/packages/graphql-transformer-common/src/ResolverResourceIDs.ts index a6f4fbc3803..1456c72fa5b 100644 --- a/packages/graphql-transformer-common/src/ResolverResourceIDs.ts +++ b/packages/graphql-transformer-common/src/ResolverResourceIDs.ts @@ -1,4 +1,4 @@ -import { graphqlName } from "./util"; +import { graphqlName, toUpper } from "./util"; export class ResolverResourceIDs { static DynamoDBCreateResolverResourceID(typeName: string): string { @@ -10,9 +10,6 @@ export class ResolverResourceIDs { static DynamoDBDeleteResolverResourceID(typeName: string): string { return `Delete${typeName}Resolver` } - static DynamoDBQueryResolverResourceID(typeName: string): string { - return `Query${typeName}Resolver` - } static DynamoDBGetResolverResourceID(typeName: string): string { return `Get${typeName}Resolver` } diff --git a/packages/graphql-transformer-common/src/ResourceConstants.ts b/packages/graphql-transformer-common/src/ResourceConstants.ts index b29a5c2ffdd..18b6b261d31 100644 --- a/packages/graphql-transformer-common/src/ResourceConstants.ts +++ b/packages/graphql-transformer-common/src/ResourceConstants.ts @@ -91,6 +91,10 @@ export class ResourceConstants { public static readonly SNIPPETS = { AuthCondition: "authCondition", VersionedCondition: "versionedCondition", + ModelObjectKey: "modelObjectKey", + DynamoDBNameOverrideMap: "dynamodbNameOverrideMap", + ModelQueryExpression: "modelQueryExpression", + ModelQueryIndex: "modelQueryIndex", IsDynamicGroupAuthorizedVariable: "isDynamicGroupAuthorized", IsLocalDynamicGroupAuthorizedVariable: "isLocalDynamicGroupAuthorized", IsStaticGroupAuthorizedVariable: "isStaticGroupAuthorized", diff --git a/packages/graphql-transformer-common/src/connectionUtils.ts b/packages/graphql-transformer-common/src/connectionUtils.ts new file mode 100644 index 00000000000..e8ac955e9a8 --- /dev/null +++ b/packages/graphql-transformer-common/src/connectionUtils.ts @@ -0,0 +1,15 @@ +import { makeField, makeInputValueDefinition, makeNamedType } from './definition'; +import { ModelResourceIDs } from './ModelResourceIDs'; +import { FieldDefinitionNode, InputValueDefinitionNode } from 'graphql'; +export function makeConnectionField(fieldName: string, returnTypeName: string, args: InputValueDefinitionNode[] = []): FieldDefinitionNode { + return makeField( + fieldName, + [ + ...args, + makeInputValueDefinition('filter', makeNamedType(ModelResourceIDs.ModelFilterInputTypeName(returnTypeName))), + makeInputValueDefinition('limit', makeNamedType('Int')), + makeInputValueDefinition('nextToken', makeNamedType('String')) + ], + makeNamedType(ModelResourceIDs.ModelConnectionTypeName(returnTypeName)) + ) +} diff --git a/packages/graphql-transformer-common/src/definition.ts b/packages/graphql-transformer-common/src/definition.ts index bd97f8c6a1d..636449e620d 100644 --- a/packages/graphql-transformer-common/src/definition.ts +++ b/packages/graphql-transformer-common/src/definition.ts @@ -5,7 +5,8 @@ import { valueFromASTUntyped, ArgumentNode, DirectiveNode, EnumTypeDefinitionNode, ValueNode, ListValueNode, - ObjectValueNode + ObjectValueNode, + InputObjectTypeDefinitionNode } from 
'graphql' import { access } from 'fs'; @@ -204,6 +205,18 @@ export function extensionWithFields(object: ObjectTypeExtensionNode, fields: Fie } } +export function makeInputObjectDefinition(name: string, inputs: InputValueDefinitionNode[]): InputObjectTypeDefinitionNode { + return { + kind: 'InputObjectTypeDefinition', + name: { + kind: 'Name', + value: name + }, + fields: inputs, + directives: [] + } +} + export function makeField(name: string, args: InputValueDefinitionNode[], type: TypeNode, directives: DirectiveNode[] = []): FieldDefinitionNode { return { kind: Kind.FIELD_DEFINITION, diff --git a/packages/graphql-transformer-common/src/dynamodbUtils.ts b/packages/graphql-transformer-common/src/dynamodbUtils.ts new file mode 100644 index 00000000000..c3e8f7fc08c --- /dev/null +++ b/packages/graphql-transformer-common/src/dynamodbUtils.ts @@ -0,0 +1,497 @@ +import { InputObjectTypeDefinitionNode, InputValueDefinitionNode, Kind, TypeNode, FieldDefinitionNode } from 'graphql'; +import { makeListType, makeNamedType, getBaseType, makeInputValueDefinition, DEFAULT_SCALARS, makeInputObjectDefinition } from './definition'; +import { ModelResourceIDs } from './ModelResourceIDs'; +import { compoundExpression, block, iff, raw, set, ref, qref, obj, str, printBlock, list, forEach, Expression, newline, ReferenceNode, ifElse } from 'graphql-mapping-template'; +import { toCamelCase } from './util'; + +// Key conditions +const STRING_KEY_CONDITIONS = ['eq', 'le', 'lt', 'ge', 'gt', 'between', 'beginsWith'] +const ID_KEY_CONDITIONS = ['eq', 'le', 'lt', 'ge', 'gt', 'between', 'beginsWith'] +const INT_KEY_CONDITIONS = ['eq', 'le', 'lt', 'ge', 'gt', 'between'] +const FLOAT_KEY_CONDITIONS = ['eq', 'le', 'lt', 'ge', 'gt', 'between'] + +function getScalarKeyConditions(type: string): string[] { + switch (type) { + case 'String': + return STRING_KEY_CONDITIONS + case 'ID': + return ID_KEY_CONDITIONS + case 'Int': + return INT_KEY_CONDITIONS + case 'Float': + return FLOAT_KEY_CONDITIONS + default: + throw 'Valid types are String, ID, Int, Float, Boolean' + } +} +export function makeModelScalarKeyConditionInputObject(type: string): InputObjectTypeDefinitionNode { + const name = ModelResourceIDs.ModelKeyConditionInputTypeName(type) + const conditions = getScalarKeyConditions(type) + const fields: InputValueDefinitionNode[] = conditions + .map((condition: string) => ({ + kind: Kind.INPUT_VALUE_DEFINITION, + name: { kind: "Name" as "Name", value: condition }, + type: condition === 'between' ? 
makeListType(makeNamedType(type)) : makeNamedType(type), + directives: [] + })) + return makeInputObjectDefinition(name, fields); +} + +const STRING_KEY_CONDITION = makeModelScalarKeyConditionInputObject('String'); +const ID_KEY_CONDITION = makeModelScalarKeyConditionInputObject('ID'); +const INT_KEY_CONDITION = makeModelScalarKeyConditionInputObject('Int'); +const FLOAT_KEY_CONDITION = makeModelScalarKeyConditionInputObject('Float'); +const SCALAR_KEY_CONDITIONS = [STRING_KEY_CONDITION, ID_KEY_CONDITION, INT_KEY_CONDITION, FLOAT_KEY_CONDITION]; +export function makeScalarKeyConditionInputs(): InputObjectTypeDefinitionNode[] { + return SCALAR_KEY_CONDITIONS; +} +export function makeScalarKeyConditionForType(type: TypeNode): InputObjectTypeDefinitionNode { + const inputName = ModelResourceIDs.ModelKeyConditionInputTypeName(getBaseType(type)); + for (const key of SCALAR_KEY_CONDITIONS) { + if (key.name.value === inputName) { + return key; + } + } +} + +/** + * Given a list of key fields, create a composite key input type for the sort key condition. + * Given, + * type User @model @key(fields: ["a", "b", "c"]) { a: String, b: String, c: String } + * a composite key will be formed over "a" and "b". This will output: + * input UserPrimaryCompositeKeyConditionInput { + * beginsWith: UserPrimaryCompositeKeyInput, + * between: [UserPrimaryCompositeKeyInput], + * eq, le, lt, gt, ge: UserPrimaryCompositeKeyInput + * } + * input UserPrimaryCompositeKeyInput { + * b: String + * c: String + * } + */ +export function makeCompositeKeyConditionInputForKey(modelName: string, keyName: string, fields: FieldDefinitionNode[]): InputObjectTypeDefinitionNode { + const name = ModelResourceIDs.ModelCompositeKeyConditionInputTypeName(modelName, keyName) + const conditions = STRING_KEY_CONDITIONS; + const inputValues: InputValueDefinitionNode[] = conditions + .map((condition: string) => { + // Between takes a list of comosite key nodes. + const typeNode = condition === 'between' ? + makeListType(makeNamedType(ModelResourceIDs.ModelCompositeKeyInputTypeName(modelName, keyName))) : + makeNamedType(ModelResourceIDs.ModelCompositeKeyInputTypeName(modelName, keyName)); + return makeInputValueDefinition(condition, typeNode); + }); + return makeInputObjectDefinition(name, inputValues); +} + +export function makeCompositeKeyInputForKey(modelName: string, keyName: string, fields: FieldDefinitionNode[]): InputObjectTypeDefinitionNode { + const inputValues = fields.map( + (field: FieldDefinitionNode, idx) => { + const baseTypeName = getBaseType(field.type); + const nameOverride = DEFAULT_SCALARS[baseTypeName] + let typeNode = null; + if (idx === fields.length -1 && nameOverride) { + typeNode = makeNamedType(nameOverride) + } else { + typeNode = makeNamedType(baseTypeName) + } + return makeInputValueDefinition(field.name.value, typeNode); + }); + const inputName = ModelResourceIDs.ModelCompositeKeyInputTypeName(modelName, keyName); + return makeInputObjectDefinition(inputName, inputValues); +} + +/** +* Key conditions materialize as instances of ModelXKeyConditionInput passed via $ctx.args. +* If the arguments with the given sortKey name exists, create a DynamoDB expression that +* implements its logic. Possible operators: eq, le, lt, ge, gt, beginsWith, and between. +* @param argName The name of the argument containing the sort key condition object. +* @param attributeType The type of the DynamoDB attribute in the table. +* @param queryExprReference The name of the variable containing the query expression in the template. 
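+*
+* For example (hypothetical argument "createdAt" of DynamoDB type S), a query argument of
+* { createdAt: { eq: "2019-01-01" } } appends "AND #sortKey = :sortKey" to the expression and binds
+* #sortKey to "createdAt" and :sortKey to { "S": "$ctx.args.createdAt.eq" }.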
+*/ +export function applyKeyConditionExpression(argName: string, attributeType: 'S' | 'N' | 'B' = 'S', queryExprReference: string = 'query', sortKeyName?: string, prefixVariableName?: string) { + const prefixValue = (value: string): string => prefixVariableName ? `$${prefixVariableName}#${value}` : value; + const _sortKeyName = sortKeyName ? sortKeyName : argName; + return block("Applying Key Condition", [ + iff( + raw(`!$util.isNull($ctx.args.${argName}) && !$util.isNull($ctx.args.${argName}.beginsWith)`), + compoundExpression([ + set(ref(`${queryExprReference}.expression`), raw(`"$${queryExprReference}.expression AND begins_with(#sortKey, :sortKey)"`)), + qref(`$${queryExprReference}.expressionNames.put("#sortKey", "${_sortKeyName}")`), + // TODO: Handle N & B. + qref(`$${queryExprReference}.expressionValues.put(":sortKey", { "${attributeType}": "${prefixValue(`$ctx.args.${argName}.beginsWith`)}" })`) + ]) + ), + iff( + raw(`!$util.isNull($ctx.args.${argName}) && !$util.isNull($ctx.args.${argName}.between)`), + compoundExpression([ + set(ref(`${queryExprReference}.expression`), raw(`"$${queryExprReference}.expression AND #sortKey BETWEEN :sortKey0 AND :sortKey1"`)), + qref(`$${queryExprReference}.expressionNames.put("#sortKey", "${_sortKeyName}")`), + // TODO: Handle N & B. + qref(`$${queryExprReference}.expressionValues.put(":sortKey0", { "${attributeType}": "${prefixValue(`$ctx.args.${argName}.between[0]`)}" })`), + qref(`$${queryExprReference}.expressionValues.put(":sortKey1", { "${attributeType}": "${prefixValue(`$ctx.args.${argName}.between[1]`)}" })`) + ]) + ), + iff( + raw(`!$util.isNull($ctx.args.${argName}) && !$util.isNull($ctx.args.${argName}.eq)`), + compoundExpression([ + set(ref(`${queryExprReference}.expression`), raw(`"$${queryExprReference}.expression AND #sortKey = :sortKey"`)), + qref(`$${queryExprReference}.expressionNames.put("#sortKey", "${_sortKeyName}")`), + // TODO: Handle N & B. + qref(`$${queryExprReference}.expressionValues.put(":sortKey", { "${attributeType}": "${prefixValue(`$ctx.args.${argName}.eq`)}" })`) + ]) + ), + iff( + raw(`!$util.isNull($ctx.args.${argName}) && !$util.isNull($ctx.args.${argName}.lt)`), + compoundExpression([ + set(ref(`${queryExprReference}.expression`), raw(`"$${queryExprReference}.expression AND #sortKey < :sortKey"`)), + qref(`$${queryExprReference}.expressionNames.put("#sortKey", "${_sortKeyName}")`), + // TODO: Handle N & B. + qref(`$${queryExprReference}.expressionValues.put(":sortKey", { "${attributeType}": "${prefixValue(`$ctx.args.${argName}.lt`)}" })`) + ]) + ), + iff( + raw(`!$util.isNull($ctx.args.${argName}) && !$util.isNull($ctx.args.${argName}.le)`), + compoundExpression([ + set(ref(`${queryExprReference}.expression`), raw(`"$${queryExprReference}.expression AND #sortKey <= :sortKey"`)), + qref(`$${queryExprReference}.expressionNames.put("#sortKey", "${_sortKeyName}")`), + // TODO: Handle N & B. + qref(`$${queryExprReference}.expressionValues.put(":sortKey", { "${attributeType}": "${prefixValue(`$ctx.args.${argName}.le`)}" })`) + ]) + ), + iff( + raw(`!$util.isNull($ctx.args.${argName}) && !$util.isNull($ctx.args.${argName}.gt)`), + compoundExpression([ + set(ref(`${queryExprReference}.expression`), raw(`"$${queryExprReference}.expression AND #sortKey > :sortKey"`)), + qref(`$${queryExprReference}.expressionNames.put("#sortKey", "${_sortKeyName}")`), + // TODO: Handle N & B. 
+ qref(`$${queryExprReference}.expressionValues.put(":sortKey", { "${attributeType}": "${prefixValue(`$ctx.args.${argName}.gt`)}" })`) + ]) + ), + iff( + raw(`!$util.isNull($ctx.args.${argName}) && !$util.isNull($ctx.args.${argName}.ge)`), + compoundExpression([ + set(ref(`${queryExprReference}.expression`), raw(`"$${queryExprReference}.expression AND #sortKey >= :sortKey"`)), + qref(`$${queryExprReference}.expressionNames.put("#sortKey", "${_sortKeyName}")`), + // TODO: Handle N & B. + qref(`$${queryExprReference}.expressionValues.put(":sortKey", { "${attributeType}": "${prefixValue(`$ctx.args.${argName}.ge`)}" })`) + ]) + ) + ]); +} + +/** +* Key conditions materialize as instances of ModelXKeyConditionInput passed via $ctx.args. +* If the arguments with the given sortKey name exists, create a DynamoDB expression that +* implements its logic. Possible operators: eq, le, lt, ge, gt, beginsWith, and between. +* @param argName The name of the argument containing the sort key condition object. +* @param attributeType The type of the DynamoDB attribute in the table. +* @param queryExprReference The name of the variable containing the query expression in the template. +*/ +export function applyCompositeKeyConditionExpression(keyNames: string[], queryExprReference: string = 'query', sortKeyArgumentName: string, sortKeyAttributeName: string) { + const accumulatorVar1 = 'sortKeyValue'; + const accumulatorVar2 = 'sortKeyValue2'; + const sep = ModelResourceIDs.ModelCompositeKeySeparator(); + return block("Applying Key Condition", [ + set(ref(accumulatorVar1), str("")), + set(ref(accumulatorVar2), str("")), + iff( + raw(`!$util.isNull($ctx.args.${sortKeyArgumentName}) && !$util.isNull($ctx.args.${sortKeyArgumentName}.beginsWith)`), + compoundExpression([ + ...keyNames.map( + (keyName, idx) => iff( + raw(`!$util.isNull($ctx.args.${sortKeyArgumentName}.beginsWith.${keyName})`), + idx === 0 ? + set(ref(accumulatorVar1), str(`$ctx.args.${sortKeyArgumentName}.beginsWith.${keyName}`)) : + set(ref(accumulatorVar1), str(`$${accumulatorVar1}${sep}$ctx.args.${sortKeyArgumentName}.beginsWith.${keyName}`)), + true + ) + ), + set(ref(`${queryExprReference}.expression`), raw(`"$${queryExprReference}.expression AND begins_with(#sortKey, :sortKey)"`)), + qref(`$${queryExprReference}.expressionNames.put("#sortKey", "${sortKeyAttributeName}")`), + // TODO: Handle N & B. + qref(`$${queryExprReference}.expressionValues.put(":sortKey", { "S": "$${accumulatorVar1}" })`) + ]) + ), + iff( + raw(`!$util.isNull($ctx.args.${sortKeyArgumentName}) && !$util.isNull($ctx.args.${sortKeyArgumentName}.between)`), + compoundExpression([ + iff( + raw(`$ctx.args.${sortKeyArgumentName}.between.size() != 2`), + raw(`$util.error("Argument ${sortKeyArgumentName}.between expects exactly 2 elements.")`) + ), + ...keyNames.map( + (keyName, idx) => iff( + raw(`!$util.isNull($ctx.args.${sortKeyArgumentName}.between[0].${keyName})`), + idx === 0 ? + set(ref(accumulatorVar1), str(`$ctx.args.${sortKeyArgumentName}.between[0].${keyName}`)) : + set(ref(accumulatorVar1), str(`$${accumulatorVar1}${sep}$ctx.args.${sortKeyArgumentName}.between[0].${keyName}`)), + true + )), + ...keyNames.map( + (keyName, idx) => iff( + raw(`!$util.isNull($ctx.args.${sortKeyArgumentName}.between[1].${keyName})`), + idx === 0 ? 
+ set(ref(accumulatorVar2), str(`$ctx.args.${sortKeyArgumentName}.between[1].${keyName}`)) : + set(ref(accumulatorVar2), str(`$${accumulatorVar2}${sep}$ctx.args.${sortKeyArgumentName}.between[1].${keyName}`)), + true + )), + set(ref(`${queryExprReference}.expression`), raw(`"$${queryExprReference}.expression AND #sortKey BETWEEN :sortKey0 AND :sortKey1"`)), + qref(`$${queryExprReference}.expressionNames.put("#sortKey", "${sortKeyAttributeName}")`), + // TODO: Handle N & B. + qref(`$${queryExprReference}.expressionValues.put(":sortKey0", { "S": "$${accumulatorVar1}" })`), + qref(`$${queryExprReference}.expressionValues.put(":sortKey1", { "S": "$${accumulatorVar2}" })`) + ]) + ), + iff( + raw(`!$util.isNull($ctx.args.${sortKeyArgumentName}) && !$util.isNull($ctx.args.${sortKeyArgumentName}.eq)`), + compoundExpression([ + ...keyNames.map( + (keyName, idx) => iff( + raw(`!$util.isNull($ctx.args.${sortKeyArgumentName}.eq.${keyName})`), + idx === 0 ? + set(ref(accumulatorVar1), str(`$ctx.args.${sortKeyArgumentName}.eq.${keyName}`)) : + set(ref(accumulatorVar1), str(`$${accumulatorVar1}${sep}$ctx.args.${sortKeyArgumentName}.eq.${keyName}`)), + true + )), + set(ref(`${queryExprReference}.expression`), raw(`"$${queryExprReference}.expression AND #sortKey = :sortKey"`)), + qref(`$${queryExprReference}.expressionNames.put("#sortKey", "${sortKeyAttributeName}")`), + // TODO: Handle N & B. + qref(`$${queryExprReference}.expressionValues.put(":sortKey", { "S": "$${accumulatorVar1}" })`) + ]) + ), + iff( + raw(`!$util.isNull($ctx.args.${sortKeyArgumentName}) && !$util.isNull($ctx.args.${sortKeyArgumentName}.lt)`), + compoundExpression([ + ...keyNames.map( + (keyName, idx) => iff( + raw(`!$util.isNull($ctx.args.${sortKeyArgumentName}.lt.${keyName})`), + idx === 0 ? + set(ref(accumulatorVar1), str(`$ctx.args.${sortKeyArgumentName}.lt.${keyName}`)) : + set(ref(accumulatorVar1), str(`$${accumulatorVar1}${sep}$ctx.args.${sortKeyArgumentName}.lt.${keyName}`)), + true + )), + set(ref(`${queryExprReference}.expression`), raw(`"$${queryExprReference}.expression AND #sortKey < :sortKey"`)), + qref(`$${queryExprReference}.expressionNames.put("#sortKey", "${sortKeyAttributeName}")`), + // TODO: Handle N & B. + qref(`$${queryExprReference}.expressionValues.put(":sortKey", { "S": "$${accumulatorVar1}" })`) + ]) + ), + iff( + raw(`!$util.isNull($ctx.args.${sortKeyArgumentName}) && !$util.isNull($ctx.args.${sortKeyArgumentName}.le)`), + compoundExpression([ + ...keyNames.map( + (keyName, idx) => iff( + raw(`!$util.isNull($ctx.args.${sortKeyArgumentName}.le.${keyName})`), + idx === 0 ? + set(ref(accumulatorVar1), str(`$ctx.args.${sortKeyArgumentName}.le.${keyName}`)) : + set(ref(accumulatorVar1), str(`$${accumulatorVar1}${sep}$ctx.args.${sortKeyArgumentName}.le.${keyName}`)), + true + )), + set(ref(`${queryExprReference}.expression`), raw(`"$${queryExprReference}.expression AND #sortKey <= :sortKey"`)), + qref(`$${queryExprReference}.expressionNames.put("#sortKey", "${sortKeyAttributeName}")`), + // TODO: Handle N & B. + qref(`$${queryExprReference}.expressionValues.put(":sortKey", { "S": "$${accumulatorVar1}" })`) + ]) + ), + iff( + raw(`!$util.isNull($ctx.args.${sortKeyArgumentName}) && !$util.isNull($ctx.args.${sortKeyArgumentName}.gt)`), + compoundExpression([ + ...keyNames.map( + (keyName, idx) => iff( + raw(`!$util.isNull($ctx.args.${sortKeyArgumentName}.gt.${keyName})`), + idx === 0 ? 
+ set(ref(accumulatorVar1), str(`$ctx.args.${sortKeyArgumentName}.gt.${keyName}`)) : + set(ref(accumulatorVar1), str(`$${accumulatorVar1}${sep}$ctx.args.${sortKeyArgumentName}.gt.${keyName}`)), + true + )), + set(ref(`${queryExprReference}.expression`), raw(`"$${queryExprReference}.expression AND #sortKey > :sortKey"`)), + qref(`$${queryExprReference}.expressionNames.put("#sortKey", "${sortKeyAttributeName}")`), + // TODO: Handle N & B. + qref(`$${queryExprReference}.expressionValues.put(":sortKey", { "S": "$${accumulatorVar1}" })`) + ]) + ), + iff( + raw(`!$util.isNull($ctx.args.${sortKeyArgumentName}) && !$util.isNull($ctx.args.${sortKeyArgumentName}.ge)`), + compoundExpression([ + ...keyNames.map( + (keyName, idx) => iff( + raw(`!$util.isNull($ctx.args.${sortKeyArgumentName}.ge.${keyName})`), + idx === 0 ? + set(ref(accumulatorVar1), str(`$ctx.args.${sortKeyArgumentName}.ge.${keyName}`)) : + set(ref(accumulatorVar1), str(`$${accumulatorVar1}${sep}$ctx.args.${sortKeyArgumentName}.ge.${keyName}`)), + true + )), + set(ref(`${queryExprReference}.expression`), raw(`"$${queryExprReference}.expression AND #sortKey >= :sortKey"`)), + qref(`$${queryExprReference}.expressionNames.put("#sortKey", "${sortKeyAttributeName}")`), + // TODO: Handle N & B. + qref(`$${queryExprReference}.expressionValues.put(":sortKey", { "S": "$${accumulatorVar1}" })`) + ]) + ), + newline() + ]); +} + + +/** +* Key conditions materialize as instances of ModelXKeyConditionInput passed via $ctx.args. +* If the arguments with the given sortKey name exists, create a DynamoDB expression that +* implements its logic. Possible operators: eq, le, lt, ge, gt, beginsWith, and between. +* @param argName The name of the argument containing the sort key condition object. +* @param attributeType The type of the DynamoDB attribute in the table. +* @param queryExprReference The name of the variable containing the query expression in the template. +* @param compositeKeyName When handling a managed composite key from @key the name of the arg and underlying fields are different. +* @param compositeKeyValue When handling a managed composite key from @key the value of the composite key is made up of multiple parts known by the caller. +*/ +export function applyKeyExpressionForCompositeKey(keys: string[], attributeTypes: ('S' | 'N' | 'B')[] = ['S'], queryExprReference: string = 'query') { + if (keys.length > 2) { + // In the case of > 2, we condense the composite key, validate inputs at runtime, and wire up the HASH/RANGE expressions. + // In the case of === 2, we validate inputs at runtime and wire up the HASH/RANGE expressions. + const hashKeyName = keys[0]; + const hashKeyAttributeType = attributeTypes[0]; + const sortKeys = keys.slice(1); + const sortKeyTypes = attributeTypes.slice(1); + return compoundExpression([ + validateCompositeKeyArguments(keys), + setupHashKeyExpression(hashKeyName, hashKeyAttributeType, queryExprReference), + applyCompositeSortKey(sortKeys, sortKeyTypes, queryExprReference) + ]); + } else if (keys.length === 2) { + // In the case of === 2, we validate inputs at runtime and wire up the HASH/RANGE expressions. 
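+ // For example (hypothetical), keys ["email", "createdAt"] produce a hash condition on "email" plus a scalar
+ // sort key condition (eq, le, lt, ge, gt, between, beginsWith) on "createdAt", after validating that
+ // "createdAt" is only queried when "email" is also provided.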
+ const hashKeyName = keys[0]; + const hashKeyAttributeType = attributeTypes[0]; + const sortKeyName = keys[1]; + const sortKeyAttributeType = attributeTypes[1]; + return compoundExpression([ + validateKeyArguments(keys), + setupHashKeyExpression(hashKeyName, hashKeyAttributeType, queryExprReference), + applyKeyConditionExpression(sortKeyName, sortKeyAttributeType, queryExprReference) + ]); + } else if (keys.length === 1) { + const hashKeyName = keys[0]; + const hashKeyAttributeType = attributeTypes[0]; + return setupHashKeyExpression(hashKeyName, hashKeyAttributeType, queryExprReference); + } +} + +function setupHashKeyExpression(hashKeyName: string, hashKeyAttributeType: string, queryExprReference: string) { + return iff( + raw(`!$util.isNull($ctx.args.${hashKeyName})`), + compoundExpression([ + set(ref(`${queryExprReference}.expression`), str(`#${hashKeyName} = :${hashKeyName}`)), + set(ref(`${queryExprReference}.expressionNames`), obj({ [`#${hashKeyName}`]: str(hashKeyName) })), + set(ref(`${queryExprReference}.expressionValues`), obj({ [`:${hashKeyName}`]: obj({ [hashKeyAttributeType]: str(`$ctx.args.${hashKeyName}`) }) })), + ]) + ) +} + +/** + * Applies a composite sort key to the query expression. + */ +function applyCompositeSortKey(sortKeys: string[], sortKeyTypes: ('S'|'N'|'B')[], queryExprReference: string) { + if (sortKeys.length === 0) { + return newline(); + } + // E.g. status#date + const sortKeyAttributeName = ModelResourceIDs.ModelCompositeAttributeName(sortKeys); + const sortKeyArgumentName = ModelResourceIDs.ModelCompositeKeyArgumentName(sortKeys); + return compoundExpression([ + applyCompositeKeyConditionExpression(sortKeys, queryExprReference, sortKeyArgumentName, sortKeyAttributeName) + ]) +} + +/** + * When providing keys, you must provide them from left to right. + * E.G. when providing @key(fields: ["k1", "k2", "k3"]) then you may + * query by ["k1"] or ["k1", "k2"] or ["k1", "k2", "k3"] BUT you may not + * query by ["k1", "k3"] as it is impossible to create a key condition without + * the "k2" value. This snippet fails a query/list operation when invalid + * argument sets are provided. 
+ * @param keys + */ +function validateKeyArguments(keys: string[]) { + const exprs: Expression[] = []; + if (keys.length > 1) { + for (let index = keys.length - 1; index > 0; index--) { + const rightKey = keys[index]; + const previousKey = keys[index - 1]; + exprs.push( + iff( + raw(`!$util.isNull($ctx.args.${rightKey}) && $util.isNull($ctx.args.${previousKey})`), + raw(`$util.error("When providing argument '${rightKey}' you must also provide arguments ${keys.slice(0, index).join(', ')}", "InvalidArgumentsError")`) + ) + ) + } + return block('Validate key arguments.', exprs); + } else { + return newline(); + } +} + +function invalidArgumentError(err: string) { + return raw(`$util.error("${err}", "InvalidArgumentsError")`); +} + +function validateCompositeKeyArguments(keys: string[]) { + const sortKeys = keys.slice(1); + const hashKey = keys[0]; + const sortKeyArgumentName = ModelResourceIDs.ModelCompositeKeyArgumentName(sortKeys); + const exprs: Expression[] = [ + iff( + raw(`!$util.isNull($ctx.args.${sortKeyArgumentName}) && $util.isNullOrBlank($ctx.args.${hashKey})`), + invalidArgumentError(`When providing argument '${sortKeyArgumentName}' you must also provide '${hashKey}'.`) + ) + ]; + if (sortKeys.length > 1) { + const loopOverKeys = (fn: (rKey: string, pKey: string) => Expression) => { + const exprs = []; + for (let index = sortKeys.length - 1; index > 0; index--) { + const rightKey = sortKeys[index]; + const previousKey = sortKeys[index - 1]; + exprs.push(fn(rightKey, previousKey)) + } + return compoundExpression(exprs); + } + const validateBetween = () => compoundExpression([ + iff( + raw(`$ctx.args.${sortKeyArgumentName}.between.size() != 2`), + invalidArgumentError(`Argument '${sortKeyArgumentName}.between' expects exactly two elements.`) + ), + loopOverKeys((rightKey: string, previousKey: string) => compoundExpression([ + iff( + raw(`!$util.isNullOrBlank($ctx.args.${sortKeyArgumentName}.between[0].${rightKey}) && $util.isNullOrBlank($ctx.args.${sortKeyArgumentName}.between[0].${previousKey})`), + invalidArgumentError(`When providing argument '${sortKeyArgumentName}.between[0].${rightKey}' you must also provide '${sortKeyArgumentName}.between[0].${previousKey}'.`) + ), + iff( + raw(`!$util.isNullOrBlank($ctx.args.${sortKeyArgumentName}.between[1].${rightKey}) && $util.isNullOrBlank($ctx.args.${sortKeyArgumentName}.between[1].${previousKey})`), + invalidArgumentError(`When providing argument '${sortKeyArgumentName}.between[1].${rightKey}' you must also provide '${sortKeyArgumentName}.between[1].${previousKey}'.`) + ) + ])) + ]); + const validateOtherOperation = () => loopOverKeys((rightKey: string, previousKey: string) => iff( + raw(`!$util.isNullOrBlank($ctx.args.${sortKeyArgumentName}.get("$operation").${rightKey}) && $util.isNullOrBlank($ctx.args.${sortKeyArgumentName}.get("$operation").${previousKey})`), + invalidArgumentError(`When providing argument '${sortKeyArgumentName}.$operation.${rightKey}' you must also provide '${sortKeyArgumentName}.$operation.${previousKey}'.`) + )); + exprs.push( + iff( + raw(`!$util.isNull($ctx.args.${sortKeyArgumentName})`), + compoundExpression([ + set(ref('sortKeyArgumentOperations'), raw(`$ctx.args.${sortKeyArgumentName}.keySet()`)), + iff( + raw(`$sortKeyArgumentOperations.size() > 1`), + invalidArgumentError(`Argument ${sortKeyArgumentName} must specify at most one key condition operation.`) + ), + forEach(ref('operation'), ref('sortKeyArgumentOperations'), [ + ifElse( + raw(`$operation == "between"`), + validateBetween(), + 
validateOtherOperation() + ) + ]) + ]) + ) + ) + return block('Validate key arguments.', exprs); + } else { + return newline(); + } +} diff --git a/packages/graphql-transformer-common/src/index.ts b/packages/graphql-transformer-common/src/index.ts index 8bd43b2f202..41144647900 100644 --- a/packages/graphql-transformer-common/src/index.ts +++ b/packages/graphql-transformer-common/src/index.ts @@ -7,3 +7,5 @@ export * from './SearchableResourceIDs' export * from './nodeUtils' export * from './HttpResourceIDs' export * from './FunctionResourceIDs' +export * from './connectionUtils'; +export * from './dynamodbUtils'; \ No newline at end of file diff --git a/packages/graphql-transformer-core/src/errors.ts b/packages/graphql-transformer-core/src/errors.ts index b9dcc58818f..d0cc901e005 100644 --- a/packages/graphql-transformer-core/src/errors.ts +++ b/packages/graphql-transformer-core/src/errors.ts @@ -4,6 +4,7 @@ export class InvalidTransformerError extends Error { constructor(message: string) { super(message); + Object.setPrototypeOf(this, InvalidTransformerError.prototype); this.name = "InvalidTransformerError"; if ((Error as any).captureStackTrace) { (Error as any).captureStackTrace(this, InvalidTransformerError) @@ -15,6 +16,7 @@ export class SchemaValidationError extends Error { constructor(errors: GraphQLError[]) { super(`Schema Errors:\n\n${errors.join('\n')}`); + Object.setPrototypeOf(this, SchemaValidationError.prototype); this.name = "SchemaValidationError"; if ((Error as any).captureStackTrace) { (Error as any).captureStackTrace(this, SchemaValidationError) @@ -34,6 +36,7 @@ export class TransformerContractError extends Error { constructor(message: string) { super(message); + Object.setPrototypeOf(this, TransformerContractError.prototype); this.name = "TransformerContractError"; if ((Error as any).captureStackTrace) { (Error as any).captureStackTrace(this, TransformerContractError) @@ -44,6 +47,7 @@ export class TransformerContractError extends Error { export class InvalidDirectiveError extends Error { constructor(message: string) { super(message); + Object.setPrototypeOf(this, InvalidDirectiveError.prototype); this.name = "InvalidDirectiveError"; if ((Error as any).captureStackTrace) { (Error as any).captureStackTrace(this, InvalidDirectiveError) @@ -54,6 +58,7 @@ export class InvalidDirectiveError extends Error { export class UnknownDirectiveError extends Error { constructor(message: string) { super(message); + Object.setPrototypeOf(this, UnknownDirectiveError.prototype); this.name = "UnknownDirectiveError"; if ((Error as any).captureStackTrace) { (Error as any).captureStackTrace(this, UnknownDirectiveError) diff --git a/packages/graphql-transformers-e2e-tests/package.json b/packages/graphql-transformers-e2e-tests/package.json index 58012e83d09..e262c6e9c78 100644 --- a/packages/graphql-transformers-e2e-tests/package.json +++ b/packages/graphql-transformers-e2e-tests/package.json @@ -39,6 +39,7 @@ "graphql-elasticsearch-transformer": "3.6.0", "graphql-function-transformer": "1.0.2", "graphql-versioned-transformer": "3.4.6", + "graphql-key-transformer": "1.0.0", "jest": "^23.1.0", "node-fetch": "^2.2.0", "ts-jest": "^22.4.6", diff --git a/packages/graphql-transformers-e2e-tests/src/__tests__/KeyTransformer.e2e.test.ts b/packages/graphql-transformers-e2e-tests/src/__tests__/KeyTransformer.e2e.test.ts new file mode 100644 index 00000000000..dbb85f36a40 --- /dev/null +++ b/packages/graphql-transformers-e2e-tests/src/__tests__/KeyTransformer.e2e.test.ts @@ -0,0 +1,478 @@ +import { 
ResourceConstants } from 'graphql-transformer-common' +import GraphQLTransform from 'graphql-transformer-core' +import ModelTransformer from 'graphql-dynamodb-transformer' +import KeyTransformer from 'graphql-key-transformer' +import { CloudFormationClient } from '../CloudFormationClient' +import { Output } from 'aws-sdk/clients/cloudformation' +import { GraphQLClient } from '../GraphQLClient' +import * as moment from 'moment'; +import emptyBucket from '../emptyBucket'; +import { deploy } from '../deployNestedStacks' +import { S3Client } from '../S3Client'; +import * as S3 from 'aws-sdk/clients/s3' + +jest.setTimeout(2000000); + +const cf = new CloudFormationClient('us-west-2') +const customS3Client = new S3Client('us-west-2') +const awsS3Client = new S3({ region: 'us-west-2' }) + +const BUILD_TIMESTAMP = moment().format('YYYYMMDDHHmmss') +const STACK_NAME = `KeyTransformerTests-${BUILD_TIMESTAMP}` +const BUCKET_NAME = `appsync-key-transformer-test-bucket-${BUILD_TIMESTAMP}` +const LOCAL_FS_BUILD_DIR = '/tmp/key_transformer_tests/' +const S3_ROOT_DIR_KEY = 'deployments' + +let GRAPHQL_CLIENT = undefined; + +function outputValueSelector(key: string) { + return (outputs: Output[]) => { + const output = outputs.find((o: Output) => o.OutputKey === key) + return output ? output.OutputValue : null + } +} + +beforeAll(async () => { + const validSchema = ` + type Order @model @key(fields: ["customerEmail", "createdAt"]) { + customerEmail: String! + createdAt: String! + orderId: ID! + } + type Customer @model @key(fields: ["email"]) { + email: String! + username: String + } + type Item @model + @key(fields: ["orderId", "status", "createdAt"]) + @key(name: "ByStatus", fields: ["status", "createdAt"], queryField: "itemsByStatus") + { + orderId: ID! + status: Status! + createdAt: AWSDateTime! + name: String! + } + enum Status { + DELIVERED IN_TRANSIT PENDING UNKNOWN + } + type ShippingUpdate @model + @key(name: "ByOrderItemStatus", fields: ["orderId", "itemId", "status"], queryField: "shippingUpdates") + { + id: ID! + orderId: ID + itemId: ID + status: Status + name: String + } + ` + try { + await awsS3Client.createBucket({Bucket: BUCKET_NAME}).promise() + } catch (e) { console.warn(`Could not create bucket: ${e}`) } + const transformer = new GraphQLTransform({ + transformers: [ + new ModelTransformer(), + new KeyTransformer() + ] + }) + const out = transformer.transform(validSchema); + const finishedStack = await deploy( + customS3Client, cf, STACK_NAME, out, { env: 'dev' }, LOCAL_FS_BUILD_DIR, BUCKET_NAME, S3_ROOT_DIR_KEY, + BUILD_TIMESTAMP + ) + // Arbitrary wait to make sure everything is ready. 
+ await cf.wait(5, () => Promise.resolve()) + console.log('Successfully created stack ' + STACK_NAME) + console.log(finishedStack) + expect(finishedStack).toBeDefined() + const getApiEndpoint = outputValueSelector(ResourceConstants.OUTPUTS.GraphQLAPIEndpointOutput) + const getApiKey = outputValueSelector(ResourceConstants.OUTPUTS.GraphQLAPIApiKeyOutput) + const endpoint = getApiEndpoint(finishedStack.Outputs) + const apiKey = getApiKey(finishedStack.Outputs) + expect(apiKey).toBeDefined() + expect(endpoint).toBeDefined() + GRAPHQL_CLIENT = new GraphQLClient(endpoint, { 'x-api-key': apiKey }) +}); + +// afterAll(async () => { +// try { +// console.log('Deleting stack ' + STACK_NAME) +// await cf.deleteStack(STACK_NAME) +// // await cf.waitForStack(STACK_NAME) +// console.log('Successfully deleted stack ' + STACK_NAME) +// } catch (e) { +// if (e.code === 'ValidationError' && e.message === `Stack with id ${STACK_NAME} does not exist`) { +// // The stack was deleted. This is good. +// expect(true).toEqual(true) +// console.log('Successfully deleted stack ' + STACK_NAME) +// } else { +// console.error(e) +// expect(true).toEqual(false) +// } +// } +// try { +// await emptyBucket(BUCKET_NAME); +// } catch (e) { console.warn(`Error during bucket cleanup: ${e}`)} +// }) + +/** + * Test queries below + */ +test('Test getX with a two part primary key.', async () => { + const order1 = await createOrder('test@gmail.com', '1'); + const getOrder1 = await getOrder('test@gmail.com', order1.data.createOrder.createdAt) + expect(getOrder1.data.getOrder.orderId).toEqual('1'); +}) + +test('Test updateX with a two part primary key.', async () => { + const order2 = await createOrder('test3@gmail.com', '2'); + let getOrder2 = await getOrder('test3@gmail.com', order2.data.createOrder.createdAt) + expect(getOrder2.data.getOrder.orderId).toEqual('2'); + const updateOrder2 = await updateOrder('test3@gmail.com', order2.data.createOrder.createdAt, '3') + expect(updateOrder2.data.updateOrder.orderId).toEqual('3'); + getOrder2 = await getOrder('test3@gmail.com', order2.data.createOrder.createdAt) + expect(getOrder2.data.getOrder.orderId).toEqual('3'); +}) + +test('Test deleteX with a two part primary key.', async () => { + const order2 = await createOrder('test2@gmail.com', '2'); + let getOrder2 = await getOrder('test2@gmail.com', order2.data.createOrder.createdAt) + expect(getOrder2.data.getOrder.orderId).toEqual('2'); + const delOrder2 = await deleteOrder('test2@gmail.com', order2.data.createOrder.createdAt) + expect(delOrder2.data.deleteOrder.orderId).toEqual('2'); + getOrder2 = await getOrder('test2@gmail.com', order2.data.createOrder.createdAt) + expect(getOrder2.data.getOrder).toBeNull(); +}) + +test('Test getX with a three part primary key', async () => { + const item1 = await createItem('1', 'PENDING', 'item1'); + const getItem1 = await getItem('1', 'PENDING', item1.data.createItem.createdAt); + expect(getItem1.data.getItem.orderId).toEqual('1'); + expect(getItem1.data.getItem.status).toEqual('PENDING'); +}) + +test('Test updateX with a three part primary key.', async () => { + const item2 = await createItem('2', 'PENDING', 'item2'); + let getItem2 = await getItem('2', 'PENDING', item2.data.createItem.createdAt) + expect(getItem2.data.getItem.orderId).toEqual('2'); + const updateItem2 = await updateItem('2', 'PENDING', item2.data.createItem.createdAt, 'item2.1') + expect(updateItem2.data.updateItem.name).toEqual('item2.1'); + getItem2 = await getItem('2', 'PENDING', item2.data.createItem.createdAt) + 
expect(getItem2.data.getItem.name).toEqual('item2.1'); +}) + +test('Test deleteX with a three part primary key.', async () => { + const item3 = await createItem('3', 'IN_TRANSIT', 'item3'); + let getItem3 = await getItem('3', 'IN_TRANSIT', item3.data.createItem.createdAt) + expect(getItem3.data.getItem.name).toEqual('item3'); + const delItem3 = await deleteItem('3', 'IN_TRANSIT', item3.data.createItem.createdAt) + expect(delItem3.data.deleteItem.name).toEqual('item3'); + getItem3 = await getItem('3', 'IN_TRANSIT', item3.data.createItem.createdAt); + expect(getItem3.data.getItem).toBeNull(); +}) + +test('Test listX with three part primary key.', async () => { + const hashKey = 'TEST_LIST_ID'; + await createItem(hashKey, 'IN_TRANSIT', 'list1', '2018-01-01T00:01:01.000Z'); + await createItem(hashKey, 'PENDING', 'list2', '2018-06-01T00:01:01.000Z'); + await createItem(hashKey, 'PENDING', 'item3', '2018-09-01T00:01:01.000Z'); + let items = await listItem(undefined); + expect(items.data.listItems.items.length).toBeGreaterThan(0); + items = await listItem(hashKey); + expect(items.data.listItems.items).toHaveLength(3) + items = await listItem(hashKey, { beginsWith: { status: 'PENDING' } }); + expect(items.data.listItems.items).toHaveLength(2) + items = await listItem(hashKey, { beginsWith: { status: 'IN_TRANSIT' } }); + expect(items.data.listItems.items).toHaveLength(1) + items = await listItem(hashKey, { beginsWith: { status: 'PENDING', createdAt: '2018-09' } }); + expect(items.data.listItems.items).toHaveLength(1) + items = await listItem(hashKey, { eq: { status: 'PENDING', createdAt: '2018-09-01T00:01:01.000Z' } }); + expect(items.data.listItems.items).toHaveLength(1) + items = await listItem(hashKey, { between: [{ status: 'PENDING', createdAt: '2018-08-01' }, { status: 'PENDING', createdAt: '2018-10-01' }] }); + expect(items.data.listItems.items).toHaveLength(1) + items = await listItem(hashKey, { gt: { status: 'PENDING', createdAt: '2018-08-1'}}); + expect(items.data.listItems.items).toHaveLength(1) + items = await listItem(hashKey, { ge: { status: 'PENDING', createdAt: '2018-09-01T00:01:01.000Z'}}); + expect(items.data.listItems.items).toHaveLength(1) + items = await listItem(hashKey, { lt: { status: 'IN_TRANSIT', createdAt: '2018-01-02'}}); + expect(items.data.listItems.items).toHaveLength(1) + items = await listItem(hashKey, { le: { status: 'IN_TRANSIT', createdAt: '2018-01-01T00:01:01.000Z'}}); + expect(items.data.listItems.items).toHaveLength(1) +}) + +test('Test query with three part secondary key.', async () => { + const hashKey = 'UNKNOWN'; + await createItem('order1', 'UNKNOWN', 'list1', '2018-01-01T00:01:01.000Z'); + await createItem('order2', 'UNKNOWN', 'list2', '2018-06-01T00:01:01.000Z'); + await createItem('order3', 'UNKNOWN', 'item3', '2018-09-01T00:01:01.000Z'); + let items = await itemsByStatus(undefined); + expect(items.data).toBeNull(); + expect(items.errors.length).toBeGreaterThan(0); + items = await itemsByStatus(hashKey); + expect(items.data.itemsByStatus.items).toHaveLength(3) + items = await itemsByStatus(hashKey, { beginsWith: '2018-09' }); + expect(items.data.itemsByStatus.items).toHaveLength(1) + items = await itemsByStatus(hashKey, { eq: '2018-09-01T00:01:01.000Z' }); + expect(items.data.itemsByStatus.items).toHaveLength(1) + items = await itemsByStatus(hashKey, { between: ['2018-08-01', '2018-10-01'] }); + expect(items.data.itemsByStatus.items).toHaveLength(1) + items = await itemsByStatus(hashKey, { gt: '2018-08-01' }); + 
expect(items.data.itemsByStatus.items).toHaveLength(1) + items = await itemsByStatus(hashKey, { ge: '2018-09-01' }); + expect(items.data.itemsByStatus.items).toHaveLength(1) + items = await itemsByStatus(hashKey, { lt: '2018-07-01' }); + expect(items.data.itemsByStatus.items).toHaveLength(2) + items = await itemsByStatus(hashKey, { le: '2018-06-01' }); + expect(items.data.itemsByStatus.items).toHaveLength(1) + items = await itemsByStatus(undefined, { le: '2018-09-01' }); + expect(items.data).toBeNull() + expect(items.errors.length).toBeGreaterThan(0); +}) + +test('Test update mutation validation with three part secondary key.', async () => { + await createShippingUpdate('order1', 'item1', 'PENDING', 'name1'); + const items = await getShippingUpdates('order1'); + expect(items.data.shippingUpdates.items).toHaveLength(1); + const item = items.data.shippingUpdates.items[0]; + expect(item.name).toEqual('name1') + const updateResponseMissingLastSortKey = await updateShippingUpdate({ id: item.id, orderId: 'order1', itemId: 'item1', name: 'name2'}); + expect(updateResponseMissingLastSortKey.data.updateShippingUpdate).toBeNull(); + expect(updateResponseMissingLastSortKey.errors).toHaveLength(1); + const updateResponseMissingFirstSortKey = await updateShippingUpdate({ id: item.id, orderId: 'order1', status: 'PENDING', name: 'name3'}); + expect(updateResponseMissingFirstSortKey.data.updateShippingUpdate).toBeNull(); + expect(updateResponseMissingFirstSortKey.errors).toHaveLength(1); + const updateResponseMissingAllSortKeys = await updateShippingUpdate({ id: item.id, orderId: 'order1', name: 'testing'}); + expect(updateResponseMissingAllSortKeys.data.updateShippingUpdate.name).toEqual('testing') + const updateResponseMissingNoKeys = await updateShippingUpdate({ id: item.id, orderId: 'order1', itemId: 'item1', status: 'PENDING', name: 'testing2' }); + expect(updateResponseMissingNoKeys.data.updateShippingUpdate.name).toEqual('testing2') +}) + +async function createOrder(customerEmail: string, orderId: string) { + const result = await GRAPHQL_CLIENT.query(`mutation CreateOrder($input: CreateOrderInput!) { + createOrder(input: $input) { + customerEmail + orderId + createdAt + } + }`, { + input: { customerEmail, orderId, createdAt: new Date().toISOString() } + }); + console.log(JSON.stringify(result, null, 4)); + return result; +} + +async function updateOrder(customerEmail: string, createdAt: string, orderId: string) { + const result = await GRAPHQL_CLIENT.query(`mutation UpdateOrder($input: UpdateOrderInput!) { + updateOrder(input: $input) { + customerEmail + orderId + createdAt + } + }`, { + input: { customerEmail, orderId, createdAt } + }); + console.log(JSON.stringify(result, null, 4)); + return result; +} + +async function deleteOrder(customerEmail: string, createdAt: string) { + const result = await GRAPHQL_CLIENT.query(`mutation DeleteOrder($input: DeleteOrderInput!) { + deleteOrder(input: $input) { + customerEmail + orderId + createdAt + } + }`, { + input: { customerEmail, createdAt } + }); + console.log(JSON.stringify(result, null, 4)); + return result; +} + +async function getOrder(customerEmail: string, createdAt: string) { + const result = await GRAPHQL_CLIENT.query(`query GetOrder($customerEmail: String!, $createdAt: String!) 
{ + getOrder(customerEmail: $customerEmail, createdAt: $createdAt) { + customerEmail + orderId + createdAt + } + }`, { customerEmail, createdAt }); + console.log(JSON.stringify(result, null, 4)); + return result; +} + +async function createItem(orderId: string, status: string, name: string, createdAt: string = new Date().toISOString()) { + const input = { status, orderId, name, createdAt }; + const result = await GRAPHQL_CLIENT.query(`mutation CreateItem($input: CreateItemInput!) { + createItem(input: $input) { + orderId + status + createdAt + name + } + }`, { + input + }); + console.log(`Running create: ${JSON.stringify(input)}`); + console.log(JSON.stringify(result, null, 4)); + return result; +} + +async function updateItem(orderId: string, status: string, createdAt: string, name: string) { + const input = { status, orderId, createdAt, name }; + const result = await GRAPHQL_CLIENT.query(`mutation UpdateItem($input: UpdateItemInput!) { + updateItem(input: $input) { + orderId + status + createdAt + name + } + }`, { + input + }); + console.log(`Running create: ${JSON.stringify(input)}`); + console.log(JSON.stringify(result, null, 4)); + return result; +} + +async function deleteItem(orderId: string, status: string, createdAt: string) { + const input = { orderId, status, createdAt }; + const result = await GRAPHQL_CLIENT.query(`mutation DeleteItem($input: DeleteItemInput!) { + deleteItem(input: $input) { + orderId + status + createdAt + name + } + }`, { + input + }); + console.log(`Running delete: ${JSON.stringify(input)}`); + console.log(JSON.stringify(result, null, 4)); + return result; +} + +async function getItem(orderId: string, status: string, createdAt: string) { + const result = await GRAPHQL_CLIENT.query(`query GetItem($orderId: ID!, $status: Status!, $createdAt: AWSDateTime!) 
{ + getItem(orderId: $orderId, status: $status, createdAt: $createdAt) { + orderId + status + createdAt + name + } + }`, { orderId, status, createdAt }); + console.log(JSON.stringify(result, null, 4)); + return result; +} + +interface StringKeyConditionInput { + eq?: string, + gt?: string, + ge?: string, + lt?: string, + le?: string, + between?: string[], + beginsWith?: string, +} + +interface ItemCompositeKeyConditionInput { + eq?: ItemCompositeKeyInput, + gt?: ItemCompositeKeyInput, + ge?: ItemCompositeKeyInput, + lt?: ItemCompositeKeyInput, + le?: ItemCompositeKeyInput, + between?: ItemCompositeKeyInput[], + beginsWith?: ItemCompositeKeyInput, +} +interface ItemCompositeKeyInput { + status?: string, + createdAt?: string +} +async function listItem(orderId?: string, statusCreatedAt?: ItemCompositeKeyConditionInput, limit?: number, nextToken?: string) { + const result = await GRAPHQL_CLIENT.query(`query ListItems($orderId: ID, $statusCreatedAt: ModelItemPrimaryCompositeKeyConditionInput, $limit: Int, $nextToken: String) { + listItems(orderId: $orderId, statusCreatedAt: $statusCreatedAt, limit: $limit, nextToken: $nextToken) { + items { + orderId + status + createdAt + name + } + nextToken + } + }`, { orderId, statusCreatedAt, limit, nextToken }); + console.log(JSON.stringify(result, null, 4)); + return result; +} + +async function itemsByStatus(status: string, createdAt?: StringKeyConditionInput, limit?: number, nextToken?: string) { + const result = await GRAPHQL_CLIENT.query(`query ListByStatus($status: Status!, $createdAt: ModelStringKeyConditionInput, $limit: Int, $nextToken: String) { + itemsByStatus(status: $status, createdAt: $createdAt, limit: $limit, nextToken: $nextToken) { + items { + orderId + status + createdAt + name + } + nextToken + } + }`, { status, createdAt, limit, nextToken }); + console.log(JSON.stringify(result, null, 4)); + return result; +} + +async function createShippingUpdate(orderId: string, itemId: string, status: string, name?: string) { + const input = { status, orderId, itemId, name }; + const result = await GRAPHQL_CLIENT.query(`mutation CreateShippingUpdate($input: CreateShippingUpdateInput!) { + createShippingUpdate(input: $input) { + orderId + status + itemId + name + id + } + }`, { + input + }); + console.log(`Running create: ${JSON.stringify(input)}`); + console.log(JSON.stringify(result, null, 4)); + return result; +} + +interface UpdateShippingInput { + id: string, orderId?: string, status?: string, itemId?: string, name?: string +} +async function updateShippingUpdate(input: UpdateShippingInput) { + // const input = { id, status, orderId, itemId, name }; + const result = await GRAPHQL_CLIENT.query(`mutation UpdateShippingUpdate($input: UpdateShippingUpdateInput!) { + updateShippingUpdate(input: $input) { + orderId + status + itemId + name + id + } + }`, { + input + }); + console.log(`Running update: ${JSON.stringify(input)}`); + console.log(JSON.stringify(result, null, 4)); + return result; +} + +async function getShippingUpdates(orderId: string) { + const result = await GRAPHQL_CLIENT.query(`query GetShippingUpdates($orderId: ID!) 
{ + shippingUpdates(orderId: $orderId) { + items { + id + orderId + status + itemId + name + } + nextToken + } + }`, { orderId }); + console.log(JSON.stringify(result, null, 4)); + return result; +} + + diff --git a/packages/graphql-transformers-e2e-tests/src/__tests__/KeyTransformerLocal.e2e.test.ts b/packages/graphql-transformers-e2e-tests/src/__tests__/KeyTransformerLocal.e2e.test.ts new file mode 100644 index 00000000000..1a0a859f546 --- /dev/null +++ b/packages/graphql-transformers-e2e-tests/src/__tests__/KeyTransformerLocal.e2e.test.ts @@ -0,0 +1,325 @@ +import GraphQLTransform, { Transformer, InvalidDirectiveError } from 'graphql-transformer-core' +import ModelTransformer from 'graphql-dynamodb-transformer'; +import KeyTransformer from 'graphql-key-transformer'; +import { parse, FieldDefinitionNode, ObjectTypeDefinitionNode } from 'graphql'; +import { expectArguments, expectFields, expectNonNullFields, expectNullableFields } from '../testUtil'; + +test('Test that a primary @key with a single field changes the hash key.', () => { + const validSchema = ` + type Test @model @key(fields: ["email"]) { + email: String! + } + ` + + const transformer = new GraphQLTransform({ + transformers: [ + new ModelTransformer(), + new KeyTransformer() + ] + }); + + const out = transformer.transform(validSchema); + let tableResource = out.stacks.Test.Resources.TestTable; + expect(tableResource).toBeDefined() + expect( + tableResource.Properties.KeySchema[0].AttributeName, + ).toEqual('email'); + expect( + tableResource.Properties.KeySchema[0].KeyType, + ).toEqual('HASH'); + expect( + tableResource.Properties.AttributeDefinitions[0].AttributeType, + ).toEqual('S'); + const schema = parse(out.schema); + const queryType = schema.definitions.find((def: any) => def.name && def.name.value === 'Query') as ObjectTypeDefinitionNode; + const getTestField = queryType.fields.find(f => f.name && f.name.value === 'getTest') as FieldDefinitionNode; + expect(getTestField.arguments).toHaveLength(1); + expectArguments(getTestField, ['email']) +}) + +test('Test that a primary @key with 2 fields changes the hash and sort key.', () => { + const validSchema = ` + type Test @model @key(fields: ["email", "kind"]) { + email: String! + kind: Int! 
+ } + ` + + const transformer = new GraphQLTransform({ + transformers: [ + new ModelTransformer(), + new KeyTransformer() + ] + }); + + const out = transformer.transform(validSchema); + let tableResource = out.stacks.Test.Resources.TestTable; + expect(tableResource).toBeDefined() + const hashKey = tableResource.Properties.KeySchema.find(o => o.KeyType === 'HASH'); + const hashKeyAttr = tableResource.Properties.AttributeDefinitions.find(o => o.AttributeName === 'email'); + const rangeKey = tableResource.Properties.KeySchema.find(o => o.KeyType === 'RANGE'); + const rangeKeyAttr = tableResource.Properties.AttributeDefinitions.find(o => o.AttributeName === 'kind'); + expect(tableResource.Properties.AttributeDefinitions).toHaveLength(2); + expect(hashKey.AttributeName).toEqual('email'); + expect(rangeKey.AttributeName).toEqual('kind'); + expect(hashKeyAttr.AttributeType).toEqual('S'); + expect(rangeKeyAttr.AttributeType).toEqual('N'); + + const schema = parse(out.schema); + const queryType = schema.definitions.find((def: any) => def.name && def.name.value === 'Query') as ObjectTypeDefinitionNode; + const getTestField = queryType.fields.find(f => f.name && f.name.value === 'getTest') as FieldDefinitionNode; + expect(getTestField.arguments).toHaveLength(2); + expectArguments(getTestField, ['email', 'kind']) +}) + +test('Test that a primary @key with 3 fields changes the hash and sort keys.', () => { + const validSchema = ` + type Test @model @key(fields: ["email", "kind", "date"]) { + email: String! + kind: Int! + date: AWSDateTime! + } + ` + + const transformer = new GraphQLTransform({ + transformers: [ + new ModelTransformer(), + new KeyTransformer() + ] + }); + + const out = transformer.transform(validSchema); + let tableResource = out.stacks.Test.Resources.TestTable; + expect(tableResource).toBeDefined() + const hashKey = tableResource.Properties.KeySchema.find(o => o.KeyType === 'HASH'); + const hashKeyAttr = tableResource.Properties.AttributeDefinitions.find(o => o.AttributeName === 'email'); + const rangeKey = tableResource.Properties.KeySchema.find(o => o.KeyType === 'RANGE'); + const rangeKeyAttr = tableResource.Properties.AttributeDefinitions.find(o => o.AttributeName === 'kind#date'); + expect(tableResource.Properties.AttributeDefinitions).toHaveLength(2); + expect(hashKey.AttributeName).toEqual('email'); + expect(rangeKey.AttributeName).toEqual('kind#date'); + expect(hashKeyAttr.AttributeType).toEqual('S'); + // composite keys will always be strings. + expect(rangeKeyAttr.AttributeType).toEqual('S'); + + const schema = parse(out.schema); + const queryType = schema.definitions.find((def: any) => def.name && def.name.value === 'Query') as ObjectTypeDefinitionNode; + const getTestField = queryType.fields.find(f => f.name && f.name.value === 'getTest') as FieldDefinitionNode; + expect(getTestField.arguments).toHaveLength(3); + expectArguments(getTestField, ['email', 'kind', 'date']); + + const listTestField = queryType.fields.find(f => f.name && f.name.value === 'listTests') as FieldDefinitionNode; + expect(listTestField.arguments).toHaveLength(5); + expectArguments(listTestField, ['email', 'kindDate', 'filter', 'nextToken', 'limit']); +}) + +test('Test that a secondary @key with 3 fields changes the hash and sort keys and adds a query fields correctly.', () => { + const validSchema = ` + type Test @model @key(name: "GSI", fields: ["email", "kind", "date"], queryField: "listByEmailKindDate") { + email: String! + kind: Int! + date: AWSDateTime! 
+ } + ` + + const transformer = new GraphQLTransform({ + transformers: [ + new ModelTransformer(), + new KeyTransformer() + ] + }); + + const out = transformer.transform(validSchema); + console.log(out.schema); + let tableResource = out.stacks.Test.Resources.TestTable; + expect(tableResource).toBeDefined() + const hashKey = tableResource.Properties.KeySchema.find(o => o.KeyType === 'HASH'); + const hashKeyAttr = tableResource.Properties.AttributeDefinitions.find(o => o.AttributeName === 'email'); + expect(tableResource.Properties.AttributeDefinitions).toHaveLength(3); + expect(hashKey.AttributeName).toEqual('id'); + expect(hashKeyAttr.AttributeType).toEqual('S'); + // composite keys will always be strings. + + const gsi = tableResource.Properties.GlobalSecondaryIndexes.find(o => o.IndexName === 'GSI') + const gsiHashKey = gsi.KeySchema.find(o => o.KeyType === 'HASH'); + const gsiHashKeyAttr = tableResource.Properties.AttributeDefinitions.find(o => o.AttributeName === 'email'); + const gsiRangeKey = gsi.KeySchema.find(o => o.KeyType === 'RANGE'); + const gsiRangeKeyAttr = tableResource.Properties.AttributeDefinitions.find(o => o.AttributeName === 'kind#date'); + expect(gsiHashKey.AttributeName).toEqual('email'); + expect(gsiRangeKey.AttributeName).toEqual('kind#date'); + expect(gsiHashKeyAttr.AttributeType).toEqual('S'); + expect(gsiRangeKeyAttr.AttributeType).toEqual('S'); + + const schema = parse(out.schema); + const queryType = schema.definitions.find((def: any) => def.name && def.name.value === 'Query') as ObjectTypeDefinitionNode; + const getTestField = queryType.fields.find(f => f.name && f.name.value === 'getTest') as FieldDefinitionNode; + expect(getTestField.arguments).toHaveLength(1); + expectArguments(getTestField, ['id']); + + const queryField = queryType.fields.find(f => f.name && f.name.value === 'listByEmailKindDate') as FieldDefinitionNode; + expect(queryField.arguments).toHaveLength(5); + expectArguments(queryField, ['email', 'kindDate', 'filter', 'nextToken', 'limit']); + + const listTestField = queryType.fields.find(f => f.name && f.name.value === 'listTests') as FieldDefinitionNode; + expect(listTestField.arguments).toHaveLength(3); + expectArguments(listTestField, ['filter', 'nextToken', 'limit']); +}) + +test('Test that a secondary @key with a single field adds a GSI.', () => { + const validSchema = ` + type Test @model @key(name: "GSI_Email", fields: ["email"], queryField: "testsByEmail") { + id: ID! + email: String! 
+ } + ` + + const transformer = new GraphQLTransform({ + transformers: [ + new ModelTransformer(), + new KeyTransformer() + ] + }); + + const out = transformer.transform(validSchema); + let tableResource = out.stacks.Test.Resources.TestTable; + expect(tableResource).toBeDefined() + expect( + tableResource.Properties.GlobalSecondaryIndexes[0].KeySchema[0].AttributeName, + ).toEqual('email'); + expect( + tableResource.Properties.GlobalSecondaryIndexes[0].KeySchema[0].KeyType, + ).toEqual('HASH'); + expect( + tableResource.Properties.AttributeDefinitions.find(ad => ad.AttributeName === 'email').AttributeType, + ).toEqual('S'); + const schema = parse(out.schema); + const queryType = schema.definitions.find((def: any) => def.name && def.name.value === 'Query') as ObjectTypeDefinitionNode; + const getField = queryType.fields.find(f => f.name.value === 'getTest'); + expect(getField.arguments).toHaveLength(1); + expectArguments(getField, ['id']) + const listTestsField = queryType.fields.find(f => f.name && f.name.value === 'listTests') as FieldDefinitionNode; + expect(listTestsField.arguments).toHaveLength(3); + expectArguments(listTestsField, ['filter', 'nextToken', 'limit']); + const queryIndexField = queryType.fields.find(f => f.name && f.name.value === 'testsByEmail') as FieldDefinitionNode; + expect(queryIndexField.arguments).toHaveLength(4); + expectArguments(queryIndexField, ['email', 'filter', 'nextToken', 'limit']); +}) + +test('Test that a secondary @key with a multiple field adds an GSI.', () => { + const validSchema = ` + type Test @model @key(fields: ["email", "createdAt"]) @key(name: "CategoryGSI", fields: ["category", "createdAt"], queryField: "testsByCategory") { + email: String! + createdAt: String! + category: String! + description: String + } + ` + + const transformer = new GraphQLTransform({ + transformers: [ + new ModelTransformer(), + new KeyTransformer() + ] + }); + + const out = transformer.transform(validSchema); + let tableResource = out.stacks.Test.Resources.TestTable; + expect(tableResource).toBeDefined() + expect( + tableResource.Properties.GlobalSecondaryIndexes[0].KeySchema[0].AttributeName, + ).toEqual('category'); + expect( + tableResource.Properties.GlobalSecondaryIndexes[0].KeySchema[0].KeyType, + ).toEqual('HASH'); + expect( + tableResource.Properties.GlobalSecondaryIndexes[0].KeySchema[1].AttributeName, + ).toEqual('createdAt'); + expect( + tableResource.Properties.GlobalSecondaryIndexes[0].KeySchema[1].KeyType, + ).toEqual('RANGE'); + expect( + tableResource.Properties.AttributeDefinitions.find(ad => ad.AttributeName === 'email').AttributeType, + ).toEqual('S'); + expect( + tableResource.Properties.AttributeDefinitions.find(ad => ad.AttributeName === 'category').AttributeType, + ).toEqual('S'); + expect( + tableResource.Properties.AttributeDefinitions.find(ad => ad.AttributeName === 'createdAt').AttributeType, + ).toEqual('S'); + const schema = parse(out.schema); + const queryType = schema.definitions.find((def: any) => def.name && def.name.value === 'Query') as ObjectTypeDefinitionNode; + const queryIndexField = queryType.fields.find(f => f.name && f.name.value === 'testsByCategory') as FieldDefinitionNode; + expect(queryIndexField.arguments).toHaveLength(5); + expectArguments(queryIndexField, ['category', 'createdAt', 'filter', 'nextToken', 'limit']); + + // When using a complex primary key args are added to the list field. They are optional and if provided, will use a Query instead of a Scan. 
+ const listTestsField = queryType.fields.find(f => f.name && f.name.value === 'listTests') as FieldDefinitionNode; + expect(listTestsField.arguments).toHaveLength(5); + expectArguments(listTestsField, ['email', 'createdAt', 'filter', 'nextToken', 'limit']); + + // Check the create, update, delete inputs. + const createInput = schema.definitions.find((def: any) => def.name && def.name.value === 'CreateTestInput') as ObjectTypeDefinitionNode; + expectNonNullFields(createInput, ['email', 'createdAt', 'category']); + expectNullableFields(createInput, ['description']); + expect(createInput.fields).toHaveLength(4); + const updateInput = schema.definitions.find((def: any) => def.name && def.name.value === 'UpdateTestInput') as ObjectTypeDefinitionNode; + expectNonNullFields(updateInput, ['email', 'createdAt']); + expectNullableFields(updateInput, ['category', 'description']); + expect(updateInput.fields).toHaveLength(4); + const deleteInput = schema.definitions.find((def: any) => def.name && def.name.value === 'DeleteTestInput') as ObjectTypeDefinitionNode; + expectNonNullFields(deleteInput, ['email', 'createdAt']); + expect(deleteInput.fields).toHaveLength(2); +}) + + +test('Test that a secondary @key with a multiple field adds an LSI.', () => { + const validSchema = ` + type Test @model @key(fields: ["email", "createdAt"]) @key(name: "GSI_Email_UpdatedAt", fields: ["email", "updatedAt"], queryField: "testsByEmailByUpdatedAt") { + email: String! + createdAt: String! + updatedAt: String! + } + ` + + const transformer = new GraphQLTransform({ + transformers: [ + new ModelTransformer(), + new KeyTransformer() + ] + }); + + const out = transformer.transform(validSchema); + let tableResource = out.stacks.Test.Resources.TestTable; + expect(tableResource).toBeDefined() + expect( + tableResource.Properties.LocalSecondaryIndexes[0].KeySchema[0].AttributeName, + ).toEqual('email'); + expect( + tableResource.Properties.LocalSecondaryIndexes[0].KeySchema[0].KeyType, + ).toEqual('HASH'); + expect( + tableResource.Properties.LocalSecondaryIndexes[0].KeySchema[1].AttributeName, + ).toEqual('updatedAt'); + expect( + tableResource.Properties.LocalSecondaryIndexes[0].KeySchema[1].KeyType, + ).toEqual('RANGE'); + expect( + tableResource.Properties.AttributeDefinitions.find(ad => ad.AttributeName === 'email').AttributeType, + ).toEqual('S'); + expect( + tableResource.Properties.AttributeDefinitions.find(ad => ad.AttributeName === 'updatedAt').AttributeType, + ).toEqual('S'); + expect( + tableResource.Properties.AttributeDefinitions.find(ad => ad.AttributeName === 'createdAt').AttributeType, + ).toEqual('S'); + const schema = parse(out.schema); + const queryType = schema.definitions.find((def: any) => def.name && def.name.value === 'Query') as ObjectTypeDefinitionNode; + const queryIndexField = queryType.fields.find(f => f.name && f.name.value === 'testsByEmailByUpdatedAt') as FieldDefinitionNode; + expect(queryIndexField.arguments).toHaveLength(5); + expectArguments(queryIndexField, ['email', 'updatedAt', 'filter', 'nextToken', 'limit']); + + // When using a complex primary key args are added to the list field. They are optional and if provided, will use a Query instead of a Scan. 
+ const listTestsField = queryType.fields.find(f => f.name && f.name.value === 'listTests') as FieldDefinitionNode; + expect(listTestsField.arguments).toHaveLength(5); + expectArguments(listTestsField, ['email', 'createdAt', 'filter', 'nextToken', 'limit']); +}) diff --git a/packages/graphql-transformers-e2e-tests/src/__tests__/KeyWithAuth.e2e.test.ts b/packages/graphql-transformers-e2e-tests/src/__tests__/KeyWithAuth.e2e.test.ts new file mode 100644 index 00000000000..5b01e14b240 --- /dev/null +++ b/packages/graphql-transformers-e2e-tests/src/__tests__/KeyWithAuth.e2e.test.ts @@ -0,0 +1,356 @@ +import Amplify, { Auth } from 'aws-amplify'; +import { ResourceConstants } from 'graphql-transformer-common' +import GraphQLTransform from 'graphql-transformer-core' +import DynamoDBModelTransformer from 'graphql-dynamodb-transformer' +import ModelAuthTransformer from 'graphql-auth-transformer' +import KeyTransformer from 'graphql-key-transformer' +import * as fs from 'fs' +import { CloudFormationClient } from '../CloudFormationClient' +import { Output } from 'aws-sdk/clients/cloudformation' +import * as CognitoClient from 'aws-sdk/clients/cognitoidentityserviceprovider' +import * as S3 from 'aws-sdk/clients/s3' +import { GraphQLClient } from '../GraphQLClient' +import { S3Client } from '../S3Client'; +import * as path from 'path' +import { deploy } from '../deployNestedStacks' +import * as moment from 'moment'; +import emptyBucket from '../emptyBucket'; +import { + createUserPool, createUserPoolClient, deleteUserPool, + signupAndAuthenticateUser, createGroup, addUserToGroup, + configureAmplify + } from '../cognitoUtils'; + +// to deal with bug in cognito-identity-js +(global as any).fetch = require("node-fetch"); + +jest.setTimeout(2000000); + +const cf = new CloudFormationClient('us-west-2') + +const BUILD_TIMESTAMP = moment().format('YYYYMMDDHHmmss') +const STACK_NAME = `KeyWithAuth-${BUILD_TIMESTAMP}` +const BUCKET_NAME = `appsync-key-with-auth-test-bucket-${BUILD_TIMESTAMP}` +const LOCAL_FS_BUILD_DIR = '/tmp/key_auth_transform_tests/' +const S3_ROOT_DIR_KEY = 'deployments' + +let GRAPHQL_ENDPOINT = undefined; + +/** + * Client 1 is logged in and is a member of the Admin group. + */ +let GRAPHQL_CLIENT_1 = undefined; + +/** + * Client 1 is logged in and is a member of the Admin group via an access token. + */ +let GRAPHQL_CLIENT_1_ACCESS = undefined; + +/** + * Client 2 is logged in and is a member of the Devs group. + */ +let GRAPHQL_CLIENT_2 = undefined; + +/** + * Client 3 is logged in and has no group memberships. + */ +let GRAPHQL_CLIENT_3 = undefined; + +let USER_POOL_ID = undefined; + +const USERNAME1 = 'user1@test.com' +const USERNAME2 = 'user2@test.com' +const USERNAME3 = 'user3@test.com' +const TMP_PASSWORD = 'Password123!' +const REAL_PASSWORD = 'Password1234!' + +const ADMIN_GROUP_NAME = 'Admin'; +const DEVS_GROUP_NAME = 'Devs'; +const PARTICIPANT_GROUP_NAME = 'Participant'; +const WATCHER_GROUP_NAME = 'Watcher'; + +const cognitoClient = new CognitoClient({ apiVersion: '2016-04-19', region: 'us-west-2' }) +const customS3Client = new S3Client('us-west-2') +const awsS3Client = new S3({ region: 'us-west-2' }) + +function outputValueSelector(key: string) { + return (outputs: Output[]) => { + const output = outputs.find((o: Output) => o.OutputKey === key) + return output ? 
output.OutputValue : null + } +} + +function deleteDirectory(directory: string) { + const files = fs.readdirSync(directory) + for (const file of files) { + const contentPath = path.join(directory, file) + if (fs.lstatSync(contentPath).isDirectory()) { + deleteDirectory(contentPath) + fs.rmdirSync(contentPath) + } else { + fs.unlinkSync(contentPath) + } + } +} + +beforeAll(async () => { + // Create a stack for the post model with auth enabled. + const validSchema = ` + type Order + @model + @key(fields: ["customerEmail", "orderId"]) + @key(name: "GSI", fields: ["orderId"], queryField: "ordersByOrderId") + @auth(rules: [{ allow: owner, ownerField: "customerEmail" }, { allow: groups, groups: ["Admin"] }]) + { + customerEmail: String! + createdAt: String + orderId: String! + } + ` + const transformer = new GraphQLTransform({ + transformers: [ + new DynamoDBModelTransformer(), + new KeyTransformer(), + new ModelAuthTransformer({ authMode: 'AMAZON_COGNITO_USER_POOLS' }) + ] + }) + try { + await awsS3Client.createBucket({Bucket: BUCKET_NAME}).promise() + } catch (e) { + console.error(`Failed to create bucket: ${e}`) + } + const userPoolResponse = await createUserPool(cognitoClient, `UserPool${STACK_NAME}`); + USER_POOL_ID = userPoolResponse.UserPool.Id; + const userPoolClientResponse = await createUserPoolClient(cognitoClient, USER_POOL_ID, `UserPool${STACK_NAME}`); + const userPoolClientId = userPoolClientResponse.UserPoolClient.ClientId; + try { + // Clean the bucket + const out = transformer.transform(validSchema) + const finishedStack = await deploy( + customS3Client, cf, STACK_NAME, out, { AuthCognitoUserPoolId: USER_POOL_ID }, LOCAL_FS_BUILD_DIR, BUCKET_NAME, S3_ROOT_DIR_KEY, + BUILD_TIMESTAMP + ) + expect(finishedStack).toBeDefined() + const getApiEndpoint = outputValueSelector(ResourceConstants.OUTPUTS.GraphQLAPIEndpointOutput) + const getApiKey = outputValueSelector(ResourceConstants.OUTPUTS.GraphQLAPIApiKeyOutput) + GRAPHQL_ENDPOINT = getApiEndpoint(finishedStack.Outputs) + console.log(`Using graphql url: ${GRAPHQL_ENDPOINT}`); + + const apiKey = getApiKey(finishedStack.Outputs) + console.log(`API KEY: ${apiKey}`); + expect(apiKey).not.toBeTruthy() + + // Verify we have all the details + expect(GRAPHQL_ENDPOINT).toBeTruthy() + expect(USER_POOL_ID).toBeTruthy() + expect(userPoolClientId).toBeTruthy() + + // Configure Amplify, create users, and sign in. 
+ configureAmplify(USER_POOL_ID, userPoolClientId) + + const authRes: any = await signupAndAuthenticateUser(USER_POOL_ID, USERNAME1, TMP_PASSWORD, REAL_PASSWORD) + const authRes2: any = await signupAndAuthenticateUser(USER_POOL_ID, USERNAME2, TMP_PASSWORD, REAL_PASSWORD) + const authRes3: any = await signupAndAuthenticateUser(USER_POOL_ID, USERNAME3, TMP_PASSWORD, REAL_PASSWORD) + + await createGroup(USER_POOL_ID, ADMIN_GROUP_NAME) + await createGroup(USER_POOL_ID, PARTICIPANT_GROUP_NAME) + await createGroup(USER_POOL_ID, WATCHER_GROUP_NAME) + await createGroup(USER_POOL_ID, DEVS_GROUP_NAME) + await addUserToGroup(ADMIN_GROUP_NAME, USERNAME1, USER_POOL_ID) + await addUserToGroup(PARTICIPANT_GROUP_NAME, USERNAME1, USER_POOL_ID) + await addUserToGroup(WATCHER_GROUP_NAME, USERNAME1, USER_POOL_ID) + await addUserToGroup(DEVS_GROUP_NAME, USERNAME2, USER_POOL_ID) + const authResAfterGroup: any = await signupAndAuthenticateUser(USER_POOL_ID, USERNAME1, TMP_PASSWORD, REAL_PASSWORD) + + const idToken = authResAfterGroup.getIdToken().getJwtToken() + GRAPHQL_CLIENT_1 = new GraphQLClient(GRAPHQL_ENDPOINT, { Authorization: idToken }) + + const accessToken = authResAfterGroup.getAccessToken().getJwtToken() + GRAPHQL_CLIENT_1_ACCESS = new GraphQLClient(GRAPHQL_ENDPOINT, { Authorization: accessToken }) + + const authRes2AfterGroup: any = await signupAndAuthenticateUser(USER_POOL_ID, USERNAME2, TMP_PASSWORD, REAL_PASSWORD) + const idToken2 = authRes2AfterGroup.getIdToken().getJwtToken() + GRAPHQL_CLIENT_2 = new GraphQLClient(GRAPHQL_ENDPOINT, { Authorization: idToken2 }) + + const idToken3 = authRes3.getIdToken().getJwtToken() + GRAPHQL_CLIENT_3 = new GraphQLClient(GRAPHQL_ENDPOINT, { Authorization: idToken3 }) + + // Wait for any propagation to avoid random + // "The security token included in the request is invalid" errors + await new Promise((res) => setTimeout(() => res(), 5000)) + } catch (e) { + console.error(e) + expect(true).toEqual(false) + } +}); + + +afterAll(async () => { + try { + console.log('Deleting stack ' + STACK_NAME) + await cf.deleteStack(STACK_NAME) + await deleteUserPool(cognitoClient, USER_POOL_ID) + await cf.waitForStack(STACK_NAME) + console.log('Successfully deleted stack ' + STACK_NAME) + } catch (e) { + if (e.code === 'ValidationError' && e.message === `Stack with id ${STACK_NAME} does not exist`) { + // The stack was deleted. This is good. 
+ expect(true).toEqual(true) + console.log('Successfully deleted stack ' + STACK_NAME) + } else { + console.error(e) + expect(true).toEqual(false) + } + } + try { + await emptyBucket(BUCKET_NAME); + } catch (e) { + console.error(`Failed to empty S3 bucket: ${e}`) + } +}) + +/** + * Test queries below + */ +test('Test createOrder mutation as admin', async () => { + const response = await createOrder(GRAPHQL_CLIENT_1, USERNAME2, "order1"); + expect(response.data.createOrder.customerEmail).toBeDefined() + expect(response.data.createOrder.orderId).toEqual('order1') + expect(response.data.createOrder.createdAt).toBeDefined() +}) + +test('Test createOrder mutation as owner', async () => { + const response = await createOrder(GRAPHQL_CLIENT_2, USERNAME2, "order2"); + expect(response.data.createOrder.customerEmail).toBeDefined() + expect(response.data.createOrder.orderId).toEqual('order2') + expect(response.data.createOrder.createdAt).toBeDefined() +}) + +test('Test createOrder mutation as owner', async () => { + const response = await createOrder(GRAPHQL_CLIENT_3, USERNAME2, "order3"); + expect(response.data.createOrder).toBeNull(); + expect(response.errors).toHaveLength(1); +}) + +test('Test list orders as owner', async () => { + await createOrder(GRAPHQL_CLIENT_3, USERNAME3, "owned1") + await createOrder(GRAPHQL_CLIENT_3, USERNAME3, "owned2") + const listResponse = await listOrders(GRAPHQL_CLIENT_3, USERNAME3, { beginsWith: "owned" }) + expect(listResponse.data.listOrders.items).toHaveLength(2); +}) + +test('Test list orders as non owner', async () => { + await createOrder(GRAPHQL_CLIENT_3, USERNAME3, "unowned1") + await createOrder(GRAPHQL_CLIENT_3, USERNAME3, "unowned2") + const listResponse = await listOrders(GRAPHQL_CLIENT_2, USERNAME3, { beginsWith: "unowned" }) + expect(listResponse.data.listOrders.items).toHaveLength(0); +}) + +test('Test get orders as owner', async () => { + await createOrder(GRAPHQL_CLIENT_2, USERNAME2, "myobj") + const getResponse = await getOrder(GRAPHQL_CLIENT_2, USERNAME2, "myobj") + expect(getResponse.data.getOrder.orderId).toEqual("myobj"); +}) + +test('Test get orders as non-owner', async () => { + await createOrder(GRAPHQL_CLIENT_2, USERNAME2, "notmyobj") + const getResponse = await getOrder(GRAPHQL_CLIENT_3, USERNAME2, "notmyobj") + expect(getResponse.data.getOrder).toBeNull(); + expect(getResponse.errors).toHaveLength(1); +}) + +test('Test query orders as owner', async () => { + await createOrder(GRAPHQL_CLIENT_3, USERNAME3, "ownedby3a") + const listResponse = await ordersByOrderId(GRAPHQL_CLIENT_3, "ownedby3a") + expect(listResponse.data.ordersByOrderId.items).toHaveLength(1); +}) + +test('Test query orders as non owner', async () => { + await createOrder(GRAPHQL_CLIENT_3, USERNAME3, "notownedby2a") + const listResponse = await ordersByOrderId(GRAPHQL_CLIENT_2, "notownedby2a") + expect(listResponse.data.ordersByOrderId.items).toHaveLength(0); +}) + +async function createOrder(client: GraphQLClient, customerEmail: string, orderId: string) { + const result = await client.query(`mutation CreateOrder($input: CreateOrderInput!) { + createOrder(input: $input) { + customerEmail + orderId + createdAt + } + }`, { + input: { customerEmail, orderId } + }); + console.log(JSON.stringify(result, null, 4)); + return result; +} + +async function updateOrder(client: GraphQLClient, customerEmail: string, orderId: string) { + const result = await client.query(`mutation UpdateOrder($input: UpdateOrderInput!) 
{ + updateOrder(input: $input) { + customerEmail + orderId + createdAt + } + }`, { + input: { customerEmail, orderId } + }); + console.log(JSON.stringify(result, null, 4)); + return result; +} + +async function deleteOrder(client: GraphQLClient, customerEmail: string, orderId: string) { + const result = await client.query(`mutation DeleteOrder($input: DeleteOrderInput!) { + deleteOrder(input: $input) { + customerEmail + orderId + createdAt + } + }`, { + input: { customerEmail, orderId } + }); + console.log(JSON.stringify(result, null, 4)); + return result; +} + +async function getOrder(client: GraphQLClient, customerEmail: string, orderId: string) { + const result = await client.query(`query GetOrder($customerEmail: String!, $orderId: String!) { + getOrder(customerEmail: $customerEmail, orderId: $orderId) { + customerEmail + orderId + createdAt + } + }`, { customerEmail, orderId }); + console.log(JSON.stringify(result, null, 4)); + return result; +} + +async function listOrders(client: GraphQLClient, customerEmail: string, orderId: { beginsWith: string }) { + const result = await client.query(`query ListOrder($customerEmail: String, $orderId: ModelStringKeyConditionInput) { + listOrders(customerEmail: $customerEmail, orderId: $orderId) { + items { + customerEmail + orderId + createdAt + } + nextToken + } + }`, { customerEmail, orderId }); + console.log(JSON.stringify(result, null, 4)); + return result; +} + +async function ordersByOrderId(client: GraphQLClient, orderId: string) { + const result = await client.query(`query OrdersByOrderId($orderId: String!) { + ordersByOrderId(orderId: $orderId) { + items { + customerEmail + orderId + createdAt + } + nextToken + } + }`, { orderId }); + console.log(JSON.stringify(result, null, 4)); + return result; +} \ No newline at end of file diff --git a/packages/graphql-transformers-e2e-tests/src/testUtil.ts b/packages/graphql-transformers-e2e-tests/src/testUtil.ts new file mode 100644 index 00000000000..e5aa332bfc0 --- /dev/null +++ b/packages/graphql-transformers-e2e-tests/src/testUtil.ts @@ -0,0 +1,56 @@ +import { + ObjectTypeDefinitionNode, FieldDefinitionNode, DocumentNode, + InputObjectTypeDefinitionNode, Kind, InputValueDefinitionNode, + DefinitionNode +} from 'graphql'; +import { isNonNullType } from 'graphql-transformer-common'; + +export function expectFields(type: ObjectTypeDefinitionNode, fields: string[]) { + for (const fieldName of fields) { + const foundField = type.fields.find((f: FieldDefinitionNode) => f.name.value === fieldName) + expect(foundField).toBeDefined() + } +} + +export function expectNonNullFields(type: ObjectTypeDefinitionNode, fields: string[]) { + for (const fieldName of fields) { + const foundField = type.fields.find((f: FieldDefinitionNode) => f.name.value === fieldName) + expect(foundField).toBeDefined() + expect(isNonNullType(foundField.type)).toBeTruthy(); + } +} + +export function expectNullableFields(type: ObjectTypeDefinitionNode, fields: string[]) { + for (const fieldName of fields) { + const foundField = type.fields.find((f: FieldDefinitionNode) => f.name.value === fieldName) + expect(foundField).toBeDefined() + expect(isNonNullType(foundField.type)).toBeFalsy(); + } +} + +export function expectArguments(field: FieldDefinitionNode, args: string[]) { + for (const argName of args) { + const foundArg = field.arguments.find((a: InputValueDefinitionNode) => a.name.value === argName) + expect(foundArg).toBeDefined() + } +} + +export function doNotExpectFields(type: ObjectTypeDefinitionNode, fields: string[]) { + for 
(const fieldName of fields) {
+        expect(
+            type.fields.find((f: FieldDefinitionNode) => f.name.value === fieldName)
+        ).toBeUndefined()
+    }
+}
+
+export function getObjectType(doc: DocumentNode, type: string): ObjectTypeDefinitionNode | undefined {
+    return doc.definitions.find(
+        (def: DefinitionNode) => def.kind === Kind.OBJECT_TYPE_DEFINITION && def.name.value === type
+    ) as ObjectTypeDefinitionNode | undefined
+}
+
+export function getInputType(doc: DocumentNode, type: string): InputObjectTypeDefinitionNode | undefined {
+    return doc.definitions.find(
+        (def: DefinitionNode) => def.kind === Kind.INPUT_OBJECT_TYPE_DEFINITION && def.name.value === type
+    ) as InputObjectTypeDefinitionNode | undefined
+}
\ No newline at end of file
diff --git a/testing-custom-indexes.md b/testing-custom-indexes.md
new file mode 100644
index 00000000000..253a376f220
--- /dev/null
+++ b/testing-custom-indexes.md
@@ -0,0 +1,163 @@
+# Testing Custom Indexes
+
+The `@key` directive makes it simple to configure complex key structures in DynamoDB.
+The first thing to do when starting to build an application on top of DynamoDB is to think about access patterns.
+
+DynamoDB is a distributed hash table that can execute efficient range queries on extremely large data sets, but doing so comes with a few restrictions. DynamoDB query operations use at most two attributes to efficiently query data. Even more restrictive is that the first attribute (the partition key) must use strict equality and the second attribute may use gt, ge, lt, le, eq, beginsWith, and between (there is no 'ne'). DynamoDB provides features and design best practices to help get around these restrictions. A few features/patterns are:
+
+1. Secondary Indexes - Create new data structures that store information in a different way to enable new access patterns. Incurs extra cost.
+2. Composite Keys - Store two or more logical fields in a single attribute so that more than two logical fields can be used in a range query.
+3. Index overloading - Store more than one logical entity in a single index. Different logical entities may contain entirely different types of data. This allows a single index to power more than one access pattern for one or more logical entities.
+
+The `@key` directive, in addition to allowing you to define custom primary index structures, helps with items 1 and 2 above. The `@key` directive does not automatically overload indexes, although this may be a possibility going forward. This is the definition of `@key`:
+
+```graphql
+# @param name - When provided, specifies the name of the secondary index. There may be one @key without a 'name' per @model type.
+# @param fields (required) - Specifies the logical fields that should be included in the index's key structure.
+# @param queryField - When provided, specifies the name of the top level query field that should be created to query the secondary index.
+# Primary @keys are not allowed to have a queryField because the listX query is already being updated to work with the primary key.
+directive @key(name: String, fields: [String!]!, queryField: String) on OBJECT
+```
+
+For example, let's say we are building some kind of e-commerce application and need to facilitate these access patterns:
+
+1. Get orders by customer by createdAt.
+2. Get customers by email.
+3. Get items by order by status by createdAt.
+4. Get items by status by createdAt.
+
+When thinking about your access patterns, it is useful to lay them out using the same "by X by Y" structure I have here.
+Once you have them laid out like this, you can translate them directly into a `@key` by including the "X" and "Y" values as `fields`.
+For example, to **Get orders by customer by createdAt**, I would create a `@key`:
+
+```graphql
+@key(fields: ["customerEmail", "createdAt"])
+```
+
+We can use the `@key` directive to quickly create an API & data model for this application.
+
+1. Clone this repository and check out the `feature/@key` branch.
+
+```bash
+git clone https://github.com/mikeparisstuff/amplify-cli.git
+cd amplify-cli
+git checkout feature/@key
+```
+
+2. Run `npm run setup-dev` from the repo's root directory.
+
+3. Create a new directory somewhere else and initialize the Amplify project.
+
+```bash
+mkdir testing-key
+cd testing-key
+amplify init
+# ...
+amplify add api
+# ...
+# Say you don't have a schema, use the guided schema creation,
+# and open the simplest model in your editor. Replace the schema with the one below.
+```
+
+```graphql
+# A @key without a 'name' specifies the primary key. You may only provide 1 per @model type.
+# The @key creates a primary key where the HASH KEY = "customerEmail" and the SORT KEY = "createdAt".
+type Order @model @key(fields: ["customerEmail", "createdAt"]) {
+    customerEmail: String!
+    createdAt: String!
+    orderId: ID!
+}
+# A @key with one field creates a primary key with a HASH KEY = "email".
+type Customer @model @key(fields: ["email"]) {
+    email: String!
+    username: String
+}
+# The primary @key with 3 fields does something a little special.
+# The first field "orderId" will be the HASH KEY as expected, BUT the SORT KEY will be
+# a new composite key named 'status#createdAt' that is made of the "status" and "createdAt" fields.
+# The AppSync resolvers will automatically stitch together the new composite key so the client does not need to worry about that detail.
+# The @key with name = "ByStatus" specifies a secondary index where the HASH KEY = "status" (an enum) and the SORT KEY = "createdAt".
+# The second @key directive also specifies that a top level query field named "itemsByStatus" should be created to query this index in AppSync.
+type Item @model
+    @key(fields: ["orderId", "status", "createdAt"])
+    @key(name: "ByStatus", fields: ["status", "createdAt"], queryField: "itemsByStatus")
+{
+    orderId: ID!
+    status: Status!
+    createdAt: AWSDateTime!
+    name: String!
+}
+enum Status {
+    DELIVERED IN_TRANSIT PENDING UNKNOWN
+}
+```
+
+4. You can test the schema above with these queries/mutations:
+
+```graphql
+mutation CreateItem($input: CreateItemInput!) {
+    createItem(input: $input) {
+        orderId
+        status
+        createdAt
+        name
+    }
+}
+
+mutation UpdateItem($input: UpdateItemInput!) {
+    updateItem(input: $input) {
+        orderId
+        status
+        createdAt
+        name
+    }
+}
+
+mutation DeleteItem($input: DeleteItemInput!) {
+    deleteItem(input: $input) {
+        orderId
+        status
+        createdAt
+        name
+    }
+}
+
+# GetItem takes 3 arguments because the primary @key specifies 3 fields.
+query GetItem($orderId: ID!, $status: Status!, $createdAt: String!) {
+    getItem(orderId: $orderId, status: $status, createdAt: $createdAt) {
+        orderId
+        status
+        createdAt
+        name
+    }
+}
+
+# ListItems takes additional arguments because the primary @key specifies 3 fields.
+# Note: There is one thing that is likely going to change around the structure of `$createdAt: ModelStringKeyConditionInput`.
+query ListItems($orderId: ID, $status: Status, $createdAt: ModelStringKeyConditionInput, $limit: Int, $nextToken: String) {
+    listItems(orderId: $orderId, status: $status, createdAt: $createdAt, limit: $limit, nextToken: $nextToken) {
+        items {
+            orderId
+            status
+            createdAt
+            name
+        }
+        nextToken
+    }
+}
+
+# We may use our new top level query field to query secondary @keys.
+query ListByStatus($status: Status!, $createdAt: ModelStringKeyConditionInput, $limit: Int, $nextToken: String) {
+    itemsByStatus(status: $status, createdAt: $createdAt, limit: $limit, nextToken: $nextToken) {
+        items {
+            orderId
+            status
+            createdAt
+            name
+        }
+        nextToken
+    }
+}
+```
+
+5. Provide feedback in the issues tab.
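+
+For reference, here is an illustrative sketch (not output generated by the transformer) of one way to exercise the "Get items by status by createdAt" access pattern through the `itemsByStatus` query field. It assumes the generated `ModelStringKeyConditionInput` exposes the same sort key operators listed above (eq, le, lt, ge, gt, between, beginsWith); the enum value and timestamps are placeholder data.
+
+```graphql
+# Hypothetical example: fetch PENDING items created during May 2019, 10 at a time.
+query PendingItemsInMay {
+    itemsByStatus(
+        status: PENDING,
+        createdAt: { between: ["2019-05-01T00:00:00.000Z", "2019-06-01T00:00:00.000Z"] },
+        limit: 10
+    ) {
+        items {
+            orderId
+            status
+            createdAt
+            name
+        }
+        nextToken
+    }
+}
+```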