diff --git a/.changeset/fuzzy-tips-clap.md b/.changeset/fuzzy-tips-clap.md
new file mode 100644
index 0000000000000..5dd00f4c08aa3
--- /dev/null
+++ b/.changeset/fuzzy-tips-clap.md
@@ -0,0 +1,10 @@
+---
+"@medusajs/modules-sdk": patch
+"@medusajs/types": patch
+"@medusajs/pricing": patch
+"@medusajs/product": patch
+"@medusajs/inventory": patch
+"@medusajs/stock-location": patch
+---
+
+Medusa App loading modules reference
diff --git a/.changeset/gentle-pots-enjoy.md b/.changeset/gentle-pots-enjoy.md
new file mode 100644
index 0000000000000..c6e75da20ab6f
--- /dev/null
+++ b/.changeset/gentle-pots-enjoy.md
@@ -0,0 +1,5 @@
+---
+"@medusajs/ui": patch
+---
+
+feature(ui): Adds a `size` variant to the `` component and prevents clicks from propagating to parent elements. Also adds additional sizes to the `` component.
diff --git a/.changeset/pink-balloons-search.md b/.changeset/pink-balloons-search.md
new file mode 100644
index 0000000000000..cab0d69c78652
--- /dev/null
+++ b/.changeset/pink-balloons-search.md
@@ -0,0 +1,11 @@
+---
+"@medusajs/workflow-engine-inmemory": patch
+"@medusajs/workflow-engine-redis": patch
+"@medusajs/orchestration": patch
+"@medusajs/workflows-sdk": patch
+"@medusajs/modules-sdk": patch
+"@medusajs/types": patch
+"@medusajs/utils": patch
+---
+
+Modules: Workflows Engine in-memory and Redis
diff --git a/.changeset/young-forks-worry.md b/.changeset/young-forks-worry.md
new file mode 100644
index 0000000000000..80a0ead825ef2
--- /dev/null
+++ b/.changeset/young-forks-worry.md
@@ -0,0 +1,6 @@
+---
+"@medusajs/medusa": patch
+"@medusajs/types": patch
+---
+
+feat(medusa,types): added buyget support for modules
diff --git a/.eslintignore b/.eslintignore
index d05cda3f13e99..2af245f62d738 100644
--- a/.eslintignore
+++ b/.eslintignore
@@ -24,6 +24,9 @@ packages/*
 !packages/orchestration
 !packages/workflows-sdk
 !packages/core-flows
+!packages/workflow-engine-redis
+!packages/workflow-engine-inmemory
+
 **/models/*
diff --git a/.eslintrc.js b/.eslintrc.js
index 22a44eb6283d4..caafd0d9f6bf7 100644
--- a/.eslintrc.js
+++ b/.eslintrc.js
@@ -72,9 +72,7 @@ module.exports = {
     node: true,
     jest: true,
   },
-  ignorePatterns: [
-    "packages/admin-next/dashboard/**/dist"
-  ],
+  ignorePatterns: ["packages/admin-next/dashboard/**/dist"],
   overrides: [
     {
       files: ["*.ts"],
@@ -101,6 +99,8 @@ module.exports = {
         "./packages/orchestration/tsconfig.json",
         "./packages/workflows-sdk/tsconfig.spec.json",
         "./packages/core-flows/tsconfig.spec.json",
+        "./packages/workflow-engine-redis/tsconfig.spec.json",
+        "./packages/workflow-engine-inmemory/tsconfig.spec.json",
       ],
     },
     rules: {
diff --git a/integration-tests/plugins/__tests__/product/admin/index.ts b/integration-tests/plugins/__tests__/product/admin/index.ts
index 8d514214e778d..3449f83f8ad39 100644
--- a/integration-tests/plugins/__tests__/product/admin/index.ts
+++ b/integration-tests/plugins/__tests__/product/admin/index.ts
@@ -16,7 +16,7 @@ import {
 } from "../../../../factories"
 import { createDefaultRuleTypes } from "../../../helpers/create-default-rule-types"
 
-jest.setTimeout(5000000)
+jest.setTimeout(50000)
 
 const adminHeaders = {
   headers: {
diff --git a/integration-tests/plugins/__tests__/promotion/admin/create-promotion.spec.ts b/integration-tests/plugins/__tests__/promotion/admin/create-promotion.spec.ts
index 33206fd1c9bb9..aa0ccf669c258 100644
--- a/integration-tests/plugins/__tests__/promotion/admin/create-promotion.spec.ts
+++ b/integration-tests/plugins/__tests__/promotion/admin/create-promotion.spec.ts
@@ -62,7 +62,7 @@ describe("POST 
/admin/promotions", () => { ) }) - it("should create a promotion successfully", async () => { + it("should create a standard promotion successfully", async () => { const api = useApi() as any const response = await api.post( `/admin/promotions`, @@ -148,4 +148,194 @@ describe("POST /admin/promotions", () => { }) ) }) + + it("should throw an error if buy_rules params are not passed", async () => { + const api = useApi() as any + const { response } = await api + .post( + `/admin/promotions`, + { + code: "TEST", + type: PromotionType.BUYGET, + is_automatic: true, + application_method: { + target_type: "items", + type: "fixed", + allocation: "each", + value: "100", + max_quantity: 100, + target_rules: [ + { + attribute: "test.test", + operator: "eq", + values: ["test1", "test2"], + }, + ], + }, + rules: [ + { + attribute: "test.test", + operator: "eq", + values: ["test1", "test2"], + }, + ], + }, + adminHeaders + ) + .catch((e) => e) + + expect(response.status).toEqual(400) + expect(response.data.message).toEqual( + "Buy rules are required for buyget promotion type" + ) + }) + + it("should throw an error if buy_rules params are not passed", async () => { + const api = useApi() as any + const { response } = await api + .post( + `/admin/promotions`, + { + code: "TEST", + type: PromotionType.BUYGET, + is_automatic: true, + application_method: { + target_type: "items", + type: "fixed", + allocation: "each", + value: "100", + max_quantity: 100, + buy_rules: [ + { + attribute: "test.test", + operator: "eq", + values: ["test1", "test2"], + }, + ], + }, + rules: [ + { + attribute: "test.test", + operator: "eq", + values: ["test1", "test2"], + }, + ], + }, + adminHeaders + ) + .catch((e) => e) + + expect(response.status).toEqual(400) + expect(response.data.message).toEqual( + "Target rules are required for buyget promotion type" + ) + }) + + it("should create a buyget promotion successfully", async () => { + const api = useApi() as any + const response = await api.post( + `/admin/promotions`, + { + code: "TEST", + type: PromotionType.BUYGET, + is_automatic: true, + campaign: { + name: "test", + campaign_identifier: "test-1", + budget: { + type: "usage", + limit: 100, + }, + }, + application_method: { + target_type: "items", + type: "fixed", + allocation: "each", + value: "100", + max_quantity: 100, + apply_to_quantity: 1, + buy_rules_min_quantity: 1, + target_rules: [ + { + attribute: "test.test", + operator: "eq", + values: ["test1", "test2"], + }, + ], + buy_rules: [ + { + attribute: "test.test", + operator: "eq", + values: ["test1", "test2"], + }, + ], + }, + rules: [ + { + attribute: "test.test", + operator: "eq", + values: ["test1", "test2"], + }, + ], + }, + adminHeaders + ) + + expect(response.status).toEqual(200) + expect(response.data.promotion).toEqual( + expect.objectContaining({ + id: expect.any(String), + code: "TEST", + type: "buyget", + is_automatic: true, + campaign: expect.objectContaining({ + name: "test", + campaign_identifier: "test-1", + budget: expect.objectContaining({ + type: "usage", + limit: 100, + }), + }), + application_method: expect.objectContaining({ + value: 100, + max_quantity: 100, + type: "fixed", + target_type: "items", + allocation: "each", + apply_to_quantity: 1, + buy_rules_min_quantity: 1, + target_rules: [ + expect.objectContaining({ + operator: "eq", + attribute: "test.test", + values: expect.arrayContaining([ + expect.objectContaining({ value: "test1" }), + expect.objectContaining({ value: "test2" }), + ]), + }), + ], + buy_rules: [ + expect.objectContaining({ 
+ operator: "eq", + attribute: "test.test", + values: expect.arrayContaining([ + expect.objectContaining({ value: "test1" }), + expect.objectContaining({ value: "test2" }), + ]), + }), + ], + }), + rules: [ + expect.objectContaining({ + operator: "eq", + attribute: "test.test", + values: expect.arrayContaining([ + expect.objectContaining({ value: "test1" }), + expect.objectContaining({ value: "test2" }), + ]), + }), + ], + }) + ) + }) }) diff --git a/integration-tests/plugins/__tests__/promotion/admin/retrieve-promotion.spec.ts b/integration-tests/plugins/__tests__/promotion/admin/retrieve-promotion.spec.ts index f0cc7301f0e76..95f7d56da36d6 100644 --- a/integration-tests/plugins/__tests__/promotion/admin/retrieve-promotion.spec.ts +++ b/integration-tests/plugins/__tests__/promotion/admin/retrieve-promotion.spec.ts @@ -74,28 +74,30 @@ describe("GET /admin/promotions", () => { ) expect(response.status).toEqual(200) - expect(response.data.promotion).toEqual({ - id: expect.any(String), - code: "TEST", - campaign: null, - is_automatic: false, - type: "standard", - created_at: expect.any(String), - updated_at: expect.any(String), - deleted_at: null, - application_method: { + expect(response.data.promotion).toEqual( + expect.objectContaining({ id: expect.any(String), - promotion: expect.any(Object), - value: 100, - type: "fixed", - target_type: "order", - max_quantity: 0, - allocation: null, + code: "TEST", + campaign: null, + is_automatic: false, + type: "standard", created_at: expect.any(String), updated_at: expect.any(String), deleted_at: null, - }, - }) + application_method: expect.objectContaining({ + id: expect.any(String), + promotion: expect.any(Object), + value: 100, + type: "fixed", + target_type: "order", + max_quantity: 0, + allocation: null, + created_at: expect.any(String), + updated_at: expect.any(String), + deleted_at: null, + }), + }) + ) }) it("should get the requested promotion with filtered fields and relations", async () => { diff --git a/integration-tests/plugins/__tests__/promotion/admin/update-promotion.spec.ts b/integration-tests/plugins/__tests__/promotion/admin/update-promotion.spec.ts index 85f701a1be4e7..4ce4652c713f9 100644 --- a/integration-tests/plugins/__tests__/promotion/admin/update-promotion.spec.ts +++ b/integration-tests/plugins/__tests__/promotion/admin/update-promotion.spec.ts @@ -132,4 +132,58 @@ describe("POST /admin/promotions/:id", () => { }) ) }) + + it("should update a buyget promotion successfully", async () => { + const createdPromotion = await promotionModuleService.create({ + code: "PROMOTION_TEST", + type: PromotionType.BUYGET, + application_method: { + type: "fixed", + target_type: "items", + allocation: "across", + value: "100", + apply_to_quantity: 1, + buy_rules_min_quantity: 1, + buy_rules: [ + { + attribute: "product_collection.id", + operator: "eq", + values: ["pcol_towel"], + }, + ], + target_rules: [ + { + attribute: "product.id", + operator: "eq", + values: "prod_mat", + }, + ], + }, + }) + + const api = useApi() as any + const response = await api.post( + `/admin/promotions/${createdPromotion.id}`, + { + code: "TEST_TWO", + application_method: { + value: "200", + buy_rules_min_quantity: 6, + }, + }, + adminHeaders + ) + + expect(response.status).toEqual(200) + expect(response.data.promotion).toEqual( + expect.objectContaining({ + id: expect.any(String), + code: "TEST_TWO", + application_method: expect.objectContaining({ + value: 200, + buy_rules_min_quantity: 6, + }), + }) + ) + }) }) diff --git a/package.json b/package.json index 
336ea9e9a3681..b4a476e036ad5 100644 --- a/package.json +++ b/package.json @@ -65,15 +65,15 @@ "scripts": { "hooks:install": "husky install", "hooks:uninstall": "husky uninstall", - "build": "turbo run build --no-daemon", + "build": "turbo run build --concurrency=50% --no-daemon", "lint": "eslint --ignore-path .eslintignore --ext .js,.ts,.tsx .", "prettier": "prettier", "jest": "jest", - "test": "turbo run test --no-daemon", - "test:integration:packages": "turbo run test:integration --no-daemon --filter='./packages/*'", - "test:integration:api": "turbo run test:integration --no-daemon --filter=integration-tests-api", - "test:integration:plugins": "turbo run test:integration --no-daemon --filter=integration-tests-plugins", - "test:integration:repositories": "turbo run test:integration --no-daemon --filter=integration-tests-repositories", + "test": "turbo run test --concurrency=50% --no-daemon", + "test:integration:packages": "turbo run test:integration --concurrency=1 --no-daemon --filter='./packages/*'", + "test:integration:api": "turbo run test:integration --concurrency=50% --no-daemon --filter=integration-tests-api", + "test:integration:plugins": "turbo run test:integration --concurrency=50% --no-daemon --filter=integration-tests-plugins", + "test:integration:repositories": "turbo run test:integration --concurrency=50% --no-daemon --filter=integration-tests-repositories", "openapi:generate": "yarn ./packages/oas/oas-github-ci run ci --with-full-file", "medusa-oas": "yarn ./packages/oas/medusa-oas-cli run medusa-oas", "release:snapshot": "changeset publish --no-git-tags --snapshot --tag snapshot", diff --git a/packages/admin-next/dashboard/public/locales/en/translation.json b/packages/admin-next/dashboard/public/locales/en/translation.json index 57dfc15fafc57..8d0c34af3155d 100644 --- a/packages/admin-next/dashboard/public/locales/en/translation.json +++ b/packages/admin-next/dashboard/public/locales/en/translation.json @@ -27,6 +27,8 @@ "details": "Details", "enabled": "Enabled", "disabled": "Disabled", + "active": "Active", + "revoked": "Revoked", "remove": "Remove", "admin": "Admin", "store": "Store", @@ -183,10 +185,16 @@ "domain": "API Key Management", "createKey": "Create key", "createPublishableApiKey": "Create Publishable API Key", + "editKey": "Edit key", "revoke": "Revoke", "publishableApiKeyHint": "Publishable API keys are used to limit the scope of requests to specific sales channels.", "deleteKeyWarning": "You are about to delete the API key {{title}}. This action cannot be undone.", - "revokeKeyWarning": "You are about to revoke the API key {{title}}." + "revokeKeyWarning": "You are about to revoke the API key {{title}}. This action cannot be undone, and the key cannot be used in future requests.", + "removeSalesChannelWarning": "You are about to remove the sales channel {{name}} from the API key. This action cannot be undone.", + "removeSalesChannelsWarning_one": "You are about to remove {{count}} sales channel from the API key. This action cannot be undone.", + "removeSalesChannelsWarning_other": "You are about to remove {{count}} sales channels from the API key. 
This action cannot be undone.", + "createdBy": "Created by", + "revokedBy": "Revoked by" }, "fields": { "name": "Name", @@ -234,6 +242,8 @@ "variants": "Variants", "orders": "Orders", "account": "Account", - "total": "Total" + "total": "Total", + "created": "Created", + "key": "Key" } } diff --git a/packages/admin-next/dashboard/src/components/common/user-link/index.ts b/packages/admin-next/dashboard/src/components/common/user-link/index.ts new file mode 100644 index 0000000000000..951235f82422f --- /dev/null +++ b/packages/admin-next/dashboard/src/components/common/user-link/index.ts @@ -0,0 +1 @@ +export * from "./user-link" diff --git a/packages/admin-next/dashboard/src/components/common/user-link/user-link.tsx b/packages/admin-next/dashboard/src/components/common/user-link/user-link.tsx new file mode 100644 index 0000000000000..1a23adcf617ab --- /dev/null +++ b/packages/admin-next/dashboard/src/components/common/user-link/user-link.tsx @@ -0,0 +1,34 @@ +import { Avatar, Text } from "@medusajs/ui" +import { Link } from "react-router-dom" + +type UserLinkProps = { + id: string + first_name?: string | null + last_name?: string | null + email: string + type?: "customer" | "user" +} + +export const UserLink = ({ + id, + first_name, + last_name, + email, + type = "user", +}: UserLinkProps) => { + const name = [first_name, last_name].filter(Boolean).join(" ") + const fallback = name ? name.slice(0, 1) : email.slice(0, 1) + const link = type === "user" ? `/settings/users/${id}` : `/customers/${id}` + + return ( + + + + {name || email} + + + ) +} diff --git a/packages/admin-next/dashboard/src/providers/router-provider/router-provider.tsx b/packages/admin-next/dashboard/src/providers/router-provider/router-provider.tsx index 6ccd1178e7ba3..c1ccf72385b13 100644 --- a/packages/admin-next/dashboard/src/providers/router-provider/router-provider.tsx +++ b/packages/admin-next/dashboard/src/providers/router-provider/router-provider.tsx @@ -3,7 +3,9 @@ import type { AdminCustomerGroupsRes, AdminCustomersRes, AdminProductsRes, + AdminPublishableApiKeysRes, AdminRegionsRes, + AdminSalesChannelsRes, } from "@medusajs/medusa" import { Outlet, @@ -487,6 +489,10 @@ const router = createBrowserRouter([ path: ":id", lazy: () => import("../../routes/sales-channels/sales-channel-detail"), + handle: { + crumb: (data: AdminSalesChannelsRes) => + data.sales_channel.name, + }, children: [ { path: "edit", @@ -533,6 +539,10 @@ const router = createBrowserRouter([ import( "../../routes/api-key-management/api-key-management-detail" ), + handle: { + crumb: (data: AdminPublishableApiKeysRes) => + data.publishable_api_key.title, + }, children: [ { path: "edit", @@ -541,6 +551,13 @@ const router = createBrowserRouter([ "../../routes/api-key-management/api-key-management-edit" ), }, + { + path: "add-sales-channels", + lazy: () => + import( + "../../routes/api-key-management/api-key-management-add-sales-channels" + ), + }, ], }, ], diff --git a/packages/admin-next/dashboard/src/routes/api-key-management/api-key-management-add-sales-channels/api-key-management-add-sales-channels.tsx b/packages/admin-next/dashboard/src/routes/api-key-management/api-key-management-add-sales-channels/api-key-management-add-sales-channels.tsx new file mode 100644 index 0000000000000..9e8a62caf7b4a --- /dev/null +++ b/packages/admin-next/dashboard/src/routes/api-key-management/api-key-management-add-sales-channels/api-key-management-add-sales-channels.tsx @@ -0,0 +1,36 @@ +import { FocusModal } from "@medusajs/ui" +import { 
useAdminPublishableApiKeySalesChannels } from "medusa-react" +import { useParams } from "react-router-dom" +import { useRouteModalState } from "../../../hooks/use-route-modal-state" +import { AddSalesChannelsToApiKeyForm } from "./components" + +export const ApiKeyManagementAddSalesChannels = () => { + const { id } = useParams() + const [open, onOpenChange, subscribe] = useRouteModalState() + + const { sales_channels, isLoading, isError, error } = + useAdminPublishableApiKeySalesChannels(id!) + + const handleSuccessfulSubmit = () => { + onOpenChange(false, true) + } + + if (isError) { + throw error + } + + return ( + + + {!isLoading && sales_channels && ( + sc.id)} + onSuccessfulSubmit={handleSuccessfulSubmit} + subscribe={subscribe} + /> + )} + + + ) +} diff --git a/packages/admin-next/dashboard/src/routes/api-key-management/api-key-management-add-sales-channels/components/add-sales-channels-to-api-key-form.tsx b/packages/admin-next/dashboard/src/routes/api-key-management/api-key-management-add-sales-channels/components/add-sales-channels-to-api-key-form.tsx new file mode 100644 index 0000000000000..902dee4fc9a4d --- /dev/null +++ b/packages/admin-next/dashboard/src/routes/api-key-management/api-key-management-add-sales-channels/components/add-sales-channels-to-api-key-form.tsx @@ -0,0 +1,326 @@ +import { zodResolver } from "@hookform/resolvers/zod" +import { SalesChannel } from "@medusajs/medusa" +import { + Button, + Checkbox, + FocusModal, + Hint, + StatusBadge, + Table, + Tooltip, + clx, +} from "@medusajs/ui" +import { + PaginationState, + RowSelectionState, + createColumnHelper, + flexRender, + getCoreRowModel, + useReactTable, +} from "@tanstack/react-table" +import { + useAdminAddPublishableKeySalesChannelsBatch, + useAdminSalesChannels, +} from "medusa-react" +import { useEffect, useMemo, useState } from "react" +import { useForm } from "react-hook-form" +import { useTranslation } from "react-i18next" +import * as zod from "zod" +import { Form } from "../../../../components/common/form" +import { OrderBy } from "../../../../components/filtering/order-by" +import { Query } from "../../../../components/filtering/query" +import { LocalizedTablePagination } from "../../../../components/localization/localized-table-pagination" +import { useQueryParams } from "../../../../hooks/use-query-params" + +type AddSalesChannelsToApiKeyFormProps = { + apiKey: string + preSelected: string[] + subscribe: (state: boolean) => void + onSuccessfulSubmit: () => void +} + +const AddSalesChannelsToApiKeySchema = zod.object({ + sales_channel_ids: zod.array(zod.string()).min(1), +}) + +const PAGE_SIZE = 50 + +export const AddSalesChannelsToApiKeyForm = ({ + apiKey, + preSelected, + subscribe, + onSuccessfulSubmit, +}: AddSalesChannelsToApiKeyFormProps) => { + const { t } = useTranslation() + + const form = useForm>({ + defaultValues: { + sales_channel_ids: [], + }, + resolver: zodResolver(AddSalesChannelsToApiKeySchema), + }) + + const { + formState: { isDirty }, + } = form + + useEffect(() => { + subscribe(isDirty) + }, [isDirty]) + + const { mutateAsync, isLoading: isMutating } = + useAdminAddPublishableKeySalesChannelsBatch(apiKey) + + const [{ pageIndex, pageSize }, setPagination] = useState({ + pageIndex: 0, + pageSize: PAGE_SIZE, + }) + + const pagination = useMemo( + () => ({ + pageIndex, + pageSize, + }), + [pageIndex, pageSize] + ) + + const [rowSelection, setRowSelection] = useState({}) + + useEffect(() => { + form.setValue( + "sales_channel_ids", + Object.keys(rowSelection).filter((k) => 
rowSelection[k]) + ) + }, [rowSelection]) + + const params = useQueryParams(["q", "order"]) + const { sales_channels, count } = useAdminSalesChannels( + { + limit: PAGE_SIZE, + offset: PAGE_SIZE * pageIndex, + ...params, + }, + { + keepPreviousData: true, + } + ) + + const columns = useColumns() + + const table = useReactTable({ + data: sales_channels ?? [], + columns, + pageCount: Math.ceil((count ?? 0) / PAGE_SIZE), + state: { + pagination, + rowSelection, + }, + onPaginationChange: setPagination, + onRowSelectionChange: setRowSelection, + getCoreRowModel: getCoreRowModel(), + manualPagination: true, + getRowId: (row) => row.id, + enableRowSelection: (row) => { + return !preSelected.includes(row.id) + }, + meta: { + preSelected, + }, + }) + + const handleSubmit = form.handleSubmit(async (values) => { + await mutateAsync( + { + sales_channel_ids: values.sales_channel_ids.map((p) => ({ id: p })), + }, + { + onSuccess: () => { + onSuccessfulSubmit() + }, + } + ) + }) + + return ( +
+ + +
+ {form.formState.errors.sales_channel_ids && ( + + {form.formState.errors.sales_channel_ids.message} + + )} + + + + +
+
+ +
+
+
+ + +
+
+
+ + + {table.getHeaderGroups().map((headerGroup) => { + return ( + + {headerGroup.headers.map((header) => { + return ( + + {flexRender( + header.column.columnDef.header, + header.getContext() + )} + + ) + })} + + ) + })} + + + {table.getRowModel().rows.map((row) => ( + + {row.getVisibleCells().map((cell) => ( + + {flexRender( + cell.column.columnDef.cell, + cell.getContext() + )} + + ))} + + ))} + +
+
+
+ +
+
+
+ + ) +} + +const columnHelper = createColumnHelper() + +const useColumns = () => { + const { t } = useTranslation() + + return useMemo( + () => [ + columnHelper.display({ + id: "select", + header: ({ table }) => { + return ( + + table.toggleAllPageRowsSelected(!!value) + } + /> + ) + }, + cell: ({ row, table }) => { + const { preSelected } = table.options.meta as { + preSelected: string[] + } + + const isAdded = preSelected.includes(row.original.id) + const isSelected = row.getIsSelected() || isAdded + + const Component = ( + row.toggleSelected(!!value)} + /> + ) + + if (isAdded) { + return ( + + {Component} + + ) + } + + return Component + }, + }), + columnHelper.accessor("name", { + header: t("fields.name"), + cell: ({ getValue }) => getValue(), + }), + columnHelper.accessor("description", { + header: t("fields.description"), + cell: ({ getValue }) => ( +
+ {getValue()} +
+ ), + }), + columnHelper.accessor("is_disabled", { + header: t("fields.status"), + cell: ({ getValue }) => { + const value = getValue() + return ( +
+ + {value ? t("general.disabled") : t("general.enabled")} + +
+ ) + }, + }), + ], + [t] + ) +} diff --git a/packages/admin-next/dashboard/src/routes/api-key-management/api-key-management-add-sales-channels/components/index.ts b/packages/admin-next/dashboard/src/routes/api-key-management/api-key-management-add-sales-channels/components/index.ts new file mode 100644 index 0000000000000..4e4781c229fba --- /dev/null +++ b/packages/admin-next/dashboard/src/routes/api-key-management/api-key-management-add-sales-channels/components/index.ts @@ -0,0 +1 @@ +export * from "./add-sales-channels-to-api-key-form" diff --git a/packages/admin-next/dashboard/src/routes/api-key-management/api-key-management-add-sales-channels/index.ts b/packages/admin-next/dashboard/src/routes/api-key-management/api-key-management-add-sales-channels/index.ts new file mode 100644 index 0000000000000..8c278126e7c1d --- /dev/null +++ b/packages/admin-next/dashboard/src/routes/api-key-management/api-key-management-add-sales-channels/index.ts @@ -0,0 +1 @@ +export { ApiKeyManagementAddSalesChannels as Component } from "./api-key-management-add-sales-channels" diff --git a/packages/admin-next/dashboard/src/routes/api-key-management/api-key-management-create/api-key-management-create.tsx b/packages/admin-next/dashboard/src/routes/api-key-management/api-key-management-create/api-key-management-create.tsx index ef7c98898e7c8..cfc8522b0c719 100644 --- a/packages/admin-next/dashboard/src/routes/api-key-management/api-key-management-create/api-key-management-create.tsx +++ b/packages/admin-next/dashboard/src/routes/api-key-management/api-key-management-create/api-key-management-create.tsx @@ -8,7 +8,7 @@ export const ApiKeyManagementCreate = () => { return ( - + ) diff --git a/packages/admin-next/dashboard/src/routes/api-key-management/api-key-management-create/components/create-publishable-api-key-form/create-publishable-api-key-form.tsx b/packages/admin-next/dashboard/src/routes/api-key-management/api-key-management-create/components/create-publishable-api-key-form/create-publishable-api-key-form.tsx index dd5c196b8325d..d7766fed21718 100644 --- a/packages/admin-next/dashboard/src/routes/api-key-management/api-key-management-create/components/create-publishable-api-key-form/create-publishable-api-key-form.tsx +++ b/packages/admin-next/dashboard/src/routes/api-key-management/api-key-management-create/components/create-publishable-api-key-form/create-publishable-api-key-form.tsx @@ -6,6 +6,7 @@ import { useTranslation } from "react-i18next" import * as zod from "zod" import { useEffect } from "react" +import { useNavigate } from "react-router-dom" import { Form } from "../../../../../components/common/form" type CreatePublishableApiKeyFormProps = { @@ -37,9 +38,16 @@ export const CreatePublishableApiKeyForm = ({ }, [isDirty]) const { t } = useTranslation() + const navigate = useNavigate() const handleSubmit = form.handleSubmit(async (values) => { - await mutateAsync(values) + await mutateAsync(values, { + onSuccess: ({ publishable_api_key }) => { + navigate(`/settings/api-key-management/${publishable_api_key.id}`, { + replace: true, + }) + }, + }) }) return ( diff --git a/packages/admin-next/dashboard/src/routes/api-key-management/api-key-management-detail/api-key-management-detail.tsx b/packages/admin-next/dashboard/src/routes/api-key-management/api-key-management-detail/api-key-management-detail.tsx index 169888b96e631..358886d640435 100644 --- a/packages/admin-next/dashboard/src/routes/api-key-management/api-key-management-detail/api-key-management-detail.tsx +++ 
b/packages/admin-next/dashboard/src/routes/api-key-management/api-key-management-detail/api-key-management-detail.tsx @@ -1,3 +1,39 @@ +import { useAdminPublishableApiKey } from "medusa-react" +import { Outlet, json, useLoaderData, useParams } from "react-router-dom" +import { JsonViewSection } from "../../../components/common/json-view-section" +import { ApiKeyGeneralSection } from "./components/api-key-general-section" +import { ApiKeySalesChannelSection } from "./components/api-key-sales-channel-section" +import { apiKeyLoader } from "./loader" + export const ApiKeyManagementDetail = () => { - return
+ const initialData = useLoaderData() as Awaited< + ReturnType + > + + const { id } = useParams() + const { publishable_api_key, isLoading, isError, error } = + useAdminPublishableApiKey(id!, { + initialData, + }) + + if (isLoading) { + return
Loading...
+ } + + if (isError || !publishable_api_key) { + if (error) { + throw error + } + + throw json("An unknown error occurred", 500) + } + + return ( +
+ + + + +
+ ) } diff --git a/packages/admin-next/dashboard/src/routes/api-key-management/api-key-management-detail/components/api-key-general-section/api-key-general-section.tsx b/packages/admin-next/dashboard/src/routes/api-key-management/api-key-management-detail/components/api-key-general-section/api-key-general-section.tsx new file mode 100644 index 0000000000000..89ebed001d2bb --- /dev/null +++ b/packages/admin-next/dashboard/src/routes/api-key-management/api-key-management-detail/components/api-key-general-section/api-key-general-section.tsx @@ -0,0 +1,180 @@ +import { PencilSquare, Trash, XCircle } from "@medusajs/icons" +import { PublishableApiKey } from "@medusajs/medusa" +import { + Container, + Copy, + Heading, + StatusBadge, + Text, + usePrompt, +} from "@medusajs/ui" +import { + useAdminDeletePublishableApiKey, + useAdminRevokePublishableApiKey, + useAdminUser, +} from "medusa-react" +import { useTranslation } from "react-i18next" +import { ActionMenu } from "../../../../../components/common/action-menu" +import { Skeleton } from "../../../../../components/common/skeleton" +import { UserLink } from "../../../../../components/common/user-link" + +type ApiKeyGeneralSectionProps = { + apiKey: PublishableApiKey +} + +export const ApiKeyGeneralSection = ({ apiKey }: ApiKeyGeneralSectionProps) => { + const { t } = useTranslation() + const prompt = usePrompt() + + const { mutateAsync: revokeAsync } = useAdminRevokePublishableApiKey( + apiKey.id + ) + const { mutateAsync: deleteAsync } = useAdminDeletePublishableApiKey( + apiKey.id + ) + + const handleDelete = async () => { + const res = await prompt({ + title: t("general.areYouSure"), + description: t("apiKeyManagement.deleteKeyWarning", { + title: apiKey.title, + }), + confirmText: t("general.delete"), + cancelText: t("general.cancel"), + }) + + if (!res) { + return + } + + await deleteAsync() + } + + const handleRevoke = async () => { + const res = await prompt({ + title: t("general.areYouSure"), + description: t("apiKeyManagement.revokeKeyWarning", { + title: apiKey.title, + }), + confirmText: t("apiKeyManagement.revoke"), + cancelText: t("general.cancel"), + }) + + if (!res) { + return + } + + await revokeAsync() + } + + const dangerousActions = [ + { + icon: , + label: t("general.delete"), + onClick: handleDelete, + }, + ] + + if (!apiKey.revoked_at) { + dangerousActions.unshift({ + icon: , + label: t("apiKeyManagement.revoke"), + onClick: handleRevoke, + }) + } + + return ( + +
+ {apiKey.title} +
+ + {apiKey.revoked_at ? t("general.revoked") : t("general.active")} + + , + to: `/settings/api-key-management/${apiKey.id}/edit`, + }, + ], + }, + { + actions: dangerousActions, + }, + ]} + /> +
+
+
+ + {t("fields.key")} + +
+ + {apiKey.id} + + +
+
+
+ + {t("apiKeyManagement.createdBy")} + + +
+ {apiKey.revoked_at && ( +
+ + {t("apiKeyManagement.revokedBy")} + + +
+ )} +
+ ) +} + +const ActionBy = ({ userId }: { userId: string | null }) => { + const { user, isLoading, isError, error } = useAdminUser(userId!, { + enabled: !!userId, + }) + + if (!userId) { + return ( + + - + + ) + } + + if (isError) { + throw error + } + + if (isLoading) { + return ( +
+ + +
+ ) + } + + if (!user) { + return ( + + - + + ) + } + + return +} diff --git a/packages/admin-next/dashboard/src/routes/api-key-management/api-key-management-detail/components/api-key-general-section/index.ts b/packages/admin-next/dashboard/src/routes/api-key-management/api-key-management-detail/components/api-key-general-section/index.ts new file mode 100644 index 0000000000000..eb2a2a50ed698 --- /dev/null +++ b/packages/admin-next/dashboard/src/routes/api-key-management/api-key-management-detail/components/api-key-general-section/index.ts @@ -0,0 +1 @@ +export * from "./api-key-general-section" diff --git a/packages/admin-next/dashboard/src/routes/api-key-management/api-key-management-detail/components/api-key-sales-channel-section/api-key-sales-channel-section.tsx b/packages/admin-next/dashboard/src/routes/api-key-management/api-key-management-detail/components/api-key-sales-channel-section/api-key-sales-channel-section.tsx new file mode 100644 index 0000000000000..3071e1e3f58ff --- /dev/null +++ b/packages/admin-next/dashboard/src/routes/api-key-management/api-key-management-detail/components/api-key-sales-channel-section/api-key-sales-channel-section.tsx @@ -0,0 +1,368 @@ +import { PencilSquare, Trash } from "@medusajs/icons" +import { PublishableApiKey, SalesChannel } from "@medusajs/medusa" +import { + Button, + Checkbox, + CommandBar, + Container, + Heading, + StatusBadge, + Table, + clx, + usePrompt, +} from "@medusajs/ui" +import { + PaginationState, + RowSelectionState, + createColumnHelper, + flexRender, + getCoreRowModel, + useReactTable, +} from "@tanstack/react-table" +import { + useAdminPublishableApiKeySalesChannels, + useAdminRemovePublishableKeySalesChannelsBatch, +} from "medusa-react" +import { useMemo, useState } from "react" +import { useTranslation } from "react-i18next" +import { Link, useNavigate } from "react-router-dom" +import { ActionMenu } from "../../../../../components/common/action-menu" +import { + NoRecords, + NoResults, +} from "../../../../../components/common/empty-table-content" +import { Query } from "../../../../../components/filtering/query" +import { LocalizedTablePagination } from "../../../../../components/localization/localized-table-pagination" +import { useQueryParams } from "../../../../../hooks/use-query-params" + +type ApiKeySalesChannelSectionProps = { + apiKey: PublishableApiKey +} + +const PAGE_SIZE = 10 + +export const ApiKeySalesChannelSection = ({ + apiKey, +}: ApiKeySalesChannelSectionProps) => { + const { t } = useTranslation() + const navigate = useNavigate() + const prompt = usePrompt() + + const [{ pageIndex, pageSize }, setPagination] = useState({ + pageIndex: 0, + pageSize: PAGE_SIZE, + }) + + const pagination = useMemo( + () => ({ + pageIndex, + pageSize, + }), + [pageIndex, pageSize] + ) + + const [rowSelection, setRowSelection] = useState({}) + + const params = useQueryParams(["q"]) + const { sales_channels, isLoading, isError, error } = + useAdminPublishableApiKeySalesChannels( + apiKey.id, + { + ...params, + }, + { + keepPreviousData: true, + } + ) + + const count = sales_channels?.length || 0 + + const columns = useColumns() + + const table = useReactTable({ + data: sales_channels ?? 
[], + columns, + pageCount: Math.ceil(count / PAGE_SIZE), + state: { + pagination, + rowSelection, + }, + getRowId: (row) => row.id, + onPaginationChange: setPagination, + onRowSelectionChange: setRowSelection, + getCoreRowModel: getCoreRowModel(), + manualPagination: true, + meta: { + apiKey: apiKey.id, + }, + }) + + const { mutateAsync } = useAdminRemovePublishableKeySalesChannelsBatch( + apiKey.id + ) + + const handleRemove = async () => { + const keys = Object.keys(rowSelection).filter((k) => rowSelection[k]) + + const res = await prompt({ + title: t("general.areYouSure"), + description: t("apiKeyManagement.removeSalesChannelsWarning", { + count: keys.length, + }), + confirmText: t("general.continue"), + cancelText: t("general.cancel"), + }) + + if (!res) { + return + } + + await mutateAsync( + { + sales_channel_ids: keys.map((k) => ({ id: k })), + }, + { + onSuccess: () => { + setRowSelection({}) + }, + } + ) + } + + const noRecords = !isLoading && !sales_channels?.length && !params.q + + if (isError) { + throw error + } + + return ( + +
+ {t("salesChannels.domain")} + +
+ {!noRecords && ( +
+
+
+ +
+
+ )} + {noRecords ? ( + + ) : ( +
+ {!isLoading && sales_channels?.length !== 0 ? ( + + + {table.getHeaderGroups().map((headerGroup) => { + return ( + + {headerGroup.headers.map((header) => { + return ( + + {flexRender( + header.column.columnDef.header, + header.getContext() + )} + + ) + })} + + ) + })} + + + {table.getRowModel().rows.map((row) => ( + + navigate(`/settings/sales-channels/${row.original.id}`) + } + > + {row.getVisibleCells().map((cell) => ( + + {flexRender( + cell.column.columnDef.cell, + cell.getContext() + )} + + ))} + + ))} + +
+ ) : ( + + )} + + + + + {t("general.countSelected", { + count: Object.keys(rowSelection).length, + })} + + + + + +
+ )} +
+ ) +} + +const SalesChannelActions = ({ + salesChannel, + apiKey, +}: { + salesChannel: SalesChannel + apiKey: string +}) => { + const { t } = useTranslation() + const prompt = usePrompt() + + const { mutateAsync } = useAdminRemovePublishableKeySalesChannelsBatch(apiKey) + + const handleDelete = async () => { + const res = await prompt({ + title: t("general.areYouSure"), + description: t("apiKeyManagement.removeSalesChannelWarning"), + confirmText: t("general.delete"), + cancelText: t("general.cancel"), + }) + + if (!res) { + return + } + + await mutateAsync({ + sales_channel_ids: [{ id: salesChannel.id }], + }) + } + + return ( + , + label: t("general.edit"), + to: `/settings/sales-channels/${salesChannel.id}/edit`, + }, + ], + }, + { + actions: [ + { + icon: , + label: t("general.delete"), + onClick: handleDelete, + }, + ], + }, + ]} + /> + ) +} + +const columnHelper = createColumnHelper() + +const useColumns = () => { + const { t } = useTranslation() + + return useMemo( + () => [ + columnHelper.display({ + id: "select", + header: ({ table }) => { + return ( + + table.toggleAllPageRowsSelected(!!value) + } + /> + ) + }, + cell: ({ row }) => { + return ( + row.toggleSelected(!!value)} + onClick={(e) => { + e.stopPropagation() + }} + /> + ) + }, + }), + columnHelper.accessor("name", { + header: t("fields.name"), + cell: ({ getValue }) => getValue(), + }), + columnHelper.accessor("description", { + header: t("fields.description"), + cell: ({ getValue }) => getValue(), + }), + columnHelper.accessor("is_disabled", { + header: t("fields.status"), + cell: ({ getValue }) => { + const value = getValue() + return ( +
+ + {value ? t("general.disabled") : t("general.enabled")} + +
+ ) + }, + }), + columnHelper.display({ + id: "actions", + cell: ({ row, table }) => { + const { apiKey } = table.options.meta as { + apiKey: string + } + + return ( + + ) + }, + }), + ], + [t] + ) +} diff --git a/packages/admin-next/dashboard/src/routes/api-key-management/api-key-management-detail/components/api-key-sales-channel-section/index.ts b/packages/admin-next/dashboard/src/routes/api-key-management/api-key-management-detail/components/api-key-sales-channel-section/index.ts new file mode 100644 index 0000000000000..a9b2850391b8f --- /dev/null +++ b/packages/admin-next/dashboard/src/routes/api-key-management/api-key-management-detail/components/api-key-sales-channel-section/index.ts @@ -0,0 +1 @@ +export * from "./api-key-sales-channel-section" diff --git a/packages/admin-next/dashboard/src/routes/api-key-management/api-key-management-detail/index.ts b/packages/admin-next/dashboard/src/routes/api-key-management/api-key-management-detail/index.ts index 2c193a6bd8137..710a7b5175286 100644 --- a/packages/admin-next/dashboard/src/routes/api-key-management/api-key-management-detail/index.ts +++ b/packages/admin-next/dashboard/src/routes/api-key-management/api-key-management-detail/index.ts @@ -1 +1,2 @@ export { ApiKeyManagementDetail as Component } from "./api-key-management-detail" +export { apiKeyLoader as loader } from "./loader" diff --git a/packages/admin-next/dashboard/src/routes/api-key-management/api-key-management-detail/loader.ts b/packages/admin-next/dashboard/src/routes/api-key-management/api-key-management-detail/loader.ts new file mode 100644 index 0000000000000..66034bcebb6a9 --- /dev/null +++ b/packages/admin-next/dashboard/src/routes/api-key-management/api-key-management-detail/loader.ts @@ -0,0 +1,22 @@ +import { AdminPublishableApiKeysRes } from "@medusajs/medusa" +import { Response } from "@medusajs/medusa-js" +import { adminProductKeys } from "medusa-react" +import { LoaderFunctionArgs } from "react-router-dom" + +import { medusa, queryClient } from "../../../lib/medusa" + +const apiKeyDetailQuery = (id: string) => ({ + queryKey: adminProductKeys.detail(id), + queryFn: async () => medusa.admin.publishableApiKeys.retrieve(id), +}) + +export const apiKeyLoader = async ({ params }: LoaderFunctionArgs) => { + const id = params.id + const query = apiKeyDetailQuery(id!) + + return ( + queryClient.getQueryData>( + query.queryKey + ) ?? 
(await queryClient.fetchQuery(query)) + ) +} diff --git a/packages/admin-next/dashboard/src/routes/api-key-management/api-key-management-edit/api-key-management-edit.tsx b/packages/admin-next/dashboard/src/routes/api-key-management/api-key-management-edit/api-key-management-edit.tsx index 5c4125b5dd679..61c1fcaeb57a0 100644 --- a/packages/admin-next/dashboard/src/routes/api-key-management/api-key-management-edit/api-key-management-edit.tsx +++ b/packages/admin-next/dashboard/src/routes/api-key-management/api-key-management-edit/api-key-management-edit.tsx @@ -1,12 +1,40 @@ -import { Drawer } from "@medusajs/ui" +import { Drawer, Heading } from "@medusajs/ui" +import { useAdminPublishableApiKey } from "medusa-react" +import { useTranslation } from "react-i18next" +import { useParams } from "react-router-dom" import { useRouteModalState } from "../../../hooks/use-route-modal-state" +import { EditApiKeyForm } from "./components/edit-api-key-form" export const ApiKeyManagementEdit = () => { - const [open, onOpenChange] = useRouteModalState() + const [open, onOpenChange, subscribe] = useRouteModalState() + const { id } = useParams() + const { t } = useTranslation() + + const { publishable_api_key, isLoading, isError, error } = + useAdminPublishableApiKey(id!) + + const handleSuccessfulSubmit = () => { + onOpenChange(false, true) + } + + if (isError) { + throw error + } return ( - + + + {t("apiKeyManagement.editKey")} + + {!isLoading && publishable_api_key && ( + + )} + ) } diff --git a/packages/admin-next/dashboard/src/routes/api-key-management/api-key-management-edit/components/edit-api-key-form/edit-api-key-form.tsx b/packages/admin-next/dashboard/src/routes/api-key-management/api-key-management-edit/components/edit-api-key-form/edit-api-key-form.tsx new file mode 100644 index 0000000000000..0a061c564c24d --- /dev/null +++ b/packages/admin-next/dashboard/src/routes/api-key-management/api-key-management-edit/components/edit-api-key-form/edit-api-key-form.tsx @@ -0,0 +1,90 @@ +import { zodResolver } from "@hookform/resolvers/zod" +import type { PublishableApiKey } from "@medusajs/medusa" +import { Button, Drawer, Input } from "@medusajs/ui" +import { useAdminUpdatePublishableApiKey } from "medusa-react" +import { useEffect } from "react" +import { useForm } from "react-hook-form" +import { useTranslation } from "react-i18next" +import * as zod from "zod" +import { Form } from "../../../../../components/common/form" + +type EditApiKeyFormProps = { + apiKey: PublishableApiKey + onSuccessfulSubmit: () => void + subscribe: (state: boolean) => void +} + +const EditApiKeySchema = zod.object({ + title: zod.string().min(1), +}) + +export const EditApiKeyForm = ({ + apiKey, + onSuccessfulSubmit, + subscribe, +}: EditApiKeyFormProps) => { + const { t } = useTranslation() + + const form = useForm>({ + defaultValues: { + title: apiKey.title, + }, + resolver: zodResolver(EditApiKeySchema), + }) + + const { + formState: { isDirty }, + } = form + + useEffect(() => { + subscribe(isDirty) + }, [isDirty]) + + const { mutateAsync, isLoading } = useAdminUpdatePublishableApiKey(apiKey.id) + + const handleSubmit = form.handleSubmit(async (data) => { + await mutateAsync(data, { + onSuccess: () => { + onSuccessfulSubmit() + }, + }) + }) + + return ( +
+ + +
+ { + return ( + + {t("fields.title")} + + + + + + ) + }} + /> +
+
+ +
+ + + + +
+
+
+ + ) +} diff --git a/packages/admin-next/dashboard/src/routes/api-key-management/api-key-management-edit/components/edit-api-key-form/index.ts b/packages/admin-next/dashboard/src/routes/api-key-management/api-key-management-edit/components/edit-api-key-form/index.ts new file mode 100644 index 0000000000000..441fcefa9f79f --- /dev/null +++ b/packages/admin-next/dashboard/src/routes/api-key-management/api-key-management-edit/components/edit-api-key-form/index.ts @@ -0,0 +1 @@ +export * from "./edit-api-key-form" diff --git a/packages/admin-next/dashboard/src/routes/api-key-management/api-key-management-list/components/api-key-management-list-table/api-key-management-list-table.tsx b/packages/admin-next/dashboard/src/routes/api-key-management/api-key-management-list/components/api-key-management-list-table/api-key-management-list-table.tsx index 4edd5e6a1c16b..4effd24a8c415 100644 --- a/packages/admin-next/dashboard/src/routes/api-key-management/api-key-management-list/components/api-key-management-list-table/api-key-management-list-table.tsx +++ b/packages/admin-next/dashboard/src/routes/api-key-management/api-key-management-list/components/api-key-management-list-table/api-key-management-list-table.tsx @@ -1,6 +1,16 @@ import { PencilSquare, Trash, XCircle } from "@medusajs/icons" import { PublishableApiKey } from "@medusajs/medusa" -import { Button, Container, Heading, Table, clx, usePrompt } from "@medusajs/ui" +import { + Button, + Container, + Copy, + Heading, + StatusBadge, + Table, + Text, + clx, + usePrompt, +} from "@medusajs/ui" import { PaginationState, RowSelectionState, @@ -9,6 +19,7 @@ import { getCoreRowModel, useReactTable, } from "@tanstack/react-table" +import { format } from "date-fns" import { useAdminDeletePublishableApiKey, useAdminPublishableApiKeys, @@ -71,7 +82,7 @@ export const ApiKeyManagementListTable = () => { } return ( - +
{t("apiKeyManagement.domain")} @@ -89,7 +100,7 @@ export const ApiKeyManagementListTable = () => { return ( {headerGroup.headers.map((header) => { return ( @@ -247,7 +258,45 @@ const useColumns = () => { }), columnHelper.accessor("id", { header: "Key", - cell: ({ getValue }) => getValue(), + cell: ({ getValue }) => { + const token = getValue() + + return ( +
e.stopPropagation()} + > + + {token} + + +
+ ) + }, + }), + columnHelper.accessor("revoked_at", { + header: t("fields.status"), + cell: ({ getValue }) => { + const revokedAt = getValue() + + return ( + + {revokedAt ? t("general.revoked") : t("general.active")} + + ) + }, + }), + columnHelper.accessor("created_at", { + header: t("fields.created"), + cell: ({ getValue }) => { + const date = getValue() + + return format(new Date(date), "dd MMM, yyyy") + }, }), columnHelper.display({ id: "actions", diff --git a/packages/admin-next/dashboard/src/routes/locations/location-detail/components/location-sales-channel-section/location-sales-channel-section.tsx b/packages/admin-next/dashboard/src/routes/locations/location-detail/components/location-sales-channel-section/location-sales-channel-section.tsx index cf0e17529974b..deeafce0efbdb 100644 --- a/packages/admin-next/dashboard/src/routes/locations/location-detail/components/location-sales-channel-section/location-sales-channel-section.tsx +++ b/packages/admin-next/dashboard/src/routes/locations/location-detail/components/location-sales-channel-section/location-sales-channel-section.tsx @@ -29,7 +29,7 @@ type LocationSalesChannelSectionProps = { location: StockLocationExpandedDTO } -const PAGE_SIZE = 20 +const PAGE_SIZE = 10 export const LocationSalesChannelSection = ({ location, diff --git a/packages/admin-next/dashboard/src/routes/sales-channels/sales-channel-detail/index.ts b/packages/admin-next/dashboard/src/routes/sales-channels/sales-channel-detail/index.ts index 29e5a16bccb01..8d5a59045f7b8 100644 --- a/packages/admin-next/dashboard/src/routes/sales-channels/sales-channel-detail/index.ts +++ b/packages/admin-next/dashboard/src/routes/sales-channels/sales-channel-detail/index.ts @@ -1 +1,2 @@ +export { salesChannelLoader as loader } from "./loader" export { SalesChannelDetail as Component } from "./sales-channel-detail" diff --git a/packages/admin-next/dashboard/src/routes/sales-channels/sales-channel-detail/loader.ts b/packages/admin-next/dashboard/src/routes/sales-channels/sales-channel-detail/loader.ts new file mode 100644 index 0000000000000..5208c2e065737 --- /dev/null +++ b/packages/admin-next/dashboard/src/routes/sales-channels/sales-channel-detail/loader.ts @@ -0,0 +1,21 @@ +import { AdminSalesChannelsRes } from "@medusajs/medusa" +import { Response } from "@medusajs/medusa-js" +import { adminProductKeys } from "medusa-react" +import { LoaderFunctionArgs } from "react-router-dom" + +import { medusa, queryClient } from "../../../lib/medusa" + +const salesChannelDetailQuery = (id: string) => ({ + queryKey: adminProductKeys.detail(id), + queryFn: async () => medusa.admin.salesChannels.retrieve(id), +}) + +export const salesChannelLoader = async ({ params }: LoaderFunctionArgs) => { + const id = params.id + const query = salesChannelDetailQuery(id!) + + return ( + queryClient.getQueryData>(query.queryKey) ?? 
+ (await queryClient.fetchQuery(query)) + ) +} diff --git a/packages/admin-next/dashboard/src/routes/sales-channels/sales-channel-detail/sales-channel-detail.tsx b/packages/admin-next/dashboard/src/routes/sales-channels/sales-channel-detail/sales-channel-detail.tsx index a8250eb963ce9..dd74f831360b5 100644 --- a/packages/admin-next/dashboard/src/routes/sales-channels/sales-channel-detail/sales-channel-detail.tsx +++ b/packages/admin-next/dashboard/src/routes/sales-channels/sales-channel-detail/sales-channel-detail.tsx @@ -1,13 +1,20 @@ import { useAdminSalesChannel } from "medusa-react" -import { Outlet, useParams } from "react-router-dom" +import { Outlet, useLoaderData, useParams } from "react-router-dom" import { JsonViewSection } from "../../../components/common/json-view-section" import { SalesChannelGeneralSection } from "./components/sales-channel-general-section" import { SalesChannelProductSection } from "./components/sales-channel-product-section" +import { salesChannelLoader } from "./loader" export const SalesChannelDetail = () => { + const initialData = useLoaderData() as Awaited< + ReturnType + > + const { id } = useParams() - const { sales_channel, isLoading } = useAdminSalesChannel(id!) + const { sales_channel, isLoading } = useAdminSalesChannel(id!, { + initialData, + }) if (isLoading || !sales_channel) { return
Loading...
diff --git a/packages/admin-next/dashboard/src/routes/store/store-detail/components/store-general-section/store-general-section.tsx b/packages/admin-next/dashboard/src/routes/store/store-detail/components/store-general-section/store-general-section.tsx index 73c147547f753..362c98141a936 100644 --- a/packages/admin-next/dashboard/src/routes/store/store-detail/components/store-general-section/store-general-section.tsx +++ b/packages/admin-next/dashboard/src/routes/store/store-detail/components/store-general-section/store-general-section.tsx @@ -51,11 +51,16 @@ export const StoreGeneralSection = ({ store }: StoreGeneralSectionProps) => { {t("store.swapLinkTemplate")} {store.swap_link_template ? ( - - - {store.swap_link_template} - - +
+ + {store.swap_link_template} + + +
) : ( - @@ -67,11 +72,16 @@ export const StoreGeneralSection = ({ store }: StoreGeneralSectionProps) => { {t("store.paymentLinkTemplate")} {store.payment_link_template ? ( - - - {store.payment_link_template} - - +
+ + {store.payment_link_template} + + +
) : ( - @@ -83,11 +93,16 @@ export const StoreGeneralSection = ({ store }: StoreGeneralSectionProps) => { {t("store.inviteLinkTemplate")} {store.invite_link_template ? ( - - - {store.invite_link_template} - - +
+ + {store.invite_link_template} + + +
) : ( - diff --git a/packages/authentication/integration-tests/__fixtures__/auth-user/index.ts b/packages/authentication/integration-tests/__fixtures__/auth-user/index.ts index 7921cbdf906f3..46e745ffdc18b 100644 --- a/packages/authentication/integration-tests/__fixtures__/auth-user/index.ts +++ b/packages/authentication/integration-tests/__fixtures__/auth-user/index.ts @@ -6,13 +6,16 @@ export async function createAuthUsers( userData: any[] = [ { id: "test-id", + entity_id: "test-id", provider: "manual", }, { id: "test-id-1", + entity_id: "test-id-1", provider: "manual", }, { + entity_id: "test-id-2", provider: "store", }, ] diff --git a/packages/authentication/integration-tests/__tests__/services/auth-user/index.spec.ts b/packages/authentication/integration-tests/__tests__/services/auth-user/index.spec.ts index eaaf4324ef83c..07f7aa2426d9e 100644 --- a/packages/authentication/integration-tests/__tests__/services/auth-user/index.spec.ts +++ b/packages/authentication/integration-tests/__tests__/services/auth-user/index.spec.ts @@ -229,6 +229,7 @@ describe("AuthUser Service", () => { { id: "test", provider_id: "manual", + entity_id: "test" }, ]) diff --git a/packages/authentication/integration-tests/__tests__/services/module/auth-user.spec.ts b/packages/authentication/integration-tests/__tests__/services/module/auth-user.spec.ts index 14dc31ef6690b..677fd8e35fc0a 100644 --- a/packages/authentication/integration-tests/__tests__/services/module/auth-user.spec.ts +++ b/packages/authentication/integration-tests/__tests__/services/module/auth-user.spec.ts @@ -237,6 +237,7 @@ describe("AuthenticationModuleService - AuthUser", () => { { id: "test", provider_id: "manual", + entity_id: "test" }, ]) diff --git a/packages/authentication/integration-tests/__tests__/services/module/provider-loaders.ts b/packages/authentication/integration-tests/__tests__/services/module/provider-loaders.ts deleted file mode 100644 index 36e513f95fb86..0000000000000 --- a/packages/authentication/integration-tests/__tests__/services/module/provider-loaders.ts +++ /dev/null @@ -1,45 +0,0 @@ -import { SqlEntityManager } from "@mikro-orm/postgresql" - -import { MikroOrmWrapper } from "../../../utils" -import { initialize } from "../../../../src" -import { DB_URL } from "@medusajs/pricing/integration-tests/utils" -import { MedusaModule } from "@medusajs/modules-sdk" -import { IAuthenticationModuleService } from "@medusajs/types" - -jest.setTimeout(30000) - -describe("AuthenticationModuleService - AuthProvider", () => { - let service: IAuthenticationModuleService - let testManager: SqlEntityManager - - beforeEach(async () => { - await MikroOrmWrapper.setupDatabase() - testManager = MikroOrmWrapper.forkManager() - - service = await initialize({ - database: { - clientUrl: DB_URL, - schema: process.env.MEDUSA_PRICING_DB_SCHEMA, - }, - }) - }) - - afterEach(async () => { - await MikroOrmWrapper.clearDatabase() - MedusaModule.clearInstances() - }) - - describe("listAuthProviders", () => { - it("should list default AuthProviders", async () => { - const authProviders = await service.listAuthProviders() - const serialized = JSON.parse(JSON.stringify(authProviders)) - - expect(serialized).toEqual([ - expect.objectContaining({ - provider: "usernamePassword", - name: "Username/Password Authentication", - }), - ]) - }) - }) -}) diff --git a/packages/authentication/integration-tests/__tests__/services/module/providers.spec.ts b/packages/authentication/integration-tests/__tests__/services/module/providers.spec.ts new file mode 100644 index 
0000000000000..cefb712ce166e
--- /dev/null
+++ b/packages/authentication/integration-tests/__tests__/services/module/providers.spec.ts
@@ -0,0 +1,73 @@
+import { DB_URL } from "@medusajs/pricing/integration-tests/utils"
+import { IAuthenticationModuleService } from "@medusajs/types"
+import { MedusaModule } from "@medusajs/modules-sdk"
+import { MikroOrmWrapper } from "../../../utils"
+import { SqlEntityManager } from "@mikro-orm/postgresql"
+import { createAuthProviders } from "../../../__fixtures__/auth-provider"
+import { initialize } from "../../../../src"
+
+jest.setTimeout(30000)
+
+describe("AuthenticationModuleService - AuthProvider", () => {
+  let service: IAuthenticationModuleService
+  let testManager: SqlEntityManager
+
+  beforeEach(async () => {
+    await MikroOrmWrapper.setupDatabase()
+    testManager = MikroOrmWrapper.forkManager()
+
+    service = await initialize({
+      database: {
+        clientUrl: DB_URL,
+        schema: process.env.MEDUSA_PRICING_DB_SCHEMA,
+      },
+    })
+
+    if (service.__hooks?.onApplicationStart) {
+      await service.__hooks.onApplicationStart()
+    }
+  })
+
+  afterEach(async () => {
+    await MikroOrmWrapper.clearDatabase()
+    MedusaModule.clearInstances()
+  })
+
+  describe("listAuthProviders", () => {
+    it("should list default AuthProviders registered by loaders", async () => {
+      const authProviders = await service.listAuthProviders()
+      const serialized = JSON.parse(JSON.stringify(authProviders))
+
+      expect(serialized).toEqual(
+        expect.arrayContaining([
+          expect.objectContaining({
+            provider: "usernamePassword",
+            name: "Username/Password Authentication",
+          }),
+          expect.objectContaining({
+            provider: "google",
+            name: "Google Authentication",
+          }),
+        ])
+      )
+    })
+  })
+
+  describe("authenticate", () => {
+    it("authenticate validates that a provider is registered in container", async () => {
+      await createAuthProviders(testManager, [
+        {
+          provider: "notRegistered",
+          name: "test",
+        },
+      ])
+
+      const { success, error } = await service.authenticate("notRegistered", {})
+
+      expect(success).toBe(false)
+      expect(error).toEqual(
+        "AuthenticationProvider with for provider: notRegistered wasn't registered in the module. Have you configured your options correctly?"
+ ) + }) + }) +}) diff --git a/packages/authentication/integration-tests/__tests__/services/providers/username-password.spec.ts b/packages/authentication/integration-tests/__tests__/services/providers/username-password.spec.ts new file mode 100644 index 0000000000000..b8556ea1aa572 --- /dev/null +++ b/packages/authentication/integration-tests/__tests__/services/providers/username-password.spec.ts @@ -0,0 +1,139 @@ +import { DB_URL } from "@medusajs/pricing/integration-tests/utils" +import { IAuthenticationModuleService } from "@medusajs/types" +import { MedusaModule } from "@medusajs/modules-sdk" +import { MikroOrmWrapper } from "../../../utils" +import Scrypt from "scrypt-kdf" +import { SqlEntityManager } from "@mikro-orm/postgresql" +import { createAuthProviders } from "../../../__fixtures__/auth-provider" +import { createAuthUsers } from "../../../__fixtures__/auth-user" +import { initialize } from "../../../../src" + +jest.setTimeout(30000) +const seedDefaultData = async (testManager) => { + await createAuthProviders(testManager) + await createAuthUsers(testManager) +} + +describe("AuthenticationModuleService - AuthProvider", () => { + let service: IAuthenticationModuleService + let testManager: SqlEntityManager + + beforeEach(async () => { + await MikroOrmWrapper.setupDatabase() + testManager = MikroOrmWrapper.forkManager() + + service = await initialize({ + database: { + clientUrl: DB_URL, + schema: process.env.MEDUSA_PRICING_DB_SCHEMA, + }, + }) + + if (service.__hooks?.onApplicationStart) { + await service.__hooks.onApplicationStart() + } + }) + + afterEach(async () => { + await MikroOrmWrapper.clearDatabase() + MedusaModule.clearInstances() + }) + + describe("authenticate", () => { + it("successfully authenticates a user with a valid password", async () => { + const password = "supersecret" + const email = "test@test.com" + const passwordHash = ( + await Scrypt.kdf(password, { logN: 15, r: 8, p: 1 }) + ).toString("base64") + + await seedDefaultData(testManager) + await createAuthUsers(testManager, [ + // Add authenticated user + { + provider: "usernamePassword", + entity_id: email, + provider_metadata: { + password: passwordHash, + }, + }, + ]) + + const res = await service.authenticate("usernamePassword", { + body: { + email: "test@test.com", + password: password, + }, + }) + + expect(res).toEqual({ + success: true, + authUser: expect.objectContaining({ + entity_id: email, + provider_metadata: { + }, + }), + }) + }) + + it("fails when no password is given", async () => { + const email = "test@test.com" + + await seedDefaultData(testManager) + + const res = await service.authenticate("usernamePassword", { + body: { email: "test@test.com" }, + }) + + expect(res).toEqual({ + success: false, + error: "Password should be a string", + }) + }) + + it("fails when no email is given", async () => { + await seedDefaultData(testManager) + + const res = await service.authenticate("usernamePassword", { + body: { password: "supersecret" }, + }) + + expect(res).toEqual({ + success: false, + error: "Email should be a string", + }) + }) + + it("fails with an invalid password", async () => { + const password = "supersecret" + const email = "test@test.com" + const passwordHash = ( + await Scrypt.kdf(password, { logN: 15, r: 8, p: 1 }) + ).toString("base64") + + await seedDefaultData(testManager) + await createAuthUsers(testManager, [ + // Add authenticated user + { + provider: "usernamePassword", + entity_id: email, + provider_metadata: { + password_hash: passwordHash, + }, + }, + ]) + + 
const res = await service.authenticate("usernamePassword", { + body: { + email: "test@test.com", + password: "password", + }, + }) + + expect(res).toEqual({ + success: false, + error: "Invalid email or password", + }) + }) + }) +}) diff --git a/packages/authentication/package.json b/packages/authentication/package.json index 05fb16fbbf1cd..7fee6905fe378 100644 --- a/packages/authentication/package.json +++ b/packages/authentication/package.json @@ -56,6 +56,9 @@ "@mikro-orm/postgresql": "5.9.7", "awilix": "^8.0.0", "dotenv": "^16.1.4", - "knex": "2.4.2" + "jsonwebtoken": "^9.0.2", + "knex": "2.4.2", + "scrypt-kdf": "^2.0.1", + "simple-oauth2": "^5.0.0" } } diff --git a/packages/authentication/src/index.ts b/packages/authentication/src/index.ts index 46e1b299381e8..98c26a5e9e4d7 100644 --- a/packages/authentication/src/index.ts +++ b/packages/authentication/src/index.ts @@ -1,22 +1,11 @@ -import { moduleDefinition } from "./module-definition" -import { Modules } from "@medusajs/modules-sdk" -import * as Models from "@models" -import { ModulesSdkUtils } from "@medusajs/utils" +import { + moduleDefinition, + revertMigration, + runMigrations, +} from "./module-definition" export default moduleDefinition - -const migrationScriptOptions = { - moduleName: Modules.AUTHENTICATION, - models: Models, - pathToMigrations: __dirname + "/migrations", -} - -export const runMigrations = ModulesSdkUtils.buildMigrationScript( - migrationScriptOptions -) -export const revertMigration = ModulesSdkUtils.buildRevertMigrationScript( - migrationScriptOptions -) +export { revertMigration, runMigrations } export * from "./initialize" export * from "./loaders" diff --git a/packages/authentication/src/initialize/index.ts b/packages/authentication/src/initialize/index.ts index cfd284f1e965d..2328c27688c6c 100644 --- a/packages/authentication/src/initialize/index.ts +++ b/packages/authentication/src/initialize/index.ts @@ -1,13 +1,14 @@ import { ExternalModuleDeclaration, InternalModuleDeclaration, - MedusaModule, MODULE_PACKAGE_NAMES, + MedusaModule, Modules, } from "@medusajs/modules-sdk" import { IAuthenticationModuleService, ModulesSdkTypes } from "@medusajs/types" -import { moduleDefinition } from "../module-definition" + import { InitializeModuleInjectableDependencies } from "../types" +import { moduleDefinition } from "../module-definition" export const initialize = async ( options?: diff --git a/packages/authentication/src/loaders/providers.ts b/packages/authentication/src/loaders/providers.ts index d21a31db0bcfb..49cea3348344c 100644 --- a/packages/authentication/src/loaders/providers.ts +++ b/packages/authentication/src/loaders/providers.ts @@ -1,12 +1,16 @@ -import { LoaderOptions, ModulesSdkTypes } from "@medusajs/types" -import { asClass } from "awilix" import * as defaultProviders from "@providers" -import { AuthProviderService } from "@services" -import { ServiceTypes } from "@types" + +import { + AwilixContainer, + ClassOrFunctionReturning, + Constructor, + Resolver, + asClass, +} from "awilix" +import { LoaderOptions, ModulesSdkTypes } from "@medusajs/types" export default async ({ container, - options, }: LoaderOptions< | ModulesSdkTypes.ModuleServiceInitializeOptions | ModulesSdkTypes.ModuleServiceInitializeCustomDataLayerOptions @@ -17,33 +21,24 @@ export default async ({ const providersToLoad = Object.values(defaultProviders) - const authProviderService: AuthProviderService = - container.cradle["authProviderService"] - - const providers = await authProviderService.list({ - provider: 
providersToLoad.map((p) => p.PROVIDER), - }) - - const loadedProviders = new Map(providers.map((p) => [p.provider, p])) - - const providersToCreate: ServiceTypes.CreateAuthProviderDTO[] = [] - for (const provider of providersToLoad) { - container.registerAdd("providers", asClass(provider).singleton()) - container.register({ - [`provider_${provider.PROVIDER}`]: asClass(provider).singleton(), + [`auth_provider_${provider.PROVIDER}`]: asClass( + provider as Constructor + ).singleton(), }) + } - if (loadedProviders.has(provider.PROVIDER)) { - continue - } + container.register({ + [`auth_providers`]: asArray(providersToLoad), + }) +} - providersToCreate.push({ - provider: provider.PROVIDER, - name: provider.DISPLAY_NAME, - }) +function asArray( + resolvers: (ClassOrFunctionReturning | Resolver)[] +): { resolve: (container: AwilixContainer) => unknown[] } { + return { + resolve: (container: AwilixContainer) => + resolvers.map((resolver) => container.build(resolver)), } - - await authProviderService.create(providersToCreate) } diff --git a/packages/authentication/src/migrations/.snapshot-medusa-authentication.json b/packages/authentication/src/migrations/.snapshot-medusa-authentication.json index 0d3f1eaba19f5..a31fe796e7234 100644 --- a/packages/authentication/src/migrations/.snapshot-medusa-authentication.json +++ b/packages/authentication/src/migrations/.snapshot-medusa-authentication.json @@ -39,6 +39,15 @@ ], "mappedType": "enum" }, + "config": { + "name": "config", + "type": "jsonb", + "unsigned": false, + "autoincrement": false, + "primary": false, + "nullable": true, + "mappedType": "json" + }, "is_active": { "name": "is_active", "type": "boolean", @@ -77,6 +86,15 @@ "nullable": false, "mappedType": "text" }, + "entity_id": { + "name": "entity_id", + "type": "text", + "unsigned": false, + "autoincrement": false, + "primary": false, + "nullable": false, + "mappedType": "text" + }, "provider_id": { "name": "provider_id", "type": "text", @@ -117,6 +135,16 @@ "name": "auth_user", "schema": "public", "indexes": [ + { + "keyName": "IDX_auth_user_provider_entity_id", + "columnNames": [ + "provider_id", + "entity_id" + ], + "composite": true, + "primary": false, + "unique": true + }, { "keyName": "auth_user_pkey", "columnNames": [ diff --git a/packages/authentication/src/migrations/Migration20240104154451.ts b/packages/authentication/src/migrations/Migration20240104154451.ts deleted file mode 100644 index 2ffaf00de1668..0000000000000 --- a/packages/authentication/src/migrations/Migration20240104154451.ts +++ /dev/null @@ -1,21 +0,0 @@ -import { Migration } from '@mikro-orm/migrations'; - -export class Migration20240104154451 extends Migration { - - async up(): Promise { - this.addSql('create table "auth_provider" ("provider" text not null, "name" text not null, "domain" text check ("domain" in (\'all\', \'store\', \'admin\')) not null default \'all\', "is_active" boolean not null default false, constraint "auth_provider_pkey" primary key ("provider"));'); - - this.addSql('create table "auth_user" ("id" text not null, "provider_id" text null, "user_metadata" jsonb null, "app_metadata" jsonb null, "provider_metadata" jsonb null, constraint "auth_user_pkey" primary key ("id"));'); - - this.addSql('alter table "auth_user" add constraint "auth_user_provider_id_foreign" foreign key ("provider_id") references "auth_provider" ("provider") on delete cascade;'); - } - - async down(): Promise { - this.addSql('alter table "auth_user" drop constraint "auth_user_provider_id_foreign";'); - - this.addSql('drop 
table if exists "auth_provider" cascade;'); - - this.addSql('drop table if exists "auth_user" cascade;'); - } - -} diff --git a/packages/authentication/src/migrations/Migration20240122041959.ts b/packages/authentication/src/migrations/Migration20240122041959.ts new file mode 100644 index 0000000000000..d23f27a10750e --- /dev/null +++ b/packages/authentication/src/migrations/Migration20240122041959.ts @@ -0,0 +1,22 @@ +import { Migration } from '@mikro-orm/migrations'; + +export class Migration20240122041959 extends Migration { + + async up(): Promise { + this.addSql('create table if not exists "auth_provider" ("provider" text not null, "name" text not null, "domain" text check ("domain" in (\'all\', \'store\', \'admin\')) not null default \'all\', "config" jsonb null, "is_active" boolean not null default false, constraint "auth_provider_pkey" primary key ("provider"));'); + + this.addSql('create table if not exists "auth_user" ("id" text not null, "entity_id" text not null, "provider_id" text null, "user_metadata" jsonb null, "app_metadata" jsonb null, "provider_metadata" jsonb null, constraint "auth_user_pkey" primary key ("id"));'); + this.addSql('alter table "auth_user" add constraint "IDX_auth_user_provider_entity_id" unique ("provider_id", "entity_id");'); + + this.addSql('alter table "auth_user" add constraint if not exists "auth_user_provider_id_foreign" foreign key ("provider_id") references "auth_provider" ("provider") on delete cascade;'); + } + + async down(): Promise { + this.addSql('alter table "auth_user" drop constraint if exists "auth_user_provider_id_foreign";'); + + this.addSql('drop table if exists "auth_provider" cascade;'); + + this.addSql('drop table if exists "auth_user" cascade;'); + } + +} diff --git a/packages/authentication/src/models/auth-provider.ts b/packages/authentication/src/models/auth-provider.ts index 8febe9ff6f62f..0827186069bbd 100644 --- a/packages/authentication/src/models/auth-provider.ts +++ b/packages/authentication/src/models/auth-provider.ts @@ -5,9 +5,10 @@ import { PrimaryKey, Property, } from "@mikro-orm/core" + import { ProviderDomain } from "../types/repositories/auth-provider" -type OptionalFields = "domain" | "is_active" +type OptionalFields = "domain" | "is_active" | "config" @Entity() export default class AuthProvider { @@ -22,6 +23,9 @@ export default class AuthProvider { @Enum({ items: () => ProviderDomain, default: ProviderDomain.ALL }) domain: ProviderDomain = ProviderDomain.ALL + @Property({ columnType: "jsonb", nullable: true }) + config: Record | null = null + @Property({ columnType: "boolean", default: false }) is_active = false } diff --git a/packages/authentication/src/models/auth-user.ts b/packages/authentication/src/models/auth-user.ts index e90cb10130c03..0c10053175fae 100644 --- a/packages/authentication/src/models/auth-user.ts +++ b/packages/authentication/src/models/auth-user.ts @@ -1,25 +1,32 @@ -import { generateEntityId } from "@medusajs/utils" import { BeforeCreate, Cascade, Entity, + Index, ManyToOne, OnInit, OptionalProps, PrimaryKey, Property, + Unique, } from "@mikro-orm/core" + import AuthProvider from "./auth-provider" +import { generateEntityId } from "@medusajs/utils" type OptionalFields = "provider_metadata" | "app_metadata" | "user_metadata" @Entity() +@Unique({ properties: ["provider","entity_id" ], name: "IDX_auth_user_provider_entity_id" }) export default class AuthUser { [OptionalProps]: OptionalFields @PrimaryKey({ columnType: "text" }) id!: string + @Property({ columnType: "text" }) + entity_id: 
string + @ManyToOne(() => AuthProvider, { joinColumn: "provider", fieldName: "provider_id", diff --git a/packages/authentication/src/module-definition.ts b/packages/authentication/src/module-definition.ts index 89b9e240a662d..215e19c13aa7c 100644 --- a/packages/authentication/src/module-definition.ts +++ b/packages/authentication/src/module-definition.ts @@ -1,13 +1,31 @@ +import { Modules } from "@medusajs/modules-sdk" import { ModuleExports } from "@medusajs/types" +import { ModulesSdkUtils } from "@medusajs/utils" +import * as Models from "@models" import { AuthenticationModuleService } from "@services" import loadConnection from "./loaders/connection" import loadContainer from "./loaders/container" import loadProviders from "./loaders/providers" +const migrationScriptOptions = { + moduleName: Modules.AUTHENTICATION, + models: Models, + pathToMigrations: __dirname + "/migrations", +} + +export const runMigrations = ModulesSdkUtils.buildMigrationScript( + migrationScriptOptions +) +export const revertMigration = ModulesSdkUtils.buildRevertMigrationScript( + migrationScriptOptions +) + const service = AuthenticationModuleService const loaders = [loadContainer, loadConnection, loadProviders] as any export const moduleDefinition: ModuleExports = { service, loaders, + runMigrations, + revertMigration, } diff --git a/packages/authentication/src/providers/google.ts b/packages/authentication/src/providers/google.ts new file mode 100644 index 0000000000000..39bc0dbe9f6a8 --- /dev/null +++ b/packages/authentication/src/providers/google.ts @@ -0,0 +1,209 @@ +import { + AbstractAuthenticationModuleProvider, + MedusaError, +} from "@medusajs/utils" +import { AuthProviderService, AuthUserService } from "@services" +import jwt, { JwtPayload } from "jsonwebtoken" + +import { AuthProvider } from "@models" +import { AuthenticationResponse } from "@medusajs/types" +import { AuthorizationCode } from "simple-oauth2" +import url from "url" + +type InjectedDependencies = { + authUserService: AuthUserService + authProviderService: AuthProviderService +} + +type AuthenticationInput = { + connection: { encrypted: boolean } + url: string + headers: { host: string } + query: Record + body: Record +} + +type ProviderConfig = { + clientID: string + clientSecret: string + callbackURL: string +} + +class GoogleProvider extends AbstractAuthenticationModuleProvider { + public static PROVIDER = "google" + public static DISPLAY_NAME = "Google Authentication" + + protected readonly authUserSerivce_: AuthUserService + protected readonly authProviderService_: AuthProviderService + + constructor({ authUserService, authProviderService }: InjectedDependencies) { + super() + + this.authUserSerivce_ = authUserService + this.authProviderService_ = authProviderService + } + + private async validateConfig(config: Partial) { + if (!config.clientID) { + throw new Error("Google clientID is required") + } + + if (!config.clientSecret) { + throw new Error("Google clientSecret is required") + } + + if (!config.callbackURL) { + throw new Error("Google callbackUrl is required") + } + } + + private originalURL(req: AuthenticationInput) { + const tls = req.connection.encrypted, + host = req.headers.host, + protocol = tls ? 
"https" : "http", + path = req.url || "" + return protocol + "://" + host + path + } + + async getProviderConfig(req: AuthenticationInput): Promise { + const { config } = (await this.authProviderService_.retrieve( + GoogleProvider.PROVIDER + )) as AuthProvider & { config: ProviderConfig } + + this.validateConfig(config || {}) + + const { callbackURL } = config + + const parsedCallbackUrl = !url.parse(callbackURL).protocol + ? url.resolve(this.originalURL(req), callbackURL) + : callbackURL + + return { ...config, callbackURL: parsedCallbackUrl } + } + + async authenticate( + req: AuthenticationInput + ): Promise { + if (req.query && req.query.error) { + return { + success: false, + error: `${req.query.error_description}, read more at: ${req.query.error_uri}`, + } + } + + let config + + try { + config = await this.getProviderConfig(req) + } catch (error) { + return { success: false, error: error.message } + } + + let { callbackURL, clientID, clientSecret } = config + + const meta: ProviderConfig = { + clientID, + callbackURL, + clientSecret, + } + + const code = (req.query && req.query.code) || (req.body && req.body.code) + + // Redirect to google + if (!code) { + return this.getRedirect(meta) + } + + return await this.validateCallback(code, meta) + } + + // abstractable + private async validateCallback( + code: string, + { clientID, callbackURL, clientSecret }: ProviderConfig + ) { + const client = this.getAuthorizationCodeHandler({ clientID, clientSecret }) + + const tokenParams = { + code, + redirect_uri: callbackURL, + } + + try { + const accessToken = await client.getToken(tokenParams) + + return await this.verify_(accessToken.token.id_token) + } catch (error) { + return { success: false, error: error.message } + } + } + + // abstractable + async verify_(refreshToken: string) { + const jwtData = (await jwt.decode(refreshToken, { + complete: true, + })) as JwtPayload + const entity_id = jwtData.payload.email + + let authUser + + try { + authUser = await this.authUserSerivce_.retrieveByProviderAndEntityId( + entity_id, + GoogleProvider.PROVIDER + ) + } catch (error) { + if (error.type === MedusaError.Types.NOT_FOUND) { + authUser = await this.authUserSerivce_.create([ + { + entity_id, + provider_id: GoogleProvider.PROVIDER, + user_metadata: jwtData!.payload, + }, + ]) + } else { + return { success: false, error: error.message } + } + } + + return { success: true, authUser } + } + + // Abstractable + private getRedirect({ clientID, callbackURL, clientSecret }: ProviderConfig) { + const client = this.getAuthorizationCodeHandler({ clientID, clientSecret }) + + const location = client.authorizeURL({ + redirect_uri: callbackURL, + scope: "email profile", + }) + + return { success: true, location } + } + + private getAuthorizationCodeHandler({ + clientID, + clientSecret, + }: { + clientID: string + clientSecret: string + }) { + const config = { + client: { + id: clientID, + secret: clientSecret, + }, + auth: { + // TODO: abstract to not be google specific + authorizeHost: "https://accounts.google.com", + authorizePath: "/o/oauth2/v2/auth", + tokenHost: "https://www.googleapis.com", + tokenPath: "/oauth2/v4/token", + }, + } + + return new AuthorizationCode(config) + } +} + +export default GoogleProvider diff --git a/packages/authentication/src/providers/index.ts b/packages/authentication/src/providers/index.ts index 77a2d444f782a..e00b4b172759d 100644 --- a/packages/authentication/src/providers/index.ts +++ b/packages/authentication/src/providers/index.ts @@ -1 +1,2 @@ export { default as 
UsernamePasswordProvider } from "./username-password" +export { default as GoogleProvider } from "./google" \ No newline at end of file diff --git a/packages/authentication/src/providers/username-password.ts b/packages/authentication/src/providers/username-password.ts index 1b3ca03a17806..b19f26c5d7526 100644 --- a/packages/authentication/src/providers/username-password.ts +++ b/packages/authentication/src/providers/username-password.ts @@ -1,5 +1,8 @@ +import { AuthenticationResponse } from "@medusajs/types" + import { AuthUserService } from "@services" -import { AbstractAuthenticationModuleProvider } from "@medusajs/types" +import Scrypt from "scrypt-kdf" +import { AbstractAuthenticationModuleProvider, isString } from "@medusajs/utils" class UsernamePasswordProvider extends AbstractAuthenticationModuleProvider { public static PROVIDER = "usernamePassword" @@ -13,8 +16,48 @@ class UsernamePasswordProvider extends AbstractAuthenticationModuleProvider { this.authUserSerivce_ = AuthUserService } - async authenticate(userData: Record) { - return {} + async authenticate( + userData: Record + ): Promise { + const { email, password } = userData.body + + if (!password || !isString(password)) { + return { + success: false, + error: "Password should be a string", + } + } + + if (!email || !isString(email)) { + return { + success: false, + error: "Email should be a string", + } + } + + const authUser = await this.authUserSerivce_.retrieveByProviderAndEntityId( + email, + UsernamePasswordProvider.PROVIDER + ) + + const password_hash = authUser.provider_metadata?.password + + if (isString(password_hash)) { + const buf = Buffer.from(password_hash, "base64") + + const success = await Scrypt.verify(buf, password) + + if (success) { + delete authUser.provider_metadata!.password + + return { success, authUser: JSON.parse(JSON.stringify(authUser)) } + } + } + + return { + success: false, + error: "Invalid email or password", + } } } diff --git a/packages/authentication/src/services/auth-user.ts b/packages/authentication/src/services/auth-user.ts index 352d818a2a84d..8ee6285386225 100644 --- a/packages/authentication/src/services/auth-user.ts +++ b/packages/authentication/src/services/auth-user.ts @@ -1,8 +1,12 @@ -import { DAL } from "@medusajs/types" -import { ModulesSdkUtils } from "@medusajs/utils" +import { AuthenticationTypes, Context, DAL, FindConfig } from "@medusajs/types" +import { + InjectManager, + MedusaContext, + MedusaError, + ModulesSdkUtils, +} from "@medusajs/utils" import { AuthUser } from "@models" - -import { ServiceTypes } from "@types" +import { ServiceTypes, RepositoryTypes } from "@types" type InjectedDependencies = { authUserRepository: DAL.RepositoryService @@ -16,8 +20,38 @@ export default class AuthUserService< create: ServiceTypes.CreateAuthUserDTO } >(AuthUser) { + protected readonly authUserRepository_: RepositoryTypes.IAuthUserRepository constructor(container: InjectedDependencies) { // @ts-ignore super(...arguments) + this.authUserRepository_ = container.authUserRepository + } + + @InjectManager("authUserRepository_") + async retrieveByProviderAndEntityId< + TEntityMethod = AuthenticationTypes.AuthUserDTO + >( + entityId: string, + provider: string, + config: FindConfig = {}, + @MedusaContext() sharedContext: Context = {} + ): Promise { + const queryConfig = ModulesSdkUtils.buildQuery( + { entity_id: entityId, provider }, + { ...config, take: 1 } + ) + const [result] = await this.authUserRepository_.find( + queryConfig, + sharedContext + ) + + if (!result) { + throw new 
MedusaError( + MedusaError.Types.NOT_FOUND, + `AuthUser with entity_id: "${entityId}" and provider: "${provider}" not found` + ) + } + + return result } } diff --git a/packages/authentication/src/services/authentication-module.ts b/packages/authentication/src/services/authentication-module.ts index d47c094b3e1b1..f2fdf4948cd31 100644 --- a/packages/authentication/src/services/authentication-module.ts +++ b/packages/authentication/src/services/authentication-module.ts @@ -1,9 +1,11 @@ import { + AuthenticationResponse, AuthenticationTypes, Context, DAL, FindConfig, InternalModuleDeclaration, + MedusaContainer, ModuleJoinerConfig, } from "@medusajs/types" @@ -11,10 +13,13 @@ import { AuthProvider, AuthUser } from "@models" import { joinerConfig } from "../joiner-config" import { AuthProviderService, AuthUserService } from "@services" + import { + AbstractAuthenticationModuleProvider, InjectManager, InjectTransactionManager, MedusaContext, + MedusaError, } from "@medusajs/utils" import { AuthProviderDTO, @@ -25,6 +30,7 @@ import { FilterableAuthUserProps, UpdateAuthUserDTO, } from "@medusajs/types/dist/authentication/common" +import { ServiceTypes } from "@types" type InjectedDependencies = { baseRepository: DAL.RepositoryService @@ -37,6 +43,15 @@ export default class AuthenticationModuleService< TAuthProvider extends AuthProvider = AuthProvider > implements AuthenticationTypes.IAuthenticationModuleService { + __joinerConfig(): ModuleJoinerConfig { + return joinerConfig + } + + __hooks = { + onApplicationStart: async () => await this.createProvidersOnLoad(), + } + + protected __container__: MedusaContainer protected baseRepository_: DAL.RepositoryService protected authUserService_: AuthUserService @@ -50,6 +65,7 @@ export default class AuthenticationModuleService< }: InjectedDependencies, protected readonly moduleDeclaration: InternalModuleDeclaration ) { + this.__container__ = arguments[0] this.baseRepository_ = baseRepository this.authUserService_ = authUserService this.authProviderService_ = authProviderService @@ -142,7 +158,7 @@ export default class AuthenticationModuleService< protected async createAuthProviders_( data: any[], @MedusaContext() sharedContext: Context - ): Promise { + ): Promise { return await this.authProviderService_.create(data, sharedContext) } @@ -180,7 +196,7 @@ export default class AuthenticationModuleService< async updateAuthProvider_( data: AuthenticationTypes.UpdateAuthProviderDTO[], @MedusaContext() sharedContext: Context = {} - ): Promise { + ): Promise { return await this.authProviderService_.update(data, sharedContext) } @@ -336,7 +352,63 @@ export default class AuthenticationModuleService< await this.authUserService_.delete(ids, sharedContext) } - __joinerConfig(): ModuleJoinerConfig { - return joinerConfig + protected getRegisteredAuthenticationProvider( + provider: string + ): AbstractAuthenticationModuleProvider { + let containerProvider: AbstractAuthenticationModuleProvider + try { + containerProvider = this.__container__[`auth_provider_${provider}`] + } catch (error) { + throw new MedusaError( + MedusaError.Types.NOT_FOUND, + `AuthenticationProvider for provider: ${provider} wasn't registered in the module. 
Have you configured your options correctly?` + ) + } + + return containerProvider + } + + @InjectTransactionManager("baseRepository_") + async authenticate( + provider: string, + authenticationData: Record, + @MedusaContext() sharedContext: Context = {} + ): Promise { + let registeredProvider + + try { + await this.retrieveAuthProvider(provider, {}) + + registeredProvider = this.getRegisteredAuthenticationProvider(provider) + + return await registeredProvider.authenticate(authenticationData) + } catch (error) { + return { success: false, error: error.message } + } + } + + private async createProvidersOnLoad() { + const providersToLoad = this.__container__["auth_providers"] + + const providers = await this.authProviderService_.list({ + provider: providersToLoad.map((p) => p.provider), + }) + + const loadedProvidersMap = new Map(providers.map((p) => [p.provider, p])) + + const providersToCreate: ServiceTypes.CreateAuthProviderDTO[] = [] + + for (const provider of providersToLoad) { + if (loadedProvidersMap.has(provider.provider)) { + continue + } + + providersToCreate.push({ + provider: provider.provider, + name: provider.displayName, + }) + } + + await this.authProviderService_.create(providersToCreate) } } diff --git a/packages/authentication/src/types/repositories/auth-provider.ts b/packages/authentication/src/types/repositories/auth-provider.ts index 31f5ce087a5f9..6f1ea0dc88e0f 100644 --- a/packages/authentication/src/types/repositories/auth-provider.ts +++ b/packages/authentication/src/types/repositories/auth-provider.ts @@ -5,6 +5,7 @@ export type CreateAuthProviderDTO = { name: string domain?: ProviderDomain is_active?: boolean + config?: Record } export type UpdateAuthProviderDTO = { @@ -13,6 +14,7 @@ export type UpdateAuthProviderDTO = { name?: string domain?: ProviderDomain is_active?: boolean + config?: Record } provider: AuthProvider } diff --git a/packages/authentication/src/types/repositories/auth-user.ts b/packages/authentication/src/types/repositories/auth-user.ts index 33315b20e1600..541f465838772 100644 --- a/packages/authentication/src/types/repositories/auth-user.ts +++ b/packages/authentication/src/types/repositories/auth-user.ts @@ -2,6 +2,7 @@ import { AuthUser } from "@models" export type CreateAuthUserDTO = { provider_id: string + entity_id: string provider_metadata?: Record user_metadata?: Record app_metadata?: Record diff --git a/packages/authentication/src/types/services/auth-provider.ts b/packages/authentication/src/types/services/auth-provider.ts index 8f1307ab07c29..8ef5d9b3b9bdd 100644 --- a/packages/authentication/src/types/services/auth-provider.ts +++ b/packages/authentication/src/types/services/auth-provider.ts @@ -3,6 +3,7 @@ export type AuthProviderDTO = { name: string domain: ProviderDomain is_active: boolean + config: Record } export type CreateAuthProviderDTO = { @@ -10,6 +11,7 @@ export type CreateAuthProviderDTO = { name: string domain?: ProviderDomain is_active?: boolean + config?: Record } export type UpdateAuthProviderDTO = { @@ -17,6 +19,7 @@ export type UpdateAuthProviderDTO = { name?: string domain?: ProviderDomain is_active?: boolean + config?: Record } export enum ProviderDomain { diff --git a/packages/authentication/src/types/services/auth-user.ts b/packages/authentication/src/types/services/auth-user.ts index 18c8899303ee6..c059e980f8b9c 100644 --- a/packages/authentication/src/types/services/auth-user.ts +++ b/packages/authentication/src/types/services/auth-user.ts @@ -3,6 +3,7 @@ import { AuthProviderDTO } from "./auth-provider" export 
type AuthUserDTO = { id: string provider_id: string + entity_id: string provider: AuthProviderDTO provider_metadata?: Record user_metadata: Record @@ -10,6 +11,7 @@ export type AuthUserDTO = { } export type CreateAuthUserDTO = { + entity_id: string provider_id: string provider_metadata?: Record user_metadata?: Record diff --git a/packages/cart/src/index.ts b/packages/cart/src/index.ts index 13081ed6847db..d449f7354469a 100644 --- a/packages/cart/src/index.ts +++ b/packages/cart/src/index.ts @@ -1,23 +1,10 @@ -import { Modules } from "@medusajs/modules-sdk" -import { ModulesSdkUtils } from "@medusajs/utils" -import * as Models from "@models" -import { moduleDefinition } from "./module-definition" +import { + moduleDefinition, + revertMigration, + runMigrations, +} from "./module-definition" export default moduleDefinition - -const migrationScriptOptions = { - moduleName: Modules.CART, - models: Models, - pathToMigrations: __dirname + "/migrations", -} - -export const runMigrations = ModulesSdkUtils.buildMigrationScript( - migrationScriptOptions -) -export const revertMigration = ModulesSdkUtils.buildRevertMigrationScript( - migrationScriptOptions -) +export { revertMigration, runMigrations } export * from "./initialize" -export * from "./loaders" - diff --git a/packages/cart/src/module-definition.ts b/packages/cart/src/module-definition.ts index 133814940b6ff..1db51b9c5cf38 100644 --- a/packages/cart/src/module-definition.ts +++ b/packages/cart/src/module-definition.ts @@ -1,12 +1,30 @@ +import { Modules } from "@medusajs/modules-sdk" import { ModuleExports } from "@medusajs/types" +import { ModulesSdkUtils } from "@medusajs/utils" +import * as Models from "@models" import loadConnection from "./loaders/connection" import loadContainer from "./loaders/container" import { CartModuleService } from "./services" +const migrationScriptOptions = { + moduleName: Modules.CART, + models: Models, + pathToMigrations: __dirname + "/migrations", +} + +export const runMigrations = ModulesSdkUtils.buildMigrationScript( + migrationScriptOptions +) +export const revertMigration = ModulesSdkUtils.buildRevertMigrationScript( + migrationScriptOptions +) + const service = CartModuleService const loaders = [loadContainer, loadConnection] as any export const moduleDefinition: ModuleExports = { service, loaders, + runMigrations, + revertMigration, } diff --git a/packages/customer/src/index.ts b/packages/customer/src/index.ts index c84e04625b609..d449f7354469a 100644 --- a/packages/customer/src/index.ts +++ b/packages/customer/src/index.ts @@ -1,24 +1,10 @@ -import { Modules } from "@medusajs/modules-sdk" -import { ModulesSdkUtils } from "@medusajs/utils" -import * as Models from "@models" - -import { moduleDefinition } from "./module-definition" +import { + moduleDefinition, + revertMigration, + runMigrations, +} from "./module-definition" export default moduleDefinition - -const migrationScriptOptions = { - moduleName: Modules.CUSTOMER, - models: Models, - pathToMigrations: __dirname + "/migrations", -} - -export const revertMigration = ModulesSdkUtils.buildRevertMigrationScript( - migrationScriptOptions -) - -export const runMigration = ModulesSdkUtils.buildMigrationScript( - migrationScriptOptions -) +export { revertMigration, runMigrations } export * from "./initialize" -export * from "./loaders" diff --git a/packages/customer/src/module-definition.ts b/packages/customer/src/module-definition.ts index d3e28a4c78b74..81e86620beece 100644 --- a/packages/customer/src/module-definition.ts +++ 
b/packages/customer/src/module-definition.ts @@ -1,12 +1,31 @@ +import { Modules } from "@medusajs/modules-sdk" import { ModuleExports } from "@medusajs/types" +import { ModulesSdkUtils } from "@medusajs/utils" +import * as Models from "@models" import { CustomerModuleService } from "@services" import loadConnection from "./loaders/connection" import loadContainer from "./loaders/container" +const migrationScriptOptions = { + moduleName: Modules.CUSTOMER, + models: Models, + pathToMigrations: __dirname + "/migrations", +} + +export const revertMigration = ModulesSdkUtils.buildRevertMigrationScript( + migrationScriptOptions +) + +export const runMigrations = ModulesSdkUtils.buildMigrationScript( + migrationScriptOptions +) + const service = CustomerModuleService const loaders = [loadContainer, loadConnection] as any export const moduleDefinition: ModuleExports = { service, loaders, + runMigrations, + revertMigration, } diff --git a/packages/design-system/ui/src/components/avatar/avatar.tsx b/packages/design-system/ui/src/components/avatar/avatar.tsx index 9828742510521..9a9df41d1db61 100644 --- a/packages/design-system/ui/src/components/avatar/avatar.tsx +++ b/packages/design-system/ui/src/components/avatar/avatar.tsx @@ -14,18 +14,24 @@ const avatarVariants = cva({ rounded: "rounded-full", }, size: { - xsmall: "h-5 w-5", - small: "h-6 w-6", + "2xsmall": "h-5 w-5", + xsmall: "h-6 w-6", + small: "h-7 w-7", base: "h-8 w-8", large: "h-10 w-10", xlarge: "h-12 w-12", }, }, compoundVariants: [ + { + variant: "squared", + size: "2xsmall", + className: "rounded-md", + }, { variant: "squared", size: "xsmall", - className: "rounded-[4px]", + className: "rounded-md", }, { variant: "squared", @@ -62,14 +68,20 @@ const innerVariants = cva({ rounded: "rounded-full", }, size: { - xsmall: "txt-compact-xsmall-plus h-4 w-4", - small: "txt-compact-xsmall-plus h-5 w-5", + "2xsmall": "txt-compact-xsmall-plus h-4 w-4", + xsmall: "txt-compact-xsmall-plus h-5 w-5", + small: "txt-compact-small-plus h-6 w-6", base: "txt-compact-small-plus h-7 w-7", large: "txt-compact-medium-plus h-9 w-9", xlarge: "txt-compact-large-plus h-11 w-11", }, }, compoundVariants: [ + { + variant: "squared", + size: "2xsmall", + className: "rounded-sm", + }, { variant: "squared", size: "xsmall", diff --git a/packages/design-system/ui/src/components/copy/copy.tsx b/packages/design-system/ui/src/components/copy/copy.tsx index 9eceab6771821..0fc55300049d0 100644 --- a/packages/design-system/ui/src/components/copy/copy.tsx +++ b/packages/design-system/ui/src/components/copy/copy.tsx @@ -2,77 +2,115 @@ import { Tooltip } from "@/components/tooltip" import { clx } from "@/utils/clx" -import { CheckCircleSolid, SquareTwoStack } from "@medusajs/icons" +import { + CheckCircleMiniSolid, + CheckCircleSolid, + SquareTwoStack, + SquareTwoStackMini, +} from "@medusajs/icons" import { Slot } from "@radix-ui/react-slot" import copy from "copy-to-clipboard" import React, { useState } from "react" type CopyProps = React.HTMLAttributes & { content: string + variant?: "mini" | "default" | null asChild?: boolean } /** * This component is based on the `button` element and supports all of its props */ -const Copy = React.forwardRef< - HTMLButtonElement, - CopyProps ->(({ - children, - className, - /** - * The content to copy. - */ - content, - /** - * Whether to remove the wrapper `button` element and use the - * passed child element instead. 
- */ - asChild = false, - ...props - }: CopyProps, ref) => { - const [done, setDone] = useState(false) - const [open, setOpen] = useState(false) - const [text, setText] = useState("Copy") +const Copy = React.forwardRef( + ( + { + children, + className, + /** + * The content to copy. + */ + content, + /** + * The variant of the copy button. + */ + variant = "default", + /** + * Whether to remove the wrapper `button` element and use the + * passed child element instead. + */ + asChild = false, + ...props + }: CopyProps, + ref + ) => { + const [done, setDone] = useState(false) + const [open, setOpen] = useState(false) + const [text, setText] = useState("Copy") - const copyToClipboard = () => { - setDone(true) - copy(content) + const copyToClipboard = ( + e: + | React.MouseEvent + | React.MouseEvent + ) => { + e.stopPropagation() - setTimeout(() => { - setDone(false) - }, 2000) - } + setDone(true) + copy(content) + + setTimeout(() => { + setDone(false) + }, 2000) + } - React.useEffect(() => { - if (done) { - setText("Copied") - return + React.useEffect(() => { + if (done) { + setText("Copied") + return + } + + setTimeout(() => { + setText("Copy") + }, 500) + }, [done]) + + const isDefaultVariant = ( + variant?: string | null + ): variant is "default" => { + return variant === "default" } - setTimeout(() => { - setText("Copy") - }, 500) - }, [done]) + const isDefault = isDefaultVariant(variant) - const Component = asChild ? Slot : "button" + const Component = asChild ? Slot : "button" - return ( - - - {children ? children : done ? : } - - - ) -}) + return ( + + + {children ? ( + children + ) : done ? ( + isDefault ? ( + + ) : ( + + ) + ) : isDefault ? ( + + ) : ( + + )} + + + ) + } +) Copy.displayName = "Copy" export { Copy } diff --git a/packages/design-system/ui/src/components/status-badge/status-badge.tsx b/packages/design-system/ui/src/components/status-badge/status-badge.tsx index 6f7af3c591292..80a955db82f32 100644 --- a/packages/design-system/ui/src/components/status-badge/status-badge.tsx +++ b/packages/design-system/ui/src/components/status-badge/status-badge.tsx @@ -1,11 +1,28 @@ import * as React from "react" import { clx } from "@/utils/clx" +import { VariantProps, cva } from "cva" + +const statusBadgeVariants = cva({ + base: "flex items-center justify-center w-5 h-[18px] [&_div]:w-2 [&_div]:h-2 [&_div]:rounded-sm", + variants: { + color: { + green: "[&_div]:bg-ui-tag-green-icon", + red: "[&_div]:bg-ui-tag-red-icon", + orange: "[&_div]:bg-ui-tag-orange-icon", + blue: "[&_div]:bg-ui-tag-blue-icon", + purple: "[&_div]:bg-ui-tag-purple-icon", + grey: "[&_div]:bg-ui-tag-neutral-icon", + }, + }, + defaultVariants: { + color: "grey", + }, +}) interface StatusBadgeProps - extends Omit, "color"> { - color?: "green" | "red" | "blue" | "orange" | "grey" | "purple" -} + extends Omit, "color">, + VariantProps {} /** * This component is based on the span element and supports all of its props @@ -27,31 +44,14 @@ const StatusBadge = React.forwardRef( - - - - - +
+
+
{children} ) diff --git a/packages/inventory/src/index.ts b/packages/inventory/src/index.ts index d3159f3c9a771..1cb6f68e0bb0f 100644 --- a/packages/inventory/src/index.ts +++ b/packages/inventory/src/index.ts @@ -1,8 +1,6 @@ -import { revertMigration, runMigrations } from "./migrations/run-migration" import { moduleDefinition } from "./module-definition" export default moduleDefinition export * from "./initialize" export { revertMigration, runMigrations } from "./migrations/run-migration" -export * from "./types" diff --git a/packages/medusa/src/api-v2/admin/promotions/validators.ts b/packages/medusa/src/api-v2/admin/promotions/validators.ts index ff161dc2151e5..cd75d04975825 100644 --- a/packages/medusa/src/api-v2/admin/promotions/validators.ts +++ b/packages/medusa/src/api-v2/admin/promotions/validators.ts @@ -1,3 +1,4 @@ +import { PromotionTypeValues } from "@medusajs/types" import { ApplicationMethodAllocation, ApplicationMethodTargetType, @@ -15,6 +16,7 @@ import { IsOptional, IsString, Validate, + ValidateIf, ValidateNested, } from "class-validator" import { FindParams, extendedFindParamsMixin } from "../../../types/common" @@ -43,7 +45,7 @@ export class AdminPostPromotionsReq { @IsOptional() @IsEnum(PromotionType) - type?: PromotionType + type?: PromotionTypeValues @IsOptional() @IsString() @@ -56,8 +58,8 @@ export class AdminPostPromotionsReq { @IsNotEmpty() @ValidateNested() - @Type(() => ApplicationMethod) - application_method: ApplicationMethod + @Type(() => ApplicationMethodsPostReq) + application_method: ApplicationMethodsPostReq @IsOptional() @IsArray() @@ -83,7 +85,7 @@ export class PromotionRule { values: string[] } -export class ApplicationMethod { +export class ApplicationMethodsPostReq { @IsOptional() @IsString() description?: string @@ -113,6 +115,68 @@ export class ApplicationMethod { @ValidateNested({ each: true }) @Type(() => PromotionRule) target_rules?: PromotionRule[] + + @ValidateIf((data) => data.type === PromotionType.BUYGET) + @IsArray() + @ValidateNested({ each: true }) + @Type(() => PromotionRule) + buy_rules?: PromotionRule[] + + @ValidateIf((data) => data.type === PromotionType.BUYGET) + @IsNotEmpty() + @IsNumber() + apply_to_quantity?: number + + @ValidateIf((data) => data.type === PromotionType.BUYGET) + @IsNotEmpty() + @IsNumber() + buy_rules_min_quantity?: number +} + +export class ApplicationMethodsMethodPostReq { + @IsOptional() + @IsString() + description?: string + + @IsOptional() + @IsString() + value?: string + + @IsOptional() + @IsNumber() + max_quantity?: number + + @IsOptional() + @IsEnum(ApplicationMethodType) + type?: ApplicationMethodType + + @IsOptional() + @IsEnum(ApplicationMethodTargetType) + target_type?: ApplicationMethodTargetType + + @IsOptional() + @IsEnum(ApplicationMethodAllocation) + allocation?: ApplicationMethodAllocation + + @IsOptional() + @IsArray() + @ValidateNested({ each: true }) + @Type(() => PromotionRule) + target_rules?: PromotionRule[] + + @IsOptional() + @IsArray() + @ValidateNested({ each: true }) + @Type(() => PromotionRule) + buy_rules?: PromotionRule[] + + @IsOptional() + @IsNumber() + apply_to_quantity?: number + + @IsOptional() + @IsNumber() + buy_rules_min_quantity?: number } export class AdminPostPromotionsPromotionReq { @@ -141,8 +205,8 @@ export class AdminPostPromotionsPromotionReq { @IsOptional() @ValidateNested() - @Type(() => ApplicationMethod) - application_method?: ApplicationMethod + @Type(() => ApplicationMethodsMethodPostReq) + application_method?: ApplicationMethodsMethodPostReq @IsOptional() 
@IsArray() diff --git a/packages/medusa/src/api/middlewares/transform-body.ts b/packages/medusa/src/api/middlewares/transform-body.ts index 4eebb03a987aa..026ee29dc8d54 100644 --- a/packages/medusa/src/api/middlewares/transform-body.ts +++ b/packages/medusa/src/api/middlewares/transform-body.ts @@ -1,6 +1,6 @@ +import { ValidatorOptions } from "class-validator" import { NextFunction, Request, Response } from "express" import { ClassConstructor } from "../../types/global" -import { ValidatorOptions } from "class-validator" import { validator } from "../../utils/validator" export function transformBody( diff --git a/packages/medusa/src/api/routes/admin/index.js b/packages/medusa/src/api/routes/admin/index.js index 855e5ed084717..da882ef408d05 100644 --- a/packages/medusa/src/api/routes/admin/index.js +++ b/packages/medusa/src/api/routes/admin/index.js @@ -1,5 +1,6 @@ import cors from "cors" import { Router } from "express" +import { parseCorsOrigins } from "medusa-core-utils" import middlewares from "../../middlewares" import analyticsConfigs from "./analytics-configs" import appRoutes from "./apps" @@ -18,16 +19,18 @@ import noteRoutes from "./notes" import notificationRoutes from "./notifications" import orderEditRoutes from "./order-edits" import orderRoutes from "./orders" +import paymentCollectionRoutes from "./payment-collections" +import paymentRoutes from "./payments" import priceListRoutes from "./price-lists" +import productCategoryRoutes from "./product-categories" import productTagRoutes from "./product-tags" import productTypesRoutes from "./product-types" -import publishableApiKeyRoutes from "./publishable-api-keys" import productRoutes from "./products" +import publishableApiKeyRoutes from "./publishable-api-keys" import regionRoutes from "./regions" import reservationRoutes from "./reservations" import returnReasonRoutes from "./return-reasons" import returnRoutes from "./returns" -import reservationRoutes from "./reservations" import salesChannelRoutes from "./sales-channels" import shippingOptionRoutes from "./shipping-options" import shippingProfileRoutes from "./shipping-profiles" @@ -38,10 +41,6 @@ import taxRateRoutes from "./tax-rates" import uploadRoutes from "./uploads" import userRoutes, { unauthenticatedUserRoutes } from "./users" import variantRoutes from "./variants" -import paymentCollectionRoutes from "./payment-collections" -import paymentRoutes from "./payments" -import productCategoryRoutes from "./product-categories" -import { parseCorsOrigins } from "medusa-core-utils" const route = Router() diff --git a/packages/medusa/src/loaders/helpers/routing/__fixtures__/server/index.js b/packages/medusa/src/loaders/helpers/routing/__fixtures__/server/index.js index 11f593414066b..6f13e301cb120 100644 --- a/packages/medusa/src/loaders/helpers/routing/__fixtures__/server/index.js +++ b/packages/medusa/src/loaders/helpers/routing/__fixtures__/server/index.js @@ -3,6 +3,7 @@ import { ModulesDefinition, registerMedusaModule, } from "@medusajs/modules-sdk" +import { ContainerRegistrationKeys } from "@medusajs/utils" import { asValue, createContainer } from "awilix" import express from "express" import jwt from "jsonwebtoken" @@ -63,6 +64,7 @@ export const createServer = async (rootDir) => { return this }.bind(container) + container.register(ContainerRegistrationKeys.PG_CONNECTION, asValue({})) container.register("featureFlagRouter", asValue(featureFlagRouter)) container.register("configModule", asValue(config)) container.register({ diff --git 
a/packages/medusa/src/loaders/index.ts b/packages/medusa/src/loaders/index.ts index 308879a10a061..7eef903059c3c 100644 --- a/packages/medusa/src/loaders/index.ts +++ b/packages/medusa/src/loaders/index.ts @@ -51,7 +51,7 @@ async function loadLegacyModulesEntities(configModules, container) { continue } - let modulePath = isString(moduleConfig) + const modulePath = isString(moduleConfig) ? moduleConfig : (moduleConfig as InternalModuleDeclaration).resolve ?? (definition.defaultPackage as string) @@ -69,7 +69,7 @@ async function loadLegacyModulesEntities(configModules, container) { continue } - const module = await import(modulePath) + const module = await import(modulePath as string) if (module.default?.models) { module.default.models.map((model) => diff --git a/packages/modules-sdk/medusajs-modules-sdk-1.12.6.tgz b/packages/modules-sdk/medusajs-modules-sdk-1.12.6.tgz new file mode 100644 index 0000000000000..968452f2038d6 Binary files /dev/null and b/packages/modules-sdk/medusajs-modules-sdk-1.12.6.tgz differ diff --git a/packages/modules-sdk/src/definitions.ts b/packages/modules-sdk/src/definitions.ts index 503ad6163f30e..4be77eb5e331a 100644 --- a/packages/modules-sdk/src/definitions.ts +++ b/packages/modules-sdk/src/definitions.ts @@ -7,6 +7,7 @@ import { import { upperCaseFirst } from "@medusajs/utils" export enum Modules { + LINK = "linkModules", EVENT_BUS = "eventBus", STOCK_LOCATION = "stockLocationService", INVENTORY = "inventoryService", @@ -15,6 +16,7 @@ export enum Modules { PRICING = "pricingService", PROMOTION = "promotion", AUTHENTICATION = "authentication", + WORKFLOW_ENGINE = "workflows", CART = "cart", CUSTOMER = "customer", PAYMENT = "payment", @@ -29,12 +31,14 @@ export enum ModuleRegistrationName { PRICING = "pricingModuleService", PROMOTION = "promotionModuleService", AUTHENTICATION = "authenticationModuleService", + WORKFLOW_ENGINE = "workflowsModuleService", CART = "cartModuleService", CUSTOMER = "customerModuleService", PAYMENT = "paymentModuleService", } export const MODULE_PACKAGE_NAMES = { + [Modules.LINK]: "@medusajs/link-modules", [Modules.PRODUCT]: "@medusajs/product", [Modules.EVENT_BUS]: "@medusajs/event-bus-local", [Modules.STOCK_LOCATION]: "@medusajs/stock-location", @@ -43,6 +47,7 @@ export const MODULE_PACKAGE_NAMES = { [Modules.PRICING]: "@medusajs/pricing", [Modules.PROMOTION]: "@medusajs/promotion", [Modules.AUTHENTICATION]: "@medusajs/authentication", + [Modules.WORKFLOW_ENGINE]: "@medusajs/workflow-engine-inmemory", [Modules.CART]: "@medusajs/cart", [Modules.CUSTOMER]: "@medusajs/customer", [Modules.PAYMENT]: "@medusajs/payment", @@ -163,6 +168,20 @@ export const ModulesDefinition: { [key: string | Modules]: ModuleDefinition } = resources: MODULE_RESOURCE_TYPE.SHARED, }, }, + [Modules.WORKFLOW_ENGINE]: { + key: Modules.WORKFLOW_ENGINE, + registrationName: ModuleRegistrationName.WORKFLOW_ENGINE, + defaultPackage: false, + label: upperCaseFirst(ModuleRegistrationName.WORKFLOW_ENGINE), + isRequired: false, + canOverride: true, + isQueryable: true, + dependencies: ["logger"], + defaultModuleDeclaration: { + scope: MODULE_SCOPE.INTERNAL, + resources: MODULE_RESOURCE_TYPE.SHARED, + }, + }, [Modules.CART]: { key: Modules.CART, registrationName: ModuleRegistrationName.CART, diff --git a/packages/modules-sdk/src/loaders/utils/load-internal.ts b/packages/modules-sdk/src/loaders/utils/load-internal.ts index 57f695cd78d7e..08a20747434f9 100644 --- a/packages/modules-sdk/src/loaders/utils/load-internal.ts +++ 
b/packages/modules-sdk/src/loaders/utils/load-internal.ts @@ -121,11 +121,13 @@ export async function loadInternalModule( } export async function loadModuleMigrations( - resolution: ModuleResolution + resolution: ModuleResolution, + moduleExports?: ModuleExports ): Promise<[Function | undefined, Function | undefined]> { let loadedModule: ModuleExports try { - loadedModule = await import(resolution.resolutionPath as string) + loadedModule = + moduleExports ?? (await import(resolution.resolutionPath as string)) return [loadedModule.runMigrations, loadedModule.revertMigration] } catch { diff --git a/packages/modules-sdk/src/medusa-app.ts b/packages/modules-sdk/src/medusa-app.ts index 062a38a5ed4b0..fff2bf0fec066 100644 --- a/packages/modules-sdk/src/medusa-app.ts +++ b/packages/modules-sdk/src/medusa-app.ts @@ -1,12 +1,15 @@ +import { mergeTypeDefs } from "@graphql-tools/merge" +import { makeExecutableSchema } from "@graphql-tools/schema" +import { RemoteFetchDataCallback } from "@medusajs/orchestration" import { ExternalModuleDeclaration, InternalModuleDeclaration, LoadedModule, - LoaderOptions, MedusaContainer, MODULE_RESOURCE_TYPE, MODULE_SCOPE, ModuleDefinition, + ModuleExports, ModuleJoinerConfig, ModuleServiceInitializeOptions, RemoteJoinerQuery, @@ -15,23 +18,21 @@ import { ContainerRegistrationKeys, createMedusaContainer, isObject, + isString, ModulesSdkUtils, } from "@medusajs/utils" +import { asValue } from "awilix" import { MODULE_PACKAGE_NAMES, ModuleRegistrationName, Modules, } from "./definitions" import { MedusaModule } from "./medusa-module" -import { RemoteFetchDataCallback } from "@medusajs/orchestration" import { RemoteLink } from "./remote-link" import { RemoteQuery } from "./remote-query" import { cleanGraphQLSchema } from "./utils" -import { asValue } from "awilix" -import { makeExecutableSchema } from "@graphql-tools/schema" -import { mergeTypeDefs } from "@graphql-tools/merge" -const LinkModulePackage = "@medusajs/link-modules" +const LinkModulePackage = MODULE_PACKAGE_NAMES[Modules.LINK] export type RunMigrationFn = ( options?: ModuleServiceInitializeOptions, @@ -71,6 +72,7 @@ async function loadModules(modulesConfig, sharedContainer) { Object.keys(modulesConfig).map(async (moduleName) => { const mod = modulesConfig[moduleName] let path: string + let moduleExports: ModuleExports | undefined = undefined let declaration: any = {} let definition: ModuleDefinition | undefined = undefined @@ -78,6 +80,9 @@ async function loadModules(modulesConfig, sharedContainer) { const mod_ = mod as unknown as InternalModuleDeclaration path = mod_.resolve ?? MODULE_PACKAGE_NAMES[moduleName] definition = mod_.definition + moduleExports = !isString(mod_.resolve) + ? (mod_.resolve as ModuleExports) + : undefined declaration = { ...mod } delete declaration.definition } else { @@ -98,6 +103,7 @@ async function loadModules(modulesConfig, sharedContainer) { declaration, sharedContainer, moduleDefinition: definition, + moduleExports, })) as LoadedModule const service = loaded[moduleName] @@ -119,9 +125,16 @@ async function loadModules(modulesConfig, sharedContainer) { return allModules } -async function initializeLinks(config, linkModules, injectedDependencies) { +async function initializeLinks({ + config, + linkModules, + injectedDependencies, + moduleExports, +}) { try { - const { initialize, runMigrations } = await import(LinkModulePackage) + const { initialize, runMigrations } = + moduleExports ?? 
(await import(LinkModulePackage)) + const linkResolution = await initialize( config, linkModules, @@ -140,6 +153,10 @@ async function initializeLinks(config, linkModules, injectedDependencies) { } } +function isMedusaModule(mod) { + return typeof mod?.initialize === "function" +} + function cleanAndMergeSchema(loadedSchema) { const { schema: cleanedSchema, notFound } = cleanGraphQLSchema(loadedSchema) const mergedSchema = mergeTypeDefs(cleanedSchema) @@ -174,32 +191,28 @@ export type MedusaAppOutput = { runMigrations: RunMigrationFn } -export async function MedusaApp( - { - sharedContainer, - sharedResourcesConfig, - servicesConfig, - modulesConfigPath, - modulesConfigFileName, - modulesConfig, - linkModules, - remoteFetchData, - injectedDependencies, - }: { - sharedContainer?: MedusaContainer - sharedResourcesConfig?: SharedResources - loadedModules?: LoadedModule[] - servicesConfig?: ModuleJoinerConfig[] - modulesConfigPath?: string - modulesConfigFileName?: string - modulesConfig?: MedusaModuleConfig - linkModules?: ModuleJoinerConfig | ModuleJoinerConfig[] - remoteFetchData?: RemoteFetchDataCallback - injectedDependencies?: any - } = { - injectedDependencies: {}, - } -): Promise<{ +export async function MedusaApp({ + sharedContainer, + sharedResourcesConfig, + servicesConfig, + modulesConfigPath, + modulesConfigFileName, + modulesConfig, + linkModules, + remoteFetchData, + injectedDependencies, +}: { + sharedContainer?: MedusaContainer + sharedResourcesConfig?: SharedResources + loadedModules?: LoadedModule[] + servicesConfig?: ModuleJoinerConfig[] + modulesConfigPath?: string + modulesConfigFileName?: string + modulesConfig?: MedusaModuleConfig + linkModules?: ModuleJoinerConfig | ModuleJoinerConfig[] + remoteFetchData?: RemoteFetchDataCallback + injectedDependencies?: any +} = {}): Promise<{ modules: Record link: RemoteLink | undefined query: ( @@ -210,6 +223,8 @@ export async function MedusaApp( notFound?: Record> runMigrations: RunMigrationFn }> { + injectedDependencies ??= {} + const sharedContainer_ = createMedusaContainer({}, sharedContainer) const modules: MedusaModuleConfig = @@ -230,6 +245,12 @@ export async function MedusaApp( registerCustomJoinerConfigs(servicesConfig ?? []) if ( + sharedResourcesConfig?.database?.connection && + !injectedDependencies[ContainerRegistrationKeys.PG_CONNECTION] + ) { + injectedDependencies[ContainerRegistrationKeys.PG_CONNECTION] = + sharedResourcesConfig.database.connection + } else if ( dbData.clientUrl && !injectedDependencies[ContainerRegistrationKeys.PG_CONNECTION] ) { @@ -241,8 +262,10 @@ export async function MedusaApp( } // remove the link module from the modules - const linkModule = modules[LinkModulePackage] + const linkModule = modules[LinkModulePackage] ?? modules[Modules.LINK] delete modules[LinkModulePackage] + delete modules[Modules.LINK] + let linkModuleOptions = {} if (isObject(linkModule)) { @@ -267,11 +290,12 @@ export async function MedusaApp( remoteLink, linkResolution, runMigrations: linkModuleMigration, - } = await initializeLinks( - linkModuleOptions, + } = await initializeLinks({ + config: linkModuleOptions, linkModules, - injectedDependencies - ) + injectedDependencies, + moduleExports: isMedusaModule(linkModule) ? 
linkModule : undefined, + }) const loadedSchema = getLoadedSchema() const { schema, notFound } = cleanAndMergeSchema(loadedSchema) @@ -294,16 +318,29 @@ export async function MedusaApp( for (const moduleName of Object.keys(allModules)) { const moduleResolution = MedusaModule.getModuleResolutions(moduleName) + if (!moduleResolution.options?.database) { + moduleResolution.options ??= {} + moduleResolution.options.database = { + ...(sharedResourcesConfig?.database ?? {}), + } + } + await MedusaModule.migrateUp( moduleResolution.definition.key, moduleResolution.resolutionPath as string, - moduleResolution.options + moduleResolution.options, + moduleResolution.moduleExports ) } + const linkModuleOpt = { ...linkModuleOptions } + linkModuleOpt.database ??= { + ...(sharedResourcesConfig?.database ?? {}), + } + linkModuleMigration && (await linkModuleMigration({ - options: linkModuleOptions, + options: linkModuleOpt, injectedDependencies, })) } diff --git a/packages/modules-sdk/src/medusa-module.ts b/packages/modules-sdk/src/medusa-module.ts index 6144d93ff80f7..fe1eb240bdffe 100644 --- a/packages/modules-sdk/src/medusa-module.ts +++ b/packages/modules-sdk/src/medusa-module.ts @@ -438,7 +438,8 @@ export class MedusaModule { public static async migrateUp( moduleKey: string, modulePath: string, - options?: Record + options?: Record, + moduleExports?: ModuleExports ): Promise { const moduleResolutions = registerMedusaModule(moduleKey, { scope: MODULE_SCOPE.INTERNAL, @@ -448,7 +449,10 @@ export class MedusaModule { }) for (const mod in moduleResolutions) { - const [migrateUp] = await loadModuleMigrations(moduleResolutions[mod]) + const [migrateUp] = await loadModuleMigrations( + moduleResolutions[mod], + moduleExports + ) if (typeof migrateUp === "function") { await migrateUp({ @@ -462,7 +466,8 @@ export class MedusaModule { public static async migrateDown( moduleKey: string, modulePath: string, - options?: Record + options?: Record, + moduleExports?: ModuleExports ): Promise { const moduleResolutions = registerMedusaModule(moduleKey, { scope: MODULE_SCOPE.INTERNAL, @@ -472,7 +477,10 @@ export class MedusaModule { }) for (const mod in moduleResolutions) { - const [, migrateDown] = await loadModuleMigrations(moduleResolutions[mod]) + const [, migrateDown] = await loadModuleMigrations( + moduleResolutions[mod], + moduleExports + ) if (typeof migrateDown === "function") { await migrateDown({ diff --git a/packages/orchestration/src/__tests__/transaction/transaction-orchestrator.ts b/packages/orchestration/src/__tests__/transaction/transaction-orchestrator.ts index 19dcbdcfbbfe3..cd5addce28252 100644 --- a/packages/orchestration/src/__tests__/transaction/transaction-orchestrator.ts +++ b/packages/orchestration/src/__tests__/transaction/transaction-orchestrator.ts @@ -1,10 +1,14 @@ +import { TransactionStepState, TransactionStepStatus } from "@medusajs/utils" +import { setTimeout } from "timers/promises" import { DistributedTransaction, TransactionHandlerType, TransactionOrchestrator, TransactionPayload, TransactionState, + TransactionStepTimeoutError, TransactionStepsDefinition, + TransactionTimeoutError, } from "../../transaction" describe("Transaction Orchestrator", () => { @@ -986,4 +990,454 @@ describe("Transaction Orchestrator", () => { expect(transaction).toBe(transactionInHandler) }) + + describe("Timeouts - Transaction and Step", () => { + it("should fail the current steps and revert the transaction if the Transaction Timeout is reached", async () => { + const mocks = { + f1: jest.fn(() => { + 
return "content f1" + }), + f2: jest.fn(async () => { + await setTimeout(200) + return "delayed content f2" + }), + f3: jest.fn(() => { + return "content f3" + }), + f4: jest.fn(() => { + return "content f4" + }), + } + + async function handler( + actionId: string, + functionHandlerType: TransactionHandlerType, + payload: TransactionPayload + ) { + const command = { + action1: { + [TransactionHandlerType.INVOKE]: () => { + return mocks.f1() + }, + [TransactionHandlerType.COMPENSATE]: () => { + return mocks.f1() + }, + }, + action2: { + [TransactionHandlerType.INVOKE]: async () => { + return await mocks.f2() + }, + [TransactionHandlerType.COMPENSATE]: () => { + return mocks.f2() + }, + }, + action3: { + [TransactionHandlerType.INVOKE]: () => { + return mocks.f3() + }, + [TransactionHandlerType.COMPENSATE]: () => { + return mocks.f3() + }, + }, + action4: { + [TransactionHandlerType.INVOKE]: () => { + return mocks.f4() + }, + [TransactionHandlerType.COMPENSATE]: () => { + return mocks.f4() + }, + }, + } + + return command[actionId][functionHandlerType]() + } + + const flow: TransactionStepsDefinition = { + next: { + action: "action1", + next: [ + { + action: "action2", + }, + { + action: "action3", + next: { + action: "action4", + }, + }, + ], + }, + } + + const strategy = new TransactionOrchestrator("transaction-name", flow, { + timeout: 0.1, // 100ms + }) + + const transaction = await strategy.beginTransaction( + "transaction_id_123", + handler + ) + + await strategy.resume(transaction) + + expect(transaction.transactionId).toBe("transaction_id_123") + expect(mocks.f1).toBeCalledTimes(2) + expect(mocks.f2).toBeCalledTimes(2) + expect(mocks.f3).toBeCalledTimes(2) + expect(mocks.f4).toBeCalledTimes(0) + expect(transaction.getContext().invoke.action1).toBe("content f1") + expect(transaction.getContext().invoke.action2).toBe("delayed content f2") + expect(transaction.getContext().invoke.action3).toBe("content f3") + expect(transaction.getContext().invoke.action4).toBe(undefined) + + expect(transaction.getErrors()[0].error).toBeInstanceOf( + TransactionTimeoutError + ) + expect(transaction.getErrors()[0].action).toBe("action2") + + expect(transaction.getState()).toBe(TransactionState.REVERTED) + }) + + it("should continue the transaction and skip children steps when the Transaction Step Timeout is reached but the step is set to 'continueOnPermanentFailure'", async () => { + const mocks = { + f1: jest.fn(() => { + return "content f1" + }), + f2: jest.fn(async () => { + await setTimeout(200) + return "delayed content f2" + }), + f3: jest.fn(() => { + return "content f3" + }), + f4: jest.fn(() => { + return "content f4" + }), + } + + async function handler( + actionId: string, + functionHandlerType: TransactionHandlerType, + payload: TransactionPayload + ) { + const command = { + action1: { + [TransactionHandlerType.INVOKE]: () => { + return mocks.f1() + }, + [TransactionHandlerType.COMPENSATE]: () => { + return mocks.f1() + }, + }, + action2: { + [TransactionHandlerType.INVOKE]: async () => { + return await mocks.f2() + }, + [TransactionHandlerType.COMPENSATE]: () => { + return mocks.f2() + }, + }, + action3: { + [TransactionHandlerType.INVOKE]: () => { + return mocks.f3() + }, + [TransactionHandlerType.COMPENSATE]: () => { + return mocks.f3() + }, + }, + action4: { + [TransactionHandlerType.INVOKE]: () => { + return mocks.f4() + }, + [TransactionHandlerType.COMPENSATE]: () => { + return mocks.f4() + }, + }, + } + + return command[actionId][functionHandlerType]() + } + + const flow: 
TransactionStepsDefinition = { + next: { + action: "action1", + next: [ + { + timeout: 0.1, // 100ms + action: "action2", + continueOnPermanentFailure: true, + next: { + action: "action4", + }, + }, + { + action: "action3", + }, + ], + }, + } + + const strategy = new TransactionOrchestrator("transaction-name", flow) + + const transaction = await strategy.beginTransaction( + "transaction_id_123", + handler + ) + + await strategy.resume(transaction) + + expect(transaction.transactionId).toBe("transaction_id_123") + expect(mocks.f1).toBeCalledTimes(1) + expect(mocks.f2).toBeCalledTimes(1) + expect(mocks.f3).toBeCalledTimes(1) + expect(mocks.f4).toBeCalledTimes(0) + expect(transaction.getContext().invoke.action1).toBe("content f1") + expect(transaction.getContext().invoke.action2).toBe("delayed content f2") + expect(transaction.getContext().invoke.action3).toBe("content f3") + expect(transaction.getContext().invoke.action4).toBe(undefined) + expect( + transaction.getFlow().steps["_root.action1.action2"].invoke.state + ).toBe(TransactionStepState.TIMEOUT) + expect( + transaction.getFlow().steps["_root.action1.action2"].invoke.status + ).toBe(TransactionStepStatus.PERMANENT_FAILURE) + expect( + transaction.getFlow().steps["_root.action1.action2"].compensate.state + ).toBe(TransactionStepState.DORMANT) + expect( + transaction.getFlow().steps["_root.action1.action2.action4"].invoke + .state + ).toBe(TransactionStepState.SKIPPED) + expect( + transaction.getFlow().steps["_root.action1.action2.action4"].invoke + .status + ).toBe(TransactionStepStatus.IDLE) + + expect(transaction.getState()).toBe(TransactionState.DONE) + }) + + it("should fail the current steps and revert the transaction if the Step Timeout is reached", async () => { + const mocks = { + f1: jest.fn(() => { + return "content f1" + }), + f2: jest.fn(async () => { + await setTimeout(200) + return "delayed content f2" + }), + f3: jest.fn(() => { + return "content f3" + }), + f4: jest.fn(() => { + return "content f4" + }), + } + + async function handler( + actionId: string, + functionHandlerType: TransactionHandlerType, + payload: TransactionPayload + ) { + const command = { + action1: { + [TransactionHandlerType.INVOKE]: () => { + return mocks.f1() + }, + [TransactionHandlerType.COMPENSATE]: () => { + return mocks.f1() + }, + }, + action2: { + [TransactionHandlerType.INVOKE]: async () => { + return await mocks.f2() + }, + [TransactionHandlerType.COMPENSATE]: () => { + return mocks.f2() + }, + }, + action3: { + [TransactionHandlerType.INVOKE]: () => { + return mocks.f3() + }, + [TransactionHandlerType.COMPENSATE]: () => { + return mocks.f3() + }, + }, + action4: { + [TransactionHandlerType.INVOKE]: () => { + return mocks.f4() + }, + [TransactionHandlerType.COMPENSATE]: () => { + return mocks.f4() + }, + }, + } + + return command[actionId][functionHandlerType]() + } + + const flow: TransactionStepsDefinition = { + next: { + action: "action1", + next: [ + { + action: "action2", + timeout: 0.1, // 100ms + }, + { + action: "action3", + next: { + action: "action4", + }, + }, + ], + }, + } + + const strategy = new TransactionOrchestrator("transaction-name", flow) + + const transaction = await strategy.beginTransaction( + "transaction_id_123", + handler + ) + + await strategy.resume(transaction) + + expect(transaction.transactionId).toBe("transaction_id_123") + expect(mocks.f1).toBeCalledTimes(2) + expect(mocks.f2).toBeCalledTimes(2) + expect(mocks.f3).toBeCalledTimes(2) + expect(mocks.f4).toBeCalledTimes(0) + 
expect(transaction.getContext().invoke.action1).toBe("content f1") + expect(transaction.getContext().invoke.action2).toBe("delayed content f2") + expect(transaction.getContext().invoke.action3).toBe("content f3") + expect(transaction.getContext().invoke.action4).toBe(undefined) + + expect(transaction.getErrors()[0].error).toBeInstanceOf( + TransactionStepTimeoutError + ) + expect(transaction.getErrors()[0].action).toBe("action2") + + expect(transaction.getState()).toBe(TransactionState.REVERTED) + }) + + it("should fail the current steps and revert the transaction if the Transaction Timeout is reached even if the step is set to 'continueOnPermanentFailure'", async () => { + const mocks = { + f1: jest.fn(() => { + return "content f1" + }), + f2: jest.fn(async () => { + await setTimeout(200) + return "delayed content f2" + }), + f3: jest.fn(async () => { + await setTimeout(200) + return "content f3" + }), + f4: jest.fn(() => { + return "content f4" + }), + } + + async function handler( + actionId: string, + functionHandlerType: TransactionHandlerType, + payload: TransactionPayload + ) { + const command = { + action1: { + [TransactionHandlerType.INVOKE]: () => { + return mocks.f1() + }, + [TransactionHandlerType.COMPENSATE]: () => { + return mocks.f1() + }, + }, + action2: { + [TransactionHandlerType.INVOKE]: async () => { + return await mocks.f2() + }, + [TransactionHandlerType.COMPENSATE]: () => { + return mocks.f2() + }, + }, + action3: { + [TransactionHandlerType.INVOKE]: async () => { + return await mocks.f3() + }, + [TransactionHandlerType.COMPENSATE]: () => { + return mocks.f3() + }, + }, + action4: { + [TransactionHandlerType.INVOKE]: () => { + return mocks.f4() + }, + [TransactionHandlerType.COMPENSATE]: () => { + return mocks.f4() + }, + }, + } + + return command[actionId][functionHandlerType]() + } + + const flow: TransactionStepsDefinition = { + next: { + action: "action1", + next: [ + { + action: "action2", + continueOnPermanentFailure: true, + }, + { + action: "action3", + continueOnPermanentFailure: true, + next: { + action: "action4", + }, + }, + ], + }, + } + + const strategy = new TransactionOrchestrator("transaction-name", flow, { + timeout: 0.1, // 100ms + }) + + const transaction = await strategy.beginTransaction( + "transaction_id_123", + handler + ) + + await strategy.resume(transaction) + + expect(transaction.transactionId).toBe("transaction_id_123") + expect(mocks.f1).toBeCalledTimes(2) + expect(mocks.f2).toBeCalledTimes(2) + expect(mocks.f3).toBeCalledTimes(2) + expect(mocks.f4).toBeCalledTimes(0) + expect(transaction.getContext().invoke.action1).toBe("content f1") + expect(transaction.getContext().invoke.action2).toBe("delayed content f2") + expect(transaction.getContext().invoke.action3).toBe("content f3") + expect(transaction.getContext().invoke.action4).toBe(undefined) + + expect(transaction.getErrors()).toHaveLength(2) + expect( + TransactionTimeoutError.isTransactionTimeoutError( + transaction.getErrors()[0].error + ) + ).toBe(true) + expect(transaction.getErrors()[0].action).toBe("action2") + + expect( + TransactionTimeoutError.isTransactionTimeoutError( + transaction.getErrors()[1].error + ) + ).toBe(true) + expect(transaction.getErrors()[1].action).toBe("action3") + + expect(transaction.getState()).toBe(TransactionState.REVERTED) + }) + }) }) diff --git a/packages/orchestration/src/transaction/datastore/abstract-storage.ts b/packages/orchestration/src/transaction/datastore/abstract-storage.ts index aab8c5e677c5a..defc91d6cd18d 100644 ---
a/packages/orchestration/src/transaction/datastore/abstract-storage.ts +++ b/packages/orchestration/src/transaction/datastore/abstract-storage.ts @@ -3,14 +3,11 @@ import { TransactionCheckpoint, } from "../distributed-transaction" import { TransactionStep } from "../transaction-step" -import { TransactionModelOptions } from "../types" export interface IDistributedTransactionStorage { get(key: string): Promise list(): Promise save(key: string, data: TransactionCheckpoint, ttl?: number): Promise - delete(key: string): Promise - archive(key: string, options?: TransactionModelOptions): Promise scheduleRetry( transaction: DistributedTransaction, step: TransactionStep, @@ -62,14 +59,6 @@ export abstract class DistributedTransactionStorage throw new Error("Method 'save' not implemented.") } - async delete(key: string): Promise { - throw new Error("Method 'delete' not implemented.") - } - - async archive(key: string, options?: TransactionModelOptions): Promise { - throw new Error("Method 'archive' not implemented.") - } - async scheduleRetry( transaction: DistributedTransaction, step: TransactionStep, diff --git a/packages/orchestration/src/transaction/datastore/base-in-memory-storage.ts b/packages/orchestration/src/transaction/datastore/base-in-memory-storage.ts index 69ab557a03f02..23ac1438cb997 100644 --- a/packages/orchestration/src/transaction/datastore/base-in-memory-storage.ts +++ b/packages/orchestration/src/transaction/datastore/base-in-memory-storage.ts @@ -1,5 +1,5 @@ +import { TransactionState } from "@medusajs/utils" import { TransactionCheckpoint } from "../distributed-transaction" -import { TransactionModelOptions } from "../types" import { DistributedTransactionStorage } from "./abstract-storage" // eslint-disable-next-line max-len @@ -24,14 +24,16 @@ export class BaseInMemoryDistributedTransactionStorage extends DistributedTransa data: TransactionCheckpoint, ttl?: number ): Promise { - this.storage.set(key, data) - } - - async delete(key: string): Promise { - this.storage.delete(key) - } + const hasFinished = [ + TransactionState.DONE, + TransactionState.REVERTED, + TransactionState.FAILED, + ].includes(data.flow.state) - async archive(key: string, options?: TransactionModelOptions): Promise { - this.storage.delete(key) + if (hasFinished) { + this.storage.delete(key) + } else { + this.storage.set(key, data) + } } } diff --git a/packages/orchestration/src/transaction/distributed-transaction.ts b/packages/orchestration/src/transaction/distributed-transaction.ts index 16d05c2f62a5d..0a9e14e7879b5 100644 --- a/packages/orchestration/src/transaction/distributed-transaction.ts +++ b/packages/orchestration/src/transaction/distributed-transaction.ts @@ -86,7 +86,7 @@ export class DistributedTransaction extends EventEmitter { this.keyValueStore = storage } - private static keyPrefix = "dtrans" + public static keyPrefix = "dtrans" constructor( private flow: TransactionFlow, @@ -177,18 +177,18 @@ export class DistributedTransaction extends EventEmitter { } public hasTimeout(): boolean { - return !!this.getFlow().definition.timeout + return !!this.getTimeout() } - public getTimeoutInterval(): number | undefined { - return this.getFlow().definition.timeout + public getTimeout(): number | undefined { + return this.getFlow().options?.timeout } public async saveCheckpoint( ttl = 0 ): Promise { const options = this.getFlow().options - if (!options?.storeExecution) { + if (!options?.store) { return } @@ -226,31 +226,6 @@ export class DistributedTransaction extends EventEmitter { return null } - 
public async deleteCheckpoint(): Promise { - const options = this.getFlow().options - if (!options?.storeExecution) { - return - } - - const key = TransactionOrchestrator.getKeyName( - DistributedTransaction.keyPrefix, - this.modelId, - this.transactionId - ) - await DistributedTransaction.keyValueStore.delete(key) - } - - public async archiveCheckpoint(): Promise { - const options = this.getFlow().options - - const key = TransactionOrchestrator.getKeyName( - DistributedTransaction.keyPrefix, - this.modelId, - this.transactionId - ) - await DistributedTransaction.keyValueStore.archive(key, options) - } - public async scheduleRetry( step: TransactionStep, interval: number @@ -269,6 +244,11 @@ export class DistributedTransaction extends EventEmitter { } public async scheduleTransactionTimeout(interval: number): Promise { + // schedule transaction timeout only if there are async steps + if (!this.getFlow().hasAsyncSteps) { + return + } + await this.saveCheckpoint() await DistributedTransaction.keyValueStore.scheduleTransactionTimeout( this, @@ -278,6 +258,10 @@ export class DistributedTransaction extends EventEmitter { } public async clearTransactionTimeout(): Promise { + if (!this.getFlow().hasAsyncSteps) { + return + } + await DistributedTransaction.keyValueStore.clearTransactionTimeout(this) } @@ -285,6 +269,11 @@ export class DistributedTransaction extends EventEmitter { step: TransactionStep, interval: number ): Promise { + // schedule step timeout only if the step is async + if (!step.definition.async) { + return + } + await this.saveCheckpoint() await DistributedTransaction.keyValueStore.scheduleStepTimeout( this, @@ -295,6 +284,10 @@ export class DistributedTransaction extends EventEmitter { } public async clearStepTimeout(step: TransactionStep): Promise { + if (!step.definition.async || step.isCompensating()) { + return + } + await DistributedTransaction.keyValueStore.clearStepTimeout(this, step) } } diff --git a/packages/orchestration/src/transaction/errors.ts b/packages/orchestration/src/transaction/errors.ts index 331784e80c3f9..2bbda62dc2a7e 100644 --- a/packages/orchestration/src/transaction/errors.ts +++ b/packages/orchestration/src/transaction/errors.ts @@ -4,7 +4,7 @@ export class PermanentStepFailureError extends Error { ): error is PermanentStepFailureError { return ( error instanceof PermanentStepFailureError || - error.name === "PermanentStepFailure" + error?.name === "PermanentStepFailure" ) } @@ -14,16 +14,19 @@ export class PermanentStepFailureError extends Error { } } -export class StepTimeoutError extends Error { - static isStepTimeoutError(error: Error): error is StepTimeoutError { +export class TransactionStepTimeoutError extends Error { + static isTransactionStepTimeoutError( + error: Error + ): error is TransactionStepTimeoutError { return ( - error instanceof StepTimeoutError || error.name === "StepTimeoutError" + error instanceof TransactionStepTimeoutError || + error?.name === "TransactionStepTimeoutError" ) } constructor(message?: string) { super(message) - this.name = "StepTimeoutError" + this.name = "TransactionStepTimeoutError" } } @@ -33,7 +36,7 @@ export class TransactionTimeoutError extends Error { ): error is TransactionTimeoutError { return ( error instanceof TransactionTimeoutError || - error.name === "TransactionTimeoutError" + error?.name === "TransactionTimeoutError" ) } diff --git a/packages/orchestration/src/transaction/orchestrator-builder.ts b/packages/orchestration/src/transaction/orchestrator-builder.ts index 711902c5d6e0d..a645cf72fd34f 
100644 --- a/packages/orchestration/src/transaction/orchestrator-builder.ts +++ b/packages/orchestration/src/transaction/orchestrator-builder.ts @@ -314,7 +314,7 @@ export class OrchestratorBuilder { action: string, step: InternalStep = this.steps ): InternalStep | undefined { - if (step.action === action) { + if (step.uuid === action || step.action === action) { return step } @@ -357,7 +357,7 @@ export class OrchestratorBuilder { if (!nextStep) { continue } - if (nextStep.action === action) { + if (nextStep.uuid === action || nextStep.action === action) { return step } const foundStep = this.findParentStepByAction( diff --git a/packages/orchestration/src/transaction/transaction-orchestrator.ts b/packages/orchestration/src/transaction/transaction-orchestrator.ts index 1a6de79a675e5..7a7fa54c2a794 100644 --- a/packages/orchestration/src/transaction/transaction-orchestrator.ts +++ b/packages/orchestration/src/transaction/transaction-orchestrator.ts @@ -13,11 +13,11 @@ import { TransactionStepStatus, } from "./types" -import { MedusaError, promiseAll } from "@medusajs/utils" +import { MedusaError, promiseAll, TransactionStepState } from "@medusajs/utils" import { EventEmitter } from "events" import { PermanentStepFailureError, - StepTimeoutError, + TransactionStepTimeoutError, TransactionTimeoutError, } from "./errors" @@ -30,6 +30,7 @@ export type TransactionFlow = { hasFailedSteps: boolean hasWaitingSteps: boolean hasSkippedSteps: boolean + hasRevertedSteps: boolean timedOutAt: number | null startedAt?: number state: TransactionState @@ -62,10 +63,6 @@ export class TransactionOrchestrator extends EventEmitter { return params.join(this.SEPARATOR) } - public getOptions(): TransactionModelOptions { - return this.options ?? {} - } - private getPreviousStep(flow: TransactionFlow, step: TransactionStep) { const id = step.id.split(".") id.pop() @@ -73,6 +70,10 @@ export class TransactionOrchestrator extends EventEmitter { return flow.steps[parentId] } + public getOptions(): TransactionModelOptions { + return this.options ?? {} + } + private getInvokeSteps(flow: TransactionFlow): string[] { if (this.invokeSteps.length) { return this.invokeSteps @@ -102,9 +103,10 @@ export class TransactionOrchestrator extends EventEmitter { private canMoveForward(flow: TransactionFlow, previousStep: TransactionStep) { const states = [ - TransactionState.DONE, - TransactionState.FAILED, - TransactionState.SKIPPED, + TransactionStepState.DONE, + TransactionStepState.FAILED, + TransactionStepState.TIMEOUT, + TransactionStepState.SKIPPED, ] const siblings = this.getPreviousStep(flow, previousStep).next.map( @@ -119,10 +121,10 @@ export class TransactionOrchestrator extends EventEmitter { private canMoveBackward(flow: TransactionFlow, step: TransactionStep) { const states = [ - TransactionState.DONE, - TransactionState.REVERTED, - TransactionState.FAILED, - TransactionState.DORMANT, + TransactionStepState.DONE, + TransactionStepState.REVERTED, + TransactionStepState.FAILED, + TransactionStepState.DORMANT, ] const siblings = step.next.map((sib) => flow.steps[sib]) return ( @@ -144,29 +146,89 @@ export class TransactionOrchestrator extends EventEmitter { } } - private async checkStepTimeout(transaction, step) { + private hasExpired( + { + transaction, + step, + }: { + transaction?: DistributedTransaction + step?: TransactionStep + }, + dateNow: number + ): boolean { + const hasStepTimedOut = + step && + step.hasTimeout() && + !step.isCompensating() && + dateNow > step.startedAt! + step.getTimeout()! 
* 1e3 + + const hasTransactionTimedOut = + transaction && + transaction.hasTimeout() && + transaction.getFlow().state !== TransactionState.COMPENSATING && + dateNow > + transaction.getFlow().startedAt! + transaction.getTimeout()! * 1e3 + + return !!hasStepTimedOut || !!hasTransactionTimedOut + } + + private async checkTransactionTimeout( + transaction: DistributedTransaction, + currentSteps: TransactionStep[] + ) { + const flow = transaction.getFlow() + let hasTimedOut = false + if (!flow.timedOutAt && this.hasExpired({ transaction }, Date.now())) { + flow.timedOutAt = Date.now() + + void transaction.clearTransactionTimeout() + + for (const step of currentSteps) { + await TransactionOrchestrator.setStepTimeout( + transaction, + step, + new TransactionTimeoutError() + ) + } + + await transaction.saveCheckpoint() + + this.emit(DistributedTransactionEvent.TIMEOUT, { transaction }) + + hasTimedOut = true + } + + return hasTimedOut + } + + private async checkStepTimeout( + transaction: DistributedTransaction, + step: TransactionStep + ) { let hasTimedOut = false if ( - step.hasTimeout() && !step.timedOutAt && step.canCancel() && - step.startedAt! + step.getTimeoutInterval()! * 1e3 < Date.now() + this.hasExpired({ step }, Date.now()) ) { step.timedOutAt = Date.now() - await transaction.saveCheckpoint() - this.emit(DistributedTransactionEvent.TIMEOUT, { transaction }) - await TransactionOrchestrator.setStepFailure( + + await TransactionOrchestrator.setStepTimeout( transaction, step, - new StepTimeoutError(), - 0 + new TransactionStepTimeoutError() ) hasTimedOut = true + + await transaction.saveCheckpoint() + + this.emit(DistributedTransactionEvent.TIMEOUT, { transaction }) } return hasTimedOut } private async checkAllSteps(transaction: DistributedTransaction): Promise<{ + current: TransactionStep[] next: TransactionStep[] total: number remaining: number @@ -182,6 +244,8 @@ export class TransactionOrchestrator extends EventEmitter { const flow = transaction.getFlow() const nextSteps: TransactionStep[] = [] + const currentSteps: TransactionStep[] = [] + const allSteps = flow.state === TransactionState.COMPENSATING ? 
this.getCompensationSteps(flow) @@ -204,6 +268,7 @@ export class TransactionOrchestrator extends EventEmitter { } if (curState.status === TransactionStepStatus.WAITING) { + currentSteps.push(stepDef) hasWaiting = true if (stepDef.hasAwaitingRetry()) { @@ -223,6 +288,8 @@ export class TransactionOrchestrator extends EventEmitter { continue } else if (curState.status === TransactionStepStatus.TEMPORARY_FAILURE) { + currentSteps.push(stepDef) + if (!stepDef.canRetry()) { if (stepDef.hasRetryInterval() && !stepDef.retryRescheduledAt) { stepDef.hasScheduledRetry = true @@ -243,11 +310,11 @@ export class TransactionOrchestrator extends EventEmitter { } else { completedSteps++ - if (curState.state === TransactionState.SKIPPED) { + if (curState.state === TransactionStepState.SKIPPED) { hasSkipped = true - } else if (curState.state === TransactionState.REVERTED) { + } else if (curState.state === TransactionStepState.REVERTED) { hasReverted = true - } else if (curState.state === TransactionState.FAILED) { + } else if (curState.state === TransactionStepState.FAILED) { if (stepDef.definition.continueOnPermanentFailure) { hasIgnoredFailure = true } else { @@ -258,6 +325,7 @@ export class TransactionOrchestrator extends EventEmitter { } flow.hasWaitingSteps = hasWaiting + flow.hasRevertedSteps = hasReverted const totalSteps = allSteps.length - 1 if ( @@ -288,6 +356,7 @@ export class TransactionOrchestrator extends EventEmitter { } return { + current: currentSteps, next: nextSteps, total: totalSteps, remaining: totalSteps - completedSteps, @@ -304,11 +373,13 @@ export class TransactionOrchestrator extends EventEmitter { const stepDef = flow.steps[step] const curState = stepDef.getStates() if ( - curState.state === TransactionState.DONE || + [TransactionStepState.DONE, TransactionStepState.TIMEOUT].includes( + curState.state + ) || curState.status === TransactionStepStatus.PERMANENT_FAILURE ) { stepDef.beginCompensation() - stepDef.changeState(TransactionState.NOT_STARTED) + stepDef.changeState(TransactionStepState.NOT_STARTED) } } } @@ -318,6 +389,9 @@ export class TransactionOrchestrator extends EventEmitter { step: TransactionStep, response: unknown ): Promise { + const hasStepTimedOut = + step.getStates().state === TransactionStepState.TIMEOUT + if (step.saveResponse) { transaction.addResponse( step.definition.action!, @@ -328,16 +402,19 @@ export class TransactionOrchestrator extends EventEmitter { ) } - step.changeStatus(TransactionStepStatus.OK) + const flow = transaction.getFlow() + + if (!hasStepTimedOut) { + step.changeStatus(TransactionStepStatus.OK) + } if (step.isCompensating()) { - step.changeState(TransactionState.REVERTED) - } else { - step.changeState(TransactionState.DONE) + step.changeState(TransactionStepState.REVERTED) + } else if (!hasStepTimedOut) { + step.changeState(TransactionStepState.DONE) } - const flow = transaction.getFlow() - if (step.definition.async || flow.options?.strictCheckpoints) { + if (step.definition.async || flow.options?.storeExecution) { await transaction.saveCheckpoint() } @@ -357,35 +434,87 @@ export class TransactionOrchestrator extends EventEmitter { transaction.emit(eventName, { step, transaction }) } + private static async setStepTimeout( + transaction: DistributedTransaction, + step: TransactionStep, + error: TransactionStepTimeoutError | TransactionTimeoutError + ): Promise { + if ( + [ + TransactionStepState.TIMEOUT, + TransactionStepState.DONE, + TransactionStepState.REVERTED, + ].includes(step.getStates().state) + ) { + return + } + + 
step.changeState(TransactionStepState.TIMEOUT) + + transaction.addError( + step.definition.action!, + TransactionHandlerType.INVOKE, + error + ) + + await TransactionOrchestrator.setStepFailure( + transaction, + step, + undefined, + 0, + true, + error + ) + + await transaction.clearStepTimeout(step) + } + private static async setStepFailure( transaction: DistributedTransaction, step: TransactionStep, error: Error | any, - maxRetries: number = TransactionOrchestrator.DEFAULT_RETRIES + maxRetries: number = TransactionOrchestrator.DEFAULT_RETRIES, + isTimeout = false, + timeoutError?: TransactionStepTimeoutError | TransactionTimeoutError ): Promise { step.failures++ - step.changeStatus(TransactionStepStatus.TEMPORARY_FAILURE) + if ( + !isTimeout && + step.getStates().status !== TransactionStepStatus.PERMANENT_FAILURE + ) { + step.changeStatus(TransactionStepStatus.TEMPORARY_FAILURE) + } const flow = transaction.getFlow() const cleaningUp: Promise[] = [] - if (step.failures > maxRetries) { - step.changeState(TransactionState.FAILED) + + const hasTimedOut = step.getStates().state === TransactionStepState.TIMEOUT + if (step.failures > maxRetries || hasTimedOut) { + if (!hasTimedOut) { + step.changeState(TransactionStepState.FAILED) + } + step.changeStatus(TransactionStepStatus.PERMANENT_FAILURE) - transaction.addError( - step.definition.action!, - step.isCompensating() - ? TransactionHandlerType.COMPENSATE - : TransactionHandlerType.INVOKE, - error - ) + if (!isTimeout) { + transaction.addError( + step.definition.action!, + step.isCompensating() + ? TransactionHandlerType.COMPENSATE + : TransactionHandlerType.INVOKE, + error + ) + } if (!step.isCompensating()) { - if (step.definition.continueOnPermanentFailure) { + if ( + step.definition.continueOnPermanentFailure && + !TransactionTimeoutError.isTransactionTimeoutError(timeoutError!) + ) { for (const childStep of step.next) { const child = flow.steps[childStep] - child.changeState(TransactionState.SKIPPED) + child.changeState(TransactionStepState.SKIPPED) } } else { flow.state = TransactionState.WAITING_TO_COMPENSATE @@ -397,7 +526,7 @@ export class TransactionOrchestrator extends EventEmitter { } } - if (step.definition.async || flow.options?.strictCheckpoints) { + if (step.definition.async || flow.options?.storeExecution) { await transaction.saveCheckpoint() } @@ -413,33 +542,6 @@ export class TransactionOrchestrator extends EventEmitter { transaction.emit(eventName, { step, transaction }) } - private async checkTransactionTimeout(transaction, currentSteps) { - let hasTimedOut = false - const flow = transaction.getFlow() - if ( - transaction.hasTimeout() && - !flow.timedOutAt && - flow.startedAt! + transaction.getTimeoutInterval()! 
* 1e3 < Date.now() - ) { - flow.timedOutAt = Date.now() - this.emit(DistributedTransactionEvent.TIMEOUT, { transaction }) - - for (const step of currentSteps) { - await TransactionOrchestrator.setStepFailure( - transaction, - step, - new TransactionTimeoutError(), - 0 - ) - } - - await transaction.saveCheckpoint() - - hasTimedOut = true - } - return hasTimedOut - } - private async executeNext( transaction: DistributedTransaction ): Promise { @@ -456,22 +558,19 @@ export class TransactionOrchestrator extends EventEmitter { const hasTimedOut = await this.checkTransactionTimeout( transaction, - nextSteps.next + nextSteps.current ) + if (hasTimedOut) { continue } if (nextSteps.remaining === 0) { if (transaction.hasTimeout()) { - await transaction.clearTransactionTimeout() + void transaction.clearTransactionTimeout() } - if (flow.options?.retentionTime == undefined) { - await transaction.deleteCheckpoint() - } else { - await transaction.saveCheckpoint() - } + await transaction.saveCheckpoint() this.emit(DistributedTransactionEvent.FINISH, { transaction }) } @@ -486,20 +585,20 @@ export class TransactionOrchestrator extends EventEmitter { step.lastAttempt = Date.now() step.attempts++ - if (curState.state === TransactionState.NOT_STARTED) { + if (curState.state === TransactionStepState.NOT_STARTED) { if (!step.startedAt) { step.startedAt = Date.now() } if (step.isCompensating()) { - step.changeState(TransactionState.COMPENSATING) + step.changeState(TransactionStepState.COMPENSATING) if (step.definition.noCompensation) { - step.changeState(TransactionState.REVERTED) + step.changeState(TransactionStepState.REVERTED) continue } } else if (flow.state === TransactionState.INVOKING) { - step.changeState(TransactionState.INVOKING) + step.changeState(TransactionStepState.INVOKING) } } @@ -554,6 +653,14 @@ export class TransactionOrchestrator extends EventEmitter { transaction .handler(step.definition.action + "", type, payload, transaction) .then(async (response: any) => { + if (this.hasExpired({ transaction, step }, Date.now())) { + await this.checkStepTimeout(transaction, step) + await this.checkTransactionTimeout( + transaction, + nextSteps.next.includes(step) ? nextSteps.next : [step] + ) + } + await TransactionOrchestrator.setStepSuccess( transaction, step, @@ -561,6 +668,14 @@ export class TransactionOrchestrator extends EventEmitter { ) }) .catch(async (error) => { + if (this.hasExpired({ transaction, step }, Date.now())) { + await this.checkStepTimeout(transaction, step) + await this.checkTransactionTimeout( + transaction, + nextSteps.next.includes(step) ? nextSteps.next : [step] + ) + } + if ( PermanentStepFailureError.isPermanentStepFailureError(error) ) { @@ -573,7 +688,7 @@ export class TransactionOrchestrator extends EventEmitter { ) } else { execution.push( - transaction.saveCheckpoint().then(async () => + transaction.saveCheckpoint().then(() => { transaction .handler( step.definition.action + "", @@ -591,12 +706,12 @@ export class TransactionOrchestrator extends EventEmitter { await setStepFailure(error) }) - ) + }) ) } } - if (hasSyncSteps && flow.options?.strictCheckpoints) { + if (hasSyncSteps && flow.options?.storeExecution) { await transaction.saveCheckpoint() } @@ -630,16 +745,14 @@ export class TransactionOrchestrator extends EventEmitter { flow.state = TransactionState.INVOKING flow.startedAt = Date.now() - if (this.options?.storeExecution) { + if (this.options?.store) { await transaction.saveCheckpoint( flow.hasAsyncSteps ? 
0 : TransactionOrchestrator.DEFAULT_TTL ) } if (transaction.hasTimeout()) { - await transaction.scheduleTransactionTimeout( - transaction.getTimeoutInterval()! - ) + await transaction.scheduleTransactionTimeout(transaction.getTimeout()!) } this.emit(DistributedTransactionEvent.BEGIN, { transaction }) @@ -682,12 +795,19 @@ export class TransactionOrchestrator extends EventEmitter { this.definition ) + this.options ??= {} + const hasAsyncSteps = features.hasAsyncSteps const hasStepTimeouts = features.hasStepTimeouts const hasRetriesTimeout = features.hasRetriesTimeout + const hasTransactionTimeout = !!this.options.timeout - this.options ??= {} - if (hasAsyncSteps || hasStepTimeouts || hasRetriesTimeout) { + if (hasAsyncSteps) { + this.options.store = true + } + + if (hasStepTimeouts || hasRetriesTimeout || hasTransactionTimeout) { + this.options.store = true this.options.storeExecution = true } @@ -699,6 +819,7 @@ export class TransactionOrchestrator extends EventEmitter { hasFailedSteps: false, hasSkippedSteps: false, hasWaitingSteps: false, + hasRevertedSteps: false, timedOutAt: null, state: TransactionState.NOT_STARTED, definition: this.definition, @@ -807,15 +928,16 @@ export class TransactionOrchestrator extends EventEmitter { new TransactionStep(), existingSteps?.[id] || { id, + uuid: definitionCopy.uuid, depth: level.length - 1, definition: definitionCopy, saveResponse: definitionCopy.saveResponse ?? true, invoke: { - state: TransactionState.NOT_STARTED, + state: TransactionStepState.NOT_STARTED, status: TransactionStepStatus.IDLE, }, compensate: { - state: TransactionState.DORMANT, + state: TransactionStepState.DORMANT, status: TransactionStepStatus.IDLE, }, attempts: 0, @@ -861,11 +983,7 @@ export class TransactionOrchestrator extends EventEmitter { existingTransaction?.context ) - if ( - newTransaction && - this.options?.storeExecution && - this.options?.strictCheckpoints - ) { + if (newTransaction && this.options?.store && this.options?.storeExecution) { await transaction.saveCheckpoint( modelFlow.hasAsyncSteps ? 
0 : TransactionOrchestrator.DEFAULT_TTL ) diff --git a/packages/orchestration/src/transaction/transaction-step.ts b/packages/orchestration/src/transaction/transaction-step.ts index 57b06acf31e84..bf20b5635af43 100644 --- a/packages/orchestration/src/transaction/transaction-step.ts +++ b/packages/orchestration/src/transaction/transaction-step.ts @@ -1,4 +1,4 @@ -import { MedusaError } from "@medusajs/utils" +import { MedusaError, TransactionStepState } from "@medusajs/utils" import { DistributedTransaction, TransactionPayload, @@ -6,8 +6,8 @@ import { import { TransactionHandlerType, TransactionState, - TransactionStepsDefinition, TransactionStepStatus, + TransactionStepsDefinition, } from "./types" export type TransactionStepHandler = ( @@ -38,14 +38,15 @@ export class TransactionStep { */ private stepFailed = false id: string + uuid?: string depth: number definition: TransactionStepsDefinition invoke: { - state: TransactionState + state: TransactionStepState status: TransactionStepStatus } compensate: { - state: TransactionState + state: TransactionStepState status: TransactionStepStatus } attempts: number @@ -81,24 +82,25 @@ export class TransactionStep { return !this.stepFailed } - public changeState(toState: TransactionState) { + public changeState(toState: TransactionStepState) { const allowed = { - [TransactionState.DORMANT]: [TransactionState.NOT_STARTED], - [TransactionState.NOT_STARTED]: [ - TransactionState.INVOKING, - TransactionState.COMPENSATING, - TransactionState.FAILED, - TransactionState.SKIPPED, + [TransactionStepState.DORMANT]: [TransactionStepState.NOT_STARTED], + [TransactionStepState.NOT_STARTED]: [ + TransactionStepState.INVOKING, + TransactionStepState.COMPENSATING, + TransactionStepState.FAILED, + TransactionStepState.SKIPPED, ], - [TransactionState.INVOKING]: [ - TransactionState.FAILED, - TransactionState.DONE, + [TransactionStepState.INVOKING]: [ + TransactionStepState.FAILED, + TransactionStepState.DONE, + TransactionStepState.TIMEOUT, ], - [TransactionState.COMPENSATING]: [ - TransactionState.REVERTED, - TransactionState.FAILED, + [TransactionStepState.COMPENSATING]: [ + TransactionStepState.REVERTED, + TransactionStepState.FAILED, ], - [TransactionState.DONE]: [TransactionState.COMPENSATING], + [TransactionStepState.DONE]: [TransactionStepState.COMPENSATING], } const curState = this.getStates() @@ -155,10 +157,10 @@ export class TransactionStep { } hasTimeout(): boolean { - return !!this.definition.timeout + return !!this.getTimeout() } - getTimeoutInterval(): number | undefined { + getTimeout(): number | undefined { return this.definition.timeout } @@ -190,7 +192,7 @@ export class TransactionStep { const { status, state } = this.getStates() return ( (!this.isCompensating() && - state === TransactionState.NOT_STARTED && + state === TransactionStepState.NOT_STARTED && flowState === TransactionState.INVOKING) || status === TransactionStepStatus.TEMPORARY_FAILURE ) @@ -199,7 +201,7 @@ export class TransactionStep { canCompensate(flowState: TransactionState): boolean { return ( this.isCompensating() && - this.getStates().state === TransactionState.NOT_STARTED && + this.getStates().state === TransactionStepState.NOT_STARTED && flowState === TransactionState.COMPENSATING ) } diff --git a/packages/orchestration/src/transaction/types.ts b/packages/orchestration/src/transaction/types.ts index 16f27985f5c62..bd5abab5caa80 100644 --- a/packages/orchestration/src/transaction/types.ts +++ b/packages/orchestration/src/transaction/types.ts @@ -1,51 +1,118 @@ import { 
DistributedTransaction } from "./distributed-transaction" import { TransactionStep } from "./transaction-step" +export { + TransactionHandlerType, + TransactionState, + TransactionStepStatus, +} from "@medusajs/utils" -export enum TransactionHandlerType { - INVOKE = "invoke", - COMPENSATE = "compensate", -} - +/** + * Defines the structure and behavior of a single step within a transaction workflow. + */ export type TransactionStepsDefinition = { + /** + * A unique identifier for the transaction step. + * This is set automatically when declaring a workflow with "createWorkflow". + */ + uuid?: string + + /** + * Specifies the action to be performed in this step. + * "name" is an alias for action when creating a workflow with "createWorkflow". + */ action?: string + + /** + * Indicates whether the workflow should continue even if there is a permanent failure in this step. + * If set to true, the child steps of this step will not be executed and their state will be marked as TransactionStepState.SKIPPED. + */ continueOnPermanentFailure?: boolean + + /** + * If true, no compensation action will be triggered for this step in case of a failure. + */ noCompensation?: boolean + + /** + * The maximum number of times this step should be retried in case of temporary failures. + * The default is 0 (no retries). + */ maxRetries?: number + + /** + * The interval (in seconds) between retry attempts after a temporary failure. + * The default is to retry immediately. + */ retryInterval?: number + + /** + * The interval (in seconds) to retry a step even if its status is "TransactionStepStatus.WAITING". + */ retryIntervalAwaiting?: number + + /** + * The maximum amount of time (in seconds) to wait for this step to complete. + * This is NOT an execution timeout; the step will always be executed and wait for its response. + * If the response is not received within the configured timeout, the step will be marked as "TransactionStepState.TIMEOUT" and the workflow will be reverted as soon as the response is received. + */ timeout?: number + + /** + * If true, the step is executed asynchronously. This means that the workflow will not wait for the response of this step. + * Async steps require their responses to be set using "setStepSuccess" or "setStepFailure". + * If combined with a timeout and no response is set within that interval, the step will be marked as "TransactionStepState.TIMEOUT" and the workflow will be reverted immediately. + */ async?: boolean + + /** + * If true, the compensation function for this step is executed asynchronously, which means the response has to be set using "setStepSuccess" or "setStepFailure". + */ compensateAsync?: boolean + + /** + * If true, the workflow will not wait for its sibling steps to complete before moving to the next step. + */ noWait?: boolean + + /** + * If true, the response of this step will be stored. + * The default is true. + */ saveResponse?: boolean - next?: TransactionStepsDefinition | TransactionStepsDefinition[] -} -export enum TransactionStepStatus { - IDLE = "idle", - OK = "ok", - WAITING = "waiting_response", - TEMPORARY_FAILURE = "temp_failure", - PERMANENT_FAILURE = "permanent_failure", -} + /** + * Defines the next step(s) to execute after this step. Can be a single step or an array of steps.
+ */ + next?: TransactionStepsDefinition | TransactionStepsDefinition[] -export enum TransactionState { - NOT_STARTED = "not_started", - INVOKING = "invoking", - WAITING_TO_COMPENSATE = "waiting_to_compensate", - COMPENSATING = "compensating", - DONE = "done", - REVERTED = "reverted", - FAILED = "failed", - DORMANT = "dormant", - SKIPPED = "skipped", + // TODO: add metadata field for customizations } +/** + * Defines the options for a transaction model, which are applicable to the entire workflow. + */ export type TransactionModelOptions = { + /** + * The global timeout for the entire transaction workflow (in seconds). + */ timeout?: number - storeExecution?: boolean + + /** + * If true, the state of the transaction will be persisted. + */ + store?: boolean + + /** + * TBD + */ retentionTime?: number - strictCheckpoints?: boolean + + /** + * If true, the execution details of each step will be stored. + */ + storeExecution?: boolean + + // TODO: add metadata field for customizations } export type TransactionModel = { diff --git a/packages/orchestration/src/workflow/workflow-manager.ts b/packages/orchestration/src/workflow/workflow-manager.ts index f0f0c06dcfa0f..62c648b523d57 100644 --- a/packages/orchestration/src/workflow/workflow-manager.ts +++ b/packages/orchestration/src/workflow/workflow-manager.ts @@ -81,9 +81,16 @@ export class WorkflowManager { const finalFlow = flow instanceof OrchestratorBuilder ? flow.build() : flow if (WorkflowManager.workflows.has(workflowId)) { + function excludeStepUuid(key, value) { + return key === "uuid" ? undefined : value + } + const areStepsEqual = finalFlow - ? JSON.stringify(finalFlow) === - JSON.stringify(WorkflowManager.workflows.get(workflowId)!.flow_) + ? JSON.stringify(finalFlow, excludeStepUuid) === + JSON.stringify( + WorkflowManager.workflows.get(workflowId)!.flow_, + excludeStepUuid + ) : true if (!areStepsEqual) { @@ -131,14 +138,19 @@ export class WorkflowManager { } const finalFlow = flow instanceof OrchestratorBuilder ? 
flow.build() : flow + const updatedOptions = { ...workflow.options, ...options } WorkflowManager.workflows.set(workflowId, { id: workflowId, flow_: finalFlow, - orchestrator: new TransactionOrchestrator(workflowId, finalFlow, options), + orchestrator: new TransactionOrchestrator( + workflowId, + finalFlow, + updatedOptions + ), handler: WorkflowManager.buildHandlers(workflow.handlers_), handlers_: workflow.handlers_, - options: { ...workflow.options, ...options }, + options: updatedOptions, requiredModules, optionalModules, }) diff --git a/packages/payment/src/index.ts b/packages/payment/src/index.ts index 5ec1f1b289ad7..d449f7354469a 100644 --- a/packages/payment/src/index.ts +++ b/packages/payment/src/index.ts @@ -1,23 +1,10 @@ -import { moduleDefinition } from "./module-definition" -import { Modules } from "@medusajs/modules-sdk" -import { ModulesSdkUtils } from "@medusajs/utils" - -import * as PaymentModels from "@models" +import { + moduleDefinition, + revertMigration, + runMigrations, +} from "./module-definition" export default moduleDefinition - -const migrationScriptOptions = { - moduleName: Modules.PAYMENT, - models: PaymentModels, - pathToMigrations: __dirname + "/migrations", -} - -export const runMigrations = ModulesSdkUtils.buildMigrationScript( - migrationScriptOptions -) -export const revertMigration = ModulesSdkUtils.buildRevertMigrationScript( - migrationScriptOptions -) +export { revertMigration, runMigrations } export * from "./initialize" -export * from "./loaders" diff --git a/packages/payment/src/module-definition.ts b/packages/payment/src/module-definition.ts index 543a6ef254546..3b7c8e329a897 100644 --- a/packages/payment/src/module-definition.ts +++ b/packages/payment/src/module-definition.ts @@ -5,10 +5,30 @@ import { PaymentModuleService } from "@services" import loadConnection from "./loaders/connection" import loadContainer from "./loaders/container" +import { Modules } from "@medusajs/modules-sdk" +import { ModulesSdkUtils } from "@medusajs/utils" + +import * as PaymentModels from "@models" + +const migrationScriptOptions = { + moduleName: Modules.PAYMENT, + models: PaymentModels, + pathToMigrations: __dirname + "/migrations", +} + +export const runMigrations = ModulesSdkUtils.buildMigrationScript( + migrationScriptOptions +) +export const revertMigration = ModulesSdkUtils.buildRevertMigrationScript( + migrationScriptOptions +) + const service = PaymentModuleService const loaders = [loadContainer, loadConnection] as any export const moduleDefinition: ModuleExports = { service, loaders, + runMigrations, + revertMigration, } diff --git a/packages/pricing/src/index.ts b/packages/pricing/src/index.ts index f4609139aa329..515ee01a9179f 100644 --- a/packages/pricing/src/index.ts +++ b/packages/pricing/src/index.ts @@ -1,25 +1,14 @@ -import { moduleDefinition } from "./module-definition" -import { Modules } from "@medusajs/modules-sdk" -import * as Models from "@models" -import { ModulesSdkUtils } from "@medusajs/utils" +import { + moduleDefinition, + revertMigration, + runMigrations, +} from "./module-definition" export default moduleDefinition - -const migrationScriptOptions = { - moduleName: Modules.PRICING, - models: Models, - pathToMigrations: __dirname + "/migrations", -} - -export const runMigrations = ModulesSdkUtils.buildMigrationScript( - migrationScriptOptions -) -export const revertMigration = ModulesSdkUtils.buildRevertMigrationScript( - migrationScriptOptions -) +export { revertMigration, runMigrations } export * from "./initialize" -export * from 
"./types" -export * from "./loaders" +// TODO: remove export from models and services export * from "./models" export * from "./services" +export * from "./types" diff --git a/packages/pricing/src/module-definition.ts b/packages/pricing/src/module-definition.ts index 721047739aab3..b2cbdca76290a 100644 --- a/packages/pricing/src/module-definition.ts +++ b/packages/pricing/src/module-definition.ts @@ -1,12 +1,30 @@ +import { Modules } from "@medusajs/modules-sdk" import { ModuleExports } from "@medusajs/types" +import { ModulesSdkUtils } from "@medusajs/utils" +import * as Models from "@models" import { PricingModuleService } from "@services" import loadConnection from "./loaders/connection" import loadContainer from "./loaders/container" +const migrationScriptOptions = { + moduleName: Modules.PRICING, + models: Models, + pathToMigrations: __dirname + "/migrations", +} + +export const runMigrations = ModulesSdkUtils.buildMigrationScript( + migrationScriptOptions +) +export const revertMigration = ModulesSdkUtils.buildRevertMigrationScript( + migrationScriptOptions +) + const service = PricingModuleService const loaders = [loadContainer, loadConnection] as any export const moduleDefinition: ModuleExports = { service, loaders, + runMigrations, + revertMigration, } diff --git a/packages/product/src/index.ts b/packages/product/src/index.ts index e8df9fb52e004..515ee01a9179f 100644 --- a/packages/product/src/index.ts +++ b/packages/product/src/index.ts @@ -1,25 +1,14 @@ -import { moduleDefinition } from "./module-definition" -import { ModulesSdkUtils } from "@medusajs/utils" -import { Modules } from "@medusajs/modules-sdk" -import * as ProductModels from "@models" +import { + moduleDefinition, + revertMigration, + runMigrations, +} from "./module-definition" export default moduleDefinition - -const migrationScriptOptions = { - moduleName: Modules.PRODUCT, - models: ProductModels, - pathToMigrations: __dirname + "/migrations", -} - -export const runMigrations = ModulesSdkUtils.buildMigrationScript( - migrationScriptOptions -) -export const revertMigration = ModulesSdkUtils.buildRevertMigrationScript( - migrationScriptOptions -) +export { revertMigration, runMigrations } export * from "./initialize" -export * from "./types" -export * from "./loaders" +// TODO: remove export from models and services export * from "./models" export * from "./services" +export * from "./types" diff --git a/packages/product/src/module-definition.ts b/packages/product/src/module-definition.ts index d47414c790107..177c5c0cb978c 100644 --- a/packages/product/src/module-definition.ts +++ b/packages/product/src/module-definition.ts @@ -3,10 +3,29 @@ import { ProductModuleService } from "@services" import loadConnection from "./loaders/connection" import loadContainer from "./loaders/container" +import { Modules } from "@medusajs/modules-sdk" +import { ModulesSdkUtils } from "@medusajs/utils" +import * as ProductModels from "@models" + +const migrationScriptOptions = { + moduleName: Modules.PRODUCT, + models: ProductModels, + pathToMigrations: __dirname + "/migrations", +} + +export const runMigrations = ModulesSdkUtils.buildMigrationScript( + migrationScriptOptions +) +export const revertMigration = ModulesSdkUtils.buildRevertMigrationScript( + migrationScriptOptions +) + const service = ProductModuleService const loaders = [loadContainer, loadConnection] as any export const moduleDefinition: ModuleExports = { service, loaders, + runMigrations, + revertMigration, } diff --git 
a/packages/promotion/integration-tests/__tests__/services/promotion-module/promotion.spec.ts b/packages/promotion/integration-tests/__tests__/services/promotion-module/promotion.spec.ts index 9982b25e06320..f0520ff801315 100644 --- a/packages/promotion/integration-tests/__tests__/services/promotion-module/promotion.spec.ts +++ b/packages/promotion/integration-tests/__tests__/services/promotion-module/promotion.spec.ts @@ -428,6 +428,199 @@ describe("Promotion Service", () => { "rules[].operator (doesnotexist) is invalid. It should be one of gte, lte, gt, lt, eq, ne, in" ) }) + + it("should create a basic buyget promotion successfully", async () => { + const createdPromotion = await service + .create({ + code: "PROMOTION_TEST", + type: PromotionType.BUYGET, + }) + .catch((e) => e) + + const [promotion] = await service.list({ + id: [createdPromotion.id], + }) + + expect(promotion).toEqual( + expect.objectContaining({ + code: "PROMOTION_TEST", + is_automatic: false, + type: PromotionType.BUYGET, + }) + ) + }) + + it("should throw an error when target_rules are not present for buyget promotion", async () => { + const error = await service + .create({ + code: "PROMOTION_TEST", + type: PromotionType.BUYGET, + application_method: { + type: "fixed", + target_type: "items", + allocation: "across", + value: "100", + buy_rules: [ + { + attribute: "product_collection", + operator: "eq", + values: ["pcol_towel"], + }, + ], + }, + }) + .catch((e) => e) + + expect(error.message).toContain( + "Target rules are required for buyget promotion type" + ) + }) + + it("should throw an error when buy_rules are not present for buyget promotion", async () => { + const error = await service + .create({ + code: "PROMOTION_TEST", + type: PromotionType.BUYGET, + application_method: { + type: "fixed", + target_type: "items", + allocation: "across", + value: "100", + }, + }) + .catch((e) => e) + + expect(error.message).toContain( + "Buy rules are required for buyget promotion type" + ) + }) + + it("should throw an error when apply_to_quantity is not present for buyget promotion", async () => { + const error = await service + .create({ + code: "PROMOTION_TEST", + type: PromotionType.BUYGET, + application_method: { + type: "fixed", + target_type: "items", + allocation: "across", + value: "100", + buy_rules_min_quantity: 1, + buy_rules: [ + { + attribute: "product_collection.id", + operator: "eq", + values: ["pcol_towel"], + }, + ], + target_rules: [ + { + attribute: "product.id", + operator: "eq", + values: ["prod_mat"], + }, + ], + }, + }) + .catch((e) => e) + + expect(error.message).toContain( + "apply_to_quantity is a required field for Promotion type of buyget" + ) + }) + + it("should throw an error when buy_rules_min_quantity is not present for buyget promotion", async () => { + const error = await service + .create({ + code: "PROMOTION_TEST", + type: PromotionType.BUYGET, + application_method: { + type: "fixed", + target_type: "items", + allocation: "across", + value: "100", + apply_to_quantity: 1, + buy_rules: [ + { + attribute: "product_collection.id", + operator: "eq", + values: ["pcol_towel"], + }, + ], + target_rules: [ + { + attribute: "product.id", + operator: "eq", + values: ["prod_mat"], + }, + ], + }, + }) + .catch((e) => e) + + expect(error.message).toContain( + "buy_rules_min_quantity is a required field for Promotion type of buyget" + ) + }) + + it("should create a buyget promotion with rules successfully", async () => { + const createdPromotion = await service.create({ + code: "PROMOTION_TEST", + type: 
PromotionType.BUYGET, + application_method: { + type: "fixed", + target_type: "items", + allocation: "across", + value: "100", + apply_to_quantity: 1, + buy_rules_min_quantity: 1, + buy_rules: [ + { + attribute: "product_collection.id", + operator: "eq", + values: ["pcol_towel"], + }, + ], + target_rules: [ + { + attribute: "product.id", + operator: "eq", + values: "prod_mat", + }, + ], + }, + }) + + expect(createdPromotion).toEqual( + expect.objectContaining({ + code: "PROMOTION_TEST", + is_automatic: false, + type: PromotionType.BUYGET, + application_method: expect.objectContaining({ + type: "fixed", + target_type: "items", + allocation: "across", + value: 100, + apply_to_quantity: 1, + buy_rules_min_quantity: 1, + target_rules: [ + expect.objectContaining({ + attribute: "product.id", + operator: "eq", + values: [expect.objectContaining({ value: "prod_mat" })], + }), + ], + buy_rules: [ + expect.objectContaining({ + attribute: "product_collection.id", + operator: "eq", + values: [expect.objectContaining({ value: "pcol_towel" })], + }), + ], + }), + }) + ) + }) }) describe("update", () => { @@ -966,6 +1159,103 @@ describe("Promotion Service", () => { }) }) + describe("addPromotionBuyRules", () => { + let promotion + + beforeEach(async () => { + ;[promotion] = await service.create([ + { + code: "TEST", + type: PromotionType.BUYGET, + application_method: { + type: "fixed", + target_type: "items", + allocation: "each", + value: "100", + max_quantity: 500, + apply_to_quantity: 1, + buy_rules_min_quantity: 1, + target_rules: [ + { + attribute: "product.id", + operator: "in", + values: ["prod_1", "prod_2"], + }, + ], + buy_rules: [ + { + attribute: "product_collection.id", + operator: "eq", + values: ["pcol_towel"], + }, + ], + }, + }, + ]) + }) + + it("should throw an error when promotion with id does not exist", async () => { + let error + + try { + await service.addPromotionBuyRules("does-not-exist", []) + } catch (e) { + error = e + } + + expect(error.message).toEqual( + "Promotion with id: does-not-exist was not found" + ) + }) + + it("should throw an error when a id is not provided", async () => { + let error + + try { + await service.addPromotionBuyRules(undefined as unknown as string, []) + } catch (e) { + error = e + } + + expect(error.message).toEqual('"promotionId" must be defined') + }) + + it("should successfully create buy rules for a buyget promotion", async () => { + promotion = await service.addPromotionBuyRules(promotion.id, [ + { + attribute: "product.id", + operator: "in", + values: ["prod_3", "prod_4"], + }, + ]) + + expect(promotion).toEqual( + expect.objectContaining({ + id: promotion.id, + application_method: expect.objectContaining({ + buy_rules: expect.arrayContaining([ + expect.objectContaining({ + attribute: "product_collection.id", + operator: "eq", + values: expect.arrayContaining([ + expect.objectContaining({ value: "pcol_towel" }), + ]), + }), + expect.objectContaining({ + attribute: "product.id", + operator: "in", + values: expect.arrayContaining([ + expect.objectContaining({ value: "prod_3" }), + expect.objectContaining({ value: "prod_4" }), + ]), + }), + ]), + }), + }) + ) + }) + }) + describe("removePromotionRules", () => { let promotion @@ -1108,4 +1398,88 @@ describe("Promotion Service", () => { ) }) }) + + describe("removePromotionBuyRules", () => { + let promotion + + beforeEach(async () => { + ;[promotion] = await service.create([ + { + code: "TEST", + type: PromotionType.BUYGET, + application_method: { + type: "fixed", + target_type: "items", + 
allocation: "each", + value: "100", + max_quantity: 500, + apply_to_quantity: 1, + buy_rules_min_quantity: 1, + target_rules: [ + { + attribute: "product.id", + operator: "in", + values: ["prod_1", "prod_2"], + }, + ], + buy_rules: [ + { + attribute: "product_collection", + operator: "eq", + values: ["pcol_towel"], + }, + ], + }, + }, + ]) + }) + + it("should throw an error when promotion with id does not exist", async () => { + let error + + try { + await service.removePromotionBuyRules("does-not-exist", []) + } catch (e) { + error = e + } + + expect(error.message).toEqual( + "Promotion with id: does-not-exist was not found" + ) + }) + + it("should throw an error when a id is not provided", async () => { + let error + + try { + await service.removePromotionBuyRules( + undefined as unknown as string, + [] + ) + } catch (e) { + error = e + } + + expect(error.message).toEqual('"promotionId" must be defined') + }) + + it("should successfully remove rules for a promotion", async () => { + const [ruleId] = promotion.application_method.buy_rules.map( + (rule) => rule.id + ) + + promotion = await service.removePromotionBuyRules(promotion.id, [ + { id: ruleId }, + ]) + + expect(promotion).toEqual( + expect.objectContaining({ + id: promotion.id, + application_method: expect.objectContaining({ + buy_rules: [], + }), + }) + ) + }) + }) }) diff --git a/packages/promotion/src/index.ts b/packages/promotion/src/index.ts index 220ccf1f5c4bf..d449f7354469a 100644 --- a/packages/promotion/src/index.ts +++ b/packages/promotion/src/index.ts @@ -1,22 +1,10 @@ -import { moduleDefinition } from "./module-definition" -import { Modules } from "@medusajs/modules-sdk" -import * as Models from "@models" -import { ModulesSdkUtils } from "@medusajs/utils" +import { + moduleDefinition, + revertMigration, + runMigrations, +} from "./module-definition" export default moduleDefinition - -const migrationScriptOptions = { - moduleName: Modules.PROMOTION, - models: Models, - pathToMigrations: __dirname + "/migrations", -} - -export const runMigrations = ModulesSdkUtils.buildMigrationScript( - migrationScriptOptions -) -export const revertMigration = ModulesSdkUtils.buildRevertMigrationScript( - migrationScriptOptions -) +export { revertMigration, runMigrations } export * from "./initialize" -export * from "./loaders" diff --git a/packages/promotion/src/migrations/.snapshot-medusa-promotion.json b/packages/promotion/src/migrations/.snapshot-medusa-promotion.json index bdf7a42841e84..109ece07d1061 100644 --- a/packages/promotion/src/migrations/.snapshot-medusa-promotion.json +++ b/packages/promotion/src/migrations/.snapshot-medusa-promotion.json @@ -398,6 +398,24 @@ "nullable": true, "mappedType": "decimal" }, + "apply_to_quantity": { + "name": "apply_to_quantity", + "type": "numeric", + "unsigned": false, + "autoincrement": false, + "primary": false, + "nullable": true, + "mappedType": "decimal" + }, + "buy_rules_min_quantity": { + "name": "buy_rules_min_quantity", + "type": "numeric", + "unsigned": false, + "autoincrement": false, + "primary": false, + "nullable": true, + "mappedType": "decimal" + }, "type": { "name": "type", "type": "text", @@ -697,11 +715,65 @@ "mappedType": "text" } }, - "name": "application_method_promotion_rule", + "name": "application_method_target_rules", + "schema": "public", + "indexes": [ + { + "keyName": "application_method_target_rules_pkey", + "columnNames": ["application_method_id", "promotion_rule_id"], + "composite": true, + "primary": true, + "unique": true + } + ], + "checks": [], + 
"foreignKeys": { + "application_method_target_rules_application_method_id_foreign": { + "constraintName": "application_method_target_rules_application_method_id_foreign", + "columnNames": ["application_method_id"], + "localTableName": "public.application_method_target_rules", + "referencedColumnNames": ["id"], + "referencedTableName": "public.application_method", + "deleteRule": "cascade", + "updateRule": "cascade" + }, + "application_method_target_rules_promotion_rule_id_foreign": { + "constraintName": "application_method_target_rules_promotion_rule_id_foreign", + "columnNames": ["promotion_rule_id"], + "localTableName": "public.application_method_target_rules", + "referencedColumnNames": ["id"], + "referencedTableName": "public.promotion_rule", + "deleteRule": "cascade", + "updateRule": "cascade" + } + } + }, + { + "columns": { + "application_method_id": { + "name": "application_method_id", + "type": "text", + "unsigned": false, + "autoincrement": false, + "primary": false, + "nullable": false, + "mappedType": "text" + }, + "promotion_rule_id": { + "name": "promotion_rule_id", + "type": "text", + "unsigned": false, + "autoincrement": false, + "primary": false, + "nullable": false, + "mappedType": "text" + } + }, + "name": "application_method_buy_rules", "schema": "public", "indexes": [ { - "keyName": "application_method_promotion_rule_pkey", + "keyName": "application_method_buy_rules_pkey", "columnNames": ["application_method_id", "promotion_rule_id"], "composite": true, "primary": true, @@ -710,19 +782,19 @@ ], "checks": [], "foreignKeys": { - "application_method_promotion_rule_application_method_id_foreign": { - "constraintName": "application_method_promotion_rule_application_method_id_foreign", + "application_method_buy_rules_application_method_id_foreign": { + "constraintName": "application_method_buy_rules_application_method_id_foreign", "columnNames": ["application_method_id"], - "localTableName": "public.application_method_promotion_rule", + "localTableName": "public.application_method_buy_rules", "referencedColumnNames": ["id"], "referencedTableName": "public.application_method", "deleteRule": "cascade", "updateRule": "cascade" }, - "application_method_promotion_rule_promotion_rule_id_foreign": { - "constraintName": "application_method_promotion_rule_promotion_rule_id_foreign", + "application_method_buy_rules_promotion_rule_id_foreign": { + "constraintName": "application_method_buy_rules_promotion_rule_id_foreign", "columnNames": ["promotion_rule_id"], - "localTableName": "public.application_method_promotion_rule", + "localTableName": "public.application_method_buy_rules", "referencedColumnNames": ["id"], "referencedTableName": "public.promotion_rule", "deleteRule": "cascade", diff --git a/packages/promotion/src/migrations/Migration20240122070028.ts b/packages/promotion/src/migrations/Migration20240122084316.ts similarity index 71% rename from packages/promotion/src/migrations/Migration20240122070028.ts rename to packages/promotion/src/migrations/Migration20240122084316.ts index aad49ce928b68..db3a0e841d971 100644 --- a/packages/promotion/src/migrations/Migration20240122070028.ts +++ b/packages/promotion/src/migrations/Migration20240122084316.ts @@ -1,6 +1,6 @@ import { Migration } from "@mikro-orm/migrations" -export class Migration20240122070028 extends Migration { +export class Migration20240122084316 extends Migration { async up(): Promise { this.addSql( 'create table "campaign" ("id" text not null, "name" text not null, "description" text null, "currency" text null, 
"campaign_identifier" text not null, "starts_at" timestamptz null, "ends_at" timestamptz null, "created_at" timestamptz not null default now(), "updated_at" timestamptz not null default now(), "deleted_at" timestamptz null, constraint "campaign_pkey" primary key ("id"));' @@ -29,7 +29,7 @@ export class Migration20240122070028 extends Migration { ) this.addSql( - 'create table "application_method" ("id" text not null, "value" numeric null, "max_quantity" numeric null, "type" text check ("type" in (\'fixed\', \'percentage\')) not null, "target_type" text check ("target_type" in (\'order\', \'shipping_methods\', \'items\')) not null, "allocation" text check ("allocation" in (\'each\', \'across\')) null, "promotion_id" text not null, "created_at" timestamptz not null default now(), "updated_at" timestamptz not null default now(), "deleted_at" timestamptz null, constraint "application_method_pkey" primary key ("id"));' + 'create table "application_method" ("id" text not null, "value" numeric null, "max_quantity" numeric null, "apply_to_quantity" numeric null, "buy_rules_min_quantity" numeric null, "type" text check ("type" in (\'fixed\', \'percentage\')) not null, "target_type" text check ("target_type" in (\'order\', \'shipping_methods\', \'items\')) not null, "allocation" text check ("allocation" in (\'each\', \'across\')) null, "promotion_id" text not null, "created_at" timestamptz not null default now(), "updated_at" timestamptz not null default now(), "deleted_at" timestamptz null, constraint "application_method_pkey" primary key ("id"));' ) this.addSql( 'create index "IDX_application_method_type" on "application_method" ("type");' @@ -59,7 +59,11 @@ export class Migration20240122070028 extends Migration { ) this.addSql( - 'create table "application_method_promotion_rule" ("application_method_id" text not null, "promotion_rule_id" text not null, constraint "application_method_promotion_rule_pkey" primary key ("application_method_id", "promotion_rule_id"));' + 'create table "application_method_target_rules" ("application_method_id" text not null, "promotion_rule_id" text not null, constraint "application_method_target_rules_pkey" primary key ("application_method_id", "promotion_rule_id"));' + ) + + this.addSql( + 'create table "application_method_buy_rules" ("application_method_id" text not null, "promotion_rule_id" text not null, constraint "application_method_buy_rules_pkey" primary key ("application_method_id", "promotion_rule_id"));' ) this.addSql( @@ -89,10 +93,17 @@ export class Migration20240122070028 extends Migration { ) this.addSql( - 'alter table "application_method_promotion_rule" add constraint "application_method_promotion_rule_application_method_id_foreign" foreign key ("application_method_id") references "application_method" ("id") on update cascade on delete cascade;' + 'alter table "application_method_target_rules" add constraint "application_method_target_rules_application_method_id_foreign" foreign key ("application_method_id") references "application_method" ("id") on update cascade on delete cascade;' + ) + this.addSql( + 'alter table "application_method_target_rules" add constraint "application_method_target_rules_promotion_rule_id_foreign" foreign key ("promotion_rule_id") references "promotion_rule" ("id") on update cascade on delete cascade;' + ) + + this.addSql( + 'alter table "application_method_buy_rules" add constraint "application_method_buy_rules_application_method_id_foreign" foreign key ("application_method_id") references "application_method" ("id") on 
update cascade on delete cascade;' ) this.addSql( - 'alter table "application_method_promotion_rule" add constraint "application_method_promotion_rule_promotion_rule_id_foreign" foreign key ("promotion_rule_id") references "promotion_rule" ("id") on update cascade on delete cascade;' + 'alter table "application_method_buy_rules" add constraint "application_method_buy_rules_promotion_rule_id_foreign" foreign key ("promotion_rule_id") references "promotion_rule" ("id") on update cascade on delete cascade;' ) this.addSql( diff --git a/packages/promotion/src/models/application-method.ts b/packages/promotion/src/models/application-method.ts index 1f28b377d4e7f..e4201b53f09c1 100644 --- a/packages/promotion/src/models/application-method.ts +++ b/packages/promotion/src/models/application-method.ts @@ -25,6 +25,8 @@ import PromotionRule from "./promotion-rule" type OptionalFields = | "value" | "max_quantity" + | "apply_to_quantity" + | "buy_rules_min_quantity" | "allocation" | DAL.SoftDeletableEntityDateColumns @@ -37,10 +39,16 @@ export default class ApplicationMethod { id!: string @Property({ columnType: "numeric", nullable: true, serializer: Number }) - value?: string | null + value?: string | null = null @Property({ columnType: "numeric", nullable: true, serializer: Number }) - max_quantity?: number | null + max_quantity?: number | null = null + + @Property({ columnType: "numeric", nullable: true, serializer: Number }) + apply_to_quantity?: number | null = null + + @Property({ columnType: "numeric", nullable: true, serializer: Number }) + buy_rules_min_quantity?: number | null = null @Index({ name: "IDX_application_method_type" }) @Enum(() => PromotionUtils.ApplicationMethodType) @@ -63,13 +71,20 @@ export default class ApplicationMethod { }) promotion: Promotion - @ManyToMany(() => PromotionRule, "application_methods", { + @ManyToMany(() => PromotionRule, "method_target_rules", { owner: true, - pivotTable: "application_method_promotion_rule", + pivotTable: "application_method_target_rules", cascade: ["soft-remove"] as any, }) target_rules = new Collection(this) + @ManyToMany(() => PromotionRule, "method_buy_rules", { + owner: true, + pivotTable: "application_method_buy_rules", + cascade: ["soft-remove"] as any, + }) + buy_rules = new Collection(this) + @Property({ onCreate: () => new Date(), columnType: "timestamptz", diff --git a/packages/promotion/src/models/campaign-budget.ts b/packages/promotion/src/models/campaign-budget.ts index 45e37d46c8f9e..4ecf0f9272bd2 100644 --- a/packages/promotion/src/models/campaign-budget.ts +++ b/packages/promotion/src/models/campaign-budget.ts @@ -35,7 +35,7 @@ export default class CampaignBudget { @OneToOne({ entity: () => Campaign, }) - campaign?: Campaign | null + campaign: Campaign | null = null @Property({ columnType: "numeric", @@ -43,7 +43,7 @@ export default class CampaignBudget { serializer: Number, default: null, }) - limit?: number | null + limit: number | null = null @Property({ columnType: "numeric", diff --git a/packages/promotion/src/models/campaign.ts b/packages/promotion/src/models/campaign.ts index 81cffce1e4d67..db01139ff9a3a 100644 --- a/packages/promotion/src/models/campaign.ts +++ b/packages/promotion/src/models/campaign.ts @@ -36,10 +36,10 @@ export default class Campaign { name: string @Property({ columnType: "text", nullable: true }) - description?: string | null + description: string | null = null @Property({ columnType: "text", nullable: true }) - currency?: string | null + currency: string | null = null @Property({ columnType: 
"text" }) @Unique({ @@ -52,13 +52,13 @@ export default class Campaign { columnType: "timestamptz", nullable: true, }) - starts_at?: Date | null + starts_at: Date | null = null @Property({ columnType: "timestamptz", nullable: true, }) - ends_at?: Date | null + ends_at: Date | null = null @OneToOne({ entity: () => CampaignBudget, @@ -66,7 +66,7 @@ export default class Campaign { cascade: ["soft-remove"] as any, nullable: true, }) - budget?: CampaignBudget | null + budget: CampaignBudget | null = null @OneToMany(() => Promotion, (promotion) => promotion.campaign, { orphanRemoval: true, diff --git a/packages/promotion/src/models/promotion-rule.ts b/packages/promotion/src/models/promotion-rule.ts index c60364832551b..90627a9b6dcd8 100644 --- a/packages/promotion/src/models/promotion-rule.ts +++ b/packages/promotion/src/models/promotion-rule.ts @@ -31,7 +31,7 @@ export default class PromotionRule { id!: string @Property({ columnType: "text", nullable: true }) - description?: string | null + description: string | null = null @Index({ name: "IDX_promotion_rule_attribute" }) @Property({ columnType: "text" }) @@ -53,7 +53,13 @@ export default class PromotionRule { () => ApplicationMethod, (applicationMethod) => applicationMethod.target_rules ) - application_methods = new Collection(this) + method_target_rules = new Collection(this) + + @ManyToMany( + () => ApplicationMethod, + (applicationMethod) => applicationMethod.buy_rules + ) + method_buy_rules = new Collection(this) @Property({ onCreate: () => new Date(), diff --git a/packages/promotion/src/models/promotion.ts b/packages/promotion/src/models/promotion.ts index ace7ee88ecea9..8a003d897782b 100644 --- a/packages/promotion/src/models/promotion.ts +++ b/packages/promotion/src/models/promotion.ts @@ -1,4 +1,4 @@ -import { DAL, PromotionType } from "@medusajs/types" +import { DAL, PromotionTypeValues } from "@medusajs/types" import { DALUtils, PromotionUtils, generateEntityId } from "@medusajs/utils" import { BeforeCreate, @@ -45,14 +45,14 @@ export default class Promotion { nullable: true, cascade: ["soft-remove"] as any, }) - campaign?: Campaign | null + campaign: Campaign | null = null @Property({ columnType: "boolean", default: false }) is_automatic: boolean = false @Index({ name: "IDX_promotion_type" }) @Enum(() => PromotionUtils.PromotionType) - type: PromotionType + type: PromotionTypeValues @OneToOne({ entity: () => ApplicationMethod, diff --git a/packages/promotion/src/module-definition.ts b/packages/promotion/src/module-definition.ts index db87261647c29..d2e899cef86d3 100644 --- a/packages/promotion/src/module-definition.ts +++ b/packages/promotion/src/module-definition.ts @@ -1,12 +1,30 @@ +import { Modules } from "@medusajs/modules-sdk" import { ModuleExports } from "@medusajs/types" +import { ModulesSdkUtils } from "@medusajs/utils" +import * as Models from "@models" import { PromotionModuleService } from "@services" import loadConnection from "./loaders/connection" import loadContainer from "./loaders/container" +const migrationScriptOptions = { + moduleName: Modules.PROMOTION, + models: Models, + pathToMigrations: __dirname + "/migrations", +} + +export const runMigrations = ModulesSdkUtils.buildMigrationScript( + migrationScriptOptions +) +export const revertMigration = ModulesSdkUtils.buildRevertMigrationScript( + migrationScriptOptions +) + const service = PromotionModuleService const loaders = [loadContainer, loadConnection] as any export const moduleDefinition: ModuleExports = { service, loaders, + runMigrations, + 
revertMigration, } diff --git a/packages/promotion/src/services/promotion-module.ts b/packages/promotion/src/services/promotion-module.ts index e73b75ef49e65..1756901800076 100644 --- a/packages/promotion/src/services/promotion-module.ts +++ b/packages/promotion/src/services/promotion-module.ts @@ -15,6 +15,7 @@ import { InjectTransactionManager, MedusaContext, MedusaError, + PromotionType, isString, mapObjectTo, } from "@medusajs/utils" @@ -35,6 +36,7 @@ import { PromotionService, } from "@services" import { + ApplicationMethodRuleTypes, CreateApplicationMethodDTO, CreateCampaignBudgetDTO, CreateCampaignDTO, @@ -456,6 +458,8 @@ export default class PromotionModuleService< "application_method", "application_method.target_rules", "application_method.target_rules.values", + "application_method.buy_rules", + "application_method.buy_rules.values", "rules", "rules.values", "campaign", @@ -485,7 +489,11 @@ export default class PromotionModuleService< string, PromotionTypes.CreatePromotionRuleDTO[] >() - const applicationMethodRuleMap = new Map< + const methodTargetRulesMap = new Map< + string, + PromotionTypes.CreatePromotionRuleDTO[] + >() + const methodBuyRulesMap = new Map< string, PromotionTypes.CreatePromotionRuleDTO[] >() @@ -551,6 +559,7 @@ export default class PromotionModuleService< if (applMethodData) { const { target_rules: targetRulesData = [], + buy_rules: buyRulesData = [], ...applicationMethodWithoutRules } = applMethodData const applicationMethodData = { @@ -569,11 +578,33 @@ export default class PromotionModuleService< ) } - validateApplicationMethodAttributes(applicationMethodData) + if (promotion.type === PromotionType.BUYGET && !buyRulesData.length) { + throw new MedusaError( + MedusaError.Types.INVALID_DATA, + `Buy rules are required for ${PromotionType.BUYGET} promotion type` + ) + } + + if ( + promotion.type === PromotionType.BUYGET && + !targetRulesData.length + ) { + throw new MedusaError( + MedusaError.Types.INVALID_DATA, + `Target rules are required for ${PromotionType.BUYGET} promotion type` + ) + } + + validateApplicationMethodAttributes(applicationMethodData, promotion) + applicationMethodsData.push(applicationMethodData) if (targetRulesData.length) { - applicationMethodRuleMap.set(promotion.id, targetRulesData) + methodTargetRulesMap.set(promotion.id, targetRulesData) + } + + if (buyRulesData.length) { + methodBuyRulesMap.set(promotion.id, buyRulesData) } } @@ -597,8 +628,15 @@ export default class PromotionModuleService< for (const applicationMethod of createdApplicationMethods) { await this.createPromotionRulesAndValues_( - applicationMethodRuleMap.get(applicationMethod.promotion.id) || [], - "application_methods", + methodTargetRulesMap.get(applicationMethod.promotion.id) || [], + "method_target_rules", + applicationMethod, + sharedContext + ) + + await this.createPromotionRulesAndValues_( + methodBuyRulesMap.get(applicationMethod.promotion.id) || [], + "method_buy_rules", applicationMethod, sharedContext ) @@ -694,18 +732,10 @@ export default class PromotionModuleService< existingApplicationMethod.max_quantity = null } - validateApplicationMethodAttributes({ - type: applicationMethodData.type || existingApplicationMethod.type, - target_type: - applicationMethodData.target_type || - existingApplicationMethod.target_type, - allocation: - applicationMethodData.allocation || - existingApplicationMethod.allocation, - max_quantity: - applicationMethodData.max_quantity || - existingApplicationMethod.max_quantity, - }) + validateApplicationMethodAttributes( + 
applicationMethodData, + existingPromotion + ) applicationMethodsData.push({ ...applicationMethodData, @@ -771,7 +801,7 @@ export default class PromotionModuleService< await this.createPromotionRulesAndValues_( rulesData, - "application_methods", + "method_target_rules", applicationMethod, sharedContext ) @@ -791,10 +821,51 @@ export default class PromotionModuleService< ) } + @InjectManager("baseRepository_") + async addPromotionBuyRules( + promotionId: string, + rulesData: PromotionTypes.CreatePromotionRuleDTO[], + @MedusaContext() sharedContext: Context = {} + ): Promise { + const promotion = await this.promotionService_.retrieve(promotionId, { + relations: ["application_method"], + }) + + const applicationMethod = promotion.application_method + + if (!applicationMethod) { + throw new MedusaError( + MedusaError.Types.INVALID_DATA, + `application_method for promotion not found` + ) + } + + await this.createPromotionRulesAndValues_( + rulesData, + "method_buy_rules", + applicationMethod, + sharedContext + ) + + return this.retrieve( + promotionId, + { + relations: [ + "rules", + "rules.values", + "application_method", + "application_method.buy_rules", + "application_method.buy_rules.values", + ], + }, + sharedContext + ) + } + @InjectTransactionManager("baseRepository_") protected async createPromotionRulesAndValues_( rulesData: PromotionTypes.CreatePromotionRuleDTO[], - relationName: "promotions" | "application_methods", + relationName: "promotions" | "method_target_rules" | "method_buy_rules", relation: Promotion | ApplicationMethod, @MedusaContext() sharedContext: Context = {} ) { @@ -952,9 +1023,10 @@ export default class PromotionModuleService< rulesData: PromotionTypes.RemovePromotionRuleDTO[], @MedusaContext() sharedContext: Context = {} ): Promise { - await this.removePromotionTargetRules_( + await this.removeApplicationMethodRules_( promotionId, rulesData, + ApplicationMethodRuleTypes.TARGET_RULES, sharedContext ) @@ -973,16 +1045,47 @@ export default class PromotionModuleService< ) } + @InjectManager("baseRepository_") + async removePromotionBuyRules( + promotionId: string, + rulesData: PromotionTypes.RemovePromotionRuleDTO[], + @MedusaContext() sharedContext: Context = {} + ): Promise { + await this.removeApplicationMethodRules_( + promotionId, + rulesData, + ApplicationMethodRuleTypes.BUY_RULES, + sharedContext + ) + + return this.retrieve( + promotionId, + { + relations: [ + "rules", + "rules.values", + "application_method", + "application_method.buy_rules", + "application_method.buy_rules.values", + ], + }, + sharedContext + ) + } + @InjectTransactionManager("baseRepository_") - protected async removePromotionTargetRules_( + protected async removeApplicationMethodRules_( promotionId: string, rulesData: PromotionTypes.RemovePromotionRuleDTO[], + relation: + | ApplicationMethodRuleTypes.TARGET_RULES + | ApplicationMethodRuleTypes.BUY_RULES, @MedusaContext() sharedContext: Context = {} ): Promise { const promotionRuleIds = rulesData.map((ruleData) => ruleData.id) const promotion = await this.promotionService_.retrieve( promotionId, - { relations: ["application_method.target_rules"] }, + { relations: [`application_method.${relation}`] }, sharedContext ) @@ -995,7 +1098,7 @@ export default class PromotionModuleService< ) } - const targetRuleIdsToRemove = applicationMethod.target_rules + const targetRuleIdsToRemove = applicationMethod[relation] .toArray() .filter((rule) => promotionRuleIds.includes(rule.id)) .map((rule) => rule.id) diff --git 
a/packages/promotion/src/types/application-method.ts b/packages/promotion/src/types/application-method.ts index f269b4de2b7b6..3ba19ad906f23 100644 --- a/packages/promotion/src/types/application-method.ts +++ b/packages/promotion/src/types/application-method.ts @@ -14,6 +14,8 @@ export interface CreateApplicationMethodDTO { value?: string | null promotion: Promotion | string | PromotionDTO max_quantity?: number | null + buy_rules_min_quantity?: number | null + apply_to_quantity?: number | null } export interface UpdateApplicationMethodDTO { @@ -24,4 +26,6 @@ export interface UpdateApplicationMethodDTO { value?: string | null promotion?: Promotion | string | PromotionDTO max_quantity?: number | null + buy_rules_min_quantity?: number | null + apply_to_quantity?: number | null } diff --git a/packages/promotion/src/types/promotion-rule.ts b/packages/promotion/src/types/promotion-rule.ts index 750fe96a410ee..3ed12d5d3794b 100644 --- a/packages/promotion/src/types/promotion-rule.ts +++ b/packages/promotion/src/types/promotion-rule.ts @@ -9,3 +9,8 @@ export interface CreatePromotionRuleDTO { export interface UpdatePromotionRuleDTO { id: string } + +export enum ApplicationMethodRuleTypes { + TARGET_RULES = "target_rules", + BUY_RULES = "buy_rules", +} diff --git a/packages/promotion/src/types/promotion.ts b/packages/promotion/src/types/promotion.ts index 8e63c442cfb70..ae63f9f36b434 100644 --- a/packages/promotion/src/types/promotion.ts +++ b/packages/promotion/src/types/promotion.ts @@ -1,8 +1,8 @@ -import { PromotionType } from "@medusajs/types" +import { PromotionTypeValues } from "@medusajs/types" export interface CreatePromotionDTO { code: string - type: PromotionType + type: PromotionTypeValues is_automatic?: boolean campaign?: string } @@ -10,8 +10,7 @@ export interface CreatePromotionDTO { export interface UpdatePromotionDTO { id: string code?: string - // TODO: add this when buyget is available - // type: PromotionType + type?: PromotionTypeValues is_automatic?: boolean campaign?: string } diff --git a/packages/promotion/src/utils/validations/application-method.ts b/packages/promotion/src/utils/validations/application-method.ts index 95d384e19fd2e..99c6296349cc4 100644 --- a/packages/promotion/src/utils/validations/application-method.ts +++ b/packages/promotion/src/utils/validations/application-method.ts @@ -1,16 +1,17 @@ -import { - ApplicationMethodAllocationValues, - ApplicationMethodTargetTypeValues, - ApplicationMethodTypeValues, -} from "@medusajs/types" import { ApplicationMethodAllocation, ApplicationMethodTargetType, ApplicationMethodType, MedusaError, + PromotionType, isDefined, isPresent, } from "@medusajs/utils" +import { Promotion } from "@models" +import { + CreateApplicationMethodDTO, + UpdateApplicationMethodDTO, +} from "../../types" export const allowedAllocationTargetTypes: string[] = [ ApplicationMethodTargetType.SHIPPING_METHODS, @@ -26,17 +27,40 @@ export const allowedAllocationForQuantity: string[] = [ ApplicationMethodAllocation.EACH, ] -export function validateApplicationMethodAttributes(data: { - type: ApplicationMethodTypeValues - target_type: ApplicationMethodTargetTypeValues - allocation?: ApplicationMethodAllocationValues - max_quantity?: number | null -}) { +export function validateApplicationMethodAttributes( + data: UpdateApplicationMethodDTO | CreateApplicationMethodDTO, + promotion: Promotion +) { + const applicationMethod = promotion?.application_method || {} + const buyRulesMinQuantity = + data.buy_rules_min_quantity || 
applicationMethod?.buy_rules_min_quantity + const applyToQuantity = + data.apply_to_quantity || applicationMethod?.apply_to_quantity + const targetType = data.target_type || applicationMethod?.target_type + const applicationMethodType = data.type || applicationMethod?.type + const maxQuantity = data.max_quantity || applicationMethod.max_quantity + const allocation = data.allocation || applicationMethod.allocation const allTargetTypes: string[] = Object.values(ApplicationMethodTargetType) + if (promotion?.type === PromotionType.BUYGET) { + if (!isPresent(applyToQuantity)) { + throw new MedusaError( + MedusaError.Types.INVALID_DATA, + `apply_to_quantity is a required field for Promotion type of ${PromotionType.BUYGET}` + ) + } + + if (!isPresent(buyRulesMinQuantity)) { + throw new MedusaError( + MedusaError.Types.INVALID_DATA, + `buy_rules_min_quantity is a required field for Promotion type of ${PromotionType.BUYGET}` + ) + } + } + if ( - data.allocation === ApplicationMethodAllocation.ACROSS && - isPresent(data.max_quantity) + allocation === ApplicationMethodAllocation.ACROSS && + isPresent(maxQuantity) ) { throw new MedusaError( MedusaError.Types.INVALID_DATA, @@ -44,7 +68,7 @@ export function validateApplicationMethodAttributes(data: { ) } - if (!allTargetTypes.includes(data.target_type)) { + if (!allTargetTypes.includes(targetType)) { throw new MedusaError( MedusaError.Types.INVALID_DATA, `application_method.target_type should be one of ${allTargetTypes.join( @@ -55,7 +79,7 @@ export function validateApplicationMethodAttributes(data: { const allTypes: string[] = Object.values(ApplicationMethodType) - if (!allTypes.includes(data.type)) { + if (!allTypes.includes(applicationMethodType)) { throw new MedusaError( MedusaError.Types.INVALID_DATA, `application_method.type should be one of ${allTypes.join(", ")}` @@ -63,8 +87,8 @@ export function validateApplicationMethodAttributes(data: { } if ( - allowedAllocationTargetTypes.includes(data.target_type) && - !allowedAllocationTypes.includes(data.allocation || "") + allowedAllocationTargetTypes.includes(targetType) && + !allowedAllocationTypes.includes(allocation || "") ) { throw new MedusaError( MedusaError.Types.INVALID_DATA, @@ -80,7 +104,7 @@ export function validateApplicationMethodAttributes(data: { ApplicationMethodAllocation ) - if (data.allocation && !allAllocationTypes.includes(data.allocation)) { + if (allocation && !allAllocationTypes.includes(allocation)) { throw new MedusaError( MedusaError.Types.INVALID_DATA, `application_method.allocation should be one of ${allAllocationTypes.join( @@ -90,9 +114,9 @@ export function validateApplicationMethodAttributes(data: { } if ( - data.allocation && - allowedAllocationForQuantity.includes(data.allocation) && - !isDefined(data.max_quantity) + allocation && + allowedAllocationForQuantity.includes(allocation) && + !isDefined(maxQuantity) ) { throw new MedusaError( MedusaError.Types.INVALID_DATA, diff --git a/packages/stock-location/src/index.ts b/packages/stock-location/src/index.ts index d3159f3c9a771..6ec4fe48341f7 100644 --- a/packages/stock-location/src/index.ts +++ b/packages/stock-location/src/index.ts @@ -1,8 +1,6 @@ -import { revertMigration, runMigrations } from "./migrations/run-migration" import { moduleDefinition } from "./module-definition" export default moduleDefinition - export * from "./initialize" export { revertMigration, runMigrations } from "./migrations/run-migration" export * from "./types" diff --git a/packages/stock-location/src/module-definition.ts 
b/packages/stock-location/src/module-definition.ts index 6fdf83e2ed4e1..44ed23a71c771 100644 --- a/packages/stock-location/src/module-definition.ts +++ b/packages/stock-location/src/module-definition.ts @@ -1,9 +1,9 @@ -import StockLocationService from "./services/stock-location" -import loadConnection from "./loaders/connection" -import * as StockLocationModels from "./models" import { ModuleExports } from "@medusajs/types" +import loadConnection from "./loaders/connection" import migrations from "./migrations" import { revertMigration, runMigrations } from "./migrations/run-migration" +import * as StockLocationModels from "./models" +import StockLocationService from "./services/stock-location" const service = StockLocationService const loaders = [loadConnection] diff --git a/packages/types/src/authentication/common/auth-provider.ts b/packages/types/src/authentication/common/auth-provider.ts index f39d01b21755f..e3653642136f8 100644 --- a/packages/types/src/authentication/common/auth-provider.ts +++ b/packages/types/src/authentication/common/auth-provider.ts @@ -5,6 +5,7 @@ export type AuthProviderDTO = { name: string domain: ProviderDomain is_active: boolean + config: Record | null } export type CreateAuthProviderDTO = { @@ -12,6 +13,7 @@ export type CreateAuthProviderDTO = { name: string domain?: ProviderDomain is_active?: boolean + config?: Record } export type UpdateAuthProviderDTO = { @@ -19,6 +21,7 @@ export type UpdateAuthProviderDTO = { name?: string domain?: ProviderDomain is_active?: boolean + config?: Record } export enum ProviderDomain { diff --git a/packages/types/src/authentication/common/auth-user.ts b/packages/types/src/authentication/common/auth-user.ts index ae5b03b0fe253..11357a10ebe8a 100644 --- a/packages/types/src/authentication/common/auth-user.ts +++ b/packages/types/src/authentication/common/auth-user.ts @@ -4,6 +4,7 @@ import { AuthProviderDTO } from "./auth-provider" export type AuthUserDTO = { id: string provider_id: string + entity_id: string provider: AuthProviderDTO provider_metadata?: Record user_metadata: Record @@ -12,6 +13,7 @@ export type AuthUserDTO = { export type CreateAuthUserDTO = { provider_id: string + entity_id: string provider_metadata?: Record user_metadata?: Record app_metadata?: Record diff --git a/packages/types/src/authentication/common/index.ts b/packages/types/src/authentication/common/index.ts index b4282c985c6a3..332e78fe42975 100644 --- a/packages/types/src/authentication/common/index.ts +++ b/packages/types/src/authentication/common/index.ts @@ -1,2 +1,3 @@ export * from "./auth-user" export * from "./auth-provider" +export * from "./provider" diff --git a/packages/types/src/authentication/common/provider.ts b/packages/types/src/authentication/common/provider.ts new file mode 100644 index 0000000000000..aef4339630a86 --- /dev/null +++ b/packages/types/src/authentication/common/provider.ts @@ -0,0 +1,5 @@ +export type AuthenticationResponse = { + success: boolean + authUser?: any + error?: string +} diff --git a/packages/types/src/authentication/index.ts b/packages/types/src/authentication/index.ts index 711715ff6f885..1aa665fd541fe 100644 --- a/packages/types/src/authentication/index.ts +++ b/packages/types/src/authentication/index.ts @@ -1,3 +1,2 @@ export * from "./service" export * from "./common" -export * from "./provider" diff --git a/packages/types/src/authentication/provider.ts b/packages/types/src/authentication/provider.ts deleted file mode 100644 index db950c2077f5f..0000000000000 --- 
a/packages/types/src/authentication/provider.ts +++ /dev/null @@ -1,8 +0,0 @@ -export abstract class AbstractAuthenticationModuleProvider { - public static PROVIDER: string - public static DISPLAY_NAME: string - - abstract authenticate( - data: Record - ): Promise> -} diff --git a/packages/types/src/authentication/service.ts b/packages/types/src/authentication/service.ts index 78df1eabca25b..ddd0d76c6b4a6 100644 --- a/packages/types/src/authentication/service.ts +++ b/packages/types/src/authentication/service.ts @@ -1,5 +1,5 @@ -import { IModuleService } from "../modules-sdk" import { + AuthenticationResponse, AuthProviderDTO, AuthUserDTO, CreateAuthProviderDTO, @@ -9,10 +9,17 @@ import { UpdateAuthProviderDTO, UpdateAuthUserDTO, } from "./common" -import { FindConfig } from "../common" + import { Context } from "../shared-context" +import { FindConfig } from "../common" +import { IModuleService } from "../modules-sdk" export interface IAuthenticationModuleService extends IModuleService { + authenticate( + provider: string, + providerData: Record + ): Promise + retrieveAuthProvider( provider: string, config?: FindConfig, diff --git a/packages/types/src/bundles.ts b/packages/types/src/bundles.ts index da5d2a095040d..ff2518bdf59b6 100644 --- a/packages/types/src/bundles.ts +++ b/packages/types/src/bundles.ts @@ -18,4 +18,3 @@ export * as SearchTypes from "./search" export * as StockLocationTypes from "./stock-location" export * as TransactionBaseTypes from "./transaction-base" export * as WorkflowTypes from "./workflow" - diff --git a/packages/types/src/dal/repository-service.ts b/packages/types/src/dal/repository-service.ts index 652a3a9b025b2..cc18daff1aca3 100644 --- a/packages/types/src/dal/repository-service.ts +++ b/packages/types/src/dal/repository-service.ts @@ -46,7 +46,7 @@ export interface RepositoryService< update(data: TDTOs["update"][], context?: Context): Promise - delete(ids: string[], context?: Context): Promise + delete(idsOrPKs: string[] | object[], context?: Context): Promise /** * Soft delete entities and cascade to related entities if configured. diff --git a/packages/types/src/index.ts b/packages/types/src/index.ts index 823b2eada2880..c7cf134dd62fa 100644 --- a/packages/types/src/index.ts +++ b/packages/types/src/index.ts @@ -14,9 +14,9 @@ export * from "./joiner" export * from "./link-modules" export * from "./logger" export * from "./modules-sdk" +export * from "./payment" export * from "./pricing" export * from "./product" -export * from "./payment" export * from "./product-category" export * from "./promotion" export * from "./region" diff --git a/packages/types/src/modules-sdk/index.ts b/packages/types/src/modules-sdk/index.ts index 1d52937aaa6e2..5a7188cc2347e 100644 --- a/packages/types/src/modules-sdk/index.ts +++ b/packages/types/src/modules-sdk/index.ts @@ -36,7 +36,7 @@ export type InternalModuleDeclaration = { resources: MODULE_RESOURCE_TYPE dependencies?: string[] definition?: ModuleDefinition // That represent the definition of the module, such as the one we have for the medusa supported modules. This property is used for custom made modules. 
- resolve?: string + resolve?: string | ModuleExports options?: Record /** * If multiple modules are registered with the same key, the alias can be used to differentiate them @@ -227,14 +227,20 @@ export declare type ModuleJoinerRelationship = JoinerRelationship & { export type ModuleExports = { service: Constructor loaders?: ModuleLoaderFunction[] + /** + * @deprecated property will be removed in future versions + */ migrations?: any[] + /** + * @deprecated property will be removed in future versions + */ models?: Constructor[] runMigrations?( - options: LoaderOptions, + options: LoaderOptions, moduleDeclaration?: InternalModuleDeclaration ): Promise revertMigration?( - options: LoaderOptions, + options: LoaderOptions, moduleDeclaration?: InternalModuleDeclaration ): Promise } diff --git a/packages/types/src/promotion/common/application-method.ts b/packages/types/src/promotion/common/application-method.ts index 77478e14b2339..4a5e469853d1c 100644 --- a/packages/types/src/promotion/common/application-method.ts +++ b/packages/types/src/promotion/common/application-method.ts @@ -16,8 +16,11 @@ export interface ApplicationMethodDTO { allocation?: ApplicationMethodAllocationValues value?: string | null max_quantity?: number | null + buy_rules_min_quantity?: number | null + apply_to_quantity?: number | null promotion?: PromotionDTO | string target_rules?: PromotionRuleDTO[] + buy_rules?: PromotionRuleDTO[] } export interface CreateApplicationMethodDTO { @@ -26,8 +29,11 @@ export interface CreateApplicationMethodDTO { allocation?: ApplicationMethodAllocationValues value?: string | null max_quantity?: number | null + buy_rules_min_quantity?: number | null + apply_to_quantity?: number | null promotion?: PromotionDTO | string target_rules?: CreatePromotionRuleDTO[] + buy_rules?: CreatePromotionRuleDTO[] } export interface UpdateApplicationMethodDTO { @@ -37,6 +43,8 @@ export interface UpdateApplicationMethodDTO { allocation?: ApplicationMethodAllocationValues value?: string | null max_quantity?: number | null + buy_rules_min_quantity?: number | null + apply_to_quantity?: number | null promotion?: PromotionDTO | string } diff --git a/packages/types/src/promotion/common/promotion.ts b/packages/types/src/promotion/common/promotion.ts index d0cea3caad20a..018e97724c197 100644 --- a/packages/types/src/promotion/common/promotion.ts +++ b/packages/types/src/promotion/common/promotion.ts @@ -8,12 +8,12 @@ import { import { CampaignDTO } from "./campaign" import { CreatePromotionRuleDTO, PromotionRuleDTO } from "./promotion-rule" -export type PromotionType = "standard" | "buyget" +export type PromotionTypeValues = "standard" | "buyget" export interface PromotionDTO { id: string code?: string - type?: PromotionType + type?: PromotionTypeValues is_automatic?: boolean application_method?: ApplicationMethodDTO rules?: PromotionRuleDTO[] @@ -22,7 +22,7 @@ export interface PromotionDTO { export interface CreatePromotionDTO { code: string - type: PromotionType + type: PromotionTypeValues is_automatic?: boolean application_method?: CreateApplicationMethodDTO rules?: CreatePromotionRuleDTO[] @@ -34,7 +34,7 @@ export interface UpdatePromotionDTO { id: string is_automatic?: boolean code?: string - type?: PromotionType + type?: PromotionTypeValues application_method?: UpdateApplicationMethodDTO campaign_id?: string } @@ -44,6 +44,6 @@ export interface FilterablePromotionProps id?: string[] code?: string[] is_automatic?: boolean - type?: PromotionType[] + type?: PromotionTypeValues[] budget_id?: string[] } diff --git 
a/packages/types/src/promotion/service.ts b/packages/types/src/promotion/service.ts index d968133f1514b..fd9721ef54344 100644 --- a/packages/types/src/promotion/service.ts +++ b/packages/types/src/promotion/service.ts @@ -90,6 +90,12 @@ export interface IPromotionModuleService extends IModuleService { sharedContext?: Context ): Promise + addPromotionBuyRules( + promotionId: string, + rulesData: CreatePromotionRuleDTO[], + sharedContext?: Context + ): Promise + removePromotionRules( promotionId: string, rulesData: RemovePromotionRuleDTO[], @@ -102,6 +108,12 @@ export interface IPromotionModuleService extends IModuleService { sharedContext?: Context ): Promise + removePromotionBuyRules( + promotionId: string, + rulesData: RemovePromotionRuleDTO[], + sharedContext?: Context + ): Promise + createCampaigns( data: CreateCampaignDTO, sharedContext?: Context diff --git a/packages/utils/src/authentication/abstract-authentication-provider.ts b/packages/utils/src/authentication/abstract-authentication-provider.ts new file mode 100644 index 0000000000000..f4d69ab1bd82d --- /dev/null +++ b/packages/utils/src/authentication/abstract-authentication-provider.ts @@ -0,0 +1,19 @@ +import { AuthenticationResponse } from "@medusajs/types"; + +export abstract class AbstractAuthenticationModuleProvider { + public static PROVIDER: string + public static DISPLAY_NAME: string + + public get provider() { + return (this.constructor as Function & { PROVIDER: string }).PROVIDER + } + + public get displayName() { + return (this.constructor as Function & { DISPLAY_NAME: string }) + .DISPLAY_NAME + } + + abstract authenticate( + data: Record + ): Promise +} diff --git a/packages/utils/src/authentication/index.ts b/packages/utils/src/authentication/index.ts new file mode 100644 index 0000000000000..43c855ec7cf50 --- /dev/null +++ b/packages/utils/src/authentication/index.ts @@ -0,0 +1 @@ +export * from "./abstract-authentication-provider" diff --git a/packages/utils/src/dal/mikro-orm/mikro-orm-create-connection.ts b/packages/utils/src/dal/mikro-orm/mikro-orm-create-connection.ts index ecfedbc5ccaa0..7b35fa2f9fb68 100644 --- a/packages/utils/src/dal/mikro-orm/mikro-orm-create-connection.ts +++ b/packages/utils/src/dal/mikro-orm/mikro-orm-create-connection.ts @@ -18,8 +18,8 @@ export async function mikroOrmCreateConnection( // It is important that the knex package version is the same as the one used by MikroORM knex package driverOptions = database.connection clientUrl = - database.connection.context.client.config.connection.connectionString - schema = database.connection.context.client.config.searchPath + database.connection.context?.client?.config?.connection?.connectionString + schema = database.connection.context?.client?.config?.searchPath } const { MikroORM } = await import("@mikro-orm/postgresql") diff --git a/packages/utils/src/index.ts b/packages/utils/src/index.ts index 5f96dcb3dd3f6..d86097b681024 100644 --- a/packages/utils/src/index.ts +++ b/packages/utils/src/index.ts @@ -1,3 +1,4 @@ +export * from "./authentication" export * from "./bundles" export * from "./common" export * from "./dal" diff --git a/packages/utils/src/modules-sdk/decorators/index.ts b/packages/utils/src/modules-sdk/decorators/index.ts index ef9137051ab19..fbea00947621a 100644 --- a/packages/utils/src/modules-sdk/decorators/index.ts +++ b/packages/utils/src/modules-sdk/decorators/index.ts @@ -1,3 +1,3 @@ -export * from "./inject-transaction-manager" export * from "./inject-manager" export * from "./inject-shared-context" +export * from 
"./inject-transaction-manager" diff --git a/packages/utils/src/modules-sdk/load-module-database-config.ts b/packages/utils/src/modules-sdk/load-module-database-config.ts index 2f9a2ec924c62..b92defb1733a3 100644 --- a/packages/utils/src/modules-sdk/load-module-database-config.ts +++ b/packages/utils/src/modules-sdk/load-module-database-config.ts @@ -93,7 +93,7 @@ export function loadDatabaseConfig( database.connection = options.database!.connection } - if (!database.clientUrl && !silent) { + if (!database.clientUrl && !silent && !database.connection) { throw new MedusaError( MedusaError.Types.INVALID_ARGUMENT, "No database clientUrl provided. Please provide the clientUrl through the [MODULE]_POSTGRES_URL, MEDUSA_POSTGRES_URL or POSTGRES_URL environment variable or the options object in the initialize function." diff --git a/packages/utils/src/orchestration/index.ts b/packages/utils/src/orchestration/index.ts index e6355e4311ea6..98bcdbbef7516 100644 --- a/packages/utils/src/orchestration/index.ts +++ b/packages/utils/src/orchestration/index.ts @@ -1 +1,2 @@ export * from "./symbol" +export * from "./types" diff --git a/packages/utils/src/orchestration/types.ts b/packages/utils/src/orchestration/types.ts new file mode 100644 index 0000000000000..26cba445d6b50 --- /dev/null +++ b/packages/utils/src/orchestration/types.ts @@ -0,0 +1,34 @@ +export enum TransactionHandlerType { + INVOKE = "invoke", + COMPENSATE = "compensate", +} + +export enum TransactionStepStatus { + IDLE = "idle", + OK = "ok", + WAITING = "waiting_response", + TEMPORARY_FAILURE = "temp_failure", + PERMANENT_FAILURE = "permanent_failure", +} + +export enum TransactionState { + NOT_STARTED = "not_started", + INVOKING = "invoking", + WAITING_TO_COMPENSATE = "waiting_to_compensate", + COMPENSATING = "compensating", + DONE = "done", + REVERTED = "reverted", + FAILED = "failed", +} + +export enum TransactionStepState { + NOT_STARTED = "not_started", + INVOKING = "invoking", + COMPENSATING = "compensating", + DONE = "done", + REVERTED = "reverted", + FAILED = "failed", + DORMANT = "dormant", + SKIPPED = "skipped", + TIMEOUT = "timeout", +} diff --git a/packages/workflow-engine-inmemory/.gitignore b/packages/workflow-engine-inmemory/.gitignore new file mode 100644 index 0000000000000..874c6c69d3341 --- /dev/null +++ b/packages/workflow-engine-inmemory/.gitignore @@ -0,0 +1,6 @@ +/dist +node_modules +.DS_store +.env* +.env +*.sql diff --git a/packages/workflow-engine-inmemory/CHANGELOG.md b/packages/workflow-engine-inmemory/CHANGELOG.md new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/packages/workflow-engine-inmemory/README.md b/packages/workflow-engine-inmemory/README.md new file mode 100644 index 0000000000000..b34e46ea20d0c --- /dev/null +++ b/packages/workflow-engine-inmemory/README.md @@ -0,0 +1 @@ +# Workflow Orchestrator diff --git a/packages/workflow-engine-inmemory/integration-tests/__fixtures__/index.ts b/packages/workflow-engine-inmemory/integration-tests/__fixtures__/index.ts new file mode 100644 index 0000000000000..987a8a99bd67c --- /dev/null +++ b/packages/workflow-engine-inmemory/integration-tests/__fixtures__/index.ts @@ -0,0 +1,4 @@ +export * from "./workflow_1" +export * from "./workflow_2" +export * from "./workflow_step_timeout" +export * from "./workflow_transaction_timeout" diff --git a/packages/workflow-engine-inmemory/integration-tests/__fixtures__/workflow_1.ts b/packages/workflow-engine-inmemory/integration-tests/__fixtures__/workflow_1.ts new file mode 100644 index 
0000000000000..cb0056466e910 --- /dev/null +++ b/packages/workflow-engine-inmemory/integration-tests/__fixtures__/workflow_1.ts @@ -0,0 +1,65 @@ +import { + StepResponse, + createStep, + createWorkflow, +} from "@medusajs/workflows-sdk" + +const step_1 = createStep( + "step_1", + jest.fn((input) => { + input.test = "test" + return new StepResponse(input, { compensate: 123 }) + }), + jest.fn((compensateInput) => { + if (!compensateInput) { + return + } + + console.log("reverted", compensateInput.compensate) + return new StepResponse({ + reverted: true, + }) + }) +) + +const step_2 = createStep( + "step_2", + jest.fn((input, context) => { + console.log("triggered async request", context.metadata.idempotency_key) + + if (input) { + return new StepResponse({ notAsyncResponse: input.hey }) + } + }), + jest.fn((_, context) => { + return new StepResponse({ + step: context.metadata.action, + idempotency_key: context.metadata.idempotency_key, + reverted: true, + }) + }) +) + +const step_3 = createStep( + "step_3", + jest.fn((res) => { + return new StepResponse({ + done: { + inputFromSyncStep: res.notAsyncResponse, + }, + }) + }) +) + +createWorkflow("workflow_1", function (input) { + step_1(input) + + const ret2 = step_2({ hey: "oh" }) + + step_2({ hey: "async hello" }).config({ + name: "new_step_name", + async: true, + }) + + return step_3(ret2) +}) diff --git a/packages/workflow-engine-inmemory/integration-tests/__fixtures__/workflow_2.ts b/packages/workflow-engine-inmemory/integration-tests/__fixtures__/workflow_2.ts new file mode 100644 index 0000000000000..f15d51889fe3e --- /dev/null +++ b/packages/workflow-engine-inmemory/integration-tests/__fixtures__/workflow_2.ts @@ -0,0 +1,71 @@ +import { + StepResponse, + createStep, + createWorkflow, +} from "@medusajs/workflows-sdk" + +const step_1 = createStep( + "step_1", + jest.fn((input) => { + input.test = "test" + return new StepResponse(input, { compensate: 123 }) + }), + jest.fn((compensateInput) => { + if (!compensateInput) { + return + } + + console.log("reverted", compensateInput.compensate) + return new StepResponse({ + reverted: true, + }) + }) +) + +const step_2 = createStep( + "step_2", + jest.fn((input, context) => { + console.log("triggered async request", context.metadata.idempotency_key) + + if (input) { + return new StepResponse({ notAsyncResponse: input.hey }) + } + }), + jest.fn((_, context) => { + return new StepResponse({ + step: context.metadata.action, + idempotency_key: context.metadata.idempotency_key, + reverted: true, + }) + }) +) + +const step_3 = createStep( + "step_3", + jest.fn((res) => { + return new StepResponse({ + done: { + inputFromSyncStep: res.notAsyncResponse, + }, + }) + }) +) + +createWorkflow( + { + name: "workflow_2", + retentionTime: 1000, + }, + function (input) { + step_1(input) + + const ret2 = step_2({ hey: "oh" }) + + step_2({ hey: "async hello" }).config({ + name: "new_step_name", + async: true, + }) + + return step_3(ret2) + } +) diff --git a/packages/workflow-engine-inmemory/integration-tests/__fixtures__/workflow_step_timeout.ts b/packages/workflow-engine-inmemory/integration-tests/__fixtures__/workflow_step_timeout.ts new file mode 100644 index 0000000000000..a97112ffc12a1 --- /dev/null +++ b/packages/workflow-engine-inmemory/integration-tests/__fixtures__/workflow_step_timeout.ts @@ -0,0 +1,29 @@ +import { + StepResponse, + createStep, + createWorkflow, +} from "@medusajs/workflows-sdk" +import { setTimeout } from "timers/promises" + +const step_1 = createStep( + "step_1", + jest.fn(async 
(input) => { + await setTimeout(200) + + return new StepResponse(input, { compensate: 123 }) + }), + jest.fn(() => {}) +) + +createWorkflow( + { + name: "workflow_step_timeout", + }, + function (input) { + const resp = step_1(input).config({ + timeout: 0.1, // 0.1 second + }) + + return resp + } +) diff --git a/packages/workflow-engine-inmemory/integration-tests/__fixtures__/workflow_transaction_timeout.ts b/packages/workflow-engine-inmemory/integration-tests/__fixtures__/workflow_transaction_timeout.ts new file mode 100644 index 0000000000000..154da2b5d4d8c --- /dev/null +++ b/packages/workflow-engine-inmemory/integration-tests/__fixtures__/workflow_transaction_timeout.ts @@ -0,0 +1,36 @@ +import { + StepResponse, + createStep, + createWorkflow, +} from "@medusajs/workflows-sdk" + +const step_1 = createStep( + "step_1", + jest.fn((input) => { + input.test = "test" + return new StepResponse(input, { compensate: 123 }) + }), + jest.fn((compensateInput) => { + if (!compensateInput) { + return + } + + return new StepResponse({ + reverted: true, + }) + }) +) + +createWorkflow( + { + name: "workflow_transaction_timeout", + timeout: 0.1, // 0.1 second + }, + function (input) { + const resp = step_1(input).config({ + async: true, + }) + + return resp + } +) diff --git a/packages/workflow-engine-inmemory/integration-tests/__tests__/index.spec.ts b/packages/workflow-engine-inmemory/integration-tests/__tests__/index.spec.ts new file mode 100644 index 0000000000000..11b92ab0cb53c --- /dev/null +++ b/packages/workflow-engine-inmemory/integration-tests/__tests__/index.spec.ts @@ -0,0 +1,163 @@ +import { MedusaApp } from "@medusajs/modules-sdk" +import { RemoteJoinerQuery } from "@medusajs/types" +import { TransactionHandlerType } from "@medusajs/utils" +import { IWorkflowsModuleService } from "@medusajs/workflows-sdk" +import { knex } from "knex" +import { setTimeout } from "timers/promises" +import "../__fixtures__" +import { DB_URL, TestDatabase } from "../utils" + +const sharedPgConnection = knex({ + client: "pg", + searchPath: process.env.MEDUSA_WORKFLOW_ENGINE_DB_SCHEMA, + connection: { + connectionString: DB_URL, + debug: false, + }, +}) + +const afterEach_ = async () => { + await TestDatabase.clearTables(sharedPgConnection) +} + +describe("Workflow Orchestrator module", function () { + describe("Testing basic workflow", function () { + let workflowOrcModule: IWorkflowsModuleService + let query: ( + query: string | RemoteJoinerQuery | object, + variables?: Record + ) => Promise + + afterEach(afterEach_) + + beforeAll(async () => { + const { + runMigrations, + query: remoteQuery, + modules, + } = await MedusaApp({ + sharedResourcesConfig: { + database: { + connection: sharedPgConnection, + }, + }, + modulesConfig: { + workflows: { + resolve: __dirname + "/../..", + }, + }, + }) + + query = remoteQuery + + await runMigrations() + + workflowOrcModule = + modules.workflows as unknown as IWorkflowsModuleService + }) + + afterEach(afterEach_) + + it("should return a list of workflow executions and remove after completed when there is no retentionTime set", async () => { + await workflowOrcModule.run("workflow_1", { + input: { + value: "123", + }, + throwOnError: true, + }) + + let executionsList = await query({ + workflow_executions: { + fields: ["workflow_id", "transaction_id", "state"], + }, + }) + + expect(executionsList).toHaveLength(1) + + const { result } = await workflowOrcModule.setStepSuccess({ + idempotencyKey: { + action: TransactionHandlerType.INVOKE, + stepId: "new_step_name", + 
workflowId: "workflow_1", + transactionId: executionsList[0].transaction_id, + }, + stepResponse: { uhuuuu: "yeaah!" }, + }) + + executionsList = await query({ + workflow_executions: { + fields: ["id"], + }, + }) + + expect(executionsList).toHaveLength(0) + expect(result).toEqual({ + done: { + inputFromSyncStep: "oh", + }, + }) + }) + + it("should return a list of workflow executions and keep it saved when there is a retentionTime set", async () => { + await workflowOrcModule.run("workflow_2", { + input: { + value: "123", + }, + throwOnError: true, + transactionId: "transaction_1", + }) + + let executionsList = await query({ + workflow_executions: { + fields: ["id"], + }, + }) + + expect(executionsList).toHaveLength(1) + + await workflowOrcModule.setStepSuccess({ + idempotencyKey: { + action: TransactionHandlerType.INVOKE, + stepId: "new_step_name", + workflowId: "workflow_2", + transactionId: "transaction_1", + }, + stepResponse: { uhuuuu: "yeaah!" }, + }) + + executionsList = await query({ + workflow_executions: { + fields: ["id"], + }, + }) + + expect(executionsList).toHaveLength(1) + }) + + it("should revert the entire transaction when a step timeout expires", async () => { + const { transaction } = await workflowOrcModule.run( + "workflow_step_timeout", + { + input: {}, + throwOnError: false, + } + ) + + expect(transaction.flow.state).toEqual("reverted") + }) + + it("should revert the entire transaction when the transaction timeout expires", async () => { + const { transaction } = await workflowOrcModule.run( + "workflow_transaction_timeout", + { + input: {}, + throwOnError: false, + } + ) + + await setTimeout(200) + + expect(transaction.flow.state).toEqual("reverted") + }) + }) +}) diff --git a/packages/workflow-engine-inmemory/integration-tests/setup-env.js b/packages/workflow-engine-inmemory/integration-tests/setup-env.js new file mode 100644 index 0000000000000..7de2d9de2441f --- /dev/null +++ b/packages/workflow-engine-inmemory/integration-tests/setup-env.js @@ -0,0 +1,6 @@ +if (typeof process.env.DB_TEMP_NAME === "undefined") { + const tempName = parseInt(process.env.JEST_WORKER_ID || "1") + process.env.DB_TEMP_NAME = `medusa-workflow-engine-inmemory-${tempName}` +} + +process.env.MEDUSA_WORKFLOW_ENGINE_DB_SCHEMA = "public" diff --git a/packages/workflow-engine-inmemory/integration-tests/setup.js b/packages/workflow-engine-inmemory/integration-tests/setup.js new file mode 100644 index 0000000000000..43f99aab4ac94 --- /dev/null +++ b/packages/workflow-engine-inmemory/integration-tests/setup.js @@ -0,0 +1,3 @@ +import { JestUtils } from "medusa-test-utils" + +JestUtils.afterAllHookDropDatabase() diff --git a/packages/workflow-engine-inmemory/integration-tests/utils/database.ts b/packages/workflow-engine-inmemory/integration-tests/utils/database.ts new file mode 100644 index 0000000000000..ed61b5e489ed2 --- /dev/null +++ b/packages/workflow-engine-inmemory/integration-tests/utils/database.ts @@ -0,0 +1,22 @@ +import * as process from "process" + +const DB_HOST = process.env.DB_HOST ?? "localhost" +const DB_USERNAME = process.env.DB_USERNAME ?? "" +const DB_PASSWORD = process.env.DB_PASSWORD +const DB_NAME = process.env.DB_TEMP_NAME + +export const DB_URL = `postgres://${DB_USERNAME}${ + DB_PASSWORD ? 
`:${DB_PASSWORD}` : "" +}@${DB_HOST}/${DB_NAME}` + +interface TestDatabase { + clearTables(knex): Promise +} + +export const TestDatabase: TestDatabase = { + clearTables: async (knex) => { + await knex.raw(` + TRUNCATE TABLE workflow_execution CASCADE; + `) + }, +} diff --git a/packages/workflow-engine-inmemory/integration-tests/utils/index.ts b/packages/workflow-engine-inmemory/integration-tests/utils/index.ts new file mode 100644 index 0000000000000..6b917ed30e5e7 --- /dev/null +++ b/packages/workflow-engine-inmemory/integration-tests/utils/index.ts @@ -0,0 +1 @@ +export * from "./database" diff --git a/packages/workflow-engine-inmemory/jest.config.js b/packages/workflow-engine-inmemory/jest.config.js new file mode 100644 index 0000000000000..456054fe8ae27 --- /dev/null +++ b/packages/workflow-engine-inmemory/jest.config.js @@ -0,0 +1,22 @@ +module.exports = { + moduleNameMapper: { + "^@models": "/src/models", + "^@services": "/src/services", + "^@repositories": "/src/repositories", + "^@types": "/src/types", + }, + transform: { + "^.+\\.[jt]s?$": [ + "ts-jest", + { + tsConfig: "tsconfig.spec.json", + isolatedModules: true, + }, + ], + }, + testEnvironment: `node`, + moduleFileExtensions: [`js`, `ts`], + modulePathIgnorePatterns: ["dist/"], + setupFiles: ["/integration-tests/setup-env.js"], + setupFilesAfterEnv: ["/integration-tests/setup.js"], +} diff --git a/packages/workflow-engine-inmemory/mikro-orm.config.dev.ts b/packages/workflow-engine-inmemory/mikro-orm.config.dev.ts new file mode 100644 index 0000000000000..81651a76003f2 --- /dev/null +++ b/packages/workflow-engine-inmemory/mikro-orm.config.dev.ts @@ -0,0 +1,8 @@ +import * as entities from "./src/models" + +module.exports = { + entities: Object.values(entities), + schema: "public", + clientUrl: "postgres://postgres@localhost/medusa-workflow-engine-inmemory", + type: "postgresql", +} diff --git a/packages/workflow-engine-inmemory/package.json b/packages/workflow-engine-inmemory/package.json new file mode 100644 index 0000000000000..d82f33d8b79bd --- /dev/null +++ b/packages/workflow-engine-inmemory/package.json @@ -0,0 +1,59 @@ +{ + "name": "@medusajs/workflow-engine-inmemory", + "version": "0.0.1", + "description": "Medusa Workflow Orchestrator module", + "main": "dist/index.js", + "types": "dist/index.d.ts", + "files": [ + "dist" + ], + "engines": { + "node": ">=16" + }, + "repository": { + "type": "git", + "url": "https://github.com/medusajs/medusa", + "directory": "packages/workflow-engine-inmemory" + }, + "publishConfig": { + "access": "public" + }, + "author": "Medusa", + "license": "MIT", + "scripts": { + "watch": "tsc --build --watch", + "watch:test": "tsc --build tsconfig.spec.json --watch", + "prepublishOnly": "cross-env NODE_ENV=production tsc --build && tsc-alias -p tsconfig.json", + "build": "rimraf dist && tsc --build && tsc-alias -p tsconfig.json", + "test": "jest --passWithNoTests --runInBand --bail --forceExit -- src/**/__tests__/**/*.ts", + "test:integration": "jest --runInBand --forceExit -- integration-tests/**/__tests__/**/*.ts", + "migration:generate": " MIKRO_ORM_CLI=./mikro-orm.config.dev.ts mikro-orm migration:generate", + "migration:initial": " MIKRO_ORM_CLI=./mikro-orm.config.dev.ts mikro-orm migration:create --initial", + "migration:create": " MIKRO_ORM_CLI=./mikro-orm.config.dev.ts mikro-orm migration:create", + "migration:up": " MIKRO_ORM_CLI=./mikro-orm.config.dev.ts mikro-orm migration:up", + "orm:cache:clear": " MIKRO_ORM_CLI=./mikro-orm.config.dev.ts mikro-orm cache:clear" + }, + 
"devDependencies": { + "@mikro-orm/cli": "5.9.7", + "cross-env": "^5.2.1", + "jest": "^29.6.3", + "medusa-test-utils": "^1.1.40", + "rimraf": "^3.0.2", + "ts-jest": "^29.1.1", + "ts-node": "^10.9.1", + "tsc-alias": "^1.8.6", + "typescript": "^5.1.6" + }, + "dependencies": { + "@medusajs/modules-sdk": "^1.12.5", + "@medusajs/types": "^1.11.9", + "@medusajs/utils": "^1.11.2", + "@medusajs/workflows-sdk": "^0.1.0", + "@mikro-orm/core": "5.9.7", + "@mikro-orm/migrations": "5.9.7", + "@mikro-orm/postgresql": "5.9.7", + "awilix": "^8.0.0", + "dotenv": "^16.1.4", + "knex": "2.4.2" + } +} diff --git a/packages/workflow-engine-inmemory/src/index.ts b/packages/workflow-engine-inmemory/src/index.ts new file mode 100644 index 0000000000000..78040405651bb --- /dev/null +++ b/packages/workflow-engine-inmemory/src/index.ts @@ -0,0 +1,22 @@ +import { Modules } from "@medusajs/modules-sdk" +import { ModulesSdkUtils } from "@medusajs/utils" +import * as models from "@models" +import { moduleDefinition } from "./module-definition" + +export default moduleDefinition + +const migrationScriptOptions = { + moduleName: Modules.WORKFLOW_ENGINE, + models: models, + pathToMigrations: __dirname + "/migrations", +} + +export const runMigrations = ModulesSdkUtils.buildMigrationScript( + migrationScriptOptions +) +export const revertMigration = ModulesSdkUtils.buildRevertMigrationScript( + migrationScriptOptions +) + +export * from "./initialize" +export * from "./loaders" diff --git a/packages/workflow-engine-inmemory/src/initialize/index.ts b/packages/workflow-engine-inmemory/src/initialize/index.ts new file mode 100644 index 0000000000000..20f4f49231b99 --- /dev/null +++ b/packages/workflow-engine-inmemory/src/initialize/index.ts @@ -0,0 +1,36 @@ +import { + ExternalModuleDeclaration, + InternalModuleDeclaration, + MedusaModule, + MODULE_PACKAGE_NAMES, + Modules, +} from "@medusajs/modules-sdk" +import { ModulesSdkTypes } from "@medusajs/types" +import { WorkflowOrchestratorTypes } from "@medusajs/workflows-sdk" +import { moduleDefinition } from "../module-definition" +import { InitializeModuleInjectableDependencies } from "../types" + +export const initialize = async ( + options?: + | ModulesSdkTypes.ModuleServiceInitializeOptions + | ModulesSdkTypes.ModuleServiceInitializeCustomDataLayerOptions + | ExternalModuleDeclaration + | InternalModuleDeclaration, + injectedDependencies?: InitializeModuleInjectableDependencies +): Promise => { + const loaded = + // eslint-disable-next-line max-len + await MedusaModule.bootstrap( + { + moduleKey: Modules.WORKFLOW_ENGINE, + defaultPath: MODULE_PACKAGE_NAMES[Modules.WORKFLOW_ENGINE], + declaration: options as + | InternalModuleDeclaration + | ExternalModuleDeclaration, + injectedDependencies, + moduleExports: moduleDefinition, + } + ) + + return loaded[Modules.WORKFLOW_ENGINE] +} diff --git a/packages/workflow-engine-inmemory/src/joiner-config.ts b/packages/workflow-engine-inmemory/src/joiner-config.ts new file mode 100644 index 0000000000000..7999e9c3ab52d --- /dev/null +++ b/packages/workflow-engine-inmemory/src/joiner-config.ts @@ -0,0 +1,34 @@ +import { Modules } from "@medusajs/modules-sdk" +import { ModuleJoinerConfig } from "@medusajs/types" +import { MapToConfig } from "@medusajs/utils" +import { WorkflowExecution } from "@models" +import moduleSchema from "./schema" + +export const LinkableKeys = { + workflow_execution_id: WorkflowExecution.name, +} + +const entityLinkableKeysMap: MapToConfig = {} +Object.entries(LinkableKeys).forEach(([key, value]) => { + 
entityLinkableKeysMap[value] ??= [] + entityLinkableKeysMap[value].push({ + mapTo: key, + valueFrom: key.split("_").pop()!, + }) +}) + +export const entityNameToLinkableKeysMap: MapToConfig = entityLinkableKeysMap + +export const joinerConfig: ModuleJoinerConfig = { + serviceName: Modules.WORKFLOW_ENGINE, + primaryKeys: ["id"], + schema: moduleSchema, + linkableKeys: LinkableKeys, + alias: { + name: ["workflow_execution", "workflow_executions"], + args: { + entity: WorkflowExecution.name, + methodSuffix: "WorkflowExecution", + }, + }, +} diff --git a/packages/workflow-engine-inmemory/src/loaders/connection.ts b/packages/workflow-engine-inmemory/src/loaders/connection.ts new file mode 100644 index 0000000000000..580e05e95cef9 --- /dev/null +++ b/packages/workflow-engine-inmemory/src/loaders/connection.ts @@ -0,0 +1,36 @@ +import { + InternalModuleDeclaration, + LoaderOptions, + Modules, +} from "@medusajs/modules-sdk" +import { ModulesSdkTypes } from "@medusajs/types" +import { ModulesSdkUtils } from "@medusajs/utils" +import { EntitySchema } from "@mikro-orm/core" +import * as WorkflowOrchestratorModels from "../models" + +export default async ( + { + options, + container, + logger, + }: LoaderOptions< + | ModulesSdkTypes.ModuleServiceInitializeOptions + | ModulesSdkTypes.ModuleServiceInitializeCustomDataLayerOptions + >, + moduleDeclaration?: InternalModuleDeclaration +): Promise => { + const entities = Object.values( + WorkflowOrchestratorModels + ) as unknown as EntitySchema[] + const pathToMigrations = __dirname + "/../migrations" + + await ModulesSdkUtils.mikroOrmConnectionLoader({ + moduleName: Modules.WORKFLOW_ENGINE, + entities, + container, + options, + moduleDeclaration, + logger, + pathToMigrations, + }) +} diff --git a/packages/workflow-engine-inmemory/src/loaders/container.ts b/packages/workflow-engine-inmemory/src/loaders/container.ts new file mode 100644 index 0000000000000..9a0c5553b490c --- /dev/null +++ b/packages/workflow-engine-inmemory/src/loaders/container.ts @@ -0,0 +1,9 @@ +import { MikroOrmBaseRepository, ModulesSdkUtils } from "@medusajs/utils" +import * as ModuleModels from "@models" +import * as ModuleServices from "@services" + +export default ModulesSdkUtils.moduleContainerLoaderFactory({ + moduleModels: ModuleModels, + moduleServices: ModuleServices, + moduleRepositories: { BaseRepository: MikroOrmBaseRepository }, +}) diff --git a/packages/workflow-engine-inmemory/src/loaders/index.ts b/packages/workflow-engine-inmemory/src/loaders/index.ts new file mode 100644 index 0000000000000..5445bc7412131 --- /dev/null +++ b/packages/workflow-engine-inmemory/src/loaders/index.ts @@ -0,0 +1,3 @@ +export * from "./connection" +export * from "./container" +export * from "./utils" diff --git a/packages/workflow-engine-inmemory/src/loaders/utils.ts b/packages/workflow-engine-inmemory/src/loaders/utils.ts new file mode 100644 index 0000000000000..3131eb8f92f12 --- /dev/null +++ b/packages/workflow-engine-inmemory/src/loaders/utils.ts @@ -0,0 +1,10 @@ +import { asClass } from "awilix" +import { InMemoryDistributedTransactionStorage } from "../utils" + +export default async ({ container }): Promise => { + container.register({ + inMemoryDistributedTransactionStorage: asClass( + InMemoryDistributedTransactionStorage + ).singleton(), + }) +} diff --git a/packages/workflow-engine-inmemory/src/migrations/Migration20231228143900.ts b/packages/workflow-engine-inmemory/src/migrations/Migration20231228143900.ts new file mode 100644 index 0000000000000..af9958e80a783 --- /dev/null +++ 
b/packages/workflow-engine-inmemory/src/migrations/Migration20231228143900.ts @@ -0,0 +1,41 @@ +import { Migration } from "@mikro-orm/migrations" + +export class Migration20231221104256 extends Migration { + async up(): Promise { + this.addSql( + ` + CREATE TABLE IF NOT EXISTS workflow_execution + ( + id character varying NOT NULL, + workflow_id character varying NOT NULL, + transaction_id character varying NOT NULL, + execution jsonb NULL, + context jsonb NULL, + state character varying NOT NULL, + created_at timestamp WITHOUT time zone NOT NULL DEFAULT Now(), + updated_at timestamp WITHOUT time zone NOT NULL DEFAULT Now(), + deleted_at timestamp WITHOUT time zone NULL, + CONSTRAINT "PK_workflow_execution_workflow_id_transaction_id" PRIMARY KEY ("workflow_id", "transaction_id") + ); + + CREATE UNIQUE INDEX IF NOT EXISTS "IDX_workflow_execution_id" ON "workflow_execution" ("id"); + CREATE INDEX IF NOT EXISTS "IDX_workflow_execution_workflow_id" ON "workflow_execution" ("workflow_id") WHERE deleted_at IS NULL; + CREATE INDEX IF NOT EXISTS "IDX_workflow_execution_transaction_id" ON "workflow_execution" ("transaction_id") WHERE deleted_at IS NULL; + CREATE INDEX IF NOT EXISTS "IDX_workflow_execution_state" ON "workflow_execution" ("state") WHERE deleted_at IS NULL; + ` + ) + } + + async down(): Promise { + this.addSql( + ` + DROP INDEX "IDX_workflow_execution_id"; + DROP INDEX "IDX_workflow_execution_workflow_id"; + DROP INDEX "IDX_workflow_execution_transaction_id"; + DROP INDEX "IDX_workflow_execution_state"; + + DROP TABLE IF EXISTS workflow_execution; + ` + ) + } +} diff --git a/packages/workflow-engine-inmemory/src/models/index.ts b/packages/workflow-engine-inmemory/src/models/index.ts new file mode 100644 index 0000000000000..78fcbfa9214f9 --- /dev/null +++ b/packages/workflow-engine-inmemory/src/models/index.ts @@ -0,0 +1 @@ +export { default as WorkflowExecution } from "./workflow-execution" diff --git a/packages/workflow-engine-inmemory/src/models/workflow-execution.ts b/packages/workflow-engine-inmemory/src/models/workflow-execution.ts new file mode 100644 index 0000000000000..753d9e62db678 --- /dev/null +++ b/packages/workflow-engine-inmemory/src/models/workflow-execution.ts @@ -0,0 +1,76 @@ +import { TransactionState } from "@medusajs/orchestration" +import { DALUtils, generateEntityId } from "@medusajs/utils" +import { + BeforeCreate, + Entity, + Enum, + Filter, + Index, + OnInit, + OptionalProps, + PrimaryKey, + Property, + Unique, +} from "@mikro-orm/core" + +type OptionalFields = "deleted_at" + +@Entity() +@Unique({ + name: "IDX_workflow_execution_workflow_id_transaction_id_unique", + properties: ["workflow_id", "transaction_id"], +}) +@Filter(DALUtils.mikroOrmSoftDeletableFilterOptions) +export default class WorkflowExecution { + [OptionalProps]?: OptionalFields + + @Property({ columnType: "text", nullable: false }) + @Index({ name: "IDX_workflow_execution_id" }) + id!: string + + @Index({ name: "IDX_workflow_execution_workflow_id" }) + @PrimaryKey({ columnType: "text" }) + workflow_id: string + + @Index({ name: "IDX_workflow_execution_transaction_id" }) + @PrimaryKey({ columnType: "text" }) + transaction_id: string + + @Property({ columnType: "jsonb", nullable: true }) + execution: Record | null = null + + @Property({ columnType: "jsonb", nullable: true }) + context: Record | null = null + + @Index({ name: "IDX_workflow_execution_state" }) + @Enum(() => TransactionState) + state: TransactionState + + @Property({ + onCreate: () => new Date(), + columnType: "timestamptz", + 
defaultRaw: "now()", + }) + created_at: Date + + @Property({ + onCreate: () => new Date(), + onUpdate: () => new Date(), + columnType: "timestamptz", + defaultRaw: "now()", + }) + updated_at: Date + + @Property({ columnType: "timestamptz", nullable: true }) + deleted_at: Date | null = null + + @BeforeCreate() + onCreate() { + this.id = generateEntityId(this.id, "wf_exec") + } + + @OnInit() + onInit() { + this.id = generateEntityId(this.id, "wf_exec") + } +} diff --git a/packages/workflow-engine-inmemory/src/module-definition.ts b/packages/workflow-engine-inmemory/src/module-definition.ts new file mode 100644 index 0000000000000..b86c23807bc40 --- /dev/null +++ b/packages/workflow-engine-inmemory/src/module-definition.ts @@ -0,0 +1,13 @@ +import { ModuleExports } from "@medusajs/types" +import { WorkflowsModuleService } from "@services" +import loadConnection from "./loaders/connection" +import loadContainer from "./loaders/container" +import loadUtils from "./loaders/utils" + +const service = WorkflowsModuleService +const loaders = [loadContainer, loadConnection, loadUtils] as any + +export const moduleDefinition: ModuleExports = { + service, + loaders, +} diff --git a/packages/workflow-engine-inmemory/src/repositories/index.ts b/packages/workflow-engine-inmemory/src/repositories/index.ts new file mode 100644 index 0000000000000..8def202608b8c --- /dev/null +++ b/packages/workflow-engine-inmemory/src/repositories/index.ts @@ -0,0 +1,2 @@ +export { MikroOrmBaseRepository as BaseRepository } from "@medusajs/utils" +export { WorkflowExecutionRepository } from "./workflow-execution" diff --git a/packages/workflow-engine-inmemory/src/repositories/workflow-execution.ts b/packages/workflow-engine-inmemory/src/repositories/workflow-execution.ts new file mode 100644 index 0000000000000..9e6553ec748d8 --- /dev/null +++ b/packages/workflow-engine-inmemory/src/repositories/workflow-execution.ts @@ -0,0 +1,7 @@ +import { DALUtils } from "@medusajs/utils" +import { WorkflowExecution } from "@models" + +// eslint-disable-next-line max-len +export class WorkflowExecutionRepository extends DALUtils.mikroOrmBaseRepositoryFactory( + WorkflowExecution +) {} diff --git a/packages/workflow-engine-inmemory/src/schema/index.ts b/packages/workflow-engine-inmemory/src/schema/index.ts new file mode 100644 index 0000000000000..3d7d91edea1dc --- /dev/null +++ b/packages/workflow-engine-inmemory/src/schema/index.ts @@ -0,0 +1,26 @@ +export default ` +scalar DateTime +scalar JSON + +enum TransactionState { + NOT_STARTED + INVOKING + WAITING_TO_COMPENSATE + COMPENSATING + DONE + REVERTED + FAILED +} + +type WorkflowExecution { + id: ID! + created_at: DateTime! + updated_at: DateTime! 
+ deleted_at: DateTime + workflow_id: string + transaction_id: string + execution: JSON + context: JSON + state: TransactionState +} +` diff --git a/packages/workflow-engine-inmemory/src/services/__tests__/index.spec.ts b/packages/workflow-engine-inmemory/src/services/__tests__/index.spec.ts new file mode 100644 index 0000000000000..728f6245c6bfd --- /dev/null +++ b/packages/workflow-engine-inmemory/src/services/__tests__/index.spec.ts @@ -0,0 +1,5 @@ +describe("Noop test", () => { + it("noop check", async () => { + expect(true).toBe(true) + }) +}) diff --git a/packages/workflow-engine-inmemory/src/services/index.ts b/packages/workflow-engine-inmemory/src/services/index.ts new file mode 100644 index 0000000000000..5a6d313d860b3 --- /dev/null +++ b/packages/workflow-engine-inmemory/src/services/index.ts @@ -0,0 +1,3 @@ +export * from "./workflow-execution" +export * from "./workflow-orchestrator" +export * from "./workflows-module" diff --git a/packages/workflow-engine-inmemory/src/services/workflow-execution.ts b/packages/workflow-engine-inmemory/src/services/workflow-execution.ts new file mode 100644 index 0000000000000..158557ec0bae8 --- /dev/null +++ b/packages/workflow-engine-inmemory/src/services/workflow-execution.ts @@ -0,0 +1,21 @@ +import { DAL } from "@medusajs/types" +import { ModulesSdkUtils } from "@medusajs/utils" +import { WorkflowExecution } from "@models" + +type InjectedDependencies = { + workflowExecutionRepository: DAL.RepositoryService +} + +export class WorkflowExecutionService< + TEntity extends WorkflowExecution = WorkflowExecution +> extends ModulesSdkUtils.abstractServiceFactory( + WorkflowExecution +) { + protected workflowExecutionRepository_: DAL.RepositoryService + + constructor({ workflowExecutionRepository }: InjectedDependencies) { + // @ts-ignore + super(...arguments) + this.workflowExecutionRepository_ = workflowExecutionRepository + } +} diff --git a/packages/workflow-engine-inmemory/src/services/workflow-orchestrator.ts b/packages/workflow-engine-inmemory/src/services/workflow-orchestrator.ts new file mode 100644 index 0000000000000..55b2f33f1577f --- /dev/null +++ b/packages/workflow-engine-inmemory/src/services/workflow-orchestrator.ts @@ -0,0 +1,528 @@ +import { + DistributedTransaction, + DistributedTransactionEvents, + TransactionHandlerType, + TransactionStep, +} from "@medusajs/orchestration" +import { ContainerLike, Context, MedusaContainer } from "@medusajs/types" +import { InjectSharedContext, isString, MedusaContext } from "@medusajs/utils" +import { + type FlowRunOptions, + MedusaWorkflow, + ReturnWorkflow, +} from "@medusajs/workflows-sdk" +import { ulid } from "ulid" +import { InMemoryDistributedTransactionStorage } from "../utils" + +export type WorkflowOrchestratorRunOptions = FlowRunOptions & { + transactionId?: string + container?: ContainerLike +} + +type RegisterStepSuccessOptions = Omit< + WorkflowOrchestratorRunOptions, + "transactionId" | "input" +> + +type IdempotencyKeyParts = { + workflowId: string + transactionId: string + stepId: string + action: "invoke" | "compensate" +} + +type NotifyOptions = { + eventType: keyof DistributedTransactionEvents + workflowId: string + transactionId?: string + step?: TransactionStep + response?: unknown + result?: unknown + errors?: unknown[] +} + +type WorkflowId = string +type TransactionId = string + +type SubscriberHandler = { + (input: NotifyOptions): void +} & { + _id?: string +} + +type SubscribeOptions = { + workflowId: string + transactionId?: string + subscriber: SubscriberHandler + 
subscriberId?: string +} + +type UnsubscribeOptions = { + workflowId: string + transactionId?: string + subscriberOrId: string | SubscriberHandler +} + +type TransactionSubscribers = Map +type Subscribers = Map + +const AnySubscriber = "any" + +export class WorkflowOrchestratorService { + private subscribers: Subscribers = new Map() + + constructor({ + inMemoryDistributedTransactionStorage, + }: { + inMemoryDistributedTransactionStorage: InMemoryDistributedTransactionStorage + workflowOrchestratorService: WorkflowOrchestratorService + }) { + inMemoryDistributedTransactionStorage.setWorkflowOrchestratorService(this) + DistributedTransaction.setStorage(inMemoryDistributedTransactionStorage) + } + + @InjectSharedContext() + async run( + workflowIdOrWorkflow: string | ReturnWorkflow, + options?: WorkflowOrchestratorRunOptions, + @MedusaContext() sharedContext: Context = {} + ) { + let { + input, + context, + transactionId, + resultFrom, + throwOnError, + events: eventHandlers, + container, + } = options ?? {} + + const workflowId = isString(workflowIdOrWorkflow) + ? workflowIdOrWorkflow + : workflowIdOrWorkflow.getName() + + if (!workflowId) { + throw new Error("Workflow ID is required") + } + + context ??= {} + context.transactionId ??= transactionId ?? ulid() + + const events: FlowRunOptions["events"] = this.buildWorkflowEvents({ + customEventHandlers: eventHandlers, + workflowId, + transactionId: context.transactionId, + }) + + const exportedWorkflow: any = MedusaWorkflow.getWorkflow(workflowId) + if (!exportedWorkflow) { + throw new Error(`Workflow with id "${workflowId}" not found.`) + } + + const flow = exportedWorkflow(container as MedusaContainer) + + const ret = await flow.run({ + input, + throwOnError, + resultFrom, + context, + events, + }) + + // TODO: temporary + const acknowledgement = { + transactionId: context.transactionId, + workflowId: workflowId, + } + + if (ret.transaction.hasFinished()) { + const { result, errors } = ret + this.notify({ + eventType: "onFinish", + workflowId, + transactionId: context.transactionId, + result, + errors, + }) + } + + return { acknowledgement, ...ret } + } + + @InjectSharedContext() + async getRunningTransaction( + workflowId: string, + transactionId: string, + options?: WorkflowOrchestratorRunOptions, + @MedusaContext() sharedContext: Context = {} + ): Promise { + let { context, container } = options ?? {} + + if (!workflowId) { + throw new Error("Workflow ID is required") + } + + if (!transactionId) { + throw new Error("TransactionId ID is required") + } + + context ??= {} + context.transactionId ??= transactionId + + const exportedWorkflow: any = MedusaWorkflow.getWorkflow(workflowId) + if (!exportedWorkflow) { + throw new Error(`Workflow with id "${workflowId}" not found.`) + } + + const flow = exportedWorkflow(container as MedusaContainer) + + const transaction = await flow.getRunningTransaction(transactionId, context) + + return transaction + } + + @InjectSharedContext() + async setStepSuccess( + { + idempotencyKey, + stepResponse, + options, + }: { + idempotencyKey: string | IdempotencyKeyParts + stepResponse: unknown + options?: RegisterStepSuccessOptions + }, + @MedusaContext() sharedContext: Context = {} + ) { + const { + context, + throwOnError, + resultFrom, + container, + events: eventHandlers, + } = options ?? 
{} + + const [idempotencyKey_, { workflowId, transactionId }] = + this.buildIdempotencyKeyAndParts(idempotencyKey) + + const exportedWorkflow: any = MedusaWorkflow.getWorkflow(workflowId) + if (!exportedWorkflow) { + throw new Error(`Workflow with id "${workflowId}" not found.`) + } + + const flow = exportedWorkflow(container as MedusaContainer) + + const events = this.buildWorkflowEvents({ + customEventHandlers: eventHandlers, + transactionId, + workflowId, + }) + + const ret = await flow.registerStepSuccess({ + idempotencyKey: idempotencyKey_, + context, + resultFrom, + throwOnError, + events, + response: stepResponse, + }) + + if (ret.transaction.hasFinished()) { + const { result, errors } = ret + this.notify({ + eventType: "onFinish", + workflowId, + transactionId, + result, + errors, + }) + } + + return ret + } + + @InjectSharedContext() + async setStepFailure( + { + idempotencyKey, + stepResponse, + options, + }: { + idempotencyKey: string | IdempotencyKeyParts + stepResponse: unknown + options?: RegisterStepSuccessOptions + }, + @MedusaContext() sharedContext: Context = {} + ) { + const { + context, + throwOnError, + resultFrom, + container, + events: eventHandlers, + } = options ?? {} + + const [idempotencyKey_, { workflowId, transactionId }] = + this.buildIdempotencyKeyAndParts(idempotencyKey) + + const exportedWorkflow: any = MedusaWorkflow.getWorkflow(workflowId) + if (!exportedWorkflow) { + throw new Error(`Workflow with id "${workflowId}" not found.`) + } + + const flow = exportedWorkflow(container as MedusaContainer) + + const events = this.buildWorkflowEvents({ + customEventHandlers: eventHandlers, + transactionId, + workflowId, + }) + + const ret = await flow.registerStepFailure({ + idempotencyKey: idempotencyKey_, + context, + resultFrom, + throwOnError, + events, + response: stepResponse, + }) + + if (ret.transaction.hasFinished()) { + const { result, errors } = ret + this.notify({ + eventType: "onFinish", + workflowId, + transactionId, + result, + errors, + }) + } + + return ret + } + + @InjectSharedContext() + subscribe( + { workflowId, transactionId, subscriber, subscriberId }: SubscribeOptions, + @MedusaContext() sharedContext: Context = {} + ) { + subscriber._id = subscriberId + const subscribers = this.subscribers.get(workflowId) ?? new Map() + + const handlerIndex = (handlers) => { + return handlers.findIndex((s) => s === subscriber || s._id === subscriberId) + } + + if (transactionId) { + const transactionSubscribers = subscribers.get(transactionId) ?? [] + const subscriberIndex = handlerIndex(transactionSubscribers) + if (subscriberIndex !== -1) { + transactionSubscribers.splice(subscriberIndex, 1) + } + + transactionSubscribers.push(subscriber) + subscribers.set(transactionId, transactionSubscribers) + this.subscribers.set(workflowId, subscribers) + return + } + + const workflowSubscribers = subscribers.get(AnySubscriber) ?? [] + const subscriberIndex = handlerIndex(workflowSubscribers) + if (subscriberIndex !== -1) { + workflowSubscribers.splice(subscriberIndex, 1) + } + + workflowSubscribers.push(subscriber) + subscribers.set(AnySubscriber, workflowSubscribers) + this.subscribers.set(workflowId, subscribers) + } + + @InjectSharedContext() + unsubscribe( + { workflowId, transactionId, subscriberOrId }: UnsubscribeOptions, + @MedusaContext() sharedContext: Context = {} + ) { + const subscribers = this.subscribers.get(workflowId) ??
new Map() + + const filterSubscribers = (handlers: SubscriberHandler[]) => { + return handlers.filter((handler) => { + return handler._id + ? handler._id !== (subscriberOrId as string) + : handler !== (subscriberOrId as SubscriberHandler) + }) + } + + if (transactionId) { + const transactionSubscribers = subscribers.get(transactionId) ?? [] + const newTransactionSubscribers = filterSubscribers( + transactionSubscribers + ) + subscribers.set(transactionId, newTransactionSubscribers) + this.subscribers.set(workflowId, subscribers) + return + } + + const workflowSubscribers = subscribers.get(AnySubscriber) ?? [] + const newWorkflowSubscribers = filterSubscribers(workflowSubscribers) + subscribers.set(AnySubscriber, newWorkflowSubscribers) + this.subscribers.set(workflowId, subscribers) + } + + private notify(options: NotifyOptions) { + const { + eventType, + workflowId, + transactionId, + errors, + result, + step, + response, + } = options + + const subscribers: TransactionSubscribers = + this.subscribers.get(workflowId) ?? new Map() + + const notifySubscribers = (handlers: SubscriberHandler[]) => { + handlers.forEach((handler) => { + handler({ + eventType, + workflowId, + transactionId, + step, + response, + result, + errors, + }) + }) + } + + if (transactionId) { + const transactionSubscribers = subscribers.get(transactionId) ?? [] + notifySubscribers(transactionSubscribers) + } + + const workflowSubscribers = subscribers.get(AnySubscriber) ?? [] + notifySubscribers(workflowSubscribers) + } + + private buildWorkflowEvents({ + customEventHandlers, + workflowId, + transactionId, + }): DistributedTransactionEvents { + const notify = ({ + eventType, + step, + result, + response, + errors, + }: { + eventType: keyof DistributedTransactionEvents + step?: TransactionStep + response?: unknown + result?: unknown + errors?: unknown[] + }) => { + this.notify({ + workflowId, + transactionId, + eventType, + response, + step, + result, + errors, + }) + } + + return { + onTimeout: ({ transaction }) => { + customEventHandlers?.onTimeout?.({ transaction }) + notify({ eventType: "onTimeout" }) + }, + + onBegin: ({ transaction }) => { + customEventHandlers?.onBegin?.({ transaction }) + notify({ eventType: "onBegin" }) + }, + onResume: ({ transaction }) => { + customEventHandlers?.onResume?.({ transaction }) + notify({ eventType: "onResume" }) + }, + onCompensateBegin: ({ transaction }) => { + customEventHandlers?.onCompensateBegin?.({ transaction }) + notify({ eventType: "onCompensateBegin" }) + }, + onFinish: ({ transaction, result, errors }) => { + // TODO: unsubscribe transaction handlers on finish + customEventHandlers?.onFinish?.({ transaction, result, errors }) + }, + + onStepBegin: ({ step, transaction }) => { + customEventHandlers?.onStepBegin?.({ step, transaction }) + + notify({ eventType: "onStepBegin", step }) + }, + onStepSuccess: ({ step, transaction }) => { + const response = transaction.getContext().invoke[step.id] + customEventHandlers?.onStepSuccess?.({ step, transaction, response }) + + notify({ eventType: "onStepSuccess", step, response }) + }, + onStepFailure: ({ step, transaction }) => { + const errors = transaction.getErrors(TransactionHandlerType.INVOKE)[ + step.id + ] + customEventHandlers?.onStepFailure?.({ step, transaction, errors }) + + notify({ eventType: "onStepFailure", step, errors }) + }, + + onCompensateStepSuccess: ({ step, transaction }) => { + const response = transaction.getContext().compensate[step.id] + customEventHandlers?.onStepSuccess?.({ step, transaction, response 
}) + + notify({ eventType: "onCompensateStepSuccess", step, response }) + }, + onCompensateStepFailure: ({ step, transaction }) => { + const errors = transaction.getErrors(TransactionHandlerType.COMPENSATE)[ + step.id + ] + customEventHandlers?.onStepFailure?.({ step, transaction, errors }) + + notify({ eventType: "onCompensateStepFailure", step, errors }) + }, + } + } + + private buildIdempotencyKeyAndParts( + idempotencyKey: string | IdempotencyKeyParts + ): [string, IdempotencyKeyParts] { + const parts: IdempotencyKeyParts = { + workflowId: "", + transactionId: "", + stepId: "", + action: "invoke", + } + let idempotencyKey_ = idempotencyKey as string + + const setParts = (workflowId, transactionId, stepId, action) => { + parts.workflowId = workflowId + parts.transactionId = transactionId + parts.stepId = stepId + parts.action = action + } + + if (!isString(idempotencyKey)) { + const { workflowId, transactionId, stepId, action } = + idempotencyKey as IdempotencyKeyParts + idempotencyKey_ = [workflowId, transactionId, stepId, action].join(":") + setParts(workflowId, transactionId, stepId, action) + } else { + const [workflowId, transactionId, stepId, action] = + idempotencyKey_.split(":") + setParts(workflowId, transactionId, stepId, action) + } + + return [idempotencyKey_, parts] + } +} diff --git a/packages/workflow-engine-inmemory/src/services/workflows-module.ts b/packages/workflow-engine-inmemory/src/services/workflows-module.ts new file mode 100644 index 0000000000000..31be5674d58a3 --- /dev/null +++ b/packages/workflow-engine-inmemory/src/services/workflows-module.ts @@ -0,0 +1,199 @@ +import { + Context, + DAL, + FindConfig, + InternalModuleDeclaration, + ModuleJoinerConfig, +} from "@medusajs/types" +import {} from "@medusajs/types/src" +import { + InjectManager, + InjectSharedContext, + MedusaContext, +} from "@medusajs/utils" +import type { + ReturnWorkflow, + UnwrapWorkflowInputDataType, + WorkflowOrchestratorTypes, +} from "@medusajs/workflows-sdk" +import { + WorkflowExecutionService, + WorkflowOrchestratorService, +} from "@services" +import { joinerConfig } from "../joiner-config" + +type InjectedDependencies = { + baseRepository: DAL.RepositoryService + workflowExecutionService: WorkflowExecutionService + workflowOrchestratorService: WorkflowOrchestratorService +} + +export class WorkflowsModuleService + implements WorkflowOrchestratorTypes.IWorkflowsModuleService +{ + protected baseRepository_: DAL.RepositoryService + protected workflowExecutionService_: WorkflowExecutionService + protected workflowOrchestratorService_: WorkflowOrchestratorService + + constructor( + { + baseRepository, + workflowExecutionService, + workflowOrchestratorService, + }: InjectedDependencies, + protected readonly moduleDeclaration: InternalModuleDeclaration + ) { + this.baseRepository_ = baseRepository + this.workflowExecutionService_ = workflowExecutionService + this.workflowOrchestratorService_ = workflowOrchestratorService + } + + __joinerConfig(): ModuleJoinerConfig { + return joinerConfig + } + + @InjectManager("baseRepository_") + async listWorkflowExecution( + filters: WorkflowOrchestratorTypes.FilterableWorkflowExecutionProps = {}, + config: FindConfig = {}, + @MedusaContext() sharedContext: Context = {} + ): Promise { + const wfExecutions = await this.workflowExecutionService_.list( + filters, + config, + sharedContext + ) + + return this.baseRepository_.serialize< + WorkflowOrchestratorTypes.WorkflowExecutionDTO[] + >(wfExecutions, { + populate: true, + }) + } + + 
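+ // Same filters and config as listWorkflowExecution, but also returns the total count (useful for pagination).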
@InjectManager("baseRepository_") + async listAndCountWorkflowExecution( + filters: WorkflowOrchestratorTypes.FilterableWorkflowExecutionProps = {}, + config: FindConfig = {}, + @MedusaContext() sharedContext: Context = {} + ): Promise<[WorkflowOrchestratorTypes.WorkflowExecutionDTO[], number]> { + const [wfExecutions, count] = + await this.workflowExecutionService_.listAndCount( + filters, + config, + sharedContext + ) + + return [ + await this.baseRepository_.serialize< + WorkflowOrchestratorTypes.WorkflowExecutionDTO[] + >(wfExecutions, { + populate: true, + }), + count, + ] + } + + @InjectSharedContext() + async run>( + workflowIdOrWorkflow: TWorkflow, + options: WorkflowOrchestratorTypes.WorkflowOrchestratorRunDTO< + TWorkflow extends ReturnWorkflow + ? UnwrapWorkflowInputDataType + : unknown + > = {}, + @MedusaContext() context: Context = {} + ) { + const ret = await this.workflowOrchestratorService_.run< + TWorkflow extends ReturnWorkflow + ? UnwrapWorkflowInputDataType + : unknown + >(workflowIdOrWorkflow, options, context) + + return ret as any + } + + @InjectSharedContext() + async getRunningTransaction( + workflowId: string, + transactionId: string, + @MedusaContext() context: Context = {} + ) { + return this.workflowOrchestratorService_.getRunningTransaction( + workflowId, + transactionId, + context + ) + } + + @InjectSharedContext() + async setStepSuccess( + { + idempotencyKey, + stepResponse, + options, + }: { + idempotencyKey: string | object + stepResponse: unknown + options?: Record + }, + @MedusaContext() context: Context = {} + ) { + return this.workflowOrchestratorService_.setStepSuccess( + { + idempotencyKey, + stepResponse, + options, + } as any, + context + ) + } + + @InjectSharedContext() + async setStepFailure( + { + idempotencyKey, + stepResponse, + options, + }: { + idempotencyKey: string | object + stepResponse: unknown + options?: Record + }, + @MedusaContext() context: Context = {} + ) { + return this.workflowOrchestratorService_.setStepFailure( + { + idempotencyKey, + stepResponse, + options, + } as any, + context + ) + } + + @InjectSharedContext() + async subscribe( + args: { + workflowId: string + transactionId?: string + subscriber: Function + subscriberId?: string + }, + @MedusaContext() context: Context = {} + ) { + return this.workflowOrchestratorService_.subscribe(args as any, context) + } + + @InjectSharedContext() + async unsubscribe( + args: { + workflowId: string + transactionId?: string + subscriberOrId: string | Function + }, + @MedusaContext() context: Context = {} + ) { + return this.workflowOrchestratorService_.unsubscribe(args as any, context) + } +} diff --git a/packages/workflow-engine-inmemory/src/types/index.ts b/packages/workflow-engine-inmemory/src/types/index.ts new file mode 100644 index 0000000000000..0f252977b02a2 --- /dev/null +++ b/packages/workflow-engine-inmemory/src/types/index.ts @@ -0,0 +1,5 @@ +import { Logger } from "@medusajs/types" + +export type InitializeModuleInjectableDependencies = { + logger?: Logger +} diff --git a/packages/workflow-engine-inmemory/src/utils/index.ts b/packages/workflow-engine-inmemory/src/utils/index.ts new file mode 100644 index 0000000000000..01bae8b302b04 --- /dev/null +++ b/packages/workflow-engine-inmemory/src/utils/index.ts @@ -0,0 +1 @@ +export * from "./workflow-orchestrator-storage" diff --git a/packages/workflow-engine-inmemory/src/utils/workflow-orchestrator-storage.ts b/packages/workflow-engine-inmemory/src/utils/workflow-orchestrator-storage.ts new file mode 100644 index 
0000000000000..7254f3b90dc2d --- /dev/null +++ b/packages/workflow-engine-inmemory/src/utils/workflow-orchestrator-storage.ts @@ -0,0 +1,201 @@ +import { + DistributedTransaction, + DistributedTransactionStorage, + TransactionCheckpoint, + TransactionStep, +} from "@medusajs/orchestration" +import { TransactionState } from "@medusajs/utils" +import { + WorkflowExecutionService, + WorkflowOrchestratorService, +} from "@services" + +// eslint-disable-next-line max-len +export class InMemoryDistributedTransactionStorage extends DistributedTransactionStorage { + private workflowExecutionService_: WorkflowExecutionService + private workflowOrchestratorService_: WorkflowOrchestratorService + + private storage: Map = new Map() + private retries: Map = new Map() + private timeouts: Map = new Map() + + constructor({ + workflowExecutionService, + }: { + workflowExecutionService: WorkflowExecutionService + }) { + super() + + this.workflowExecutionService_ = workflowExecutionService + } + + setWorkflowOrchestratorService(workflowOrchestratorService) { + this.workflowOrchestratorService_ = workflowOrchestratorService + } + + private async saveToDb(data: TransactionCheckpoint) { + await this.workflowExecutionService_.upsert([ + { + workflow_id: data.flow.modelId, + transaction_id: data.flow.transactionId, + execution: data.flow, + context: { + data: data.context, + errors: data.errors, + }, + state: data.flow.state, + }, + ]) + } + + private async deleteFromDb(data: TransactionCheckpoint) { + await this.workflowExecutionService_.delete([ + { + workflow_id: data.flow.modelId, + transaction_id: data.flow.transactionId, + }, + ]) + } + + async get(key: string): Promise { + return this.storage.get(key) + } + + async list(): Promise { + return Array.from(this.storage.values()) + } + + async save( + key: string, + data: TransactionCheckpoint, + ttl?: number + ): Promise { + this.storage.set(key, data) + + let retentionTime + + /** + * Store the retention time only if the transaction is done, failed or reverted. + * From that moment, this tuple can be later on archived or deleted after the retention time. 
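+ * If no retention time is set, the finished checkpoint is removed from the database right away instead of being kept (handled by the branches below).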
+ */ + const hasFinished = [ + TransactionState.DONE, + TransactionState.FAILED, + TransactionState.REVERTED, + ].includes(data.flow.state) + + if (hasFinished) { + retentionTime = data.flow.options?.retentionTime + Object.assign(data, { + retention_time: retentionTime, + }) + } + + if (hasFinished && !retentionTime) { + await this.deleteFromDb(data) + } else { + await this.saveToDb(data) + } + + if (hasFinished) { + this.storage.delete(key) + } + } + + async scheduleRetry( + transaction: DistributedTransaction, + step: TransactionStep, + timestamp: number, + interval: number + ): Promise { + const { modelId: workflowId, transactionId } = transaction + + const inter = setTimeout(async () => { + await this.workflowOrchestratorService_.run(workflowId, { + transactionId, + throwOnError: false, + }) + }, interval * 1e3) + + const key = `${workflowId}:${transactionId}:${step.id}` + this.retries.set(key, inter) + } + + async clearRetry( + transaction: DistributedTransaction, + step: TransactionStep + ): Promise { + const { modelId: workflowId, transactionId } = transaction + + const key = `${workflowId}:${transactionId}:${step.id}` + const inter = this.retries.get(key) + if (inter) { + clearTimeout(inter as NodeJS.Timeout) + this.retries.delete(key) + } + } + + async scheduleTransactionTimeout( + transaction: DistributedTransaction, + timestamp: number, + interval: number + ): Promise { + const { modelId: workflowId, transactionId } = transaction + + const inter = setTimeout(async () => { + await this.workflowOrchestratorService_.run(workflowId, { + transactionId, + throwOnError: false, + }) + }, interval * 1e3) + + const key = `${workflowId}:${transactionId}` + this.timeouts.set(key, inter) + } + + async clearTransactionTimeout( + transaction: DistributedTransaction + ): Promise { + const { modelId: workflowId, transactionId } = transaction + + const key = `${workflowId}:${transactionId}` + const inter = this.timeouts.get(key) + if (inter) { + clearTimeout(inter as NodeJS.Timeout) + this.timeouts.delete(key) + } + } + + async scheduleStepTimeout( + transaction: DistributedTransaction, + step: TransactionStep, + timestamp: number, + interval: number + ): Promise { + const { modelId: workflowId, transactionId } = transaction + + const inter = setTimeout(async () => { + await this.workflowOrchestratorService_.run(workflowId, { + transactionId, + throwOnError: false, + }) + }, interval * 1e3) + + const key = `${workflowId}:${transactionId}:${step.id}` + this.timeouts.set(key, inter) + } + + async clearStepTimeout( + transaction: DistributedTransaction, + step: TransactionStep + ): Promise { + const { modelId: workflowId, transactionId } = transaction + + const key = `${workflowId}:${transactionId}:${step.id}` + const inter = this.timeouts.get(key) + if (inter) { + clearTimeout(inter as NodeJS.Timeout) + this.timeouts.delete(key) + } + } +} diff --git a/packages/workflow-engine-inmemory/tsconfig.json b/packages/workflow-engine-inmemory/tsconfig.json new file mode 100644 index 0000000000000..d4e5080094a4b --- /dev/null +++ b/packages/workflow-engine-inmemory/tsconfig.json @@ -0,0 +1,38 @@ +{ + "compilerOptions": { + "lib": ["es2020"], + "target": "es2020", + "outDir": "./dist", + "esModuleInterop": true, + "declarationMap": true, + "declaration": true, + "module": "commonjs", + "moduleResolution": "node", + "emitDecoratorMetadata": true, + "experimentalDecorators": true, + "sourceMap": false, + "noImplicitReturns": true, + "strictNullChecks": true, + "strictFunctionTypes": true, + "noImplicitThis": 
true, + "allowJs": true, + "skipLibCheck": true, + "downlevelIteration": true, // to use ES5 specific tooling + "baseUrl": ".", + "resolveJsonModule": true, + "paths": { + "@models": ["./src/models"], + "@services": ["./src/services"], + "@repositories": ["./src/repositories"], + "@types": ["./src/types"] + } + }, + "include": ["src"], + "exclude": [ + "dist", + "./src/**/__tests__", + "./src/**/__mocks__", + "./src/**/__fixtures__", + "node_modules" + ] +} diff --git a/packages/workflow-engine-inmemory/tsconfig.spec.json b/packages/workflow-engine-inmemory/tsconfig.spec.json new file mode 100644 index 0000000000000..48e47e8cbb3be --- /dev/null +++ b/packages/workflow-engine-inmemory/tsconfig.spec.json @@ -0,0 +1,8 @@ +{ + "extends": "./tsconfig.json", + "include": ["src", "integration-tests"], + "exclude": ["node_modules", "dist"], + "compilerOptions": { + "sourceMap": true + } +} diff --git a/packages/workflow-engine-redis/.gitignore b/packages/workflow-engine-redis/.gitignore new file mode 100644 index 0000000000000..874c6c69d3341 --- /dev/null +++ b/packages/workflow-engine-redis/.gitignore @@ -0,0 +1,6 @@ +/dist +node_modules +.DS_store +.env* +.env +*.sql diff --git a/packages/workflow-engine-redis/CHANGELOG.md b/packages/workflow-engine-redis/CHANGELOG.md new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/packages/workflow-engine-redis/README.md b/packages/workflow-engine-redis/README.md new file mode 100644 index 0000000000000..b34e46ea20d0c --- /dev/null +++ b/packages/workflow-engine-redis/README.md @@ -0,0 +1 @@ +# Workflow Orchestrator diff --git a/packages/workflow-engine-redis/integration-tests/__fixtures__/index.ts b/packages/workflow-engine-redis/integration-tests/__fixtures__/index.ts new file mode 100644 index 0000000000000..987a8a99bd67c --- /dev/null +++ b/packages/workflow-engine-redis/integration-tests/__fixtures__/index.ts @@ -0,0 +1,4 @@ +export * from "./workflow_1" +export * from "./workflow_2" +export * from "./workflow_step_timeout" +export * from "./workflow_transaction_timeout" diff --git a/packages/workflow-engine-redis/integration-tests/__fixtures__/workflow_1.ts b/packages/workflow-engine-redis/integration-tests/__fixtures__/workflow_1.ts new file mode 100644 index 0000000000000..cb0056466e910 --- /dev/null +++ b/packages/workflow-engine-redis/integration-tests/__fixtures__/workflow_1.ts @@ -0,0 +1,65 @@ +import { + StepResponse, + createStep, + createWorkflow, +} from "@medusajs/workflows-sdk" + +const step_1 = createStep( + "step_1", + jest.fn((input) => { + input.test = "test" + return new StepResponse(input, { compensate: 123 }) + }), + jest.fn((compensateInput) => { + if (!compensateInput) { + return + } + + console.log("reverted", compensateInput.compensate) + return new StepResponse({ + reverted: true, + }) + }) +) + +const step_2 = createStep( + "step_2", + jest.fn((input, context) => { + console.log("triggered async request", context.metadata.idempotency_key) + + if (input) { + return new StepResponse({ notAsyncResponse: input.hey }) + } + }), + jest.fn((_, context) => { + return new StepResponse({ + step: context.metadata.action, + idempotency_key: context.metadata.idempotency_key, + reverted: true, + }) + }) +) + +const step_3 = createStep( + "step_3", + jest.fn((res) => { + return new StepResponse({ + done: { + inputFromSyncStep: res.notAsyncResponse, + }, + }) + }) +) + +createWorkflow("workflow_1", function (input) { + step_1(input) + + const ret2 = step_2({ hey: "oh" }) + + step_2({ hey: "async hello" }).config({ + name: 
"new_step_name", + async: true, + }) + + return step_3(ret2) +}) diff --git a/packages/workflow-engine-redis/integration-tests/__fixtures__/workflow_2.ts b/packages/workflow-engine-redis/integration-tests/__fixtures__/workflow_2.ts new file mode 100644 index 0000000000000..f15d51889fe3e --- /dev/null +++ b/packages/workflow-engine-redis/integration-tests/__fixtures__/workflow_2.ts @@ -0,0 +1,71 @@ +import { + StepResponse, + createStep, + createWorkflow, +} from "@medusajs/workflows-sdk" + +const step_1 = createStep( + "step_1", + jest.fn((input) => { + input.test = "test" + return new StepResponse(input, { compensate: 123 }) + }), + jest.fn((compensateInput) => { + if (!compensateInput) { + return + } + + console.log("reverted", compensateInput.compensate) + return new StepResponse({ + reverted: true, + }) + }) +) + +const step_2 = createStep( + "step_2", + jest.fn((input, context) => { + console.log("triggered async request", context.metadata.idempotency_key) + + if (input) { + return new StepResponse({ notAsyncResponse: input.hey }) + } + }), + jest.fn((_, context) => { + return new StepResponse({ + step: context.metadata.action, + idempotency_key: context.metadata.idempotency_key, + reverted: true, + }) + }) +) + +const step_3 = createStep( + "step_3", + jest.fn((res) => { + return new StepResponse({ + done: { + inputFromSyncStep: res.notAsyncResponse, + }, + }) + }) +) + +createWorkflow( + { + name: "workflow_2", + retentionTime: 1000, + }, + function (input) { + step_1(input) + + const ret2 = step_2({ hey: "oh" }) + + step_2({ hey: "async hello" }).config({ + name: "new_step_name", + async: true, + }) + + return step_3(ret2) + } +) diff --git a/packages/workflow-engine-redis/integration-tests/__fixtures__/workflow_step_timeout.ts b/packages/workflow-engine-redis/integration-tests/__fixtures__/workflow_step_timeout.ts new file mode 100644 index 0000000000000..0bdbf9fd9cca5 --- /dev/null +++ b/packages/workflow-engine-redis/integration-tests/__fixtures__/workflow_step_timeout.ts @@ -0,0 +1,51 @@ +import { + StepResponse, + createStep, + createWorkflow, +} from "@medusajs/workflows-sdk" +import { setTimeout } from "timers/promises" + +const step_1 = createStep( + "step_1", + jest.fn(async (input) => { + await setTimeout(200) + + return new StepResponse(input, { compensate: 123 }) + }) +) + +const step_1_async = createStep( + { + name: "step_1_async", + async: true, + timeout: 0.1, // 0.1 second + }, + + jest.fn(async (input) => { + return new StepResponse(input, { compensate: 123 }) + }) +) + +createWorkflow( + { + name: "workflow_step_timeout", + }, + function (input) { + const resp = step_1(input).config({ + timeout: 0.1, // 0.1 second + }) + + return resp + } +) + +createWorkflow( + { + name: "workflow_step_timeout_async", + }, + function (input) { + const resp = step_1_async(input) + + return resp + } +) diff --git a/packages/workflow-engine-redis/integration-tests/__fixtures__/workflow_transaction_timeout.ts b/packages/workflow-engine-redis/integration-tests/__fixtures__/workflow_transaction_timeout.ts new file mode 100644 index 0000000000000..6e1c2852f2d2f --- /dev/null +++ b/packages/workflow-engine-redis/integration-tests/__fixtures__/workflow_transaction_timeout.ts @@ -0,0 +1,44 @@ +import { + StepResponse, + createStep, + createWorkflow, +} from "@medusajs/workflows-sdk" +import { setTimeout } from "timers/promises" + +const step_1 = createStep( + "step_1", + jest.fn(async (input) => { + await setTimeout(200) + + return new StepResponse({ + executed: true, + }) + }), + 
jest.fn() +) + +createWorkflow( + { + name: "workflow_transaction_timeout", + timeout: 0.1, // 0.1 second + }, + function (input) { + const resp = step_1(input) + + return resp + } +) + +createWorkflow( + { + name: "workflow_transaction_timeout_async", + timeout: 0.1, // 0.1 second + }, + function (input) { + const resp = step_1(input).config({ + async: true, + }) + + return resp + } +) diff --git a/packages/workflow-engine-redis/integration-tests/__tests__/index.spec.ts b/packages/workflow-engine-redis/integration-tests/__tests__/index.spec.ts new file mode 100644 index 0000000000000..802fff34187d1 --- /dev/null +++ b/packages/workflow-engine-redis/integration-tests/__tests__/index.spec.ts @@ -0,0 +1,245 @@ +import { MedusaApp } from "@medusajs/modules-sdk" +import { + TransactionStepTimeoutError, + TransactionTimeoutError, +} from "@medusajs/orchestration" +import { RemoteJoinerQuery } from "@medusajs/types" +import { TransactionHandlerType } from "@medusajs/utils" +import { IWorkflowsModuleService } from "@medusajs/workflows-sdk" +import { knex } from "knex" +import { setTimeout } from "timers/promises" +import "../__fixtures__" +import { DB_URL, TestDatabase } from "../utils" + +const sharedPgConnection = knex({ + client: "pg", + searchPath: process.env.MEDUSA_WORKFLOW_ENGINE_DB_SCHEMA, + connection: { + connectionString: DB_URL, + debug: false, + }, +}) + +const afterEach_ = async () => { + await TestDatabase.clearTables(sharedPgConnection) +} + +describe("Workflow Orchestrator module", function () { + describe("Testing basic workflow", function () { + let workflowOrcModule: IWorkflowsModuleService + let query: ( + query: string | RemoteJoinerQuery | object, + variables?: Record + ) => Promise + + afterEach(afterEach_) + + beforeAll(async () => { + const { + runMigrations, + query: remoteQuery, + modules, + } = await MedusaApp({ + sharedResourcesConfig: { + database: { + connection: sharedPgConnection, + }, + }, + modulesConfig: { + workflows: { + resolve: __dirname + "/../..", + options: { + redis: { + url: "localhost:6379", + }, + }, + }, + }, + }) + + query = remoteQuery + + await runMigrations() + + workflowOrcModule = + modules.workflows as unknown as IWorkflowsModuleService + }) + + afterEach(afterEach_) + + it("should return a list of workflow executions and remove after completed when there is no retentionTime set", async () => { + await workflowOrcModule.run("workflow_1", { + input: { + value: "123", + }, + throwOnError: true, + }) + + let executionsList = await query({ + workflow_executions: { + fields: ["workflow_id", "transaction_id", "state"], + }, + }) + + expect(executionsList).toHaveLength(1) + + const { result } = await workflowOrcModule.setStepSuccess({ + idempotencyKey: { + action: TransactionHandlerType.INVOKE, + stepId: "new_step_name", + workflowId: "workflow_1", + transactionId: executionsList[0].transaction_id, + }, + stepResponse: { uhuuuu: "yeaah!" 
}, + }) + + executionsList = await query({ + workflow_executions: { + fields: ["id"], + }, + }) + + expect(executionsList).toHaveLength(0) + expect(result).toEqual({ + done: { + inputFromSyncStep: "oh", + }, + }) + }) + + it("should return a list of workflow executions and keep it saved when there is a retentionTime set", async () => { + await workflowOrcModule.run("workflow_2", { + input: { + value: "123", + }, + throwOnError: true, + transactionId: "transaction_1", + }) + + let executionsList = await query({ + workflow_executions: { + fields: ["id"], + }, + }) + + expect(executionsList).toHaveLength(1) + + await workflowOrcModule.setStepSuccess({ + idempotencyKey: { + action: TransactionHandlerType.INVOKE, + stepId: "new_step_name", + workflowId: "workflow_2", + transactionId: "transaction_1", + }, + stepResponse: { uhuuuu: "yeaah!" }, + }) + + executionsList = await query({ + workflow_executions: { + fields: ["id"], + }, + }) + + expect(executionsList).toHaveLength(1) + }) + + it("should revert the entire transaction when a step timeout expires", async () => { + const { transaction, result, errors } = await workflowOrcModule.run( + "workflow_step_timeout", + { + input: { + myInput: "123", + }, + throwOnError: false, + } + ) + + expect(transaction.flow.state).toEqual("reverted") + expect(result).toEqual({ + myInput: "123", + }) + expect(errors).toHaveLength(1) + expect(errors[0].action).toEqual("step_1") + expect(errors[0].error).toBeInstanceOf(TransactionStepTimeoutError) + }) + + it("should revert the entire transaction when the transaction timeout expires", async () => { + const { transaction, result, errors } = await workflowOrcModule.run( + "workflow_transaction_timeout", + { + input: {}, + transactionId: "trx", + throwOnError: false, + } + ) + + expect(transaction.flow.state).toEqual("reverted") + expect(result).toEqual({ executed: true }) + expect(errors).toHaveLength(1) + expect(errors[0].action).toEqual("step_1") + expect( + TransactionTimeoutError.isTransactionTimeoutError(errors[0].error) + ).toBe(true) + }) + + it("should revert the entire transaction when a step timeout expires in a async step", async () => { + await workflowOrcModule.run("workflow_step_timeout_async", { + input: { + myInput: "123", + }, + transactionId: "transaction_1", + throwOnError: false, + }) + + await setTimeout(200) + + const { transaction, result, errors } = await workflowOrcModule.run( + "workflow_step_timeout_async", + { + input: { + myInput: "123", + }, + transactionId: "transaction_1", + throwOnError: false, + } + ) + + expect(transaction.flow.state).toEqual("reverted") + expect(result).toEqual(undefined) + expect(errors).toHaveLength(1) + expect(errors[0].action).toEqual("step_1_async") + expect( + TransactionStepTimeoutError.isTransactionStepTimeoutError( + errors[0].error + ) + ).toBe(true) + }) + + it("should revert the entire transaction when the transaction timeout expires in a transaction containing an async step", async () => { + await workflowOrcModule.run("workflow_transaction_timeout_async", { + input: {}, + transactionId: "transaction_1", + throwOnError: false, + }) + + await setTimeout(200) + + const { transaction, result, errors } = await workflowOrcModule.run( + "workflow_transaction_timeout_async", + { + input: {}, + transactionId: "transaction_1", + throwOnError: false, + } + ) + + expect(transaction.flow.state).toEqual("reverted") + expect(result).toEqual(undefined) + expect(errors).toHaveLength(1) + expect(errors[0].action).toEqual("step_1") + expect( + 
TransactionTimeoutError.isTransactionTimeoutError(errors[0].error) + ).toBe(true) + }) + }) +}) diff --git a/packages/workflow-engine-redis/integration-tests/setup-env.js b/packages/workflow-engine-redis/integration-tests/setup-env.js new file mode 100644 index 0000000000000..18f30b372c4d2 --- /dev/null +++ b/packages/workflow-engine-redis/integration-tests/setup-env.js @@ -0,0 +1,6 @@ +if (typeof process.env.DB_TEMP_NAME === "undefined") { + const tempName = parseInt(process.env.JEST_WORKER_ID || "1") + process.env.DB_TEMP_NAME = `medusa-workflow-engine-redis-${tempName}` +} + +process.env.MEDUSA_WORKFLOW_ENGINE_DB_SCHEMA = "public" diff --git a/packages/workflow-engine-redis/integration-tests/setup.js b/packages/workflow-engine-redis/integration-tests/setup.js new file mode 100644 index 0000000000000..43f99aab4ac94 --- /dev/null +++ b/packages/workflow-engine-redis/integration-tests/setup.js @@ -0,0 +1,3 @@ +import { JestUtils } from "medusa-test-utils" + +JestUtils.afterAllHookDropDatabase() diff --git a/packages/workflow-engine-redis/integration-tests/utils/database.ts b/packages/workflow-engine-redis/integration-tests/utils/database.ts new file mode 100644 index 0000000000000..582baee15c7f1 --- /dev/null +++ b/packages/workflow-engine-redis/integration-tests/utils/database.ts @@ -0,0 +1,53 @@ +import * as process from "process" + +const DB_HOST = process.env.DB_HOST ?? "localhost" +const DB_USERNAME = process.env.DB_USERNAME ?? "" +const DB_PASSWORD = process.env.DB_PASSWORD +const DB_NAME = process.env.DB_TEMP_NAME + +export const DB_URL = `postgres://${DB_USERNAME}${ + DB_PASSWORD ? `:${DB_PASSWORD}` : "" +}@${DB_HOST}/${DB_NAME}` + +const Redis = require("ioredis") + +const redisUrl = process.env.REDIS_URL || "redis://localhost:6379" +const redis = new Redis(redisUrl) + +interface TestDatabase { + clearTables(knex): Promise +} + +export const TestDatabase: TestDatabase = { + clearTables: async (knex) => { + await knex.raw(` + TRUNCATE TABLE workflow_execution CASCADE; + `) + + await cleanRedis() + }, +} + +async function deleteKeysByPattern(pattern) { + const stream = redis.scanStream({ + match: pattern, + count: 100, + }) + + for await (const keys of stream) { + if (keys.length) { + const pipeline = redis.pipeline() + keys.forEach((key) => pipeline.del(key)) + await pipeline.exec() + } + } +} + +async function cleanRedis() { + try { + await deleteKeysByPattern("bull:*") + await deleteKeysByPattern("dtrans:*") + } catch (error) { + console.error("Error:", error) + } +} diff --git a/packages/workflow-engine-redis/integration-tests/utils/index.ts b/packages/workflow-engine-redis/integration-tests/utils/index.ts new file mode 100644 index 0000000000000..6b917ed30e5e7 --- /dev/null +++ b/packages/workflow-engine-redis/integration-tests/utils/index.ts @@ -0,0 +1 @@ +export * from "./database" diff --git a/packages/workflow-engine-redis/jest.config.js b/packages/workflow-engine-redis/jest.config.js new file mode 100644 index 0000000000000..860ba90a49c5e --- /dev/null +++ b/packages/workflow-engine-redis/jest.config.js @@ -0,0 +1,21 @@ +module.exports = { + moduleNameMapper: { + "^@models": "/src/models", + "^@services": "/src/services", + "^@repositories": "/src/repositories", + }, + transform: { + "^.+\\.[jt]s?$": [ + "ts-jest", + { + tsConfig: "tsconfig.spec.json", + isolatedModules: true, + }, + ], + }, + testEnvironment: `node`, + moduleFileExtensions: [`js`, `ts`], + modulePathIgnorePatterns: ["dist/"], + setupFiles: ["/integration-tests/setup-env.js"], + setupFilesAfterEnv: 
["/integration-tests/setup.js"], +} diff --git a/packages/workflow-engine-redis/mikro-orm.config.dev.ts b/packages/workflow-engine-redis/mikro-orm.config.dev.ts new file mode 100644 index 0000000000000..5468c7a41d5e0 --- /dev/null +++ b/packages/workflow-engine-redis/mikro-orm.config.dev.ts @@ -0,0 +1,8 @@ +import * as entities from "./src/models" + +module.exports = { + entities: Object.values(entities), + schema: "public", + clientUrl: "postgres://postgres@localhost/medusa-workflow-engine-redis", + type: "postgresql", +} diff --git a/packages/workflow-engine-redis/package.json b/packages/workflow-engine-redis/package.json new file mode 100644 index 0000000000000..2e8631f9c3583 --- /dev/null +++ b/packages/workflow-engine-redis/package.json @@ -0,0 +1,61 @@ +{ + "name": "@medusajs/workflow-engine-redis", + "version": "0.0.1", + "description": "Medusa Workflow Orchestrator module using Redis to track workflows executions", + "main": "dist/index.js", + "types": "dist/index.d.ts", + "files": [ + "dist" + ], + "engines": { + "node": ">=16" + }, + "repository": { + "type": "git", + "url": "https://github.com/medusajs/medusa", + "directory": "packages/workflow-engine-redis" + }, + "publishConfig": { + "access": "public" + }, + "author": "Medusa", + "license": "MIT", + "scripts": { + "watch": "tsc --build --watch", + "watch:test": "tsc --build tsconfig.spec.json --watch", + "prepublishOnly": "cross-env NODE_ENV=production tsc --build && tsc-alias -p tsconfig.json", + "build": "rimraf dist && tsc --build && tsc-alias -p tsconfig.json", + "test": "jest --passWithNoTests --runInBand --bail --forceExit -- src/**/__tests__/**/*.ts", + "test:integration": "jest --runInBand --forceExit -- integration-tests/**/__tests__/**/*.ts", + "migration:generate": " MIKRO_ORM_CLI=./mikro-orm.config.dev.ts mikro-orm migration:generate", + "migration:initial": " MIKRO_ORM_CLI=./mikro-orm.config.dev.ts mikro-orm migration:create --initial", + "migration:create": " MIKRO_ORM_CLI=./mikro-orm.config.dev.ts mikro-orm migration:create", + "migration:up": " MIKRO_ORM_CLI=./mikro-orm.config.dev.ts mikro-orm migration:up", + "orm:cache:clear": " MIKRO_ORM_CLI=./mikro-orm.config.dev.ts mikro-orm cache:clear" + }, + "devDependencies": { + "@mikro-orm/cli": "5.9.7", + "cross-env": "^5.2.1", + "jest": "^29.6.3", + "medusa-test-utils": "^1.1.40", + "rimraf": "^3.0.2", + "ts-jest": "^29.1.1", + "ts-node": "^10.9.1", + "tsc-alias": "^1.8.6", + "typescript": "^5.1.6" + }, + "dependencies": { + "@medusajs/modules-sdk": "^1.12.5", + "@medusajs/types": "^1.11.9", + "@medusajs/utils": "^1.11.2", + "@medusajs/workflows-sdk": "^0.1.0", + "@mikro-orm/core": "5.9.7", + "@mikro-orm/migrations": "5.9.7", + "@mikro-orm/postgresql": "5.9.7", + "awilix": "^8.0.0", + "bullmq": "^5.1.3", + "dotenv": "^16.1.4", + "ioredis": "^5.3.2", + "knex": "2.4.2" + } +} diff --git a/packages/workflow-engine-redis/src/index.ts b/packages/workflow-engine-redis/src/index.ts new file mode 100644 index 0000000000000..78040405651bb --- /dev/null +++ b/packages/workflow-engine-redis/src/index.ts @@ -0,0 +1,22 @@ +import { Modules } from "@medusajs/modules-sdk" +import { ModulesSdkUtils } from "@medusajs/utils" +import * as models from "@models" +import { moduleDefinition } from "./module-definition" + +export default moduleDefinition + +const migrationScriptOptions = { + moduleName: Modules.WORKFLOW_ENGINE, + models: models, + pathToMigrations: __dirname + "/migrations", +} + +export const runMigrations = ModulesSdkUtils.buildMigrationScript( + 
migrationScriptOptions +) +export const revertMigration = ModulesSdkUtils.buildRevertMigrationScript( + migrationScriptOptions +) + +export * from "./initialize" +export * from "./loaders" diff --git a/packages/workflow-engine-redis/src/initialize/index.ts b/packages/workflow-engine-redis/src/initialize/index.ts new file mode 100644 index 0000000000000..20f4f49231b99 --- /dev/null +++ b/packages/workflow-engine-redis/src/initialize/index.ts @@ -0,0 +1,36 @@ +import { + ExternalModuleDeclaration, + InternalModuleDeclaration, + MedusaModule, + MODULE_PACKAGE_NAMES, + Modules, +} from "@medusajs/modules-sdk" +import { ModulesSdkTypes } from "@medusajs/types" +import { WorkflowOrchestratorTypes } from "@medusajs/workflows-sdk" +import { moduleDefinition } from "../module-definition" +import { InitializeModuleInjectableDependencies } from "../types" + +export const initialize = async ( + options?: + | ModulesSdkTypes.ModuleServiceInitializeOptions + | ModulesSdkTypes.ModuleServiceInitializeCustomDataLayerOptions + | ExternalModuleDeclaration + | InternalModuleDeclaration, + injectedDependencies?: InitializeModuleInjectableDependencies +): Promise => { + const loaded = + // eslint-disable-next-line max-len + await MedusaModule.bootstrap( + { + moduleKey: Modules.WORKFLOW_ENGINE, + defaultPath: MODULE_PACKAGE_NAMES[Modules.WORKFLOW_ENGINE], + declaration: options as + | InternalModuleDeclaration + | ExternalModuleDeclaration, + injectedDependencies, + moduleExports: moduleDefinition, + } + ) + + return loaded[Modules.WORKFLOW_ENGINE] +} diff --git a/packages/workflow-engine-redis/src/joiner-config.ts b/packages/workflow-engine-redis/src/joiner-config.ts new file mode 100644 index 0000000000000..7999e9c3ab52d --- /dev/null +++ b/packages/workflow-engine-redis/src/joiner-config.ts @@ -0,0 +1,34 @@ +import { Modules } from "@medusajs/modules-sdk" +import { ModuleJoinerConfig } from "@medusajs/types" +import { MapToConfig } from "@medusajs/utils" +import { WorkflowExecution } from "@models" +import moduleSchema from "./schema" + +export const LinkableKeys = { + workflow_execution_id: WorkflowExecution.name, +} + +const entityLinkableKeysMap: MapToConfig = {} +Object.entries(LinkableKeys).forEach(([key, value]) => { + entityLinkableKeysMap[value] ??= [] + entityLinkableKeysMap[value].push({ + mapTo: key, + valueFrom: key.split("_").pop()!, + }) +}) + +export const entityNameToLinkableKeysMap: MapToConfig = entityLinkableKeysMap + +export const joinerConfig: ModuleJoinerConfig = { + serviceName: Modules.WORKFLOW_ENGINE, + primaryKeys: ["id"], + schema: moduleSchema, + linkableKeys: LinkableKeys, + alias: { + name: ["workflow_execution", "workflow_executions"], + args: { + entity: WorkflowExecution.name, + methodSuffix: "WorkflowExecution", + }, + }, +} diff --git a/packages/workflow-engine-redis/src/loaders/connection.ts b/packages/workflow-engine-redis/src/loaders/connection.ts new file mode 100644 index 0000000000000..580e05e95cef9 --- /dev/null +++ b/packages/workflow-engine-redis/src/loaders/connection.ts @@ -0,0 +1,36 @@ +import { + InternalModuleDeclaration, + LoaderOptions, + Modules, +} from "@medusajs/modules-sdk" +import { ModulesSdkTypes } from "@medusajs/types" +import { ModulesSdkUtils } from "@medusajs/utils" +import { EntitySchema } from "@mikro-orm/core" +import * as WorkflowOrchestratorModels from "../models" + +export default async ( + { + options, + container, + logger, + }: LoaderOptions< + | ModulesSdkTypes.ModuleServiceInitializeOptions + | 
ModulesSdkTypes.ModuleServiceInitializeCustomDataLayerOptions + >, + moduleDeclaration?: InternalModuleDeclaration +): Promise => { + const entities = Object.values( + WorkflowOrchestratorModels + ) as unknown as EntitySchema[] + const pathToMigrations = __dirname + "/../migrations" + + await ModulesSdkUtils.mikroOrmConnectionLoader({ + moduleName: Modules.WORKFLOW_ENGINE, + entities, + container, + options, + moduleDeclaration, + logger, + pathToMigrations, + }) +} diff --git a/packages/workflow-engine-redis/src/loaders/container.ts b/packages/workflow-engine-redis/src/loaders/container.ts new file mode 100644 index 0000000000000..9a0c5553b490c --- /dev/null +++ b/packages/workflow-engine-redis/src/loaders/container.ts @@ -0,0 +1,9 @@ +import { MikroOrmBaseRepository, ModulesSdkUtils } from "@medusajs/utils" +import * as ModuleModels from "@models" +import * as ModuleServices from "@services" + +export default ModulesSdkUtils.moduleContainerLoaderFactory({ + moduleModels: ModuleModels, + moduleServices: ModuleServices, + moduleRepositories: { BaseRepository: MikroOrmBaseRepository }, +}) diff --git a/packages/workflow-engine-redis/src/loaders/index.ts b/packages/workflow-engine-redis/src/loaders/index.ts new file mode 100644 index 0000000000000..8b66bc0be45ec --- /dev/null +++ b/packages/workflow-engine-redis/src/loaders/index.ts @@ -0,0 +1,4 @@ +export * from "./connection" +export * from "./container" +export * from "./redis" +export * from "./utils" diff --git a/packages/workflow-engine-redis/src/loaders/redis.ts b/packages/workflow-engine-redis/src/loaders/redis.ts new file mode 100644 index 0000000000000..8321a6d1473f3 --- /dev/null +++ b/packages/workflow-engine-redis/src/loaders/redis.ts @@ -0,0 +1,78 @@ +import { LoaderOptions } from "@medusajs/modules-sdk" +import { asValue } from "awilix" +import Redis from "ioredis" +import { RedisWorkflowsOptions } from "../types" + +export default async ({ + container, + logger, + options, +}: LoaderOptions): Promise => { + const { + url, + options: redisOptions, + pubsub, + } = options?.redis as RedisWorkflowsOptions + + // TODO: get default from ENV VAR + if (!url) { + throw Error( + "No `redis.url` provided in `workflowOrchestrator` module options. It is required for the Workflow Orchestrator Redis." + ) + } + + const cnnPubSub = pubsub ?? { url, options: redisOptions } + + const queueName = options?.queueName ?? "medusa-workflows" + + let connection + let redisPublisher + let redisSubscriber + let workerConnection + + try { + connection = await getConnection(url, redisOptions) + workerConnection = await getConnection(url, { + ...(redisOptions ?? 
{}), + maxRetriesPerRequest: null, + }) + logger?.info( + `Connection to Redis in module 'workflow-engine-redis' established` + ) + } catch (err) { + logger?.error( + `An error occurred while connecting to Redis in module 'workflow-engine-redis': ${err}` + ) + } + + try { + redisPublisher = await getConnection(cnnPubSub.url, cnnPubSub.options) + redisSubscriber = await getConnection(cnnPubSub.url, cnnPubSub.options) + logger?.info( + `Connection to Redis PubSub in module 'workflow-engine-redis' established` + ) + } catch (err) { + logger?.error( + `An error occurred while connecting to Redis PubSub in module 'workflow-engine-redis': ${err}` + ) + } + + container.register({ + redisConnection: asValue(connection), + redisWorkerConnection: asValue(workerConnection), + redisPublisher: asValue(redisPublisher), + redisSubscriber: asValue(redisSubscriber), + redisQueueName: asValue(queueName), + }) +} + +async function getConnection(url, redisOptions) { + const connection = new Redis(url, { + lazyConnect: true, + ...(redisOptions ?? {}), + }) + + await connection.connect() + + return connection +} diff --git a/packages/workflow-engine-redis/src/loaders/utils.ts b/packages/workflow-engine-redis/src/loaders/utils.ts new file mode 100644 index 0000000000000..f662dc1e177f2 --- /dev/null +++ b/packages/workflow-engine-redis/src/loaders/utils.ts @@ -0,0 +1,10 @@ +import { asClass } from "awilix" +import { RedisDistributedTransactionStorage } from "../utils" + +export default async ({ container }): Promise => { + container.register({ + redisDistributedTransactionStorage: asClass( + RedisDistributedTransactionStorage + ).singleton(), + }) +} diff --git a/packages/workflow-engine-redis/src/migrations/Migration20231228143900.ts b/packages/workflow-engine-redis/src/migrations/Migration20231228143900.ts new file mode 100644 index 0000000000000..af9958e80a783 --- /dev/null +++ b/packages/workflow-engine-redis/src/migrations/Migration20231228143900.ts @@ -0,0 +1,41 @@ +import { Migration } from "@mikro-orm/migrations" + +export class Migration20231221104256 extends Migration { + async up(): Promise { + this.addSql( + ` + CREATE TABLE IF NOT EXISTS workflow_execution + ( + id character varying NOT NULL, + workflow_id character varying NOT NULL, + transaction_id character varying NOT NULL, + execution jsonb NULL, + context jsonb NULL, + state character varying NOT NULL, + created_at timestamp WITHOUT time zone NOT NULL DEFAULT Now(), + updated_at timestamp WITHOUT time zone NOT NULL DEFAULT Now(), + deleted_at timestamp WITHOUT time zone NULL, + CONSTRAINT "PK_workflow_execution_workflow_id_transaction_id" PRIMARY KEY ("workflow_id", "transaction_id") + ); + + CREATE UNIQUE INDEX IF NOT EXISTS "IDX_workflow_execution_id" ON "workflow_execution" ("id"); + CREATE INDEX IF NOT EXISTS "IDX_workflow_execution_workflow_id" ON "workflow_execution" ("workflow_id") WHERE deleted_at IS NULL; + CREATE INDEX IF NOT EXISTS "IDX_workflow_execution_transaction_id" ON "workflow_execution" ("transaction_id") WHERE deleted_at IS NULL; + CREATE INDEX IF NOT EXISTS "IDX_workflow_execution_state" ON "workflow_execution" ("state") WHERE deleted_at IS NULL; + ` + ) + } + + async down(): Promise { + this.addSql( + ` + DROP INDEX "IDX_workflow_execution_id"; + DROP INDEX "IDX_workflow_execution_workflow_id"; + DROP INDEX "IDX_workflow_execution_transaction_id"; + DROP INDEX "IDX_workflow_execution_state"; + + DROP TABLE IF EXISTS workflow_execution; + ` + ) + } +} diff --git a/packages/workflow-engine-redis/src/models/index.ts 
b/packages/workflow-engine-redis/src/models/index.ts new file mode 100644 index 0000000000000..78fcbfa9214f9 --- /dev/null +++ b/packages/workflow-engine-redis/src/models/index.ts @@ -0,0 +1 @@ +export { default as WorkflowExecution } from "./workflow-execution" diff --git a/packages/workflow-engine-redis/src/models/workflow-execution.ts b/packages/workflow-engine-redis/src/models/workflow-execution.ts new file mode 100644 index 0000000000000..753d9e62db678 --- /dev/null +++ b/packages/workflow-engine-redis/src/models/workflow-execution.ts @@ -0,0 +1,76 @@ +import { TransactionState } from "@medusajs/orchestration" +import { DALUtils, generateEntityId } from "@medusajs/utils" +import { + BeforeCreate, + Entity, + Enum, + Filter, + Index, + OnInit, + OptionalProps, + PrimaryKey, + Property, + Unique, +} from "@mikro-orm/core" + +type OptionalFields = "deleted_at" + +@Entity() +@Unique({ + name: "IDX_workflow_execution_workflow_id_transaction_id_unique", + properties: ["workflow_id", "transaction_id"], +}) +@Filter(DALUtils.mikroOrmSoftDeletableFilterOptions) +export default class WorkflowExecution { + [OptionalProps]?: OptionalFields + + @Property({ columnType: "text", nullable: false }) + @Index({ name: "IDX_workflow_execution_id" }) + id!: string + + @Index({ name: "IDX_workflow_execution_workflow_id" }) + @PrimaryKey({ columnType: "text" }) + workflow_id: string + + @Index({ name: "IDX_workflow_execution_transaction_id" }) + @PrimaryKey({ columnType: "text" }) + transaction_id: string + + @Property({ columnType: "jsonb", nullable: true }) + execution: Record | null = null + + @Property({ columnType: "jsonb", nullable: true }) + context: Record | null = null + + @Index({ name: "IDX_workflow_execution_state" }) + @Enum(() => TransactionState) + state: TransactionState + + @Property({ + onCreate: () => new Date(), + columnType: "timestamptz", + defaultRaw: "now()", + }) + created_at: Date + + @Property({ + onCreate: () => new Date(), + onUpdate: () => new Date(), + columnType: "timestamptz", + defaultRaw: "now()", + }) + updated_at: Date + + @Property({ columnType: "timestamptz", nullable: true }) + deleted_at: Date | null = null + + @BeforeCreate() + onCreate() { + this.id = generateEntityId(this.id, "wf_exec") + } + + @OnInit() + onInit() { + this.id = generateEntityId(this.id, "wf_exec") + } +} diff --git a/packages/workflow-engine-redis/src/module-definition.ts b/packages/workflow-engine-redis/src/module-definition.ts new file mode 100644 index 0000000000000..0a3d33f5806d4 --- /dev/null +++ b/packages/workflow-engine-redis/src/module-definition.ts @@ -0,0 +1,19 @@ +import { ModuleExports } from "@medusajs/types" +import { WorkflowsModuleService } from "@services" +import loadConnection from "./loaders/connection" +import loadContainer from "./loaders/container" +import redisConnection from "./loaders/redis" +import loadUtils from "./loaders/utils" + +const service = WorkflowsModuleService +const loaders = [ + loadContainer, + loadConnection, + loadUtils, + redisConnection, +] as any + +export const moduleDefinition: ModuleExports = { + service, + loaders, +} diff --git a/packages/workflow-engine-redis/src/repositories/index.ts b/packages/workflow-engine-redis/src/repositories/index.ts new file mode 100644 index 0000000000000..8def202608b8c --- /dev/null +++ b/packages/workflow-engine-redis/src/repositories/index.ts @@ -0,0 +1,2 @@ +export { MikroOrmBaseRepository as BaseRepository } from "@medusajs/utils" +export { WorkflowExecutionRepository } from "./workflow-execution" diff --git 
a/packages/workflow-engine-redis/src/repositories/workflow-execution.ts b/packages/workflow-engine-redis/src/repositories/workflow-execution.ts new file mode 100644 index 0000000000000..9e6553ec748d8 --- /dev/null +++ b/packages/workflow-engine-redis/src/repositories/workflow-execution.ts @@ -0,0 +1,7 @@ +import { DALUtils } from "@medusajs/utils" +import { WorkflowExecution } from "@models" + +// eslint-disable-next-line max-len +export class WorkflowExecutionRepository extends DALUtils.mikroOrmBaseRepositoryFactory( + WorkflowExecution +) {} diff --git a/packages/workflow-engine-redis/src/schema/index.ts b/packages/workflow-engine-redis/src/schema/index.ts new file mode 100644 index 0000000000000..3d7d91edea1dc --- /dev/null +++ b/packages/workflow-engine-redis/src/schema/index.ts @@ -0,0 +1,26 @@ +export default ` +scalar DateTime +scalar JSON + +enum TransactionState { + NOT_STARTED + INVOKING + WAITING_TO_COMPENSATE + COMPENSATING + DONE + REVERTED + FAILED +} + +type WorkflowExecution { + id: ID! + created_at: DateTime! + updated_at: DateTime! + deleted_at: DateTime + workflow_id: string + transaction_id: string + execution: JSON + context: JSON + state: TransactionState +} +` diff --git a/packages/workflow-engine-redis/src/services/__tests__/index.spec.ts b/packages/workflow-engine-redis/src/services/__tests__/index.spec.ts new file mode 100644 index 0000000000000..728f6245c6bfd --- /dev/null +++ b/packages/workflow-engine-redis/src/services/__tests__/index.spec.ts @@ -0,0 +1,5 @@ +describe("Noop test", () => { + it("noop check", async () => { + expect(true).toBe(true) + }) +}) diff --git a/packages/workflow-engine-redis/src/services/index.ts b/packages/workflow-engine-redis/src/services/index.ts new file mode 100644 index 0000000000000..5a6d313d860b3 --- /dev/null +++ b/packages/workflow-engine-redis/src/services/index.ts @@ -0,0 +1,3 @@ +export * from "./workflow-execution" +export * from "./workflow-orchestrator" +export * from "./workflows-module" diff --git a/packages/workflow-engine-redis/src/services/workflow-execution.ts b/packages/workflow-engine-redis/src/services/workflow-execution.ts new file mode 100644 index 0000000000000..158557ec0bae8 --- /dev/null +++ b/packages/workflow-engine-redis/src/services/workflow-execution.ts @@ -0,0 +1,21 @@ +import { DAL } from "@medusajs/types" +import { ModulesSdkUtils } from "@medusajs/utils" +import { WorkflowExecution } from "@models" + +type InjectedDependencies = { + workflowExecutionRepository: DAL.RepositoryService +} + +export class WorkflowExecutionService< + TEntity extends WorkflowExecution = WorkflowExecution +> extends ModulesSdkUtils.abstractServiceFactory( + WorkflowExecution +) { + protected workflowExecutionRepository_: DAL.RepositoryService + + constructor({ workflowExecutionRepository }: InjectedDependencies) { + // @ts-ignore + super(...arguments) + this.workflowExecutionRepository_ = workflowExecutionRepository + } +} diff --git a/packages/workflow-engine-redis/src/services/workflow-orchestrator.ts b/packages/workflow-engine-redis/src/services/workflow-orchestrator.ts new file mode 100644 index 0000000000000..77770a5c741e5 --- /dev/null +++ b/packages/workflow-engine-redis/src/services/workflow-orchestrator.ts @@ -0,0 +1,577 @@ +import { + DistributedTransaction, + DistributedTransactionEvents, + TransactionHandlerType, + TransactionStep, +} from "@medusajs/orchestration" +import { ContainerLike, Context, MedusaContainer } from "@medusajs/types" +import { InjectSharedContext, MedusaContext, isString } from 
"@medusajs/utils" +import { + FlowRunOptions, + MedusaWorkflow, + ReturnWorkflow, +} from "@medusajs/workflows-sdk" +import Redis from "ioredis" +import { ulid } from "ulid" +import type { RedisDistributedTransactionStorage } from "../utils" + +export type WorkflowOrchestratorRunOptions = FlowRunOptions & { + transactionId?: string + container?: ContainerLike +} + +type RegisterStepSuccessOptions = Omit< + WorkflowOrchestratorRunOptions, + "transactionId" | "input" +> + +type IdempotencyKeyParts = { + workflowId: string + transactionId: string + stepId: string + action: "invoke" | "compensate" +} + +type NotifyOptions = { + eventType: keyof DistributedTransactionEvents + workflowId: string + transactionId?: string + step?: TransactionStep + response?: unknown + result?: unknown + errors?: unknown[] +} + +type WorkflowId = string +type TransactionId = string + +type SubscriberHandler = { + (input: NotifyOptions): void +} & { + _id?: string +} + +type SubscribeOptions = { + workflowId: string + transactionId?: string + subscriber: SubscriberHandler + subscriberId?: string +} + +type UnsubscribeOptions = { + workflowId: string + transactionId?: string + subscriberOrId: string | SubscriberHandler +} + +type TransactionSubscribers = Map +type Subscribers = Map + +const AnySubscriber = "any" + +export class WorkflowOrchestratorService { + private instanceId = ulid() + protected redisPublisher: Redis + protected redisSubscriber: Redis + private subscribers: Subscribers = new Map() + + constructor({ + redisDistributedTransactionStorage, + redisPublisher, + redisSubscriber, + }: { + redisDistributedTransactionStorage: RedisDistributedTransactionStorage + workflowOrchestratorService: WorkflowOrchestratorService + redisPublisher: Redis + redisSubscriber: Redis + }) { + this.redisPublisher = redisPublisher + this.redisSubscriber = redisSubscriber + + redisDistributedTransactionStorage.setWorkflowOrchestratorService(this) + DistributedTransaction.setStorage(redisDistributedTransactionStorage) + + this.redisSubscriber.on("message", async (_, message) => { + const { instanceId, data } = JSON.parse(message) + + await this.notify(data, false, instanceId) + }) + } + + @InjectSharedContext() + async run( + workflowIdOrWorkflow: string | ReturnWorkflow, + options?: WorkflowOrchestratorRunOptions, + @MedusaContext() sharedContext: Context = {} + ) { + let { + input, + context, + transactionId, + resultFrom, + throwOnError, + events: eventHandlers, + container, + } = options ?? {} + + const workflowId = isString(workflowIdOrWorkflow) + ? workflowIdOrWorkflow + : workflowIdOrWorkflow.getName() + + if (!workflowId) { + throw new Error("Workflow ID is required") + } + + context ??= {} + context.transactionId ??= transactionId ?? 
ulid() + + const events: FlowRunOptions["events"] = this.buildWorkflowEvents({ + customEventHandlers: eventHandlers, + workflowId, + transactionId: context.transactionId, + }) + + const exportedWorkflow: any = MedusaWorkflow.getWorkflow(workflowId) + if (!exportedWorkflow) { + throw new Error(`Workflow with id "${workflowId}" not found.`) + } + + const flow = exportedWorkflow(container as MedusaContainer) + + const ret = await flow.run({ + input, + throwOnError, + resultFrom, + context, + events, + }) + + // TODO: temporary + const acknowledgement = { + transactionId: context.transactionId, + workflowId: workflowId, + } + + if (ret.transaction.hasFinished()) { + const { result, errors } = ret + await this.notify({ + eventType: "onFinish", + workflowId, + transactionId: context.transactionId, + result, + errors, + }) + } + + return { acknowledgement, ...ret } + } + + @InjectSharedContext() + async getRunningTransaction( + workflowId: string, + transactionId: string, + options?: WorkflowOrchestratorRunOptions, + @MedusaContext() sharedContext: Context = {} + ): Promise { + let { context, container } = options ?? {} + + if (!workflowId) { + throw new Error("Workflow ID is required") + } + + if (!transactionId) { + throw new Error("TransactionId ID is required") + } + + context ??= {} + context.transactionId ??= transactionId + + const exportedWorkflow: any = MedusaWorkflow.getWorkflow(workflowId) + if (!exportedWorkflow) { + throw new Error(`Workflow with id "${workflowId}" not found.`) + } + + const flow = exportedWorkflow(container as MedusaContainer) + + const transaction = await flow.getRunningTransaction(transactionId, context) + + return transaction + } + + @InjectSharedContext() + async setStepSuccess( + { + idempotencyKey, + stepResponse, + options, + }: { + idempotencyKey: string | IdempotencyKeyParts + stepResponse: unknown + options?: RegisterStepSuccessOptions + }, + @MedusaContext() sharedContext: Context = {} + ) { + const { + context, + throwOnError, + resultFrom, + container, + events: eventHandlers, + } = options ?? {} + + const [idempotencyKey_, { workflowId, transactionId }] = + this.buildIdempotencyKeyAndParts(idempotencyKey) + + const exportedWorkflow: any = MedusaWorkflow.getWorkflow(workflowId) + if (!exportedWorkflow) { + throw new Error(`Workflow with id "${workflowId}" not found.`) + } + + const flow = exportedWorkflow(container as MedusaContainer) + + const events = this.buildWorkflowEvents({ + customEventHandlers: eventHandlers, + transactionId, + workflowId, + }) + + const ret = await flow.registerStepSuccess({ + idempotencyKey: idempotencyKey_, + context, + resultFrom, + throwOnError, + events, + response: stepResponse, + }) + + if (ret.transaction.hasFinished()) { + const { result, errors } = ret + await this.notify({ + eventType: "onFinish", + workflowId, + transactionId, + result, + errors, + }) + } + + return ret + } + + @InjectSharedContext() + async setStepFailure( + { + idempotencyKey, + stepResponse, + options, + }: { + idempotencyKey: string | IdempotencyKeyParts + stepResponse: unknown + options?: RegisterStepSuccessOptions + }, + @MedusaContext() sharedContext: Context = {} + ) { + const { + context, + throwOnError, + resultFrom, + container, + events: eventHandlers, + } = options ?? 
{} + + const [idempotencyKey_, { workflowId, transactionId }] = + this.buildIdempotencyKeyAndParts(idempotencyKey) + + const exportedWorkflow: any = MedusaWorkflow.getWorkflow(workflowId) + if (!exportedWorkflow) { + throw new Error(`Workflow with id "${workflowId}" not found.`) + } + + const flow = exportedWorkflow(container as MedusaContainer) + + const events = this.buildWorkflowEvents({ + customEventHandlers: eventHandlers, + transactionId, + workflowId, + }) + + const ret = await flow.registerStepFailure({ + idempotencyKey: idempotencyKey_, + context, + resultFrom, + throwOnError, + events, + response: stepResponse, + }) + + if (ret.transaction.hasFinished()) { + const { result, errors } = ret + await this.notify({ + eventType: "onFinish", + workflowId, + transactionId, + result, + errors, + }) + } + + return ret + } + + @InjectSharedContext() + subscribe( + { workflowId, transactionId, subscriber, subscriberId }: SubscribeOptions, + @MedusaContext() sharedContext: Context = {} + ) { + subscriber._id = subscriberId + const subscribers = this.subscribers.get(workflowId) ?? new Map() + + // Subscribe instance to redis + if (!this.subscribers.has(workflowId)) { + void this.redisSubscriber.subscribe(this.getChannelName(workflowId)) + } + + const handlerIndex = (handlers) => { + return handlers.indexOf((s) => s === subscriber || s._id === subscriberId) + } + + if (transactionId) { + const transactionSubscribers = subscribers.get(transactionId) ?? [] + const subscriberIndex = handlerIndex(transactionSubscribers) + if (subscriberIndex !== -1) { + transactionSubscribers.slice(subscriberIndex, 1) + } + + transactionSubscribers.push(subscriber) + subscribers.set(transactionId, transactionSubscribers) + this.subscribers.set(workflowId, subscribers) + return + } + + const workflowSubscribers = subscribers.get(AnySubscriber) ?? [] + const subscriberIndex = handlerIndex(workflowSubscribers) + if (subscriberIndex !== -1) { + workflowSubscribers.slice(subscriberIndex, 1) + } + + workflowSubscribers.push(subscriber) + subscribers.set(AnySubscriber, workflowSubscribers) + this.subscribers.set(workflowId, subscribers) + } + + @InjectSharedContext() + unsubscribe( + { workflowId, transactionId, subscriberOrId }: UnsubscribeOptions, + @MedusaContext() sharedContext: Context = {} + ) { + const subscribers = this.subscribers.get(workflowId) ?? new Map() + + const filterSubscribers = (handlers: SubscriberHandler[]) => { + return handlers.filter((handler) => { + return handler._id + ? handler._id !== (subscriberOrId as string) + : handler !== (subscriberOrId as SubscriberHandler) + }) + } + + // Unsubscribe instance + if (!this.subscribers.has(workflowId)) { + void this.redisSubscriber.unsubscribe(this.getChannelName(workflowId)) + } + + if (transactionId) { + const transactionSubscribers = subscribers.get(transactionId) ?? [] + const newTransactionSubscribers = filterSubscribers( + transactionSubscribers + ) + subscribers.set(transactionId, newTransactionSubscribers) + this.subscribers.set(workflowId, subscribers) + return + } + + const workflowSubscribers = subscribers.get(AnySubscriber) ?? 
[] + const newWorkflowSubscribers = filterSubscribers(workflowSubscribers) + subscribers.set(AnySubscriber, newWorkflowSubscribers) + this.subscribers.set(workflowId, subscribers) + } + + private async notify( + options: NotifyOptions, + publish = true, + instanceId = this.instanceId + ) { + if (!publish && instanceId === this.instanceId) { + return + } + + if (publish) { + const channel = this.getChannelName(options.workflowId) + + const message = JSON.stringify({ + instanceId: this.instanceId, + data: options, + }) + await this.redisPublisher.publish(channel, message) + } + + const { + eventType, + workflowId, + transactionId, + errors, + result, + step, + response, + } = options + + const subscribers: TransactionSubscribers = + this.subscribers.get(workflowId) ?? new Map() + + const notifySubscribers = (handlers: SubscriberHandler[]) => { + handlers.forEach((handler) => { + handler({ + eventType, + workflowId, + transactionId, + step, + response, + result, + errors, + }) + }) + } + + if (transactionId) { + const transactionSubscribers = subscribers.get(transactionId) ?? [] + notifySubscribers(transactionSubscribers) + } + + const workflowSubscribers = subscribers.get(AnySubscriber) ?? [] + notifySubscribers(workflowSubscribers) + } + + private getChannelName(workflowId: string): string { + return `orchestrator:${workflowId}` + } + + private buildWorkflowEvents({ + customEventHandlers, + workflowId, + transactionId, + }): DistributedTransactionEvents { + const notify = async ({ + eventType, + step, + result, + response, + errors, + }: { + eventType: keyof DistributedTransactionEvents + step?: TransactionStep + response?: unknown + result?: unknown + errors?: unknown[] + }) => { + await this.notify({ + workflowId, + transactionId, + eventType, + response, + step, + result, + errors, + }) + } + + return { + onTimeout: async ({ transaction }) => { + customEventHandlers?.onTimeout?.({ transaction }) + await notify({ eventType: "onTimeout" }) + }, + + onBegin: async ({ transaction }) => { + customEventHandlers?.onBegin?.({ transaction }) + await notify({ eventType: "onBegin" }) + }, + onResume: async ({ transaction }) => { + customEventHandlers?.onResume?.({ transaction }) + await notify({ eventType: "onResume" }) + }, + onCompensateBegin: async ({ transaction }) => { + customEventHandlers?.onCompensateBegin?.({ transaction }) + await notify({ eventType: "onCompensateBegin" }) + }, + onFinish: async ({ transaction, result, errors }) => { + // TODO: unsubscribe transaction handlers on finish + customEventHandlers?.onFinish?.({ transaction, result, errors }) + }, + + onStepBegin: async ({ step, transaction }) => { + customEventHandlers?.onStepBegin?.({ step, transaction }) + + await notify({ eventType: "onStepBegin", step }) + }, + onStepSuccess: async ({ step, transaction }) => { + const response = transaction.getContext().invoke[step.id] + customEventHandlers?.onStepSuccess?.({ step, transaction, response }) + + await notify({ eventType: "onStepSuccess", step, response }) + }, + onStepFailure: async ({ step, transaction }) => { + const errors = transaction.getErrors(TransactionHandlerType.INVOKE)[ + step.id + ] + customEventHandlers?.onStepFailure?.({ step, transaction, errors }) + + await notify({ eventType: "onStepFailure", step, errors }) + }, + + onCompensateStepSuccess: async ({ step, transaction }) => { + const response = transaction.getContext().compensate[step.id] + customEventHandlers?.onStepSuccess?.({ step, transaction, response }) + + await notify({ eventType: 
"onCompensateStepSuccess", step, response }) + }, + onCompensateStepFailure: async ({ step, transaction }) => { + const errors = transaction.getErrors(TransactionHandlerType.COMPENSATE)[ + step.id + ] + customEventHandlers?.onStepFailure?.({ step, transaction, errors }) + + await notify({ eventType: "onCompensateStepFailure", step, errors }) + }, + } + } + + private buildIdempotencyKeyAndParts( + idempotencyKey: string | IdempotencyKeyParts + ): [string, IdempotencyKeyParts] { + const parts: IdempotencyKeyParts = { + workflowId: "", + transactionId: "", + stepId: "", + action: "invoke", + } + let idempotencyKey_ = idempotencyKey as string + + const setParts = (workflowId, transactionId, stepId, action) => { + parts.workflowId = workflowId + parts.transactionId = transactionId + parts.stepId = stepId + parts.action = action + } + + if (!isString(idempotencyKey)) { + const { workflowId, transactionId, stepId, action } = + idempotencyKey as IdempotencyKeyParts + idempotencyKey_ = [workflowId, transactionId, stepId, action].join(":") + setParts(workflowId, transactionId, stepId, action) + } else { + const [workflowId, transactionId, stepId, action] = + idempotencyKey_.split(":") + setParts(workflowId, transactionId, stepId, action) + } + + return [idempotencyKey_, parts] + } +} diff --git a/packages/workflow-engine-redis/src/services/workflows-module.ts b/packages/workflow-engine-redis/src/services/workflows-module.ts new file mode 100644 index 0000000000000..31be5674d58a3 --- /dev/null +++ b/packages/workflow-engine-redis/src/services/workflows-module.ts @@ -0,0 +1,199 @@ +import { + Context, + DAL, + FindConfig, + InternalModuleDeclaration, + ModuleJoinerConfig, +} from "@medusajs/types" +import {} from "@medusajs/types/src" +import { + InjectManager, + InjectSharedContext, + MedusaContext, +} from "@medusajs/utils" +import type { + ReturnWorkflow, + UnwrapWorkflowInputDataType, + WorkflowOrchestratorTypes, +} from "@medusajs/workflows-sdk" +import { + WorkflowExecutionService, + WorkflowOrchestratorService, +} from "@services" +import { joinerConfig } from "../joiner-config" + +type InjectedDependencies = { + baseRepository: DAL.RepositoryService + workflowExecutionService: WorkflowExecutionService + workflowOrchestratorService: WorkflowOrchestratorService +} + +export class WorkflowsModuleService + implements WorkflowOrchestratorTypes.IWorkflowsModuleService +{ + protected baseRepository_: DAL.RepositoryService + protected workflowExecutionService_: WorkflowExecutionService + protected workflowOrchestratorService_: WorkflowOrchestratorService + + constructor( + { + baseRepository, + workflowExecutionService, + workflowOrchestratorService, + }: InjectedDependencies, + protected readonly moduleDeclaration: InternalModuleDeclaration + ) { + this.baseRepository_ = baseRepository + this.workflowExecutionService_ = workflowExecutionService + this.workflowOrchestratorService_ = workflowOrchestratorService + } + + __joinerConfig(): ModuleJoinerConfig { + return joinerConfig + } + + @InjectManager("baseRepository_") + async listWorkflowExecution( + filters: WorkflowOrchestratorTypes.FilterableWorkflowExecutionProps = {}, + config: FindConfig = {}, + @MedusaContext() sharedContext: Context = {} + ): Promise { + const wfExecutions = await this.workflowExecutionService_.list( + filters, + config, + sharedContext + ) + + return this.baseRepository_.serialize< + WorkflowOrchestratorTypes.WorkflowExecutionDTO[] + >(wfExecutions, { + populate: true, + }) + } + + @InjectManager("baseRepository_") + async 
listAndCountWorkflowExecution( + filters: WorkflowOrchestratorTypes.FilterableWorkflowExecutionProps = {}, + config: FindConfig = {}, + @MedusaContext() sharedContext: Context = {} + ): Promise<[WorkflowOrchestratorTypes.WorkflowExecutionDTO[], number]> { + const [wfExecutions, count] = + await this.workflowExecutionService_.listAndCount( + filters, + config, + sharedContext + ) + + return [ + await this.baseRepository_.serialize< + WorkflowOrchestratorTypes.WorkflowExecutionDTO[] + >(wfExecutions, { + populate: true, + }), + count, + ] + } + + @InjectSharedContext() + async run>( + workflowIdOrWorkflow: TWorkflow, + options: WorkflowOrchestratorTypes.WorkflowOrchestratorRunDTO< + TWorkflow extends ReturnWorkflow + ? UnwrapWorkflowInputDataType + : unknown + > = {}, + @MedusaContext() context: Context = {} + ) { + const ret = await this.workflowOrchestratorService_.run< + TWorkflow extends ReturnWorkflow + ? UnwrapWorkflowInputDataType + : unknown + >(workflowIdOrWorkflow, options, context) + + return ret as any + } + + @InjectSharedContext() + async getRunningTransaction( + workflowId: string, + transactionId: string, + @MedusaContext() context: Context = {} + ) { + return this.workflowOrchestratorService_.getRunningTransaction( + workflowId, + transactionId, + context + ) + } + + @InjectSharedContext() + async setStepSuccess( + { + idempotencyKey, + stepResponse, + options, + }: { + idempotencyKey: string | object + stepResponse: unknown + options?: Record + }, + @MedusaContext() context: Context = {} + ) { + return this.workflowOrchestratorService_.setStepSuccess( + { + idempotencyKey, + stepResponse, + options, + } as any, + context + ) + } + + @InjectSharedContext() + async setStepFailure( + { + idempotencyKey, + stepResponse, + options, + }: { + idempotencyKey: string | object + stepResponse: unknown + options?: Record + }, + @MedusaContext() context: Context = {} + ) { + return this.workflowOrchestratorService_.setStepFailure( + { + idempotencyKey, + stepResponse, + options, + } as any, + context + ) + } + + @InjectSharedContext() + async subscribe( + args: { + workflowId: string + transactionId?: string + subscriber: Function + subscriberId?: string + }, + @MedusaContext() context: Context = {} + ) { + return this.workflowOrchestratorService_.subscribe(args as any, context) + } + + @InjectSharedContext() + async unsubscribe( + args: { + workflowId: string + transactionId?: string + subscriberOrId: string | Function + }, + @MedusaContext() context: Context = {} + ) { + return this.workflowOrchestratorService_.unsubscribe(args as any, context) + } +} diff --git a/packages/workflow-engine-redis/src/types/index.ts b/packages/workflow-engine-redis/src/types/index.ts new file mode 100644 index 0000000000000..1b066ce1d88af --- /dev/null +++ b/packages/workflow-engine-redis/src/types/index.ts @@ -0,0 +1,34 @@ +import { Logger } from "@medusajs/types" +import { RedisOptions } from "ioredis" + +export type InitializeModuleInjectableDependencies = { + logger?: Logger +} + +/** + * Module config type + */ +export type RedisWorkflowsOptions = { + /** + * Redis connection string + */ + url?: string + + /** + * Queue name used for retries and timeouts + */ + queueName?: string + + /** + * Redis client options + */ + options?: RedisOptions + + /** + * Optiona connection string and options to pub/sub + */ + pubsub?: { + url: string + options?: RedisOptions + } +} diff --git a/packages/workflow-engine-redis/src/utils/index.ts b/packages/workflow-engine-redis/src/utils/index.ts new file mode 100644 
index 0000000000000..01bae8b302b04 --- /dev/null +++ b/packages/workflow-engine-redis/src/utils/index.ts @@ -0,0 +1 @@ +export * from "./workflow-orchestrator-storage" diff --git a/packages/workflow-engine-redis/src/utils/workflow-orchestrator-storage.ts b/packages/workflow-engine-redis/src/utils/workflow-orchestrator-storage.ts new file mode 100644 index 0000000000000..533181cf7f8fc --- /dev/null +++ b/packages/workflow-engine-redis/src/utils/workflow-orchestrator-storage.ts @@ -0,0 +1,304 @@ +import { + DistributedTransaction, + DistributedTransactionStorage, + TransactionCheckpoint, + TransactionStep, +} from "@medusajs/orchestration" +import { TransactionState } from "@medusajs/utils" +import { + WorkflowExecutionService, + WorkflowOrchestratorService, +} from "@services" +import { Queue, Worker } from "bullmq" +import Redis from "ioredis" + +enum JobType { + RETRY = "retry", + STEP_TIMEOUT = "step_timeout", + TRANSACTION_TIMEOUT = "transaction_timeout", +} + +// eslint-disable-next-line max-len +export class RedisDistributedTransactionStorage extends DistributedTransactionStorage { + private static TTL_AFTER_COMPLETED = 60 * 15 // 15 minutes + private workflowExecutionService_: WorkflowExecutionService + private workflowOrchestratorService_: WorkflowOrchestratorService + + private redisClient: Redis + private queue: Queue + private worker: Worker + + constructor({ + workflowExecutionService, + redisConnection, + redisWorkerConnection, + redisQueueName, + }: { + workflowExecutionService: WorkflowExecutionService + redisConnection: Redis + redisWorkerConnection: Redis + redisQueueName: string + }) { + super() + + this.workflowExecutionService_ = workflowExecutionService + + this.redisClient = redisConnection + + this.queue = new Queue(redisQueueName, { connection: this.redisClient }) + this.worker = new Worker( + redisQueueName, + async (job) => { + const allJobs = [ + JobType.RETRY, + JobType.STEP_TIMEOUT, + JobType.TRANSACTION_TIMEOUT, + ] + + if (allJobs.includes(job.name as JobType)) { + await this.executeTransaction( + job.data.workflowId, + job.data.transactionId + ) + } + }, + { connection: redisWorkerConnection } + ) + } + + setWorkflowOrchestratorService(workflowOrchestratorService) { + this.workflowOrchestratorService_ = workflowOrchestratorService + } + + private async saveToDb(data: TransactionCheckpoint) { + await this.workflowExecutionService_.upsert([ + { + workflow_id: data.flow.modelId, + transaction_id: data.flow.transactionId, + execution: data.flow, + context: { + data: data.context, + errors: data.errors, + }, + state: data.flow.state, + }, + ]) + } + + private async deleteFromDb(data: TransactionCheckpoint) { + await this.workflowExecutionService_.delete([ + { + workflow_id: data.flow.modelId, + transaction_id: data.flow.transactionId, + }, + ]) + } + + private async executeTransaction(workflowId: string, transactionId: string) { + return await this.workflowOrchestratorService_.run(workflowId, { + transactionId, + throwOnError: false, + }) + } + + private stringifyWithSymbol(key, value) { + if (key === "__type" && typeof value === "symbol") { + return Symbol.keyFor(value) + } + + return value + } + + private jsonWithSymbol(key, value) { + if (key === "__type" && typeof value === "string") { + return Symbol.for(value) + } + + return value + } + + async get(key: string): Promise { + const data = await this.redisClient.get(key) + + return data ? 
JSON.parse(data, this.jsonWithSymbol) : undefined + } + + async list(): Promise { + const keys = await this.redisClient.keys( + DistributedTransaction.keyPrefix + ":*" + ) + const transactions: any[] = [] + for (const key of keys) { + const data = await this.redisClient.get(key) + if (data) { + transactions.push(JSON.parse(data, this.jsonWithSymbol)) + } + } + return transactions + } + + async save( + key: string, + data: TransactionCheckpoint, + ttl?: number + ): Promise { + let retentionTime + + /** + * Store the retention time only if the transaction is done, failed or reverted. + * From that moment, this tuple can be later on archived or deleted after the retention time. + */ + const hasFinished = [ + TransactionState.DONE, + TransactionState.FAILED, + TransactionState.REVERTED, + ].includes(data.flow.state) + + if (hasFinished) { + retentionTime = data.flow.options?.retentionTime + Object.assign(data, { + retention_time: retentionTime, + }) + } + + if (!hasFinished) { + if (ttl) { + await this.redisClient.set( + key, + JSON.stringify(data, this.stringifyWithSymbol), + "EX", + ttl + ) + } else { + await this.redisClient.set( + key, + JSON.stringify(data, this.stringifyWithSymbol) + ) + } + } + + if (hasFinished && !retentionTime) { + await this.deleteFromDb(data) + } else { + await this.saveToDb(data) + } + + if (hasFinished) { + // await this.redisClient.del(key) + await this.redisClient.set( + key, + JSON.stringify(data, this.stringifyWithSymbol), + "EX", + RedisDistributedTransactionStorage.TTL_AFTER_COMPLETED + ) + } + } + + async scheduleRetry( + transaction: DistributedTransaction, + step: TransactionStep, + timestamp: number, + interval: number + ): Promise { + await this.queue.add( + JobType.RETRY, + { + workflowId: transaction.modelId, + transactionId: transaction.transactionId, + stepId: step.id, + }, + { + delay: interval * 1000, + jobId: this.getJobId(JobType.RETRY, transaction, step), + removeOnComplete: true, + } + ) + } + + async clearRetry( + transaction: DistributedTransaction, + step: TransactionStep + ): Promise { + await this.removeJob(JobType.RETRY, transaction, step) + } + + async scheduleTransactionTimeout( + transaction: DistributedTransaction, + timestamp: number, + interval: number + ): Promise { + await this.queue.add( + JobType.TRANSACTION_TIMEOUT, + { + workflowId: transaction.modelId, + transactionId: transaction.transactionId, + }, + { + delay: interval * 1000, + jobId: this.getJobId(JobType.TRANSACTION_TIMEOUT, transaction), + removeOnComplete: true, + } + ) + } + + async clearTransactionTimeout( + transaction: DistributedTransaction + ): Promise { + await this.removeJob(JobType.TRANSACTION_TIMEOUT, transaction) + } + + async scheduleStepTimeout( + transaction: DistributedTransaction, + step: TransactionStep, + timestamp: number, + interval: number + ): Promise { + await this.queue.add( + JobType.STEP_TIMEOUT, + { + workflowId: transaction.modelId, + transactionId: transaction.transactionId, + stepId: step.id, + }, + { + delay: interval * 1000, + jobId: this.getJobId(JobType.STEP_TIMEOUT, transaction, step), + removeOnComplete: true, + } + ) + } + + async clearStepTimeout( + transaction: DistributedTransaction, + step: TransactionStep + ): Promise { + await this.removeJob(JobType.STEP_TIMEOUT, transaction, step) + } + + private getJobId( + type: JobType, + transaction: DistributedTransaction, + step?: TransactionStep + ) { + const key = [type, transaction.modelId, transaction.transactionId] + + if (step) { + key.push(step.id) + } + + return key.join(":") 
+ } + + private async removeJob( + type: JobType, + transaction: DistributedTransaction, + step?: TransactionStep + ) { + const jobId = this.getJobId(type, transaction, step) + const job = await this.queue.getJob(jobId) + + if (job && job.attemptsStarted === 0) { + await job.remove() + } + } +} diff --git a/packages/workflow-engine-redis/tsconfig.json b/packages/workflow-engine-redis/tsconfig.json new file mode 100644 index 0000000000000..d4e5080094a4b --- /dev/null +++ b/packages/workflow-engine-redis/tsconfig.json @@ -0,0 +1,38 @@ +{ + "compilerOptions": { + "lib": ["es2020"], + "target": "es2020", + "outDir": "./dist", + "esModuleInterop": true, + "declarationMap": true, + "declaration": true, + "module": "commonjs", + "moduleResolution": "node", + "emitDecoratorMetadata": true, + "experimentalDecorators": true, + "sourceMap": false, + "noImplicitReturns": true, + "strictNullChecks": true, + "strictFunctionTypes": true, + "noImplicitThis": true, + "allowJs": true, + "skipLibCheck": true, + "downlevelIteration": true, // to use ES5 specific tooling + "baseUrl": ".", + "resolveJsonModule": true, + "paths": { + "@models": ["./src/models"], + "@services": ["./src/services"], + "@repositories": ["./src/repositories"], + "@types": ["./src/types"] + } + }, + "include": ["src"], + "exclude": [ + "dist", + "./src/**/__tests__", + "./src/**/__mocks__", + "./src/**/__fixtures__", + "node_modules" + ] +} diff --git a/packages/workflow-engine-redis/tsconfig.spec.json b/packages/workflow-engine-redis/tsconfig.spec.json new file mode 100644 index 0000000000000..48e47e8cbb3be --- /dev/null +++ b/packages/workflow-engine-redis/tsconfig.spec.json @@ -0,0 +1,8 @@ +{ + "extends": "./tsconfig.json", + "include": ["src", "integration-tests"], + "exclude": ["node_modules", "dist"], + "compilerOptions": { + "sourceMap": true + } +} diff --git a/integration-tests/plugins/__tests__/workflows/utils/composer/compose.ts b/packages/workflows-sdk/src/helper/__tests__/compose.ts similarity index 99% rename from integration-tests/plugins/__tests__/workflows/utils/composer/compose.ts rename to packages/workflows-sdk/src/helper/__tests__/compose.ts index 9fdedc22c24b0..b0c594596bc8a 100644 --- a/integration-tests/plugins/__tests__/workflows/utils/composer/compose.ts +++ b/packages/workflows-sdk/src/helper/__tests__/compose.ts @@ -6,7 +6,7 @@ import { parallelize, StepResponse, transform, -} from "@medusajs/workflows-sdk" +} from "../.." 
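
The `RedisDistributedTransactionStorage` above models retries, step timeouts, and transaction timeouts as delayed BullMQ jobs with deterministic job ids, so a pending timeout can be cancelled once the step or transaction finishes. A minimal standalone sketch of that pattern — the connection URL, queue name, and worker handler body are illustrative assumptions, not values taken from this diff:

```ts
import { Queue, Worker } from "bullmq"
import Redis from "ioredis"

async function main() {
  // Assumed local Redis; in the module the connections come from the redis loader.
  const connection = new Redis("redis://localhost:6379", {
    maxRetriesPerRequest: null, // required for BullMQ worker connections
  })

  const queue = new Queue("medusa-workflows", { connection })

  // Deterministic job id: type:workflowId:transactionId:stepId (mirrors getJobId()).
  const jobId = ["step_timeout", "workflow_1", "trx_1", "step_1"].join(":")

  // Schedule a step timeout to fire 10 seconds from now.
  await queue.add(
    "step_timeout",
    { workflowId: "workflow_1", transactionId: "trx_1", stepId: "step_1" },
    { delay: 10_000, jobId, removeOnComplete: true }
  )

  // Clearing the timeout later: drop the job only if it has not started yet.
  const job = await queue.getJob(jobId)
  if (job && job.attemptsStarted === 0) {
    await job.remove()
  }

  // Each instance runs a worker; when a job fires, the storage re-executes the
  // transaction (the module calls workflowOrchestratorService.run(workflowId, { transactionId })).
  new Worker(
    "medusa-workflows",
    async (job) => {
      console.log(`${job.name} fired for`, job.data)
    },
    { connection }
  )
}

main().catch(console.error)
```

Deriving the job id from the job type, workflow id, transaction id, and step id is what lets `clearStepTimeout` and friends find and remove the exact pending job without tracking handles elsewhere.
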
jest.setTimeout(30000) diff --git a/packages/workflows-sdk/src/index.ts b/packages/workflows-sdk/src/index.ts index 9c27d4e26a115..cec02f1e03298 100644 --- a/packages/workflows-sdk/src/index.ts +++ b/packages/workflows-sdk/src/index.ts @@ -1,4 +1,5 @@ export * from "./helper" export * from "./medusa-workflow" +export * as WorkflowOrchestratorTypes from "./types" export * from "./utils/composer" export * as Composer from "./utils/composer" diff --git a/packages/workflows-sdk/src/types/common.ts b/packages/workflows-sdk/src/types/common.ts new file mode 100644 index 0000000000000..f3a81e72713fa --- /dev/null +++ b/packages/workflows-sdk/src/types/common.ts @@ -0,0 +1,21 @@ +import { BaseFilterable } from "@medusajs/types" + +export interface WorkflowExecutionDTO { + id: string + workflow_id: string + transaction_id: string + execution: string + context: string + state: any + created_at: Date + updated_at: Date + deleted_at: Date +} + +export interface FilterableWorkflowExecutionProps + extends BaseFilterable { + id?: string[] + workflow_id?: string[] + transaction_id?: string[] + state?: any[] +} diff --git a/packages/workflows-sdk/src/types/index.ts b/packages/workflows-sdk/src/types/index.ts new file mode 100644 index 0000000000000..0c73656566caa --- /dev/null +++ b/packages/workflows-sdk/src/types/index.ts @@ -0,0 +1,3 @@ +export * from "./common" +export * from "./mutations" +export * from "./service" diff --git a/packages/workflows-sdk/src/types/mutations.ts b/packages/workflows-sdk/src/types/mutations.ts new file mode 100644 index 0000000000000..ef3234143e511 --- /dev/null +++ b/packages/workflows-sdk/src/types/mutations.ts @@ -0,0 +1,7 @@ +export interface UpsertWorkflowExecutionDTO { + workflow_id: string + transaction_id: string + execution: Record + context: Record + state: any +} diff --git a/packages/workflows-sdk/src/types/service.ts b/packages/workflows-sdk/src/types/service.ts new file mode 100644 index 0000000000000..ed055e39e672f --- /dev/null +++ b/packages/workflows-sdk/src/types/service.ts @@ -0,0 +1,116 @@ +import { + ContainerLike, + Context, + FindConfig, + IModuleService, +} from "@medusajs/types" +import { ReturnWorkflow, UnwrapWorkflowInputDataType } from "../utils/composer" +import { + FilterableWorkflowExecutionProps, + WorkflowExecutionDTO, +} from "./common" + +type FlowRunOptions = { + input?: TData + context?: Context + resultFrom?: string | string[] | Symbol + throwOnError?: boolean + events?: Record +} + +export interface WorkflowOrchestratorRunDTO + extends FlowRunOptions { + transactionId?: string + container?: ContainerLike +} + +export type IdempotencyKeyParts = { + workflowId: string + transactionId: string + stepId: string + action: "invoke" | "compensate" +} + +export interface IWorkflowsModuleService extends IModuleService { + listWorkflowExecution( + filters?: FilterableWorkflowExecutionProps, + config?: FindConfig, + sharedContext?: Context + ): Promise + + listAndCountWorkflowExecution( + filters?: FilterableWorkflowExecutionProps, + config?: FindConfig, + sharedContext?: Context + ): Promise<[WorkflowExecutionDTO[], number]> + + run< + TWorkflow extends ReturnWorkflow = ReturnWorkflow< + any, + any, + any + >, + TData = UnwrapWorkflowInputDataType + >( + workflowId: string, + options?: WorkflowOrchestratorRunDTO, + sharedContext?: Context + ): Promise<{ + errors: Error[] + transaction: object + result: any + acknowledgement: object + }> + + getRunningTransaction( + workflowId: string, + transactionId: string, + options?: Record, + sharedContext?: 
Context + ): Promise + + setStepSuccess( + { + idempotencyKey, + stepResponse, + options, + }: { + idempotencyKey: string | IdempotencyKeyParts + stepResponse: unknown + options?: Record + }, + sharedContext?: Context + ) + + setStepFailure( + { + idempotencyKey, + stepResponse, + options, + }: { + idempotencyKey: string | object + stepResponse: unknown + options?: Record + }, + sharedContext?: Context + ) + + subscribe( + args: { + workflowId: string + transactionId?: string + subscriber: Function + subscriberId?: string + }, + sharedContext?: Context + ): Promise + + unsubscribe( + args: { + workflowId: string + transactionId?: string + subscriberOrId: string | Function + }, + sharedContext?: Context + ) +} diff --git a/packages/workflows-sdk/src/utils/_playground.ts b/packages/workflows-sdk/src/utils/_playground.ts index 6bc33ea235c5e..8224af5046048 100644 --- a/packages/workflows-sdk/src/utils/_playground.ts +++ b/packages/workflows-sdk/src/utils/_playground.ts @@ -26,3 +26,16 @@ workflow() .then((res) => { console.log(res.result) // result: { step2: { test: "test", test2: "step1" } } }) + +/*type type0 = typeof workflow extends ReturnWorkflow + ? T + : never + +function run< + TWorkflow extends ReturnWorkflow, + TData = TWorkflow extends ReturnWorkflow + ? T + : never +>(name: string, options: FlowRunOptions) {} + +const test = run("workflow", { input: "string" })*/ diff --git a/packages/workflows-sdk/src/utils/composer/create-step.ts b/packages/workflows-sdk/src/utils/composer/create-step.ts index 9a7edabd6f422..0cd31423520f5 100644 --- a/packages/workflows-sdk/src/utils/composer/create-step.ts +++ b/packages/workflows-sdk/src/utils/composer/create-step.ts @@ -1,4 +1,11 @@ -import { resolveValue, StepResponse } from "./helpers" +import { + TransactionStepsDefinition, + WorkflowManager, +} from "@medusajs/orchestration" +import { OrchestrationUtils, isString } from "@medusajs/utils" +import { ulid } from "ulid" +import { StepResponse, resolveValue } from "./helpers" +import { proxify } from "./helpers/proxy" import { CreateWorkflowComposerContext, StepExecutionContext, @@ -6,9 +13,6 @@ import { StepFunctionResult, WorkflowData, } from "./type" -import { proxify } from "./helpers/proxy" -import { TransactionStepsDefinition } from "@medusajs/orchestration" -import { isString, OrchestrationUtils } from "@medusajs/utils" /** * The type of invocation function passed to a step. @@ -166,19 +170,37 @@ function applyStep< : undefined, } - stepConfig!.noCompensation = !compensateFn + stepConfig.uuid = ulid() + stepConfig.noCompensation = !compensateFn this.flow.addAction(stepName, stepConfig) - this.handlers.set(stepName, handler) + + if (!this.handlers.has(stepName)) { + this.handlers.set(stepName, handler) + } const ret = { __type: OrchestrationUtils.SymbolWorkflowStep, __step__: stepName, - config: (config: Pick) => { - this.flow.replaceAction(stepName, stepName, { + config: ( + localConfig: { name?: string } & Omit< + TransactionStepsDefinition, + "next" | "uuid" | "action" + > + ) => { + const newStepName = localConfig.name ?? stepName + + delete localConfig.name + + this.handlers.set(newStepName, handler) + + this.flow.replaceAction(stepConfig.uuid!, newStepName, { ...stepConfig, - ...config, + ...localConfig, }) + + WorkflowManager.update(this.workflowId, this.flow, this.handlers) + return proxify(ret) }, } @@ -241,11 +263,14 @@ export function createStep< TInvokeResultCompensateInput >( /** - * The name of the step or its configuration (currently support maxRetries). 
+ * The name of the step or its configuration. */ nameOrConfig: | string - | ({ name: string } & Pick), + | ({ name: string } & Omit< + TransactionStepsDefinition, + "next" | "uuid" | "action" + >), /** * An invocation function that will be executed when the workflow is executed. The function must return an instance of {@link StepResponse}. The constructor of {@link StepResponse} * accepts the output of the step as a first argument, and optionally as a second argument the data to be passed to the compensation function as a parameter. diff --git a/packages/workflows-sdk/src/utils/composer/create-workflow.ts b/packages/workflows-sdk/src/utils/composer/create-workflow.ts index 945dd5cf33baf..c9f6b669641af 100644 --- a/packages/workflows-sdk/src/utils/composer/create-workflow.ts +++ b/packages/workflows-sdk/src/utils/composer/create-workflow.ts @@ -5,7 +5,7 @@ import { WorkflowManager, } from "@medusajs/orchestration" import { LoadedModule, MedusaContainer } from "@medusajs/types" -import { OrchestrationUtils } from "@medusajs/utils" +import { isString, OrchestrationUtils } from "@medusajs/utils" import { ExportedWorkflow, exportWorkflow } from "../../helper" import { proxify } from "./helpers/proxy" import { @@ -63,7 +63,11 @@ global[OrchestrationUtils.SymbolMedusaWorkflowComposerContext] = null * } * ``` */ -type ReturnWorkflow> = { +export type ReturnWorkflow< + TData, + TResult, + THooks extends Record +> = { ( container?: LoadedModule[] | MedusaContainer ): Omit< @@ -73,8 +77,20 @@ type ReturnWorkflow> = { ExportedWorkflow } & THooks & { getName: () => string + } & { + config: (config: TransactionModelOptions) => void } +/** + * Extract the raw type of the expected input data of a workflow. + * + * @example + * type WorkflowInputData = UnwrapWorkflowInputDataType + */ +export type UnwrapWorkflowInputDataType< + T extends ReturnWorkflow +> = T extends ReturnWorkflow ? TData : never + /** * This function creates a workflow with the provided name and a constructor function. * The constructor function builds the workflow from steps created by the {@link createStep} function. @@ -136,9 +152,9 @@ export function createWorkflow< THooks extends Record = Record >( /** - * The name of the workflow. + * The name of the workflow or its configuration. */ - name: string, + nameOrConfig: string | ({ name: string } & TransactionModelOptions), /** * The constructor function that is executed when the `run` method in {@link ReturnWorkflow} is used. * The function can't be an arrow function or an asynchronus function. It also can't directly manipulate data. @@ -151,9 +167,11 @@ export function createWorkflow< [K in keyof TResult]: | WorkflowData | WorkflowDataProperties - }, - options?: TransactionModelOptions + } ): ReturnWorkflow { + const name = isString(nameOrConfig) ? nameOrConfig : nameOrConfig.name + const options = isString(nameOrConfig) ? {} : nameOrConfig + const handlers: WorkflowHandler = new Map() if (WorkflowManager.getWorkflow(name)) { @@ -185,13 +203,17 @@ export function createWorkflow< const inputPlaceHolder = proxify({ __type: OrchestrationUtils.SymbolInputReference, __step__: "", + config: () => { + // TODO: config default value? 
+ throw new Error("Config is not available for the input object.") + }, }) const returnedStep = composer.apply(context, [inputPlaceHolder]) delete global[OrchestrationUtils.SymbolMedusaWorkflowComposerContext] - WorkflowManager.update(name, context.flow, handlers) + WorkflowManager.update(name, context.flow, handlers, options) const workflow = exportWorkflow( name, @@ -206,8 +228,12 @@ export function createWorkflow< container?: LoadedModule[] | MedusaContainer ) => { const workflow_ = workflow(container) + const expandedFlow: any = workflow_ + expandedFlow.config = (config) => { + workflow_.setOptions(config) + } - return workflow_ + return expandedFlow } let shouldRegisterHookHandler = true diff --git a/packages/workflows-sdk/src/utils/composer/type.ts b/packages/workflows-sdk/src/utils/composer/type.ts index 3e05390c50c5b..2ef18de2472d8 100644 --- a/packages/workflows-sdk/src/utils/composer/type.ts +++ b/packages/workflows-sdk/src/utils/composer/type.ts @@ -37,13 +37,8 @@ export type StepFunction = (keyof TInput extends [] }) & WorkflowDataProperties<{ [K in keyof TOutput]: TOutput[K] - }> & { - config( - config: Pick - ): WorkflowData<{ - [K in keyof TOutput]: TOutput[K] - }> - } & WorkflowDataProperties<{ + }> & + WorkflowDataProperties<{ [K in keyof TOutput]: TOutput[K] }> @@ -62,7 +57,22 @@ export type WorkflowData = (T extends object [Key in keyof T]: WorkflowData } : WorkflowDataProperties) & - WorkflowDataProperties + WorkflowDataProperties & { + config( + config: { name?: string } & Omit< + TransactionStepsDefinition, + "next" | "uuid" | "action" + > + ): T extends object + ? WorkflowData< + T extends object + ? { + [K in keyof T]: T[K] + } + : T + > + : T + } export type CreateWorkflowComposerContext = { hooks_: string[] diff --git a/www/apps/docs/content/plugins/cms/strapi.md b/www/apps/docs/content/plugins/cms/strapi.md index 69b3990ceb314..17d6786e5a1fe 100644 --- a/www/apps/docs/content/plugins/cms/strapi.md +++ b/www/apps/docs/content/plugins/cms/strapi.md @@ -5,198 +5,238 @@ addHowToData: true # Strapi -In this document, you’ll learn how to integrate Strapi with Medusa to add rich Content Management System (CMS) functionalities. +In this document, you’ll learn how to integrate Medusa with Strapi. -:::info +:::note -This plugin is a [community plugin](https://github.com/Deathwish98/medusa-plugin-strapi) and is not managed by the official Medusa team. At the moment, it supports v4 of Strapi. +This plugin is a [community plugin](https://github.com/SGFGOV/medusa-strapi-repo) and is not managed by the official Medusa team. It supports v4 of Strapi. If you run into any issues, please refer to the [repository of the community plugin](https://github.com/SGFGOV/medusa-strapi-repo). ::: ## Overview -[Strapi](https://strapi.io/) is an open source headless CMS service that allows developers to have complete control over their content models. It can be integrated into many other frameworks, including Medusa. +[Strapi](https://strapi.io/) is an open source headless CMS service that allows developers to have complete control over their content models. It can be integrated into many other frameworks, including Medusa. -By integrating Strapi to Medusa, you can benefit from powerful features in your ecommerce store including detailed product CMS details, [two-way sync](#test-two-way-sync), an easy-to-use interface to use for static content and pages, and much more. 
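
Read together, the composer changes above mean that both a step and a workflow can be declared from a configuration object, that an individual step usage can be re-configured (and optionally renamed) via `.config()`, and that `UnwrapWorkflowInputDataType` extracts a workflow's expected input type. The following is a minimal sketch of that authoring surface; the names, retry counts, and step bodies are illustrative assumptions rather than code from this PR, and it assumes `UnwrapWorkflowInputDataType` is re-exported from the package root alongside the composer.

```ts
import {
  createStep,
  createWorkflow,
  StepResponse,
  UnwrapWorkflowInputDataType,
  WorkflowData,
} from "@medusajs/workflows-sdk"

type Input = { message: string }

// A step defined with a configuration object instead of a plain name.
// `maxRetries` comes from TransactionStepsDefinition; other fields are omitted.
const formatStep = createStep(
  { name: "format-message", maxRetries: 2 },
  async (input: Input) => {
    return new StepResponse({ formatted: `[log] ${input.message}` })
  }
)

const persistStep = createStep(
  "persist-message",
  async (input: { formatted: string }) => {
    // A real implementation would write somewhere; this sketch just echoes.
    return new StepResponse({ saved: input.formatted })
  }
)

// The workflow itself can now also receive a configuration object
// ({ name } & TransactionModelOptions) rather than only a name string.
export const logWorkflow = createWorkflow(
  { name: "log-workflow" },
  function (input: WorkflowData<Input>) {
    const formatted = formatStep(input)

    // Per-usage overrides: `.config()` accepts TransactionStepsDefinition
    // fields (minus next/uuid/action) plus an optional `name`, which is mainly
    // useful when the same step is reused within one workflow.
    return persistStep(formatted).config({
      name: "persist-formatted-message",
      maxRetries: 5,
    })
  }
)

// Extract the raw input type the workflow expects.
export type LogWorkflowInput = UnwrapWorkflowInputDataType<typeof logWorkflow>
```
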
+By integrating Strapi into Medusa, you can benefit from powerful features in your ecommerce store, including detailed product CMS details, two-way sync, an easy-to-use interface to use for static content and pages, and much more. --- ## Prerequisites -### Medusa CLI - -[Medusa’s CLI tool](../../cli/reference.mdx#how-to-install-cli-tool) is required to set up a new Medusa backend. - -### Redis - -Redis is required for the Strapi plugin to work as expected on your Medusa backend. If you don’t have it installed, you can learn [how to install it in this documentation](../../development/backend/prepare-environment.mdx#redis). - ---- - -## Create Strapi Project - -The first step is to create a Strapi project using the Medusa template: - -```bash -npx create-strapi-app strapi-medusa --template shahednasser/strapi-medusa-template -``` - -This creates the Strapi project in the directory `strapi-medusa`. - -Once the installation is finished, the Strapi development backend will run on `localhost:1337`. A new page will also open in your default browser to create a new admin user and log in. - -![Create User Form in Strapi](https://res.cloudinary.com/dza7lstvk/image/upload/v1668001083/Medusa%20Docs/Strapi/9pFE1Ij_h2dicv.png) - -Once you log in, you can access the Strapi dashboard. +### Medusa Components -### Create a Strapi User +This guide assumes you already have a Medusa backend installed. If not, you can learn how to install [it here](../../create-medusa-app.mdx). -The Strapi plugin in Medusa requires the credentials of a Strapi user. To create a new user, go to Content Manager, then choose User under Collection Types. +An event bus module must be installed and configured on your Medusa backend to sync data from Medusa to Strapi. You can install the [Redis event bus module](../../development/events/modules/redis.md). -![Showing the users under Content Manager](https://res.cloudinary.com/dza7lstvk/image/upload/v1668001096/Medusa%20Docs/Strapi/YyGJPUf_mr5sx7.png) +### Strapi Database -Click on the Create new entry button at the top right. This opens a new form to enter the user’s details. - -![Create User Form on Strapi](https://res.cloudinary.com/dza7lstvk/image/upload/v1668001105/Medusa%20Docs/Strapi/mdMhSlV_vy7ygv.png) - -Enter the user’s username, email, and password. Once you’re done, click on the Save button at the top right. +You must create a PostgreSQL database to be used with Strapi. You can refer to [PostgreSQL’s documentation](https://www.postgresql.org/docs/current/sql-createdatabase.html) for more details. --- -## Modify Permissions - -By default, created users have the “Authenticated” role. Before you start using the Strapi plugin on your Medusa backend, you must modify this role’s permissions to allow making changes to Medusa’s models in Strapi. - -On your Strapi dashboard, go to Settings → Roles → Authenticated. Then, under the Permissions section, expand the accordion of each content model type and check the Select All checkbox. +## Setup Strapi Project -![An example of modifying permissions on the Product content type](https://res.cloudinary.com/dza7lstvk/image/upload/v1668001116/Medusa%20Docs/Strapi/QgckXqS_wlyxe8.png) +In this section, you’ll setup a Strapi project with a Medusa plugin installed. To do that: -Once you’re done, click the Save button at the top right. - ---- - -## Create Medusa Backend - -:::note - -You can use the Strapi plugin on an existing Medusa backend, however, existing data (such as existing products) will not be imported. Only newer data will be imported. 
- -::: - -To create your Medusa backend, run the following command: +1\. Clone the Strapi project repository: ```bash -npx @medusajs/medusa-cli@latest new medusa-backend +git clone https://github.com/SGFGOV/medusa-strapi-repo.git ``` -### Configure your Backend +2\. Change to the `medusa-strapi-repo/packages/medusa-strapi` directory. -Once the command is done executing, change to the newly created `medusa-backend` directory: +3\. Copy the `.env.test` file to a new `.env` file. -```bash -cd medusa-backend -``` +### Change Strapi Environment Variables -You must then configure your backend to: - -- Connect to a PostgreSQL database, as explained [here](../../references/medusa_config/interfaces/medusa_config.ConfigModule.mdx#database-configuration) -- Install and configure an event-bus module, as explained [here](../../references/medusa_config/interfaces/medusa_config.ConfigModule.mdx#recommended-event-bus-modules) - -### Run Migrations - -After configuring the connection to the database, you must run migrations to add the necessary database schema definitions in your database. To do that, run the following command in the `medusa-backend` directory: +In the `.env` file, change the following environment variables: ```bash -npx @medusajs/medusa-cli@latest migrations run +# IMPORTANT: Change supersecret with random and unique strings +APP_KEYS=supersecret +API_TOKEN_SALT=supersecret +ADMIN_JWT_SECRET=supersecret +JWT_SECRET=supersecret + +MEDUSA_STRAPI_SECRET=supersecret + +MEDUSA_BACKEND_URL=http://localhost:9000 +MEDUSA_BACKEND_ADMIN=http://localhost:7001 + +SUPERUSER_EMAIL=support@medusa-commerce.com +SUPERUSER_USERNAME=SuperUser +SUPERUSER_PASSWORD=MedusaStrapi1 + +DATABASE_HOST=localhost +DATABASE_PORT=5432 +DATABASE_NAME=postgres_strapi +DATABASE_USERNAME=postgres +DATABASE_PASSWORD= +DATABASE_SSL=false +DATABASE_SCHEMA=public ``` -You can optionally seed your database with demo data by running the `seed` command: +1. Change `APP_KEYS`, `API_TOKEN_SALT`, `JWT_SECRET`, and `ADMIN_JWT_SECRET` to a random and unique string. These keys are used by Strapi to sign session cookies, generate API tokens, and more. +2. Change `MEDUSA_STRAPI_SECRET` to a random unique string. The value of this environment variable is used later in your Medusa configurations. +3. Change `MEDUSA_BACKEND_URL` to the URL of your Medusa backend. If you’re running it locally, it should be `http://localhost:9000`. +4. Change `MEDUSA_BACKEND_ADMIN` to the URL of your Medusa admin. If you’re running it locally, it should be `http://localhost:7001`. +5. Change the following environment variables to define the Strapi super user: + 1. `SUPERUSER_EMAIL`: the super user’s email. By default, it’s `support@medusa-commerce.com`. + 2. `SUPERUSER_USERNAME`: the super user’s username. By default, it’s `SuperUser`. + 3. `SUPERUSER_PASSWORD`: the super user’s password. By default, it’s `MedusaStrapi1`. + 4. `SUPERUSER_FIRSTNAME`: the super user’s first name. By default, it’s `Medusa`. + 5. `SUPERUSER_LASTNAME`: the super user’s last name. By default, it’s `Commerce`. +6. Change the database environment variables based on your database configurations. All database environment variables start with `DATABASE_`. +7. You can optionally configure other services, such as S3 or MeiliSearch, as explained [here](https://github.com/SGFGOV/medusa-strapi-repo/tree/development/packages/medusa-strapi#media-bucket). 
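
The first item in the list above asks for random, unique strings for the Strapi secrets. Any generator works; as a convenience (not part of the plugin), a small one-off Node script like the following sketch prints values you can paste into `.env` — the file name and key list are just assumptions for illustration.

```ts
// generate-secrets.ts — run with `npx ts-node generate-secrets.ts`
import { randomBytes } from "crypto"

const keys = [
  "APP_KEYS",
  "API_TOKEN_SALT",
  "ADMIN_JWT_SECRET",
  "JWT_SECRET",
  "MEDUSA_STRAPI_SECRET",
]

for (const key of keys) {
  // 32 random bytes, base64-encoded, per secret
  console.log(`${key}=${randomBytes(32).toString("base64")}`)
}
```
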
-```bash -npx @medusajs/medusa-cli@latest seed --seed-file=data/seed.json +### Build Packages + +Once you’re done, install and build packages in the root `medusa-strapi-repo` directory: + +```bash npm2yarn +# Install packages +npm install +# Build packages +npm run build ``` --- -## Install the Strapi Plugin +## Install Plugin in Medusa In the directory of your Medusa backend, run the following command to install the Strapi plugin: ```bash npm2yarn -npm install medusa-plugin-strapi +npm install medusa-plugin-strapi-ts ``` -Then, add the following environment variables: +### Configure Plugin -```bash -STRAPI_USER= -STRAPI_PASSWORD= -STRAPI_PROTOCOL=http # Optional -STRAPI_URL= # Optional -STRAPI_PORT= # Optional -``` - -Where: - -- `` is either the email address or username of the user you created in the previous step. -- `` is the password of the user you created in the previous step. -- `` is the protocol of your Strapi backend. If you’re using a local Strapi backend, set this to `http`. The default value is `https`. -- `` is the URL of your Strapi backend. By default, the URL is `localhost`. -- `` is the port the Strapi backend runs on. By default, the port is `1337`. - -Finally, open `medusa-config.js` and add the following new item to the `plugins` array: +Next, add the plugin to the `plugins` array in `medusa-config.js`: -```jsx title="medusa-config.js" +```js title="medusa-config.js" const plugins = [ // ... { - resolve: `medusa-plugin-strapi`, + resolve: "medusa-plugin-strapi-ts", options: { - strapi_medusa_user: process.env.STRAPI_USER, - strapi_medusa_password: process.env.STRAPI_PASSWORD, - strapi_url: process.env.STRAPI_URL, // optional - strapi_port: process.env.STRAPI_PORT, // optional - strapi_protocol: process.env.STRAPI_PROTOCOL, // optional + strapi_protocol: process.env.STRAPI_PROTOCOL, + strapi_host: process.env.STRAPI_SERVER_HOSTNAME, + strapi_port: process.env.STRAPI_PORT, + strapi_secret: process.env.STRAPI_SECRET, + strapi_default_user: { + username: process.env.STRAPI_MEDUSA_USER, + password: process.env.STRAPI_MEDUSA_PASSWORD, + email: process.env.STRAPI_MEDUSA_EMAIL, + confirmed: true, + blocked: false, + provider: "local", + }, + strapi_admin: { + username: process.env.STRAPI_SUPER_USERNAME, + password: process.env.STRAPI_SUPER_PASSWORD, + email: process.env.STRAPI_SUPER_USER_EMAIL, + }, + auto_start: true, }, }, ] ``` +The plugin accepts the following options: + +1. `strapi_protocol`: The protocol of the Strapi server. If running locally, it should be `http`. Otherwise, it should be `https`. +2. `strapi_host`: the domain of the Strapi server. If running locally, use `127.0.0.1`. +3. `strapi_port`: the port that the Strapi server is running on, if any. If running locally, use `1337`. +4. `strapi_secret`: the same secret used for the `MEDUSA_STRAPI_SECRET` environment variable in the Strapi project. +5. `strapi_default_user`: The details of an existing user or a user to create in the Strapi backend that is used to update data in Strapi. It’s an object accepting the following properties: + 1. `username`: The user’s username. + 2. `password`: The user’s password. + 3. `email`: The user’s email. + 4. `confirmed`: Whether the user is confirmed. + 5. `blocked`: Whether the user is blocked. + 6. `provider`: The name of the authentication provider. +6. `strapi_admin`: The details of the super admin. The super admin is only used to create the default user if it doesn’t exist. It’s an object accepting the following properties: + 1. `username`: the super admin’s username. 
Its value is the same as that of the `SUPERUSER_USERNAME` environment variable in the Strapi project. + 2. `password`: the super admin’s password. Its value is the same as that of the `SUPERUSER_PASSWORD` environment variable in the Strapi project. + 3. `email`: the super admin’s email. Its value is the same as that of the `SUPERUSER_EMAIL` environment variable in the Strapi project. +7. `auto_start`: Whether to initialize the Strapi connection when Medusa starts. Disabling this may cause issues when syncing data from Medusa to Strapi. + +Refer to the [plugin’s README](https://github.com/SGFGOV/medusa-strapi-repo/blob/development/packages/medusa-plugin-strapi-ts/README.md) for more options. + +Make sure to add the necessary environment variables for the above options in `.env`: + +```bash +STRAPI_PROTOCOL=http +STRAPI_SERVER_HOSTNAME=127.0.0.1 +STRAPI_PORT=1337 +STRAPI_SECRET=supersecret + +STRAPI_MEDUSA_USER=medusa +STRAPI_MEDUSA_PASSWORD=supersecret +STRAPI_MEDUSA_EMAIL=admin@medusa-test.com + +STRAPI_SUPER_USERNAME=SuperUser +STRAPI_SUPER_PASSWORD=MedusaStrapi1 +STRAPI_SUPER_USER_EMAIL=support@medusa-commerce.com +``` + --- -## Run Medusa Backend +## Test Integration -Make sure the Strapi backend is still running. If not, you can run the following command to run the Strapi backend in the directory of the Strapi project: +To test the integration between Medusa and Strapi, first, start the Strapi server by running the following command in the `medusa-strapi-repo/packages/medusa-strapi` directory: -```bash npm2yarn +```bash title="medusa-strapi-repo/packages/medusa-strapi" npm2yarn npm run develop ``` -Then, in the directory of your Medusa backend, run the following command to start the Medusa backend: +Then, start the Medusa backend by running the following command in the root directory of your Medusa backend: -```bash npm2yarn +```bash title="Medusa Backend" npm2yarn npx medusa develop ``` -Once you start your Medusa backend, if you ran the `--seed` command when you created your Medusa backend, you’ll see that `product.created` events have been triggered along with similar events. This will update Strapi with the products you seeded. +If the connection to Strapi is successful, you’ll find the following message logged in your Medusa backend with no errors: ---- +```bash +info: Checking Strapi Health ,data: +debug: check-url: http://127.0.0.1:1337/_health ,data: +info: Strapi Subscriber Initialized +``` + +### Two-Way Syncing -## Test Two-Way Sync +To test syncing data from Medusa to Strapi, try creating or updating a product either using the Medusa admin or the [REST APIs](https://docs.medusajs.com/api/admin#products_postproducts). This triggers the associated event in Medusa, which makes the updates in Strapi. -This plugin ensures a two-way sync between the Medusa backend and the Strapi backend. So, if you update data on Strapi, it will be reflected on your Medusa backend, and vice-versa. +:::tip -### Update Products on Strapi +If data isn’t synced with Strapi when making updates in Medusa, make sure that you’ve installed the event bus module as explained in the [Prerequisites section](#medusa-components) and that the events are triggered. + +::: + +To test syncing data from Strapi to Medusa, try updating one of the products in the Strapi dashboard. If you check the product’s details in Medusa, they’re updated as expected. + +:::tip + +Data is only synced to Strapi once you create or update them. 
So, if you have products in your Medusa backend from before integrating Strapi, they won’t be available by default in Strapi. You’ll have to make updates to them, which triggers the update in Strapi. + +::: -Try updating any products on Strapi by going to Content Manager → Products and choosing a product from the list. Then, make changes to the product and click Save. If you view the products on your backend now, either using the [REST APIs](https://docs.medusajs.com/api/admin#products_getproducts) or using [the Medusa Admin](../../user-guide/products/index.mdx), you’ll see that the product has been updated. +### Synced Entities -### Update Products on Medusa +The Medusa and Strapi plugins support syncing the following Medusa entities: -If you try to update products on Medusa either using the [REST APIs](https://docs.medusajs.com/api/admin#products_postproductsproduct) or using [the Medusa Admin](../../user-guide/products/manage.mdx), you’ll see that the product is also updated on Strapi. +- `Region` +- `Product` +- `ProductVariant` +- `ProductCollection` +- `ProductCategory` --- -## See Also +## Learn More -- [Deploy the Medusa backend](../../deployments/server/index.mdx) -- [Create your own plugin](../../development/plugins/create.mdx) +To learn more about the integration between Medusa and Strapi, refer to the [community plugin](https://github.com/SGFGOV/medusa-strapi-repo). diff --git a/yarn.lock b/yarn.lock index ee39d59dc07a7..8131536a76ed6 100644 --- a/yarn.lock +++ b/yarn.lock @@ -6060,14 +6060,44 @@ __metadata: languageName: node linkType: hard -"@hapi/hoek@npm:^9.0.0": +"@hapi/boom@npm:^10.0.1": + version: 10.0.1 + resolution: "@hapi/boom@npm:10.0.1" + dependencies: + "@hapi/hoek": ^11.0.2 + checksum: e4ae8a69bb67c5687320d320a0706ac66e797a659c19fb1c9b909eaefe3b41780e4ecd4382de1297b10c33e9db81f79667324576b9153f57b0cf701293b908d0 + languageName: node + linkType: hard + +"@hapi/bourne@npm:^3.0.0": + version: 3.0.0 + resolution: "@hapi/bourne@npm:3.0.0" + checksum: 2e2df62f6bc6f32b980ba5bbdc09200c93c55c8306399ec0f2781da088a82aab699498c89fe94fec4acf770210f9aee28c75bfc2f04044849ac01b034134e717 + languageName: node + linkType: hard + +"@hapi/hoek@npm:^10.0.1": + version: 10.0.1 + resolution: "@hapi/hoek@npm:10.0.1" + checksum: 320d5dc7a4070fa29e6344a3af9e44854980c6606848f7b7f59715174880cc09a1fe1e8adf44cf887100bd8d6a8664e9dc415986b30dc91df13455f7114de549 + languageName: node + linkType: hard + +"@hapi/hoek@npm:^11.0.2": + version: 11.0.4 + resolution: "@hapi/hoek@npm:11.0.4" + checksum: 3c0e487824daaf3af4c29e46fd57b0c5801ce9164fef2417c70e271cd970e13cc542b196f70ba1cfc9ef944eed825fcac261085ab5e2928c6017428bf576b363 + languageName: node + linkType: hard + +"@hapi/hoek@npm:^9.0.0, @hapi/hoek@npm:^9.3.0": version: 9.3.0 resolution: "@hapi/hoek@npm:9.3.0" checksum: a096063805051fb8bba4c947e293c664b05a32b47e13bc654c0dd43813a1cec993bdd8f29ceb838020299e1d0f89f68dc0d62a603c13c9cc8541963f0beca055 languageName: node linkType: hard -"@hapi/topo@npm:^5.0.0": +"@hapi/topo@npm:^5.0.0, @hapi/topo@npm:^5.1.0": version: 5.1.0 resolution: "@hapi/topo@npm:5.1.0" dependencies: @@ -6076,6 +6106,17 @@ __metadata: languageName: node linkType: hard +"@hapi/wreck@npm:^18.0.0": + version: 18.0.1 + resolution: "@hapi/wreck@npm:18.0.1" + dependencies: + "@hapi/boom": ^10.0.1 + "@hapi/bourne": ^3.0.0 + "@hapi/hoek": ^11.0.2 + checksum: 46b1b1f750a66c4724964eb6d9192d1d19cfa45e602386aae76f52e3b423c9ae14a03a0f0e9f962e7d973708e1b0b6ab42d2ae77539a691fa77a18c78ccf285c + languageName: node + linkType: hard + 
"@headlessui/react@npm:^1.7.18": version: 1.7.18 resolution: "@headlessui/react@npm:1.7.18" @@ -7854,9 +7895,12 @@ __metadata: cross-env: ^5.2.1 dotenv: ^16.1.4 jest: ^29.6.3 + jsonwebtoken: ^9.0.2 knex: 2.4.2 medusa-test-utils: ^1.1.40 rimraf: ^3.0.2 + scrypt-kdf: ^2.0.1 + simple-oauth2: ^5.0.0 ts-jest: ^29.1.1 ts-node: ^10.9.1 tsc-alias: ^1.8.6 @@ -8698,7 +8742,61 @@ __metadata: languageName: unknown linkType: soft -"@medusajs/workflows-sdk@^0.1.1, @medusajs/workflows-sdk@workspace:packages/workflows-sdk": +"@medusajs/workflow-engine-inmemory@workspace:packages/workflow-engine-inmemory": + version: 0.0.0-use.local + resolution: "@medusajs/workflow-engine-inmemory@workspace:packages/workflow-engine-inmemory" + dependencies: + "@medusajs/modules-sdk": ^1.12.5 + "@medusajs/types": ^1.11.9 + "@medusajs/utils": ^1.11.2 + "@medusajs/workflows-sdk": ^0.1.0 + "@mikro-orm/cli": 5.9.7 + "@mikro-orm/core": 5.9.7 + "@mikro-orm/migrations": 5.9.7 + "@mikro-orm/postgresql": 5.9.7 + awilix: ^8.0.0 + cross-env: ^5.2.1 + dotenv: ^16.1.4 + jest: ^29.6.3 + knex: 2.4.2 + medusa-test-utils: ^1.1.40 + rimraf: ^3.0.2 + ts-jest: ^29.1.1 + ts-node: ^10.9.1 + tsc-alias: ^1.8.6 + typescript: ^5.1.6 + languageName: unknown + linkType: soft + +"@medusajs/workflow-engine-redis@workspace:packages/workflow-engine-redis": + version: 0.0.0-use.local + resolution: "@medusajs/workflow-engine-redis@workspace:packages/workflow-engine-redis" + dependencies: + "@medusajs/modules-sdk": ^1.12.5 + "@medusajs/types": ^1.11.9 + "@medusajs/utils": ^1.11.2 + "@medusajs/workflows-sdk": ^0.1.0 + "@mikro-orm/cli": 5.9.7 + "@mikro-orm/core": 5.9.7 + "@mikro-orm/migrations": 5.9.7 + "@mikro-orm/postgresql": 5.9.7 + awilix: ^8.0.0 + bullmq: ^5.1.3 + cross-env: ^5.2.1 + dotenv: ^16.1.4 + ioredis: ^5.3.2 + jest: ^29.6.3 + knex: 2.4.2 + medusa-test-utils: ^1.1.40 + rimraf: ^3.0.2 + ts-jest: ^29.1.1 + ts-node: ^10.9.1 + tsc-alias: ^1.8.6 + typescript: ^5.1.6 + languageName: unknown + linkType: soft + +"@medusajs/workflows-sdk@^0.1.0, @medusajs/workflows-sdk@^0.1.1, @medusajs/workflows-sdk@workspace:packages/workflows-sdk": version: 0.0.0-use.local resolution: "@medusajs/workflows-sdk@workspace:packages/workflows-sdk" dependencies: @@ -12137,7 +12235,7 @@ __metadata: languageName: node linkType: hard -"@sideway/address@npm:^4.1.3": +"@sideway/address@npm:^4.1.3, @sideway/address@npm:^4.1.4": version: 4.1.4 resolution: "@sideway/address@npm:4.1.4" dependencies: @@ -21777,6 +21875,23 @@ __metadata: languageName: node linkType: hard +"bullmq@npm:^5.1.3": + version: 5.1.3 + resolution: "bullmq@npm:5.1.3" + dependencies: + cron-parser: ^4.6.0 + glob: ^8.0.3 + ioredis: ^5.3.2 + lodash: ^4.17.21 + msgpackr: ^1.10.1 + node-abort-controller: ^3.1.1 + semver: ^7.5.4 + tslib: ^2.0.0 + uuid: ^9.0.0 + checksum: dc2177dfd736b2d008ccab1ba9f77f80cc730ce6197c9ffa0f37327e1cf34bd8b97d83ee9f9008253ef0c0854bbd04f8c925889a3370a0899e8f5c7a34fd3ab3 + languageName: node + linkType: hard + "bundle-name@npm:^3.0.0": version: 3.0.0 resolution: "bundle-name@npm:3.0.0" @@ -34937,6 +35052,19 @@ __metadata: languageName: node linkType: hard +"joi@npm:^17.6.4": + version: 17.12.0 + resolution: "joi@npm:17.12.0" + dependencies: + "@hapi/hoek": ^9.3.0 + "@hapi/topo": ^5.1.0 + "@sideway/address": ^4.1.4 + "@sideway/formula": ^3.0.1 + "@sideway/pinpoint": ^2.0.0 + checksum: 2378f4ec8de2bc12674ce3e6faac509f52ff4f734c67bf68c288816b20336d4e59433ea1c1e187f1009075c81ec5fa8b5061094feb37a855d6e3ee0cfcd79dd8 + languageName: node + linkType: hard + "join-component@npm:^1.1.0": version: 
1.1.0 resolution: "join-component@npm:1.1.0" @@ -35406,6 +35534,24 @@ __metadata: languageName: node linkType: hard +"jsonwebtoken@npm:^9.0.2": + version: 9.0.2 + resolution: "jsonwebtoken@npm:9.0.2" + dependencies: + jws: ^3.2.2 + lodash.includes: ^4.3.0 + lodash.isboolean: ^3.0.3 + lodash.isinteger: ^4.0.4 + lodash.isnumber: ^3.0.3 + lodash.isplainobject: ^4.0.6 + lodash.isstring: ^4.0.1 + lodash.once: ^4.0.0 + ms: ^2.1.1 + semver: ^7.5.4 + checksum: d287a29814895e866db2e5a0209ce730cbc158441a0e5a70d5e940eb0d28ab7498c6bf45029cc8b479639bca94056e9a7f254e2cdb92a2f5750c7f358657a131 + languageName: node + linkType: hard + "jsprim@npm:^1.2.2": version: 1.4.2 resolution: "jsprim@npm:1.4.2" @@ -38528,6 +38674,18 @@ __metadata: languageName: node linkType: hard +"msgpackr@npm:^1.10.1": + version: 1.10.1 + resolution: "msgpackr@npm:1.10.1" + dependencies: + msgpackr-extract: ^3.0.2 + dependenciesMeta: + msgpackr-extract: + optional: true + checksum: 2e6ed91af89ec15d1e5595c5b837a4adcbb185b0fbd4773d728ced89ab4abbdd3401f6777b193d487d9807e1cb0cf3da1ba9a0bd2d5a553e22355cea84a36bab + languageName: node + linkType: hard + "msgpackr@npm:^1.5.4, msgpackr@npm:^1.6.2": version: 1.9.5 resolution: "msgpackr@npm:1.9.5" @@ -38825,6 +38983,13 @@ __metadata: languageName: node linkType: hard +"node-abort-controller@npm:^3.1.1": + version: 3.1.1 + resolution: "node-abort-controller@npm:3.1.1" + checksum: f7ad0e7a8e33809d4f3a0d1d65036a711c39e9d23e0319d80ebe076b9a3b4432b4d6b86a7fab65521de3f6872ffed36fc35d1327487c48eb88c517803403eda3 + languageName: node + linkType: hard + "node-addon-api@npm:^4.3.0": version: 4.3.0 resolution: "node-addon-api@npm:4.3.0" @@ -45990,6 +46155,18 @@ __metadata: languageName: node linkType: hard +"simple-oauth2@npm:^5.0.0": + version: 5.0.0 + resolution: "simple-oauth2@npm:5.0.0" + dependencies: + "@hapi/hoek": ^10.0.1 + "@hapi/wreck": ^18.0.0 + debug: ^4.3.4 + joi: ^17.6.4 + checksum: 1cb5a4eb9022f656e1bb9a1f43d771dd058d4a4fa181b42d0e1e7ca7b5cfc42e35fad1c722be9bb6fa218398b3b0499010554a7367d2bd85eb9d7634f92546c1 + languageName: node + linkType: hard + "simple-string-table@npm:^1.0.0": version: 1.0.0 resolution: "simple-string-table@npm:1.0.0"