From 7b974a35b26fc94eb5755cc444651a2db821a3d8 Mon Sep 17 00:00:00 2001 From: JCSanPedro Date: Fri, 17 Jan 2025 12:11:04 +1300 Subject: [PATCH] TRN-808 Sylo Data Pallet (#917) * create cargo package for new sylo pallet * implement pallet functionality for registering resolvers * implement tests for registering resolvers * implement functionality for record validation * implement tests for creating validation records * setup benchmarking for sylo pallet * implement remaining sylo extrinsic benchmarks * modify on charge tx to force sylo fee swap for sylo pallet extrinsics * use a storage value for reserved sylo resolver method * fix mock in fee-control tests * add more comments to config * fix tests compiling * implement integration tests for sylo pallet fees * resize constants for sylo config * use EnsureOrigin and emit event on state change * avoid unnecessary clones in sylo pallet * ensure strings have upper bound in benchmark and add verification * refactor tests * fix benchmark verification * Update benchmarks for pallet-sylo on TRN-808-sylo-pallet * implement e2e tests to validate sylo gas costs * remove commented code * add more doc comments to sylo pallet * add tests for admin set calls * better error name and refactor to benchmark impl * use correct args for sylo weights * add more coverage for sylo fee edge cases and more e2e tests * Update benchmarks for pallet-sylo on TRN-808-sylo-pallet * fix fee-control and fee-proxy mocks * fix linter * remove unneeded imports * rename sylo-pallet -> sylo-data-verification-pallet * minor refactor --------- Co-authored-by: GitHub Action --- Cargo.lock | 32 + Cargo.toml | 1 + e2e/common/index.ts | 35 +- e2e/test/Doughnuts.test.ts | 2 +- e2e/test/SyloGasCosts.test.ts | 462 ++++++++ e2e/test/XRPL.test.ts | 7 +- pallet/common/src/test_utils.rs | 88 +- pallet/fee-control/Cargo.toml | 6 + pallet/fee-control/src/mock.rs | 10 + pallet/fee-proxy/Cargo.toml | 7 + pallet/fee-proxy/src/impls.rs | 206 +++- pallet/fee-proxy/src/lib.rs | 5 +- pallet/fee-proxy/src/mock.rs | 8 + pallet/sylo-data-verification/Cargo.toml | 48 + .../src/benchmarking.rs | 279 +++++ pallet/sylo-data-verification/src/lib.rs | 507 +++++++++ pallet/sylo-data-verification/src/mock.rs | 52 + pallet/sylo-data-verification/src/tests.rs | 1009 +++++++++++++++++ pallet/sylo-data-verification/src/types.rs | 101 ++ pallet/sylo-data-verification/src/weights.rs | 235 ++++ pallet/xrpl/src/lib.rs | 2 +- runtime/Cargo.toml | 4 + runtime/src/lib.rs | 22 + runtime/src/tests/mod.rs | 1 + runtime/src/tests/sylo_fees.rs | 286 +++++ runtime/src/weights/mod.rs | 1 + .../weights/pallet_sylo_data_verification.rs | 163 +++ 27 files changed, 3537 insertions(+), 42 deletions(-) create mode 100644 e2e/test/SyloGasCosts.test.ts create mode 100644 pallet/sylo-data-verification/Cargo.toml create mode 100644 pallet/sylo-data-verification/src/benchmarking.rs create mode 100644 pallet/sylo-data-verification/src/lib.rs create mode 100644 pallet/sylo-data-verification/src/mock.rs create mode 100644 pallet/sylo-data-verification/src/tests.rs create mode 100644 pallet/sylo-data-verification/src/types.rs create mode 100644 pallet/sylo-data-verification/src/weights.rs create mode 100644 runtime/src/tests/sylo_fees.rs create mode 100644 runtime/src/weights/pallet_sylo_data_verification.rs diff --git a/Cargo.lock b/Cargo.lock index ddc50f1a3..63c39fd43 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -5826,8 +5826,12 @@ dependencies = [ "pallet-evm", "pallet-fee-proxy", "pallet-futurepass", + "pallet-proxy", +
"pallet-sylo-data-verification", "pallet-timestamp", "pallet-transaction-payment", + "pallet-utility", + "pallet-xrpl", "parity-scale-codec", "precompile-utils", "scale-info", @@ -5854,8 +5858,12 @@ dependencies = [ "pallet-evm", "pallet-fee-control", "pallet-futurepass", + "pallet-proxy", + "pallet-sylo-data-verification", "pallet-timestamp", "pallet-transaction-payment", + "pallet-utility", + "pallet-xrpl", "parity-scale-codec", "precompile-utils", "scale-info", @@ -6344,6 +6352,29 @@ dependencies = [ "sp-std", ] +[[package]] +name = "pallet-sylo-data-verification" +version = "0.0.1" +dependencies = [ + "frame-benchmarking", + "frame-support", + "frame-system", + "hex", + "pallet-assets", + "pallet-assets-ext", + "pallet-balances", + "parity-scale-codec", + "scale-info", + "seed-pallet-common", + "seed-primitives", + "serde", + "sp-arithmetic", + "sp-core", + "sp-io", + "sp-runtime", + "sp-std", +] + [[package]] name = "pallet-timestamp" version = "4.0.0-dev" @@ -9126,6 +9157,7 @@ dependencies = [ "pallet-sft-rpc-runtime-api", "pallet-staking", "pallet-sudo", + "pallet-sylo-data-verification", "pallet-timestamp", "pallet-token-approvals", "pallet-transaction-payment", diff --git a/Cargo.toml b/Cargo.toml index a4711f8f2..cff7d1d45 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -132,6 +132,7 @@ pallet-nft-peg = { path = "pallet/nft-peg", default-features = false } pallet-sft = { path = "pallet/sft", default-features = false } pallet-sft-rpc = { path = "pallet/sft/rpc", default-features = false } pallet-sft-rpc-runtime-api = { path = "pallet/sft/rpc/runtime-api", default-features = false } +pallet-sylo-data-verification = { path = "pallet/sylo-data-verification", default-features = false } pallet-token-approvals = { path = "pallet/token-approvals", default-features = false } pallet-tx-fee-pot = { path = "pallet/tx-fee-pot", default-features = false } pallet-vortex-distribution = { path = "pallet/vortex-distribution", default-features = false } diff --git a/e2e/common/index.ts b/e2e/common/index.ts index 47d0ccfae..26ff902ad 100644 --- a/e2e/common/index.ts +++ b/e2e/common/index.ts @@ -623,15 +623,31 @@ export const finalizeTx = ( extrinsic: SubmittableExtrinsic<"promise">, opts?: Partial, ) => { - return new Promise((resolve) => { + const sendCb = + (resolve: any, reject: any) => + ({ internalError, dispatchError, status, events = [] }: any) => { + if (internalError) { + return reject(internalError); + } + + if (dispatchError && !dispatchError.isModule) { + return reject(dispatchError.toJSON()); + } + + if (dispatchError && dispatchError.isModule) { + const { section, name, docs } = dispatchError.registry.findMetaError(dispatchError.asModule); + + return reject({ section, name, docs }); + } + + if (status.isInBlock) resolve(events); + }; + + return new Promise((resolve, reject) => { if (opts) { - extrinsic.signAndSend(signer, opts, ({ status, events = [] }: any) => { - if (status.isInBlock) resolve(events); - }); + extrinsic.signAndSend(signer, opts, sendCb(resolve, reject)); } else { - extrinsic.signAndSend(signer, ({ status, events = [] }: any) => { - if (status.isInBlock) resolve(events); - }); + extrinsic.signAndSend(signer, sendCb(resolve, reject)); } }); }; @@ -807,3 +823,8 @@ export const loadTestUsers = (userAmount?: number): KeyringPair[] => { console.log(`loaded ${keypairs.length} users`); return keypairs; }; + +export const getPrefixLength = (encoded: SubmittableExtrinsic): number => { + if (encoded.encodedLength < 66) return 6; + return 8; +}; diff --git 
a/e2e/test/Doughnuts.test.ts b/e2e/test/Doughnuts.test.ts index fe8a9001d..da4a5610b 100644 --- a/e2e/test/Doughnuts.test.ts +++ b/e2e/test/Doughnuts.test.ts @@ -345,7 +345,7 @@ describe("Doughnuts", () => { .transact(call, doughnutHex, nonce, genesis_hash, tip, holderSig) .send() .catch((err: any) => { - console.log(err); + console.log("DOUGHNUT ERR", err); }); // console.log(events); diff --git a/e2e/test/SyloGasCosts.test.ts b/e2e/test/SyloGasCosts.test.ts new file mode 100644 index 000000000..d08a286b7 --- /dev/null +++ b/e2e/test/SyloGasCosts.test.ts @@ -0,0 +1,462 @@ +import { JsonRpcProvider } from "@ethersproject/providers"; +import { ApiPromise, Keyring, WsProvider } from "@polkadot/api"; +import { KeyringPair } from "@polkadot/keyring/types"; +import { hexToU8a, u8aToHex } from "@polkadot/util"; +import { blake2AsHex } from "@polkadot/util-crypto"; +import { Doughnut, PayloadVersion, SignatureVersion, Topping } from "@therootnetwork/doughnut-nodejs"; +import { expect } from "chai"; +import { blake256 } from "codechain-primitives"; +import { Wallet, utils as ethersUtils } from "ethers"; +import { computePublicKey } from "ethers/lib/utils"; +import { xit } from "mocha"; +import { encode, encodeForSigning } from "ripple-binary-codec"; +import { deriveAddress, sign } from "ripple-keypairs"; + +import { + ALITH_PRIVATE_KEY, + GAS_TOKEN_ID, + NodeProcess, + finalizeTx, + getNextAssetId, + getPrefixLength, + startNode, + stringToHex, + typedefs, +} from "../common"; + +const PROXY_TYPE = { + Any: 1, +}; + +const TRN_PERMISSION_DOMAIN: string = "trn"; + +describe("Sylo", () => { + let node: NodeProcess; + let api: ApiPromise; + let keyring: Keyring; + let alith: KeyringPair; + let userPrivateKey: string; + let user: KeyringPair; + let provider: JsonRpcProvider; + let genesisHash: string; + let feeTokenAssetId: number; + + before(async () => { + node = await startNode(); + + const wsProvider = new WsProvider(`ws://127.0.0.1:${node.rpcPort}`); + api = await ApiPromise.create({ provider: wsProvider, types: typedefs }); + genesisHash = api.genesisHash.toHex().slice(2); + + provider = new JsonRpcProvider(`http://127.0.0.1:${node.rpcPort}`); + + keyring = new Keyring({ type: "ethereum" }); + alith = keyring.addFromSeed(hexToU8a(ALITH_PRIVATE_KEY)); + userPrivateKey = Wallet.createRandom().privateKey; + user = keyring.addFromSeed(hexToU8a(userPrivateKey)); + + feeTokenAssetId = await getNextAssetId(api); + + // add liquidity for XRP/SYLO token and set up user funds + const txs = [ + api.tx.assetsExt.createAsset("sylo", "SYLO", 18, 1, alith.address), + api.tx.assets.mint(feeTokenAssetId, alith.address, 2_000_000_000_000_000), + api.tx.assets.mint(feeTokenAssetId, user.address, 2_000_000_000_000_000), + api.tx.assets.transfer(GAS_TOKEN_ID, user.address, 1), // avoids xrp balance increase due to preservation rules + api.tx.dex.addLiquidity( + feeTokenAssetId, + GAS_TOKEN_ID, + 100_000_000_000, + 100_000_000_000, + 100_000_000_000, + 100_000_000_000, + null, + null, + ), + ]; + await finalizeTx(alith, api.tx.utility.batch(txs)); + + // set payment asset + await finalizeTx(alith, api.tx.sudo.sudo(api.tx.syloDataVerification.setPaymentAsset(feeTokenAssetId))); + + console.log("liquidity setup complete..."); + }); + + after(async () => node.stop()); + + // A set of sylo extrinsics to test, where each extrinsic should be paid for + // using sylo tokens + const createSyloExtrinsics = (api: ApiPromise) => [ + api.tx.syloDataVerification.registerResolver("id", ["endpoint"]), + 
api.tx.syloDataVerification.updateResolver("id", ["endpoint-2"]), + api.tx.syloDataVerification.deregisterResolver("id"), + api.tx.syloDataVerification.createValidationRecord( + "data-id", + [{ method: "sylo-resolver", identifier: "id" }], + "data-type", + ["tag"], + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + api.tx.syloDataVerification.addValidationRecordEntry( + "data-id", + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + api.tx.syloDataVerification.updateValidationRecord( + "data-id", + [{ method: "sylo-resolver", identifier: "id-2" }], + "data-type-2", + ["tag-2"], + ), + api.tx.syloDataVerification.deleteValidationRecord("data-id"), + ]; + + it("can submit sylo extrinsic and pay with sylo tokens", async () => { + const calls = createSyloExtrinsics(api); + + for (const call of calls) { + console.log("testing call", call.meta.name.toString()); + + const userXRPBalanceBefore = + ((await api.query.assets.account(GAS_TOKEN_ID, user.address)).toJSON() as any)?.balance ?? 0; + const userSyloBalanceBefore = + ((await api.query.assets.account(feeTokenAssetId, user.address)).toJSON() as any)?.balance ?? 0; + + await finalizeTx(user, call); + + // verify balances updated + const userXRPBalanceAfter = + ((await api.query.assets.account(GAS_TOKEN_ID, user.address)).toJSON() as any)?.balance ?? 0; + const userSyloBalanceAfter = + ((await api.query.assets.account(feeTokenAssetId, user.address)).toJSON() as any)?.balance ?? 0; + + expect(userXRPBalanceAfter).to.be.eq(userXRPBalanceBefore); + expect(userSyloBalanceAfter).to.be.lessThan(userSyloBalanceBefore); + } + }); + + it("can submit sylo extrinsic with futurepass", async () => { + // create a random user A + const userPrivateKey = Wallet.createRandom().privateKey; + const user: KeyringPair = keyring.addFromSeed(hexToU8a(userPrivateKey)); + + // create a futurepass for user + await finalizeTx(alith, api.tx.futurepass.create(user.address)); + + // fund the futurepass account + const futurepassAddress = (await api.query.futurepass.holders(user.address)).toString(); + await finalizeTx(alith, api.tx.assets.transfer(feeTokenAssetId, futurepassAddress, 100_000_000)); // gas + await finalizeTx(alith, api.tx.assets.transfer(GAS_TOKEN_ID, futurepassAddress, 1)); // preservation rules + + const calls = createSyloExtrinsics(api); + + for (const call of calls) { + console.log("testing call", call.meta.name.toString()); + + const userXRPBalanceBefore = + ((await api.query.assets.account(GAS_TOKEN_ID, user.address)).toJSON() as any)?.balance ?? 0; + const userSyloBalanceBefore = + ((await api.query.assets.account(feeTokenAssetId, user.address)).toJSON() as any)?.balance ?? 0; + + const fpXRPBalanceBefore = + ((await api.query.assets.account(GAS_TOKEN_ID, futurepassAddress)).toJSON() as any)?.balance ?? 0; + const fpSyloBalanceBefore = + ((await api.query.assets.account(feeTokenAssetId, futurepassAddress)).toJSON() as any)?.balance ?? 0; + + const futurepassCall = api.tx.futurepass.proxyExtrinsic(futurepassAddress, call); + + await finalizeTx(user, futurepassCall); + + const userXRPBalanceAfter = + ((await api.query.assets.account(GAS_TOKEN_ID, user.address)).toJSON() as any)?.balance ?? 0; + const userSyloBalanceAfter = + ((await api.query.assets.account(feeTokenAssetId, user.address)).toJSON() as any)?.balance ?? 0; + + const fpXRPBalanceAfter = + ((await api.query.assets.account(GAS_TOKEN_ID, futurepassAddress)).toJSON() as any)?.balance ?? 
0; + const fpSyloBalanceAfter = + ((await api.query.assets.account(feeTokenAssetId, futurepassAddress)).toJSON() as any)?.balance ?? 0; + + // validate the futurepass's token balance has decreased, and the user's asset + // balance remains the same + expect(userXRPBalanceAfter).to.be.eq(userXRPBalanceBefore); + expect(userSyloBalanceAfter).to.be.eq(userSyloBalanceBefore); + + expect(fpXRPBalanceAfter).to.be.eq(fpXRPBalanceBefore); + expect(fpSyloBalanceAfter).to.be.lt(fpSyloBalanceBefore); + } + }); + + it("can submit sylo extrinsic with proxy", async () => { + // create a random user A + const user: KeyringPair = keyring.addFromSeed(hexToU8a(Wallet.createRandom().privateKey)); + + // create a futurepass for user + await finalizeTx(alith, api.tx.futurepass.create(user.address)); + + const delegateWallet = Wallet.createRandom(); + const delegate = keyring.addFromSeed(hexToU8a(delegateWallet.privateKey)); + + const futurepassAddress = (await api.query.futurepass.holders(user.address)).toString(); + + // fund user to create fp + await finalizeTx(alith, api.tx.assets.transfer(GAS_TOKEN_ID, user.address, 10_000_000)); + await finalizeTx(alith, api.tx.balances.transfer(user.address, 10_000_000)); + + // fund delegate for extrinsics + await finalizeTx(alith, api.tx.assets.transfer(GAS_TOKEN_ID, delegate.address, 1)); + await finalizeTx(alith, api.tx.assets.transfer(feeTokenAssetId, delegate.address, 10_000_000_000)); + + const deadline = (await provider.getBlockNumber()) + 20; + const message = ethersUtils + .solidityKeccak256( + ["address", "address", "uint8", "uint32"], + [futurepassAddress, delegate.address, PROXY_TYPE.Any, deadline], + ) + .substring(2); + const signature = await delegateWallet.signMessage(message); + + // register a delegate for futurepass (only way to create a proxy) + await finalizeTx( + user, + api.tx.futurepass.registerDelegateWithSignature( + futurepassAddress, + delegate.address, + PROXY_TYPE.Any, + deadline, + signature, + ), + ); + + const delegateXRPBalanceBefore = + ((await api.query.assets.account(GAS_TOKEN_ID, delegate.address)).toJSON() as any)?.balance ?? 0; + const delegateSyloBalanceBefore = + ((await api.query.assets.account(feeTokenAssetId, delegate.address)).toJSON() as any)?.balance ?? 0; + + await finalizeTx( + delegate, + api.tx.proxy.proxy(futurepassAddress, null, api.tx.syloDataVerification.registerResolver("test-proxy", [])), + ); + + // verify balances updated + const delegateXRPBalanceAfter = + ((await api.query.assets.account(GAS_TOKEN_ID, delegate.address)).toJSON() as any)?.balance ?? 0; + const delegateSyloBalanceAfter = + ((await api.query.assets.account(feeTokenAssetId, delegate.address)).toJSON() as any)?.balance ?? 
0; + + expect(delegateXRPBalanceAfter).to.be.eq(delegateXRPBalanceBefore); + expect(delegateSyloBalanceAfter).to.be.lessThan(delegateSyloBalanceBefore); + }); + + it("can submit sylo extrinsic with xrpl", async () => { + const user = Wallet.createRandom(); + const publicKey = computePublicKey(user.publicKey, true); + + // fund the user account to pay for tx fees + await finalizeTx(alith, api.tx.assets.transfer(GAS_TOKEN_ID, user.address, 1)); + await finalizeTx(alith, api.tx.assets.transfer(feeTokenAssetId, user.address, 10_000_000_000)); + + const call = createSyloExtrinsics(api)[0]; + + const hashedExtrinsicWithoutPrefix = blake256(call.toHex().slice(getPrefixLength(call))).toString(); + const maxBlockNumber = +(await api.query.system.number()).toString() + 5; + + const xamanJsonTx = { + AccountTxnID: "16969036626990000000000000000000F236FD752B5E4C84810AB3D41A3C2580", + SigningPubKey: publicKey.slice(2), + Account: deriveAddress(publicKey.slice(2)), + Memos: [ + { + Memo: { + MemoType: stringToHex("extrinsic"), + // remove `0x` from extrinsic hex string + MemoData: stringToHex(`${genesisHash}:0:${maxBlockNumber}:0:${hashedExtrinsicWithoutPrefix}`), + }, + }, + ], + }; + + // sign xaman tx + const message = encode(xamanJsonTx); + const encodedSigningMessage = encodeForSigning(xamanJsonTx); + const signature = sign(encodedSigningMessage, user.privateKey.slice(2)); + + const userXRPBalanceBefore = + ((await api.query.assets.account(GAS_TOKEN_ID, user.address)).toJSON() as any)?.balance ?? 0; + const userSyloBalanceBefore = + ((await api.query.assets.account(feeTokenAssetId, user.address)).toJSON() as any)?.balance ?? 0; + + // execute xaman tx extrinsic + await new Promise(async (resolve) => { + await api.tx.xrpl.transact(`0x${message}`, `0x${signature}`, call).send(({ events = [], status }) => { + if (status.isInBlock) resolve(events); + }); + }); + + const userXRPBalanceAfter = + ((await api.query.assets.account(GAS_TOKEN_ID, user.address)).toJSON() as any)?.balance ?? 0; + const userSyloBalanceAfter = + ((await api.query.assets.account(feeTokenAssetId, user.address)).toJSON() as any)?.balance ?? 0; + + expect(userXRPBalanceAfter).to.be.eq(userXRPBalanceBefore); + expect(userSyloBalanceAfter).to.be.lessThan(userSyloBalanceBefore); + }); + + it("can submit sylo extrinsics in batch call", async () => { + const calls = createSyloExtrinsics(api); + + const userXRPBalanceBefore = + ((await api.query.assets.account(GAS_TOKEN_ID, user.address)).toJSON() as any)?.balance ?? 0; + const userSyloBalanceBefore = + ((await api.query.assets.account(feeTokenAssetId, user.address)).toJSON() as any)?.balance ?? 0; + + await finalizeTx(user, api.tx.utility.batch(calls)); + + const userXRPBalanceAfter = + ((await api.query.assets.account(GAS_TOKEN_ID, user.address)).toJSON() as any)?.balance ?? 0; + const userSyloBalanceAfter = + ((await api.query.assets.account(feeTokenAssetId, user.address)).toJSON() as any)?.balance ?? 0; + + expect(userXRPBalanceAfter).to.be.eq(userXRPBalanceBefore); + expect(userSyloBalanceAfter).to.be.lessThan(userSyloBalanceBefore); + }); + + // Failures to pay for extrinsics will hang, so failures tests are disabled. + // Enable and run these tests manually to verify fee swap behaviour. 
+ xit("fails to submit without sylo tokens available", async () => { + // create a new user + const userPrivateKey = Wallet.createRandom().privateKey; + const user = keyring.addFromSeed(hexToU8a(userPrivateKey)); + + // ensure user has enough xrp to submit regular extrinsics + await finalizeTx(alith, api.tx.assets.transfer(GAS_TOKEN_ID, user.address, 100_000_000)); + + await finalizeTx(user, api.tx.syloDataVerification.registerResolver("id", ["endpoint"])); + }); + + xit("fails to submit when wrapping sylo exstrinsic in fee-proxy call", async () => { + // create a new user + const userPrivateKey = Wallet.createRandom().privateKey; + const user = keyring.addFromSeed(hexToU8a(userPrivateKey)); + + // ensure user has enough xrp to submit regular extrinsics + await finalizeTx(alith, api.tx.assets.transfer(GAS_TOKEN_ID, user.address, 100_000_000)); + + const syloCall = api.tx.syloDataVerification.registerResolver("id", ["endpoint"]); + + const next_fee_token_id = 2148; + + // add liquidity for XRP/SYLO token and set up user funds + const txs = [ + api.tx.assetsExt.createAsset("sylo-new", "SYLO-NEW", 18, 1, alith.address), + api.tx.assets.mint(next_fee_token_id, user.address, 2_000_000_000_000_000), + api.tx.dex.addLiquidity( + next_fee_token_id, + GAS_TOKEN_ID, + 100_000_000_000, + 100_000_000_000, + 100_000_000_000, + 100_000_000_000, + null, + null, + ), + ]; + await finalizeTx(alith, api.tx.utility.batch(txs)); + + const maxTokenPayment = 5_000_000; + + await finalizeTx(user, api.tx.feeProxy.callWithFeePreferences(next_fee_token_id, maxTokenPayment, syloCall)); + }); + + xit("fails to submit sylo extrinsic in mixed batch of calls", async () => { + const calls = createSyloExtrinsics(api); + + await finalizeTx(user, api.tx.utility.batch([api.tx.system.remark("hello"), ...calls])).catch(console.log); + }); + + it("fails to submit sylo extrinsic with doughnuts", async () => { + // create a doughnut + const holderPrivateKey = Wallet.createRandom().privateKey; + const holder: KeyringPair = keyring.addFromSeed(hexToU8a(holderPrivateKey)); + const issuerPubkey = user.publicKey; + const holderPubkey = holder.publicKey; + const feeMode = 0; + const expiry = 100000; + const notBefore = 0; + + const doughnut = new Doughnut(PayloadVersion.V1, issuerPubkey, holderPubkey, feeMode, expiry, notBefore); + + const module = [ + { + name: "Balances", + block_cooldown: 0, + methods: [ + { + name: "transfer", + block_cooldown: 0, + constraints: null, + }, + ], + }, + ]; + + const topping = new Topping(module); + + // Add to trn topping + doughnut.addTopping(TRN_PERMISSION_DOMAIN, topping.encode()); + + // Sign the doughnut + const userWallet = await new Wallet(userPrivateKey); + const ethHash = blake2AsHex(doughnut.payload()); + const ethSlice = Buffer.from(ethHash.slice(2), "hex"); + const issuerSig = await userWallet.signMessage(ethSlice); + const sigUint8 = Buffer.from(issuerSig.slice(2), "hex"); + doughnut.addSignature(sigUint8, SignatureVersion.EIP191); + + // Verify that the doughnut is valid + const verified = doughnut.verify(holderPubkey, 5); + expect(verified).to.be.equal(true); + + // Encode the doughnut + const encodedDoughnut = doughnut.encode(); + const doughnutHex = u8aToHex(encodedDoughnut); + + // whitelist the holder. 
+ await finalizeTx(alith, api.tx.sudo.sudo(api.tx.doughnut.updateWhitelistedHolders(holder.address, true))); + + const tip = 0; + const genesis_hash = await api.rpc.chain.getBlockHash(0); + + const call = createSyloExtrinsics(api)[0]; + + const nonce = ((await api.query.system.account(holder.address)).toJSON() as any)?.nonce; + const tx = await api.tx.doughnut.transact(call, doughnutHex, nonce, genesis_hash, tip, ""); + const txU8a = tx.toU8a(true).slice(2); + const txHex = u8aToHex(txU8a); + const holderWallet = await new Wallet(holderPrivateKey); + const txHash = blake2AsHex(txHex); + const txSlice = Buffer.from(txHash.slice(2), "hex"); + const holderSig = await holderWallet.signMessage(txSlice); + + const doughnutErr = await new Promise((resolve) => { + api.tx.doughnut + .transact(call, doughnutHex, nonce, genesis_hash, tip, holderSig) + .send(({ internalError, dispatchError }) => { + if (internalError) { + return resolve(internalError); + } + + if (dispatchError && !dispatchError.isModule) { + return resolve(dispatchError.toJSON()); + } + + if (dispatchError && dispatchError.isModule) { + const { section, name, docs } = dispatchError.registry.findMetaError(dispatchError.asModule); + + return resolve({ section, name, docs }); + } + }); + }); + + console.error("doughtnut err:", doughnutErr); + }); +}); diff --git a/e2e/test/XRPL.test.ts b/e2e/test/XRPL.test.ts index 0263d567b..c2f2c94b3 100644 --- a/e2e/test/XRPL.test.ts +++ b/e2e/test/XRPL.test.ts @@ -1,5 +1,4 @@ import { ApiPromise, Keyring, WsProvider } from "@polkadot/api"; -import { SubmittableExtrinsic } from "@polkadot/api/types"; import type { KeyringPair } from "@polkadot/keyring/types"; import { DispatchError } from "@polkadot/types/interfaces"; import { hexToU8a } from "@polkadot/util"; @@ -20,6 +19,7 @@ import { assetIdToERC20ContractAddress, finalizeTx, getNextAssetId, + getPrefixLength, poolAddress, startNode, stringToHex, @@ -1309,8 +1309,3 @@ describe("XRPL pallet", () => { expect(errorFound).to.be.true; }); }); - -function getPrefixLength(encoded: SubmittableExtrinsic): number { - if (encoded.encodedLength < 66) return 6; - return 8; -} diff --git a/pallet/common/src/test_utils.rs b/pallet/common/src/test_utils.rs index ff9a67def..5d9e38642 100644 --- a/pallet/common/src/test_utils.rs +++ b/pallet/common/src/test_utils.rs @@ -36,7 +36,7 @@ pub mod test_prelude { pub use sp_core::{H160, H256, U256}; pub use sp_runtime::{ testing::Header, - traits::{BlakeTwo256, IdentityLookup}, + traits::{BlakeTwo256, IdentityLookup, LookupError, StaticLookup}, ArithmeticError, BoundedVec, BuildStorage, DispatchError::BadOrigin, Permill, TokenError, @@ -723,3 +723,89 @@ macro_rules! impl_pallet_scheduler_config { } }; } + +#[macro_export] +macro_rules! impl_pallet_sylo_data_verification_config { + ($test:ident) => { + parameter_types! { + pub const MaxResolvers: u32 = 10; + pub const MaxTags: u32 = 10; + pub const MaxEntries: u32 = 100; + pub const MaxServiceEndpoints: u32 = 10; + pub const StringLimit: u32 = 500; + } + impl pallet_sylo_data_verification::Config for Test { + type RuntimeCall = RuntimeCall; + type RuntimeEvent = RuntimeEvent; + type ApproveOrigin = EnsureRoot; + type MaxResolvers = MaxResolvers; + type MaxTags = MaxTags; + type MaxEntries = MaxEntries; + type MaxServiceEndpoints = MaxServiceEndpoints; + type StringLimit = StringLimit; + type WeightInfo = (); + } + }; +} + +#[macro_export] +macro_rules! 
impl_pallet_xrpl_config { + ($test:ident) => { + pub struct FuturepassIdentityLookup; + impl StaticLookup for FuturepassIdentityLookup { + type Source = H160; + type Target = H160; + fn lookup(s: Self::Source) -> Result { + Ok(s) + } + fn unlookup(t: Self::Target) -> Self::Source { + t + } + } + impl ExtrinsicChecker for FuturepassIdentityLookup { + type Call = RuntimeCall; + type Extra = (); + type Result = bool; + fn check_extrinsic(_call: &Self::Call, _extra: &Self::Extra) -> Self::Result { + false + } + } + + pub struct ValidatedCall; + impl ExtrinsicChecker for ValidatedCall { + type Call = RuntimeCall; + type Extra = (); + type Result = bool; + fn check_extrinsic(_call: &Self::Call, _extra: &Self::Extra) -> Self::Result { + true + } + } + + parameter_types! { + pub const MaxMessageLength: u32 = 2048; + pub const MaxSignatureLength: u32 = 80; + } + impl pallet_xrpl::Config for Test { + type RuntimeEvent = RuntimeEvent; + type RuntimeCall = RuntimeCall; + type CallValidator = ValidatedCall; + type FuturepassLookup = FuturepassIdentityLookup; + type PalletsOrigin = OriginCaller; + type MaxMessageLength = MaxMessageLength; + type MaxSignatureLength = MaxSignatureLength; + type WeightInfo = (); + } + }; +} + +#[macro_export] +macro_rules! impl_pallet_utility_config { + ($test:ident) => { + impl pallet_utility::Config for Test { + type RuntimeEvent = RuntimeEvent; + type RuntimeCall = RuntimeCall; + type PalletsOrigin = OriginCaller; + type WeightInfo = (); + } + }; +} diff --git a/pallet/fee-control/Cargo.toml b/pallet/fee-control/Cargo.toml index 47c9cf9f0..a2ef10902 100644 --- a/pallet/fee-control/Cargo.toml +++ b/pallet/fee-control/Cargo.toml @@ -38,6 +38,10 @@ pallet-fee-proxy = { workspace = true, default-features = true } pallet-dex = { workspace = true } pallet-assets-ext = { workspace = true } pallet-futurepass = { workspace = true } +pallet-sylo-data-verification = { workspace = true } +pallet-xrpl = { workspace = true } +pallet-proxy = { workspace = true } +pallet-utility = { workspace = true } seed-pallet-common= { workspace = true, default-features = true } [features] @@ -49,6 +53,8 @@ std = [ "frame-system/std", "pallet-assets/std", "pallet-balances/std", + "pallet-proxy/std", + "pallet-utility/std", "sp-runtime/std", "sp-std/std", "seed-pallet-common/std", diff --git a/pallet/fee-control/src/mock.rs b/pallet/fee-control/src/mock.rs index 1cd08d24e..404525f6a 100644 --- a/pallet/fee-control/src/mock.rs +++ b/pallet/fee-control/src/mock.rs @@ -22,6 +22,8 @@ use frame_support::{ use pallet_evm::{AddressMapping, BlockHashMapping, EnsureAddressNever, GasWeightMapping}; use precompile_utils::{Address, ErcIdConversion}; use seed_pallet_common::test_prelude::*; +use seed_pallet_common::ExtrinsicChecker; +use sp_runtime::traits::{LookupError, StaticLookup}; use sp_runtime::ConsensusEngineId; pub const MOCK_PAYMENT_ASSET_ID: AssetId = 100; @@ -39,7 +41,11 @@ construct_runtime!( Evm: pallet_evm, Timestamp: pallet_timestamp, Futurepass: pallet_futurepass, + Sylo: pallet_sylo_data_verification, MockPallet: mock_pallet::pallet, + Xrpl: pallet_xrpl, + Utility: pallet_utility, + Proxy: pallet_proxy, FeeControl: pallet_fee_control, } ); @@ -54,6 +60,10 @@ impl_pallet_dex_config!(Test); impl_pallet_timestamp_config!(Test); impl_pallet_evm_config!(Test); impl_pallet_futurepass_config!(Test); +impl_pallet_sylo_data_verification_config!(Test); +impl_pallet_xrpl_config!(Test); +impl_pallet_proxy_config!(Test); +impl_pallet_utility_config!(Test); impl_pallet_fee_control_config!(Test); impl 
mock_pallet::pallet::Config for Test {} diff --git a/pallet/fee-proxy/Cargo.toml b/pallet/fee-proxy/Cargo.toml index d2cc3e043..521b0f605 100644 --- a/pallet/fee-proxy/Cargo.toml +++ b/pallet/fee-proxy/Cargo.toml @@ -23,6 +23,10 @@ pallet-evm = { workspace = true } pallet-assets-ext = { workspace = true } pallet-dex = { workspace = true } pallet-futurepass = { workspace = true } +pallet-proxy = { workspace = true } +pallet-utility = { workspace = true } +pallet-xrpl = { workspace = true } +pallet-sylo-data-verification = { workspace = true } pallet-transaction-payment = { workspace = true } precompile-utils = { workspace = true } @@ -44,7 +48,10 @@ std = [ "precompile-utils/std", "pallet-assets-ext/std", "pallet-futurepass/std", + "pallet-proxy/std", + "pallet-utility/std", "pallet-evm/std", + "pallet-sylo-data-verification/std", "pallet-transaction-payment/std", "scale-info/std", "seed-primitives/std", diff --git a/pallet/fee-proxy/src/impls.rs b/pallet/fee-proxy/src/impls.rs index 9ac9dae9a..58a6fdfc6 100644 --- a/pallet/fee-proxy/src/impls.rs +++ b/pallet/fee-proxy/src/impls.rs @@ -30,12 +30,26 @@ where + pallet_dex::Config + pallet_evm::Config + pallet_assets_ext::Config - + pallet_futurepass::Config, + + pallet_futurepass::Config + + pallet_sylo_data_verification::Config + + pallet_proxy::Config + + pallet_utility::Config + + pallet_xrpl::Config, ::RuntimeCall: IsSubType>, ::RuntimeCall: IsSubType>, + ::RuntimeCall: IsSubType>, + ::RuntimeCall: IsSubType>, + ::RuntimeCall: IsSubType>, + ::RuntimeCall: IsSubType>, ::RuntimeCall: IsSubType>, ::RuntimeCall: IsSubType>, + ::RuntimeCall: IsSubType>, + ::RuntimeCall: IsSubType>, + ::RuntimeCall: IsSubType>, + ::RuntimeCall: IsSubType>, ::RuntimeCall: IsSubType>, + ::RuntimeCall: + IsSubType>, ::OnChargeTransaction: OnChargeTransaction, ::ErcIdConversion: ErcIdConversion, Balance: From<<::OnChargeTransaction as OnChargeTransaction>::Balance>, @@ -45,7 +59,10 @@ where <::OnChargeTransaction as OnChargeTransaction>::LiquidityInfo; /// Intercept the withdraw fee, and swap any tokens to gas tokens if the call is - /// pallet_fee_proxy.call_with_fee_preferences() + /// pallet_fee_proxy.call_with_fee_preferences(). + /// + /// This also additionally will force the Sylo token as the gas token if the call + /// is detected as a extrinsic for the sylo pallet. fn withdraw_fee( who: &T::AccountId, call: &::RuntimeCall, @@ -65,12 +82,53 @@ where } } + let do_fee_swap = |who: &T::AccountId, + payment_asset: &AssetId, + mut total_fee: Balance, + max_payment: Balance| + -> Result<(), TransactionValidityError> { + let native_asset = ::FeeAssetId::get(); + + // If the account has less balance than the minimum_deposit, we need to add + // the minimum deposit onto the total_fee. 
+ // This is due to the preservation rules of the withdraw call made within + // <::OnChargeTransaction as OnChargeTransaction>::withdraw_fee + let account_balance = pallet_assets_ext::Pallet::::balance(native_asset, who); + // Minium balance is hardcoded to 1 + let minimum_balance = pallet_assets_ext::Pallet::::minimum_balance(native_asset); + if account_balance < minimum_balance { + total_fee = total_fee.saturating_add(minimum_balance); + } + let path: &[AssetId] = &[*payment_asset, native_asset]; + pallet_dex::Pallet::::do_swap_with_exact_target( + who, + total_fee, + max_payment, + path, + *who, + None, + ) + .map_err(|_| InvalidTransaction::Payment)?; + + Ok(()) + }; + + let is_sylo_and_valid_call = is_sylo_and_valid_call::(call)?; + + // if the call is a sylo pallet call, then we always force a fee swap with the + // sylo token + if is_sylo_and_valid_call { + let payment_asset = pallet_sylo_data_verification::SyloAssetId::::get() + .ok_or(InvalidTransaction::Payment)?; + + do_fee_swap(who, &payment_asset, Balance::from(fee), u128::MAX)?; + } + // Check whether this call has specified fee preferences if let Some(call_with_fee_preferences { payment_asset, max_payment, call }) = call.is_sub_type() { let mut total_fee: Balance = Balance::from(fee); - let native_asset = ::FeeAssetId::get(); let mut add_evm_gas_cost = |gas_limit: &u64, @@ -127,27 +185,7 @@ where add_evm_gas_cost(gas_limit, max_fee_per_gas, max_priority_fee_per_gas); } - // If the account has less balance than the minimum_deposit, we need to add - // the minimum deposit onto the total_fee. - // This is due to the preservation rules of the withdraw call made within - // <::OnChargeTransaction as OnChargeTransaction>::withdraw_fee - let account_balance = pallet_assets_ext::Pallet::::balance(native_asset, who); - // Minium balance is hardcoded to 1 - // pallet_assets_ext::Pallet::::minimum_balance(native_asset); - let minimum_balance = pallet_assets_ext::Pallet::::minimum_balance(native_asset); - if account_balance < minimum_balance { - total_fee = total_fee.saturating_add(minimum_balance); - } - let path: &[AssetId] = &[*payment_asset, native_asset]; - pallet_dex::Pallet::::do_swap_with_exact_target( - who, - total_fee, - *max_payment, - path, - *who, - None, - ) - .map_err(|_| InvalidTransaction::Payment)?; + do_fee_swap(who, payment_asset, total_fee, *max_payment)?; }; <::OnChargeTransaction as OnChargeTransaction>::withdraw_fee( @@ -182,3 +220,123 @@ where ) } } + +/// Helper function to determine if a call is a sylo pallet call that +/// should be paid using sylo tokens. This function will also attempt to destructure +/// any proxy calls and check the inner call. This includes: +/// - pallet_futurepass.proxy_extrinsic +/// - pallet_xrpl.transact +/// - pallet_proxy.proxy +/// - pallet_proxy.proxy_announce +/// - pallet_utility.batch +/// - pallet_utility.batch_all +/// - pallet_utility.force_batch +/// +/// Not all proxy calls are supported, such as some sudo calls, or scheduled +/// calls. In these edge cases, the fee for the call will be paid in the native +/// fee token. +/// +/// This will also return an error if the call is an invalid sylo call. A sylo call +/// can be invalid in the following cases: +/// - The sylo call has been wrapped in a call_with_fee_preferences call. Sylo +/// calls should be paid in Sylos only. +/// - The sylo call is in a batch/batch_all call. In batch calls, if any call is +/// a sylo call, then all inner calls must be a sylo call. 
This simplifies the +/// implementation, preventing a need to process the fee for each individual call. +fn is_sylo_and_valid_call( + call: &::RuntimeCall, +) -> Result +where + T: Config + + frame_system::Config + + pallet_futurepass::Config + + pallet_xrpl::Config + + pallet_proxy::Config + + pallet_utility::Config + + pallet_sylo_data_verification::Config, + ::RuntimeCall: IsSubType>, + ::RuntimeCall: IsSubType>, + ::RuntimeCall: IsSubType>, + ::RuntimeCall: IsSubType>, + ::RuntimeCall: IsSubType>, + ::RuntimeCall: IsSubType>, + ::RuntimeCall: IsSubType>, + ::RuntimeCall: IsSubType>, + ::RuntimeCall: IsSubType>, + ::RuntimeCall: IsSubType>, + ::RuntimeCall: + IsSubType>, +{ + if match call.is_sub_type() { + Some(pallet_sylo_data_verification::Call::register_resolver { .. }) => true, + Some(pallet_sylo_data_verification::Call::update_resolver { .. }) => true, + Some(pallet_sylo_data_verification::Call::deregister_resolver { .. }) => true, + Some(pallet_sylo_data_verification::Call::create_validation_record { .. }) => true, + Some(pallet_sylo_data_verification::Call::add_validation_record_entry { .. }) => true, + Some(pallet_sylo_data_verification::Call::update_validation_record { .. }) => true, + Some(pallet_sylo_data_verification::Call::delete_validation_record { .. }) => true, + _ => false, + } { + return Ok(true); + } + + // check if the inner call of a futurepass call is a sylo call + if let Some(pallet_futurepass::Call::proxy_extrinsic { call, .. }) = call.is_sub_type() { + return is_sylo_and_valid_call::(call.as_ref().into_ref()); + } + + // check if the inner call of a proxy pallet call is a sylo call + match call.is_sub_type() { + Some(pallet_proxy::Call::proxy { call, .. }) => { + return is_sylo_and_valid_call::(call.as_ref().into_ref()) + }, + Some(pallet_proxy::Call::proxy_announced { call, .. }) => { + return is_sylo_and_valid_call::(call.as_ref().into_ref()) + }, + _ => Ok::(false), + }?; + + // check if the inner call of a xrpl call is a sylo call + if let Some(pallet_xrpl::Call::transact { call, .. }) = call.is_sub_type() { + return is_sylo_and_valid_call::(call.as_ref().into_ref()); + } + + match call.is_sub_type() { + // for batch calls, if there is any call which is a sylo call, then + // all calls must be a sylo call + Some(pallet_utility::Call::batch { calls, .. }) + | Some(pallet_utility::Call::force_batch { calls, .. }) + | Some(pallet_utility::Call::batch_all { calls, .. }) => { + let sylo_calls = calls + .into_iter() + .map(|call| is_sylo_and_valid_call::(call.into_ref())) + .collect::>>() + .into_iter() + .collect::, _>>()?; + + if sylo_calls.iter().any(|x| *x) { + if !sylo_calls.iter().all(|x| *x) { + Err(InvalidTransaction::Payment)?; + } else { + return Ok(true); + } + } + + Ok::(false) + }, + Some(pallet_utility::Call::as_derivative { call, .. }) => { + return is_sylo_and_valid_call::(call.as_ref().into_ref()) + }, + _ => Ok(false), + }?; + + // prevent using the fee proxy if the inner call is a sylo call + if let Some(call_with_fee_preferences { call, .. 
}) = call.is_sub_type() { + let is_sylo_call = is_sylo_and_valid_call::(call.as_ref().into_ref())?; + if is_sylo_call { + Err(InvalidTransaction::Payment)?; + } + } + + Ok(false) +} diff --git a/pallet/fee-proxy/src/lib.rs b/pallet/fee-proxy/src/lib.rs index e2ae3183d..4d7403711 100644 --- a/pallet/fee-proxy/src/lib.rs +++ b/pallet/fee-proxy/src/lib.rs @@ -25,7 +25,7 @@ pub use pallet::*; use frame_support::{ dispatch::{Dispatchable, GetDispatchInfo, PostDispatchInfo}, pallet_prelude::*, - traits::IsSubType, + traits::{IsSubType, IsType}, }; use frame_system::pallet_prelude::*; use seed_pallet_common::{FeeConfig, MaintenanceCheckEVM}; @@ -63,7 +63,8 @@ pub mod pallet { + Dispatchable + GetDispatchInfo + From> - + IsSubType>; + + IsSubType> + + IsType<::RuntimeCall>; /// The system event type type RuntimeEvent: From> + IsType<::RuntimeEvent>; /// The caller origin, overarching type of all pallets origins. diff --git a/pallet/fee-proxy/src/mock.rs b/pallet/fee-proxy/src/mock.rs index b3e221fed..8d88ac931 100644 --- a/pallet/fee-proxy/src/mock.rs +++ b/pallet/fee-proxy/src/mock.rs @@ -40,6 +40,10 @@ construct_runtime!( Timestamp: pallet_timestamp, Futurepass: pallet_futurepass, FeeControl: pallet_fee_control, + Sylo: pallet_sylo_data_verification, + Xrpl: pallet_xrpl, + Utility: pallet_utility, + Proxy: pallet_proxy, } ); @@ -53,6 +57,10 @@ impl_pallet_timestamp_config!(Test); impl_pallet_evm_config!(Test); impl_pallet_futurepass_config!(Test); impl_pallet_fee_control_config!(Test); +impl_pallet_sylo_data_verification_config!(Test); +impl_pallet_xrpl_config!(Test); +impl_pallet_proxy_config!(Test); +impl_pallet_utility_config!(Test); // Mock ErcIdConversion for testing purposes impl ErcIdConversion for Test diff --git a/pallet/sylo-data-verification/Cargo.toml b/pallet/sylo-data-verification/Cargo.toml new file mode 100644 index 000000000..3a9df2459 --- /dev/null +++ b/pallet/sylo-data-verification/Cargo.toml @@ -0,0 +1,48 @@ +[package] +name = "pallet-sylo-data-verification" +version = "0.0.1" +description = "Root Network Sylo Data Verification Pallet" +authors.workspace = true +edition.workspace = true +license.workspace = true +repository.workspace = true + +[dependencies] +hex = { workspace = true } +serde = { workspace = true } +scale-info = { workspace = true } +codec = { workspace = true } + +frame-support = { workspace = true } +frame-system = { workspace = true } +frame-benchmarking = { workspace = true, optional = true } +sp-core = { workspace = true } +sp-io = { workspace = true } +sp-runtime = { workspace = true } +sp-arithmetic = { workspace = true } +sp-std = { workspace = true } + +seed-primitives = { workspace = true } +seed-pallet-common = { workspace = true } + +[dev-dependencies] +sp-io = { workspace = true } +pallet-assets = { workspace = true } +pallet-balances = { workspace = true } +pallet-assets-ext = { workspace = true, default-features = true } + +[features] +default = ["std"] +std = [ + "codec/std", + "sp-runtime/std", + "frame-support/std", + "frame-system/std", + "sp-std/std", + "seed-primitives/std", + "seed-pallet-common/std", + "sp-io/std", + "frame-benchmarking?/std" +] +runtime-benchmarks = ["frame-benchmarking"] +try-runtime = ["frame-support/try-runtime"] diff --git a/pallet/sylo-data-verification/src/benchmarking.rs b/pallet/sylo-data-verification/src/benchmarking.rs new file mode 100644 index 000000000..d163023c0 --- /dev/null +++ b/pallet/sylo-data-verification/src/benchmarking.rs @@ -0,0 +1,279 @@ +// Copyright 2022-2023 Futureverse Corporation Limited 
+// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// You may obtain a copy of the License at the root of this project source code + +#![cfg(feature = "runtime-benchmarks")] + +use super::*; + +use crate::Pallet as Sylo; + +use alloc::string::{String, ToString}; +use frame_benchmarking::{account as bench_account, benchmarks, impl_benchmark_test_suite}; +use frame_support::{assert_ok, BoundedVec}; +use frame_system::RawOrigin; +use sp_core::H160; + +/// This is a helper function to get an account. +pub fn account(name: &'static str) -> T::AccountId +where + T::AccountId: From, +{ + bench_account(name, 0, 0) +} + +pub fn origin(acc: &T::AccountId) -> RawOrigin { + RawOrigin::Signed(acc.clone()) +} + +pub fn bounded_string(name: &str) -> BoundedVec::StringLimit> { + BoundedVec::truncate_from(name.as_bytes().to_vec()) +} + +pub fn max_bounded_string() -> BoundedVec::StringLimit> { + let mut max_string = BoundedVec::new(); + for _ in 1..T::StringLimit::get() { + max_string.force_push(b'a'); + } + max_string +} + +pub fn setup_resolver( + caller: T::AccountId, + identifier: BoundedVec::StringLimit>, +) -> BoundedVec::StringLimit> { + let service_endpoints = BoundedVec::truncate_from(vec![bounded_string::( + "https://service-endpoint.one.two.three", + )]); + + assert_ok!(Sylo::::register_resolver( + RawOrigin::Signed(caller).into(), + identifier.clone(), + service_endpoints, + )); + + return identifier; +} + +pub fn setup_validation_record( + caller: T::AccountId, +) -> BoundedVec::StringLimit> { + let data_id = bounded_string::("data-id"); + let resolvers = BoundedVec::new(); + let data_type = bounded_string::("data-type"); + let tags = BoundedVec::new(); + let checksum = H256::from_low_u64_be(123); + + assert_ok!(Sylo::::create_validation_record( + RawOrigin::Signed(caller).into(), + data_id.clone(), + resolvers, + data_type, + tags, + checksum, + )); + + return data_id; +} + +benchmarks! { + where_clause { where ::AccountId: From + Into } + + set_payment_asset {}: _(RawOrigin::Root, 24) + verify { + assert_eq!(SyloAssetId::::get(), Some(24)); + } + + set_sylo_resolver_method { + let method = bounded_string::("sylo-resolver-method"); + }: _(RawOrigin::Root, method.clone()) + verify { + assert_eq!(SyloResolverMethod::::get(), method); + } + + register_resolver { + let p in 1 .. T::MaxResolvers::get(); + + let alice = account::("Alice"); + + let identifier = max_bounded_string::(); + + let mut service_endpoints = BoundedVec::new(); + for _ in 1..p { + service_endpoints.force_push(max_bounded_string::()); + } + }: _(origin::(&alice), identifier.clone(), service_endpoints.clone()) + verify { + assert_eq!(Resolvers::::get(identifier), Some(Resolver { + controller: alice, service_endpoints + })); + } + + update_resolver { + let p in 1 .. 
T::MaxServiceEndpoints::get(); + + let alice = account::("Alice"); + + let identifier = setup_resolver::(alice.clone(), bounded_string::("sylo-data-resolver")); + + let mut service_endpoints = BoundedVec::new(); + for _ in 1..p { + service_endpoints.force_push(max_bounded_string::()); + } + }: _(origin::(&alice), identifier.clone(), service_endpoints.clone()) + verify { + assert_eq!(Resolvers::::get(identifier), Some(Resolver { + controller: alice, service_endpoints + })); + } + + deregister_resolver { + let alice = account::("Alice"); + + let identifier = setup_resolver::(alice.clone(), bounded_string::("sylo-data-resolver")); + }: _(origin::(&alice), identifier.clone()) + verify { + assert_eq!(Resolvers::::get(identifier), None); + } + + create_validation_record { + let q in 1 .. T::MaxResolvers::get(); + let r in 1 .. T::MaxTags::get(); + + let alice = account::("Alice"); + + let data_id = bounded_string::("data-id"); + + let mut resolvers = BoundedVec::new(); + for i in 1 .. q { + // create a maximum sized resolver id that is unique to each + // resolver + let mut resolver_id = String::from("sylo-resolver"); + resolver_id.push_str(i.to_string().as_str()); + let mut resolver_id = bounded_string::(resolver_id.as_str()); + for _ in 1..T::StringLimit::get() { + resolver_id.force_push(b'a'); + } + + let resolver_id = setup_resolver::(alice.clone(), resolver_id); + resolvers.force_push(ResolverId { + method: max_bounded_string::(), + identifier: resolver_id, + }); + } + + let data_type = max_bounded_string::(); + + let mut tags = BoundedVec::new(); + for _ in 1 .. r { + tags.force_push(max_bounded_string::()); + } + + let checksum = H256::from_low_u64_be(123); + + let block: BlockNumberFor = 1_u32.into(); + }: _(origin::(&alice), data_id.clone(), resolvers.clone(), data_type.clone(), tags.clone(), checksum.clone()) + verify { + assert_eq!(ValidationRecords::::get(&alice, &data_id), Some(ValidationRecord { + author: alice, + resolvers: resolvers, + data_type: data_type, + tags: tags, + entries: BoundedVec::truncate_from(vec![ValidationEntry { + checksum, + block, + }]), + })); + } + + add_validation_record_entry { + let alice = account::("Alice"); + + let data_id = setup_validation_record::(alice.clone()); + + let checksum = H256::from_low_u64_be(123); + }: _(origin::(&alice), data_id.clone(), checksum.clone()) + verify { + assert_eq!(ValidationRecords::::get(&alice, &data_id), Some(ValidationRecord { + author: alice, + resolvers: BoundedVec::new(), + data_type: bounded_string::("data-type"), + tags: BoundedVec::new(), + entries: BoundedVec::truncate_from(vec![ValidationEntry { + checksum, + block: 0_u32.into(), + }, ValidationEntry { + checksum, + block: 1_u32.into(), + }]), + })); + } + + update_validation_record { + let q in 1 .. T::MaxResolvers::get(); + let r in 1 .. T::MaxTags::get(); + + let alice = account::("Alice"); + + let data_id = setup_validation_record::(alice.clone()); + + let mut resolvers = BoundedVec::new(); + for i in 1 .. 
q { + // create a maximum sized resolver id that is unique to each + // resolver + let mut resolver_id = String::from("sylo-resolver"); + resolver_id.push_str(i.to_string().as_str()); + let mut resolver_id = bounded_string::(resolver_id.as_str()); + for _ in 1..T::StringLimit::get() { + resolver_id.force_push(b'a'); + } + + let resolver_id = setup_resolver::(alice.clone(), resolver_id); + resolvers.force_push(ResolverId { + method: max_bounded_string::(), + identifier: resolver_id, + }); + } + + let data_type = max_bounded_string::(); + + let mut tags = BoundedVec::new(); + for _ in 1 .. r { + tags.force_push(max_bounded_string::()); + } + + let block: BlockNumberFor = 1_u32.into(); + }: _(origin::(&alice), data_id.clone(), Some(resolvers.clone()), Some(data_type.clone()), Some(tags.clone())) + verify { + let validation_record = ValidationRecords::::get(&alice, &data_id).unwrap(); + assert_eq!(validation_record.resolvers, resolvers); + assert_eq!(validation_record.data_type, data_type); + assert_eq!(validation_record.tags, tags); + } + + delete_validation_record { + let alice = account::("Alice"); + + let data_id = setup_validation_record::(alice.clone()); + }: _(origin::(&alice), data_id.clone()) + verify { + assert_eq!(ValidationRecords::::get(&alice, &data_id), None); + } +} + +impl_benchmark_test_suite!( + Sylo, + seed_primitives::test_utils::TestExt::::default().build(), + crate::mock::Test +); diff --git a/pallet/sylo-data-verification/src/lib.rs b/pallet/sylo-data-verification/src/lib.rs new file mode 100644 index 000000000..ea1389572 --- /dev/null +++ b/pallet/sylo-data-verification/src/lib.rs @@ -0,0 +1,507 @@ +// Copyright 2022-2023 Futureverse Corporation Limited +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// You may obtain a copy of the License at the root of this project source code +#![cfg_attr(not(feature = "std"), no_std)] +extern crate alloc; + +pub use pallet::*; + +use frame_support::{ + dispatch::{Dispatchable, GetDispatchInfo, PostDispatchInfo}, + pallet_prelude::*, + traits::IsSubType, +}; +use frame_system::pallet_prelude::*; +use seed_primitives::AssetId; +use sp_core::H256; +use sp_std::prelude::*; +use sp_std::{convert::TryInto, vec}; + +#[cfg(feature = "runtime-benchmarks")] +mod benchmarking; +#[cfg(test)] +mod mock; +#[cfg(test)] +mod tests; +pub mod types; + +pub use types::*; + +pub mod weights; +pub use weights::WeightInfo; + +#[frame_support::pallet] +pub mod pallet { + use super::*; + + /// The current storage version. + const STORAGE_VERSION: StorageVersion = StorageVersion::new(1); + + #[pallet::pallet] + #[pallet::storage_version(STORAGE_VERSION)] + pub struct Pallet(_); + + #[pallet::config] + pub trait Config: frame_system::Config { + /// The overarching call type. 
+ type RuntimeCall: Parameter + + Dispatchable + + GetDispatchInfo + + From> + + IsSubType>; + + /// The system event type + type RuntimeEvent: From> + IsType<::RuntimeEvent>; + + /// Allowed origins to set payment asset and reserved sylo method + type ApproveOrigin: EnsureOrigin; + + /// Interface to access weight values + type WeightInfo: WeightInfo; + + /// The maximum number of resolvers in a validation record. + #[pallet::constant] + type MaxResolvers: Get; + + /// The maximum number of tags in a validation record. + #[pallet::constant] + type MaxTags: Get; + + /// The maximum number of validation entries in a record. + #[pallet::constant] + type MaxEntries: Get; + + /// The maximum number of service endpoints for a registered resolver. + #[pallet::constant] + type MaxServiceEndpoints: Get; + + /// The max length of strings used within the Sylo Pallet. This limits + /// the maximum size for resolver identifiers, data identifier, service + /// endpoint strings, and tag strings. + #[pallet::constant] + type StringLimit: Get; + } + + /// The default string used as the reserved method for sylo resolvers + #[pallet::type_value] + pub fn DefaultReservedSyloResolvedMethod() -> BoundedVec { + BoundedVec::truncate_from(b"sylo-data".to_vec()) + } + + #[pallet::storage] + pub type SyloAssetId = StorageValue<_, AssetId, OptionQuery>; + + #[pallet::storage] + pub type SyloResolverMethod = StorageValue< + _, + BoundedVec, + ValueQuery, + DefaultReservedSyloResolvedMethod, + >; + + #[pallet::storage] + pub type Resolvers = StorageMap< + _, + Twox64Concat, + BoundedVec, + Resolver, + >; + + #[pallet::storage] + pub type ValidationRecords = StorageDoubleMap< + _, + Twox64Concat, + T::AccountId, + Twox64Concat, + BoundedVec, + ValidationRecord< + T::AccountId, + BlockNumberFor, + T::MaxResolvers, + T::MaxTags, + T::MaxEntries, + T::StringLimit, + >, + >; + + #[pallet::error] + pub enum Error { + /// The Resolver identifier is already in use + ResolverAlreadyRegistered, + /// The Resolver has not been registered + ResolverNotRegistered, + /// Account is not controller of resolver + NotController, + /// A validation record with the given data id has already been created + RecordAlreadyCreated, + /// The validation record to be updated has not been created + NoValidationRecord, + } + + #[pallet::event] + #[pallet::generate_deposit(pub(crate) fn deposit_event)] + pub enum Event { + /// The asset used to pay for extrinsics has been set + PaymentAssetSet { asset_id: AssetId }, + /// The string reserved for the method used by sylo resolvers has been set + SyloResolverMethodSet { method: Vec }, + /// A new resolver has been registered and set in storage + ResolverRegistered { + id: Vec, + controller: T::AccountId, + service_endpoints: BoundedVec, T::MaxServiceEndpoints>, + }, + /// An existing resolver has had its service endpoints updated + ResolverUpdated { + id: Vec, + controller: T::AccountId, + service_endpoints: BoundedVec, T::MaxServiceEndpoints>, + }, + /// An existing resolver has been deregistered and removed from storage + ResolverDeregistered { id: Vec }, + /// A new validation record has been created and set in storage + ValidationRecordCreated { author: T::AccountId, id: Vec }, + /// An entry of an existing validation record has been added + ValidationEntryAdded { author: T::AccountId, id: Vec, checksum: H256 }, + /// An existing validation record has had its fields updated + ValidationRecordUpdated { + author: T::AccountId, + id: Vec, + resolvers: Option>>, + data_type: Option>, + tags: Option>>, +
}, + /// An existing validation record has been deleted and removed from + /// storage + ValidationRecordDeleted { author: T::AccountId, id: Vec }, + } + + #[pallet::call] + impl Pallet { + /// Set the asset used to pay for sylo extrinsics. + /// + /// This operation requires root access. + #[pallet::call_index(0)] + #[pallet::weight({ + T::WeightInfo::set_payment_asset() + })] + pub fn set_payment_asset(origin: OriginFor, payment_asset: AssetId) -> DispatchResult { + T::ApproveOrigin::ensure_origin(origin)?; + >::put(payment_asset); + Self::deposit_event(Event::PaymentAssetSet { asset_id: payment_asset }); + Ok(()) + } + + /// Set the string used as the reserved sylo resolver method. + /// + /// This operation requires root access. + #[pallet::call_index(1)] + #[pallet::weight({ + T::WeightInfo::set_sylo_resolver_method() + })] + pub fn set_sylo_resolver_method( + origin: OriginFor, + resolver_method: BoundedVec, + ) -> DispatchResult { + T::ApproveOrigin::ensure_origin(origin)?; + >::put(&resolver_method); + Self::deposit_event(Event::SyloResolverMethodSet { method: resolver_method.to_vec() }); + Ok(()) + } + + /// Register a new resolver. + /// + /// The caller will be set as the controller of the resolver. + #[pallet::call_index(2)] + #[pallet::weight({ + T::WeightInfo::register_resolver(service_endpoints.len() as u32) + })] + pub fn register_resolver( + origin: OriginFor, + identifier: BoundedVec, + service_endpoints: BoundedVec, T::MaxServiceEndpoints>, + ) -> DispatchResult { + let who = ensure_signed(origin)?; + + ensure!( + !>::contains_key(&identifier), + Error::::ResolverAlreadyRegistered + ); + + let resolver = + Resolver { controller: who.clone(), service_endpoints: service_endpoints.clone() }; + + >::insert(&identifier, resolver); + + Self::deposit_event(Event::ResolverRegistered { + id: identifier.to_vec(), + controller: who, + service_endpoints, + }); + + Ok(()) + } + + /// Update the service endpoints of an existing resolver. + /// + /// Caller must be the controller of the resolver. + #[pallet::call_index(3)] + #[pallet::weight({ + T::WeightInfo::update_resolver(service_endpoints.len() as u32) + })] + pub fn update_resolver( + origin: OriginFor, + identifier: BoundedVec, + service_endpoints: BoundedVec, T::MaxServiceEndpoints>, + ) -> DispatchResult { + let who = ensure_signed(origin)?; + + >::try_mutate(&identifier, |resolver| -> DispatchResult { + let resolver = resolver.as_mut().ok_or(Error::::ResolverNotRegistered)?; + + ensure!(who == resolver.controller, Error::::NotController); + + resolver.service_endpoints = service_endpoints.clone(); + + Self::deposit_event(Event::ResolverUpdated { + id: identifier.to_vec(), + controller: who, + service_endpoints, + }); + + Ok(()) + })?; + + Ok(()) + } + + /// Deregister an existing resolver. + /// + /// Caller must be the controller of the resolver. + #[pallet::call_index(4)] + #[pallet::weight({ + T::WeightInfo::deregister_resolver() + })] + pub fn deregister_resolver( + origin: OriginFor, + identifier: BoundedVec, + ) -> DispatchResult { + let who = ensure_signed(origin)?; + + let resolver = + >::get(&identifier).ok_or(Error::::ResolverNotRegistered)?; + + ensure!(who == resolver.controller, Error::::NotController); + + >::remove(&identifier); + + Self::deposit_event(Event::ResolverDeregistered { id: identifier.to_vec() }); + + Ok(()) + } + + /// Create a new validation record. + /// + /// The caller will be set as the record's author.
+ /// + /// For any specified resolvers which use the reserved sylo resolver + /// method, those resolvers must already be registered and exist in storage. + /// + /// The initial record entry will use the current system block for the + /// block value. + #[pallet::call_index(5)] + #[pallet::weight({ + T::WeightInfo::create_validation_record(resolvers.len() as u32, tags.len() as u32) + })] + pub fn create_validation_record( + origin: OriginFor, + data_id: BoundedVec, + resolvers: BoundedVec, T::MaxResolvers>, + data_type: BoundedVec, + tags: BoundedVec, T::MaxTags>, + checksum: H256, + ) -> DispatchResult { + let who = ensure_signed(origin)?; + + ensure!( + !>::contains_key(&who, &data_id), + Error::::RecordAlreadyCreated + ); + + Self::validate_sylo_resolvers(&resolvers)?; + + let current_block = >::block_number(); + + let record = ValidationRecord { + author: who.clone(), + resolvers, + data_type, + tags, + entries: BoundedVec::truncate_from(vec![ValidationEntry { + checksum, + block: current_block, + }]), + }; + + >::insert(&who, &data_id, record); + + Self::deposit_event(Event::ValidationRecordCreated { + author: who, + id: data_id.to_vec(), + }); + + Ok(()) + } + + /// Add a new entry to an existing validation record. + /// + /// The current block will be used as the entry's block number. + /// + /// Caller must be the author of the record. + #[pallet::call_index(6)] + #[pallet::weight({ + T::WeightInfo::add_validation_record_entry() + })] + pub fn add_validation_record_entry( + origin: OriginFor, + data_id: BoundedVec, + checksum: H256, + ) -> DispatchResult { + let who = ensure_signed(origin)?; + + >::try_mutate(&who, &data_id, |record| -> DispatchResult { + let record = record.as_mut().ok_or(Error::::NoValidationRecord)?; + + record.entries.force_push(ValidationEntry { + checksum, + block: >::block_number(), + }); + + Self::deposit_event(Event::ValidationEntryAdded { + author: who.clone(), + id: data_id.to_vec(), + checksum, + }); + + Ok(()) + })?; + + Ok(()) + } + + /// Update a validation record's fields. The call takes in an Option + /// value for the fields: resolvers, data_type, and tags. + /// + /// Setting those fields to Some value will update the field in storage, + /// whilst setting to None will be a no-op. + /// + /// Caller must be the author of the record. 
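As the doc comment above describes, each `Option` field of this call is applied independently: `Some` replaces the stored value, `None` leaves it untouched. A hedged, test-style sketch of replacing only the tags (the `author`, `data_id` and `new_tags` bindings are assumed for illustration, mirroring the unit tests later in this patch):

// Illustrative only; not part of the pallet source.
assert_ok!(SyloDataVerification::update_validation_record(
    RawOrigin::Signed(author.clone()).into(),
    data_id.clone(),
    None,           // keep the existing resolvers
    None,           // keep the existing data_type
    Some(new_tags), // replace only the tags
));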
+ #[pallet::call_index(7)] + #[pallet::weight({ + T::WeightInfo::update_validation_record( + resolvers.as_ref().map_or(0, |v| v.len() as u32), + tags.as_ref().map_or(0, |v| v.len() as u32) + ) + })] + pub fn update_validation_record( + origin: OriginFor, + data_id: BoundedVec, + resolvers: Option, T::MaxResolvers>>, + data_type: Option>, + tags: Option, T::MaxTags>>, + ) -> DispatchResult { + let who = ensure_signed(origin)?; + + >::try_mutate(&who, &data_id, |record| -> DispatchResult { + let record = record.as_mut().ok_or(Error::::NoValidationRecord)?; + + if let Some(ref new_resolvers) = resolvers { + Self::validate_sylo_resolvers(new_resolvers)?; + record.resolvers = new_resolvers.clone(); + } + + if let Some(ref new_data_type) = data_type { + record.data_type = new_data_type.clone(); + } + + if let Some(ref new_tags) = tags { + record.tags = new_tags.clone(); + } + + Self::deposit_event(Event::ValidationRecordUpdated { + author: who.clone(), + id: data_id.to_vec(), + resolvers: resolvers + .map(|r| r.iter().map(|resolver| resolver.to_did()).collect()), + data_type: data_type.map(|dt| dt.to_vec()), + tags: tags.map(|t| t.iter().map(|tag| tag.to_vec()).collect()), + }); + + Ok(()) + })?; + + Ok(()) + } + + /// Delete an existing validation record. + /// + /// Caller must be the author of the record. + #[pallet::call_index(8)] + #[pallet::weight({ + T::WeightInfo::delete_validation_record() + })] + pub fn delete_validation_record( + origin: OriginFor, + data_id: BoundedVec, + ) -> DispatchResult { + let who = ensure_signed(origin)?; + + ensure!( + >::contains_key(&who, &data_id), + Error::::NoValidationRecord + ); + + >::remove(&who, &data_id); + + Self::deposit_event(Event::ValidationRecordDeleted { + author: who, + id: data_id.to_vec(), + }); + + Ok(()) + } + } + + impl Pallet { + pub fn validate_sylo_resolvers( + resolvers: &BoundedVec, T::MaxResolvers>, + ) -> DispatchResult { + let reserved_method = >::get(); + + // Ensure any sylo data resolvers are already registered + resolvers + .iter() + .filter(|resolver| resolver.method == reserved_method) + .try_for_each(|resolver| -> DispatchResult { + ensure!( + >::contains_key(&resolver.identifier), + Error::::ResolverNotRegistered + ); + Ok(()) + })?; + + Ok(()) + } + } +} diff --git a/pallet/sylo-data-verification/src/mock.rs b/pallet/sylo-data-verification/src/mock.rs new file mode 100644 index 000000000..f76bcc115 --- /dev/null +++ b/pallet/sylo-data-verification/src/mock.rs @@ -0,0 +1,52 @@ +// Copyright 2022-2023 Futureverse Corporation Limited +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// You may obtain a copy of the License at the root of this project source code + +use super::*; +use crate::{self as pallet_sylo_data_verification}; +use seed_pallet_common::test_prelude::*; + +construct_runtime!( + pub enum Test { + System: frame_system, + Balances: pallet_balances, + Assets: pallet_assets, + AssetsExt: pallet_assets_ext, + SyloDataVerification: pallet_sylo_data_verification, + } +); + +impl_frame_system_config!(Test); +impl_pallet_balance_config!(Test); +impl_pallet_assets_config!(Test); +impl_pallet_assets_ext_config!(Test); + +parameter_types! { + pub const MaxResolvers: u32 = 10; + pub const MaxTags: u32 = 10; + pub const MaxEntries: u32 = 100; + pub const MaxServiceEndpoints: u32 = 10; + pub const StringLimit: u32 = 250; +} +impl Config for Test { + type RuntimeCall = RuntimeCall; + type RuntimeEvent = RuntimeEvent; + type ApproveOrigin = EnsureRoot; + type MaxResolvers = MaxResolvers; + type MaxTags = MaxTags; + type MaxEntries = MaxEntries; + type MaxServiceEndpoints = MaxServiceEndpoints; + type StringLimit = StringLimit; + type WeightInfo = (); +} diff --git a/pallet/sylo-data-verification/src/tests.rs b/pallet/sylo-data-verification/src/tests.rs new file mode 100644 index 000000000..ab76246b9 --- /dev/null +++ b/pallet/sylo-data-verification/src/tests.rs @@ -0,0 +1,1009 @@ +// Copyright 2022-2023 Futureverse Corporation Limited +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// You may obtain a copy of the License at the root of this project source code + +use super::*; +use mock::{RuntimeEvent as MockEvent, SyloDataVerification, System, Test}; +use seed_pallet_common::test_prelude::*; + +fn create_and_register_resolver( + identifier: BoundedVec::StringLimit>, + service_endpoints: Vec::StringLimit>>, +) -> ( + AccountId, + BoundedVec::StringLimit>, + BoundedVec< + BoundedVec::StringLimit>, + ::MaxServiceEndpoints, + >, +) { + let controller: AccountId = create_account(1); + + let service_endpoints = + BoundedVec::<_, ::MaxServiceEndpoints>::try_from(service_endpoints) + .unwrap(); + + assert_ok!(SyloDataVerification::register_resolver( + RawOrigin::Signed(controller.clone()).into(), + identifier.clone(), + service_endpoints.clone(), + )); + + (controller, identifier, service_endpoints) +} + +fn create_initial_validation_record( + author: ::AccountId, + data_id: &str, + resolvers: Vec<(&str, &str)>, + data_type: &str, + tags: Vec<&str>, +) -> ( + BoundedVec, + BoundedVec, mock::MaxResolvers>, + BoundedVec, + BoundedVec, mock::MaxTags>, + H256, + ValidationRecord< + ::AccountId, + BlockNumberFor, + mock::MaxResolvers, + mock::MaxTags, + mock::MaxEntries, + mock::StringLimit, + >, +) { + let data_id = bounded_string(data_id); + let resolvers = BoundedVec::truncate_from( + resolvers + .iter() + .map(|(method, identifier)| create_resolver_id(method, identifier)) + .collect(), + ); + let data_type = bounded_string(data_type); + let tags = BoundedVec::truncate_from(tags.iter().map(|tag| bounded_string(tag)).collect()); + let checksum = H256::from_low_u64_be(123); + + let record = ValidationRecord { + author, + resolvers: resolvers.clone(), + data_type: data_type.clone(), + tags: tags.clone(), + entries: BoundedVec::truncate_from(vec![ValidationEntry { + checksum, + block: System::block_number(), + }]), + }; + + return (data_id, resolvers, data_type, tags, checksum, record); +} + +fn create_resolver_id(method: &str, identifier: &str) -> ResolverId<::StringLimit> { + ResolverId { + method: BoundedVec::truncate_from(method.as_bytes().to_vec()), + identifier: BoundedVec::truncate_from(identifier.as_bytes().to_vec()), + } +} + +fn bounded_string(str: &str) -> BoundedVec::StringLimit> { + BoundedVec::truncate_from(str.as_bytes().to_vec()) +} + +mod set_payment_asset { + use super::*; + + #[test] + fn set_payment_asset_works() { + TestExt::::default().build().execute_with(|| { + assert_ok!(SyloDataVerification::set_payment_asset(RawOrigin::Root.into(), 50)); + assert_eq!(SyloAssetId::::get(), Some(50)); + + // Check event + System::assert_last_event(MockEvent::SyloDataVerification( + crate::Event::PaymentAssetSet { asset_id: 50 }, + )); + }); + } + + #[test] + fn set_payment_asset_not_root_fails() { + TestExt::::default().build().execute_with(|| { + let new_account: AccountId = create_account(1); + + assert_noop!( + SyloDataVerification::set_payment_asset(RawOrigin::Signed(new_account).into(), 50), + BadOrigin + ); + }); + } +} + +mod set_sylo_resolver_method { + use super::*; + + #[test] + fn set_sylo_resolver_method_works() { + TestExt::::default().build().execute_with(|| { + let method = bounded_string("sylo"); + + assert_ok!(SyloDataVerification::set_sylo_resolver_method( + RawOrigin::Root.into(), + method.clone() + )); + assert_eq!(SyloResolverMethod::::get(), method.clone()); + + // Check event + System::assert_last_event(MockEvent::SyloDataVerification( + crate::Event::SyloResolverMethodSet { method: method.to_vec() }, + )); + }); + } + + #[test] + fn 
set_sylo_resolver_method_not_root_fails() { + TestExt::::default().build().execute_with(|| { + let new_account: AccountId = create_account(1); + + assert_noop!( + SyloDataVerification::set_sylo_resolver_method( + RawOrigin::Signed(new_account).into(), + bounded_string("sylo") + ), + BadOrigin + ); + }); + } +} + +mod resolver_registration { + use super::*; + + #[test] + fn resolver_registration_works() { + TestExt::::default().build().execute_with(|| { + let (controller, identifier, service_endpoints) = create_and_register_resolver( + bounded_string("test-resolver"), + vec![ + bounded_string("https://endpoint.one"), + bounded_string("https://endpoint.two"), + ], + ); + + System::assert_last_event(MockEvent::SyloDataVerification( + Event::::ResolverRegistered { + id: identifier.to_vec(), + controller: controller.clone(), + service_endpoints: service_endpoints.clone(), + }, + )); + + assert_eq!( + Resolvers::::get(identifier).unwrap(), + Resolver { controller, service_endpoints } + ) + }); + } + + #[test] + fn resolver_register_existing_fails() { + TestExt::::default().build().execute_with(|| { + let (controller, identifier, service_endpoints) = create_and_register_resolver( + bounded_string("test-resolver"), + vec![ + bounded_string("https://endpoint.one"), + bounded_string("https://endpoint.two"), + ], + ); + + assert_noop!( + SyloDataVerification::register_resolver( + RawOrigin::Signed(controller).into(), + identifier, + service_endpoints, + ), + Error::::ResolverAlreadyRegistered, + ); + }); + } +} + +mod resolver_update { + use super::*; + + #[test] + fn resolver_update_works() { + TestExt::::default().build().execute_with(|| { + let (controller, identifier, mut service_endpoints) = create_and_register_resolver( + bounded_string("test-resolver"), + vec![ + bounded_string("https://endpoint.one"), + bounded_string("https://endpoint.two"), + ], + ); + + service_endpoints.force_push(bounded_string("https://endpoint.three")); + + assert_ok!(SyloDataVerification::update_resolver( + RawOrigin::Signed(controller.clone()).into(), + identifier.clone(), + service_endpoints.clone(), + )); + + System::assert_last_event(MockEvent::SyloDataVerification( + Event::::ResolverUpdated { + id: identifier.to_vec(), + controller: controller.clone(), + service_endpoints: service_endpoints.clone(), + }, + )); + + assert_eq!( + Resolvers::::get(identifier).unwrap(), + Resolver { controller, service_endpoints } + ) + }); + } + + #[test] + fn resolver_update_not_existing_fails() { + TestExt::::default().build().execute_with(|| { + let controller: AccountId = create_account(1); + + let identifier = bounded_string("test-resolver"); + + let service_endpoints = + BoundedVec::<_, ::MaxServiceEndpoints>::try_from(vec![]).unwrap(); + + assert_noop!( + SyloDataVerification::update_resolver( + RawOrigin::Signed(controller).into(), + identifier, + service_endpoints, + ), + Error::::ResolverNotRegistered, + ); + }); + } + + #[test] + fn resolver_update_not_controller_fails() { + TestExt::::default().build().execute_with(|| { + let (_, identifier, service_endpoints) = create_and_register_resolver( + bounded_string("test-resolver"), + vec![ + bounded_string("https://endpoint.one"), + bounded_string("https://endpoint.two"), + ], + ); + + let not_controller: AccountId = create_account(2); + + assert_noop!( + SyloDataVerification::update_resolver( + RawOrigin::Signed(not_controller).into(), + identifier, + service_endpoints, + ), + Error::::NotController, + ); + }); + } +} + +mod resolver_unregistration { + use super::*; + + 
#[test] + fn resolver_unregistration_works() { + TestExt::::default().build().execute_with(|| { + let (controller, identifier, _) = create_and_register_resolver( + bounded_string("test-resolver"), + vec![ + bounded_string("https://endpoint.one"), + bounded_string("https://endpoint.two"), + ], + ); + + assert_ok!(SyloDataVerification::deregister_resolver( + RawOrigin::Signed(controller.clone()).into(), + identifier.clone(), + )); + + System::assert_last_event(MockEvent::SyloDataVerification( + Event::::ResolverDeregistered { id: identifier.to_vec() }, + )); + + assert!(Resolvers::::get(identifier).is_none()); + }); + } + + #[test] + fn resolver_deregister_not_existing_fails() { + TestExt::::default().build().execute_with(|| { + let controller: AccountId = create_account(1); + + let identifier = bounded_string("test-resolver"); + + assert_noop!( + SyloDataVerification::deregister_resolver( + RawOrigin::Signed(controller).into(), + identifier, + ), + Error::::ResolverNotRegistered, + ); + }); + } + + #[test] + fn resolver_deregister_not_controller_fails() { + TestExt::::default().build().execute_with(|| { + let (_, identifier, _) = create_and_register_resolver( + bounded_string("test-resolver"), + vec![ + bounded_string("https://endpoint.one"), + bounded_string("https://endpoint.two"), + ], + ); + + let not_controller: AccountId = create_account(2); + + assert_noop!( + SyloDataVerification::deregister_resolver( + RawOrigin::Signed(not_controller).into(), + identifier, + ), + Error::::NotController, + ); + }); + } +} + +mod create_validation_record { + use core::str; + + use sp_core::hexdisplay::AsBytesRef; + + use super::*; + + #[test] + fn create_validation_records_works() { + TestExt::::default().build().execute_with(|| { + let alice: AccountId = create_account(2); + + let (data_id, resolvers, data_type, tags, checksum, record) = + create_initial_validation_record( + alice, + "data_id", + vec![("method-1", "resolver-1")], + "data_type", + vec!["tag-1", "tag-2"], + ); + + assert_ok!(SyloDataVerification::create_validation_record( + RawOrigin::Signed(alice.clone()).into(), + data_id.clone(), + resolvers.clone(), + data_type.clone(), + tags.clone(), + checksum.clone() + )); + + System::assert_last_event(MockEvent::SyloDataVerification( + Event::::ValidationRecordCreated { + author: alice.clone(), + id: data_id.clone().to_vec(), + }, + )); + + assert_eq!( + ValidationRecords::::get(alice.clone(), data_id.clone()).unwrap(), + record + ); + }); + } + + #[test] + fn create_existing_validation_record_fails() { + TestExt::::default().build().execute_with(|| { + let alice: AccountId = create_account(2); + + let (data_id, resolvers, data_type, tags, checksum, _) = + create_initial_validation_record( + alice, + "data_id", + vec![("method-1", "resolver-1")], + "data_type", + vec!["tag-1", "tag-2"], + ); + + assert_ok!(SyloDataVerification::create_validation_record( + RawOrigin::Signed(alice.clone()).into(), + data_id.clone(), + resolvers.clone(), + data_type.clone(), + tags.clone(), + checksum.clone() + )); + + assert_noop!( + SyloDataVerification::create_validation_record( + RawOrigin::Signed(alice.clone()).into(), + data_id.clone(), + resolvers.clone(), + data_type.clone(), + tags.clone(), + checksum.clone() + ), + Error::::RecordAlreadyCreated + ); + }); + } + + #[test] + fn create_validation_records_with_sylo_resolvers_works() { + TestExt::::default().build().execute_with(|| { + // Ensure sylo resolver is registered + let (_, identifier, _) = create_and_register_resolver( + 
bounded_string("test-resolver"), + vec![bounded_string("https://endpoint.one")], + ); + + let alice: AccountId = create_account(2); + + let (data_id, resolvers, data_type, tags, checksum, record) = + create_initial_validation_record( + alice, + "data_id", + vec![( + str::from_utf8(SyloResolverMethod::::get().as_bytes_ref()).unwrap(), + str::from_utf8(identifier.to_vec().as_bytes_ref()).unwrap(), + )], + "data_type", + vec!["tag-1", "tag-2"], + ); + + assert_ok!(SyloDataVerification::create_validation_record( + RawOrigin::Signed(alice.clone()).into(), + data_id.clone(), + resolvers.clone(), + data_type.clone(), + tags.clone(), + checksum.clone() + )); + + System::assert_last_event(MockEvent::SyloDataVerification( + Event::::ValidationRecordCreated { + author: alice.clone(), + id: data_id.clone().to_vec(), + }, + )); + + assert_eq!( + ValidationRecords::::get(alice.clone(), data_id.clone()).unwrap(), + record + ); + }); + } + + #[test] + fn create_validation_record_with_deregistered_sylo_resolver_fails() { + TestExt::::default().build().execute_with(|| { + let alice: AccountId = create_account(2); + + let (data_id, resolvers, data_type, tags, checksum, _) = + create_initial_validation_record( + alice, + "data_id", + vec![( + str::from_utf8(SyloResolverMethod::::get().as_bytes_ref()).unwrap(), + // identifier references a non-existent resolver + "deregistered-resolver", + )], + "data_type", + vec!["tag-1", "tag-2"], + ); + + assert_noop!( + SyloDataVerification::create_validation_record( + RawOrigin::Signed(alice.clone()).into(), + data_id.clone(), + resolvers.clone(), + data_type.clone(), + tags.clone(), + checksum.clone() + ), + Error::::ResolverNotRegistered + ); + }); + } + + #[test] + fn create_multiple_validation_records_with_same_author_works() { + TestExt::::default().build().execute_with(|| { + let alice: AccountId = create_account(2); + + for i in 1..5 { + let (data_id, resolvers, data_type, tags, checksum, record) = + create_initial_validation_record( + alice, + format!("data_id_{i}").as_str(), + vec![("method-1", "resolver-1")], + "data_type", + vec!["tag-1", "tag-2"], + ); + + assert_ok!(SyloDataVerification::create_validation_record( + RawOrigin::Signed(alice.clone()).into(), + data_id.clone(), + resolvers.clone(), + data_type.clone(), + tags.clone(), + checksum.clone() + )); + + System::assert_last_event(MockEvent::SyloDataVerification( + Event::::ValidationRecordCreated { + author: alice.clone(), + id: data_id.clone().to_vec(), + }, + )); + + assert_eq!( + ValidationRecords::::get(alice.clone(), data_id.clone()).unwrap(), + record + ); + } + }); + } + + #[test] + fn create_validation_records_with_different_author_works() { + TestExt::::default().build().execute_with(|| { + for i in 2..5 { + let author: AccountId = create_account(i); + + let (data_id, resolvers, data_type, tags, checksum, record) = + create_initial_validation_record( + author, + // use the same data id for each author's validation record + format!("data_id").as_str(), + vec![("method-1", "resolver-1")], + "data_type", + vec!["tag-1", "tag-2"], + ); + + assert_ok!(SyloDataVerification::create_validation_record( + RawOrigin::Signed(author.clone()).into(), + data_id.clone(), + resolvers.clone(), + data_type.clone(), + tags.clone(), + checksum.clone() + )); + + System::assert_last_event(MockEvent::SyloDataVerification( + Event::::ValidationRecordCreated { + author: author.clone(), + id: data_id.clone().to_vec(), + }, + )); + + assert_eq!( + ValidationRecords::::get(author.clone(), data_id.clone()).unwrap(), + 
record + ); + } + }); + } +} + +mod add_validation_record_entry { + use super::*; + + #[test] + fn add_validation_entry_works() { + TestExt::::default().build().execute_with(|| { + let alice: AccountId = create_account(2); + + let (data_id, resolvers, data_type, tags, checksum, _) = + create_initial_validation_record( + alice, + "data_id", + vec![("method-1", "resolver-1")], + "data_type", + vec!["tag-1", "tag-2"], + ); + + assert_ok!(SyloDataVerification::create_validation_record( + RawOrigin::Signed(alice.clone()).into(), + data_id.clone(), + resolvers.clone(), + data_type.clone(), + tags.clone(), + checksum.clone() + )); + + for i in 2..5 { + let checksum = H256::from_low_u64_be(i); + + assert_ok!(SyloDataVerification::add_validation_record_entry( + RawOrigin::Signed(alice.clone()).into(), + data_id.clone(), + checksum.clone() + )); + + System::assert_last_event(MockEvent::SyloDataVerification( + Event::::ValidationEntryAdded { + author: alice.clone(), + id: data_id.clone().to_vec(), + checksum, + }, + )); + + let record = + ValidationRecords::::get(alice.clone(), data_id.clone()).unwrap(); + + assert!(record.entries.len() as u64 == i); + assert!(record.entries.last().unwrap().checksum == checksum); + } + }); + } + + #[test] + fn add_not_existing_validation_entry_fails() { + TestExt::::default().build().execute_with(|| { + let alice: AccountId = create_account(2); + + let (data_id, _, _, _, checksum, _) = + create_initial_validation_record(alice, "data_id", vec![], "data_type", vec![]); + + assert_noop!( + SyloDataVerification::add_validation_record_entry( + RawOrigin::Signed(alice.clone()).into(), + data_id.clone(), + checksum.clone() + ), + Error::::NoValidationRecord + ); + }); + } + + #[test] + fn only_author_can_add_validation_entry() { + TestExt::::default().build().execute_with(|| { + let alice: AccountId = create_account(2); + + let (data_id, resolvers, data_type, tags, checksum, _) = + create_initial_validation_record( + alice, + "data_id", + vec![("method-1", "resolver-1")], + "data_type", + vec!["tag-1", "tag-2"], + ); + + assert_ok!(SyloDataVerification::create_validation_record( + RawOrigin::Signed(alice.clone()).into(), + data_id.clone(), + resolvers.clone(), + data_type.clone(), + tags.clone(), + checksum.clone() + )); + + let bob: AccountId = create_account(3); + + assert_noop!( + SyloDataVerification::add_validation_record_entry( + RawOrigin::Signed(bob.clone()).into(), + data_id, + checksum + ), + Error::::NoValidationRecord + ); + }); + } +} + +mod update_validation_record { + use super::*; + + #[test] + fn update_validation_record_works() { + TestExt::::default().build().execute_with(|| { + let alice: AccountId = create_account(2); + + let (data_id, resolvers, data_type, tags, checksum, record) = + create_initial_validation_record( + alice, + "data_id", + vec![("method-1", "resolver-1")], + "data_type", + vec!["tag-1", "tag-2"], + ); + + assert_ok!(SyloDataVerification::create_validation_record( + RawOrigin::Signed(alice.clone()).into(), + data_id.clone(), + resolvers.clone(), + data_type.clone(), + tags.clone(), + checksum.clone() + )); + + let (_, new_resolvers, new_data_type, new_tags, _, _) = + create_initial_validation_record( + alice, + "data_id", + // add anotehr resolver + vec![("method-1", "resolver-1"), ("method-2", "resolver-2")], + // modify data type + "data_type_2", + // add more tags + vec!["tag-1", "tag-2", "tag-3"], + ); + + // Update the list of resolvers + assert_ok!(SyloDataVerification::update_validation_record( + 
RawOrigin::Signed(alice.clone()).into(), + data_id.clone(), + Some(new_resolvers.clone()), + None, + None + )); + + assert_eq!( + ValidationRecords::::get(alice.clone(), data_id.clone()).unwrap(), + ValidationRecord { + author: alice.clone(), + resolvers: new_resolvers.clone(), + data_type: data_type.clone(), + tags: tags.clone(), + entries: record.entries.clone(), + } + ); + + System::assert_last_event(MockEvent::SyloDataVerification( + Event::::ValidationRecordUpdated { + author: alice.clone(), + id: data_id.clone().to_vec(), + resolvers: Some( + new_resolvers.clone().iter().map(|resolver| resolver.to_did()).collect(), + ), + data_type: None, + tags: None, + }, + )); + + // Update the data type + assert_ok!(SyloDataVerification::update_validation_record( + RawOrigin::Signed(alice.clone()).into(), + data_id.clone(), + None, + Some(new_data_type.clone()), + None + )); + + assert_eq!( + ValidationRecords::::get(alice.clone(), data_id.clone()).unwrap(), + ValidationRecord { + author: alice.clone(), + resolvers: new_resolvers.clone(), + data_type: new_data_type.clone(), + tags: tags.clone(), + entries: record.entries.clone(), + } + ); + + System::assert_last_event(MockEvent::SyloDataVerification( + Event::::ValidationRecordUpdated { + author: alice.clone(), + id: data_id.clone().to_vec(), + resolvers: None, + data_type: Some(new_data_type.clone().to_vec()), + tags: None, + }, + )); + + // Update the list of tags + assert_ok!(SyloDataVerification::update_validation_record( + RawOrigin::Signed(alice.clone()).into(), + data_id.clone(), + None, + None, + Some(new_tags.clone()), + )); + + assert_eq!( + ValidationRecords::::get(alice.clone(), data_id.clone()).unwrap(), + ValidationRecord { + author: alice.clone(), + resolvers: new_resolvers.clone(), + data_type: new_data_type.clone(), + tags: new_tags.clone(), + entries: record.entries.clone(), + } + ); + + System::assert_last_event(MockEvent::SyloDataVerification( + Event::::ValidationRecordUpdated { + author: alice.clone(), + id: data_id.clone().to_vec(), + resolvers: None, + data_type: None, + tags: Some(new_tags.iter().map(|tag| tag.to_vec()).collect()), + }, + )); + }); + } + + #[test] + fn update_not_existing_validation_record_fails() { + TestExt::::default().build().execute_with(|| { + let alice: AccountId = create_account(2); + + let (data_id, resolvers, data_type, tags, _, _) = create_initial_validation_record( + alice, + "data_id", + vec![("method-1", "resolver-1")], + "data_type", + vec!["tag-1", "tag-2"], + ); + + assert_noop!( + SyloDataVerification::update_validation_record( + RawOrigin::Signed(alice.clone()).into(), + data_id.clone(), + Some(resolvers.clone()), + Some(data_type.clone()), + Some(tags.clone()), + ), + Error::::NoValidationRecord + ); + }); + } + + #[test] + fn only_author_can_update_validation_record() { + TestExt::::default().build().execute_with(|| { + let alice: AccountId = create_account(2); + + let (data_id, resolvers, data_type, tags, checksum, _) = + create_initial_validation_record( + alice, + "data_id", + vec![("method-1", "resolver-1")], + "data_type", + vec!["tag-1", "tag-2"], + ); + + assert_ok!(SyloDataVerification::create_validation_record( + RawOrigin::Signed(alice.clone()).into(), + data_id.clone(), + resolvers.clone(), + data_type.clone(), + tags.clone(), + checksum.clone() + )); + + let bob: AccountId = create_account(3); + + assert_noop!( + SyloDataVerification::update_validation_record( + RawOrigin::Signed(bob.clone()).into(), + data_id.clone(), + Some(resolvers.clone()), + Some(data_type.clone()), 
+ Some(tags.clone()), + ), + Error::::NoValidationRecord + ); + }); + } +} + +mod delete_validation_record { + use super::*; + + #[test] + fn delete_validation_record_works() { + TestExt::::default().build().execute_with(|| { + let alice: AccountId = create_account(2); + + let (data_id, resolvers, data_type, tags, checksum, _) = + create_initial_validation_record( + alice, + "data_id", + vec![("method-1", "resolver-1")], + "data_type", + vec!["tag-1", "tag-2"], + ); + + assert_ok!(SyloDataVerification::create_validation_record( + RawOrigin::Signed(alice.clone()).into(), + data_id.clone(), + resolvers.clone(), + data_type.clone(), + tags.clone(), + checksum.clone() + )); + + assert_ok!(SyloDataVerification::delete_validation_record( + RawOrigin::Signed(alice.clone()).into(), + data_id.clone(), + )); + }); + } + + #[test] + fn delete_not_existing_validation_record_fails() { + TestExt::::default().build().execute_with(|| { + let alice: AccountId = create_account(2); + + let (data_id, resolvers, data_type, tags, _, _) = create_initial_validation_record( + alice, + "data_id", + vec![("method-1", "resolver-1")], + "data_type", + vec!["tag-1", "tag-2"], + ); + + assert_noop!( + SyloDataVerification::update_validation_record( + RawOrigin::Signed(alice.clone()).into(), + data_id.clone(), + Some(resolvers.clone()), + Some(data_type.clone()), + Some(tags.clone()), + ), + Error::::NoValidationRecord + ); + }); + } + + #[test] + fn only_author_can_delete_validation_record() { + TestExt::::default().build().execute_with(|| { + let alice: AccountId = create_account(2); + + let (data_id, resolvers, data_type, tags, checksum, _) = + create_initial_validation_record( + alice, + "data_id", + vec![("method-1", "resolver-1")], + "data_type", + vec!["tag-1", "tag-2"], + ); + + assert_ok!(SyloDataVerification::create_validation_record( + RawOrigin::Signed(alice.clone()).into(), + data_id.clone(), + resolvers.clone(), + data_type.clone(), + tags.clone(), + checksum.clone() + )); + + let bob: AccountId = create_account(3); + + assert_noop!( + SyloDataVerification::delete_validation_record( + RawOrigin::Signed(bob.clone()).into(), + data_id.clone(), + ), + Error::::NoValidationRecord + ); + }); + } +} diff --git a/pallet/sylo-data-verification/src/types.rs b/pallet/sylo-data-verification/src/types.rs new file mode 100644 index 000000000..9f7b723ea --- /dev/null +++ b/pallet/sylo-data-verification/src/types.rs @@ -0,0 +1,101 @@ +// Copyright 2022-2023 Futureverse Corporation Limited +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// You may obtain a copy of the License at the root of this project source code + +use alloc::{format, string::String, vec::Vec}; +use codec::{Decode, Encode, MaxEncodedLen}; +use frame_support::{ + traits::Get, BoundedVec, CloneNoBound, EqNoBound, PartialEqNoBound, RuntimeDebugNoBound, +}; +use scale_info::TypeInfo; +use sp_core::H256; +use sp_std::{fmt::Debug, prelude::*}; + +#[derive( + CloneNoBound, + RuntimeDebugNoBound, + Encode, + Decode, + PartialEqNoBound, + EqNoBound, + TypeInfo, + MaxEncodedLen, +)] +#[scale_info(skip_type_params(StringLimit))] +pub struct ResolverId +where + StringLimit: Get, +{ + pub method: BoundedVec, + pub identifier: BoundedVec, +} + +impl> ResolverId { + pub fn to_did(&self) -> Vec { + let method = self.method.to_vec(); + let method = String::from_utf8_lossy(method.as_slice()); + + let identifier = self.identifier.to_vec(); + let identifier = String::from_utf8_lossy(identifier.as_slice()); + + format!("did:{method}:{identifier}").as_bytes().to_vec() + } +} + +pub type ServiceEndpoint = BoundedVec; + +#[derive( + Clone, Encode, Decode, RuntimeDebugNoBound, PartialEqNoBound, Eq, TypeInfo, MaxEncodedLen, +)] +#[scale_info(skip_type_params(MaxServiceEndpoints, StringLimit))] +pub struct Resolver +where + AccountId: Debug + PartialEq + Clone, + MaxServiceEndpoints: Get, + StringLimit: Get, +{ + pub controller: AccountId, + pub service_endpoints: BoundedVec, MaxServiceEndpoints>, +} + +#[derive( + Clone, Encode, Decode, RuntimeDebugNoBound, PartialEqNoBound, Eq, TypeInfo, MaxEncodedLen, +)] +pub struct ValidationEntry +where + BlockNumber: Debug + PartialEq + Clone, +{ + pub checksum: H256, + pub block: BlockNumber, +} + +#[derive( + Clone, Encode, Decode, RuntimeDebugNoBound, PartialEqNoBound, Eq, TypeInfo, MaxEncodedLen, +)] +#[scale_info(skip_type_params(MaxResolvers, MaxTags, MaxEntries, StringLimit))] +pub struct ValidationRecord +where + AccountId: Debug + PartialEq + Clone, + BlockNumber: Debug + PartialEq + Clone, + MaxResolvers: Get, + MaxTags: Get, + MaxEntries: Get, + StringLimit: Get, +{ + pub author: AccountId, + pub resolvers: BoundedVec, MaxResolvers>, + pub data_type: BoundedVec, + pub tags: BoundedVec, MaxTags>, + pub entries: BoundedVec, MaxEntries>, +} diff --git a/pallet/sylo-data-verification/src/weights.rs b/pallet/sylo-data-verification/src/weights.rs new file mode 100644 index 000000000..8f95bef37 --- /dev/null +++ b/pallet/sylo-data-verification/src/weights.rs @@ -0,0 +1,235 @@ +// This file is part of Substrate. + +// Copyright (C) 2022 Parity Technologies (UK) Ltd. +// SPDX-License-Identifier: Apache-2.0 + +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +//! Autogenerated weights for pallet_sylo_data_verification +//! +//! THIS FILE WAS AUTO-GENERATED USING THE SUBSTRATE BENCHMARK CLI VERSION 4.0.0-dev +//! DATE: 2025-01-16, STEPS: `50`, REPEAT: 20, LOW RANGE: `[]`, HIGH RANGE: `[]` +//! HOSTNAME: `ip-172-31-102-147`, CPU: `Intel(R) Xeon(R) CPU E5-2686 v4 @ 2.30GHz` +//! 
EXECUTION: , WASM-EXECUTION: Compiled, CHAIN: Some("dev"), DB CACHE: 1024 + +// Executed Command: +// ./target/release/seed +// benchmark +// pallet +// --chain=dev +// --steps=50 +// --repeat=20 +// --pallet=pallet-sylo-data-verification +// --extrinsic=* +// --wasm-execution=compiled +// --heap-pages=4096 +// --output +// ./pallet/sylo/src/weights.rs +// --template +// ./scripts/pallet_template.hbs + +#![cfg_attr(rustfmt, rustfmt_skip)] +#![allow(unused_parens)] +#![allow(unused_imports)] + +use frame_support::{traits::Get, weights::{Weight, constants::RocksDbWeight}}; +use sp_std::marker::PhantomData; + +/// Weight functions needed for pallet_sylo_data_verification. +pub trait WeightInfo { + fn set_payment_asset() -> Weight; + fn set_sylo_resolver_method() -> Weight; + fn register_resolver(p: u32, ) -> Weight; + fn update_resolver(p: u32, ) -> Weight; + fn deregister_resolver() -> Weight; + fn create_validation_record(q: u32, r: u32, ) -> Weight; + fn add_validation_record_entry() -> Weight; + fn update_validation_record(q: u32, r: u32, ) -> Weight; + fn delete_validation_record() -> Weight; +} + +/// Weights for pallet_sylo_data_verification using the Substrate node and recommended hardware. +pub struct SubstrateWeight(PhantomData); +impl WeightInfo for SubstrateWeight { + // Storage: `Sylo::SyloAssetId` (r:0 w:1) + // Proof: `Sylo::SyloAssetId` (`max_values`: Some(1), `max_size`: Some(4), added: 499, mode: `MaxEncodedLen`) + fn set_payment_asset() -> Weight { + Weight::from_all(24_170_000) + .saturating_add(T::DbWeight::get().writes(1)) + } + // Storage: `Sylo::SyloResolverMethod` (r:0 w:1) + // Proof: `Sylo::SyloResolverMethod` (`max_values`: Some(1), `max_size`: Some(502), added: 997, mode: `MaxEncodedLen`) + fn set_sylo_resolver_method() -> Weight { + Weight::from_all(25_267_000) + .saturating_add(T::DbWeight::get().writes(1)) + } + // Storage: `Sylo::Resolvers` (r:1 w:1) + // Proof: `Sylo::Resolvers` (`max_values`: None, `max_size`: Some(5551), added: 8026, mode: `MaxEncodedLen`) + /// The range of component `p` is `[1, 10]`. + fn register_resolver(p: u32, ) -> Weight { + Weight::from_all(40_852_871) + // Standard Error: 19_118 + .saturating_add(Weight::from_all(2_364_094_u64).saturating_mul(p as u64)) + .saturating_add(T::DbWeight::get().reads(1)) + .saturating_add(T::DbWeight::get().writes(1)) + } + // Storage: `Sylo::Resolvers` (r:1 w:1) + // Proof: `Sylo::Resolvers` (`max_values`: None, `max_size`: Some(5551), added: 8026, mode: `MaxEncodedLen`) + /// The range of component `p` is `[1, 10]`. + fn update_resolver(p: u32, ) -> Weight { + Weight::from_all(41_166_323) + // Standard Error: 15_527 + .saturating_add(Weight::from_all(2_380_835_u64).saturating_mul(p as u64)) + .saturating_add(T::DbWeight::get().reads(1)) + .saturating_add(T::DbWeight::get().writes(1)) + } + // Storage: `Sylo::Resolvers` (r:1 w:1) + // Proof: `Sylo::Resolvers` (`max_values`: None, `max_size`: Some(5551), added: 8026, mode: `MaxEncodedLen`) + fn deregister_resolver() -> Weight { + Weight::from_all(43_080_000) + .saturating_add(T::DbWeight::get().reads(1)) + .saturating_add(T::DbWeight::get().writes(1)) + } + // Storage: `Sylo::ValidationRecords` (r:1 w:1) + // Proof: `Sylo::ValidationRecords` (`max_values`: None, `max_size`: Some(19724), added: 22199, mode: `MaxEncodedLen`) + // Storage: `Sylo::SyloResolverMethod` (r:1 w:0) + // Proof: `Sylo::SyloResolverMethod` (`max_values`: Some(1), `max_size`: Some(502), added: 997, mode: `MaxEncodedLen`) + /// The range of component `q` is `[1, 10]`. 
+ /// The range of component `r` is `[1, 10]`. + fn create_validation_record(q: u32, r: u32, ) -> Weight { + Weight::from_all(49_387_366) + // Standard Error: 25_671 + .saturating_add(Weight::from_all(2_595_326_u64).saturating_mul(q as u64)) + // Standard Error: 25_671 + .saturating_add(Weight::from_all(1_271_899_u64).saturating_mul(r as u64)) + .saturating_add(T::DbWeight::get().reads(2)) + .saturating_add(T::DbWeight::get().writes(1)) + } + // Storage: `Sylo::ValidationRecords` (r:1 w:1) + // Proof: `Sylo::ValidationRecords` (`max_values`: None, `max_size`: Some(19724), added: 22199, mode: `MaxEncodedLen`) + fn add_validation_record_entry() -> Weight { + Weight::from_all(46_387_000) + .saturating_add(T::DbWeight::get().reads(1)) + .saturating_add(T::DbWeight::get().writes(1)) + } + // Storage: `Sylo::ValidationRecords` (r:1 w:1) + // Proof: `Sylo::ValidationRecords` (`max_values`: None, `max_size`: Some(19724), added: 22199, mode: `MaxEncodedLen`) + // Storage: `Sylo::SyloResolverMethod` (r:1 w:0) + // Proof: `Sylo::SyloResolverMethod` (`max_values`: Some(1), `max_size`: Some(502), added: 997, mode: `MaxEncodedLen`) + /// The range of component `q` is `[1, 10]`. + /// The range of component `r` is `[1, 10]`. + fn update_validation_record(q: u32, r: u32, ) -> Weight { + Weight::from_all(47_896_051) + // Standard Error: 31_451 + .saturating_add(Weight::from_all(7_549_938_u64).saturating_mul(q as u64)) + // Standard Error: 31_451 + .saturating_add(Weight::from_all(2_559_365_u64).saturating_mul(r as u64)) + .saturating_add(T::DbWeight::get().reads(2)) + .saturating_add(T::DbWeight::get().writes(1)) + } + // Storage: `Sylo::ValidationRecords` (r:1 w:1) + // Proof: `Sylo::ValidationRecords` (`max_values`: None, `max_size`: Some(19724), added: 22199, mode: `MaxEncodedLen`) + fn delete_validation_record() -> Weight { + Weight::from_all(43_060_000) + .saturating_add(T::DbWeight::get().reads(1)) + .saturating_add(T::DbWeight::get().writes(1)) + } +} + +// For backwards compatibility and tests +impl WeightInfo for () { + // Storage: `Sylo::SyloAssetId` (r:0 w:1) + // Proof: `Sylo::SyloAssetId` (`max_values`: Some(1), `max_size`: Some(4), added: 499, mode: `MaxEncodedLen`) + fn set_payment_asset() -> Weight { + Weight::from_all(24_170_000) + .saturating_add(RocksDbWeight::get().writes(1)) + } + // Storage: `Sylo::SyloResolverMethod` (r:0 w:1) + // Proof: `Sylo::SyloResolverMethod` (`max_values`: Some(1), `max_size`: Some(502), added: 997, mode: `MaxEncodedLen`) + fn set_sylo_resolver_method() -> Weight { + Weight::from_all(25_267_000) + .saturating_add(RocksDbWeight::get().writes(1)) + } + // Storage: `Sylo::Resolvers` (r:1 w:1) + // Proof: `Sylo::Resolvers` (`max_values`: None, `max_size`: Some(5551), added: 8026, mode: `MaxEncodedLen`) + /// The range of component `p` is `[1, 10]`. + fn register_resolver(p: u32, ) -> Weight { + Weight::from_all(40_852_871) + // Standard Error: 19_118 + .saturating_add(Weight::from_all(2_364_094_u64).saturating_mul(p as u64)) + .saturating_add(RocksDbWeight::get().reads(1)) + .saturating_add(RocksDbWeight::get().writes(1)) + } + // Storage: `Sylo::Resolvers` (r:1 w:1) + // Proof: `Sylo::Resolvers` (`max_values`: None, `max_size`: Some(5551), added: 8026, mode: `MaxEncodedLen`) + /// The range of component `p` is `[1, 10]`. 
+ fn update_resolver(p: u32, ) -> Weight { + Weight::from_all(41_166_323) + // Standard Error: 15_527 + .saturating_add(Weight::from_all(2_380_835_u64).saturating_mul(p as u64)) + .saturating_add(RocksDbWeight::get().reads(1)) + .saturating_add(RocksDbWeight::get().writes(1)) + } + // Storage: `Sylo::Resolvers` (r:1 w:1) + // Proof: `Sylo::Resolvers` (`max_values`: None, `max_size`: Some(5551), added: 8026, mode: `MaxEncodedLen`) + fn deregister_resolver() -> Weight { + Weight::from_all(43_080_000) + .saturating_add(RocksDbWeight::get().reads(1)) + .saturating_add(RocksDbWeight::get().writes(1)) + } + // Storage: `Sylo::ValidationRecords` (r:1 w:1) + // Proof: `Sylo::ValidationRecords` (`max_values`: None, `max_size`: Some(19724), added: 22199, mode: `MaxEncodedLen`) + // Storage: `Sylo::SyloResolverMethod` (r:1 w:0) + // Proof: `Sylo::SyloResolverMethod` (`max_values`: Some(1), `max_size`: Some(502), added: 997, mode: `MaxEncodedLen`) + /// The range of component `q` is `[1, 10]`. + /// The range of component `r` is `[1, 10]`. + fn create_validation_record(q: u32, r: u32, ) -> Weight { + Weight::from_all(49_387_366) + // Standard Error: 25_671 + .saturating_add(Weight::from_all(2_595_326_u64).saturating_mul(q as u64)) + // Standard Error: 25_671 + .saturating_add(Weight::from_all(1_271_899_u64).saturating_mul(r as u64)) + .saturating_add(RocksDbWeight::get().reads(2)) + .saturating_add(RocksDbWeight::get().writes(1)) + } + // Storage: `Sylo::ValidationRecords` (r:1 w:1) + // Proof: `Sylo::ValidationRecords` (`max_values`: None, `max_size`: Some(19724), added: 22199, mode: `MaxEncodedLen`) + fn add_validation_record_entry() -> Weight { + Weight::from_all(46_387_000) + .saturating_add(RocksDbWeight::get().reads(1)) + .saturating_add(RocksDbWeight::get().writes(1)) + } + // Storage: `Sylo::ValidationRecords` (r:1 w:1) + // Proof: `Sylo::ValidationRecords` (`max_values`: None, `max_size`: Some(19724), added: 22199, mode: `MaxEncodedLen`) + // Storage: `Sylo::SyloResolverMethod` (r:1 w:0) + // Proof: `Sylo::SyloResolverMethod` (`max_values`: Some(1), `max_size`: Some(502), added: 997, mode: `MaxEncodedLen`) + /// The range of component `q` is `[1, 10]`. + /// The range of component `r` is `[1, 10]`. 
+ fn update_validation_record(q: u32, r: u32, ) -> Weight { + Weight::from_all(47_896_051) + // Standard Error: 31_451 + .saturating_add(Weight::from_all(7_549_938_u64).saturating_mul(q as u64)) + // Standard Error: 31_451 + .saturating_add(Weight::from_all(2_559_365_u64).saturating_mul(r as u64)) + .saturating_add(RocksDbWeight::get().reads(2)) + .saturating_add(RocksDbWeight::get().writes(1)) + } + // Storage: `Sylo::ValidationRecords` (r:1 w:1) + // Proof: `Sylo::ValidationRecords` (`max_values`: None, `max_size`: Some(19724), added: 22199, mode: `MaxEncodedLen`) + fn delete_validation_record() -> Weight { + Weight::from_all(43_060_000) + .saturating_add(RocksDbWeight::get().reads(1)) + .saturating_add(RocksDbWeight::get().writes(1)) + } +} + diff --git a/pallet/xrpl/src/lib.rs b/pallet/xrpl/src/lib.rs index 733095349..8a7b4d226 100644 --- a/pallet/xrpl/src/lib.rs +++ b/pallet/xrpl/src/lib.rs @@ -355,7 +355,7 @@ pub mod pallet { + Dispatchable + GetDispatchInfo + From> - // + IsType<::RuntimeCall> + + IsType<::RuntimeCall> + IsSubType>; /// Inner call validator diff --git a/runtime/Cargo.toml b/runtime/Cargo.toml index d33f6b2bb..634260e32 100644 --- a/runtime/Cargo.toml +++ b/runtime/Cargo.toml @@ -89,6 +89,7 @@ pallet-nfi = { workspace = true } pallet-nft = { workspace = true } pallet-nft-rpc-runtime-api = { workspace = true } pallet-sft = { workspace = true } +pallet-sylo-data-verification = { workspace = true } pallet-sft-rpc-runtime-api = { workspace = true } pallet-token-approvals = { workspace = true } pallet-tx-fee-pot = { workspace = true } @@ -198,6 +199,7 @@ std = [ "pallet-nft-rpc-runtime-api/std", "pallet-sft/std", "pallet-sft-rpc-runtime-api/std", + "pallet-sylo-data-verification/std", "pallet-xrpl-bridge/std", "pallet-xrpl/std", "pallet-tx-fee-pot/std", @@ -282,6 +284,7 @@ try-runtime = [ "pallet-nfi/try-runtime", "pallet-nft/try-runtime", "pallet-sft/try-runtime", + "pallet-sylo-data-verification/try-runtime", "pallet-xrpl-bridge/try-runtime", "pallet-xrpl/try-runtime", "pallet-token-approvals/try-runtime", @@ -335,6 +338,7 @@ runtime-benchmarks = [ "pallet-nfi/runtime-benchmarks", "pallet-nft/runtime-benchmarks", "pallet-sft/runtime-benchmarks", + "pallet-sylo-data-verification/runtime-benchmarks", "pallet-xrpl-bridge/runtime-benchmarks", "pallet-xrpl/runtime-benchmarks", "pallet-dex/runtime-benchmarks", diff --git a/runtime/src/lib.rs b/runtime/src/lib.rs index 39adeeb25..209f1ca15 100644 --- a/runtime/src/lib.rs +++ b/runtime/src/lib.rs @@ -596,6 +596,26 @@ impl pallet_preimage::Config for Runtime { type ByteDeposit = PreimageByteDeposit; } +parameter_types! 
{ + pub const MaxResolvers: u8 = 10; + pub const MaxTags: u8 = 10; + pub const MaxEntries: u8 = 100; + pub const MaxServiceEndpoints: u8 = 10; + pub const SyloStringLimit: u16 = 500; +} + +impl pallet_sylo_data_verification::Config for Runtime { + type RuntimeCall = RuntimeCall; + type RuntimeEvent = RuntimeEvent; + type ApproveOrigin = EnsureRoot; + type MaxResolvers = MaxResolvers; + type MaxTags = MaxTags; + type MaxEntries = MaxEntries; + type MaxServiceEndpoints = MaxServiceEndpoints; + type StringLimit = SyloStringLimit; + type WeightInfo = weights::pallet_sylo_data_verification::WeightInfo; +} + impl pallet_utility::Config for Runtime { type RuntimeEvent = RuntimeEvent; type RuntimeCall = RuntimeCall; @@ -1434,6 +1454,7 @@ construct_runtime!( Crowdsale: pallet_crowdsale = 49, Nfi: pallet_nfi = 50, Migration: pallet_migration = 51, + SyloDataVerification: pallet_sylo_data_verification = 52, // Election pallet. Only works with staking ElectionProviderMultiPhase: pallet_election_provider_multi_phase = 22, @@ -2360,5 +2381,6 @@ mod benches { [pallet_crowdsale, Crowdsale] [pallet_evm, EVM] [pallet_migration, Migration] + [pallet_sylo_data_verification, SyloDataVerification] ); } diff --git a/runtime/src/tests/mod.rs b/runtime/src/tests/mod.rs index 16e9bb75f..bbd906b42 100644 --- a/runtime/src/tests/mod.rs +++ b/runtime/src/tests/mod.rs @@ -22,6 +22,7 @@ mod evm_tests; mod maintenance_mode; mod multiplier; mod staker_payouts; +mod sylo_fees; use frame_support::traits::{ fungibles::Inspect as _, diff --git a/runtime/src/tests/sylo_fees.rs b/runtime/src/tests/sylo_fees.rs new file mode 100644 index 000000000..ca6cfe19b --- /dev/null +++ b/runtime/src/tests/sylo_fees.rs @@ -0,0 +1,286 @@ +// Copyright 2022-2023 Futureverse Corporation Limited +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// You may obtain a copy of the License at the root of this project source code + +//! Integration tests for the sylo pallet. Ensures sylo extrinsics are correctly +//! charged with the set Sylo payment token. 
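In outline, the flow these tests exercise is: governance sets the Sylo payment asset, DEX liquidity exists between XRP and that asset, and the fee proxy then charges any sylo extrinsic in that asset at `pre_dispatch` time. A condensed, illustrative sketch using this file's own helpers (`setup_sylo_liquidity`, `create_sylo_calls`); it simply compresses `sylo_extrinsic_works_with_sylo_token` below and is not an additional test:

// Sketch only, assuming the same imports as this file.
ExtBuilder::default().build().execute_with(|| {
    let caller = create_account(2);
    let payment_asset = setup_sylo_liquidity(caller.clone());
    let calls = create_sylo_calls();
    let before = AssetsExt::balance(payment_asset, &caller);
    assert_ok!(<ChargeTransactionPayment<Runtime> as SignedExtension>::pre_dispatch(
        ChargeTransactionPayment::from(0),
        &caller,
        &calls[0],
        &calls[0].get_dispatch_info(),
        1,
    ));
    // The fee is taken from the caller's Sylo token balance rather than XRP.
    assert!(AssetsExt::balance(payment_asset, &caller) < before);
});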
+#![cfg(test)] + +use crate::{ + tests::{alice, bob, ExtBuilder}, + Assets, AssetsExt, Dex, Futurepass, Runtime, RuntimeOrigin, SyloDataVerification, XrpCurrency, +}; +use frame_support::{ + assert_err, assert_ok, + dispatch::{GetDispatchInfo, RawOrigin}, + pallet_prelude::{InvalidTransaction, TransactionValidityError}, + traits::{fungible::Inspect, fungibles::Inspect as Inspects}, +}; +use seed_pallet_common::test_prelude::create_account; + +use crate::constants::XRP_ASSET_ID; +use pallet_transaction_payment::ChargeTransactionPayment; +use seed_primitives::{AccountId, Balance}; +use sp_core::H256; +use sp_runtime::{traits::SignedExtension, BoundedVec}; + +#[test] +fn sylo_extrinsic_works_with_sylo_token() { + ExtBuilder::default().build().execute_with(|| { + let new_account = create_account(2); + + let payment_asset = setup_sylo_liquidity(new_account.clone()); + + let calls = create_sylo_calls(); + + for call in calls.iter() { + let caller_token_balance_before = AssetsExt::balance(payment_asset, &new_account); + + let dispatch_info = call.get_dispatch_info(); + + assert_ok!( as SignedExtension>::pre_dispatch( + ChargeTransactionPayment::from(0), + &new_account, + &call, + &dispatch_info, + 1, + )); + + let caller_token_balance_after = AssetsExt::balance(payment_asset, &new_account); + + // validate caller had their sylo token balance reduced + assert!(caller_token_balance_before > caller_token_balance_after); + } + }); +} + +#[test] +fn sylo_extrinsic_works_with_futurepass_payment() { + ExtBuilder::default().build().execute_with(|| { + assert_ok!(Futurepass::create(RuntimeOrigin::signed(alice()), alice())); + let futurepass = pallet_futurepass::Holders::::get(alice()).unwrap(); + + let payment_asset = setup_sylo_liquidity(futurepass.clone()); + + let calls = create_sylo_calls(); + + for call in calls.iter() { + let caller_xrp_balance_before = XrpCurrency::balance(&alice()); + let caller_token_balance_before = AssetsExt::balance(payment_asset, &alice()); + let futurepass_token_balance_before = AssetsExt::balance(payment_asset, &futurepass); + + let fp_proxy_call = + crate::RuntimeCall::Futurepass(pallet_futurepass::Call::proxy_extrinsic { + futurepass, + call: Box::new(call.clone()), + }); + + let dispatch_info = fp_proxy_call.get_dispatch_info(); + + assert_ok!( as SignedExtension>::pre_dispatch( + ChargeTransactionPayment::from(0), + &alice(), + &fp_proxy_call, + &dispatch_info, + 1, + )); + + let caller_xrp_balance_after = XrpCurrency::balance(&alice()); + let caller_token_balance_after = AssetsExt::balance(payment_asset, &alice()); + let futurepass_token_balance_after = AssetsExt::balance(payment_asset, &futurepass); + + // validate futurepass should only have paid in tokens + assert_eq!(caller_xrp_balance_before, caller_xrp_balance_after); + assert_eq!(caller_token_balance_before, caller_token_balance_after); + + assert!(futurepass_token_balance_before > futurepass_token_balance_after); + } + }); +} + +#[test] +fn sylo_extrinsic_fails_without_sylo_funds() { + ExtBuilder::default().build().execute_with(|| { + // Test executing that calls without setting up the + // liquidity prior + let calls = create_sylo_calls(); + + for call in calls.iter() { + let dispatch_info = call.get_dispatch_info(); + + assert_err!( + as SignedExtension>::pre_dispatch( + ChargeTransactionPayment::from(0), + &alice(), + &call, + &dispatch_info, + 1, + ), + TransactionValidityError::Invalid(InvalidTransaction::Payment) + ); + } + }); +} + +#[test] +fn sylo_extrinsic_fails_without_fee_proxy() { + 
ExtBuilder::default().build().execute_with(|| { + let calls = create_sylo_calls(); + + for call in calls.iter() { + let dispatch_info = call.get_dispatch_info(); + + assert_err!( + as SignedExtension>::pre_dispatch( + ChargeTransactionPayment::from(0), + &alice(), + &call, + &dispatch_info, + 1, + ), + TransactionValidityError::Invalid(InvalidTransaction::Payment) + ); + } + }); +} + +#[test] +fn sylo_extrinsic_fails_using_call_with_fee_preferences() { + ExtBuilder::default().build().execute_with(|| { + let new_account = create_account(2); + + let payment_asset = setup_sylo_liquidity(new_account.clone()); + + let calls = create_sylo_calls(); + + for call in calls.iter() { + let max_payment: Balance = 10_000_000_000_000_000; + let fee_proxy_call = + crate::RuntimeCall::FeeProxy(pallet_fee_proxy::Call::call_with_fee_preferences { + payment_asset, + max_payment, + call: Box::new(call.clone()), + }); + + let dispatch_info = fee_proxy_call.get_dispatch_info(); + assert_err!( + as SignedExtension>::pre_dispatch( + ChargeTransactionPayment::from(0), + &new_account, + &fee_proxy_call, + &dispatch_info, + 1, + ), + TransactionValidityError::Invalid(InvalidTransaction::Payment) + ); + } + }); +} + +fn setup_sylo_liquidity(new_account: AccountId) -> u32 { + let payment_asset = AssetsExt::next_asset_uuid().unwrap(); + + assert_ok!(AssetsExt::create_asset( + RawOrigin::Signed(alice()).into(), + b"Test".to_vec(), + b"Test".to_vec(), + 6, + None, + None + )); + + assert_eq!(AssetsExt::balance(payment_asset, &bob()), 0); + + // Mint these assets into Alice and new_account + assert_ok!(Assets::mint( + RawOrigin::Signed(alice()).into(), + payment_asset, + alice(), + 10_000_000_000_000_000 + )); + assert_ok!(Assets::mint( + RawOrigin::Signed(alice()).into(), + payment_asset, + new_account, + 10_000_000_000_000_000 + )); + + // Add liquidity to the dex + assert_ok!(Dex::add_liquidity( + RawOrigin::Signed(alice()).into(), + XRP_ASSET_ID, + payment_asset, + 1_000_000_000_000, + 1_000_000_000_000, + 1, + 1, + None, + None, + )); + + assert_ok!(SyloDataVerification::set_payment_asset(RawOrigin::Root.into(), payment_asset)); + + payment_asset +} + +/// Creates a list of calls for all sylo extrinsics which should be charged in Sylo Tokens +fn create_sylo_calls() -> Vec<::RuntimeCall> { + vec![ + crate::RuntimeCall::SyloDataVerification( + pallet_sylo_data_verification::Call::register_resolver { + identifier: BoundedVec::new(), + service_endpoints: BoundedVec::new(), + }, + ), + crate::RuntimeCall::SyloDataVerification( + pallet_sylo_data_verification::Call::update_resolver { + identifier: BoundedVec::new(), + service_endpoints: BoundedVec::new(), + }, + ), + crate::RuntimeCall::SyloDataVerification( + pallet_sylo_data_verification::Call::deregister_resolver { + identifier: BoundedVec::new(), + }, + ), + crate::RuntimeCall::SyloDataVerification( + pallet_sylo_data_verification::Call::create_validation_record { + data_id: BoundedVec::new(), + resolvers: BoundedVec::new(), + data_type: BoundedVec::new(), + tags: BoundedVec::new(), + checksum: H256::from_low_u64_be(123), + }, + ), + crate::RuntimeCall::SyloDataVerification( + pallet_sylo_data_verification::Call::add_validation_record_entry { + data_id: BoundedVec::new(), + checksum: H256::from_low_u64_be(123), + }, + ), + crate::RuntimeCall::SyloDataVerification( + pallet_sylo_data_verification::Call::update_validation_record { + data_id: BoundedVec::new(), + resolvers: None, + data_type: None, + tags: None, + }, + ), + crate::RuntimeCall::SyloDataVerification( + 
pallet_sylo_data_verification::Call::delete_validation_record { + data_id: BoundedVec::new(), + }, + ), + ] +} diff --git a/runtime/src/weights/mod.rs b/runtime/src/weights/mod.rs index ce39f6879..694cab047 100644 --- a/runtime/src/weights/mod.rs +++ b/runtime/src/weights/mod.rs @@ -28,6 +28,7 @@ pub mod pallet_session; pub mod pallet_sft; pub mod pallet_staking; pub mod pallet_sudo; +pub mod pallet_sylo_data_verification; pub mod pallet_timestamp; pub mod pallet_token_approvals; pub mod pallet_utility; diff --git a/runtime/src/weights/pallet_sylo_data_verification.rs b/runtime/src/weights/pallet_sylo_data_verification.rs new file mode 100644 index 000000000..f1cfff68b --- /dev/null +++ b/runtime/src/weights/pallet_sylo_data_verification.rs @@ -0,0 +1,163 @@ + +//! Autogenerated weights for `pallet_sylo_data_verification` +//! +//! THIS FILE WAS AUTO-GENERATED USING THE SUBSTRATE BENCHMARK CLI VERSION 4.0.0-dev +//! DATE: 2025-01-16, STEPS: `50`, REPEAT: `20`, LOW RANGE: `[]`, HIGH RANGE: `[]` +//! WORST CASE MAP SIZE: `1000000` +//! HOSTNAME: `ip-172-31-102-147`, CPU: `Intel(R) Xeon(R) CPU E5-2686 v4 @ 2.30GHz` +//! EXECUTION: ``, WASM-EXECUTION: `Compiled`, CHAIN: `Some("dev")`, DB CACHE: 1024 + +// Executed Command: +// ./target/release/seed +// benchmark +// pallet +// --chain=dev +// --steps=50 +// --repeat=20 +// --pallet=pallet-sylo-data-verification +// --extrinsic=* +// --wasm-execution=compiled +// --heap-pages=4096 +// --output +// ./runtime/src/weights/pallet_sylo_data_verification.rs + +#![cfg_attr(rustfmt, rustfmt_skip)] +#![allow(unused_parens)] +#![allow(unused_imports)] +#![allow(missing_docs)] + +use frame_support::{traits::Get, weights::Weight}; +use core::marker::PhantomData; + +/// Weight functions for `pallet_sylo_data_verification`. +pub struct WeightInfo(PhantomData); +impl pallet_sylo_data_verification::WeightInfo for WeightInfo { + /// Storage: `Sylo::SyloAssetId` (r:0 w:1) + /// Proof: `Sylo::SyloAssetId` (`max_values`: Some(1), `max_size`: Some(4), added: 499, mode: `MaxEncodedLen`) + fn set_payment_asset() -> Weight { + // Proof Size summary in bytes: + // Measured: `0` + // Estimated: `0` + // Minimum execution time: 24_224_000 picoseconds. + Weight::from_parts(25_097_000, 0) + .saturating_add(Weight::from_parts(0, 0)) + .saturating_add(T::DbWeight::get().writes(1)) + } + /// Storage: `Sylo::SyloResolverMethod` (r:0 w:1) + /// Proof: `Sylo::SyloResolverMethod` (`max_values`: Some(1), `max_size`: Some(502), added: 997, mode: `MaxEncodedLen`) + fn set_sylo_resolver_method() -> Weight { + // Proof Size summary in bytes: + // Measured: `0` + // Estimated: `0` + // Minimum execution time: 25_480_000 picoseconds. + Weight::from_parts(26_580_000, 0) + .saturating_add(Weight::from_parts(0, 0)) + .saturating_add(T::DbWeight::get().writes(1)) + } + /// Storage: `Sylo::Resolvers` (r:1 w:1) + /// Proof: `Sylo::Resolvers` (`max_values`: None, `max_size`: Some(5551), added: 8026, mode: `MaxEncodedLen`) + /// The range of component `p` is `[1, 10]`. + fn register_resolver(p: u32, ) -> Weight { + // Proof Size summary in bytes: + // Measured: `175` + // Estimated: `9016` + // Minimum execution time: 40_189_000 picoseconds. 
+		Weight::from_parts(40_826_395, 0)
+			.saturating_add(Weight::from_parts(0, 9016))
+			// Standard Error: 16_717
+			.saturating_add(Weight::from_parts(2_427_734, 0).saturating_mul(p.into()))
+			.saturating_add(T::DbWeight::get().reads(1))
+			.saturating_add(T::DbWeight::get().writes(1))
+	}
+	/// Storage: `Sylo::Resolvers` (r:1 w:1)
+	/// Proof: `Sylo::Resolvers` (`max_values`: None, `max_size`: Some(5551), added: 8026, mode: `MaxEncodedLen`)
+	/// The range of component `p` is `[1, 10]`.
+	fn update_resolver(p: u32, ) -> Weight {
+		// Proof Size summary in bytes:
+		// Measured: `289`
+		// Estimated: `9016`
+		// Minimum execution time: 41_635_000 picoseconds.
+		Weight::from_parts(42_559_132, 0)
+			.saturating_add(Weight::from_parts(0, 9016))
+			// Standard Error: 19_527
+			.saturating_add(Weight::from_parts(2_447_564, 0).saturating_mul(p.into()))
+			.saturating_add(T::DbWeight::get().reads(1))
+			.saturating_add(T::DbWeight::get().writes(1))
+	}
+	/// Storage: `Sylo::Resolvers` (r:1 w:1)
+	/// Proof: `Sylo::Resolvers` (`max_values`: None, `max_size`: Some(5551), added: 8026, mode: `MaxEncodedLen`)
+	fn deregister_resolver() -> Weight {
+		// Proof Size summary in bytes:
+		// Measured: `289`
+		// Estimated: `9016`
+		// Minimum execution time: 43_117_000 picoseconds.
+		Weight::from_parts(43_972_000, 0)
+			.saturating_add(Weight::from_parts(0, 9016))
+			.saturating_add(T::DbWeight::get().reads(1))
+			.saturating_add(T::DbWeight::get().writes(1))
+	}
+	/// Storage: `Sylo::ValidationRecords` (r:1 w:1)
+	/// Proof: `Sylo::ValidationRecords` (`max_values`: None, `max_size`: Some(19724), added: 22199, mode: `MaxEncodedLen`)
+	/// Storage: `Sylo::SyloResolverMethod` (r:1 w:0)
+	/// Proof: `Sylo::SyloResolverMethod` (`max_values`: Some(1), `max_size`: Some(502), added: 997, mode: `MaxEncodedLen`)
+	/// The range of component `q` is `[1, 10]`.
+	/// The range of component `r` is `[1, 10]`.
+	fn create_validation_record(q: u32, r: u32, ) -> Weight {
+		// Proof Size summary in bytes:
+		// Measured: `201 + q * (22 ±0)`
+		// Estimated: `23189`
+		// Minimum execution time: 59_181_000 picoseconds.
+		Weight::from_parts(52_554_909, 0)
+			.saturating_add(Weight::from_parts(0, 23189))
+			// Standard Error: 27_986
+			.saturating_add(Weight::from_parts(2_621_417, 0).saturating_mul(q.into()))
+			// Standard Error: 27_986
+			.saturating_add(Weight::from_parts(1_213_630, 0).saturating_mul(r.into()))
+			.saturating_add(T::DbWeight::get().reads(2))
+			.saturating_add(T::DbWeight::get().writes(1))
+	}
+	/// Storage: `Sylo::ValidationRecords` (r:1 w:1)
+	/// Proof: `Sylo::ValidationRecords` (`max_values`: None, `max_size`: Some(19724), added: 22199, mode: `MaxEncodedLen`)
+	fn add_validation_record_entry() -> Weight {
+		// Proof Size summary in bytes:
+		// Measured: `317`
+		// Estimated: `23189`
+		// Minimum execution time: 48_869_000 picoseconds.
+		Weight::from_parts(49_821_000, 0)
+			.saturating_add(Weight::from_parts(0, 23189))
+			.saturating_add(T::DbWeight::get().reads(1))
+			.saturating_add(T::DbWeight::get().writes(1))
+	}
+	/// Storage: `Sylo::ValidationRecords` (r:1 w:1)
+	/// Proof: `Sylo::ValidationRecords` (`max_values`: None, `max_size`: Some(19724), added: 22199, mode: `MaxEncodedLen`)
+	/// Storage: `Sylo::SyloResolverMethod` (r:1 w:0)
+	/// Proof: `Sylo::SyloResolverMethod` (`max_values`: Some(1), `max_size`: Some(502), added: 997, mode: `MaxEncodedLen`)
+	/// The range of component `q` is `[1, 10]`.
+	/// The range of component `r` is `[1, 10]`.
+	fn update_validation_record(q: u32, r: u32, ) -> Weight {
+		// Proof Size summary in bytes:
+		// Measured: `339 + q * (22 ±0)`
+		// Estimated: `23189`
+		// Minimum execution time: 75_749_000 picoseconds.
+		Weight::from_parts(47_859_866, 0)
+			.saturating_add(Weight::from_parts(0, 23189))
+			// Standard Error: 34_288
+			.saturating_add(Weight::from_parts(7_561_509, 0).saturating_mul(q.into()))
+			// Standard Error: 34_288
+			.saturating_add(Weight::from_parts(2_809_128, 0).saturating_mul(r.into()))
+			.saturating_add(T::DbWeight::get().reads(2))
+			.saturating_add(T::DbWeight::get().writes(1))
+	}
+	/// Storage: `Sylo::ValidationRecords` (r:1 w:1)
+	/// Proof: `Sylo::ValidationRecords` (`max_values`: None, `max_size`: Some(19724), added: 22199, mode: `MaxEncodedLen`)
+	fn delete_validation_record() -> Weight {
+		// Proof Size summary in bytes:
+		// Measured: `276`
+		// Estimated: `23189`
+		// Minimum execution time: 43_446_000 picoseconds.
+		Weight::from_parts(44_252_000, 0)
+			.saturating_add(Weight::from_parts(0, 23189))
+			.saturating_add(T::DbWeight::get().reads(1))
+			.saturating_add(T::DbWeight::get().writes(1))
+	}
+}
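
Reviewer note: each generated weight above is a linear fit, ref-time = intercept + slope x component, with DB weight added separately from the runtime's configured `DbWeight`. The standalone sketch below (helper and constant names are illustrative, not part of the patch) evaluates that model for `register_resolver(p)` using the constants from the file, ignoring proof size and DB weight:

// Minimal sketch: evaluate the fitted ref-time model for `register_resolver(p)`.
// Constants are copied from the generated weight file; DB weight (1 read + 1 write)
// is omitted because it depends on the runtime's DbWeight configuration.
fn register_resolver_ref_time_ps(p: u64) -> u64 {
    const BASE_PS: u64 = 40_826_395; // fitted intercept, picoseconds
    const PER_SERVICE_ENDPOINT_PS: u64 = 2_427_734; // fitted slope per endpoint `p`
    BASE_PS + PER_SERVICE_ENDPOINT_PS * p
}

fn main() {
    // At the top of the benchmarked range (p = 10) this is ~65 us of ref-time.
    println!("{} ps", register_resolver_ref_time_ps(10));
}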