diff --git a/.gitignore b/.gitignore index 8e803b34c..76c116f55 100644 --- a/.gitignore +++ b/.gitignore @@ -18,6 +18,10 @@ Cargo.lock # Ignore Store files that may be spawned using `cargo run` *.sqlite3 +# Wasm build artefacts to ignore +wasm/node_modules/* +wasm/dist/* + # Ignore mkdocs build files docs/site diff --git a/Cargo.toml b/Cargo.toml deleted file mode 100644 index a750aee46..000000000 --- a/Cargo.toml +++ /dev/null @@ -1,59 +0,0 @@ -[package] -name = "miden-client" -version = "0.2.0" -description = "Client library that facilitates interaction with the Miden rollup" -authors = ["miden contributors"] -readme = "README.md" -license = "MIT" -repository = "https://github.com/0xPolygonMiden/miden-client" -documentation = "https://docs.rs/miden-client/0.2.0" -keywords = ["miden", "client"] -edition = "2021" -rust-version = "1.75" -default-run = "miden-client" - -[[test]] -name = "integration" -path = "tests/integration/main.rs" -required-features = ["integration"] - -[features] -concurrent = [ - "miden-lib/concurrent", - "miden-objects/concurrent", - "miden-tx/concurrent", -] -default = ["std"] -integration = ["testing", "concurrent", "uuid"] -std = ["miden-objects/std"] -testing = ["miden-objects/testing", "miden-lib/testing"] -test_utils = ["miden-objects/testing"] - -[dependencies] -async-trait = { version = "0.1" } -clap = { version = "4.3", features = ["derive"] } -comfy-table = "7.1.0" -figment = { version = "0.10", features = ["toml", "env"] } -lazy_static = "1.4.0" -miden-lib = { version = "0.2", default-features = false } -miden-node-proto = { version = "0.2", default-features = false } -miden-tx = { version = "0.2", default-features = false } -miden-objects = { version = "0.2", features = ["serde"] } -rand = { version = "0.8.5" } -rusqlite = { version = "0.30.0", features = ["bundled"] } -rusqlite_migration = { version = "1.0" } -serde = { version = "1.0", features = ["derive"] } -serde_json = { version = "1.0", features = ["raw_value"] } -tokio = { version = "1.29", features = ["rt-multi-thread", "net", "macros"] } -tonic = { version = "0.11" } -toml = { version = "0.8" } -tracing = { version = "0.1" } -tracing-subscriber = { version = "0.3" } -uuid = { version = "1.6.1", features = ["serde", "v4"], optional = true } - -[dev-dependencies] -# needed for tests to run always with the test utils feature -miden_client = { package = "miden-client", path = ".", features = [ - "test_utils", - "uuid", -] } diff --git a/rust-toolchain b/rust-toolchain deleted file mode 100644 index 3245dca3d..000000000 --- a/rust-toolchain +++ /dev/null @@ -1 +0,0 @@ -1.77 diff --git a/src/cli/account.rs b/src/cli/account.rs deleted file mode 100644 index 53ccee648..000000000 --- a/src/cli/account.rs +++ /dev/null @@ -1,391 +0,0 @@ -use std::{fs, path::PathBuf}; - -use clap::{Parser, ValueEnum}; -use comfy_table::{presets, Attribute, Cell, ContentArrangement, Table}; -use miden_client::{ - client::{accounts, rpc::NodeRpcClient, Client}, - store::Store, -}; -use miden_objects::{ - accounts::{AccountData, AccountId, AccountStorage, AccountType, StorageSlotType}, - assets::{Asset, TokenSymbol}, - crypto::{dsa::rpo_falcon512::SecretKey, rand::FeltRng}, - ZERO, -}; -use miden_tx::utils::{bytes_to_hex_string, Deserializable, Serializable}; -use tracing::info; - -use crate::cli::create_dynamic_table; - -// ACCOUNT COMMAND -// ================================================================================================ - -#[derive(Debug, Clone, Parser)] -#[clap(about = "Create accounts and inspect 
account details")] -pub enum AccountCmd { - /// List all accounts monitored by this client - #[clap(short_flag = 'l')] - List, - - /// Show details of the account for the specified ID - #[clap(short_flag = 's')] - Show { - // TODO: We should create a value parser for catching input parsing errors earlier (ie AccountID) once complexity grows - #[clap()] - id: String, - #[clap(short, long, default_value_t = false)] - keys: bool, - #[clap(short, long, default_value_t = false)] - vault: bool, - #[clap(short, long, default_value_t = false)] - storage: bool, - #[clap(short, long, default_value_t = false)] - code: bool, - }, - /// Create new account and store it locally - #[clap(short_flag = 'n')] - New { - #[clap(subcommand)] - template: AccountTemplate, - }, - /// Import accounts from binary files (with .mac extension) - #[clap(short_flag = 'i')] - Import { - /// Paths to the files that contains the account data - #[arg()] - filenames: Vec, - }, -} - -#[derive(Debug, Parser, Clone)] -#[clap()] -pub enum AccountTemplate { - /// Creates a basic account (Regular account with immutable code) - BasicImmutable { - #[clap(short, long, value_enum, default_value_t = AccountStorageMode::OffChain)] - storage_type: AccountStorageMode, - }, - /// Creates a basic account (Regular account with mutable code) - BasicMutable { - #[clap(short, long, value_enum, default_value_t = AccountStorageMode::OffChain)] - storage_type: AccountStorageMode, - }, - /// Creates a faucet for fungible tokens - FungibleFaucet { - #[clap(short, long)] - token_symbol: String, - #[clap(short, long)] - decimals: u8, - #[clap(short, long)] - max_supply: u64, - #[clap(short, long, value_enum, default_value_t = AccountStorageMode::OffChain)] - storage_type: AccountStorageMode, - }, - /// Creates a faucet for non-fungible tokens - NonFungibleFaucet { - #[clap(short, long, value_enum, default_value_t = AccountStorageMode::OffChain)] - storage_type: AccountStorageMode, - }, -} - -#[derive(Debug, Clone, Copy, ValueEnum)] -pub enum AccountStorageMode { - OffChain, - OnChain, -} - -impl From for accounts::AccountStorageMode { - fn from(value: AccountStorageMode) -> Self { - match value { - AccountStorageMode::OffChain => accounts::AccountStorageMode::Local, - AccountStorageMode::OnChain => accounts::AccountStorageMode::OnChain, - } - } -} - -impl From<&AccountStorageMode> for accounts::AccountStorageMode { - fn from(value: &AccountStorageMode) -> Self { - accounts::AccountStorageMode::from(*value) - } -} - -impl AccountCmd { - pub fn execute( - &self, - mut client: Client, - ) -> Result<(), String> { - match self { - AccountCmd::List => { - list_accounts(client)?; - }, - AccountCmd::New { template } => { - let client_template = match template { - AccountTemplate::BasicImmutable { storage_type: storage_mode } => { - accounts::AccountTemplate::BasicWallet { - mutable_code: false, - storage_mode: storage_mode.into(), - } - }, - AccountTemplate::BasicMutable { storage_type: storage_mode } => { - accounts::AccountTemplate::BasicWallet { - mutable_code: true, - storage_mode: storage_mode.into(), - } - }, - AccountTemplate::FungibleFaucet { - token_symbol, - decimals, - max_supply, - storage_type: storage_mode, - } => accounts::AccountTemplate::FungibleFaucet { - token_symbol: TokenSymbol::new(token_symbol) - .map_err(|err| format!("error: token symbol is invalid: {}", err))?, - decimals: *decimals, - max_supply: *max_supply, - storage_mode: storage_mode.into(), - }, - AccountTemplate::NonFungibleFaucet { storage_type: _ } => todo!(), - }; - let 
(_new_account, _account_seed) = client.new_account(client_template)?; - }, - AccountCmd::Show { id, keys, vault, storage, code } => { - let account_id: AccountId = AccountId::from_hex(id) - .map_err(|_| "Input number was not a valid Account Id")?; - show_account(client, account_id, *keys, *vault, *storage, *code)?; - }, - AccountCmd::Import { filenames } => { - validate_paths(filenames, "mac")?; - for filename in filenames { - import_account(&mut client, filename)?; - } - println!("Imported {} accounts.", filenames.len()); - }, - } - Ok(()) - } -} - -// LIST ACCOUNTS -// ================================================================================================ - -fn list_accounts( - client: Client, -) -> Result<(), String> { - let accounts = client.get_accounts()?; - - let mut table = create_dynamic_table(&[ - "Account ID", - "Code Root", - "Vault Root", - "Storage Root", - "Type", - "Storage mode", - "Nonce", - ]); - accounts.iter().for_each(|(acc, _acc_seed)| { - table.add_row(vec![ - acc.id().to_string(), - acc.code_root().to_string(), - acc.vault_root().to_string(), - acc.storage_root().to_string(), - account_type_display_name(&acc.id().account_type()), - storage_type_display_name(&acc.id()), - acc.nonce().as_int().to_string(), - ]); - }); - - println!("{table}"); - Ok(()) -} - -pub fn show_account( - client: Client, - account_id: AccountId, - show_keys: bool, - show_vault: bool, - show_storage: bool, - show_code: bool, -) -> Result<(), String> { - let (account, _account_seed) = client.get_account(account_id)?; - let mut table = create_dynamic_table(&[ - "Account ID", - "Account Hash", - "Type", - "Storage mode", - "Code Root", - "Vault Root", - "Storage Root", - "Nonce", - ]); - table.add_row(vec![ - account.id().to_string(), - account.hash().to_string(), - account_type_display_name(&account.account_type()), - storage_type_display_name(&account_id), - account.code().root().to_string(), - account.vault().asset_tree().root().to_string(), - account.storage().root().to_string(), - account.nonce().as_int().to_string(), - ]); - println!("{table}\n"); - - if show_vault { - let assets = account.vault().assets(); - - println!("Assets: "); - - let mut table = create_dynamic_table(&["Asset Type", "Faucet ID", "Amount"]); - for asset in assets { - let (asset_type, faucet_id, amount) = match asset { - Asset::Fungible(fungible_asset) => { - ("Fungible Asset", fungible_asset.faucet_id(), fungible_asset.amount()) - }, - Asset::NonFungible(non_fungible_asset) => { - ("Non Fungible Asset", non_fungible_asset.faucet_id(), 1) - }, - }; - table.add_row(vec![asset_type, &faucet_id.to_hex(), &amount.to_string()]); - } - - println!("{table}\n"); - } - - if show_storage { - let account_storage = account.storage(); - - println!("Storage: \n"); - - let mut table = create_dynamic_table(&[ - "Item Slot Index", - "Item Slot Type", - "Value Arity", - "Value/Commitment", - ]); - for (idx, entry) in account_storage.layout().iter().enumerate() { - let item = account_storage.get_item(idx as u8); - - // Last entry is reserved so I don't think the user cares about it Also, to keep the - // output smaller, if the [StorageSlotType] is a value and it's 0 we assume it's not - // initialized and skip it - if idx == AccountStorage::SLOT_LAYOUT_COMMITMENT_INDEX as usize { - continue; - } - if matches!(entry, StorageSlotType::Value { value_arity: _value_arity }) - && item == [ZERO; 4].into() - { - continue; - } - - let (slot_type, arity) = match entry { - StorageSlotType::Value { value_arity } => ("Value", value_arity), - 
StorageSlotType::Array { depth: _depth, value_arity } => ("Array", value_arity), - StorageSlotType::Map { value_arity } => ("Map", value_arity), - }; - table.add_row(vec![&idx.to_string(), slot_type, &arity.to_string(), &item.to_hex()]); - } - println!("{table}\n"); - } - - if show_keys { - let auth_info = client.get_account_auth(account_id)?; - - match auth_info { - miden_client::store::AuthInfo::RpoFalcon512(key_pair) => { - const KEY_PAIR_SIZE: usize = std::mem::size_of::(); - let auth_info: [u8; KEY_PAIR_SIZE] = key_pair - .to_bytes() - .try_into() - .expect("Array size is const and should always exactly fit SecretKey"); - - let mut table = Table::new(); - table - .load_preset(presets::UTF8_HORIZONTAL_ONLY) - .set_content_arrangement(ContentArrangement::DynamicFullWidth) - .set_header(vec![Cell::new("Key Pair").add_attribute(Attribute::Bold)]); - - table.add_row(vec![format!("0x{}\n", bytes_to_hex_string(auth_info))]); - println!("{table}\n"); - }, - }; - } - - if show_code { - let module = account.code().module(); - let procedure_digests = account.code().procedures(); - - println!("Account Code Info:"); - - let mut table = create_dynamic_table(&["Procedure Digests"]); - for digest in procedure_digests { - table.add_row(vec![digest.to_hex()]); - } - println!("{table}\n"); - - let mut code_table = create_dynamic_table(&["Code"]); - code_table.add_row(vec![&module]); - println!("{code_table}\n"); - } - - Ok(()) -} - -// IMPORT ACCOUNT -// ================================================================================================ - -fn import_account( - client: &mut Client, - filename: &PathBuf, -) -> Result<(), String> { - info!( - "Attempting to import account data from {}...", - fs::canonicalize(filename).map_err(|err| err.to_string())?.as_path().display() - ); - let account_data_file_contents = fs::read(filename).map_err(|err| err.to_string())?; - let account_data = - AccountData::read_from_bytes(&account_data_file_contents).map_err(|err| err.to_string())?; - let account_id = account_data.account.id(); - - client.import_account(account_data)?; - println!("Imported account with ID: {}", account_id); - - Ok(()) -} - -// HELPERS -// ================================================================================================ - -/// Checks that all files exist, otherwise returns an error. 
It also ensures that all files have a -/// specific extension -fn validate_paths(paths: &[PathBuf], expected_extension: &str) -> Result<(), String> { - let invalid_path = paths.iter().find(|path| { - !path.exists() || path.extension().map_or(false, |ext| ext != expected_extension) - }); - - if let Some(path) = invalid_path { - Err(format!( - "The path `{}` does not exist or does not have the appropiate extension", - path.to_string_lossy() - ) - .to_string()) - } else { - Ok(()) - } -} - -fn account_type_display_name(account_type: &AccountType) -> String { - match account_type { - AccountType::FungibleFaucet => "Fungible faucet", - AccountType::NonFungibleFaucet => "Non-fungible faucet", - AccountType::RegularAccountImmutableCode => "Regular", - AccountType::RegularAccountUpdatableCode => "Regular (updatable)", - } - .to_string() -} - -fn storage_type_display_name(account: &AccountId) -> String { - match account.is_on_chain() { - true => "On-chain", - false => "Off-chain", - } - .to_string() -} diff --git a/src/cli/info.rs b/src/cli/info.rs deleted file mode 100644 index 44018cbdc..000000000 --- a/src/cli/info.rs +++ /dev/null @@ -1,20 +0,0 @@ -use miden_client::{ - client::{rpc::NodeRpcClient, Client}, - store::Store, -}; -use miden_objects::crypto::rand::FeltRng; - -pub fn print_client_info( - client: &Client, -) -> Result<(), String> { - print_block_number(client) -} - -// HELPERS -// ================================================================================================ -fn print_block_number( - client: &Client, -) -> Result<(), String> { - println!("block number: {}", client.get_sync_height().map_err(|e| e.to_string())?); - Ok(()) -} diff --git a/src/cli/init.rs b/src/cli/init.rs deleted file mode 100644 index 46c5fd3e8..000000000 --- a/src/cli/init.rs +++ /dev/null @@ -1,73 +0,0 @@ -use std::{ - fs::File, - io::{self, Write}, - path::PathBuf, -}; - -use miden_client::config::{ClientConfig, Endpoint}; - -pub(crate) fn initialize_client(config_file_path: PathBuf) -> Result<(), String> { - let mut client_config = ClientConfig::default(); - - initialize_rpc_config(&mut client_config)?; - initialize_store_config(&mut client_config)?; - - let config_as_toml_string = toml::to_string_pretty(&client_config) - .map_err(|err| format!("error formatting config: {err}"))?; - - println!("Creating config file at: {:?}", config_file_path); - let mut file_handle = File::options() - .write(true) - .create_new(true) - .open(config_file_path) - .map_err(|err| format!("error opening the file: {err}"))?; - file_handle - .write(config_as_toml_string.as_bytes()) - .map_err(|err| format!("error writing to file: {err}"))?; - - Ok(()) -} - -fn initialize_rpc_config(client_config: &mut ClientConfig) -> Result<(), String> { - println!("Protocol (default: http):"); - let mut protocol: String = String::new(); - io::stdin().read_line(&mut protocol).expect("Should read line"); - protocol = protocol.trim().to_string(); - if protocol.is_empty() { - protocol = client_config.rpc.endpoint.protocol().to_string(); - } - - println!("Host (default: localhost):"); - let mut host: String = String::new(); - io::stdin().read_line(&mut host).expect("Should read line"); - host = host.trim().to_string(); - if host.is_empty() { - host = client_config.rpc.endpoint.host().to_string(); - } - - println!("Node RPC Port (default: 57291):"); - let mut port_str: String = String::new(); - io::stdin().read_line(&mut port_str).expect("Should read line"); - port_str = port_str.trim().to_string(); - let port: u16 = if !port_str.is_empty() 
{ - port_str.parse().map_err(|err| format!("Error parsing port: {err}"))? - } else { - client_config.rpc.endpoint.port() - }; - - client_config.rpc.endpoint = Endpoint::new(protocol, host, port); - - Ok(()) -} - -fn initialize_store_config(client_config: &mut ClientConfig) -> Result<(), String> { - println!("Sqlite file path (default: ./store.sqlite3):"); - let mut database_filepath: String = String::new(); - io::stdin().read_line(&mut database_filepath).expect("Should read line"); - database_filepath = database_filepath.trim().to_string(); - if !database_filepath.is_empty() { - client_config.store.database_filepath = database_filepath; - } - - Ok(()) -} diff --git a/src/cli/input_notes.rs b/src/cli/input_notes.rs deleted file mode 100644 index 0d29219b5..000000000 --- a/src/cli/input_notes.rs +++ /dev/null @@ -1,446 +0,0 @@ -use std::{ - fs::File, - io::{Read, Write}, - path::PathBuf, -}; - -use clap::ValueEnum; -use comfy_table::{presets, Attribute, Cell, ContentArrangement, Table}; -use miden_client::{ - client::rpc::NodeRpcClient, - errors::ClientError, - store::{InputNoteRecord, NoteFilter as ClientNoteFilter, Store}, -}; -use miden_objects::{ - crypto::rand::FeltRng, - notes::{NoteId, NoteInputs}, - Digest, -}; -use miden_tx::utils::{Deserializable, Serializable}; - -use super::{Client, Parser}; -use crate::cli::{create_dynamic_table, get_note_with_id_prefix}; - -#[derive(Clone, Debug, ValueEnum)] -pub enum NoteFilter { - Pending, - Committed, - Consumed, -} - -#[derive(Debug, Parser, Clone)] -#[clap(about = "View and manage input notes")] -pub enum InputNotes { - /// List input notes - #[clap(short_flag = 'l')] - List { - /// Filter the displayed note list - #[clap(short, long)] - filter: Option, - }, - - /// Show details of the input note for the specified note ID - #[clap(short_flag = 's')] - Show { - /// Note ID of the input note to show - #[clap()] - id: String, - - /// Show note script - #[clap(short, long, default_value = "false")] - script: bool, - - /// Show note vault - #[clap(short, long, default_value = "false")] - vault: bool, - - /// Show note inputs - #[clap(short, long, default_value = "false")] - inputs: bool, - }, - - /// Export input note data to a binary file - #[clap(short_flag = 'e')] - Export { - /// Note ID of the input note to show - #[clap()] - id: String, - - /// Path to the file that will contain the input note data. 
If not provided, the filename will be the input note ID - #[clap()] - filename: Option, - }, - - /// Import input note data from a binary file - #[clap(short_flag = 'i')] - Import { - /// Path to the file that contains the input note data - #[clap()] - filename: PathBuf, - }, -} - -impl InputNotes { - pub fn execute( - &self, - mut client: Client, - ) -> Result<(), String> { - match self { - InputNotes::List { filter } => { - let filter = match filter { - Some(NoteFilter::Committed) => ClientNoteFilter::Committed, - Some(NoteFilter::Consumed) => ClientNoteFilter::Consumed, - Some(NoteFilter::Pending) => ClientNoteFilter::Pending, - None => ClientNoteFilter::All, - }; - - list_input_notes(client, filter)?; - }, - InputNotes::Show { id, script, vault, inputs } => { - show_input_note(client, id.to_owned(), *script, *vault, *inputs)?; - }, - InputNotes::Export { id, filename } => { - export_note(&client, id, filename.clone())?; - println!("Succesfully exported note {}", id); - }, - InputNotes::Import { filename } => { - let note_id = import_note(&mut client, filename.clone())?; - println!("Succesfully imported note {}", note_id.inner()); - }, - } - Ok(()) - } -} - -// LIST INPUT NOTES -// ================================================================================================ -fn list_input_notes( - client: Client, - filter: ClientNoteFilter, -) -> Result<(), String> { - let notes = client.get_input_notes(filter)?; - print_notes_summary(¬es)?; - Ok(()) -} - -// EXPORT INPUT NOTE -// ================================================================================================ -pub fn export_note( - client: &Client, - note_id: &str, - filename: Option, -) -> Result { - let note_id = Digest::try_from(note_id) - .map_err(|err| format!("Failed to parse input note id: {}", err))? 
- .into(); - let note = client.get_input_note(note_id)?; - - let file_path = filename.unwrap_or_else(|| { - let mut dir = PathBuf::new(); - dir.push(note_id.inner().to_string()); - dir - }); - - let mut file = File::create(file_path).map_err(|err| err.to_string())?; - - file.write_all(¬e.to_bytes()).map_err(|err| err.to_string())?; - - Ok(file) -} - -// IMPORT INPUT NOTE -// ================================================================================================ -pub fn import_note( - client: &mut Client, - filename: PathBuf, -) -> Result { - let mut contents = vec![]; - let mut _file = File::open(filename) - .and_then(|mut f| f.read_to_end(&mut contents)) - .map_err(|err| err.to_string()); - - // TODO: When importing a RecordedNote we want to make sure that the note actually exists in the chain (RPC call) - // and start monitoring its nullifiers (ie, update the list of relevant tags in the state sync table) - let input_note_record = - InputNoteRecord::read_from_bytes(&contents).map_err(|err| err.to_string())?; - - let note_id = input_note_record.id(); - client.import_input_note(input_note_record)?; - - Ok(note_id) -} - -// SHOW INPUT NOTE -// ================================================================================================ -fn show_input_note( - client: Client, - note_id: String, - show_script: bool, - show_vault: bool, - show_inputs: bool, -) -> Result<(), String> { - let input_note_record = - get_note_with_id_prefix(&client, ¬e_id).map_err(|err| err.to_string())?; - - // print note summary - print_notes_summary(core::iter::once(&input_note_record))?; - - let mut table = Table::new(); - table - .load_preset(presets::UTF8_HORIZONTAL_ONLY) - .set_content_arrangement(ContentArrangement::DynamicFullWidth); - - // print note script - if show_script { - let script = input_note_record.details().script(); - - table - .add_row(vec![ - Cell::new("Note Script hash").add_attribute(Attribute::Bold), - Cell::new(script.hash()), - ]) - .add_row(vec![ - Cell::new("Note Script code").add_attribute(Attribute::Bold), - Cell::new(script.code()), - ]); - }; - - // print note vault - if show_vault { - table - .add_row(vec![ - Cell::new("Note Vault hash").add_attribute(Attribute::Bold), - Cell::new(input_note_record.assets().commitment()), - ]) - .add_row(vec![Cell::new("Note Vault").add_attribute(Attribute::Bold)]); - - input_note_record.assets().iter().for_each(|asset| { - table.add_row(vec![Cell::new(format!("{:?}", asset))]); - }) - }; - - if show_inputs { - let inputs = NoteInputs::new(input_note_record.details().inputs().clone()) - .map_err(ClientError::NoteError)?; - - table - .add_row(vec![ - Cell::new("Note Inputs hash").add_attribute(Attribute::Bold), - Cell::new(inputs.commitment()), - ]) - .add_row(vec![Cell::new("Note Inputs").add_attribute(Attribute::Bold)]); - - inputs.values().iter().enumerate().for_each(|(idx, input)| { - table.add_row(vec![Cell::new(idx).add_attribute(Attribute::Bold), Cell::new(input)]); - }); - }; - - println!("{table}"); - Ok(()) -} - -// HELPERS -// ================================================================================================ -fn print_notes_summary<'a, I>(notes: I) -> Result<(), String> -where - I: IntoIterator, -{ - let mut table = create_dynamic_table(&[ - "Note ID", - "Script Hash", - "Vault Vash", - "Inputs Hash", - "Serial Num", - "Commit Height", - ]); - - for input_note_record in notes { - let commit_height = input_note_record - .inclusion_proof() - .map(|proof| proof.origin().block_num.to_string()) - 
.unwrap_or("-".to_string()); - - let script = input_note_record.details().script(); - - let inputs = NoteInputs::new(input_note_record.details().inputs().clone()) - .map_err(ClientError::NoteError)?; - - table.add_row(vec![ - input_note_record.id().inner().to_string(), - script.hash().to_string(), - input_note_record.assets().commitment().to_string(), - inputs.commitment().to_string(), - Digest::new(input_note_record.details().serial_num()).to_string(), - commit_height, - ]); - } - - println!("{table}"); - - Ok(()) -} - -// TESTS -// ================================================================================================ - -#[cfg(test)] -mod tests { - use std::env::temp_dir; - - use miden_client::{ - client::get_random_coin, - config::{ClientConfig, Endpoint}, - errors::NoteIdPrefixFetchError, - mock::{mock_full_chain_mmr_and_notes, mock_notes, MockClient, MockRpcApi}, - store::{sqlite_store::SqliteStore, InputNoteRecord}, - }; - use miden_lib::transaction::TransactionKernel; - use uuid::Uuid; - - use crate::cli::{ - get_note_with_id_prefix, - input_notes::{export_note, import_note}, - }; - - #[tokio::test] - async fn import_export_recorded_note() { - // generate test client - let mut path = temp_dir(); - path.push(Uuid::new_v4().to_string()); - let client_config = ClientConfig::new( - path.into_os_string().into_string().unwrap().try_into().unwrap(), - Endpoint::default().into(), - ); - - let store = SqliteStore::new((&client_config).into()).unwrap(); - let rng = get_random_coin(); - let executor_store = SqliteStore::new((&client_config).into()).unwrap(); - - let mut client = MockClient::new( - MockRpcApi::new(&Endpoint::default().to_string()), - rng, - store, - executor_store, - ) - .unwrap(); - - // generate test data - let assembler = TransactionKernel::assembler(); - let (consumed_notes, created_notes) = mock_notes(&assembler); - let (_, committed_notes, ..) 
= mock_full_chain_mmr_and_notes(consumed_notes); - - let committed_note: InputNoteRecord = committed_notes.first().unwrap().clone().into(); - let pending_note = InputNoteRecord::from(created_notes.first().unwrap().clone()); - - client.import_input_note(committed_note.clone()).unwrap(); - client.import_input_note(pending_note.clone()).unwrap(); - assert!(pending_note.inclusion_proof().is_none()); - assert!(committed_note.inclusion_proof().is_some()); - - let mut filename_path = temp_dir(); - filename_path.push("test_import"); - - let mut filename_path_pending = temp_dir(); - filename_path_pending.push("test_import_pending"); - - export_note(&client, &committed_note.id().inner().to_string(), Some(filename_path.clone())) - .unwrap(); - - assert!(filename_path.exists()); - - export_note( - &client, - &pending_note.id().inner().to_string(), - Some(filename_path_pending.clone()), - ) - .unwrap(); - - assert!(filename_path_pending.exists()); - - // generate test client to import notes to - let mut path = temp_dir(); - path.push(Uuid::new_v4().to_string()); - let client_config = ClientConfig::new( - path.into_os_string().into_string().unwrap().try_into().unwrap(), - Endpoint::default().into(), - ); - let store = SqliteStore::new((&client_config).into()).unwrap(); - let executor_store = SqliteStore::new((&client_config).into()).unwrap(); - - let mut client = MockClient::new( - MockRpcApi::new(&Endpoint::default().to_string()), - rng, - store, - executor_store, - ) - .unwrap(); - - import_note(&mut client, filename_path).unwrap(); - let imported_note_record: InputNoteRecord = - client.get_input_note(committed_note.id()).unwrap(); - - assert_eq!(committed_note.id(), imported_note_record.id()); - - import_note(&mut client, filename_path_pending).unwrap(); - let imported_pending_note_record = client.get_input_note(pending_note.id()).unwrap(); - - assert_eq!(imported_pending_note_record.id(), pending_note.id()); - } - - #[tokio::test] - async fn get_input_note_with_prefix() { - // generate test client - let mut path = temp_dir(); - path.push(Uuid::new_v4().to_string()); - let client_config = ClientConfig::new( - path.into_os_string().into_string().unwrap().try_into().unwrap(), - Endpoint::default().into(), - ); - - let store = SqliteStore::new((&client_config).into()).unwrap(); - let rng = get_random_coin(); - let executor_store = SqliteStore::new((&client_config).into()).unwrap(); - - let mut client = MockClient::new( - MockRpcApi::new(&Endpoint::default().to_string()), - rng, - store, - executor_store, - ) - .unwrap(); - - // Ensure we get an error if no note is found - let non_existent_note_id = "0x123456"; - assert_eq!( - get_note_with_id_prefix(&client, non_existent_note_id), - Err(NoteIdPrefixFetchError::NoMatch(non_existent_note_id.to_string())) - ); - - // generate test data - let assembler = TransactionKernel::assembler(); - let (consumed_notes, created_notes) = mock_notes(&assembler); - let (_, notes, ..) 
= mock_full_chain_mmr_and_notes(consumed_notes); - - let committed_note: InputNoteRecord = notes.first().unwrap().clone().into(); - let pending_note = InputNoteRecord::from(created_notes.first().unwrap().clone()); - - client.import_input_note(committed_note.clone()).unwrap(); - client.import_input_note(pending_note.clone()).unwrap(); - assert!(pending_note.inclusion_proof().is_none()); - assert!(committed_note.inclusion_proof().is_some()); - - // Check that we can fetch Both notes - let note = get_note_with_id_prefix(&client, &committed_note.id().to_hex()).unwrap(); - assert_eq!(note.id(), committed_note.id()); - - let note = get_note_with_id_prefix(&client, &pending_note.id().to_hex()).unwrap(); - assert_eq!(note.id(), pending_note.id()); - - // Check that we get an error if many match - let note_id_with_many_matches = "0x"; - assert_eq!( - get_note_with_id_prefix(&client, note_id_with_many_matches), - Err(NoteIdPrefixFetchError::MultipleMatches(note_id_with_many_matches.to_string())) - ); - } -} diff --git a/src/cli/mod.rs b/src/cli/mod.rs deleted file mode 100644 index d02ca2e3b..000000000 --- a/src/cli/mod.rs +++ /dev/null @@ -1,163 +0,0 @@ -use std::path::Path; - -use clap::Parser; -use comfy_table::{presets, Attribute, Cell, ContentArrangement, Table}; -use figment::{ - providers::{Format, Toml}, - Figment, -}; -use miden_client::{ - client::{ - get_random_coin, - rpc::{NodeRpcClient, TonicRpcClient}, - Client, - }, - config::ClientConfig, - errors::{ClientError, NoteIdPrefixFetchError}, - store::{sqlite_store::SqliteStore, InputNoteRecord, NoteFilter as ClientNoteFilter, Store}, -}; -use miden_objects::crypto::rand::FeltRng; -#[cfg(not(feature = "mock"))] -use miden_objects::crypto::rand::RpoRandomCoin; - -mod account; -mod info; -mod init; -mod input_notes; -mod sync; -mod tags; -mod transactions; - -/// Config file name -const CLIENT_CONFIG_FILE_NAME: &str = "miden-client.toml"; - -/// Root CLI struct -#[derive(Parser, Debug)] -#[clap(name = "Miden", about = "Miden client", version, rename_all = "kebab-case")] -pub struct Cli { - #[clap(subcommand)] - action: Command, -} - -/// CLI actions -#[derive(Debug, Parser)] -pub enum Command { - #[clap(subcommand)] - Account(account::AccountCmd), - Init, - #[clap(subcommand)] - InputNotes(input_notes::InputNotes), - /// Sync this client with the latest state of the Miden network. - Sync, - /// View a summary of the current client state - Info, - #[clap(subcommand)] - Tags(tags::TagsCmd), - #[clap(subcommand, name = "tx")] - #[clap(visible_alias = "transaction")] - Transaction(transactions::Transaction), -} - -/// CLI entry point -impl Cli { - pub async fn execute(&self) -> Result<(), String> { - let mut current_dir = std::env::current_dir().map_err(|err| err.to_string())?; - current_dir.push(CLIENT_CONFIG_FILE_NAME); - - // Check if it's an init command before anything else. When we run the init command for the first time we won't - // have a config file and thus creating the store would not be possible. 
- if matches!(&self.action, Command::Init) { - init::initialize_client(current_dir.clone())?; - return Ok(()); - } - - // Create the client - let client_config = load_config(current_dir.as_path())?; - let rpc_endpoint = client_config.rpc.endpoint.to_string(); - let store = SqliteStore::new((&client_config).into()).map_err(ClientError::StoreError)?; - let rng = get_random_coin(); - let executor_store = - miden_client::store::sqlite_store::SqliteStore::new((&client_config).into()) - .map_err(ClientError::StoreError)?; - - let client: Client = - Client::new(TonicRpcClient::new(&rpc_endpoint), rng, store, executor_store)?; - - // Execute cli command - match &self.action { - Command::Account(account) => account.execute(client), - Command::Init => Ok(()), - Command::Info => info::print_client_info(&client), - Command::InputNotes(notes) => notes.execute(client), - Command::Sync => sync::sync_state(client).await, - Command::Tags(tags) => tags.execute(client).await, - Command::Transaction(transaction) => transaction.execute(client).await, - } - } -} - -/// Loads the client configuration. -/// -/// This function will look for the configuration file at the provided path. If the path is -/// relative, searches in parent directories all the way to the root as well. -pub fn load_config(config_file: &Path) -> Result { - Figment::from(Toml::file(config_file)) - .extract() - .map_err(|err| format!("Failed to load {} config file: {err}", config_file.display())) -} - -pub fn create_dynamic_table(headers: &[&str]) -> Table { - let header_cells = headers - .iter() - .map(|header| Cell::new(header).add_attribute(Attribute::Bold)) - .collect::>(); - - let mut table = Table::new(); - table - .load_preset(presets::UTF8_FULL) - .set_content_arrangement(ContentArrangement::DynamicFullWidth) - .set_header(header_cells); - - table -} - -/// Returns all client's notes whose ID starts with `note_id_prefix` -/// -/// # Errors -/// -/// - Returns [NoteIdPrefixFetchError::NoMatch] if we were unable to find any note where -/// `note_id_prefix` is a prefix of its id. -/// - Returns [NoteIdPrefixFetchError::MultipleMatches] if there were more than one note found -/// where `note_id_prefix` is a prefix of its id. -pub(crate) fn get_note_with_id_prefix( - client: &Client, - note_id_prefix: &str, -) -> Result { - let input_note_records = client - .get_input_notes(ClientNoteFilter::All) - .map_err(|err| { - tracing::error!("Error when fetching all notes from the store: {err}"); - NoteIdPrefixFetchError::NoMatch(note_id_prefix.to_string()) - })? 
- .into_iter() - .filter(|note_record| note_record.id().to_hex().starts_with(note_id_prefix)) - .collect::>(); - - if input_note_records.is_empty() { - return Err(NoteIdPrefixFetchError::NoMatch(note_id_prefix.to_string())); - } - if input_note_records.len() > 1 { - let input_note_record_ids = input_note_records - .iter() - .map(|input_note_record| input_note_record.id()) - .collect::>(); - tracing::error!( - "Multiple notes found for the prefix {}: {:?}", - note_id_prefix, - input_note_record_ids - ); - return Err(NoteIdPrefixFetchError::MultipleMatches(note_id_prefix.to_string())); - } - - Ok(input_note_records[0].clone()) -} diff --git a/src/cli/sync.rs b/src/cli/sync.rs deleted file mode 100644 index 96f9d8af4..000000000 --- a/src/cli/sync.rs +++ /dev/null @@ -1,13 +0,0 @@ -use miden_client::{ - client::{rpc::NodeRpcClient, Client}, - store::Store, -}; -use miden_objects::crypto::rand::FeltRng; - -pub async fn sync_state( - mut client: Client, -) -> Result<(), String> { - let block_num = client.sync_state().await?; - println!("State synced to block {}", block_num); - Ok(()) -} diff --git a/src/cli/tags.rs b/src/cli/tags.rs deleted file mode 100644 index 6d0d0ca79..000000000 --- a/src/cli/tags.rs +++ /dev/null @@ -1,55 +0,0 @@ -use miden_client::{client::rpc::NodeRpcClient, store::Store}; -use miden_objects::crypto::rand::FeltRng; - -use super::{Client, Parser}; - -#[derive(Debug, Parser, Clone)] -#[clap(about = "View and add tags")] -pub enum TagsCmd { - /// List all tags monitored by this client - #[clap(short_flag = 'l')] - List, - - /// Add a new tag to the list of tags monitored by this client - #[clap(short_flag = 'a')] - Add { - #[clap()] - tag: u64, - }, -} - -impl TagsCmd { - pub async fn execute( - &self, - client: Client, - ) -> Result<(), String> { - match self { - TagsCmd::List => { - list_tags(client)?; - }, - TagsCmd::Add { tag } => { - add_tag(client, *tag)?; - }, - } - Ok(()) - } -} - -// HELPERS -// ================================================================================================ -fn list_tags( - client: Client, -) -> Result<(), String> { - let tags = client.get_note_tags()?; - println!("tags: {:?}", tags); - Ok(()) -} - -fn add_tag( - mut client: Client, - tag: u64, -) -> Result<(), String> { - client.add_note_tag(tag)?; - println!("tag {} added", tag); - Ok(()) -} diff --git a/src/cli/transactions.rs b/src/cli/transactions.rs deleted file mode 100644 index 05e0dd3da..000000000 --- a/src/cli/transactions.rs +++ /dev/null @@ -1,256 +0,0 @@ -use clap::ValueEnum; -use miden_client::{ - client::{ - rpc::NodeRpcClient, - transactions::{ - transaction_request::{PaymentTransactionData, TransactionTemplate}, - TransactionRecord, - }, - }, - store::{Store, TransactionFilter}, -}; -use miden_objects::{ - accounts::AccountId, - assets::FungibleAsset, - crypto::rand::FeltRng, - notes::{NoteId, NoteType as MidenNoteType}, -}; -use tracing::info; - -use super::{get_note_with_id_prefix, Client, Parser}; -use crate::cli::create_dynamic_table; - -#[derive(Debug, Clone, Copy, ValueEnum)] -pub enum NoteType { - Public, - Private, -} - -impl From<&NoteType> for MidenNoteType { - fn from(note_type: &NoteType) -> Self { - match note_type { - NoteType::Public => MidenNoteType::Public, - NoteType::Private => MidenNoteType::OffChain, - } - } -} - -#[derive(Clone, Debug, Parser)] -#[clap()] -pub enum TransactionType { - /// Create a pay-to-id transaction. 
- P2ID { - sender_account_id: String, - target_account_id: String, - faucet_id: String, - amount: u64, - #[clap(short, long, value_enum)] - note_type: NoteType, - }, - /// Mint `amount` tokens from the specified fungible faucet (corresponding to `faucet_id`). The created note can then be then consumed by - /// `target_account_id`. - Mint { - target_account_id: String, - faucet_id: String, - amount: u64, - #[clap(short, long, value_enum)] - note_type: NoteType, - }, - /// Create a pay-to-id with recall transaction. - P2IDR { - sender_account_id: String, - target_account_id: String, - faucet_id: String, - amount: u64, - recall_height: u32, - #[clap(short, long, value_enum)] - note_type: NoteType, - }, - /// Consume with the account corresponding to `account_id` all of the notes from `list_of_notes`. - ConsumeNotes { - account_id: String, - /// A list of note IDs or the hex prefixes of their corresponding IDs - list_of_notes: Vec, - }, -} - -#[derive(Debug, Parser, Clone)] -#[clap(about = "Execute and view transactions")] -pub enum Transaction { - /// List currently tracked transactions - #[clap(short_flag = 'l')] - List, - /// Execute a transaction, prove and submit it to the node. Once submitted, it - /// gets tracked by the client - #[clap(short_flag = 'n')] - New { - #[clap(subcommand)] - transaction_type: TransactionType, - }, -} - -impl Transaction { - pub async fn execute( - &self, - mut client: Client, - ) -> Result<(), String> { - match self { - Transaction::List => { - list_transactions(client)?; - }, - Transaction::New { transaction_type } => { - new_transaction(&mut client, transaction_type).await?; - }, - } - Ok(()) - } -} - -// NEW TRANSACTION -// ================================================================================================ -async fn new_transaction( - client: &mut Client, - transaction_type: &TransactionType, -) -> Result<(), String> { - let transaction_template: TransactionTemplate = - build_transaction_template(client, transaction_type)?; - - let transaction_request = client.build_transaction_request(transaction_template)?; - let transaction_execution_result = client.new_transaction(transaction_request)?; - - info!("Executed transaction, proving and then submitting..."); - - client.submit_transaction(transaction_execution_result).await?; - - Ok(()) -} - -/// Builds a [TransactionTemplate] based on the transaction type provided via cli args -/// -/// For [TransactionTemplate::ConsumeNotes], it'll try to find the corresponding notes by using the -/// provided IDs as prefixes -fn build_transaction_template( - client: &Client, - transaction_type: &TransactionType, -) -> Result { - match transaction_type { - TransactionType::P2ID { - sender_account_id, - target_account_id, - faucet_id, - amount, - note_type, - } => { - let faucet_id = AccountId::from_hex(faucet_id).map_err(|err| err.to_string())?; - let fungible_asset = - FungibleAsset::new(faucet_id, *amount).map_err(|err| err.to_string())?.into(); - let sender_account_id = - AccountId::from_hex(sender_account_id).map_err(|err| err.to_string())?; - let target_account_id = - AccountId::from_hex(target_account_id).map_err(|err| err.to_string())?; - - let payment_transaction = - PaymentTransactionData::new(fungible_asset, sender_account_id, target_account_id); - - Ok(TransactionTemplate::PayToId(payment_transaction, note_type.into())) - }, - TransactionType::P2IDR { - sender_account_id, - target_account_id, - faucet_id, - amount, - recall_height, - note_type, - } => { - let faucet_id = 
AccountId::from_hex(faucet_id).map_err(|err| err.to_string())?; - let fungible_asset = - FungibleAsset::new(faucet_id, *amount).map_err(|err| err.to_string())?.into(); - let sender_account_id = - AccountId::from_hex(sender_account_id).map_err(|err| err.to_string())?; - let target_account_id = - AccountId::from_hex(target_account_id).map_err(|err| err.to_string())?; - - let payment_transaction = - PaymentTransactionData::new(fungible_asset, sender_account_id, target_account_id); - Ok(TransactionTemplate::PayToIdWithRecall( - payment_transaction, - *recall_height, - note_type.into(), - )) - }, - TransactionType::Mint { - faucet_id, - target_account_id, - amount, - note_type, - } => { - let faucet_id = AccountId::from_hex(faucet_id).map_err(|err| err.to_string())?; - let fungible_asset = - FungibleAsset::new(faucet_id, *amount).map_err(|err| err.to_string())?; - let target_account_id = - AccountId::from_hex(target_account_id).map_err(|err| err.to_string())?; - - Ok(TransactionTemplate::MintFungibleAsset( - fungible_asset, - target_account_id, - note_type.into(), - )) - }, - TransactionType::ConsumeNotes { account_id, list_of_notes } => { - let list_of_notes = list_of_notes - .iter() - .map(|note_id| { - get_note_with_id_prefix(client, note_id) - .map(|note_record| note_record.id()) - .map_err(|err| err.to_string()) - }) - .collect::, _>>()?; - - let account_id = AccountId::from_hex(account_id).map_err(|err| err.to_string())?; - - Ok(TransactionTemplate::ConsumeNotes(account_id, list_of_notes)) - }, - } -} - -// LIST TRANSACTIONS -// ================================================================================================ -fn list_transactions( - client: Client, -) -> Result<(), String> { - let transactions = client.get_transactions(TransactionFilter::All)?; - print_transactions_summary(&transactions); - Ok(()) -} - -// HELPERS -// ================================================================================================ -fn print_transactions_summary<'a, I>(executed_transactions: I) -where - I: IntoIterator, -{ - let mut table = create_dynamic_table(&[ - "ID", - "Status", - "Account ID", - "Script Hash", - "Input Notes Count", - "Output Notes Count", - ]); - - for tx in executed_transactions { - table.add_row(vec![ - tx.id.to_string(), - tx.transaction_status.to_string(), - tx.account_id.to_string(), - tx.transaction_script - .as_ref() - .map(|x| x.hash().to_string()) - .unwrap_or("-".to_string()), - tx.input_note_nullifiers.len().to_string(), - tx.output_notes.num_notes().to_string(), - ]); - } - - println!("{table}"); -} diff --git a/src/client/accounts.rs b/src/client/accounts.rs deleted file mode 100644 index 0780a6751..000000000 --- a/src/client/accounts.rs +++ /dev/null @@ -1,318 +0,0 @@ -use miden_lib::AuthScheme; -use miden_objects::{ - accounts::{ - Account, AccountData, AccountId, AccountStorageType, AccountStub, AccountType, AuthData, - }, - assets::TokenSymbol, - crypto::{ - dsa::rpo_falcon512::SecretKey, - rand::{FeltRng, RpoRandomCoin}, - }, - Digest, Felt, Word, -}; - -use super::{rpc::NodeRpcClient, Client}; -use crate::{ - errors::ClientError, - store::{AuthInfo, Store}, -}; - -pub enum AccountTemplate { - BasicWallet { - mutable_code: bool, - storage_mode: AccountStorageMode, - }, - FungibleFaucet { - token_symbol: TokenSymbol, - decimals: u8, - max_supply: u64, - storage_mode: AccountStorageMode, - }, -} - -// TODO: Review this enum and variant names to have a consistent naming across all crates -#[derive(Debug, Clone, Copy)] -pub enum AccountStorageMode { - 
Local, - OnChain, -} - -impl From for AccountStorageType { - fn from(mode: AccountStorageMode) -> Self { - match mode { - AccountStorageMode::Local => AccountStorageType::OffChain, - AccountStorageMode::OnChain => AccountStorageType::OnChain, - } - } -} - -impl Client { - // ACCOUNT CREATION - // -------------------------------------------------------------------------------------------- - - /// Creates a new [Account] based on an [AccountTemplate] and saves it in the store - pub fn new_account( - &mut self, - template: AccountTemplate, - ) -> Result<(Account, Word), ClientError> { - let account_and_seed = match template { - AccountTemplate::BasicWallet { mutable_code, storage_mode } => { - self.new_basic_wallet(mutable_code, storage_mode) - }, - AccountTemplate::FungibleFaucet { - token_symbol, - decimals, - max_supply, - storage_mode, - } => self.new_fungible_faucet(token_symbol, decimals, max_supply, storage_mode), - }?; - - Ok(account_and_seed) - } - - /// Saves in the store the [Account] corresponding to `account_data`. - /// - /// # Errors - /// - /// Will return an error if trying to import a new account without providing its seed - /// - /// # Panics - /// - /// Will panic when trying to import a non-new account without a seed since this functionality - /// is not currently implemented - pub fn import_account(&mut self, account_data: AccountData) -> Result<(), ClientError> { - match account_data.auth { - AuthData::RpoFalcon512Seed(key_pair_seed) => { - // NOTE: The seed should probably come from a different format from miden-base's AccountData - let seed = Digest::try_from(&key_pair_seed)?.into(); - let mut rng = RpoRandomCoin::new(seed); - - let key_pair = SecretKey::with_rng(&mut rng); - - let account_seed = if !account_data.account.is_new() - && account_data.account_seed.is_some() - { - tracing::warn!("Imported an existing account and still provided a seed when it is not needed. It's possible that the account's file was incorrectly generated. 
The seed will be ignored."); - // Ignore the seed since it's not a new account - - // TODO: The alternative approach to this is to store the seed anyway, but - // ignore it at the point of executing against this transaction, but that - // approach seems a little bit more incorrect - None - } else { - account_data.account_seed - }; - - self.insert_account( - &account_data.account, - account_seed, - &AuthInfo::RpoFalcon512(key_pair), - ) - }, - } - } - - /// Creates a new regular account and saves it in the store along with its seed and auth data - fn new_basic_wallet( - &mut self, - mutable_code: bool, - account_storage_mode: AccountStorageMode, - ) -> Result<(Account, Word), ClientError> { - let key_pair = SecretKey::with_rng(&mut self.rng); - - let auth_scheme: AuthScheme = AuthScheme::RpoFalcon512 { pub_key: key_pair.public_key() }; - - // we need to use an initial seed to create the wallet account - let mut init_seed = [0u8; 32]; - self.rng.fill_bytes(&mut init_seed); - - let (account, seed) = if !mutable_code { - miden_lib::accounts::wallets::create_basic_wallet( - init_seed, - auth_scheme, - AccountType::RegularAccountImmutableCode, - account_storage_mode.into(), - ) - } else { - miden_lib::accounts::wallets::create_basic_wallet( - init_seed, - auth_scheme, - AccountType::RegularAccountUpdatableCode, - account_storage_mode.into(), - ) - }?; - - self.insert_account(&account, Some(seed), &AuthInfo::RpoFalcon512(key_pair))?; - Ok((account, seed)) - } - - fn new_fungible_faucet( - &mut self, - token_symbol: TokenSymbol, - decimals: u8, - max_supply: u64, - account_storage_mode: AccountStorageMode, - ) -> Result<(Account, Word), ClientError> { - let key_pair = SecretKey::with_rng(&mut self.rng); - - let auth_scheme: AuthScheme = AuthScheme::RpoFalcon512 { pub_key: key_pair.public_key() }; - - // we need to use an initial seed to create the wallet account - let mut init_seed = [0u8; 32]; - self.rng.fill_bytes(&mut init_seed); - - let (account, seed) = miden_lib::accounts::faucets::create_basic_fungible_faucet( - init_seed, - token_symbol, - decimals, - Felt::try_from(max_supply.to_le_bytes().as_slice()) - .expect("u64 can be safely converted to a field element"), - account_storage_mode.into(), - auth_scheme, - )?; - - self.insert_account(&account, Some(seed), &AuthInfo::RpoFalcon512(key_pair))?; - Ok((account, seed)) - } - - /// Inserts a new account into the client's store. - /// - /// # Errors - /// - /// If an account is new and no seed is provided, the function errors out because the client - /// cannot execute transactions against new accounts for which it does not know the seed. - pub fn insert_account( - &mut self, - account: &Account, - account_seed: Option, - auth_info: &AuthInfo, - ) -> Result<(), ClientError> { - if account.is_new() && account_seed.is_none() { - return Err(ClientError::ImportNewAccountWithoutSeed); - } - - self.store - .insert_account(account, account_seed, auth_info) - .map_err(ClientError::StoreError) - } - - // ACCOUNT DATA RETRIEVAL - // -------------------------------------------------------------------------------------------- - - /// Returns summary info about the accounts managed by this client. - pub fn get_accounts(&self) -> Result)>, ClientError> { - self.store.get_account_stubs().map_err(|err| err.into()) - } - - /// Returns summary info about the specified account. 
- pub fn get_account( - &self, - account_id: AccountId, - ) -> Result<(Account, Option), ClientError> { - self.store.get_account(account_id).map_err(|err| err.into()) - } - - /// Returns summary info about the specified account. - pub fn get_account_stub_by_id( - &self, - account_id: AccountId, - ) -> Result<(AccountStub, Option), ClientError> { - self.store.get_account_stub(account_id).map_err(|err| err.into()) - } - - /// Returns an [AuthInfo] object utilized to authenticate an account. - /// - /// # Errors - /// - /// Returns a [ClientError::StoreError] with a [StoreError::AccountDataNotFound](crate::errors::StoreError::AccountDataNotFound) if the provided ID does - /// not correspond to an existing account. - pub fn get_account_auth(&self, account_id: AccountId) -> Result { - self.store.get_account_auth(account_id).map_err(|err| err.into()) - } -} - -// TESTS -// ================================================================================================ - -#[cfg(test)] -pub mod tests { - use miden_objects::{ - accounts::{Account, AccountData, AccountId, AuthData}, - crypto::dsa::rpo_falcon512::SecretKey, - Word, - }; - - use crate::{ - mock::{ - get_account_with_default_account_code, get_new_account_with_default_account_code, - ACCOUNT_ID_FUNGIBLE_FAUCET_ON_CHAIN, ACCOUNT_ID_REGULAR, - }, - store::{sqlite_store::tests::create_test_client, AuthInfo}, - }; - - fn create_account_data(account_id: u64) -> AccountData { - let account_id = AccountId::try_from(account_id).unwrap(); - let account = get_account_with_default_account_code(account_id, Word::default(), None); - - AccountData::new( - account.clone(), - Some(Word::default()), - AuthData::RpoFalcon512Seed([0; 32]), - ) - } - - pub fn create_initial_accounts_data() -> Vec { - let account = create_account_data(ACCOUNT_ID_REGULAR); - - let faucet_account = create_account_data(ACCOUNT_ID_FUNGIBLE_FAUCET_ON_CHAIN); - - // Create Genesis state and save it to a file - let accounts = vec![account, faucet_account]; - - accounts - } - - #[test] - pub fn try_import_new_account() { - // generate test client - let mut client = create_test_client(); - - let account = get_new_account_with_default_account_code( - AccountId::try_from(ACCOUNT_ID_REGULAR).unwrap(), - Word::default(), - None, - ); - - let key_pair = SecretKey::new(); - - assert!(client - .insert_account(&account, None, &AuthInfo::RpoFalcon512(key_pair.clone())) - .is_err()); - assert!(client - .insert_account(&account, Some(Word::default()), &AuthInfo::RpoFalcon512(key_pair)) - .is_ok()); - } - - #[tokio::test] - async fn load_accounts_test() { - // generate test client - let mut client = create_test_client(); - - let created_accounts_data = create_initial_accounts_data(); - - for account_data in created_accounts_data.clone() { - client.import_account(account_data).unwrap(); - } - - let expected_accounts: Vec = created_accounts_data - .into_iter() - .map(|account_data| account_data.account) - .collect(); - let accounts = client.get_accounts().unwrap(); - - assert_eq!(accounts.len(), 2); - for (client_acc, expected_acc) in accounts.iter().zip(expected_accounts.iter()) { - assert_eq!(client_acc.0.hash(), expected_acc.hash()); - } - } -} diff --git a/src/client/chain_data.rs b/src/client/chain_data.rs deleted file mode 100644 index 8962a9173..000000000 --- a/src/client/chain_data.rs +++ /dev/null @@ -1,30 +0,0 @@ -use miden_objects::crypto::rand::FeltRng; -#[cfg(test)] -use miden_objects::BlockHeader; - -#[cfg(test)] -use crate::{ - client::{rpc::NodeRpcClient, Client}, - 
errors::ClientError, - store::Store, -}; - -#[cfg(test)] -impl Client { - pub fn get_block_headers_in_range( - &self, - start: u32, - finish: u32, - ) -> Result, ClientError> { - self.store - .get_block_headers(&(start..=finish).collect::>()) - .map_err(ClientError::StoreError) - } - - pub fn get_block_headers( - &self, - block_numbers: &[u32], - ) -> Result, ClientError> { - self.store.get_block_headers(block_numbers).map_err(ClientError::StoreError) - } -} diff --git a/src/client/mod.rs b/src/client/mod.rs deleted file mode 100644 index c0bd944e3..000000000 --- a/src/client/mod.rs +++ /dev/null @@ -1,94 +0,0 @@ -use miden_objects::{ - crypto::rand::{FeltRng, RpoRandomCoin}, - Felt, -}; -use miden_tx::TransactionExecutor; -use rand::Rng; - -use crate::{errors::ClientError, store::Store}; - -pub mod rpc; -use rpc::NodeRpcClient; - -pub mod accounts; -#[cfg(test)] -mod chain_data; -mod note_screener; -mod notes; -pub(crate) mod sync; -pub mod transactions; -pub(crate) use note_screener::NoteScreener; - -use crate::store::data_store::ClientDataStore; - -// MIDEN CLIENT -// ================================================================================================ - -/// A light client for connecting to the Miden rollup network. -/// -/// Miden client is responsible for managing a set of accounts. Specifically, the client: -/// - Keeps track of the current and historical states of a set of accounts and related objects -/// such as notes and transactions. -/// - Connects to one or more Miden nodes to periodically sync with the current state of the -/// network. -/// - Executes, proves, and submits transactions to the network as directed by the user. -pub struct Client { - /// The client's store, which provides a way to write and read entities to provide persistence. - store: S, - /// An instance of [FeltRng] which provides randomness tools for generating new keys, - /// serial numbers, etc. - rng: R, - /// An instance of [NodeRpcClient] which provides a way for the client to connect to the - /// Miden node. - rpc_api: N, - tx_executor: TransactionExecutor>, -} - -impl Client { - // CONSTRUCTOR - // -------------------------------------------------------------------------------------------- - - /// Returns a new instance of [Client]. - /// - /// ## Arguments - /// - /// - `api`: An instance of [NodeRpcClient] which provides a way for the client to connect to the Miden node. - /// - `store`: An instance of [Store], which provides a way to write and read entities to provide persistence. - /// - `executor_store`: An instance of [Store] that provides a way for [TransactionExecutor] to - /// retrieve relevant inputs at the moment of transaction execution. It should be the same - /// store as the one for `store`, but it doesn't have to be the **same instance** - /// - /// # Errors - /// - /// Returns an error if the client could not be instantiated. 
- pub fn new(api: N, rng: R, store: S, executor_store: S) -> Result { - Ok(Self { - store, - rng, - rpc_api: api, - tx_executor: TransactionExecutor::new(ClientDataStore::new(executor_store)), - }) - } - - #[cfg(any(test, feature = "test_utils"))] - pub fn rpc_api(&mut self) -> &mut N { - &mut self.rpc_api - } - - #[cfg(any(test, feature = "test_utils"))] - pub fn store(&mut self) -> &mut S { - &mut self.store - } -} - -// HELPERS -// -------------------------------------------------------------------------------------------- - -/// Gets [RpoRandomCoin] from the client -pub fn get_random_coin() -> RpoRandomCoin { - // TODO: Initialize coin status once along with the client and persist status for retrieval - let mut rng = rand::thread_rng(); - let coin_seed: [u64; 4] = rng.gen(); - - RpoRandomCoin::new(coin_seed.map(Felt::new)) -} diff --git a/src/client/note_screener.rs b/src/client/note_screener.rs deleted file mode 100644 index 676f93a4a..000000000 --- a/src/client/note_screener.rs +++ /dev/null @@ -1,239 +0,0 @@ -use std::collections::BTreeSet; - -use miden_objects::{accounts::AccountId, assets::Asset, notes::Note, Word}; - -use crate::{ - errors::{InvalidNoteInputsError, ScreenerError}, - store::Store, -}; - -// KNOWN SCRIPT ROOTS -// -------------------------------------------------------------------------------------------- -pub(crate) const P2ID_NOTE_SCRIPT_ROOT: &str = - "0xcdfd70344b952980272119bc02b837d14c07bbfc54f86a254422f39391b77b35"; -pub(crate) const P2IDR_NOTE_SCRIPT_ROOT: &str = - "0x41e5727b99a12b36066c09854d39d64dd09d9265c442a9be3626897572bf1745"; -pub(crate) const SWAP_NOTE_SCRIPT_ROOT: &str = - "0x5852920f88985b651cf7ef5e48623f898b6c292f4a2c25dd788ff8b46dd90417"; - -#[derive(Debug, Copy, Clone, PartialEq, Eq, PartialOrd, Ord)] -pub enum NoteRelevance { - /// The note can be consumed at any time. - Always, - /// The note can be consumed after the block with the specified number. - After(u32), -} - -pub struct NoteScreener<'a, S: Store> { - store: &'a S, -} - -impl<'a, S: Store> NoteScreener<'a, S> { - pub fn new(store: &'a S) -> Self { - Self { store } - } - - /// Returns a vector of tuples describing the relevance of the provided note to the - /// accounts monitored by this screener. - /// - /// Does a fast check for known scripts (P2ID, P2IDR, SWAP). We're currently - /// unable to execute notes that are not committed so a slow check for other scripts is currently - /// not available. 
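// A rough sketch of the fast-path dispatch implemented just below: the note
// script's root hash, rendered as a hex string, is compared against the known
// P2ID, P2IDR and SWAP roots defined at the top of this module, and anything else
// falls through to the generic relevance check. The `script_kind` helper and its
// string labels are hypothetical and exist only for illustration.
fn script_kind(script_root_hex: &str) -> &'static str {
    match script_root_hex {
        P2ID_NOTE_SCRIPT_ROOT => "P2ID",
        P2IDR_NOTE_SCRIPT_ROOT => "P2IDR",
        SWAP_NOTE_SCRIPT_ROOT => "SWAP",
        _ => "unknown",
    }
}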
- pub fn check_relevance( - &self, - note: &Note, - ) -> Result, ScreenerError> { - let account_ids = BTreeSet::from_iter(self.store.get_account_ids()?); - - let script_hash = note.script().hash().to_string(); - let note_relevance = match script_hash.as_str() { - P2ID_NOTE_SCRIPT_ROOT => Self::check_p2id_relevance(note, &account_ids)?, - P2IDR_NOTE_SCRIPT_ROOT => Self::check_p2idr_relevance(note, &account_ids)?, - SWAP_NOTE_SCRIPT_ROOT => self.check_swap_relevance(note, &account_ids)?, - _ => self.check_script_relevance(note, &account_ids)?, - }; - - Ok(note_relevance) - } - - fn check_p2id_relevance( - note: &Note, - account_ids: &BTreeSet, - ) -> Result, ScreenerError> { - let mut note_inputs_iter = note.inputs().values().iter(); - let account_id_felt = note_inputs_iter - .next() - .ok_or(InvalidNoteInputsError::NumInputsError(note.id(), 1))?; - - if note_inputs_iter.next().is_some() { - return Err(InvalidNoteInputsError::NumInputsError(note.id(), 1).into()); - } - - let account_id = AccountId::try_from(*account_id_felt) - .map_err(|err| InvalidNoteInputsError::AccountError(note.id(), err))?; - - if !account_ids.contains(&account_id) { - return Ok(vec![]); - } - Ok(vec![(account_id, NoteRelevance::Always)]) - } - - fn check_p2idr_relevance( - note: &Note, - account_ids: &BTreeSet, - ) -> Result, ScreenerError> { - let mut note_inputs_iter = note.inputs().values().iter(); - let account_id_felt = note_inputs_iter - .next() - .ok_or(InvalidNoteInputsError::NumInputsError(note.id(), 2))?; - let recall_height_felt = note_inputs_iter - .next() - .ok_or(InvalidNoteInputsError::NumInputsError(note.id(), 2))?; - - if note_inputs_iter.next().is_some() { - return Err(InvalidNoteInputsError::NumInputsError(note.id(), 2).into()); - } - - let sender = note.metadata().sender(); - let recall_height: u32 = recall_height_felt.as_int().try_into().map_err(|_err| { - InvalidNoteInputsError::BlockNumberError(note.id(), recall_height_felt.as_int()) - })?; - - let account_id = AccountId::try_from(*account_id_felt) - .map_err(|err| InvalidNoteInputsError::AccountError(note.id(), err))?; - - Ok(vec![ - (account_id, NoteRelevance::Always), - (sender, NoteRelevance::After(recall_height)), - ] - .into_iter() - .filter(|(account_id, _relevance)| account_ids.contains(account_id)) - .collect()) - } - - /// Checks if a swap note can be consumed by any account whose id is in `account_ids` - /// - /// This implementation serves as a placeholder as we're currently not able to create, execute - /// and send SWAP NOTES. Hence, it's also untested. The main logic should be the same: for each - /// account check if it has enough of the wanted asset. - /// This is also very inefficient as we're loading the full accounts. We should instead just - /// load the account's vaults, or even have a function in the `Store` to do this. 
- /// - /// TODO: test/revisit this in the future - fn check_swap_relevance( - &self, - note: &Note, - account_ids: &BTreeSet, - ) -> Result, ScreenerError> { - let note_inputs = note.inputs().to_vec(); - if note_inputs.len() != 9 { - return Ok(Vec::new()); - } - - // get the demanded asset from the note's inputs - let asset: Asset = - Word::from([note_inputs[4], note_inputs[5], note_inputs[6], note_inputs[7]]) - .try_into() - .map_err(|err| InvalidNoteInputsError::AssetError(note.id(), err))?; - let asset_faucet_id = AccountId::try_from(asset.vault_key()[3]) - .map_err(|err| InvalidNoteInputsError::AccountError(note.id(), err))?; - - let mut accounts_with_relevance = Vec::new(); - - for account_id in account_ids { - let (account, _) = self.store.get_account(*account_id)?; - - // Check that the account can cover the demanded asset - match asset { - Asset::NonFungible(_non_fungible_asset) - if account.vault().has_non_fungible_asset(asset).expect( - "Should be able to query has_non_fungible_asset for an Asset::NonFungible", - ) => - { - accounts_with_relevance.push((*account_id, NoteRelevance::Always)) - }, - Asset::Fungible(fungible_asset) - if account - .vault() - .get_balance(asset_faucet_id) - .expect("Should be able to query get_balance for an Asset::Fungible") - >= fungible_asset.amount() => - { - accounts_with_relevance.push((*account_id, NoteRelevance::Always)) - }, - _ => {}, - } - } - - Ok(accounts_with_relevance) - } - - fn check_script_relevance( - &self, - _note: &Note, - account_ids: &BTreeSet, - ) -> Result, ScreenerError> { - // TODO: try to execute the note script against relevant accounts; this will - // require querying data from the store - Ok(account_ids - .iter() - .map(|account_id| (*account_id, NoteRelevance::Always)) - .collect()) - } -} - -#[cfg(test)] -mod tests { - use miden_lib::notes::{create_p2id_note, create_p2idr_note, create_swap_note}; - use miden_objects::{ - accounts::{AccountId, ACCOUNT_ID_FUNGIBLE_FAUCET_OFF_CHAIN}, - assets::FungibleAsset, - crypto::rand::RpoRandomCoin, - notes::NoteType, - }; - - use crate::client::note_screener::{ - P2IDR_NOTE_SCRIPT_ROOT, P2ID_NOTE_SCRIPT_ROOT, SWAP_NOTE_SCRIPT_ROOT, - }; - - // We need to make sure the script roots we use for filters are in line with the note scripts - // coming from Miden objects - #[test] - fn ensure_correct_script_roots() { - // create dummy data for the notes - let faucet_id: AccountId = ACCOUNT_ID_FUNGIBLE_FAUCET_OFF_CHAIN.try_into().unwrap(); - let account_id: AccountId = ACCOUNT_ID_FUNGIBLE_FAUCET_OFF_CHAIN.try_into().unwrap(); - let rng = RpoRandomCoin::new(Default::default()); - - // create dummy notes to compare note script roots - let p2id_note = create_p2id_note( - account_id, - account_id, - vec![FungibleAsset::new(faucet_id, 100u64).unwrap().into()], - NoteType::OffChain, - rng, - ) - .unwrap(); - let p2idr_note = create_p2idr_note( - account_id, - account_id, - vec![FungibleAsset::new(faucet_id, 100u64).unwrap().into()], - NoteType::OffChain, - 10, - rng, - ) - .unwrap(); - let (swap_note, _serial_num) = create_swap_note( - account_id, - FungibleAsset::new(faucet_id, 100u64).unwrap().into(), - FungibleAsset::new(faucet_id, 100u64).unwrap().into(), - NoteType::OffChain, - rng, - ) - .unwrap(); - - assert_eq!(p2id_note.script().hash().to_string(), P2ID_NOTE_SCRIPT_ROOT); - assert_eq!(p2idr_note.script().hash().to_string(), P2IDR_NOTE_SCRIPT_ROOT); - assert_eq!(swap_note.script().hash().to_string(), SWAP_NOTE_SCRIPT_ROOT); - } -} diff --git a/src/client/notes.rs b/src/client/notes.rs 
deleted file mode 100644 index c4b2e9c5a..000000000 --- a/src/client/notes.rs +++ /dev/null @@ -1,30 +0,0 @@ -use miden_objects::{crypto::rand::FeltRng, notes::NoteId}; - -use super::{rpc::NodeRpcClient, Client}; -use crate::{ - errors::ClientError, - store::{InputNoteRecord, NoteFilter, Store}, -}; - -impl Client { - // INPUT NOTE DATA RETRIEVAL - // -------------------------------------------------------------------------------------------- - - /// Returns input notes managed by this client. - pub fn get_input_notes(&self, filter: NoteFilter) -> Result, ClientError> { - self.store.get_input_notes(filter).map_err(|err| err.into()) - } - - /// Returns the input note with the specified hash. - pub fn get_input_note(&self, note_id: NoteId) -> Result { - self.store.get_input_note(note_id).map_err(|err| err.into()) - } - - // INPUT NOTE CREATION - // -------------------------------------------------------------------------------------------- - - /// Imports a new input note into the client's store. - pub fn import_input_note(&mut self, note: InputNoteRecord) -> Result<(), ClientError> { - self.store.insert_input_note(¬e).map_err(|err| err.into()) - } -} diff --git a/src/client/rpc/mod.rs b/src/client/rpc/mod.rs deleted file mode 100644 index 98e058c41..000000000 --- a/src/client/rpc/mod.rs +++ /dev/null @@ -1,193 +0,0 @@ -use core::fmt; - -use async_trait::async_trait; -use miden_objects::{ - accounts::{Account, AccountId}, - crypto::merkle::{MerklePath, MmrDelta}, - notes::{Note, NoteId, NoteMetadata, NoteTag}, - transaction::ProvenTransaction, - BlockHeader, Digest, -}; - -use crate::errors::NodeRpcClientError; - -mod tonic_client; -pub use tonic_client::TonicRpcClient; - -// NOTE DETAILS -// ================================================================================================ - -/// Describes the possible responses from the `GetNotesById` endpoint for a single note -pub enum NoteDetails { - OffChain(NoteId, NoteMetadata, NoteInclusionDetails), - Public(Note, NoteInclusionDetails), -} - -/// Contains information related to the note inclusion, but not related to the block header -/// that contains the note -pub struct NoteInclusionDetails { - pub block_num: u32, - pub note_index: u32, - pub merkle_path: MerklePath, -} - -impl NoteInclusionDetails { - pub fn new(block_num: u32, note_index: u32, merkle_path: MerklePath) -> Self { - Self { block_num, note_index, merkle_path } - } -} - -// NODE RPC CLIENT TRAIT -// ================================================================================================ - -/// Defines the interface for communicating with the Miden node. -/// -/// The implementers are responsible for connecting to the Miden node, handling endpoint -/// requests/responses, and translating responses into domain objects relevant for each of the -/// endpoints. 
-#[async_trait] -pub trait NodeRpcClient { - /// Given a Proven Transaction, send it to the node for it to be included in a future block - /// using the `/SubmitProvenTransaction` rpc endpoint - async fn submit_proven_transaction( - &mut self, - proven_transaction: ProvenTransaction, - ) -> Result<(), NodeRpcClientError>; - - /// Given a block number, fetches the block header corresponding to that height from the node - /// using the `/GetBlockHeaderByNumber` endpoint - /// - /// When `None` is provided, returns info regarding the latest block - async fn get_block_header_by_number( - &mut self, - block_number: Option, - ) -> Result; - - /// Fetches note-related data for a list of [NoteId] using the `/GetNotesById` rpc endpoint - /// - /// For any NoteType::Offchain note, the return data is only the [NoteMetadata], whereas - /// for NoteType::Onchain notes, the return data includes all details. - async fn get_notes_by_id( - &mut self, - note_ids: &[NoteId], - ) -> Result, NodeRpcClientError>; - - /// Fetches info from the node necessary to perform a state sync using the - /// `/SyncState` rpc endpoint - /// - /// - `block_num` is the last block number known by the client. The returned [StateSyncInfo] - /// should contain data starting from the next block, until the first block which contains a - /// note of matching the requested tag, or the chain tip if there are no notes. - /// - `account_ids` is a list of account ids and determines the accounts the client is interested - /// in and should receive account updates of. - /// - `note_tags` is a list of tags used to filter the notes the client is interested in, which - /// serves as a "note group" filter. Notice that you can't filter by a specific note id - /// - `nullifiers_tags` similar to `note_tags`, is a list of tags used to filter the nullifiers - /// corresponding to some notes the client is interested in - async fn sync_state( - &mut self, - block_num: u32, - account_ids: &[AccountId], - note_tags: &[NoteTag], - nullifiers_tags: &[u16], - ) -> Result; - - /// Fetches the current state of an account from the node using the `/GetAccountDetails` rpc endpoint - /// - /// - `account_id` is the id of the wanted account. 
- async fn get_account_update( - &mut self, - account_id: AccountId, - ) -> Result; -} - -// STATE SYNC INFO -// ================================================================================================ - -/// Represents a `SyncStateResponse` with fields converted into domain types -pub struct StateSyncInfo { - /// The block number of the chain tip at the moment of the response - pub chain_tip: u32, - /// The returned block header - pub block_header: BlockHeader, - /// MMR delta that contains data for (current_block.num, incoming_block_header.num-1) - pub mmr_delta: MmrDelta, - /// Tuples of AccountId alongside their new account hashes - pub account_hash_updates: Vec<(AccountId, Digest)>, - /// List of tuples of Note ID, Note Index and Merkle Path for all new notes - pub note_inclusions: Vec, - /// List of nullifiers that identify spent notes - pub nullifiers: Vec, -} - -// COMMITTED NOTE -// ================================================================================================ - -/// Represents a committed note, returned as part of a `SyncStateResponse` -pub struct CommittedNote { - /// Note ID of the committed note - note_id: NoteId, - /// Note index for the note merkle tree - note_index: u32, - /// Merkle path for the note merkle tree up to the block's note root - merkle_path: MerklePath, - /// Note metadata - metadata: NoteMetadata, -} - -impl CommittedNote { - pub fn new( - note_id: NoteId, - note_index: u32, - merkle_path: MerklePath, - metadata: NoteMetadata, - ) -> Self { - Self { - note_id, - note_index, - merkle_path, - metadata, - } - } - - pub fn note_id(&self) -> &NoteId { - &self.note_id - } - - pub fn note_index(&self) -> u32 { - self.note_index - } - - pub fn merkle_path(&self) -> &MerklePath { - &self.merkle_path - } - - #[allow(dead_code)] - pub fn metadata(&self) -> NoteMetadata { - self.metadata - } -} - -// RPC API ENDPOINT -// ================================================================================================ -// -#[derive(Debug)] -pub enum NodeRpcClientEndpoint { - GetAccountDetails, - GetBlockHeaderByNumber, - SyncState, - SubmitProvenTx, -} - -impl fmt::Display for NodeRpcClientEndpoint { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - match self { - NodeRpcClientEndpoint::GetAccountDetails => write!(f, "get_account_details"), - NodeRpcClientEndpoint::GetBlockHeaderByNumber => { - write!(f, "get_block_header_by_number") - }, - NodeRpcClientEndpoint::SyncState => write!(f, "sync_state"), - NodeRpcClientEndpoint::SubmitProvenTx => write!(f, "submit_proven_transaction"), - } - } -} diff --git a/src/client/sync.rs b/src/client/sync.rs deleted file mode 100644 index c910508eb..000000000 --- a/src/client/sync.rs +++ /dev/null @@ -1,475 +0,0 @@ -use std::collections::BTreeSet; - -use crypto::merkle::{InOrderIndex, MmrDelta, MmrPeaks, PartialMmr}; -use miden_objects::{ - accounts::{Account, AccountId, AccountStub}, - crypto::{self, rand::FeltRng}, - notes::{NoteExecutionMode, NoteId, NoteInclusionProof, NoteTag}, - transaction::{InputNote, TransactionId}, - BlockHeader, Digest, -}; -use tracing::{info, warn}; - -use super::{ - rpc::{CommittedNote, NodeRpcClient, NoteDetails}, - transactions::TransactionRecord, - Client, -}; -use crate::{ - errors::{ClientError, StoreError}, - store::{ChainMmrNodeFilter, NoteFilter, Store, TransactionFilter}, -}; - -pub enum SyncStatus { - SyncedToLastBlock(u32), - SyncedToBlock(u32), -} - -/// Contains information about new notes as consequence of a sync -pub struct SyncedNewNotes { - /// A list 
of public notes that have been received on sync - new_public_notes: Vec, - /// A list of note IDs alongside their inclusion proofs for locally-tracked - /// notes - new_inclusion_proofs: Vec<(NoteId, NoteInclusionProof)>, -} - -impl SyncedNewNotes { - pub fn new( - new_public_notes: Vec, - new_inclusion_proofs: Vec<(NoteId, NoteInclusionProof)>, - ) -> Self { - Self { new_public_notes, new_inclusion_proofs } - } - - pub fn new_public_notes(&self) -> &[InputNote] { - &self.new_public_notes - } - - pub fn new_inclusion_proofs(&self) -> &[(NoteId, NoteInclusionProof)] { - &self.new_inclusion_proofs - } - - /// Returns whether no new note-related information has been retrieved - pub fn is_empty(&self) -> bool { - self.new_inclusion_proofs.is_empty() && self.new_public_notes.is_empty() - } -} - -// CONSTANTS -// ================================================================================================ - -/// The number of bits to shift identifiers for in use of filters. -pub const FILTER_ID_SHIFT: u8 = 48; - -impl Client { - // SYNC STATE - // -------------------------------------------------------------------------------------------- - - /// Returns the block number of the last state sync block. - pub fn get_sync_height(&self) -> Result { - self.store.get_sync_height().map_err(|err| err.into()) - } - - /// Returns the list of note tags tracked by the client. - pub fn get_note_tags(&self) -> Result, ClientError> { - self.store.get_note_tags().map_err(|err| err.into()) - } - - /// Adds a note tag for the client to track. - pub fn add_note_tag(&mut self, tag: u64) -> Result<(), ClientError> { - match self.store.add_note_tag(tag).map_err(|err| err.into()) { - Ok(true) => Ok(()), - Ok(false) => { - warn!("Tag {} is already being tracked", tag); - Ok(()) - }, - Err(err) => Err(err), - } - } - - /// Syncs the client's state with the current state of the Miden network. - /// Before doing so, it ensures the genesis block exists in the local store. - /// - /// Returns the block number the client has been synced to. - pub async fn sync_state(&mut self) -> Result { - self.ensure_genesis_in_place().await?; - loop { - let response = self.sync_state_once().await?; - if let SyncStatus::SyncedToLastBlock(v) = response { - return Ok(v); - } - } - } - - /// Attempts to retrieve the genesis block from the store. If not found, - /// it requests it from the node and store it. - async fn ensure_genesis_in_place(&mut self) -> Result<(), ClientError> { - let genesis = self.store.get_block_header_by_num(0); - - match genesis { - Ok(_) => Ok(()), - Err(StoreError::BlockHeaderNotFound(0)) => self.retrieve_and_store_genesis().await, - Err(err) => Err(ClientError::StoreError(err)), - } - } - - /// Calls `get_block_header_by_number` requesting the genesis block and storing it - /// in the local database - async fn retrieve_and_store_genesis(&mut self) -> Result<(), ClientError> { - let genesis_block = self.rpc_api.get_block_header_by_number(Some(0)).await?; - - let blank_mmr_peaks = - MmrPeaks::new(0, vec![]).expect("Blank MmrPeaks should not fail to instantiate"); - // NOTE: If genesis block data ever includes notes in the future, the third parameter in - // this `insert_block_header` call may be `true` - self.store.insert_block_header(genesis_block, blank_mmr_peaks, false)?; - Ok(()) - } - - async fn sync_state_once(&mut self) -> Result { - let current_block_num = self.store.get_sync_height()?; - - let accounts: Vec = self - .store - .get_account_stubs()? 
- .into_iter() - .map(|(acc_stub, _)| acc_stub) - .collect(); - - let note_tags: Vec = accounts - .iter() - .map(|acc| NoteTag::from_account_id(acc.id(), NoteExecutionMode::Local)) - .collect::, _>>()?; - - // To receive information about added nullifiers, we reduce them to the higher 16 bits - // Note that besides filtering by nullifier prefixes, the node also filters by block number - // (it only returns nullifiers from current_block_num until response.block_header.block_num()) - let nullifiers_tags: Vec = self - .store - .get_unspent_input_note_nullifiers()? - .iter() - .map(|nullifier| (nullifier.inner()[3].as_int() >> FILTER_ID_SHIFT) as u16) - .collect(); - - // Send request - let account_ids: Vec = accounts.iter().map(|acc| acc.id()).collect(); - let response = self - .rpc_api - .sync_state(current_block_num, &account_ids, ¬e_tags, &nullifiers_tags) - .await?; - - // We don't need to continue if the chain has not advanced - if response.block_header.block_num() == current_block_num { - return Ok(SyncStatus::SyncedToLastBlock(current_block_num)); - } - - let new_note_details = - self.get_note_details(response.note_inclusions, &response.block_header).await?; - - let (onchain_accounts, offchain_accounts): (Vec<_>, Vec<_>) = - accounts.into_iter().partition(|account_stub| account_stub.id().is_on_chain()); - - let updated_onchain_accounts = self - .get_updated_onchain_accounts(&response.account_hash_updates, &onchain_accounts) - .await?; - self.validate_local_account_hashes(&response.account_hash_updates, &offchain_accounts)?; - - // Derive new nullifiers data - let new_nullifiers = self.get_new_nullifiers(response.nullifiers)?; - - // Build PartialMmr with current data and apply updates - let (new_peaks, new_authentication_nodes) = { - let current_partial_mmr = self.build_current_partial_mmr()?; - - let (current_block, has_relevant_notes) = - self.store.get_block_header_by_num(current_block_num)?; - - apply_mmr_changes( - current_partial_mmr, - response.mmr_delta, - current_block, - has_relevant_notes, - )? - }; - - let note_ids: Vec = - new_note_details.new_inclusion_proofs.iter().map(|(id, _)| (*id)).collect(); - - let uncommitted_transactions = - self.store.get_transactions(TransactionFilter::Uncomitted)?; - - let transactions_to_commit = get_transactions_to_commit( - &uncommitted_transactions, - ¬e_ids, - &new_nullifiers, - &response.account_hash_updates, - ); - - // Apply received and computed updates to the store - self.store - .apply_state_sync( - response.block_header, - new_nullifiers, - new_note_details, - &transactions_to_commit, - new_peaks, - &new_authentication_nodes, - &updated_onchain_accounts, - ) - .map_err(ClientError::StoreError)?; - - if response.chain_tip == response.block_header.block_num() { - Ok(SyncStatus::SyncedToLastBlock(response.chain_tip)) - } else { - Ok(SyncStatus::SyncedToBlock(response.block_header.block_num())) - } - } - - // HELPERS - // -------------------------------------------------------------------------------------------- - - /// Extracts information about notes that the client is interested in, creating the note inclusion - /// proof in order to correctly update store data - async fn get_note_details( - &mut self, - committed_notes: Vec, - block_header: &BlockHeader, - ) -> Result { - // We'll only pick committed notes that we are tracking as input/output notes. Since the - // sync response contains notes matching either the provided accounts or the provided tag - // we might get many notes when we only care about a few of those. 
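// A plain-Rust sketch of the partitioning implemented just below, using `u64`
// stand-ins for note IDs (an assumption made purely for illustration): committed
// notes the client already tracks get an inclusion proof built from the sync
// response, while unrecognized IDs are collected so their details can be
// requested from the node.
fn partition_committed(
    tracked: &std::collections::BTreeSet<u64>,
    committed: &[u64],
) -> (Vec<u64>, Vec<u64>) {
    let (mut build_proof_locally, mut query_from_node) = (Vec::new(), Vec::new());
    for id in committed {
        if tracked.contains(id) {
            build_proof_locally.push(*id);
        } else {
            query_from_node.push(*id);
        }
    }
    (build_proof_locally, query_from_node)
}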
- - let mut new_public_notes = vec![]; - let mut local_notes_proofs = vec![]; - - let pending_input_notes = - self.store.get_input_notes(NoteFilter::Pending)?.into_iter().map(|n| n.id()); - - let pending_output_notes = - self.store.get_output_notes(NoteFilter::Pending)?.into_iter().map(|n| n.id()); - - let mut all_pending_notes: BTreeSet = BTreeSet::new(); - - pending_input_notes.chain(pending_output_notes).for_each(|id| { - all_pending_notes.insert(id); - }); - - for committed_note in committed_notes { - if all_pending_notes.contains(committed_note.note_id()) { - // The note belongs to our locally tracked set of pending notes, build the inclusion proof - let note_with_inclusion_proof = NoteInclusionProof::new( - block_header.block_num(), - block_header.sub_hash(), - block_header.note_root(), - committed_note.note_index().into(), - committed_note.merkle_path().clone(), - ) - .map_err(ClientError::NoteError) - .map(|proof| (*committed_note.note_id(), proof))?; - - local_notes_proofs.push(note_with_inclusion_proof); - } else { - // The note is public and we are not tracking it, push to the list of IDs to query - new_public_notes.push(*committed_note.note_id()); - } - } - - // Query the node for input note data and build the entities - let new_public_notes = - self.fetch_public_note_details(&new_public_notes, block_header).await?; - - Ok(SyncedNewNotes::new(new_public_notes, local_notes_proofs)) - } - - /// Queries the node for all received notes that are not being locally tracked in the client - /// - /// The client can receive metadata for private notes that it's not tracking. In this case, - /// notes are ignored for now as they become useless until details are imported. - async fn fetch_public_note_details( - &mut self, - query_notes: &[NoteId], - block_header: &BlockHeader, - ) -> Result, ClientError> { - if query_notes.is_empty() { - return Ok(vec![]); - } - info!("Getting note details for notes that are not being tracked."); - - let notes_data = self.rpc_api.get_notes_by_id(query_notes).await?; - let mut return_notes = Vec::with_capacity(query_notes.len()); - for note_data in notes_data { - match note_data { - NoteDetails::OffChain(id, ..) => { - // TODO: Is there any benefit to not ignoring these? In any case we do not have - // the recipient which is mandatory right now. - info!("Note {} is private but the client is not tracking it, ignoring.", id); - }, - NoteDetails::Public(note, inclusion_proof) => { - info!("Retrieved details for Note ID {}.", note.id()); - let note_inclusion_proof = NoteInclusionProof::new( - block_header.block_num(), - block_header.sub_hash(), - block_header.note_root(), - inclusion_proof.note_index as u64, - inclusion_proof.merkle_path, - ) - .map_err(ClientError::NoteError)?; - - return_notes.push(InputNote::new(note, note_inclusion_proof)) - }, - } - } - Ok(return_notes) - } - - /// Builds the current view of the chain's [PartialMmr]. Because we want to add all new - /// authentication nodes that could come from applying the MMR updates, we need to track all - /// known leaves thus far. - /// - /// As part of the syncing process, we add the current block number so we don't need to - /// track it here. 
- pub(crate) fn build_current_partial_mmr(&self) -> Result { - let current_block_num = self.store.get_sync_height()?; - - let tracked_nodes = self.store.get_chain_mmr_nodes(ChainMmrNodeFilter::All)?; - let current_peaks = self.store.get_chain_mmr_peaks_by_block_num(current_block_num)?; - - let track_latest = if current_block_num != 0 { - match self.store.get_block_header_by_num(current_block_num - 1) { - Ok((_, previous_block_had_notes)) => Ok(previous_block_had_notes), - Err(StoreError::BlockHeaderNotFound(_)) => Ok(false), - Err(err) => Err(ClientError::StoreError(err)), - }? - } else { - false - }; - - Ok(PartialMmr::from_parts(current_peaks, tracked_nodes, track_latest)) - } - - /// Extracts information about nullifiers for unspent input notes that the client is tracking - /// from the received [SyncStateResponse] - fn get_new_nullifiers(&self, new_nullifiers: Vec) -> Result, ClientError> { - // Get current unspent nullifiers - let nullifiers = self - .store - .get_unspent_input_note_nullifiers()? - .iter() - .map(|nullifier| nullifier.inner()) - .collect::>(); - - let new_nullifiers = new_nullifiers - .into_iter() - .filter(|nullifier| nullifiers.contains(nullifier)) - .collect(); - - Ok(new_nullifiers) - } - - async fn get_updated_onchain_accounts( - &mut self, - account_updates: &[(AccountId, Digest)], - current_onchain_accounts: &[AccountStub], - ) -> Result, ClientError> { - let mut accounts_to_update: Vec = Vec::new(); - for (remote_account_id, remote_account_hash) in account_updates { - // check if this updated account is tracked by the client - let current_account = current_onchain_accounts - .iter() - .find(|acc| *remote_account_id == acc.id() && *remote_account_hash != acc.hash()); - - if let Some(tracked_account) = current_account { - info!("On-chain account hash difference detected for account with ID: {}. Fetching node for updates...", tracked_account.id()); - let account = self.rpc_api.get_account_update(tracked_account.id()).await?; - accounts_to_update.push(account); - } - } - Ok(accounts_to_update) - } - - /// Validates account hash updates and returns an error if there is a mismatch. 
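// A simplified sketch of the check below, with `(u64, [u8; 32])` pairs standing in
// for `(AccountId, Digest)` (an illustrative assumption): an update whose ID matches
// a locally tracked off-chain account but whose hash differs means the local state
// is stale, which the client treats as a store error.
fn has_hash_mismatch(updates: &[(u64, [u8; 32])], local_accounts: &[(u64, [u8; 32])]) -> bool {
    updates.iter().any(|(id, hash)| {
        local_accounts
            .iter()
            .any(|(local_id, local_hash)| local_id == id && local_hash != hash)
    })
}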
- fn validate_local_account_hashes( - &mut self, - account_updates: &[(AccountId, Digest)], - current_offchain_accounts: &[AccountStub], - ) -> Result<(), ClientError> { - for (remote_account_id, remote_account_hash) in account_updates { - // ensure that if we track that account, it has the same hash - let mismatched_accounts = current_offchain_accounts - .iter() - .find(|acc| *remote_account_id == acc.id() && *remote_account_hash != acc.hash()); - - // OffChain accounts should always have the latest known state - if mismatched_accounts.is_some() { - return Err(StoreError::AccountHashMismatch(*remote_account_id).into()); - } - } - Ok(()) - } -} - -// UTILS -// -------------------------------------------------------------------------------------------- - -/// Applies changes to the Mmr structure, storing authentication nodes for leaves we track -/// and returns the updated [PartialMmr] -fn apply_mmr_changes( - current_partial_mmr: PartialMmr, - mmr_delta: MmrDelta, - current_block_header: BlockHeader, - current_block_has_relevant_notes: bool, -) -> Result<(MmrPeaks, Vec<(InOrderIndex, Digest)>), StoreError> { - let mut partial_mmr: PartialMmr = current_partial_mmr; - - // First, apply curent_block to the Mmr - let new_authentication_nodes = partial_mmr - .add(current_block_header.hash(), current_block_has_relevant_notes) - .into_iter(); - - // Apply the Mmr delta to bring Mmr to forest equal to chain tip - let new_authentication_nodes: Vec<(InOrderIndex, Digest)> = partial_mmr - .apply(mmr_delta) - .map_err(StoreError::MmrError)? - .into_iter() - .chain(new_authentication_nodes) - .collect(); - - Ok((partial_mmr.peaks(), new_authentication_nodes)) -} - -/// Returns the list of transactions that should be marked as committed based on the state update info -/// -/// To set an uncommitted transaction as committed three things must hold: -/// -/// - All of the transaction's output notes are committed -/// - All of the transaction's input notes are consumed, which means we got their nullifiers as -/// part of the update -/// - The account corresponding to the transaction hash matches the transaction's -// final_account_state -fn get_transactions_to_commit( - uncommitted_transactions: &[TransactionRecord], - _note_ids: &[NoteId], - nullifiers: &[Digest], - account_hash_updates: &[(AccountId, Digest)], -) -> Vec { - uncommitted_transactions - .iter() - .filter(|t| { - // TODO: based on the discussion in - // https://github.com/0xPolygonMiden/miden-client/issues/144, we should be aware - // that in the future it'll be possible to have many transactions modifying an - // account be included in a single block. If that happens, we'll need to rewrite - // this check - - // TODO: Review this. 
Because we receive note IDs based on account ID tags, - // we cannot base the status change on output notes alone; - t.input_note_nullifiers.iter().all(|n| nullifiers.contains(n)) - //&& t.output_notes.iter().all(|n| note_ids.contains(&n.id())) - && account_hash_updates.iter().any(|(account_id, account_hash)| { - *account_id == t.account_id && *account_hash == t.final_account_state - }) - }) - .map(|t| t.id) - .collect() -} diff --git a/src/client/transactions/asm/transaction_scripts/auth_consume_notes.masm b/src/client/transactions/asm/transaction_scripts/auth_consume_notes.masm deleted file mode 100644 index 5cf19e425..000000000 --- a/src/client/transactions/asm/transaction_scripts/auth_consume_notes.masm +++ /dev/null @@ -1,5 +0,0 @@ -use.miden::contracts::auth::basic->auth_tx - -begin - call.auth_tx::auth_tx_rpo_falcon512 -end diff --git a/src/client/transactions/asm/transaction_scripts/auth_send_asset.masm b/src/client/transactions/asm/transaction_scripts/auth_send_asset.masm deleted file mode 100644 index 540c95e5f..000000000 --- a/src/client/transactions/asm/transaction_scripts/auth_send_asset.masm +++ /dev/null @@ -1,12 +0,0 @@ -use.miden::contracts::auth::basic->auth_tx -use.miden::contracts::wallets::basic->wallet - -begin - push.{recipient} - push.{note_type} - push.{tag} - push.{asset} - call.wallet::send_asset - drop drop dropw dropw - call.auth_tx::auth_tx_rpo_falcon512 -end diff --git a/src/client/transactions/asm/transaction_scripts/distribute_fungible_asset.masm b/src/client/transactions/asm/transaction_scripts/distribute_fungible_asset.masm deleted file mode 100644 index d26a77558..000000000 --- a/src/client/transactions/asm/transaction_scripts/distribute_fungible_asset.masm +++ /dev/null @@ -1,13 +0,0 @@ -use.miden::contracts::faucets::basic_fungible->faucet -use.miden::contracts::auth::basic->auth_tx - -begin - push.{recipient} - push.{note_type} - push.{tag} - push.{amount} - call.faucet::distribute - - call.auth_tx::auth_tx_rpo_falcon512 - dropw dropw -end diff --git a/src/client/transactions/mod.rs b/src/client/transactions/mod.rs deleted file mode 100644 index a656a9306..000000000 --- a/src/client/transactions/mod.rs +++ /dev/null @@ -1,440 +0,0 @@ -use alloc::collections::{BTreeMap, BTreeSet}; - -use miden_lib::notes::{create_p2id_note, create_p2idr_note}; -use miden_objects::{ - accounts::{AccountDelta, AccountId}, - assembly::ProgramAst, - assets::FungibleAsset, - crypto::rand::RpoRandomCoin, - notes::{Note, NoteId, NoteType}, - transaction::{ - ExecutedTransaction, OutputNotes, ProvenTransaction, TransactionArgs, TransactionId, - TransactionScript, - }, - Digest, Felt, Word, -}; -use miden_tx::{ProvingOptions, ScriptTarget, TransactionProver}; -use rand::Rng; -use tracing::info; - -use self::transaction_request::{PaymentTransactionData, TransactionRequest, TransactionTemplate}; -use super::{note_screener::NoteRelevance, rpc::NodeRpcClient, Client, FeltRng}; -use crate::{ - client::NoteScreener, - errors::ClientError, - store::{AuthInfo, Store, TransactionFilter}, -}; - -pub mod transaction_request; - -// TRANSACTION RESULT -// -------------------------------------------------------------------------------------------- - -/// Represents the result of executing a transaction by the client -/// -/// It contains an [ExecutedTransaction], a list of [Note] that describe the details of the notes -/// created by the transaction execution, and a list of `usize` `relevant_notes` that contain the -/// indices of `output_notes` that are relevant to the client -pub struct 
TransactionResult { - executed_transaction: ExecutedTransaction, - output_notes: Vec, - relevant_notes: Option>>, -} - -impl TransactionResult { - pub fn new(executed_transaction: ExecutedTransaction, created_notes: Vec) -> Self { - Self { - executed_transaction, - output_notes: created_notes, - relevant_notes: None, - } - } - - pub fn executed_transaction(&self) -> &ExecutedTransaction { - &self.executed_transaction - } - - pub fn created_notes(&self) -> &Vec { - &self.output_notes - } - - pub fn relevant_notes(&self) -> Vec<&Note> { - if let Some(relevant_notes) = &self.relevant_notes { - relevant_notes - .keys() - .map(|note_index| &self.output_notes[*note_index]) - .collect() - } else { - self.created_notes().iter().collect() - } - } - - pub fn set_relevant_notes( - &mut self, - relevant_notes: BTreeMap>, - ) { - self.relevant_notes = Some(relevant_notes); - } - - pub fn block_num(&self) -> u32 { - self.executed_transaction.block_header().block_num() - } - - pub fn transaction_arguments(&self) -> &TransactionArgs { - self.executed_transaction.tx_args() - } - - pub fn account_delta(&self) -> &AccountDelta { - self.executed_transaction.account_delta() - } -} - -// TRANSACTION RECORD -// -------------------------------------------------------------------------------------------- - -/// Describes a transaction that has been executed and is being tracked on the Client -/// -/// Currently, the `commit_height` (and `committed` status) is set based on the height -/// at which the transaction's output notes are committed. -pub struct TransactionRecord { - pub id: TransactionId, - pub account_id: AccountId, - pub init_account_state: Digest, - pub final_account_state: Digest, - pub input_note_nullifiers: Vec, - pub output_notes: OutputNotes, - pub transaction_script: Option, - pub block_num: u32, - pub transaction_status: TransactionStatus, -} - -impl TransactionRecord { - #[allow(clippy::too_many_arguments)] - pub fn new( - id: TransactionId, - account_id: AccountId, - init_account_state: Digest, - final_account_state: Digest, - input_note_nullifiers: Vec, - output_notes: OutputNotes, - transaction_script: Option, - block_num: u32, - transaction_status: TransactionStatus, - ) -> TransactionRecord { - TransactionRecord { - id, - account_id, - init_account_state, - final_account_state, - input_note_nullifiers, - output_notes, - transaction_script, - block_num, - transaction_status, - } - } -} - -/// Represents the status of a transaction -pub enum TransactionStatus { - /// Transaction has been submitted but not yet committed - Pending, - /// Transaction has been committed and included at the specified block number - Committed(u32), -} - -impl std::fmt::Display for TransactionStatus { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - match self { - TransactionStatus::Pending => write!(f, "Pending"), - TransactionStatus::Committed(block_number) => { - write!(f, "Committed (Block: {})", block_number) - }, - } - } -} - -impl Client { - // TRANSACTION DATA RETRIEVAL - // -------------------------------------------------------------------------------------------- - - /// Retrieves tracked transactions, filtered by [TransactionFilter]. 
- pub fn get_transactions( - &self, - filter: TransactionFilter, - ) -> Result, ClientError> { - self.store.get_transactions(filter).map_err(|err| err.into()) - } - - // TRANSACTION - // -------------------------------------------------------------------------------------------- - - /// Compiles a [TransactionTemplate] into a [TransactionRequest] that can be then executed by the - /// client - pub fn build_transaction_request( - &mut self, - transaction_template: TransactionTemplate, - ) -> Result { - let account_id = transaction_template.account_id(); - let account_auth = self.store.get_account_auth(account_id)?; - - match transaction_template { - TransactionTemplate::ConsumeNotes(_, notes) => { - let program_ast = ProgramAst::parse(transaction_request::AUTH_CONSUME_NOTES_SCRIPT) - .expect("shipped MASM is well-formed"); - let notes = notes.iter().map(|id| (*id, None)).collect(); - - let tx_script = { - let script_inputs = vec![account_auth.into_advice_inputs()]; - self.tx_executor.compile_tx_script(program_ast, script_inputs, vec![])? - }; - Ok(TransactionRequest::new(account_id, notes, vec![], Some(tx_script))) - }, - TransactionTemplate::MintFungibleAsset(asset, target_account_id, note_type) => { - self.build_mint_tx_request(asset, account_auth, target_account_id, note_type) - }, - TransactionTemplate::PayToId(payment_data, note_type) => { - self.build_p2id_tx_request(account_auth, payment_data, None, note_type) - }, - TransactionTemplate::PayToIdWithRecall(payment_data, recall_height, note_type) => self - .build_p2id_tx_request(account_auth, payment_data, Some(recall_height), note_type), - } - } - - /// Creates and executes a transaction specified by the template, but does not change the - /// local database. - /// - /// # Errors - /// - /// - Returns [ClientError::MissingOutputNotes] if the [TransactionRequest] ouput notes are - /// not a subset of executor's output notes - /// - Returns a [ClientError::TransactionExecutionError] - pub fn new_transaction( - &mut self, - transaction_request: TransactionRequest, - ) -> Result { - let account_id = transaction_request.account_id(); - self.tx_executor - .load_account(account_id) - .map_err(ClientError::TransactionExecutionError)?; - - let block_num = self.store.get_sync_height()?; - - let note_ids = transaction_request.get_input_note_ids(); - - let output_notes = transaction_request.expected_output_notes().to_vec(); - - // Execute the transaction and get the witness - let executed_transaction = self.tx_executor.execute_transaction( - account_id, - block_num, - ¬e_ids, - transaction_request.into(), - )?; - - // Check that the expected output notes is a subset of the transaction's output notes - let tx_note_ids: BTreeSet = - executed_transaction.output_notes().iter().map(|n| n.id()).collect(); - - let missing_note_ids: Vec = output_notes - .iter() - .filter_map(|n| (!tx_note_ids.contains(&n.id())).then_some(n.id())) - .collect(); - - if !missing_note_ids.is_empty() { - return Err(ClientError::MissingOutputNotes(missing_note_ids)); - } - - Ok(TransactionResult::new(executed_transaction, output_notes)) - } - - /// Proves the specified transaction witness, submits it to the node, and stores the transaction in - /// the local database for tracking. 
- pub async fn submit_transaction( - &mut self, - tx_result: TransactionResult, - ) -> Result<(), ClientError> { - let transaction_prover = TransactionProver::new(ProvingOptions::default()); - - let proven_transaction = - transaction_prover.prove_transaction(tx_result.executed_transaction().clone())?; - - info!("Proved transaction, submitting to the node..."); - - self.submit_proven_transaction_request(proven_transaction.clone()).await?; - - let note_screener = NoteScreener::new(&self.store); - let mut relevant_notes = BTreeMap::new(); - - for (idx, note) in tx_result.created_notes().iter().enumerate() { - let account_relevance = note_screener.check_relevance(note)?; - if !account_relevance.is_empty() { - relevant_notes.insert(idx, account_relevance); - } - } - - let mut tx_result = tx_result; - tx_result.set_relevant_notes(relevant_notes); - - // Transaction was proven and submitted to the node correctly, persist note details and update account - self.store.apply_transaction(tx_result)?; - - Ok(()) - } - - /// Compiles the provided transaction script source and inputs into a [TransactionScript] and - /// checks (to the extent possible) that the transaction script can be executed against all - /// accounts with the specified interfaces. - pub fn compile_tx_script( - &self, - program: ProgramAst, - inputs: T, - target_account_procs: Vec, - ) -> Result - where - T: IntoIterator)>, - { - self.tx_executor - .compile_tx_script(program, inputs, target_account_procs) - .map_err(ClientError::TransactionExecutionError) - } - - async fn submit_proven_transaction_request( - &mut self, - proven_transaction: ProvenTransaction, - ) -> Result<(), ClientError> { - Ok(self.rpc_api.submit_proven_transaction(proven_transaction).await?) - } - - // HELPERS - // -------------------------------------------------------------------------------------------- - - /// Gets [RpoRandomCoin] from the client - fn get_random_coin(&self) -> RpoRandomCoin { - // TODO: Initialize coin status once along with the client and persist status for retrieval - let mut rng = rand::thread_rng(); - let coin_seed: [u64; 4] = rng.gen(); - - RpoRandomCoin::new(coin_seed.map(Felt::new)) - } - - /// Helper to build a [TransactionRequest] for P2ID-type transactions easily. - /// - /// - auth_info has to be from the executor account - /// - If recall_height is Some(), a P2IDR note will be created. Otherwise, a P2ID is created. - fn build_p2id_tx_request( - &self, - auth_info: AuthInfo, - payment_data: PaymentTransactionData, - recall_height: Option, - note_type: NoteType, - ) -> Result { - let random_coin = self.get_random_coin(); - - let created_note = if let Some(recall_height) = recall_height { - create_p2idr_note( - payment_data.account_id(), - payment_data.target_account_id(), - vec![payment_data.asset()], - note_type, - recall_height, - random_coin, - )? - } else { - create_p2id_note( - payment_data.account_id(), - payment_data.target_account_id(), - vec![payment_data.asset()], - note_type, - random_coin, - )? 
- }; - - let recipient = created_note - .recipient_digest() - .iter() - .map(|x| x.as_int().to_string()) - .collect::>() - .join("."); - - let note_tag = created_note.metadata().tag().inner(); - - let tx_script = ProgramAst::parse( - &transaction_request::AUTH_SEND_ASSET_SCRIPT - .replace("{recipient}", &recipient) - .replace("{note_type}", &Felt::new(note_type as u64).to_string()) - .replace("{tag}", &Felt::new(note_tag.into()).to_string()) - .replace("{asset}", &prepare_word(&payment_data.asset().into()).to_string()), - ) - .expect("shipped MASM is well-formed"); - - let tx_script = { - let script_inputs = vec![auth_info.into_advice_inputs()]; - self.tx_executor.compile_tx_script(tx_script, script_inputs, vec![])? - }; - - Ok(TransactionRequest::new( - payment_data.account_id(), - BTreeMap::new(), - vec![created_note], - Some(tx_script), - )) - } - - /// Helper to build a [TransactionRequest] for transaction to mint fungible tokens. - /// - /// - faucet_auth_info has to be from the faucet account - fn build_mint_tx_request( - &self, - asset: FungibleAsset, - faucet_auth_info: AuthInfo, - target_account_id: AccountId, - note_type: NoteType, - ) -> Result { - let random_coin = self.get_random_coin(); - let created_note = create_p2id_note( - asset.faucet_id(), - target_account_id, - vec![asset.into()], - note_type, - random_coin, - )?; - - let recipient = created_note - .recipient_digest() - .iter() - .map(|x| x.as_int().to_string()) - .collect::>() - .join("."); - - let note_tag = created_note.metadata().tag().inner(); - - let tx_script = ProgramAst::parse( - &transaction_request::DISTRIBUTE_FUNGIBLE_ASSET_SCRIPT - .replace("{recipient}", &recipient) - .replace("{note_type}", &Felt::new(note_type as u64).to_string()) - .replace("{tag}", &Felt::new(note_tag.into()).to_string()) - .replace("{amount}", &Felt::new(asset.amount()).to_string()), - ) - .expect("shipped MASM is well-formed"); - - let tx_script = { - let script_inputs = vec![faucet_auth_info.into_advice_inputs()]; - self.tx_executor.compile_tx_script(tx_script, script_inputs, vec![])? 
- }; - - Ok(TransactionRequest::new( - asset.faucet_id(), - BTreeMap::new(), - vec![created_note], - Some(tx_script), - )) - } -} - -// HELPERS -// ================================================================================================ - -pub(crate) fn prepare_word(word: &Word) -> String { - word.iter().map(|x| x.as_int().to_string()).collect::>().join(".") -} diff --git a/src/client/transactions/transaction_request.rs b/src/client/transactions/transaction_request.rs deleted file mode 100644 index 37b58f879..000000000 --- a/src/client/transactions/transaction_request.rs +++ /dev/null @@ -1,170 +0,0 @@ -use alloc::collections::BTreeMap; - -use miden_objects::{ - accounts::AccountId, - assets::{Asset, FungibleAsset}, - notes::{Note, NoteId, NoteType}, - transaction::{TransactionArgs, TransactionScript}, - vm::AdviceMap, - Word, -}; - -// MASM SCRIPTS -// -------------------------------------------------------------------------------------------- - -pub const AUTH_CONSUME_NOTES_SCRIPT: &str = - include_str!("asm/transaction_scripts/auth_consume_notes.masm"); -pub const DISTRIBUTE_FUNGIBLE_ASSET_SCRIPT: &str = - include_str!("asm/transaction_scripts/distribute_fungible_asset.masm"); -pub const AUTH_SEND_ASSET_SCRIPT: &str = - include_str!("asm/transaction_scripts/auth_send_asset.masm"); - -// TRANSACTION REQUEST -// -------------------------------------------------------------------------------------------- - -pub type NoteArgs = Word; - -/// Represents the most general way of defining an executable transaction -#[derive(Clone, Debug)] -pub struct TransactionRequest { - /// ID of the account against which the transactions is to be executed. - account_id: AccountId, - /// Notes to be consumed by the transaction together with their (optional) arguments. - input_notes: BTreeMap>, - /// A list of notes expected to be generated by the transactions. - expected_output_notes: Vec, - /// Optional transaction script (together with its arguments). 
- tx_script: Option, -} - -impl TransactionRequest { - // CONSTRUCTORS - // -------------------------------------------------------------------------------------------- - - pub fn new( - account_id: AccountId, - input_notes: BTreeMap>, - expected_output_notes: Vec, - tx_script: Option, - ) -> Self { - Self { - account_id, - input_notes, - expected_output_notes, - tx_script, - } - } - - // PUBLIC ACCESSORS - // -------------------------------------------------------------------------------------------- - - pub fn account_id(&self) -> AccountId { - self.account_id - } - - pub fn input_notes(&self) -> &BTreeMap> { - &self.input_notes - } - - pub fn get_input_note_ids(&self) -> Vec { - self.input_notes.keys().cloned().collect() - } - - pub fn get_note_args(&self) -> BTreeMap { - self.input_notes - .iter() - .filter_map(|(note, args)| args.map(|a| (*note, a))) - .collect() - } - - pub fn expected_output_notes(&self) -> &[Note] { - &self.expected_output_notes - } - - pub fn tx_script(&self) -> Option<&TransactionScript> { - self.tx_script.as_ref() - } -} - -impl From for TransactionArgs { - fn from(val: TransactionRequest) -> Self { - let note_args = val.get_note_args(); - let mut tx_args = TransactionArgs::new(val.tx_script, Some(note_args), AdviceMap::new()); - - let output_notes = val.expected_output_notes.into_iter(); - tx_args.extend_expected_output_notes(output_notes); - - tx_args - } -} - -// TRANSACTION TEMPLATE -// -------------------------------------------------------------------------------------------- - -#[derive(Clone, Debug)] -pub enum TransactionTemplate { - /// Consume the specified notes against an account. - ConsumeNotes(AccountId, Vec), - /// Mint fungible assets using a faucet account and creates a note with the specified - /// type that can be consumed by the target Account ID - MintFungibleAsset(FungibleAsset, AccountId, NoteType), - /// Creates a pay-to-id note with the specified type directed to a specific account - PayToId(PaymentTransactionData, NoteType), - /// Creates a pay-to-id note directed to a specific account, specifying a block height after - /// which the note can be recalled - PayToIdWithRecall(PaymentTransactionData, u32, NoteType), -} - -impl TransactionTemplate { - /// Returns the [AccountId] of the account which the transaction will be executed against - pub fn account_id(&self) -> AccountId { - match self { - TransactionTemplate::ConsumeNotes(account_id, _) => *account_id, - TransactionTemplate::MintFungibleAsset(asset, ..) => asset.faucet_id(), - TransactionTemplate::PayToId(payment_data, _) => payment_data.account_id(), - TransactionTemplate::PayToIdWithRecall(payment_data, ..) 
=> payment_data.account_id(), - } - } -} - -// PAYMENT TRANSACTION DATA -// -------------------------------------------------------------------------------------------- - -#[derive(Clone, Debug)] -pub struct PaymentTransactionData { - asset: Asset, - sender_account_id: AccountId, - target_account_id: AccountId, -} - -impl PaymentTransactionData { - // CONSTRUCTORS - // -------------------------------------------------------------------------------------------- - - pub fn new( - asset: Asset, - sender_account_id: AccountId, - target_account_id: AccountId, - ) -> PaymentTransactionData { - PaymentTransactionData { - asset, - sender_account_id, - target_account_id, - } - } - - /// Returns the executor [AccountId] - pub fn account_id(&self) -> AccountId { - self.sender_account_id - } - - /// Returns the target [AccountId] - pub fn target_account_id(&self) -> AccountId { - self.target_account_id - } - - /// Returns the transaction [Asset] - pub fn asset(&self) -> Asset { - self.asset - } -} diff --git a/src/config.rs b/src/config.rs deleted file mode 100644 index be5fc26c5..000000000 --- a/src/config.rs +++ /dev/null @@ -1,156 +0,0 @@ -use core::fmt; -use std::path::PathBuf; - -use figment::{ - value::{Dict, Map}, - Metadata, Profile, Provider, -}; -use serde::{Deserialize, Serialize}; - -// CLIENT CONFIG -// ================================================================================================ - -/// Configuration options of Miden client. -#[derive(Debug, Default, Deserialize, Eq, PartialEq, Serialize)] -pub struct ClientConfig { - /// Describes settings related to the RPC endpoint - pub rpc: RpcConfig, - /// Describes settings related to the store. - pub store: StoreConfig, -} - -impl ClientConfig { - /// Returns a new instance of [ClientConfig] with the specified store path and node endpoint. - pub const fn new(store: StoreConfig, rpc: RpcConfig) -> Self { - Self { store, rpc } - } -} - -// Make `ClientConfig` a provider itself for composability. -impl Provider for ClientConfig { - fn metadata(&self) -> Metadata { - Metadata::named("Library Config") - } - - fn data(&self) -> Result, figment::Error> { - figment::providers::Serialized::defaults(ClientConfig::default()).data() - } - - fn profile(&self) -> Option { - // Optionally, a profile that's selected by default. - None - } -} - -// ENDPOINT -// ================================================================================================ - -#[derive(Clone, Debug, Deserialize, Eq, Ord, PartialEq, PartialOrd, Serialize)] -pub struct Endpoint { - protocol: String, - host: String, - port: u16, -} - -impl Endpoint { - /// Returns a new instance of [Endpoint] with the specified protocol, host, and port. 
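// For reference, a tiny illustration of the URL string produced by the `Display`
// impl and `Default` values below ("http", "localhost", port 57291); the
// `endpoint_url` helper is hypothetical and mirrors the same format string.
fn endpoint_url(protocol: &str, host: &str, port: u16) -> String {
    format!("{}://{}:{}", protocol, host, port)
}
// e.g. endpoint_url("http", "localhost", 57291) == "http://localhost:57291"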
- pub const fn new(protocol: String, host: String, port: u16) -> Self { - Self { protocol, host, port } - } -} - -impl Endpoint { - pub fn protocol(&self) -> &str { - &self.protocol - } - - pub fn host(&self) -> &str { - &self.host - } - - pub fn port(&self) -> u16 { - self.port - } -} - -impl fmt::Display for Endpoint { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - write!(f, "{}://{}:{}", self.protocol, self.host, self.port) - } -} - -impl Default for Endpoint { - fn default() -> Self { - const MIDEN_NODE_PORT: u16 = 57291; - - Self { - protocol: "http".to_string(), - host: "localhost".to_string(), - port: MIDEN_NODE_PORT, - } - } -} - -// STORE CONFIG -// ================================================================================================ - -#[derive(Debug, Deserialize, Eq, PartialEq, Serialize)] -pub struct StoreConfig { - pub database_filepath: String, -} - -impl From<&ClientConfig> for StoreConfig { - fn from(config: &ClientConfig) -> Self { - Self { - database_filepath: config.store.database_filepath.clone(), - } - } -} - -impl TryFrom<&str> for StoreConfig { - type Error = String; - fn try_from(value: &str) -> Result { - StoreConfig::try_from(value.to_string()) - } -} - -// TODO: Implement error checking for invalid paths, or make it based on Path types -impl TryFrom for StoreConfig { - type Error = String; - fn try_from(value: String) -> Result { - Ok(Self { database_filepath: value }) - } -} - -impl Default for StoreConfig { - fn default() -> Self { - const STORE_FILENAME: &str = "store.sqlite3"; - - // Get current directory - let exec_dir = PathBuf::new(); - - // Append filepath - let database_filepath = exec_dir - .join(STORE_FILENAME) - .into_os_string() - .into_string() - .expect("Creating the hardcoded store path should not panic"); - - Self { database_filepath } - } -} - -// RPC CONFIG -// ================================================================================================ - -#[derive(Debug, Default, Deserialize, Eq, PartialEq, Serialize)] -pub struct RpcConfig { - /// Address of the Miden node to connect to. 
- pub endpoint: Endpoint, -} - -impl From for RpcConfig { - fn from(value: Endpoint) -> Self { - Self { endpoint: value } - } -} diff --git a/src/errors.rs b/src/errors.rs deleted file mode 100644 index a89ad104b..000000000 --- a/src/errors.rs +++ /dev/null @@ -1,474 +0,0 @@ -use core::fmt; - -use miden_node_proto::errors::ConversionError; -use miden_objects::{ - accounts::AccountId, crypto::merkle::MmrError, notes::NoteId, AccountError, AssetError, - AssetVaultError, Digest, NoteError, TransactionScriptError, -}; -use miden_tx::{ - utils::{DeserializationError, HexParseError}, - DataStoreError, TransactionExecutorError, TransactionProverError, -}; - -// CLIENT ERROR -// ================================================================================================ - -#[derive(Debug)] -pub enum ClientError { - AccountError(AccountError), - AssetError(AssetError), - DataDeserializationError(DeserializationError), - HexParseError(HexParseError), - ImportNewAccountWithoutSeed, - MissingOutputNotes(Vec), - NoteError(NoteError), - NoConsumableNoteForAccount(AccountId), - NodeRpcClientError(NodeRpcClientError), - ScreenerError(ScreenerError), - StoreError(StoreError), - TransactionExecutionError(TransactionExecutorError), - TransactionProvingError(TransactionProverError), -} - -impl fmt::Display for ClientError { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - match self { - ClientError::AccountError(err) => write!(f, "account error: {err}"), - ClientError::DataDeserializationError(err) => { - write!(f, "data deserialization error: {err}") - }, - ClientError::AssetError(err) => write!(f, "asset error: {err}"), - ClientError::HexParseError(err) => write!(f, "error turning array to Digest: {err}"), - ClientError::ImportNewAccountWithoutSeed => write!( - f, - "import account error: can't import a new account without its initial seed" - ), - ClientError::MissingOutputNotes(note_ids) => { - write!( - f, - "transaction error: The transaction did not produce expected Note IDs: {}", - note_ids.iter().map(|&id| id.to_hex()).collect::>().join(", ") - ) - }, - ClientError::NoConsumableNoteForAccount(account_id) => { - write!(f, "No consumable note for account ID {}", account_id) - }, - ClientError::NoteError(err) => write!(f, "note error: {err}"), - ClientError::NodeRpcClientError(err) => write!(f, "rpc api error: {err}"), - ClientError::ScreenerError(err) => write!(f, "note screener error: {err}"), - ClientError::StoreError(err) => write!(f, "store error: {err}"), - ClientError::TransactionExecutionError(err) => { - write!(f, "transaction executor error: {err}") - }, - ClientError::TransactionProvingError(err) => { - write!(f, "transaction prover error: {err}") - }, - } - } -} - -// CONVERSIONS -// ================================================================================================ - -impl From for ClientError { - fn from(err: AccountError) -> Self { - Self::AccountError(err) - } -} - -impl From for ClientError { - fn from(err: DeserializationError) -> Self { - Self::DataDeserializationError(err) - } -} - -impl From for ClientError { - fn from(err: HexParseError) -> Self { - Self::HexParseError(err) - } -} - -impl From for ClientError { - fn from(err: NoteError) -> Self { - Self::NoteError(err) - } -} - -impl From for ClientError { - fn from(err: NodeRpcClientError) -> Self { - Self::NodeRpcClientError(err) - } -} - -impl From for ClientError { - fn from(err: StoreError) -> Self { - Self::StoreError(err) - } -} - -impl From for ClientError { - fn from(err: 
TransactionExecutorError) -> Self { - Self::TransactionExecutionError(err) - } -} - -impl From for ClientError { - fn from(err: TransactionProverError) -> Self { - Self::TransactionProvingError(err) - } -} - -impl From for ClientError { - fn from(err: ScreenerError) -> Self { - Self::ScreenerError(err) - } -} - -impl From for ClientError { - fn from(err: rusqlite::Error) -> Self { - Self::StoreError(StoreError::from(err)) - } -} - -impl From for String { - fn from(err: ClientError) -> String { - err.to_string() - } -} - -#[cfg(feature = "std")] -impl std::error::Error for ClientError {} - -// STORE ERROR -// ================================================================================================ - -#[derive(Debug)] -pub enum StoreError { - AssetVaultError(AssetVaultError), - AccountCodeDataNotFound(Digest), - AccountDataNotFound(AccountId), - AccountError(AccountError), - AccountHashMismatch(AccountId), - AccountStorageNotFound(Digest), - BlockHeaderNotFound(u32), - ChainMmrNodeNotFound(u64), - DatabaseError(String), - DataDeserializationError(DeserializationError), - HexParseError(HexParseError), - InputNoteNotFound(NoteId), - InputSerializationError(serde_json::Error), - JsonDataDeserializationError(serde_json::Error), - MmrError(MmrError), - NoteInclusionProofError(NoteError), - NoteTagAlreadyTracked(u64), - ParsingError(String), - QueryError(String), - RpcTypeConversionFailure(ConversionError), - TransactionScriptError(TransactionScriptError), - VaultDataNotFound(Digest), -} - -impl From for StoreError { - fn from(value: AssetVaultError) -> Self { - StoreError::AssetVaultError(value) - } -} - -impl From for StoreError { - fn from(value: AccountError) -> Self { - StoreError::AccountError(value) - } -} - -impl From for StoreError { - fn from(value: rusqlite_migration::Error) -> Self { - StoreError::DatabaseError(value.to_string()) - } -} -impl From for StoreError { - fn from(value: rusqlite::Error) -> Self { - match value { - rusqlite::Error::FromSqlConversionFailure(..) - | rusqlite::Error::IntegralValueOutOfRange(..) - | rusqlite::Error::InvalidColumnIndex(_) - | rusqlite::Error::InvalidColumnType(..) => StoreError::ParsingError(value.to_string()), - rusqlite::Error::InvalidParameterName(_) - | rusqlite::Error::InvalidColumnName(_) - | rusqlite::Error::StatementChangedRows(_) - | rusqlite::Error::ExecuteReturnedResults - | rusqlite::Error::InvalidQuery - | rusqlite::Error::MultipleStatement - | rusqlite::Error::InvalidParameterCount(..) 
- | rusqlite::Error::QueryReturnedNoRows => StoreError::QueryError(value.to_string()), - _ => StoreError::DatabaseError(value.to_string()), - } - } -} - -impl From for StoreError { - fn from(value: DeserializationError) -> Self { - StoreError::DataDeserializationError(value) - } -} - -impl From for StoreError { - fn from(value: HexParseError) -> Self { - StoreError::HexParseError(value) - } -} - -impl From for StoreError { - fn from(value: MmrError) -> Self { - StoreError::MmrError(value) - } -} - -impl From for StoreError { - fn from(value: NoteError) -> Self { - StoreError::NoteInclusionProofError(value) - } -} - -impl From for StoreError { - fn from(value: TransactionScriptError) -> Self { - StoreError::TransactionScriptError(value) - } -} - -impl fmt::Display for StoreError { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - use StoreError::*; - match self { - AssetVaultError(err) => { - write!(f, "asset vault with root {} not found", err) - }, - AccountCodeDataNotFound(root) => { - write!(f, "account code data with root {} not found", root) - }, - AccountDataNotFound(account_id) => { - write!(f, "Account data was not found for Account Id {account_id}") - }, - AccountError(err) => write!(f, "error instantiating Account: {err}"), - AccountHashMismatch(account_id) => { - write!(f, "account hash mismatch for account {account_id}") - }, - AccountStorageNotFound(root) => { - write!(f, "account storage data with root {} not found", root) - }, - BlockHeaderNotFound(block_number) => { - write!(f, "block header for block {} not found", block_number) - }, - ChainMmrNodeNotFound(node_index) => { - write!(f, "chain mmr node at index {} not found", node_index) - }, - DatabaseError(err) => write!(f, "database-related non-query error: {err}"), - DataDeserializationError(err) => { - write!(f, "error deserializing data from the store: {err}") - }, - HexParseError(err) => { - write!(f, "error parsing hex: {err}") - }, - InputNoteNotFound(note_id) => { - write!(f, "input note with note id {} not found", note_id.inner()) - }, - InputSerializationError(err) => { - write!(f, "error trying to serialize inputs for the store: {err}") - }, - JsonDataDeserializationError(err) => { - write!(f, "error deserializing data from JSON from the store: {err}") - }, - MmrError(err) => write!(f, "error constructing mmr: {err}"), - NoteTagAlreadyTracked(tag) => write!(f, "note tag {} is already being tracked", tag), - NoteInclusionProofError(error) => { - write!(f, "inclusion proof creation error: {}", error) - }, - ParsingError(err) => { - write!(f, "failed to parse data retrieved from the database: {err}") - }, - QueryError(err) => write!(f, "failed to retrieve data from the database: {err}"), - TransactionScriptError(err) => { - write!(f, "error instantiating transaction script: {err}") - }, - VaultDataNotFound(root) => write!(f, "account vault data for root {} not found", root), - RpcTypeConversionFailure(err) => write!(f, "failed to convert data: {err}"), - } - } -} - -impl From for DataStoreError { - fn from(value: StoreError) -> Self { - match value { - StoreError::AccountDataNotFound(account_id) => { - DataStoreError::AccountNotFound(account_id) - }, - StoreError::BlockHeaderNotFound(block_num) => DataStoreError::BlockNotFound(block_num), - StoreError::InputNoteNotFound(note_id) => DataStoreError::NoteNotFound(note_id), - err => DataStoreError::InternalError(err.to_string()), - } - } -} - -#[cfg(feature = "std")] -impl std::error::Error for StoreError {} - -// API CLIENT ERROR -// 
================================================================================================ - -#[derive(Debug)] -pub enum NodeRpcClientError { - ConnectionError(String), - ConversionFailure(String), - DeserializationError(DeserializationError), - ExpectedFieldMissing(String), - InvalidAccountReceived(String), - NoteError(NoteError), - RequestError(String, String), -} - -impl fmt::Display for NodeRpcClientError { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - match self { - NodeRpcClientError::ConnectionError(err) => { - write!(f, "failed to connect to the API server: {err}") - }, - NodeRpcClientError::ConversionFailure(err) => { - write!(f, "failed to convert RPC data: {err}") - }, - NodeRpcClientError::DeserializationError(err) => { - write!(f, "failed to deserialize RPC data: {err}") - }, - NodeRpcClientError::ExpectedFieldMissing(err) => { - write!(f, "rpc API response missing an expected field: {err}") - }, - NodeRpcClientError::InvalidAccountReceived(account_error) => { - write!(f, "rpc API response contained an invalid account: {account_error}") - }, - NodeRpcClientError::NoteError(err) => { - write!(f, "rpc API note failed to validate: {err}") - }, - NodeRpcClientError::RequestError(endpoint, err) => { - write!(f, "rpc request failed for {endpoint}: {err}") - }, - } - } -} - -impl From for NodeRpcClientError { - fn from(err: AccountError) -> Self { - Self::InvalidAccountReceived(err.to_string()) - } -} - -impl From for NodeRpcClientError { - fn from(err: DeserializationError) -> Self { - Self::DeserializationError(err) - } -} - -impl From for NodeRpcClientError { - fn from(err: NoteError) -> Self { - Self::NoteError(err) - } -} - -impl From for NodeRpcClientError { - fn from(err: ConversionError) -> Self { - Self::ConversionFailure(err.to_string()) - } -} - -// NOTE ID PREFIX FETCH ERROR -// ================================================================================================ - -/// Error when Looking for a specific note ID from a partial ID -#[derive(Debug, Eq, PartialEq)] -pub enum NoteIdPrefixFetchError { - NoMatch(String), - MultipleMatches(String), -} - -impl fmt::Display for NoteIdPrefixFetchError { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - match self { - NoteIdPrefixFetchError::NoMatch(note_id) => { - write!(f, "No matches were found with the input prefix {note_id}.") - }, - NoteIdPrefixFetchError::MultipleMatches(note_id) => { - write!( - f, - "found more than one note for the provided ID {note_id} and only one match is expected." 
- ) - }, - } - } -} - -// NOTE SCREENER ERROR -// ================================================================================================ - -/// Error when screening notes to check relevance to a client -#[derive(Debug)] -pub enum ScreenerError { - InvalidNoteInputsError(InvalidNoteInputsError), - StoreError(StoreError), -} - -impl From for ScreenerError { - fn from(error: InvalidNoteInputsError) -> Self { - Self::InvalidNoteInputsError(error) - } -} - -impl From for ScreenerError { - fn from(error: StoreError) -> Self { - Self::StoreError(error) - } -} - -impl fmt::Display for ScreenerError { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - match self { - ScreenerError::InvalidNoteInputsError(note_inputs_err) => { - write!(f, "error while processing note inputs: {note_inputs_err}") - }, - ScreenerError::StoreError(store_error) => { - write!(f, "error while fetching data from the store: {store_error}") - }, - } - } -} - -#[derive(Debug)] -pub enum InvalidNoteInputsError { - AccountError(NoteId, AccountError), - AssetError(NoteId, AssetError), - NumInputsError(NoteId, usize), - BlockNumberError(NoteId, u64), -} - -impl fmt::Display for InvalidNoteInputsError { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - match self { - InvalidNoteInputsError::AccountError(note_id, account_error) => { - write!(f, "account error for note with ID {}: {account_error}", note_id.to_hex()) - }, - InvalidNoteInputsError::AssetError(note_id, asset_error) => { - write!(f, "asset error for note with ID {}: {asset_error}", note_id.to_hex()) - }, - InvalidNoteInputsError::NumInputsError(note_id, expected_num_inputs) => { - write!( - f, - "expected {expected_num_inputs} note inputs for note with ID {}", - note_id.to_hex() - ) - }, - InvalidNoteInputsError::BlockNumberError(note_id, read_height) => { - write!( - f, - "note input representing block with value {read_height} for note with ID {}", - note_id.to_hex() - ) - }, - } - } -} diff --git a/src/lib.rs b/src/lib.rs deleted file mode 100644 index 6c9dbf41c..000000000 --- a/src/lib.rs +++ /dev/null @@ -1,12 +0,0 @@ -extern crate alloc; - -pub mod client; -pub mod config; -pub mod errors; -pub mod store; - -#[cfg(any(test, feature = "test_utils"))] -pub mod mock; - -#[cfg(test)] -mod tests; diff --git a/src/main.rs b/src/main.rs deleted file mode 100644 index 376284811..000000000 --- a/src/main.rs +++ /dev/null @@ -1,16 +0,0 @@ -use clap::Parser; - -mod cli; -use cli::Cli; - -#[tokio::main] -async fn main() { - tracing_subscriber::fmt::init(); - // read command-line args - let cli = Cli::parse(); - - // execute cli action - if let Err(error) = cli.execute().await { - println!("{}", error); - } -} diff --git a/src/mock.rs b/src/mock.rs deleted file mode 100644 index d4af4fa3a..000000000 --- a/src/mock.rs +++ /dev/null @@ -1,749 +0,0 @@ -use alloc::collections::BTreeMap; - -use async_trait::async_trait; -use miden_lib::{transaction::TransactionKernel, AuthScheme}; -use miden_node_proto::generated::{ - account::AccountId as ProtoAccountId, - block_header::BlockHeader as NodeBlockHeader, - note::NoteSyncRecord, - requests::{GetBlockHeaderByNumberRequest, SyncStateRequest}, - responses::{NullifierUpdate, SyncStateResponse}, -}; -use miden_objects::{ - accounts::{ - get_account_seed_single, Account, AccountCode, AccountId, AccountStorage, - AccountStorageType, AccountType, SlotItem, StorageSlot, ACCOUNT_ID_OFF_CHAIN_SENDER, - }, - assembly::{Assembler, ModuleAst, ProgramAst}, - assets::{Asset, AssetVault, FungibleAsset, TokenSymbol}, - 
crypto::{ - dsa::rpo_falcon512::SecretKey, - merkle::{Mmr, MmrDelta, NodeIndex, SimpleSmt}, - rand::RpoRandomCoin, - }, - notes::{ - Note, NoteAssets, NoteId, NoteInclusionProof, NoteInputs, NoteMetadata, NoteRecipient, - NoteScript, NoteTag, NoteType, - }, - transaction::{InputNote, ProvenTransaction}, - BlockHeader, Felt, Word, NOTE_TREE_DEPTH, -}; -use rand::Rng; -use tonic::{Response, Status}; - -use crate::{ - client::{ - rpc::{ - NodeRpcClient, NodeRpcClientEndpoint, NoteDetails, NoteInclusionDetails, StateSyncInfo, - }, - sync::FILTER_ID_SHIFT, - transactions::{ - prepare_word, - transaction_request::{PaymentTransactionData, TransactionTemplate}, - }, - Client, - }, - errors::NodeRpcClientError, - store::{sqlite_store::SqliteStore, AuthInfo}, -}; - -pub type MockClient = Client; - -// MOCK CONSTS -// ================================================================================================ - -pub const ACCOUNT_ID_REGULAR_ACCOUNT_UPDATABLE_CODE_ON_CHAIN: u64 = 3238098370154045919; -pub const ACCOUNT_ID_REGULAR: u64 = ACCOUNT_ID_OFF_CHAIN_SENDER; -pub const ACCOUNT_ID_FUNGIBLE_FAUCET_ON_CHAIN: u64 = 0b1010011100 << 54; -pub const DEFAULT_ACCOUNT_CODE: &str = " - use.miden::contracts::wallets::basic->basic_wallet - use.miden::contracts::auth::basic->basic_eoa - - export.basic_wallet::receive_asset - export.basic_wallet::send_asset - export.basic_eoa::auth_tx_rpo_falcon512 -"; - -/// Mock RPC API -/// -/// This struct implements the RPC API used by the client to communicate with the node. It is -/// intended to be used for testing purposes only. -pub struct MockRpcApi { - pub state_sync_requests: BTreeMap, - pub genesis_block: BlockHeader, - pub notes: BTreeMap, -} - -impl Default for MockRpcApi { - fn default() -> Self { - let (genesis_block, state_sync_requests, notes) = generate_state_sync_mock_requests(); - Self { - state_sync_requests, - genesis_block, - notes, - } - } -} - -impl MockRpcApi { - pub fn new(_config_endpoint: &str) -> Self { - Self::default() - } -} - -#[async_trait] -impl NodeRpcClient for MockRpcApi { - /// Executes the specified sync state request and returns the response. - async fn sync_state( - &mut self, - block_num: u32, - _account_ids: &[AccountId], - _note_tags: &[NoteTag], - _nullifiers_tags: &[u16], - ) -> Result { - // Match request -> response through block_num - let response = - match self.state_sync_requests.iter().find(|(req, _)| req.block_num == block_num) { - Some((_req, response)) => { - let response = response.clone(); - Ok(Response::new(response)) - }, - None => Err(NodeRpcClientError::RequestError( - NodeRpcClientEndpoint::SyncState.to_string(), - Status::not_found("no response for sync state request").to_string(), - )), - }?; - - response.into_inner().try_into() - } - - /// Creates and executes a [GetBlockHeaderByNumberRequest]. - /// Only used for retrieving genesis block right now so that's the only case we need to cover. 
- async fn get_block_header_by_number( - &mut self, - block_num: Option, - ) -> Result { - let request = GetBlockHeaderByNumberRequest { block_num }; - - if request.block_num == Some(0) { - return Ok(self.genesis_block); - } - panic!("get_block_header_by_number is supposed to be only used for genesis block") - } - - async fn get_notes_by_id( - &mut self, - note_ids: &[NoteId], - ) -> Result, NodeRpcClientError> { - // assume all off-chain notes for now - let hit_notes = note_ids.iter().filter_map(|id| self.notes.get(id)); - let mut return_notes = vec![]; - for note in hit_notes { - if note.note().metadata().note_type() != NoteType::Public { - panic!("this function assumes all notes are offchain for now"); - } - let inclusion_details = NoteInclusionDetails::new( - note.proof().origin().block_num, - note.proof().origin().node_index.value() as u32, - note.proof().note_path().clone(), - ); - return_notes.push(NoteDetails::OffChain( - note.id(), - *note.note().metadata(), - inclusion_details, - )); - } - Ok(return_notes) - } - - async fn submit_proven_transaction( - &mut self, - _proven_transaction: ProvenTransaction, - ) -> std::result::Result<(), NodeRpcClientError> { - // TODO: add some basic validations to test error cases - Ok(()) - } - - async fn get_account_update( - &mut self, - _account_id: AccountId, - ) -> Result { - panic!("shouldn't be used for now") - } -} - -// HELPERS -// ================================================================================================ - -/// Generates genesis block header, mock sync state requests and responses -fn create_mock_sync_state_request_for_account_and_notes( - account_id: AccountId, - output_notes: &[Note], - consumed_notes: &[InputNote], - genesis_block: &BlockHeader, - mmr_delta: Option>, - tracked_block_headers: Option>, -) -> BTreeMap { - let mut requests: BTreeMap = BTreeMap::new(); - - let accounts = vec![ProtoAccountId { id: u64::from(account_id) }]; - - let nullifiers: Vec = consumed_notes - .iter() - .map(|note| (note.note().nullifier().as_elements()[3].as_int() >> FILTER_ID_SHIFT) as u32) - .collect(); - - let account = get_account_with_default_account_code(account_id, Word::default(), None); - - // This assumes the callee provides either both `tracked_block_headers` and `mmr_delta` are - // provided or not provided - let (tracked_block_headers, mmr_delta) = - if let Some(tracked_block_headers) = tracked_block_headers { - (tracked_block_headers, mmr_delta.unwrap()) - } else { - let mut mocked_tracked_headers = - vec![BlockHeader::mock(8, None, None, &[]), BlockHeader::mock(10, None, None, &[])]; - - let all_mocked_block_headers = vec![ - *genesis_block, - BlockHeader::mock(1, None, None, &[]), - BlockHeader::mock(2, None, None, &[]), - BlockHeader::mock(3, None, None, &[]), - BlockHeader::mock(4, None, None, &[]), - BlockHeader::mock(5, None, None, &[]), - BlockHeader::mock(6, None, None, &[]), - BlockHeader::mock(7, None, None, &[]), - mocked_tracked_headers[0], - BlockHeader::mock(9, None, None, &[]), - mocked_tracked_headers[1], - ]; - - let mut mmr = Mmr::default(); - let mut mocked_mmr_deltas = vec![]; - - for (block_num, block_header) in all_mocked_block_headers.iter().enumerate() { - if block_num == 8 { - mocked_mmr_deltas.push(mmr.get_delta(1, mmr.forest()).unwrap()); - } - if block_num == 10 { - // Fix mocked block chain root - mocked_tracked_headers[1] = BlockHeader::mock( - 10, - Some(mmr.peaks(mmr.forest()).unwrap().hash_peaks()), - None, - &[], - ); - mocked_mmr_deltas.push(mmr.get_delta(9, 
mmr.forest()).unwrap()); - } - mmr.add(block_header.hash()); - } - - (mocked_tracked_headers, mocked_mmr_deltas) - }; - - let chain_tip = tracked_block_headers.last().map(|header| header.block_num()).unwrap_or(10); - let mut deltas_iter = mmr_delta.into_iter(); - let mut created_notes_iter = output_notes.iter(); - - for (block_order, block_header) in tracked_block_headers.iter().enumerate() { - let request = SyncStateRequest { - block_num: if block_order == 0 { - 0 - } else { - tracked_block_headers[block_order - 1].block_num() - }, - account_ids: accounts.clone(), - note_tags: vec![], - nullifiers: nullifiers.clone(), - }; - - // create a state sync response - let response = SyncStateResponse { - chain_tip, - mmr_delta: deltas_iter.next().map(miden_node_proto::generated::mmr::MmrDelta::from), - block_header: Some(NodeBlockHeader::from(*block_header)), - accounts: vec![], - notes: vec![NoteSyncRecord { - note_index: 0, - note_id: Some(created_notes_iter.next().unwrap().id().into()), - sender: Some(account.id().into()), - tag: 0u32, - note_type: NoteType::OffChain as u32, - merkle_path: Some(miden_node_proto::generated::merkle::MerklePath::default()), - }], - nullifiers: vec![NullifierUpdate { - nullifier: Some(consumed_notes.first().unwrap().note().nullifier().inner().into()), - block_num: 7, - }], - }; - requests.insert(request, response); - } - - requests -} - -/// Generates mock sync state requests and responses -fn generate_state_sync_mock_requests() -> ( - BlockHeader, - BTreeMap, - BTreeMap, -) { - let account_id = AccountId::try_from(ACCOUNT_ID_REGULAR).unwrap(); - - // create sync state requests - let assembler = TransactionKernel::assembler(); - let (consumed_notes, created_notes) = mock_notes(&assembler); - let (_, input_notes, ..) = mock_full_chain_mmr_and_notes(consumed_notes); - - let genesis_block = BlockHeader::mock(0, None, None, &[]); - - let state_sync_request_responses = create_mock_sync_state_request_for_account_and_notes( - account_id, - &created_notes, - &input_notes, - &genesis_block, - None, - None, - ); - let input_notes = input_notes.iter().map(|n| (n.note().id(), n.clone())).collect(); - (genesis_block, state_sync_request_responses, input_notes) -} - -pub fn mock_full_chain_mmr_and_notes( - consumed_notes: Vec, -) -> (Mmr, Vec, Vec, Vec) { - let mut note_trees = Vec::new(); - - // TODO: Consider how to better represent note authentication data. 
- // we use the index for both the block number and the leaf index in the note tree - for (index, note) in consumed_notes.iter().enumerate() { - let tree_index = 2 * index; - let smt_entries = vec![ - (tree_index as u64, note.id().into()), - ((tree_index + 1) as u64, note.metadata().into()), - ]; - let smt: SimpleSmt = SimpleSmt::with_leaves(smt_entries).unwrap(); - note_trees.push(smt); - } - - let mut note_tree_iter = note_trees.iter(); - let mut mmr_deltas = Vec::new(); - - // create a dummy chain of block headers - let block_chain = vec![ - BlockHeader::mock(0, None, note_tree_iter.next().map(|x| x.root()), &[]), - BlockHeader::mock(1, None, note_tree_iter.next().map(|x| x.root()), &[]), - BlockHeader::mock(2, None, note_tree_iter.next().map(|x| x.root()), &[]), - BlockHeader::mock(3, None, note_tree_iter.next().map(|x| x.root()), &[]), - BlockHeader::mock(4, None, note_tree_iter.next().map(|x| x.root()), &[]), - BlockHeader::mock(5, None, note_tree_iter.next().map(|x| x.root()), &[]), - BlockHeader::mock(6, None, note_tree_iter.next().map(|x| x.root()), &[]), - ]; - - // instantiate and populate MMR - let mut mmr = Mmr::default(); - for (block_num, block_header) in block_chain.iter().enumerate() { - if block_num == 2 { - mmr_deltas.push(mmr.get_delta(1, mmr.forest()).unwrap()); - } - if block_num == 4 { - mmr_deltas.push(mmr.get_delta(3, mmr.forest()).unwrap()); - } - if block_num == 6 { - mmr_deltas.push(mmr.get_delta(5, mmr.forest()).unwrap()); - } - mmr.add(block_header.hash()); - } - - // set origin for consumed notes using chain and block data - let recorded_notes = consumed_notes - .into_iter() - .enumerate() - .map(|(index, note)| { - let block_header = &block_chain[index]; - let auth_index = NodeIndex::new(NOTE_TREE_DEPTH, index as u64).unwrap(); - InputNote::new( - note, - NoteInclusionProof::new( - block_header.block_num(), - block_header.sub_hash(), - block_header.note_root(), - index as u64, - note_trees[index].open(&auth_index.try_into().unwrap()).path, - ) - .unwrap(), - ) - }) - .collect::>(); - - ( - mmr, - recorded_notes, - vec![block_chain[2], block_chain[4], block_chain[6]], - mmr_deltas, - ) -} - -/// inserts mock note and account data into the client and returns the last block header of mocked -/// chain -pub async fn insert_mock_data(client: &mut MockClient) -> Vec { - // mock notes - let account = get_account_with_default_account_code( - AccountId::try_from(ACCOUNT_ID_REGULAR_ACCOUNT_UPDATABLE_CODE_ON_CHAIN).unwrap(), - Word::default(), - None, - ); - - let init_seed: [u8; 32] = [0; 32]; - let account_seed = get_account_seed_single( - init_seed, - account.account_type(), - miden_objects::accounts::AccountStorageType::OffChain, - account.code().root(), - account.storage().root(), - ) - .unwrap(); - - let assembler = TransactionKernel::assembler(); - let (consumed_notes, created_notes) = mock_notes(&assembler); - let (_mmr, consumed_notes, tracked_block_headers, mmr_deltas) = - mock_full_chain_mmr_and_notes(consumed_notes); - - // insert notes into database - for note in consumed_notes.clone() { - client.import_input_note(note.into()).unwrap(); - } - - // insert notes into database - for note in created_notes.clone() { - client.import_input_note(note.into()).unwrap(); - } - - // insert account - let key_pair = SecretKey::new(); - client - .insert_account(&account, Some(account_seed), &AuthInfo::RpoFalcon512(key_pair)) - .unwrap(); - - let genesis_block = BlockHeader::mock(0, None, None, &[]); - - client.rpc_api().state_sync_requests = 
create_mock_sync_state_request_for_account_and_notes( - account.id(), - &created_notes, - &consumed_notes, - &genesis_block, - Some(mmr_deltas), - Some(tracked_block_headers.clone()), - ); - - tracked_block_headers -} - -pub async fn create_mock_transaction(client: &mut MockClient) { - let key_pair = SecretKey::new(); - let auth_scheme: miden_lib::AuthScheme = - miden_lib::AuthScheme::RpoFalcon512 { pub_key: key_pair.public_key() }; - - let mut rng = rand::thread_rng(); - // we need to use an initial seed to create the wallet account - let init_seed: [u8; 32] = rand::Rng::gen(&mut rng); - - let (sender_account, seed) = miden_lib::accounts::wallets::create_basic_wallet( - init_seed, - auth_scheme, - AccountType::RegularAccountImmutableCode, - AccountStorageType::OffChain, - ) - .unwrap(); - - client - .insert_account(&sender_account, Some(seed), &AuthInfo::RpoFalcon512(key_pair)) - .unwrap(); - - let key_pair = SecretKey::new(); - let auth_scheme: miden_lib::AuthScheme = - miden_lib::AuthScheme::RpoFalcon512 { pub_key: key_pair.public_key() }; - - let mut rng = rand::thread_rng(); - // we need to use an initial seed to create the wallet account - let init_seed: [u8; 32] = rand::Rng::gen(&mut rng); - - let (target_account, seed) = miden_lib::accounts::wallets::create_basic_wallet( - init_seed, - auth_scheme, - AccountType::RegularAccountImmutableCode, - AccountStorageType::OffChain, - ) - .unwrap(); - - client - .insert_account(&target_account, Some(seed), &AuthInfo::RpoFalcon512(key_pair)) - .unwrap(); - - let key_pair = SecretKey::new(); - let auth_scheme: miden_lib::AuthScheme = - miden_lib::AuthScheme::RpoFalcon512 { pub_key: key_pair.public_key() }; - - let mut rng = rand::thread_rng(); - // we need to use an initial seed to create the wallet account - let init_seed: [u8; 32] = rand::Rng::gen(&mut rng); - - let max_supply = 10000u64.to_le_bytes(); - - let (faucet, seed) = miden_lib::accounts::faucets::create_basic_fungible_faucet( - init_seed, - miden_objects::assets::TokenSymbol::new("MOCK").unwrap(), - 4u8, - Felt::try_from(max_supply.as_slice()).unwrap(), - AccountStorageType::OffChain, - auth_scheme, - ) - .unwrap(); - - client - .insert_account(&faucet, Some(seed), &AuthInfo::RpoFalcon512(key_pair)) - .unwrap(); - - let asset: miden_objects::assets::Asset = FungibleAsset::new(faucet.id(), 5u64).unwrap().into(); - - // Insert a P2ID transaction object - - let transaction_template = TransactionTemplate::PayToId( - PaymentTransactionData::new(asset, sender_account.id(), target_account.id()), - NoteType::OffChain, - ); - - let transaction_request = client.build_transaction_request(transaction_template).unwrap(); - let transaction_execution_result = client.new_transaction(transaction_request).unwrap(); - - client.submit_transaction(transaction_execution_result).await.unwrap(); -} - -pub fn mock_fungible_faucet_account( - id: AccountId, - initial_balance: u64, - key_pair: SecretKey, -) -> Account { - let mut rng = rand::thread_rng(); - let init_seed: [u8; 32] = rng.gen(); - let auth_scheme: AuthScheme = AuthScheme::RpoFalcon512 { pub_key: key_pair.public_key() }; - - let (faucet, _seed) = miden_lib::accounts::faucets::create_basic_fungible_faucet( - init_seed, - TokenSymbol::new("TST").unwrap(), - 10u8, - Felt::try_from(initial_balance.to_le_bytes().as_slice()) - .expect("u64 can be safely converted to a field element"), - AccountStorageType::OffChain, - auth_scheme, - ) - .unwrap(); - - let faucet_storage_slot_1 = - [Felt::new(initial_balance), Felt::new(0), Felt::new(0), Felt::new(0)]; 
- let faucet_account_storage = AccountStorage::new(vec![ - SlotItem { - index: 0, - slot: StorageSlot::new_value(key_pair.public_key().into()), - }, - SlotItem { - index: 1, - slot: StorageSlot::new_value(faucet_storage_slot_1), - }, - ]) - .unwrap(); - - Account::new( - id, - AssetVault::new(&[]).unwrap(), - faucet_account_storage.clone(), - faucet.code().clone(), - Felt::new(10u64), - ) -} - -pub fn mock_notes(assembler: &Assembler) -> (Vec, Vec) { - const ACCOUNT_ID_FUNGIBLE_FAUCET_ON_CHAIN_1: u64 = - 0b1010010001111111010110100011011110101011010001101111110110111100u64; - const ACCOUNT_ID_FUNGIBLE_FAUCET_ON_CHAIN_2: u64 = - 0b1010000101101010101101000110111101010110100011011110100011011101u64; - const ACCOUNT_ID_FUNGIBLE_FAUCET_ON_CHAIN_3: u64 = - 0b1010011001011010101101000110111101010110100011011101000110111100u64; - // Note Assets - let faucet_id_1 = AccountId::try_from(ACCOUNT_ID_FUNGIBLE_FAUCET_ON_CHAIN_1).unwrap(); - let faucet_id_2 = AccountId::try_from(ACCOUNT_ID_FUNGIBLE_FAUCET_ON_CHAIN_2).unwrap(); - let faucet_id_3 = AccountId::try_from(ACCOUNT_ID_FUNGIBLE_FAUCET_ON_CHAIN_3).unwrap(); - let fungible_asset_1: Asset = FungibleAsset::new(faucet_id_1, 100).unwrap().into(); - let fungible_asset_2: Asset = FungibleAsset::new(faucet_id_2, 150).unwrap().into(); - let fungible_asset_3: Asset = FungibleAsset::new(faucet_id_3, 7).unwrap().into(); - - // Sender account - let sender = AccountId::try_from(ACCOUNT_ID_REGULAR).unwrap(); - - // CREATED NOTES - // -------------------------------------------------------------------------------------------- - // create note script - let note_program_ast = ProgramAst::parse("begin push.1 drop end").unwrap(); - let (note_script, _) = NoteScript::new(note_program_ast, assembler).unwrap(); - - // Created Notes - const SERIAL_NUM_4: Word = [Felt::new(13), Felt::new(14), Felt::new(15), Felt::new(16)]; - let note_metadata = - NoteMetadata::new(sender, NoteType::OffChain, 1u32.into(), Default::default()).unwrap(); - let note_assets = NoteAssets::new(vec![fungible_asset_1]).unwrap(); - let note_recipient = - NoteRecipient::new(SERIAL_NUM_4, note_script.clone(), NoteInputs::new(vec![]).unwrap()); - - let created_note_1 = Note::new(note_assets, note_metadata, note_recipient); - - const SERIAL_NUM_5: Word = [Felt::new(17), Felt::new(18), Felt::new(19), Felt::new(20)]; - let note_metadata = - NoteMetadata::new(sender, NoteType::OffChain, 2u32.into(), Default::default()).unwrap(); - let note_recipient = - NoteRecipient::new(SERIAL_NUM_5, note_script.clone(), NoteInputs::new(vec![]).unwrap()); - let note_assets = NoteAssets::new(vec![fungible_asset_2]).unwrap(); - let created_note_2 = Note::new(note_assets, note_metadata, note_recipient); - - const SERIAL_NUM_6: Word = [Felt::new(21), Felt::new(22), Felt::new(23), Felt::new(24)]; - let note_metadata = - NoteMetadata::new(sender, NoteType::OffChain, 2u32.into(), Default::default()).unwrap(); - let note_assets = NoteAssets::new(vec![fungible_asset_3]).unwrap(); - let note_recipient = - NoteRecipient::new(SERIAL_NUM_6, note_script, NoteInputs::new(vec![Felt::new(2)]).unwrap()); - let created_note_3 = Note::new(note_assets, note_metadata, note_recipient); - - let created_notes = vec![created_note_1, created_note_2, created_note_3]; - - // CONSUMED NOTES - // -------------------------------------------------------------------------------------------- - - // create note 1 script - let note_1_script_src = format!( - "\ - begin - # create note 0 - push.{created_note_0_recipient} - push.{created_note_0_tag} - 
push.{created_note_0_asset} - # MAST root of the `create_note` mock account procedure - # call.0xacb46cadec8d1721934827ed161b851f282f1f4b88b72391a67fed668b1a00ba - drop dropw dropw - - # create note 1 - push.{created_note_1_recipient} - push.{created_note_1_tag} - push.{created_note_1_asset} - # MAST root of the `create_note` mock account procedure - # call.0xacb46cadec8d1721934827ed161b851f282f1f4b88b72391a67fed668b1a00ba - drop dropw dropw - end - ", - created_note_0_recipient = prepare_word(&created_notes[0].recipient_digest()), - created_note_0_tag = created_notes[0].metadata().tag(), - created_note_0_asset = prepare_assets(created_notes[0].assets())[0], - created_note_1_recipient = prepare_word(&created_notes[1].recipient_digest()), - created_note_1_tag = created_notes[1].metadata().tag(), - created_note_1_asset = prepare_assets(created_notes[1].assets())[0], - ); - let note_1_script_ast = ProgramAst::parse(¬e_1_script_src).unwrap(); - let (_note_1_script, _) = NoteScript::new(note_1_script_ast, assembler).unwrap(); - - // create note 2 script - let note_2_script_src = format!( - "\ - begin - # create note 2 - push.{created_note_2_recipient} - push.{created_note_2_tag} - push.{created_note_2_asset} - # MAST root of the `create_note` mock account procedure - # call.0xacb46cadec8d1721934827ed161b851f282f1f4b88b72391a67fed668b1a00ba - drop dropw dropw - end - ", - created_note_2_recipient = prepare_word(&created_notes[2].recipient_digest()), - created_note_2_tag = created_notes[2].metadata().tag(), - created_note_2_asset = prepare_assets(created_notes[2].assets())[0], - ); - let note_2_script_ast = ProgramAst::parse(¬e_2_script_src).unwrap(); - let (note_2_script, _) = NoteScript::new(note_2_script_ast, assembler).unwrap(); - - // Consumed Notes - const SERIAL_NUM_1: Word = [Felt::new(1), Felt::new(2), Felt::new(3), Felt::new(4)]; - let note_metadata = - NoteMetadata::new(sender, NoteType::OffChain, 1u32.into(), Default::default()).unwrap(); - let note_recipient = NoteRecipient::new( - SERIAL_NUM_1, - note_2_script.clone(), - NoteInputs::new(vec![Felt::new(1)]).unwrap(), - ); - let note_assets = NoteAssets::new(vec![fungible_asset_1]).unwrap(); - let consumed_note_1 = Note::new(note_assets, note_metadata, note_recipient); - - const SERIAL_NUM_2: Word = [Felt::new(5), Felt::new(6), Felt::new(7), Felt::new(8)]; - let note_metadata = - NoteMetadata::new(sender, NoteType::OffChain, 2u32.into(), Default::default()).unwrap(); - let note_assets = NoteAssets::new(vec![fungible_asset_2, fungible_asset_3]).unwrap(); - let note_recipient = NoteRecipient::new( - SERIAL_NUM_2, - note_2_script, - NoteInputs::new(vec![Felt::new(2)]).unwrap(), - ); - - let consumed_note_2 = Note::new(note_assets, note_metadata, note_recipient); - - let consumed_notes = vec![consumed_note_1, consumed_note_2]; - - (consumed_notes, created_notes) -} - -fn get_account_with_nonce( - account_id: AccountId, - public_key: Word, - assets: Option, - nonce: u64, -) -> Account { - let account_code_src = DEFAULT_ACCOUNT_CODE; - let account_code_ast = ModuleAst::parse(account_code_src).unwrap(); - let account_assembler = TransactionKernel::assembler(); - - let account_code = AccountCode::new(account_code_ast, &account_assembler).unwrap(); - let slot_item = SlotItem { - index: 0, - slot: StorageSlot::new_value(public_key), - }; - let account_storage = AccountStorage::new(vec![slot_item]).unwrap(); - - let asset_vault = match assets { - Some(asset) => AssetVault::new(&[asset]).unwrap(), - None => AssetVault::new(&[]).unwrap(), - }; - - 
Account::new(account_id, asset_vault, account_storage, account_code, Felt::new(nonce)) -} - -pub fn get_account_with_default_account_code( - account_id: AccountId, - public_key: Word, - assets: Option, -) -> Account { - get_account_with_nonce(account_id, public_key, assets, 1) -} - -pub fn get_new_account_with_default_account_code( - account_id: AccountId, - public_key: Word, - assets: Option, -) -> Account { - get_account_with_nonce(account_id, public_key, assets, 0) -} - -fn prepare_assets(note_assets: &NoteAssets) -> Vec { - let mut assets = Vec::new(); - for &asset in note_assets.iter() { - let asset_word: Word = asset.into(); - let asset_str = prepare_word(&asset_word); - assets.push(asset_str); - } - assets -} diff --git a/src/store/data_store.rs b/src/store/data_store.rs deleted file mode 100644 index 76cb3480d..000000000 --- a/src/store/data_store.rs +++ /dev/null @@ -1,186 +0,0 @@ -use alloc::collections::BTreeSet; - -use miden_objects::{ - accounts::AccountId, - assembly::ModuleAst, - crypto::merkle::{InOrderIndex, MerklePath, PartialMmr}, - notes::NoteId, - transaction::{ChainMmr, InputNote, InputNotes}, - BlockHeader, -}; -use miden_tx::{DataStore, DataStoreError, TransactionInputs}; - -use super::{ChainMmrNodeFilter, NoteFilter, Store}; -use crate::errors::{ClientError, StoreError}; - -// DATA STORE -// ================================================================================================ - -pub struct ClientDataStore { - /// Local database containing information about the accounts managed by this client. - pub(crate) store: S, -} - -impl ClientDataStore { - pub fn new(store: S) -> Self { - Self { store } - } -} - -impl DataStore for ClientDataStore { - fn get_transaction_inputs( - &self, - account_id: AccountId, - block_num: u32, - notes: &[NoteId], - ) -> Result { - // First validate that no note has already been consumed - let unspent_notes = self - .store - .get_input_notes(NoteFilter::Committed)? - .iter() - .map(|note_record| note_record.id()) - .collect::>(); - - for note_id in notes { - if !unspent_notes.contains(note_id) { - return Err(DataStoreError::NoteAlreadyConsumed(*note_id)); - } - } - - // Construct Account - let (account, seed) = self.store.get_account(account_id)?; - - // Get header data - let (block_header, _had_notes) = self.store.get_block_header_by_num(block_num)?; - - let mut list_of_notes = vec![]; - - let mut notes_blocks: Vec = vec![]; - for note_id in notes { - let input_note_record = self.store.get_input_note(*note_id)?; - - let input_note: InputNote = input_note_record - .try_into() - .map_err(|err: ClientError| DataStoreError::InternalError(err.to_string()))?; - - list_of_notes.push(input_note.clone()); - - let note_block_num = input_note.proof().origin().block_num; - - if note_block_num != block_num { - notes_blocks.push(note_block_num); - } - } - - let notes_blocks: Vec = self - .store - .get_block_headers(¬es_blocks)? 
- .iter() - .map(|(header, _has_notes)| *header) - .collect(); - - let partial_mmr = build_partial_mmr_with_paths(&self.store, block_num, ¬es_blocks)?; - let chain_mmr = ChainMmr::new(partial_mmr, notes_blocks) - .map_err(|err| DataStoreError::InternalError(err.to_string()))?; - - let input_notes = - InputNotes::new(list_of_notes).map_err(DataStoreError::InvalidTransactionInput)?; - - TransactionInputs::new(account, seed, block_header, chain_mmr, input_notes) - .map_err(DataStoreError::InvalidTransactionInput) - } - - fn get_account_code(&self, account_id: AccountId) -> Result { - let (account, _seed) = self.store.get_account(account_id)?; - let module_ast = account.code().module().clone(); - - Ok(module_ast) - } -} - -/// Builds a [PartialMmr] with a specified forest number and a list of blocks that should be -/// authenticated. -/// -/// `authenticated_blocks` cannot contain `forest`. For authenticating the last block we have, -/// the kernel extends the MMR which is why it's not needed here. -fn build_partial_mmr_with_paths( - store: &S, - forest: u32, - authenticated_blocks: &[BlockHeader], -) -> Result { - let mut partial_mmr: PartialMmr = { - let current_peaks = store.get_chain_mmr_peaks_by_block_num(forest)?; - - PartialMmr::from_peaks(current_peaks) - }; - - let block_nums: Vec = authenticated_blocks.iter().map(|b| b.block_num()).collect(); - - let authentication_paths = - get_authentication_path_for_blocks(store, &block_nums, partial_mmr.forest())?; - - for (header, path) in authenticated_blocks.iter().zip(authentication_paths.iter()) { - partial_mmr - .track(header.block_num() as usize, header.hash(), path) - .map_err(|err| DataStoreError::InternalError(err.to_string()))?; - } - - Ok(partial_mmr) -} - -/// Retrieves all Chain MMR nodes required for authenticating the set of blocks, and then -/// constructs the path for each of them. -/// -/// This method assumes `block_nums` cannot contain `forest`. -pub fn get_authentication_path_for_blocks( - store: &S, - block_nums: &[u32], - forest: usize, -) -> Result, StoreError> { - let mut node_indices = BTreeSet::new(); - - // Calculate all needed nodes indices for generating the paths - for block_num in block_nums { - let path_depth = mmr_merkle_path_len(*block_num as usize, forest); - - let mut idx = InOrderIndex::from_leaf_pos(*block_num as usize); - - for _ in 0..path_depth { - node_indices.insert(idx.sibling()); - idx = idx.parent(); - } - } - - // Get all Mmr nodes based on collected indices - let node_indices: Vec = node_indices.into_iter().collect(); - - let filter = ChainMmrNodeFilter::List(&node_indices); - let mmr_nodes = store.get_chain_mmr_nodes(filter)?; - - // Construct authentication paths - let mut authentication_paths = vec![]; - for block_num in block_nums { - let mut merkle_nodes = vec![]; - let mut idx = InOrderIndex::from_leaf_pos(*block_num as usize); - - while let Some(node) = mmr_nodes.get(&idx.sibling()) { - merkle_nodes.push(*node); - idx = idx.parent(); - } - let path = MerklePath::new(merkle_nodes); - authentication_paths.push(path); - } - - Ok(authentication_paths) -} - -/// Calculates the merkle path length for an MMR of a specific forest and a leaf index -/// `leaf_index` is a 0-indexed leaf number and `forest` is the total amount of leaves -/// in the MMR at this point. 
-fn mmr_merkle_path_len(leaf_index: usize, forest: usize) -> usize { - let before = forest & leaf_index; - let after = forest ^ before; - - after.ilog2() as usize -} diff --git a/src/store/mod.rs b/src/store/mod.rs deleted file mode 100644 index 8b49b6cc0..000000000 --- a/src/store/mod.rs +++ /dev/null @@ -1,327 +0,0 @@ -use alloc::collections::BTreeMap; - -use clap::error::Result; -use miden_objects::{ - accounts::{Account, AccountId, AccountStub}, - crypto::{ - dsa::rpo_falcon512::SecretKey, - merkle::{InOrderIndex, MmrPeaks}, - }, - notes::{NoteId, Nullifier}, - transaction::TransactionId, - BlockHeader, Digest, Felt, Word, -}; -use miden_tx::utils::{ByteReader, ByteWriter, Deserializable, DeserializationError, Serializable}; - -use crate::{ - client::{ - sync::SyncedNewNotes, - transactions::{TransactionRecord, TransactionResult}, - }, - errors::StoreError, -}; - -pub mod data_store; -pub mod sqlite_store; - -mod note_record; -pub use note_record::{InputNoteRecord, NoteRecordDetails, NoteStatus, OutputNoteRecord}; - -// STORE TRAIT -// ================================================================================================ - -/// The [Store] trait exposes all methods that the client store needs in order to track the current -/// state. -/// -/// All update functions are implied to be atomic. That is, if multiple entities are meant to be -/// updated as part of any single function and an error is returned during its execution, any -/// changes that might have happened up to that point need to be rolled back and discarded. -pub trait Store { - // TRANSACTIONS - // -------------------------------------------------------------------------------------------- - - /// Retrieves stored transactions, filtered by [TransactionFilter]. - fn get_transactions( - &self, - filter: TransactionFilter, - ) -> Result, StoreError>; - - /// Applies a transaction, atomically updating the current state based on the - /// [TransactionResult] - /// - /// An update involves: - /// - Applying the resulting [AccountDelta](miden_objects::accounts::AccountDelta) and storing the new [Account] state - /// - Storing new notes as a result of the transaction execution - /// - Inserting the transaction into the store to track - fn apply_transaction(&mut self, tx_result: TransactionResult) -> Result<(), StoreError>; - - // NOTES - // -------------------------------------------------------------------------------------------- - - /// Retrieves the input notes from the store - fn get_input_notes(&self, filter: NoteFilter) -> Result, StoreError>; - - /// Retrieves the output notes from the store - fn get_output_notes(&self, filter: NoteFilter) -> Result, StoreError>; - - /// Retrieves an [InputNoteRecord] for the input note corresponding to the specified ID from - /// the store. - /// - /// # Errors - /// - /// Returns a [StoreError::InputNoteNotFound] if there is no Note with the provided ID - fn get_input_note(&self, note_id: NoteId) -> Result; - - /// Returns the nullifiers of all unspent input notes - /// - /// The default implementation of this method uses [Store::get_input_notes]. - fn get_unspent_input_note_nullifiers(&self) -> Result, StoreError> { - let nullifiers = self - .get_input_notes(NoteFilter::Committed)? 
- .iter() - .map(|input_note| Ok(Nullifier::from(Digest::try_from(input_note.nullifier())?))) - .collect::, _>>(); - - nullifiers - } - - /// Inserts the provided input note into the database - fn insert_input_note(&mut self, note: &InputNoteRecord) -> Result<(), StoreError>; - - // CHAIN DATA - // -------------------------------------------------------------------------------------------- - - /// Retrieves a vector of [BlockHeader]s filtered by the provided block numbers. - /// - /// The returned vector may not contain some or all of the requested block headers. It's up to - /// the callee to check whether all requested block headers were found. - /// - /// For each block header an additional boolean value is returned representing whether the block - /// contains notes relevant to the client. - fn get_block_headers( - &self, - block_numbers: &[u32], - ) -> Result, StoreError>; - - /// Retrieves a [BlockHeader] corresponding to the provided block number and a boolean value - /// that represents whether the block contains notes relevant to the client. - /// - /// The default implementation of this method uses [Store::get_block_headers]. - /// - /// # Errors - /// Returns a [StoreError::BlockHeaderNotFound] if the block was not found. - fn get_block_header_by_num( - &self, - block_number: u32, - ) -> Result<(BlockHeader, bool), StoreError> { - self.get_block_headers(&[block_number]) - .map(|block_headers_list| block_headers_list.first().cloned()) - .and_then(|block_header| { - block_header.ok_or(StoreError::BlockHeaderNotFound(block_number)) - }) - } - - /// Retrieves a list of [BlockHeader] that include relevant notes to the client. - fn get_tracked_block_headers(&self) -> Result, StoreError>; - - /// Retrieves all MMR authentication nodes based on [ChainMmrNodeFilter]. - fn get_chain_mmr_nodes( - &self, - filter: ChainMmrNodeFilter, - ) -> Result, StoreError>; - - /// Returns peaks information from the blockchain by a specific block number. - /// - /// If there is no chain MMR info stored for the provided block returns an empty [MmrPeaks] - fn get_chain_mmr_peaks_by_block_num(&self, block_num: u32) -> Result; - - /// Inserts a block header into the store, alongside peaks information at the block's height. - /// - /// `has_client_notes` describes whether the block has relevant notes to the client; this means - /// the client might want to authenticate merkle paths based on this value. - fn insert_block_header( - &self, - block_header: BlockHeader, - chain_mmr_peaks: MmrPeaks, - has_client_notes: bool, - ) -> Result<(), StoreError>; - - // ACCOUNT - // -------------------------------------------------------------------------------------------- - - /// Returns the account IDs of all accounts stored in the database - fn get_account_ids(&self) -> Result, StoreError>; - - /// Returns a list of [AccountStub] of all accounts stored in the database along with the seeds - /// used to create them. - /// - /// Said accounts' state is the state after the last performed sync. - fn get_account_stubs(&self) -> Result)>, StoreError>; - - /// Retrieves an [AccountStub] object for the specified [AccountId] along with the seed - /// used to create it. The seed will be returned if the account is new, otherwise it - /// will be `None`. - /// - /// Said account's state is the state according to the last sync performed. 
- /// - /// # Errors - /// Returns a `StoreError::AccountDataNotFound` if there is no account for the provided ID - fn get_account_stub( - &self, - account_id: AccountId, - ) -> Result<(AccountStub, Option), StoreError>; - - /// Retrieves a full [Account] object. The seed will be returned if the account is new, - /// otherwise it will be `None`. - /// - /// This function returns the [Account]'s latest state. If the account is new (that is, has - /// never executed a trasaction), the returned seed will be `Some(Word)`; otherwise the seed - /// will be `None` - /// - /// # Errors - /// - /// Returns a `StoreError::AccountDataNotFound` if there is no account for the provided ID - fn get_account(&self, account_id: AccountId) -> Result<(Account, Option), StoreError>; - - /// Retrieves an account's [AuthInfo], utilized to authenticate the account. - /// - /// # Errors - /// - /// Returns a `StoreError::AccountDataNotFound` if there is no account for the provided ID - fn get_account_auth(&self, account_id: AccountId) -> Result; - - /// Inserts an [Account] along with the seed used to create it and its [AuthInfo] - fn insert_account( - &mut self, - account: &Account, - account_seed: Option, - auth_info: &AuthInfo, - ) -> Result<(), StoreError>; - - // SYNC - // -------------------------------------------------------------------------------------------- - - /// Returns the note tags that the client is interested in. - fn get_note_tags(&self) -> Result, StoreError>; - - /// Adds a note tag to the list of tags that the client is interested in. - fn add_note_tag(&mut self, tag: u64) -> Result; - - /// Returns the block number of the last state sync block. - fn get_sync_height(&self) -> Result; - - /// Applies the state sync update to the store. An update involves: - /// - /// - Inserting the new block header to the store alongside new MMR peaks information - /// - Updating the notes, marking them as `committed` or `consumed` based on incoming - /// inclusion proofs and nullifiers - /// - Updating transactions in the store, marking as `committed` the ones provided with - /// `committed_transactions` - /// - Storing new MMR authentication nodes - fn apply_state_sync( - &mut self, - block_header: BlockHeader, - nullifiers: Vec, - new_note_details: SyncedNewNotes, - committed_transactions: &[TransactionId], - new_mmr_peaks: MmrPeaks, - new_authentication_nodes: &[(InOrderIndex, Digest)], - updated_onchain_accounts: &[Account], - ) -> Result<(), StoreError>; -} - -// DATABASE AUTH INFO -// ================================================================================================ - -/// Represents the types of authentication information of accounts -#[derive(Debug)] -pub enum AuthInfo { - RpoFalcon512(SecretKey), -} - -const RPO_FALCON512_AUTH: u8 = 0; - -impl AuthInfo { - /// Returns byte identifier of specific AuthInfo - const fn type_byte(&self) -> u8 { - match self { - AuthInfo::RpoFalcon512(_) => RPO_FALCON512_AUTH, - } - } - - /// Returns the authentication information as a tuple of (key, value) - /// that can be input to the advice map at the moment of transaction execution. 
- pub fn into_advice_inputs(self) -> (Word, Vec) { - match self { - AuthInfo::RpoFalcon512(key) => { - let pub_key: Word = key.public_key().into(); - let mut pk_sk_bytes = key.to_bytes(); - pk_sk_bytes.append(&mut pub_key.to_bytes()); - - (pub_key, pk_sk_bytes.iter().map(|a| Felt::new(*a as u64)).collect::>()) - }, - } - } -} - -impl Serializable for AuthInfo { - fn write_into(&self, target: &mut W) { - let mut bytes = vec![self.type_byte()]; - match self { - AuthInfo::RpoFalcon512(key_pair) => { - bytes.append(&mut key_pair.to_bytes()); - target.write_bytes(&bytes); - }, - } - } -} - -impl Deserializable for AuthInfo { - fn read_from(source: &mut R) -> Result { - let auth_type: u8 = source.read_u8()?; - match auth_type { - RPO_FALCON512_AUTH => { - let key_pair = SecretKey::read_from(source)?; - Ok(AuthInfo::RpoFalcon512(key_pair)) - }, - val => Err(DeserializationError::InvalidValue(val.to_string())), - } - } -} - -// CHAIN MMR NODE FILTER -// ================================================================================================ - -pub enum ChainMmrNodeFilter<'a> { - /// Return all nodes. - All, - /// Filter by the specified in-order indices. - List(&'a [InOrderIndex]), -} - -// TRANSACTION FILTERS -// ================================================================================================ - -pub enum TransactionFilter { - /// Return all transactions. - All, - /// Filter by transactions that have not yet been committed to the blockchain as per the last - /// sync. - Uncomitted, -} - -// NOTE FILTER -// ================================================================================================ - -pub enum NoteFilter { - /// Return a list of all notes ([InputNoteRecord] or [OutputNoteRecord]). - All, - /// Filter by consumed notes ([InputNoteRecord] or [OutputNoteRecord]). notes that have been used as inputs in transactions. - Consumed, - /// Return a list of committed notes ([InputNoteRecord] or [OutputNoteRecord]). These represent notes that the blockchain - /// has included in a block, and for which we are storing anchor data. - Committed, - /// Return a list of pending notes ([InputNoteRecord] or [OutputNoteRecord]). These represent notes for which the store - /// does not have anchor data. - Pending, -} diff --git a/src/store/note_record/input_note_record.rs b/src/store/note_record/input_note_record.rs deleted file mode 100644 index 8724012be..000000000 --- a/src/store/note_record/input_note_record.rs +++ /dev/null @@ -1,189 +0,0 @@ -use miden_objects::{ - notes::{ - Note, NoteAssets, NoteId, NoteInclusionProof, NoteInputs, NoteMetadata, NoteRecipient, - }, - transaction::InputNote, - utils::{ByteReader, ByteWriter, Deserializable, DeserializationError, Serializable}, - Digest, -}; - -use super::{NoteRecordDetails, NoteStatus}; -use crate::errors::ClientError; - -// INPUT NOTE RECORD -// ================================================================================================ - -/// Represents a Note of which the Store can keep track and retrieve. -/// -/// An [InputNoteRecord] contains all the information of a [Note], in addition of (optionally) the -/// [NoteInclusionProof] that identifies when the note was included in the chain. -/// -/// Once the proof is set, the [InputNoteRecord] can be transformed into an [InputNote] and used as -/// input for transactions. 
-/// -/// It is also possible to convert [Note] and [InputNote] into [InputNoteRecord] (we fill the -/// `metadata` and `inclusion_proof` fields if possible) -#[derive(Clone, Debug, PartialEq)] -pub struct InputNoteRecord { - assets: NoteAssets, - details: NoteRecordDetails, - id: NoteId, - inclusion_proof: Option, - metadata: Option, - recipient: Digest, - status: NoteStatus, -} - -impl InputNoteRecord { - pub fn new( - id: NoteId, - recipient: Digest, - assets: NoteAssets, - status: NoteStatus, - metadata: Option, - inclusion_proof: Option, - details: NoteRecordDetails, - ) -> InputNoteRecord { - InputNoteRecord { - id, - recipient, - assets, - status, - metadata, - inclusion_proof, - details, - } - } - - pub fn id(&self) -> NoteId { - self.id - } - - pub fn recipient(&self) -> Digest { - self.recipient - } - - pub fn assets(&self) -> &NoteAssets { - &self.assets - } - - pub fn status(&self) -> NoteStatus { - self.status - } - - pub fn metadata(&self) -> Option<&NoteMetadata> { - self.metadata.as_ref() - } - - pub fn nullifier(&self) -> &str { - &self.details.nullifier - } - - pub fn inclusion_proof(&self) -> Option<&NoteInclusionProof> { - self.inclusion_proof.as_ref() - } - - pub fn details(&self) -> &NoteRecordDetails { - &self.details - } -} - -impl Serializable for InputNoteRecord { - fn write_into(&self, target: &mut W) { - self.id().write_into(target); - self.recipient().write_into(target); - self.assets().write_into(target); - self.status().write_into(target); - self.metadata().write_into(target); - self.details().write_into(target); - self.inclusion_proof().write_into(target); - } -} - -impl Deserializable for InputNoteRecord { - fn read_from(source: &mut R) -> Result { - let id = NoteId::read_from(source)?; - let recipient = Digest::read_from(source)?; - let assets = NoteAssets::read_from(source)?; - let status = NoteStatus::read_from(source)?; - let metadata = Option::::read_from(source)?; - let details = NoteRecordDetails::read_from(source)?; - let inclusion_proof = Option::::read_from(source)?; - - Ok(InputNoteRecord { - id, - recipient, - assets, - status, - metadata, - inclusion_proof, - details, - }) - } -} - -impl From for InputNoteRecord { - fn from(note: Note) -> Self { - InputNoteRecord { - id: note.id(), - recipient: note.recipient_digest(), - assets: note.assets().clone(), - status: NoteStatus::Pending, - metadata: Some(*note.metadata()), - inclusion_proof: None, - details: NoteRecordDetails::new( - note.nullifier().to_string(), - note.script().clone(), - note.inputs().to_vec(), - note.serial_num(), - ), - } - } -} - -impl From for InputNoteRecord { - fn from(recorded_note: InputNote) -> Self { - InputNoteRecord { - id: recorded_note.note().id(), - recipient: recorded_note.note().recipient_digest(), - assets: recorded_note.note().assets().clone(), - status: NoteStatus::Pending, - metadata: Some(*recorded_note.note().metadata()), - details: NoteRecordDetails::new( - recorded_note.note().nullifier().to_string(), - recorded_note.note().script().clone(), - recorded_note.note().inputs().values().to_vec(), - recorded_note.note().serial_num(), - ), - inclusion_proof: Some(recorded_note.proof().clone()), - } - } -} - -impl TryInto for InputNoteRecord { - type Error = ClientError; - - fn try_into(self) -> Result { - match (self.inclusion_proof, self.metadata) { - (Some(proof), Some(metadata)) => { - // TODO: Write functions to get these fields more easily - let note_inputs = NoteInputs::new(self.details.inputs)?; - let note_recipient = - 
NoteRecipient::new(self.details.serial_num, self.details.script, note_inputs); - let note = Note::new(self.assets, metadata, note_recipient); - Ok(InputNote::new(note, proof.clone())) - }, - - (None, _) => { - Err(ClientError::NoteError(miden_objects::NoteError::invalid_origin_index( - "Input Note Record contains no inclusion proof".to_string(), - ))) - }, - (_, None) => { - Err(ClientError::NoteError(miden_objects::NoteError::invalid_origin_index( - "Input Note Record contains no metadata".to_string(), - ))) - }, - } - } -} diff --git a/src/store/note_record/mod.rs b/src/store/note_record/mod.rs deleted file mode 100644 index 603d8d479..000000000 --- a/src/store/note_record/mod.rs +++ /dev/null @@ -1,173 +0,0 @@ -use miden_objects::{ - assembly::{Assembler, ProgramAst}, - notes::NoteScript, - utils::{ByteReader, ByteWriter, Deserializable, DeserializationError, Serializable}, - Digest, Felt, Word, -}; -use serde::{Deserialize, Serialize}; - -mod input_note_record; -mod output_note_record; - -pub use input_note_record::InputNoteRecord; -pub use output_note_record::OutputNoteRecord; - -/// This module defines common structs to be used within the [Store](crate::store::Store) for notes -/// that are available to be consumed ([InputNoteRecord]) and notes that have been produced as a -/// result of executing a transaction ([OutputNoteRecord]). -/// -/// # Features -/// -/// ## Serialization / Deserialization -/// -/// We provide serialization and deserialization support via [Serializable] and [Deserializable] -/// traits implementations, and also via [Serialize] and [Deserialize] from `serde` to provide the -/// ability to serialize most fields into JSON. This is useful for example if you want to store -/// some fields as json columns like we do in -/// [SqliteStore](crate::store::sqlite_store::SqliteStore). For example, suppose we want to store -/// [InputNoteRecord]'s metadata field in a JSON column. In that case, we could do something like: -/// -/// ```ignore -/// fn insert_metadata_into_some_table(db: &mut Database, note: InputNoteRecord) { -/// let note_metadata_json = serde_json::to_string(note.metadata()).unwrap(); -/// -/// db.execute("INSERT INTO notes_metadata (note_id, note_metadata) VALUES (?, ?)", -/// note.id().to_hex(), note_metadata_json).unwrap() -/// } -/// ``` -/// -/// ## Type conversion -/// -/// We also facilitate converting from/into [InputNote](miden_objects::transaction::InputNote) / -/// [Note](miden_objects::notes::Note), although this is not always possible. 
Check both -/// [InputNoteRecord]'s and [OutputNoteRecord]'s documentation for more details into this - -// NOTE STATUS -// ================================================================================================ -#[derive(Clone, Copy, Debug, Serialize, Deserialize, PartialEq, Eq)] -pub enum NoteStatus { - Pending, - Committed, - Consumed, -} - -impl From for u8 { - fn from(value: NoteStatus) -> Self { - match value { - NoteStatus::Pending => 0, - NoteStatus::Committed => 1, - NoteStatus::Consumed => 2, - } - } -} - -impl TryFrom for NoteStatus { - type Error = DeserializationError; - fn try_from(value: u8) -> Result { - match value { - 0 => Ok(NoteStatus::Pending), - 1 => Ok(NoteStatus::Committed), - 2 => Ok(NoteStatus::Consumed), - _ => Err(DeserializationError::InvalidValue(value.to_string())), - } - } -} - -impl Serializable for NoteStatus { - fn write_into(&self, target: &mut W) { - target.write_bytes(&[(*self).into()]); - } -} - -impl Deserializable for NoteStatus { - fn read_from(source: &mut R) -> Result { - let enum_byte = u8::read_from(source)?; - enum_byte.try_into() - } -} - -fn default_script() -> NoteScript { - let assembler = Assembler::default(); - let note_program_ast = - ProgramAst::parse("begin end").expect("dummy script should be parseable"); - let (note_script, _) = NoteScript::new(note_program_ast, &assembler) - .expect("dummy note script should be created without issues"); - note_script -} - -// NOTE: NoteInputs does not impl Serialize which is why we use Vec here -#[derive(Clone, Debug, Serialize, Deserialize, PartialEq)] -pub struct NoteRecordDetails { - nullifier: String, - script_hash: Digest, - #[serde(skip_serializing, skip_deserializing, default = "default_script")] - script: NoteScript, - inputs: Vec, - serial_num: Word, -} - -impl NoteRecordDetails { - pub fn new(nullifier: String, script: NoteScript, inputs: Vec, serial_num: Word) -> Self { - let script_hash = script.hash(); - Self { - nullifier, - script, - script_hash, - inputs, - serial_num, - } - } - - pub fn nullifier(&self) -> &str { - &self.nullifier - } - - pub fn script_hash(&self) -> &Digest { - &self.script_hash - } - - pub fn script(&self) -> &NoteScript { - &self.script - } - - pub fn inputs(&self) -> &Vec { - &self.inputs - } - - pub fn serial_num(&self) -> Word { - self.serial_num - } -} - -impl Serializable for NoteRecordDetails { - fn write_into(&self, target: &mut W) { - let nullifier_bytes = self.nullifier.as_bytes(); - target.write_usize(nullifier_bytes.len()); - target.write_bytes(nullifier_bytes); - - self.script().write_into(target); - - target.write_usize(self.inputs.len()); - target.write_many(self.inputs()); - - self.serial_num().write_into(target); - } -} - -impl Deserializable for NoteRecordDetails { - fn read_from(source: &mut R) -> Result { - let nullifier_len = usize::read_from(source)?; - let nullifier_bytes = source.read_vec(nullifier_len)?; - let nullifier = - String::from_utf8(nullifier_bytes).expect("Nullifier String bytes should be readable."); - - let script = NoteScript::read_from(source)?; - - let inputs_len = source.read_usize()?; - let inputs = source.read_many::(inputs_len)?; - - let serial_num = Word::read_from(source)?; - - Ok(NoteRecordDetails::new(nullifier, script, inputs, serial_num)) - } -} diff --git a/src/store/note_record/output_note_record.rs b/src/store/note_record/output_note_record.rs deleted file mode 100644 index a5c2441dc..000000000 --- a/src/store/note_record/output_note_record.rs +++ /dev/null @@ -1,98 +0,0 @@ -use miden_objects::{ - 
notes::{Note, NoteAssets, NoteId, NoteInclusionProof, NoteMetadata}, - Digest, -}; - -use super::{NoteRecordDetails, NoteStatus}; - -// OUTPUT NOTE RECORD -// ================================================================================================ - -/// Represents a Note which was the result of executing some transaction of which the Store can -/// keep track and retrieve. -/// -/// An [OutputNoteRecord] contains all the information of a [Note] while it allows for not knowing -/// the details (nullifier, script, inputs and serial number), in addition of (optionally) the -/// [NoteInclusionProof] that identifies when the note was included in the chain. -/// -/// It is also possible to convert [Note] into [OutputNoteRecord] (we fill the `details` and -/// `inclusion_proof` fields if possible) -#[derive(Clone, Debug, PartialEq)] -pub struct OutputNoteRecord { - assets: NoteAssets, - details: Option, - id: NoteId, - inclusion_proof: Option, - metadata: NoteMetadata, - recipient: Digest, - status: NoteStatus, -} - -impl OutputNoteRecord { - pub fn new( - id: NoteId, - recipient: Digest, - assets: NoteAssets, - status: NoteStatus, - metadata: NoteMetadata, - inclusion_proof: Option, - details: Option, - ) -> OutputNoteRecord { - OutputNoteRecord { - id, - recipient, - assets, - status, - metadata, - inclusion_proof, - details, - } - } - - pub fn id(&self) -> NoteId { - self.id - } - - pub fn recipient(&self) -> Digest { - self.recipient - } - - pub fn assets(&self) -> &NoteAssets { - &self.assets - } - - pub fn status(&self) -> NoteStatus { - self.status - } - - pub fn metadata(&self) -> &NoteMetadata { - &self.metadata - } - - pub fn inclusion_proof(&self) -> Option<&NoteInclusionProof> { - self.inclusion_proof.as_ref() - } - - pub fn details(&self) -> Option<&NoteRecordDetails> { - self.details.as_ref() - } -} - -impl From for OutputNoteRecord { - fn from(note: Note) -> Self { - OutputNoteRecord { - id: note.id(), - recipient: note.recipient_digest(), - assets: note.assets().clone(), - status: NoteStatus::Pending, - metadata: *note.metadata(), - inclusion_proof: None, - details: Some(NoteRecordDetails::new( - note.nullifier().to_string(), - note.script().clone(), - note.inputs().to_vec(), - note.serial_num(), - )), - } - } -} diff --git a/src/store/sqlite_store/accounts.rs b/src/store/sqlite_store/accounts.rs deleted file mode 100644 index 109e4dbe9..000000000 --- a/src/store/sqlite_store/accounts.rs +++ /dev/null @@ -1,500 +0,0 @@ -use clap::error::Result; -use miden_lib::transaction::TransactionKernel; -use miden_objects::{ - accounts::{Account, AccountCode, AccountId, AccountStorage, AccountStub}, - assembly::{AstSerdeOptions, ModuleAst}, - assets::{Asset, AssetVault}, - Digest, Felt, Word, -}; -use miden_tx::utils::{Deserializable, Serializable}; -use rusqlite::{params, Transaction}; - -use super::SqliteStore; -use crate::{errors::StoreError, store::AuthInfo}; - -// TYPES -// ================================================================================================ -type SerializedAccountData = (i64, String, String, String, i64, bool); -type SerializedAccountsParts = (i64, i64, String, String, String, Option>); - -type SerializedAccountAuthData = (i64, Vec); -type SerializedAccountAuthParts = (i64, Vec); - -type SerializedAccountVaultData = (String, String); -type SerializedAccountVaultParts = (String, String); - -type SerializedAccountCodeData = (String, String, Vec); -type SerializedAccountCodeParts = (String, String, Vec); - -type SerializedAccountStorageData = (String, 
Vec); -type SerializedAccountStorageParts = (String, Vec); - -impl SqliteStore { - // ACCOUNTS - // -------------------------------------------------------------------------------------------- - - pub(super) fn get_account_ids(&self) -> Result, StoreError> { - const QUERY: &str = "SELECT DISTINCT id FROM accounts"; - - self.db - .prepare(QUERY)? - .query_map([], |row| row.get(0)) - .expect("no binding parameters used in query") - .map(|result| { - Ok(result - .map(|id: i64| AccountId::try_from(id as u64).expect("account id is valid"))?) - }) - .collect::, StoreError>>() - } - - pub(super) fn get_account_stubs(&self) -> Result)>, StoreError> { - const QUERY: &str = - "SELECT a.id, a.nonce, a.vault_root, a.storage_root, a.code_root, a.account_seed \ - FROM accounts a \ - WHERE a.nonce = (SELECT MAX(b.nonce) FROM accounts b WHERE b.id = a.id)"; - - self.db - .prepare(QUERY)? - .query_map([], parse_accounts_columns) - .expect("no binding parameters used in query") - .map(|result| Ok(result?).and_then(parse_accounts)) - .collect() - } - - pub(crate) fn get_account_stub( - &self, - account_id: AccountId, - ) -> Result<(AccountStub, Option), StoreError> { - let account_id_int: u64 = account_id.into(); - const QUERY: &str = "SELECT id, nonce, vault_root, storage_root, code_root, account_seed \ - FROM accounts WHERE id = ? \ - ORDER BY nonce DESC \ - LIMIT 1"; - self.db - .prepare(QUERY)? - .query_map(params![account_id_int as i64], parse_accounts_columns)? - .map(|result| Ok(result?).and_then(parse_accounts)) - .next() - .ok_or(StoreError::AccountDataNotFound(account_id))? - } - - // TODO: Get all parts from a single query - pub(crate) fn get_account( - &self, - account_id: AccountId, - ) -> Result<(Account, Option), StoreError> { - let (account_stub, seed) = self.get_account_stub(account_id)?; - let (_procedures, module_ast) = self.get_account_code(account_stub.code_root())?; - - let account_code = AccountCode::new(module_ast, &TransactionKernel::assembler()).unwrap(); - - let account_storage = self.get_account_storage(account_stub.storage_root())?; - - let account_vault = self.get_vault_assets(account_stub.vault_root())?; - let account_vault = AssetVault::new(&account_vault)?; - - let account = Account::new( - account_stub.id(), - account_vault, - account_storage, - account_code, - account_stub.nonce(), - ); - - Ok((account, seed)) - } - - /// Retrieve account keys data by Account Id - pub(crate) fn get_account_auth(&self, account_id: AccountId) -> Result { - let account_id_int: u64 = account_id.into(); - const QUERY: &str = "SELECT account_id, auth_info FROM account_auth WHERE account_id = ?"; - self.db - .prepare(QUERY)? - .query_map(params![account_id_int as i64], parse_account_auth_columns)? - .map(|result| Ok(result?).and_then(parse_account_auth)) - .next() - .ok_or(StoreError::AccountDataNotFound(account_id))? - } - - /// Retrieve account code-related data by code root - pub(super) fn get_account_code( - &self, - root: Digest, - ) -> Result<(Vec, ModuleAst), StoreError> { - let root_serialized = root.to_string(); - const QUERY: &str = "SELECT root, procedures, module FROM account_code WHERE root = ?"; - - self.db - .prepare(QUERY)? - .query_map(params![root_serialized], parse_account_code_columns)? - .map(|result| Ok(result?).and_then(parse_account_code)) - .next() - .ok_or(StoreError::AccountCodeDataNotFound(root))? 
- } - - /// Retrieve account storage data by vault root - pub(super) fn get_account_storage(&self, root: Digest) -> Result { - let root_serialized = &root.to_string(); - - const QUERY: &str = "SELECT root, slots FROM account_storage WHERE root = ?"; - self.db - .prepare(QUERY)? - .query_map(params![root_serialized], parse_account_storage_columns)? - .map(|result| Ok(result?).and_then(parse_account_storage)) - .next() - .ok_or(StoreError::AccountStorageNotFound(root))? - } - - /// Retrieve assets by vault root - pub(super) fn get_vault_assets(&self, root: Digest) -> Result, StoreError> { - let vault_root = - serde_json::to_string(&root).map_err(StoreError::InputSerializationError)?; - - const QUERY: &str = "SELECT root, assets FROM account_vaults WHERE root = ?"; - self.db - .prepare(QUERY)? - .query_map(params![vault_root], parse_account_asset_vault_columns)? - .map(|result| Ok(result?).and_then(parse_account_asset_vault)) - .next() - .ok_or(StoreError::VaultDataNotFound(root))? - } - - pub(crate) fn insert_account( - &mut self, - account: &Account, - account_seed: Option, - auth_info: &AuthInfo, - ) -> Result<(), StoreError> { - let tx = self.db.transaction()?; - - insert_account_code(&tx, account.code())?; - insert_account_storage(&tx, account.storage())?; - insert_account_asset_vault(&tx, account.vault())?; - insert_account_record(&tx, account, account_seed)?; - insert_account_auth(&tx, account.id(), auth_info)?; - - Ok(tx.commit()?) - } -} - -// HELPERS -// ================================================================================================ - -/// Update previously-existing account after a transaction execution -/// -/// Because the Client retrieves the account by account ID before applying the delta, we don't -/// need to check that it exists here. This inserts a new row into the accounts table. -/// We can later identify the proper account state by looking at the nonce. 
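///
/// The latest state can then be retrieved with a query of the shape used by `get_account_stub`
/// above (sketch; the column list is elided):
///
/// ```sql
/// SELECT ... FROM accounts WHERE id = ? ORDER BY nonce DESC LIMIT 1
/// ```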
-pub(crate) fn update_account( - tx: &Transaction<'_>, - new_account_state: &Account, -) -> Result<(), StoreError> { - insert_account_storage(tx, new_account_state.storage())?; - insert_account_asset_vault(tx, new_account_state.vault())?; - insert_account_record(tx, new_account_state, None) -} - -pub(super) fn insert_account_record( - tx: &Transaction<'_>, - account: &Account, - account_seed: Option, -) -> Result<(), StoreError> { - let (id, code_root, storage_root, vault_root, nonce, committed) = serialize_account(account)?; - - let account_seed = account_seed.map(|seed| seed.to_bytes()); - - const QUERY: &str = "INSERT INTO accounts (id, code_root, storage_root, vault_root, nonce, committed, account_seed) VALUES (?, ?, ?, ?, ?, ?, ?)"; - tx.execute( - QUERY, - params![id, code_root, storage_root, vault_root, nonce, committed, account_seed], - )?; - Ok(()) -} - -/// Inserts an [AccountCode] -fn insert_account_code(tx: &Transaction<'_>, account_code: &AccountCode) -> Result<(), StoreError> { - let (code_root, code, module) = serialize_account_code(account_code)?; - const QUERY: &str = - "INSERT OR IGNORE INTO account_code (root, procedures, module) VALUES (?, ?, ?)"; - tx.execute(QUERY, params![code_root, code, module,])?; - Ok(()) -} - -/// Inserts an [AccountStorage] -pub(super) fn insert_account_storage( - tx: &Transaction<'_>, - account_storage: &AccountStorage, -) -> Result<(), StoreError> { - let (storage_root, storage_slots) = serialize_account_storage(account_storage)?; - const QUERY: &str = "INSERT OR IGNORE INTO account_storage (root, slots) VALUES (?, ?)"; - tx.execute(QUERY, params![storage_root, storage_slots])?; - Ok(()) -} - -/// Inserts an [AssetVault] -pub(super) fn insert_account_asset_vault( - tx: &Transaction<'_>, - asset_vault: &AssetVault, -) -> Result<(), StoreError> { - let (vault_root, assets) = serialize_account_asset_vault(asset_vault)?; - const QUERY: &str = "INSERT OR IGNORE INTO account_vaults (root, assets) VALUES (?, ?)"; - tx.execute(QUERY, params![vault_root, assets])?; - Ok(()) -} - -/// Inserts an [AuthInfo] for the account with id `account_id` -pub(super) fn insert_account_auth( - tx: &Transaction<'_>, - account_id: AccountId, - auth_info: &AuthInfo, -) -> Result<(), StoreError> { - let (account_id, auth_info) = serialize_account_auth(account_id, auth_info)?; - const QUERY: &str = "INSERT INTO account_auth (account_id, auth_info) VALUES (?, ?)"; - tx.execute(QUERY, params![account_id, auth_info])?; - Ok(()) -} - -/// Parse accounts colums from the provided row into native types -pub(super) fn parse_accounts_columns( - row: &rusqlite::Row<'_>, -) -> Result { - let id: i64 = row.get(0)?; - let nonce: i64 = row.get(1)?; - let vault_root: String = row.get(2)?; - let storage_root: String = row.get(3)?; - let code_root: String = row.get(4)?; - let account_seed: Option> = row.get(5)?; - Ok((id, nonce, vault_root, storage_root, code_root, account_seed)) -} - -/// Parse an account from the provided parts. 
-pub(super) fn parse_accounts( - serialized_account_parts: SerializedAccountsParts, -) -> Result<(AccountStub, Option), StoreError> { - let (id, nonce, vault_root, storage_root, code_root, account_seed) = serialized_account_parts; - let account_seed = account_seed.map(|seed| Word::read_from_bytes(&seed)).transpose()?; - - Ok(( - AccountStub::new( - (id as u64) - .try_into() - .expect("Conversion from stored AccountID should not panic"), - Felt::new(nonce as u64), - serde_json::from_str(&vault_root).map_err(StoreError::JsonDataDeserializationError)?, - Digest::try_from(&storage_root)?, - Digest::try_from(&code_root)?, - ), - account_seed, - )) -} - -/// Serialized the provided account into database compatible types. -fn serialize_account(account: &Account) -> Result { - let id: u64 = account.id().into(); - let code_root = account.code().root().to_string(); - let storage_root = account.storage().root().to_string(); - let vault_root = serde_json::to_string(&account.vault().commitment()) - .map_err(StoreError::InputSerializationError)?; - let committed = account.is_on_chain(); - let nonce = account.nonce().as_int() as i64; - - Ok((id as i64, code_root, storage_root, vault_root, nonce, committed)) -} - -/// Parse account_auth columns from the provided row into native types -fn parse_account_auth_columns( - row: &rusqlite::Row<'_>, -) -> Result { - let account_id: i64 = row.get(0)?; - let auth_info_bytes: Vec = row.get(1)?; - Ok((account_id, auth_info_bytes)) -} - -/// Parse an `AuthInfo` from the provided parts. -fn parse_account_auth( - serialized_account_auth_parts: SerializedAccountAuthParts, -) -> Result { - let (_, auth_info_bytes) = serialized_account_auth_parts; - let auth_info = AuthInfo::read_from_bytes(&auth_info_bytes)?; - Ok(auth_info) -} - -/// Serialized the provided account_auth into database compatible types. -fn serialize_account_auth( - account_id: AccountId, - auth_info: &AuthInfo, -) -> Result { - let account_id: u64 = account_id.into(); - let auth_info = auth_info.to_bytes(); - Ok((account_id as i64, auth_info)) -} - -/// Parse account_code columns from the provided row into native types. -fn parse_account_code_columns( - row: &rusqlite::Row<'_>, -) -> Result { - let root: String = row.get(0)?; - let procedures: String = row.get(1)?; - let module: Vec = row.get(2)?; - Ok((root, procedures, module)) -} - -/// Parse an account_code from the provided parts. -fn parse_account_code( - serialized_account_code_parts: SerializedAccountCodeParts, -) -> Result<(Vec, ModuleAst), StoreError> { - let (_, procedures, module) = serialized_account_code_parts; - - let procedures = - serde_json::from_str(&procedures).map_err(StoreError::JsonDataDeserializationError)?; - let module = ModuleAst::from_bytes(&module)?; - Ok((procedures, module)) -} - -/// Serialize the provided account_code into database compatible types. -fn serialize_account_code( - account_code: &AccountCode, -) -> Result { - let root = account_code.root().to_string(); - let procedures = serde_json::to_string(account_code.procedures()) - .map_err(StoreError::InputSerializationError)?; - let module = account_code.module().to_bytes(AstSerdeOptions { serialize_imports: true }); - - Ok((root, procedures, module)) -} - -/// Parse account_storage columns from the provided row into native types. -fn parse_account_storage_columns( - row: &rusqlite::Row<'_>, -) -> Result { - let root: String = row.get(0)?; - let storage: Vec = row.get(1)?; - Ok((root, storage)) -} - -/// Parse an account_storage from the provided parts. 
-fn parse_account_storage( - serialized_account_storage_parts: SerializedAccountStorageParts, -) -> Result { - let (_, storage) = serialized_account_storage_parts; - - let storage = AccountStorage::read_from_bytes(&storage)?; - Ok(storage) -} - -/// Serialize the provided account_storage into database compatible types. -fn serialize_account_storage( - account_storage: &AccountStorage, -) -> Result { - let root = account_storage.root().to_string(); - let storage = account_storage.to_bytes(); - - Ok((root, storage)) -} - -/// Parse account_vault columns from the provided row into native types. -fn parse_account_asset_vault_columns( - row: &rusqlite::Row<'_>, -) -> Result { - let root: String = row.get(0)?; - let assets: String = row.get(1)?; - Ok((root, assets)) -} - -/// Parse a vector of assets from the provided parts. -fn parse_account_asset_vault( - serialized_account_asset_vault_parts: SerializedAccountVaultParts, -) -> Result, StoreError> { - let (_, assets) = serialized_account_asset_vault_parts; - - let assets = serde_json::from_str(&assets).map_err(StoreError::JsonDataDeserializationError)?; - Ok(assets) -} - -/// Serialize the provided asset_vault into database compatible types. -fn serialize_account_asset_vault( - asset_vault: &AssetVault, -) -> Result { - let root = serde_json::to_string(&asset_vault.commitment()) - .map_err(StoreError::InputSerializationError)?; - let assets: Vec = asset_vault.assets().collect(); - let assets = serde_json::to_string(&assets).map_err(StoreError::InputSerializationError)?; - Ok((root, assets)) -} - -#[cfg(test)] -mod tests { - use miden_objects::{ - accounts::{AccountCode, AccountId}, - assembly::ModuleAst, - crypto::dsa::rpo_falcon512::SecretKey, - }; - use miden_tx::utils::{Deserializable, Serializable}; - - use super::{insert_account_auth, AuthInfo}; - use crate::{ - mock::DEFAULT_ACCOUNT_CODE, - store::sqlite_store::{accounts::insert_account_code, tests::create_test_store}, - }; - - #[test] - fn test_account_code_insertion_no_duplicates() { - let mut store = create_test_store(); - let assembler = miden_lib::transaction::TransactionKernel::assembler(); - let module_ast = ModuleAst::parse(DEFAULT_ACCOUNT_CODE).unwrap(); - let account_code = AccountCode::new(module_ast, &assembler).unwrap(); - let tx = store.db.transaction().unwrap(); - - // Table is empty at the beginning - let mut actual: usize = - tx.query_row("SELECT Count(*) FROM account_code", [], |row| row.get(0)).unwrap(); - assert_eq!(actual, 0); - - // First insertion generates a new row - insert_account_code(&tx, &account_code).unwrap(); - actual = tx.query_row("SELECT Count(*) FROM account_code", [], |row| row.get(0)).unwrap(); - assert_eq!(actual, 1); - - // Second insertion passes but does not generate a new row - assert!(insert_account_code(&tx, &account_code).is_ok()); - actual = tx.query_row("SELECT Count(*) FROM account_code", [], |row| row.get(0)).unwrap(); - assert_eq!(actual, 1); - } - - #[test] - fn test_auth_info_serialization() { - let exp_key_pair = SecretKey::new(); - let auth_info = AuthInfo::RpoFalcon512(exp_key_pair.clone()); - let bytes = auth_info.to_bytes(); - let actual = AuthInfo::read_from_bytes(&bytes).unwrap(); - match actual { - AuthInfo::RpoFalcon512(act_key_pair) => { - assert_eq!(exp_key_pair.to_bytes(), act_key_pair.to_bytes()); - assert_eq!(exp_key_pair.public_key(), act_key_pair.public_key()); - }, - } - } - - #[test] - fn test_auth_info_store() { - let exp_key_pair = SecretKey::new(); - - let mut store = create_test_store(); - - let account_id = 
AccountId::try_from(3238098370154045919u64).unwrap(); - { - let tx = store.db.transaction().unwrap(); - insert_account_auth(&tx, account_id, &AuthInfo::RpoFalcon512(exp_key_pair.clone())) - .unwrap(); - tx.commit().unwrap(); - } - - let account_auth = store.get_account_auth(account_id).unwrap(); - match account_auth { - AuthInfo::RpoFalcon512(act_key_pair) => { - assert_eq!(exp_key_pair.to_bytes(), act_key_pair.to_bytes()); - assert_eq!(exp_key_pair.public_key(), act_key_pair.public_key()); - }, - } - } -} diff --git a/src/store/sqlite_store/chain_data.rs b/src/store/sqlite_store/chain_data.rs deleted file mode 100644 index 4223fb6df..000000000 --- a/src/store/sqlite_store/chain_data.rs +++ /dev/null @@ -1,289 +0,0 @@ -use alloc::collections::BTreeMap; -use std::num::NonZeroUsize; - -use clap::error::Result; -use miden_objects::{ - crypto::merkle::{InOrderIndex, MmrPeaks}, - BlockHeader, Digest, -}; -use rusqlite::{params, OptionalExtension, Transaction}; - -use super::SqliteStore; -use crate::{errors::StoreError, store::ChainMmrNodeFilter}; - -type SerializedBlockHeaderData = (i64, String, String, bool); -type SerializedBlockHeaderParts = (u64, String, String, bool); - -type SerializedChainMmrNodeData = (i64, String); -type SerializedChainMmrNodeParts = (u64, String); - -// CHAIN MMR NODE FILTER -// -------------------------------------------------------------------------------------------- - -impl ChainMmrNodeFilter<'_> { - fn to_query(&self) -> String { - let base = String::from("SELECT id, node FROM chain_mmr_nodes"); - match self { - ChainMmrNodeFilter::All => base, - ChainMmrNodeFilter::List(ids) => { - let formatted_list = ids - .iter() - .map(|id| (Into::::into(*id)).to_string()) - .collect::>() - .join(","); - format!("{base} WHERE id IN ({})", formatted_list) - }, - } - } -} - -impl SqliteStore { - pub(crate) fn insert_block_header( - &self, - block_header: BlockHeader, - chain_mmr_peaks: MmrPeaks, - has_client_notes: bool, - ) -> Result<(), StoreError> { - let chain_mmr_peaks = chain_mmr_peaks.peaks().to_vec(); - let (block_num, header, chain_mmr, has_client_notes) = - serialize_block_header(block_header, chain_mmr_peaks, has_client_notes)?; - const QUERY: &str = "\ - INSERT INTO block_headers - (block_num, header, chain_mmr_peaks, has_client_notes) - VALUES (?, ?, ?, ?)"; - - self.db - .execute(QUERY, params![block_num, header, chain_mmr, has_client_notes])?; - - Ok(()) - } - - pub(crate) fn get_block_headers( - &self, - block_numbers: &[u32], - ) -> Result, StoreError> { - let formatted_block_numbers_list = block_numbers - .iter() - .map(|block_number| (*block_number as i64).to_string()) - .collect::>() - .join(","); - let query = format!( - "SELECT block_num, header, chain_mmr_peaks, has_client_notes FROM block_headers WHERE block_num IN ({})", - formatted_block_numbers_list - ); - self.db - .prepare(&query)? - .query_map(params![], parse_block_headers_columns)? - .map(|result| Ok(result?).and_then(parse_block_header)) - .collect() - } - - pub(crate) fn get_tracked_block_headers(&self) -> Result, StoreError> { - const QUERY: &str = "SELECT block_num, header, chain_mmr_peaks, has_client_notes FROM block_headers WHERE has_client_notes=true"; - self.db - .prepare(QUERY)? - .query_map(params![], parse_block_headers_columns)? - .map(|result| Ok(result?).and_then(parse_block_header).map(|(block, _)| block)) - .collect() - } - - pub(crate) fn get_chain_mmr_nodes( - &self, - filter: ChainMmrNodeFilter, - ) -> Result, StoreError> { - self.db - .prepare(&filter.to_query())? 
- .query_map(params![], parse_chain_mmr_nodes_columns)? - .map(|result| Ok(result?).and_then(parse_chain_mmr_nodes)) - .collect() - } - - pub(crate) fn get_chain_mmr_peaks_by_block_num( - &self, - block_num: u32, - ) -> Result { - const QUERY: &str = "SELECT chain_mmr_peaks FROM block_headers WHERE block_num = ?"; - - let mmr_peaks = self - .db - .prepare(QUERY)? - .query_row(params![block_num], |row| { - let peaks: String = row.get(0)?; - Ok(peaks) - }) - .optional()?; - - if let Some(mmr_peaks) = mmr_peaks { - return parse_mmr_peaks(block_num, mmr_peaks); - } - - Ok(MmrPeaks::new(0, vec![])?) - } - - /// Inserts a list of MMR authentication nodes to the Chain MMR nodes table. - pub(crate) fn insert_chain_mmr_nodes( - tx: &Transaction<'_>, - nodes: &[(InOrderIndex, Digest)], - ) -> Result<(), StoreError> { - for (index, node) in nodes { - insert_chain_mmr_node(tx, *index, *node)?; - } - Ok(()) - } - - /// Inserts a block header using a [rusqlite::Transaction] - pub(crate) fn insert_block_header_tx( - tx: &Transaction<'_>, - block_header: BlockHeader, - chain_mmr_peaks: MmrPeaks, - has_client_notes: bool, - ) -> Result<(), StoreError> { - let chain_mmr_peaks = chain_mmr_peaks.peaks().to_vec(); - let (block_num, header, chain_mmr, has_client_notes) = - serialize_block_header(block_header, chain_mmr_peaks, has_client_notes)?; - const QUERY: &str = "\ - INSERT INTO block_headers - (block_num, header, chain_mmr_peaks, has_client_notes) - VALUES (?, ?, ?, ?)"; - tx.execute(QUERY, params![block_num, header, chain_mmr, has_client_notes])?; - Ok(()) - } -} - -// HELPERS -// ================================================================================================ - -/// Inserts a node represented by its in-order index and the node value. -fn insert_chain_mmr_node( - tx: &Transaction<'_>, - id: InOrderIndex, - node: Digest, -) -> Result<(), StoreError> { - let (id, node) = serialize_chain_mmr_node(id, node)?; - const QUERY: &str = "INSERT INTO chain_mmr_nodes (id, node) VALUES (?, ?)"; - tx.execute(QUERY, params![id, node])?; - Ok(()) -} - -fn parse_mmr_peaks(forest: u32, peaks_nodes: String) -> Result { - let mmr_peaks_nodes: Vec = - serde_json::from_str(&peaks_nodes).map_err(StoreError::JsonDataDeserializationError)?; - - MmrPeaks::new(forest as usize, mmr_peaks_nodes).map_err(StoreError::MmrError) -} - -fn serialize_block_header( - block_header: BlockHeader, - chain_mmr_peaks: Vec, - has_client_notes: bool, -) -> Result { - let block_num = block_header.block_num(); - let header = - serde_json::to_string(&block_header).map_err(StoreError::InputSerializationError)?; - let chain_mmr_peaks = - serde_json::to_string(&chain_mmr_peaks).map_err(StoreError::InputSerializationError)?; - - Ok((block_num as i64, header, chain_mmr_peaks, has_client_notes)) -} - -fn parse_block_headers_columns( - row: &rusqlite::Row<'_>, -) -> Result { - let block_num: i64 = row.get(0)?; - let header: String = row.get(1)?; - let chain_mmr: String = row.get(2)?; - let has_client_notes: bool = row.get(3)?; - - Ok((block_num as u64, header, chain_mmr, has_client_notes)) -} - -fn parse_block_header( - serialized_block_header_parts: SerializedBlockHeaderParts, -) -> Result<(BlockHeader, bool), StoreError> { - let (_, header, _, has_client_notes) = serialized_block_header_parts; - - Ok(( - serde_json::from_str(&header).map_err(StoreError::JsonDataDeserializationError)?, - has_client_notes, - )) -} - -fn serialize_chain_mmr_node( - id: InOrderIndex, - node: Digest, -) -> Result { - let id: u64 = id.into(); - let node = 
serde_json::to_string(&node).map_err(StoreError::InputSerializationError)?; - Ok((id as i64, node)) -} - -fn parse_chain_mmr_nodes_columns( - row: &rusqlite::Row<'_>, -) -> Result { - let id: i64 = row.get(0)?; - let node = row.get(1)?; - Ok((id as u64, node)) -} - -fn parse_chain_mmr_nodes( - serialized_chain_mmr_node_parts: SerializedChainMmrNodeParts, -) -> Result<(InOrderIndex, Digest), StoreError> { - let (id, node) = serialized_chain_mmr_node_parts; - - let id = InOrderIndex::new(NonZeroUsize::new(id as usize).unwrap()); - let node: Digest = - serde_json::from_str(&node).map_err(StoreError::JsonDataDeserializationError)?; - Ok((id, node)) -} - -#[cfg(test)] -mod test { - use miden_objects::{crypto::merkle::MmrPeaks, BlockHeader}; - - use crate::store::{ - sqlite_store::{tests::create_test_store, SqliteStore}, - Store, - }; - - fn insert_dummy_block_headers(store: &mut SqliteStore) -> Vec { - let block_headers: Vec = - (0..5).map(|block_num| BlockHeader::mock(block_num, None, None, &[])).collect(); - let tx = store.db.transaction().unwrap(); - let dummy_peaks = MmrPeaks::new(0, Vec::new()).unwrap(); - (0..5).for_each(|block_num| { - SqliteStore::insert_block_header_tx( - &tx, - block_headers[block_num], - dummy_peaks.clone(), - false, - ) - .unwrap() - }); - tx.commit().unwrap(); - - block_headers - } - - #[test] - fn insert_and_get_block_headers_by_number() { - let mut store = create_test_store(); - let block_headers = insert_dummy_block_headers(&mut store); - - let block_header = store.get_block_header_by_num(3).unwrap(); - assert_eq!(block_headers[3], block_header.0); - } - - #[test] - fn insert_and_get_block_headers_by_list() { - let mut store = create_test_store(); - let mock_block_headers = insert_dummy_block_headers(&mut store); - - let block_headers: Vec = store - .get_block_headers(&[1, 3]) - .unwrap() - .into_iter() - .map(|(block_header, _has_notes)| block_header) - .collect(); - assert_eq!(&[mock_block_headers[1], mock_block_headers[3]], &block_headers[..]); - } -} diff --git a/src/store/sqlite_store/migrations.rs b/src/store/sqlite_store/migrations.rs deleted file mode 100644 index 0290f2d48..000000000 --- a/src/store/sqlite_store/migrations.rs +++ /dev/null @@ -1,20 +0,0 @@ -use lazy_static::lazy_static; -use rusqlite::Connection; -use rusqlite_migration::{Migrations, M}; - -use crate::errors::StoreError; - -// MIGRATIONS -// ================================================================================================ - -lazy_static! { - static ref MIGRATIONS: Migrations<'static> = - Migrations::new(vec![M::up(include_str!("store.sql")),]); -} - -// PUBLIC FUNCTIONS -// ================================================================================================ - -pub(crate) fn update_to_latest(conn: &mut Connection) -> Result<(), StoreError> { - Ok(MIGRATIONS.to_latest(conn)?) 
-} diff --git a/src/store/sqlite_store/mod.rs b/src/store/sqlite_store/mod.rs deleted file mode 100644 index 972818c65..000000000 --- a/src/store/sqlite_store/mod.rs +++ /dev/null @@ -1,291 +0,0 @@ -use alloc::collections::BTreeMap; - -use miden_objects::{ - accounts::{Account, AccountId, AccountStub}, - crypto::merkle::{InOrderIndex, MmrPeaks}, - notes::NoteId, - transaction::TransactionId, - BlockHeader, Digest, Word, -}; -use rusqlite::Connection; - -use super::{ - AuthInfo, ChainMmrNodeFilter, InputNoteRecord, NoteFilter, OutputNoteRecord, Store, - TransactionFilter, -}; -use crate::{ - client::{ - sync::SyncedNewNotes, - transactions::{TransactionRecord, TransactionResult}, - }, - config::StoreConfig, - errors::StoreError, -}; - -mod accounts; -mod chain_data; -mod migrations; -mod notes; -mod sync; -mod transactions; - -// SQLITE STORE -// ================================================================================================ -/// -/// Represents a connection with an sqlite database -/// -/// -/// Current table definitions can be found at `store.sql` migration file. One particular column -/// type used is JSON, for which you can look more info at [sqlite's official documentation](https://www.sqlite.org/json1.html). -/// In the case of json, some caveats must be taken: -/// -/// - To insert json values you must use sqlite's `json` function in the query alongside named -/// parameters, and the provided parameter must be a valid json. That is: -/// -/// ```sql -/// INSERT INTO SOME_TABLE -/// (some_field) -/// VALUES (json(:some_field))") -/// ``` -/// -/// ```ignore -/// let metadata = format!(r#"{{"some_inner_field": {some_field}, "some_other_inner_field": {some_other_field}}}"#); -/// ``` -/// -/// (Using raw string literals for the jsons is encouraged if possible) -/// -/// - To get data from any of the json fields you can use the `json_extract` function (in some -/// cases you'll need to do some explicit type casting to help rusqlite figure out types): -/// -/// ```sql -/// SELECT CAST(json_extract(some_json_col, '$.some_json_field') AS TEXT) from some_table -/// ``` -/// -/// - For some datatypes you'll need to do some manual serialization/deserialization. For example, -/// suppose one of your json fields is an array of digests. Then you'll need to -/// - Create the json with an array of strings representing the digests: -/// -/// ```ignore -/// let some_array_field = some_array -/// .into_iter() -/// .map(array_elem_to_string) -/// .collect::>() -/// .join(","); -/// -/// Some(format!( -/// r#"{{ -/// "some_array_field": [{some_array_field}] -/// }}"# -/// )), -/// ``` -/// -/// - When deserializing, handling the extra symbols (`[`, `]`, `,`, `"`). For that you can use -/// the `parse_json_array` function: -/// -/// ```ignore -/// let some_array = parse_json_array(some_array_field) -/// .into_iter() -/// .map(parse_json_byte_str) -/// .collect::, _>>()?; -/// ``` -/// - Thus, if needed you can create a struct representing the json values and use serde_json to -/// simplify all of the serialization/deserialization logic -pub struct SqliteStore { - pub(crate) db: Connection, -} - -impl SqliteStore { - // CONSTRUCTORS - // -------------------------------------------------------------------------------------------- - - /// Returns a new instance of [Store] instantiated with the specified configuration options. 
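    ///
    /// A minimal usage sketch (assuming a `StoreConfig` built from a database file path, as the
    /// test helpers below do via `TryInto`; the exact path is a placeholder):
    ///
    /// ```ignore
    /// let config: StoreConfig = "path/to/store.sqlite3".to_string().try_into()?;
    /// let store = SqliteStore::new(config)?;
    /// ```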
- pub fn new(config: StoreConfig) -> Result { - let mut db = Connection::open(config.database_filepath)?; - migrations::update_to_latest(&mut db)?; - - Ok(Self { db }) - } -} - -// SQLite implementation of the Store trait -// -// To simplify, all implementations rely on inner SqliteStore functions that map 1:1 by name -// This way, the actual implementations are grouped by entity types in their own sub-modules -impl Store for SqliteStore { - fn get_note_tags(&self) -> Result, StoreError> { - self.get_note_tags() - } - - fn add_note_tag(&mut self, tag: u64) -> Result { - self.add_note_tag(tag) - } - - fn get_sync_height(&self) -> Result { - self.get_sync_height() - } - - fn apply_state_sync( - &mut self, - block_header: BlockHeader, - nullifiers: Vec, - committed_notes: SyncedNewNotes, - committed_transactions: &[TransactionId], - new_mmr_peaks: MmrPeaks, - new_authentication_nodes: &[(InOrderIndex, Digest)], - updated_onchain_accounts: &[Account], - ) -> Result<(), StoreError> { - self.apply_state_sync( - block_header, - nullifiers, - committed_notes, - committed_transactions, - new_mmr_peaks, - new_authentication_nodes, - updated_onchain_accounts, - ) - } - - fn get_transactions( - &self, - transaction_filter: TransactionFilter, - ) -> Result, StoreError> { - self.get_transactions(transaction_filter) - } - - fn apply_transaction(&mut self, tx_result: TransactionResult) -> Result<(), StoreError> { - self.apply_transaction(tx_result) - } - - fn get_input_notes(&self, note_filter: NoteFilter) -> Result, StoreError> { - self.get_input_notes(note_filter) - } - - fn get_output_notes( - &self, - note_filter: NoteFilter, - ) -> Result, StoreError> { - self.get_output_notes(note_filter) - } - - fn get_input_note(&self, note_id: NoteId) -> Result { - self.get_input_note(note_id) - } - - fn insert_input_note(&mut self, note: &InputNoteRecord) -> Result<(), StoreError> { - self.insert_input_note(note) - } - - fn insert_block_header( - &self, - block_header: BlockHeader, - chain_mmr_peaks: MmrPeaks, - has_client_notes: bool, - ) -> Result<(), StoreError> { - self.insert_block_header(block_header, chain_mmr_peaks, has_client_notes) - } - - fn get_block_headers( - &self, - block_numbers: &[u32], - ) -> Result, StoreError> { - self.get_block_headers(block_numbers) - } - - fn get_tracked_block_headers(&self) -> Result, StoreError> { - self.get_tracked_block_headers() - } - - fn get_chain_mmr_nodes( - &self, - filter: ChainMmrNodeFilter, - ) -> Result, StoreError> { - self.get_chain_mmr_nodes(filter) - } - - fn get_chain_mmr_peaks_by_block_num(&self, block_num: u32) -> Result { - self.get_chain_mmr_peaks_by_block_num(block_num) - } - - fn insert_account( - &mut self, - account: &Account, - account_seed: Option, - auth_info: &AuthInfo, - ) -> Result<(), StoreError> { - self.insert_account(account, account_seed, auth_info) - } - - fn get_account_ids(&self) -> Result, StoreError> { - self.get_account_ids() - } - - fn get_account_stubs(&self) -> Result)>, StoreError> { - self.get_account_stubs() - } - - fn get_account_stub( - &self, - account_id: AccountId, - ) -> Result<(AccountStub, Option), StoreError> { - self.get_account_stub(account_id) - } - - fn get_account(&self, account_id: AccountId) -> Result<(Account, Option), StoreError> { - self.get_account(account_id) - } - - fn get_account_auth(&self, account_id: AccountId) -> Result { - self.get_account_auth(account_id) - } -} - -// TESTS -// ================================================================================================ - -#[cfg(test)] -pub 
mod tests { - use std::env::temp_dir; - - use rusqlite::Connection; - use uuid::Uuid; - - use super::{migrations, SqliteStore}; - use crate::{ - client::get_random_coin, - config::{ClientConfig, RpcConfig}, - mock::{MockClient, MockRpcApi}, - }; - - pub fn create_test_client() -> MockClient { - let client_config = ClientConfig { - store: create_test_store_path() - .into_os_string() - .into_string() - .unwrap() - .try_into() - .unwrap(), - rpc: RpcConfig::default(), - }; - - let rpc_endpoint = client_config.rpc.endpoint.to_string(); - let store = SqliteStore::new((&client_config).into()).unwrap(); - let rng = get_random_coin(); - let executor_store = SqliteStore::new((&client_config).into()).unwrap(); - - MockClient::new(MockRpcApi::new(&rpc_endpoint), rng, store, executor_store).unwrap() - } - - pub(crate) fn create_test_store_path() -> std::path::PathBuf { - let mut temp_file = temp_dir(); - temp_file.push(format!("{}.sqlite3", Uuid::new_v4())); - temp_file - } - - pub(crate) fn create_test_store() -> SqliteStore { - let temp_file = create_test_store_path(); - let mut db = Connection::open(temp_file).unwrap(); - migrations::update_to_latest(&mut db).unwrap(); - - SqliteStore { db } - } -} diff --git a/src/store/sqlite_store/notes.rs b/src/store/sqlite_store/notes.rs deleted file mode 100644 index ecee9f456..000000000 --- a/src/store/sqlite_store/notes.rs +++ /dev/null @@ -1,561 +0,0 @@ -use std::fmt; - -use clap::error::Result; -use miden_objects::{ - crypto::utils::{Deserializable, Serializable}, - notes::{NoteAssets, NoteId, NoteInclusionProof, NoteMetadata, NoteScript, Nullifier}, - Digest, -}; -use rusqlite::{named_params, params, Transaction}; - -use super::SqliteStore; -use crate::{ - errors::StoreError, - store::{InputNoteRecord, NoteFilter, NoteRecordDetails, NoteStatus, OutputNoteRecord}, -}; - -fn insert_note_query(table_name: NoteTable) -> String { - format!("\ - INSERT INTO {table_name} - (note_id, assets, recipient, status, metadata, details, inclusion_proof) - VALUES (:note_id, :assets, :recipient, :status, json(:metadata), json(:details), json(:inclusion_proof))") -} - -// TYPES -// ================================================================================================ - -type SerializedInputNoteData = ( - String, - Vec, - String, - String, - Option, - String, - String, - Vec, - Option, -); -type SerializedOutputNoteData = ( - String, - Vec, - String, - String, - String, - Option, - Option, - Option>, - Option, -); - -type SerializedInputNoteParts = - (Vec, String, String, String, Option, Option, Vec); -type SerializedOutputNoteParts = - (Vec, Option, String, String, String, Option, Option>); - -// NOTE TABLE -// ================================================================================================ - -/// Represents a table in the SQL DB used to store notes based on their use case -enum NoteTable { - InputNotes, - OutputNotes, -} - -impl fmt::Display for NoteTable { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - match self { - NoteTable::InputNotes => write!(f, "input_notes"), - NoteTable::OutputNotes => write!(f, "output_notes"), - } - } -} - -// NOTE FILTER -// ================================================================================================ - -impl NoteFilter { - /// Returns a [String] containing the query for this Filter - fn to_query(&self, notes_table: NoteTable) -> String { - let base = format!( - "SELECT - note.assets, - note.details, - note.recipient, - note.status, - note.metadata, - note.inclusion_proof, - 
script.serialized_note_script - from {notes_table} AS note - LEFT OUTER JOIN notes_scripts AS script - ON note.details IS NOT NULL AND - json_extract(note.details, '$.script_hash') = script.script_hash" - ); - - match self { - NoteFilter::All => base, - NoteFilter::Committed => format!("{base} WHERE status = 'Committed'"), - NoteFilter::Consumed => format!("{base} WHERE status = 'Consumed'"), - NoteFilter::Pending => format!("{base} WHERE status = 'Pending'"), - } - } -} - -// NOTES STORE METHODS -// -------------------------------------------------------------------------------------------- - -impl SqliteStore { - pub(crate) fn get_input_notes( - &self, - filter: NoteFilter, - ) -> Result, StoreError> { - self.db - .prepare(&filter.to_query(NoteTable::InputNotes))? - .query_map([], parse_input_note_columns) - .expect("no binding parameters used in query") - .map(|result| Ok(result?).and_then(parse_input_note)) - .collect::, _>>() - } - - /// Retrieves the output notes from the database - pub(crate) fn get_output_notes( - &self, - filter: NoteFilter, - ) -> Result, StoreError> { - self.db - .prepare(&filter.to_query(NoteTable::OutputNotes))? - .query_map([], parse_output_note_columns) - .expect("no binding parameters used in query") - .map(|result| Ok(result?).and_then(parse_output_note)) - .collect::, _>>() - } - - pub(crate) fn get_input_note(&self, note_id: NoteId) -> Result { - let query_id = ¬e_id.inner().to_string(); - - const QUERY: &str = "SELECT - note.assets, - note.details, - note.recipient, - note.status, - note.metadata, - note.inclusion_proof, - script.serialized_note_script - from input_notes AS note - LEFT OUTER JOIN notes_scripts AS script - ON note.details IS NOT NULL AND - json_extract(note.details, '$.script_hash') = script.script_hash - WHERE note.note_id = ?"; - - self.db - .prepare(QUERY)? - .query_map(params![query_id.to_string()], parse_input_note_columns)? - .map(|result| Ok(result?).and_then(parse_input_note)) - .next() - .ok_or(StoreError::InputNoteNotFound(note_id))? - } - - pub(crate) fn insert_input_note(&mut self, note: &InputNoteRecord) -> Result<(), StoreError> { - let tx = self.db.transaction()?; - - insert_input_note_tx(&tx, note)?; - - Ok(tx.commit()?) - } - - /// Returns the nullifiers of all unspent input notes - pub fn get_unspent_input_note_nullifiers(&self) -> Result, StoreError> { - const QUERY: &str = "SELECT json_extract(details, '$.nullifier') FROM input_notes WHERE status = 'Committed'"; - - self.db - .prepare(QUERY)? - .query_map([], |row| row.get(0)) - .expect("no binding parameters used in query") - .map(|result| { - result.map_err(|err| StoreError::ParsingError(err.to_string())).and_then( - |v: String| { - Digest::try_from(v).map(Nullifier::from).map_err(StoreError::HexParseError) - }, - ) - }) - .collect::, _>>() - } -} - -// HELPERS -// ================================================================================================ - -/// Inserts the provided input note into the database -pub(super) fn insert_input_note_tx( - tx: &Transaction<'_>, - note: &InputNoteRecord, -) -> Result<(), StoreError> { - let ( - note_id, - assets, - recipient, - status, - metadata, - details, - note_script_hash, - serialized_note_script, - inclusion_proof, - ) = serialize_input_note(note)?; - - tx.execute( - &insert_note_query(NoteTable::InputNotes), - named_params! 
{ - ":note_id": note_id, - ":assets": assets, - ":recipient": recipient, - ":status": status, - ":metadata": metadata, - ":details": details, - ":inclusion_proof": inclusion_proof, - }, - ) - .map_err(|err| StoreError::QueryError(err.to_string())) - .map(|_| ())?; - - const QUERY: &str = - "INSERT OR IGNORE INTO notes_scripts (script_hash, serialized_note_script) VALUES (?, ?)"; - tx.execute(QUERY, params![note_script_hash, serialized_note_script,]) - .map_err(|err| StoreError::QueryError(err.to_string())) - .map(|_| ()) -} - -/// Inserts the provided input note into the database -pub fn insert_output_note_tx( - tx: &Transaction<'_>, - note: &OutputNoteRecord, -) -> Result<(), StoreError> { - let ( - note_id, - assets, - recipient, - status, - metadata, - details, - note_script_hash, - serialized_note_script, - inclusion_proof, - ) = serialize_output_note(note)?; - - tx.execute( - &insert_note_query(NoteTable::OutputNotes), - named_params! { - ":note_id": note_id, - ":assets": assets, - ":recipient": recipient, - ":status": status, - ":metadata": metadata, - ":details": details, - ":inclusion_proof": inclusion_proof, - }, - ) - .map_err(|err| StoreError::QueryError(err.to_string())) - .map(|_| ())?; - - const QUERY: &str = - "INSERT OR IGNORE INTO notes_scripts (script_hash, serialized_note_script) VALUES (?, ?)"; - tx.execute(QUERY, params![note_script_hash, serialized_note_script,]) - .map_err(|err| StoreError::QueryError(err.to_string())) - .map(|_| ()) -} - -/// Parse input note columns from the provided row into native types. -fn parse_input_note_columns( - row: &rusqlite::Row<'_>, -) -> Result { - let assets: Vec = row.get(0)?; - let details: String = row.get(1)?; - let recipient: String = row.get(2)?; - let status: String = row.get(3)?; - let metadata: Option = row.get(4)?; - let inclusion_proof: Option = row.get(5)?; - let serialized_note_script: Vec = row.get(6)?; - - Ok(( - assets, - details, - recipient, - status, - metadata, - inclusion_proof, - serialized_note_script, - )) -} - -/// Parse a note from the provided parts. 
-fn parse_input_note( - serialized_input_note_parts: SerializedInputNoteParts, -) -> Result { - let ( - note_assets, - note_details, - recipient, - status, - note_metadata, - note_inclusion_proof, - serialized_note_script, - ) = serialized_input_note_parts; - - // Merge the info that comes from the input notes table and the notes script table - let note_script = NoteScript::read_from_bytes(&serialized_note_script)?; - let note_details: NoteRecordDetails = - serde_json::from_str(¬e_details).map_err(StoreError::JsonDataDeserializationError)?; - let note_details = NoteRecordDetails::new( - note_details.nullifier().to_string(), - note_script, - note_details.inputs().clone(), - note_details.serial_num(), - ); - - let note_metadata: Option = if let Some(metadata_as_json_str) = note_metadata { - Some( - serde_json::from_str(&metadata_as_json_str) - .map_err(StoreError::JsonDataDeserializationError)?, - ) - } else { - None - }; - - let note_assets = NoteAssets::read_from_bytes(¬e_assets)?; - - let inclusion_proof = match note_inclusion_proof { - Some(note_inclusion_proof) => { - let note_inclusion_proof: NoteInclusionProof = - serde_json::from_str(¬e_inclusion_proof) - .map_err(StoreError::JsonDataDeserializationError)?; - - Some(note_inclusion_proof) - }, - _ => None, - }; - - let recipient = Digest::try_from(recipient)?; - let id = NoteId::new(recipient, note_assets.commitment()); - let status: NoteStatus = serde_json::from_str(&format!("\"{status}\"")) - .map_err(StoreError::JsonDataDeserializationError)?; - - Ok(InputNoteRecord::new( - id, - recipient, - note_assets, - status, - note_metadata, - inclusion_proof, - note_details, - )) -} - -/// Serialize the provided input note into database compatible types. -pub(crate) fn serialize_input_note( - note: &InputNoteRecord, -) -> Result { - let note_id = note.id().inner().to_string(); - let note_assets = note.assets().to_bytes(); - - let (inclusion_proof, status) = match note.inclusion_proof() { - Some(proof) => { - let block_num = proof.origin().block_num; - let node_index = proof.origin().node_index.value(); - let sub_hash = proof.sub_hash(); - let note_root = proof.note_root(); - - let inclusion_proof = serde_json::to_string(&NoteInclusionProof::new( - block_num, - sub_hash, - note_root, - node_index, - proof.note_path().clone(), - )?) - .map_err(StoreError::InputSerializationError)?; - - let status = serde_json::to_string(&NoteStatus::Committed) - .map_err(StoreError::InputSerializationError)? - .replace('\"', ""); - (Some(inclusion_proof), status) - }, - None => { - let status = serde_json::to_string(&NoteStatus::Pending) - .map_err(StoreError::InputSerializationError)? - .replace('\"', ""); - - (None, status) - }, - }; - let recipient = note.recipient().to_hex(); - - let metadata = if let Some(metadata) = note.metadata() { - Some(serde_json::to_string(metadata).map_err(StoreError::InputSerializationError)?) - } else { - None - }; - - let details = - serde_json::to_string(¬e.details()).map_err(StoreError::InputSerializationError)?; - let note_script_hash = note.details().script_hash().to_hex(); - let serialized_note_script = note.details().script().to_bytes(); - - Ok(( - note_id, - note_assets, - recipient, - status, - metadata, - details, - note_script_hash, - serialized_note_script, - inclusion_proof, - )) -} - -/// Parse input note columns from the provided row into native types. 
-fn parse_output_note_columns( - row: &rusqlite::Row<'_>, -) -> Result { - let assets: Vec = row.get(0)?; - let details: Option = row.get(1)?; - let recipient: String = row.get(2)?; - let status: String = row.get(3)?; - let metadata: String = row.get(4)?; - let inclusion_proof: Option = row.get(5)?; - let serialized_note_script: Option> = row.get(6)?; - - Ok(( - assets, - details, - recipient, - status, - metadata, - inclusion_proof, - serialized_note_script, - )) -} - -/// Parse a note from the provided parts. -fn parse_output_note( - serialized_output_note_parts: SerializedOutputNoteParts, -) -> Result { - let ( - note_assets, - note_details, - recipient, - status, - note_metadata, - note_inclusion_proof, - serialized_note_script, - ) = serialized_output_note_parts; - - let note_details: Option = if let Some(details_as_json_str) = note_details { - // Merge the info that comes from the input notes table and the notes script table - let serialized_note_script = serialized_note_script - .expect("Has note details so it should have the serialized script"); - let note_script = NoteScript::read_from_bytes(&serialized_note_script)?; - let note_details: NoteRecordDetails = serde_json::from_str(&details_as_json_str) - .map_err(StoreError::JsonDataDeserializationError)?; - let note_details = NoteRecordDetails::new( - note_details.nullifier().to_string(), - note_script, - note_details.inputs().clone(), - note_details.serial_num(), - ); - - Some(note_details) - } else { - None - }; - - let note_metadata: NoteMetadata = - serde_json::from_str(¬e_metadata).map_err(StoreError::JsonDataDeserializationError)?; - - let note_assets = NoteAssets::read_from_bytes(¬e_assets)?; - - let inclusion_proof = match note_inclusion_proof { - Some(note_inclusion_proof) => { - let note_inclusion_proof: NoteInclusionProof = - serde_json::from_str(¬e_inclusion_proof) - .map_err(StoreError::JsonDataDeserializationError)?; - - Some(note_inclusion_proof) - }, - _ => None, - }; - - let recipient = Digest::try_from(recipient)?; - let id = NoteId::new(recipient, note_assets.commitment()); - let status: NoteStatus = serde_json::from_str(&format!("\"{status}\"")) - .map_err(StoreError::JsonDataDeserializationError)?; - - Ok(OutputNoteRecord::new( - id, - recipient, - note_assets, - status, - note_metadata, - inclusion_proof, - note_details, - )) -} - -/// Serialize the provided output note into database compatible types. -pub(crate) fn serialize_output_note( - note: &OutputNoteRecord, -) -> Result { - let note_id = note.id().inner().to_string(); - let note_assets = note.assets().to_bytes(); - let (inclusion_proof, status) = match note.inclusion_proof() { - Some(proof) => { - let block_num = proof.origin().block_num; - let node_index = proof.origin().node_index.value(); - let sub_hash = proof.sub_hash(); - let note_root = proof.note_root(); - - let inclusion_proof = serde_json::to_string(&NoteInclusionProof::new( - block_num, - sub_hash, - note_root, - node_index, - proof.note_path().clone(), - )?) - .map_err(StoreError::InputSerializationError)?; - - let status = serde_json::to_string(&NoteStatus::Committed) - .map_err(StoreError::InputSerializationError)? - .replace('\"', ""); - - (Some(inclusion_proof), status) - }, - None => { - let status = serde_json::to_string(&NoteStatus::Pending) - .map_err(StoreError::InputSerializationError)? 
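// `serde_json::to_string` wraps the enum variant in quotes; stripping them stores the
// bare variant name, matching the `status` CHECK constraint on the notes tables.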
- .replace('\"', ""); - - (None, status) - }, - }; - let recipient = note.recipient().to_hex(); - - let metadata = - serde_json::to_string(note.metadata()).map_err(StoreError::InputSerializationError)?; - - let details = if let Some(details) = note.details() { - Some(serde_json::to_string(&details).map_err(StoreError::InputSerializationError)?) - } else { - None - }; - let note_script_hash = note.details().map(|details| details.script_hash().to_hex()); - let serialized_note_script = note.details().map(|details| details.script().to_bytes()); - - Ok(( - note_id, - note_assets, - recipient, - status, - metadata, - details, - note_script_hash, - serialized_note_script, - inclusion_proof, - )) -} diff --git a/src/store/sqlite_store/store.sql b/src/store/sqlite_store/store.sql deleted file mode 100644 index 334485b13..000000000 --- a/src/store/sqlite_store/store.sql +++ /dev/null @@ -1,193 +0,0 @@ --- Create account_code table -CREATE TABLE account_code ( - root BLOB NOT NULL, -- root of the Merkle tree for all exported procedures in account module. - procedures BLOB NOT NULL, -- serialized procedure digests for the account code. - module BLOB NOT NULL, -- serialized ModuleAst for the account code. - PRIMARY KEY (root) -); - --- Create account_storage table -CREATE TABLE account_storage ( - root BLOB NOT NULL, -- root of the account storage Merkle tree. - slots BLOB NOT NULL, -- serialized key-value pair of non-empty account slots. - PRIMARY KEY (root) -); - --- Create account_vaults table -CREATE TABLE account_vaults ( - root BLOB NOT NULL, -- root of the Merkle tree for the account asset vault. - assets BLOB NOT NULL, -- serialized account vault assets. - PRIMARY KEY (root) -); - --- Create account_auth table -CREATE TABLE account_auth ( - account_id UNSIGNED BIG INT NOT NULL, -- ID of the account - auth_info BLOB NOT NULL, -- Serialized representation of information needed for authentication - PRIMARY KEY (account_id) -); - --- Create accounts table -CREATE TABLE accounts ( - id UNSIGNED BIG INT NOT NULL, -- Account ID. - code_root BLOB NOT NULL, -- Root of the account_code - storage_root BLOB NOT NULL, -- Root of the account_storage Merkle tree. - vault_root BLOB NOT NULL, -- Root of the account_vault Merkle tree. - nonce BIGINT NOT NULL, -- Account nonce. - committed BOOLEAN NOT NULL, -- True if recorded, false if not. - account_seed BLOB NULL, -- Account seed used to generate the ID. Expected to be NULL for non-new accounts - PRIMARY KEY (id, nonce), - FOREIGN KEY (code_root) REFERENCES account_code(root), - FOREIGN KEY (storage_root) REFERENCES account_storage(root), - FOREIGN KEY (vault_root) REFERENCES account_vaults(root) - - CONSTRAINT check_seed_nonzero CHECK (NOT (nonce = 0 AND account_seed IS NULL)) -); - --- Create transactions table -CREATE TABLE transactions ( - id BLOB NOT NULL, -- Transaction ID (hash of various components) - account_id UNSIGNED BIG INT NOT NULL, -- ID of the account against which the transaction was executed. - init_account_state BLOB NOT NULL, -- Hash of the account state before the transaction was executed. - final_account_state BLOB NOT NULL, -- Hash of the account state after the transaction was executed. - input_notes BLOB, -- Serialized list of input note hashes - output_notes BLOB, -- Serialized list of output note hashes - script_hash BLOB, -- Transaction script hash - script_inputs BLOB, -- Transaction script inputs - block_num UNSIGNED BIG INT, -- Block number for the block against which the transaction was executed. 
- commit_height UNSIGNED BIG INT NULL, -- Block number of the block at which the transaction was included in the chain. - FOREIGN KEY (script_hash) REFERENCES transaction_scripts(script_hash), - PRIMARY KEY (id) -); - -CREATE TABLE transaction_scripts ( - script_hash BLOB NOT NULL, -- Transaction script Hash - program BLOB, -- Transaction script program, serialized - - PRIMARY KEY (script_hash) -); - --- Create input notes table -CREATE TABLE input_notes ( - note_id BLOB NOT NULL, -- the note id - recipient BLOB NOT NULL, -- the note recipient - assets BLOB NOT NULL, -- the serialized NoteAssets, including vault hash and list of assets - status TEXT CHECK( status IN ( -- the status of the note - either pending, committed or consumed - 'Pending', 'Committed', 'Consumed' - )), - - inclusion_proof JSON NULL, -- JSON consisting of the following fields: - -- block_num -- number of the block the note was included in - -- note_index -- the index of the note in the note Merkle tree of the block the note was created in. - -- sub_hash -- sub hash of the block the note was included in stored as a hex string - -- note_root -- the note root of the block the note was created in - -- note_path -- the Merkle path to the note in the note Merkle tree of the block the note was created in, stored as an array of digests - - metadata JSON NULL, -- JSON consisting of the following fields: - -- sender_id -- the account ID of the sender - -- tag -- the note tag - - details JSON NOT NULL, -- JSON consisting of the following fields: - -- nullifier -- the nullifier of the note - -- script_hash -- the note's script hash - -- inputs -- the serialized NoteInputs, including inputs hash and list of inputs - -- serial_num -- the note serial number - PRIMARY KEY (note_id) - - CONSTRAINT check_valid_inclusion_proof_json CHECK ( - inclusion_proof IS NULL OR - ( - json_extract(inclusion_proof, '$.origin.block_num') IS NOT NULL AND - json_extract(inclusion_proof, '$.origin.node_index') IS NOT NULL AND - json_extract(inclusion_proof, '$.sub_hash') IS NOT NULL AND - json_extract(inclusion_proof, '$.note_root') IS NOT NULL AND - json_extract(inclusion_proof, '$.note_path') IS NOT NULL - )) - CONSTRAINT check_valid_metadata_json CHECK (metadata IS NULL OR (json_extract(metadata, '$.sender') IS NOT NULL AND json_extract(metadata, '$.tag') IS NOT NULL)) -); - --- Create output notes table -CREATE TABLE output_notes ( - note_id BLOB NOT NULL, -- the note id - recipient BLOB NOT NULL, -- the note recipient - assets BLOB NOT NULL, -- the serialized NoteAssets, including vault hash and list of assets - status TEXT CHECK( status IN ( -- the status of the note - either pending, committed or consumed - 'Pending', 'Committed', 'Consumed' - )), - - inclusion_proof JSON NULL, -- JSON consisting of the following fields: - -- block_num -- number of the block the note was included in - -- note_index -- the index of the note in the note Merkle tree of the block the note was created in. 
- -- sub_hash -- sub hash of the block the note was included in stored as a hex string - -- note_root -- the note root of the block the note was created in - -- note_path -- the Merkle path to the note in the note Merkle tree of the block the note was created in, stored as an array of digests - - metadata JSON NOT NULL, -- JSON consisting of the following fields: - -- sender_id -- the account ID of the sender - -- tag -- the note tag - - details JSON NULL, -- JSON consisting of the following fields: - -- nullifier -- the nullifier of the note - -- script -- the note's script hash - -- inputs -- the serialized NoteInputs, including inputs hash and list of inputs - -- serial_num -- the note serial number - PRIMARY KEY (note_id) - - CONSTRAINT check_valid_inclusion_proof_json CHECK ( - inclusion_proof IS NULL OR - ( - json_extract(inclusion_proof, '$.origin.block_num') IS NOT NULL AND - json_extract(inclusion_proof, '$.origin.node_index') IS NOT NULL AND - json_extract(inclusion_proof, '$.sub_hash') IS NOT NULL AND - json_extract(inclusion_proof, '$.note_root') IS NOT NULL AND - json_extract(inclusion_proof, '$.note_path') IS NOT NULL - )) - CONSTRAINT check_valid_details_json CHECK ( - details IS NULL OR - ( - json_extract(details, '$.nullifier') IS NOT NULL AND - json_extract(details, '$.script_hash') IS NOT NULL AND - json_extract(details, '$.inputs') IS NOT NULL AND - json_extract(details, '$.serial_num') IS NOT NULL - )) - -); - --- Create note's scripts table, used for both input and output notes --- TODO: can't do FOREIGN KEY over json fields, sure we're ok? -CREATE TABLE notes_scripts ( - script_hash BLOB NOT NULL, -- Note script Hash - serialized_note_script BLOB, -- NoteScript, serialized - - PRIMARY KEY (script_hash) -); - --- Create state sync table -CREATE TABLE state_sync ( - block_num UNSIGNED BIG INT NOT NULL, -- the block number of the most recent state sync - tags BLOB NOT NULL, -- the serialized list of tags - PRIMARY KEY (block_num) -); - --- insert initial row into state_sync table -INSERT OR IGNORE INTO state_sync (block_num, tags) -SELECT 0, '[]' -WHERE ( - SELECT COUNT(*) FROM state_sync -) = 0; - --- Create block headers table -CREATE TABLE block_headers ( - block_num UNSIGNED BIG INT NOT NULL, -- block number - header BLOB NOT NULL, -- serialized block header - chain_mmr_peaks BLOB NOT NULL, -- serialized peaks of the chain MMR at this block - has_client_notes BOOL NOT NULL, -- whether the block has notes relevant to the client - PRIMARY KEY (block_num) -); - --- Create chain mmr nodes -CREATE TABLE chain_mmr_nodes ( - id UNSIGNED BIG INT NOT NULL, -- in-order index of the internal MMR node - node BLOB NOT NULL, -- internal node value (hash) - PRIMARY KEY (id) -) diff --git a/src/store/sqlite_store/sync.rs b/src/store/sqlite_store/sync.rs deleted file mode 100644 index f642ae25e..000000000 --- a/src/store/sqlite_store/sync.rs +++ /dev/null @@ -1,160 +0,0 @@ -use miden_objects::{ - accounts::Account, - crypto::merkle::{InOrderIndex, MmrPeaks}, - notes::NoteInclusionProof, - transaction::TransactionId, - BlockHeader, Digest, -}; -use rusqlite::{named_params, params}; - -use super::SqliteStore; -use crate::{ - client::sync::SyncedNewNotes, - errors::StoreError, - store::sqlite_store::{accounts::update_account, notes::insert_input_note_tx}, -}; - -impl SqliteStore { - pub(crate) fn get_note_tags(&self) -> Result, StoreError> { - const QUERY: &str = "SELECT tags FROM state_sync"; - - self.db - .prepare(QUERY)? 
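// The `state_sync` table holds a single row; its `tags` column is a JSON-encoded
// list of note tags, so the one value is read and deserialized below.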
- .query_map([], |row| row.get(0)) - .expect("no binding parameters used in query") - .map(|result| { - result.map_err(|err| StoreError::ParsingError(err.to_string())).and_then( - |v: String| { - serde_json::from_str(&v).map_err(StoreError::JsonDataDeserializationError) - }, - ) - }) - .next() - .expect("state sync tags exist") - } - - pub(super) fn add_note_tag(&mut self, tag: u64) -> Result { - let mut tags = self.get_note_tags()?; - if tags.contains(&tag) { - return Ok(false); - } - tags.push(tag); - let tags = serde_json::to_string(&tags).map_err(StoreError::InputSerializationError)?; - - const QUERY: &str = "UPDATE state_sync SET tags = ?"; - self.db.execute(QUERY, params![tags])?; - - Ok(true) - } - - pub(super) fn get_sync_height(&self) -> Result { - const QUERY: &str = "SELECT block_num FROM state_sync"; - - self.db - .prepare(QUERY)? - .query_map([], |row| row.get(0)) - .expect("no binding parameters used in query") - .map(|result| Ok(result?).map(|v: i64| v as u32)) - .next() - .expect("state sync block number exists") - } - - pub(super) fn apply_state_sync( - &mut self, - block_header: BlockHeader, - nullifiers: Vec, - committed_notes: SyncedNewNotes, - committed_transactions: &[TransactionId], - new_mmr_peaks: MmrPeaks, - new_authentication_nodes: &[(InOrderIndex, Digest)], - updated_onchain_accounts: &[Account], - ) -> Result<(), StoreError> { - let tx = self.db.transaction()?; - - // Update state sync block number - const BLOCK_NUMBER_QUERY: &str = "UPDATE state_sync SET block_num = ?"; - tx.execute(BLOCK_NUMBER_QUERY, params![block_header.block_num()])?; - - // Update spent notes - for nullifier in nullifiers.iter() { - const SPENT_INPUT_NOTE_QUERY: &str = - "UPDATE input_notes SET status = 'Consumed' WHERE json_extract(details, '$.nullifier') = ?"; - let nullifier = nullifier.to_hex(); - tx.execute(SPENT_INPUT_NOTE_QUERY, params![nullifier])?; - - const SPENT_OUTPUT_NOTE_QUERY: &str = - "UPDATE output_notes SET status = 'Consumed' WHERE json_extract(details, '$.nullifier') = ?"; - tx.execute(SPENT_OUTPUT_NOTE_QUERY, params![nullifier])?; - } - - // TODO: Due to the fact that notes are returned based on fuzzy matching of tags, - // this process of marking if the header has notes needs to be revisited - let block_has_relevant_notes = !committed_notes.is_empty(); - Self::insert_block_header_tx(&tx, block_header, new_mmr_peaks, block_has_relevant_notes)?; - - // Insert new authentication nodes (inner nodes of the PartialMmr) - Self::insert_chain_mmr_nodes(&tx, new_authentication_nodes)?; - - // Update tracked notes - for (note_id, inclusion_proof) in committed_notes.new_inclusion_proofs().iter() { - let block_num = inclusion_proof.origin().block_num; - let sub_hash = inclusion_proof.sub_hash(); - let note_root = inclusion_proof.note_root(); - let note_index = inclusion_proof.origin().node_index.value(); - - let inclusion_proof = serde_json::to_string(&NoteInclusionProof::new( - block_num, - sub_hash, - note_root, - note_index, - inclusion_proof.note_path().clone(), - )?) - .map_err(StoreError::InputSerializationError)?; - - const COMMITTED_INPUT_NOTES_QUERY: &str = - "UPDATE input_notes SET status = 'Committed', inclusion_proof = json(:inclusion_proof) WHERE note_id = :note_id"; - - tx.execute( - COMMITTED_INPUT_NOTES_QUERY, - named_params! 
{ - ":inclusion_proof": inclusion_proof, - ":note_id": note_id.inner().to_hex(), - }, - )?; - - // Update output notes - const COMMITTED_OUTPUT_NOTES_QUERY: &str = - "UPDATE output_notes SET status = 'Committed', inclusion_proof = json(:inclusion_proof) WHERE note_id = :note_id"; - - tx.execute( - COMMITTED_OUTPUT_NOTES_QUERY, - named_params! { - ":inclusion_proof": inclusion_proof, - ":note_id": note_id.inner().to_hex(), - }, - )?; - } - - // Commit new public notes - for note in committed_notes.new_public_notes() { - insert_input_note_tx(&tx, ¬e.clone().into())?; - } - - // Mark transactions as committed - Self::mark_transactions_as_committed( - &tx, - block_header.block_num(), - committed_transactions, - )?; - - // Update onchain accounts on the db that have been updated onchain - for account in updated_onchain_accounts { - update_account(&tx, account)?; - } - - // Commit the updates - tx.commit()?; - - Ok(()) - } -} diff --git a/src/store/sqlite_store/transactions.rs b/src/store/sqlite_store/transactions.rs deleted file mode 100644 index 1cb5ae898..000000000 --- a/src/store/sqlite_store/transactions.rs +++ /dev/null @@ -1,344 +0,0 @@ -use alloc::collections::BTreeMap; - -use miden_objects::{ - accounts::AccountId, - assembly::{AstSerdeOptions, ProgramAst}, - crypto::utils::{Deserializable, Serializable}, - transaction::{OutputNotes, ToNullifier, TransactionId, TransactionScript}, - Digest, Felt, -}; -use rusqlite::{params, Transaction}; -use tracing::info; - -use super::{ - accounts::update_account, - notes::{insert_input_note_tx, insert_output_note_tx}, - SqliteStore, -}; -use crate::{ - client::transactions::{TransactionRecord, TransactionResult, TransactionStatus}, - errors::StoreError, - store::{InputNoteRecord, OutputNoteRecord, TransactionFilter}, -}; - -pub(crate) const INSERT_TRANSACTION_QUERY: &str = - "INSERT INTO transactions (id, account_id, init_account_state, final_account_state, \ - input_notes, output_notes, script_hash, script_inputs, block_num, commit_height) \ - VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)"; - -pub(crate) const INSERT_TRANSACTION_SCRIPT_QUERY: &str = - "INSERT OR IGNORE INTO transaction_scripts (script_hash, program) \ - VALUES (?, ?)"; - -// TRANSACTIONS FILTERS -// ================================================================================================ - -impl TransactionFilter { - /// Returns a [String] containing the query for this Filter - pub fn to_query(&self) -> String { - const QUERY: &str = "SELECT tx.id, tx.account_id, tx.init_account_state, tx.final_account_state, \ - tx.input_notes, tx.output_notes, tx.script_hash, script.program, tx.script_inputs, tx.block_num, tx.commit_height \ - FROM transactions AS tx LEFT JOIN transaction_scripts AS script ON tx.script_hash = script.script_hash"; - match self { - TransactionFilter::All => QUERY.to_string(), - TransactionFilter::Uncomitted => format!("{QUERY} WHERE tx.commit_height IS NULL"), - } - } -} - -// TRANSACTIONS -// ================================================================================================ - -type SerializedTransactionData = ( - String, - i64, - String, - String, - String, - Vec, - Option>, - Option>, - Option, - u32, - Option, -); - -impl SqliteStore { - /// Retrieves tracked transactions, filtered by [TransactionFilter]. - pub fn get_transactions( - &self, - filter: TransactionFilter, - ) -> Result, StoreError> { - self.db - .prepare(&filter.to_query())? 
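// The filter expands to a SELECT over `transactions` LEFT JOINed with
// `transaction_scripts`, so each row already carries the optional script program
// that `parse_transaction` decodes together with the rest of the record.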
- .query_map([], parse_transaction_columns) - .expect("no binding parameters used in query") - .map(|result| Ok(result?).and_then(parse_transaction)) - .collect::, _>>() - } - - /// Inserts a transaction and updates the current state based on the `tx_result` changes - pub fn apply_transaction(&mut self, tx_result: TransactionResult) -> Result<(), StoreError> { - let account_id = tx_result.executed_transaction().account_id(); - let account_delta = tx_result.account_delta(); - - let (mut account, _seed) = self.get_account(account_id)?; - - account.apply_delta(account_delta).map_err(StoreError::AccountError)?; - - let created_input_notes = tx_result - .relevant_notes() - .into_iter() - .map(|note| InputNoteRecord::from(note.clone())) - .collect::>(); - - let created_output_notes = tx_result - .created_notes() - .iter() - .map(|note| OutputNoteRecord::from(note.clone())) - .collect::>(); - - let tx = self.db.transaction()?; - - // Transaction Data - insert_proven_transaction_data(&tx, tx_result)?; - - // Account Data - update_account(&tx, &account)?; - - // Updates for notes - - // TODO: see if we should filter the input notes we store to keep notes we can consume with - // existing accounts - for note in &created_input_notes { - insert_input_note_tx(&tx, note)?; - } - - for note in &created_output_notes { - insert_output_note_tx(&tx, note)?; - } - - tx.commit()?; - - Ok(()) - } - - /// Set the provided transactions as committed - /// - /// # Errors - /// - /// This function can return an error if any of the updates to the transactions within the - /// database transaction fail. - pub(crate) fn mark_transactions_as_committed( - tx: &Transaction<'_>, - block_num: u32, - transactions_to_commit: &[TransactionId], - ) -> Result { - let mut rows = 0; - for transaction_id in transactions_to_commit { - const QUERY: &str = "UPDATE transactions set commit_height=? 
where id=?"; - rows += tx.execute(QUERY, params![Some(block_num), transaction_id.to_string()])?; - } - info!("Marked {} transactions as committed", rows); - - Ok(rows) - } -} - -pub(super) fn insert_proven_transaction_data( - tx: &Transaction<'_>, - transaction_result: TransactionResult, -) -> Result<(), StoreError> { - let ( - transaction_id, - account_id, - init_account_state, - final_account_state, - input_notes, - output_notes, - script_program, - script_hash, - script_inputs, - block_num, - committed, - ) = serialize_transaction_data(transaction_result)?; - - if let Some(hash) = script_hash.clone() { - tx.execute(INSERT_TRANSACTION_SCRIPT_QUERY, params![hash, script_program])?; - } - - tx.execute( - INSERT_TRANSACTION_QUERY, - params![ - transaction_id, - account_id, - init_account_state, - final_account_state, - input_notes, - output_notes, - script_hash, - script_inputs, - block_num, - committed, - ], - )?; - - Ok(()) -} - -pub(super) fn serialize_transaction_data( - transaction_result: TransactionResult, -) -> Result { - let executed_transaction = transaction_result.executed_transaction(); - let transaction_id: String = executed_transaction.id().inner().into(); - let account_id: u64 = executed_transaction.account_id().into(); - let init_account_state = &executed_transaction.initial_account().hash().to_string(); - let final_account_state = &executed_transaction.final_account().hash().to_string(); - - // TODO: Double check if saving nullifiers as input notes is enough - let nullifiers: Vec = executed_transaction - .input_notes() - .iter() - .map(|x| x.nullifier().inner()) - .collect(); - - let input_notes = - serde_json::to_string(&nullifiers).map_err(StoreError::InputSerializationError)?; - - let output_notes = executed_transaction.output_notes(); - - info!("Transaction ID: {}", executed_transaction.id().inner()); - info!("Transaction account ID: {}", executed_transaction.account_id()); - - // TODO: Scripts should be in their own tables and only identifiers should be stored here - let transaction_args = transaction_result.transaction_arguments(); - let mut script_program = None; - let mut script_hash = None; - let mut script_inputs = None; - - if let Some(tx_script) = transaction_args.tx_script() { - script_program = - Some(tx_script.code().to_bytes(AstSerdeOptions { serialize_imports: true })); - script_hash = Some(tx_script.hash().to_bytes()); - script_inputs = Some( - serde_json::to_string(&tx_script.inputs()) - .map_err(StoreError::InputSerializationError)?, - ); - } - - Ok(( - transaction_id, - account_id as i64, - init_account_state.to_owned(), - final_account_state.to_owned(), - input_notes, - output_notes.to_bytes(), - script_program, - script_hash, - script_inputs, - transaction_result.block_num(), - None, - )) -} - -fn parse_transaction_columns( - row: &rusqlite::Row<'_>, -) -> Result { - let id: String = row.get(0)?; - let account_id: i64 = row.get(1)?; - let init_account_state: String = row.get(2)?; - let final_account_state: String = row.get(3)?; - let input_notes: String = row.get(4)?; - let output_notes: Vec = row.get(5)?; - let script_hash: Option> = row.get(6)?; - let script_program: Option> = row.get(7)?; - let script_inputs: Option = row.get(8)?; - let block_num: u32 = row.get(9)?; - let commit_height: Option = row.get(10)?; - - Ok(( - id, - account_id, - init_account_state, - final_account_state, - input_notes, - output_notes, - script_hash, - script_program, - script_inputs, - block_num, - commit_height, - )) -} - -/// Parse a transaction from the provided 
parts. -fn parse_transaction( - serialized_transaction: SerializedTransactionData, -) -> Result { - let ( - id, - account_id, - init_account_state, - final_account_state, - input_notes, - output_notes, - script_hash, - script_program, - script_inputs, - block_num, - commit_height, - ) = serialized_transaction; - let account_id = AccountId::try_from(account_id as u64)?; - let id: Digest = id.try_into()?; - let init_account_state: Digest = init_account_state.try_into()?; - - let final_account_state: Digest = final_account_state.try_into()?; - - let input_note_nullifiers: Vec = - serde_json::from_str(&input_notes).map_err(StoreError::JsonDataDeserializationError)?; - - let output_notes = OutputNotes::read_from_bytes(&output_notes)?; - - let transaction_script: Option = if script_hash.is_some() { - let script_hash = script_hash - .map(|hash| Digest::read_from_bytes(&hash)) - .transpose()? - .expect("Script hash should be included in the row"); - - let script_program = script_program - .map(|program| ProgramAst::from_bytes(&program)) - .transpose()? - .expect("Script program should be included in the row"); - - let script_inputs = script_inputs - .map(|hash| serde_json::from_str::>>(&hash)) - .transpose() - .map_err(StoreError::JsonDataDeserializationError)? - .expect("Script inputs should be included in the row"); - - let tx_script = TransactionScript::from_parts( - script_program, - script_hash, - script_inputs.into_iter().map(|(k, v)| (k.into(), v)), - )?; - - Some(tx_script) - } else { - None - }; - - let transaction_status = - commit_height.map_or(TransactionStatus::Pending, TransactionStatus::Committed); - - Ok(TransactionRecord { - id: id.into(), - account_id, - init_account_state, - final_account_state, - input_note_nullifiers, - output_notes, - transaction_script, - block_num, - transaction_status, - }) -} diff --git a/src/tests.rs b/src/tests.rs deleted file mode 100644 index c9e27841b..000000000 --- a/src/tests.rs +++ /dev/null @@ -1,370 +0,0 @@ -// TESTS -// ================================================================================================ -use miden_lib::transaction::TransactionKernel; -use miden_objects::{ - accounts::{AccountId, AccountStub, ACCOUNT_ID_FUNGIBLE_FAUCET_OFF_CHAIN}, - assembly::{AstSerdeOptions, ModuleAst}, - assets::{FungibleAsset, TokenSymbol}, - crypto::dsa::rpo_falcon512::SecretKey, - Word, -}; - -use crate::{ - client::{ - accounts::{AccountStorageMode, AccountTemplate}, - transactions::transaction_request::TransactionTemplate, - }, - mock::{ - get_account_with_default_account_code, mock_full_chain_mmr_and_notes, - mock_fungible_faucet_account, mock_notes, ACCOUNT_ID_REGULAR, - }, - store::{sqlite_store::tests::create_test_client, AuthInfo, InputNoteRecord, NoteFilter}, -}; - -#[tokio::test] -async fn test_input_notes_round_trip() { - // generate test client with a random store name - let mut client = create_test_client(); - - // generate test data - - let assembler = TransactionKernel::assembler(); - let (consumed_notes, _created_notes) = mock_notes(&assembler); - let (_, consumed_notes, ..) 
= mock_full_chain_mmr_and_notes(consumed_notes); - - // insert notes into database - for note in consumed_notes.iter().cloned() { - client.import_input_note(note.into()).unwrap(); - } - - // retrieve notes from database - let retrieved_notes = client.get_input_notes(NoteFilter::Committed).unwrap(); - assert_eq!(retrieved_notes.len(), consumed_notes.len()); - - let recorded_notes: Vec = - consumed_notes.iter().map(|n| n.clone().into()).collect(); - // compare notes - for (recorded_note, retrieved_note) in recorded_notes.iter().zip(retrieved_notes) { - assert_eq!(recorded_note.id(), retrieved_note.id()); - } -} - -#[tokio::test] -async fn test_get_input_note() { - // generate test client with a random store name - let mut client = create_test_client(); - - let assembler = TransactionKernel::assembler(); - let (_consumed_notes, created_notes) = mock_notes(&assembler); - - // insert Note into database - client.import_input_note(created_notes.first().unwrap().clone().into()).unwrap(); - - // retrieve note from database - let retrieved_note = - client.get_input_note(created_notes.first().unwrap().clone().id()).unwrap(); - - let recorded_note: InputNoteRecord = created_notes.first().unwrap().clone().into(); - assert_eq!(recorded_note.id(), retrieved_note.id()); -} - -#[tokio::test] -async fn insert_basic_account() { - // generate test client with a random store name - let mut client = create_test_client(); - - let account_template = AccountTemplate::BasicWallet { - mutable_code: true, - storage_mode: AccountStorageMode::Local, - }; - - // Insert Account - let account_insert_result = client.new_account(account_template); - assert!(account_insert_result.is_ok()); - - let (account, account_seed) = account_insert_result.unwrap(); - - // Fetch Account - let fetched_account_data = client.get_account(account.id()); - assert!(fetched_account_data.is_ok()); - - let (fetched_account, fetched_account_seed) = fetched_account_data.unwrap(); - // Validate stub has matching data - assert_eq!(account.id(), fetched_account.id()); - assert_eq!(account.nonce(), fetched_account.nonce()); - assert_eq!(account.vault(), fetched_account.vault()); - assert_eq!(account.storage().root(), fetched_account.storage().root()); - assert_eq!(account.code().root(), fetched_account.code().root()); - - // Validate seed matches - assert_eq!(account_seed, fetched_account_seed.unwrap()); -} - -#[tokio::test] -async fn insert_faucet_account() { - // generate test client with a random store name - let mut client = create_test_client(); - - let faucet_template = AccountTemplate::FungibleFaucet { - token_symbol: TokenSymbol::new("TEST").unwrap(), - decimals: 10, - max_supply: 9999999999, - storage_mode: AccountStorageMode::Local, - }; - - // Insert Account - let account_insert_result = client.new_account(faucet_template); - assert!(account_insert_result.is_ok()); - - let (account, account_seed) = account_insert_result.unwrap(); - - // Fetch Account - let fetched_account_data = client.get_account(account.id()); - assert!(fetched_account_data.is_ok()); - - let (fetched_account, fetched_account_seed) = fetched_account_data.unwrap(); - // Validate stub has matching data - assert_eq!(account.id(), fetched_account.id()); - assert_eq!(account.nonce(), fetched_account.nonce()); - assert_eq!(account.vault(), fetched_account.vault()); - assert_eq!(account.storage(), fetched_account.storage()); - assert_eq!(account.code().root(), fetched_account.code().root()); - - // Validate seed matches - assert_eq!(account_seed, fetched_account_seed.unwrap()); -} 
- -#[tokio::test] -async fn insert_same_account_twice_fails() { - // generate test client with a random store name - let mut client = create_test_client(); - - let account = get_account_with_default_account_code( - AccountId::try_from(ACCOUNT_ID_REGULAR).unwrap(), - Word::default(), - None, - ); - - let key_pair = SecretKey::new(); - - assert!(client - .insert_account(&account, Some(Word::default()), &AuthInfo::RpoFalcon512(key_pair.clone())) - .is_ok()); - assert!(client - .insert_account(&account, Some(Word::default()), &AuthInfo::RpoFalcon512(key_pair)) - .is_err()); -} - -#[tokio::test] -async fn test_account_code() { - // generate test client with a random store name - let mut client = create_test_client(); - - let key_pair = SecretKey::new(); - - let account = get_account_with_default_account_code( - AccountId::try_from(ACCOUNT_ID_REGULAR).unwrap(), - Word::default(), - None, - ); - - let mut account_module = account.code().module().clone(); - - // this is needed due to the reconstruction not including source locations - account_module.clear_locations(); - account_module.clear_imports(); - - let account_module_bytes = account_module.to_bytes(AstSerdeOptions { serialize_imports: true }); - let reconstructed_ast = ModuleAst::from_bytes(&account_module_bytes).unwrap(); - assert_eq!(account_module, reconstructed_ast); - - client - .insert_account(&account, Some(Word::default()), &AuthInfo::RpoFalcon512(key_pair)) - .unwrap(); - let (retrieved_acc, _) = client.get_account(account.id()).unwrap(); - - let mut account_module = account.code().module().clone(); - account_module.clear_locations(); - account_module.clear_imports(); - assert_eq!(*account_module.procs(), *retrieved_acc.code().module().procs()); -} - -#[tokio::test] -async fn test_get_account_by_id() { - // generate test client with a random store name - let mut client = create_test_client(); - - let account = get_account_with_default_account_code( - AccountId::try_from(ACCOUNT_ID_REGULAR).unwrap(), - Word::default(), - None, - ); - - let key_pair = SecretKey::new(); - - client - .insert_account(&account, Some(Word::default()), &AuthInfo::RpoFalcon512(key_pair)) - .unwrap(); - - // Retrieving an existing account should succeed - let (acc_from_db, _account_seed) = match client.get_account_stub_by_id(account.id()) { - Ok(account) => account, - Err(err) => panic!("Error retrieving account: {}", err), - }; - assert_eq!(AccountStub::from(account), acc_from_db); - - // Retrieving a non existing account should fail - let hex = format!("0x{}", "1".repeat(16)); - let invalid_id = AccountId::from_hex(&hex).unwrap(); - assert!(client.get_account_stub_by_id(invalid_id).is_err()); -} - -#[tokio::test] -async fn test_sync_state() { - // generate test client with a random store name - let mut client = create_test_client(); - - // generate test data - crate::mock::insert_mock_data(&mut client).await; - - // assert that we have no consumed nor pending notes prior to syncing state - assert_eq!(client.get_input_notes(NoteFilter::Consumed).unwrap().len(), 0); - - let pending_notes = client.get_input_notes(NoteFilter::Pending).unwrap(); - - // sync state - let block_num: u32 = client.sync_state().await.unwrap(); - - // verify that the client is synced to the latest block - assert_eq!( - block_num, - client.rpc_api().state_sync_requests.first_key_value().unwrap().1.chain_tip - ); - - // verify that we now have one consumed note after syncing state - assert_eq!(client.get_input_notes(NoteFilter::Consumed).unwrap().len(), 1); - - // verify that the 
pending note we had is now committed - assert_ne!(client.get_input_notes(NoteFilter::Committed).unwrap(), pending_notes); - - // verify that the latest block number has been updated - assert_eq!( - client.get_sync_height().unwrap(), - client.rpc_api().state_sync_requests.first_key_value().unwrap().1.chain_tip - ); -} - -#[tokio::test] -async fn test_sync_state_mmr_state() { - // generate test client with a random store name - let mut client = create_test_client(); - - // generate test data - let tracked_block_headers = crate::mock::insert_mock_data(&mut client).await; - - // sync state - let block_num: u32 = client.sync_state().await.unwrap(); - - // verify that the client is synced to the latest block - assert_eq!( - block_num, - client.rpc_api().state_sync_requests.first_key_value().unwrap().1.chain_tip - ); - - // verify that the latest block number has been updated - assert_eq!( - client.get_sync_height().unwrap(), - client.rpc_api().state_sync_requests.first_key_value().unwrap().1.chain_tip - ); - - // verify that we inserted the latest block into the db via the client - let latest_block = client.get_sync_height().unwrap(); - assert_eq!(block_num, latest_block); - assert_eq!( - tracked_block_headers[tracked_block_headers.len() - 1], - client.get_block_headers(&[latest_block]).unwrap()[0].0 - ); - - // Try reconstructing the chain_mmr from what's in the database - let partial_mmr = client.build_current_partial_mmr().unwrap(); - - // Since Mocked data contains three sync updates we should be "tracking" those blocks - // However, remember that we don't actually update the partial_mmr with the latest block but up - // to one block before instead. This is because the prologue will already build the - // authentication path for that block. - assert_eq!(partial_mmr.forest(), 6); - assert!(partial_mmr.open(0).unwrap().is_none()); - assert!(partial_mmr.open(1).unwrap().is_none()); - assert!(partial_mmr.open(2).unwrap().is_some()); - assert!(partial_mmr.open(3).unwrap().is_none()); - assert!(partial_mmr.open(4).unwrap().is_some()); - assert!(partial_mmr.open(5).unwrap().is_none()); - - // Ensure the proofs are valid - let mmr_proof = partial_mmr.open(2).unwrap().unwrap(); - assert!(partial_mmr.peaks().verify(tracked_block_headers[0].hash(), mmr_proof)); - - let mmr_proof = partial_mmr.open(4).unwrap().unwrap(); - assert!(partial_mmr.peaks().verify(tracked_block_headers[1].hash(), mmr_proof)); -} - -#[tokio::test] -async fn test_add_tag() { - // generate test client with a random store name - let mut client = create_test_client(); - - // assert that no tags are being tracked - assert_eq!(client.get_note_tags().unwrap().len(), 0); - - // add a tag - const TAG_VALUE_1: u64 = 1; - const TAG_VALUE_2: u64 = 2; - client.add_note_tag(TAG_VALUE_1).unwrap(); - client.add_note_tag(TAG_VALUE_2).unwrap(); - - // verify that the tag is being tracked - assert_eq!(client.get_note_tags().unwrap(), vec![TAG_VALUE_1, TAG_VALUE_2]); - - // attempt to add the same tag again - client.add_note_tag(TAG_VALUE_1).unwrap(); - - // verify that the tag is still being tracked only once - assert_eq!(client.get_note_tags().unwrap(), vec![TAG_VALUE_1, TAG_VALUE_2]); -} - -#[tokio::test] -async fn test_mint_transaction() { - const FAUCET_ID: u64 = ACCOUNT_ID_FUNGIBLE_FAUCET_OFF_CHAIN; - const INITIAL_BALANCE: u64 = 1000; - - // generate test client with a random store name - let mut client = create_test_client(); - - // Faucet account generation - let key_pair = SecretKey::new(); - - let faucet = mock_fungible_faucet_account( - 
AccountId::try_from(FAUCET_ID).unwrap(), - INITIAL_BALANCE, - key_pair.clone(), - ); - - client - .store() - .insert_account(&faucet, None, &AuthInfo::RpoFalcon512(key_pair)) - .unwrap(); - - client.sync_state().await.unwrap(); - - // Test submitting a mint transaction - let transaction_template = TransactionTemplate::MintFungibleAsset( - FungibleAsset::new(faucet.id(), 5u64).unwrap(), - AccountId::from_hex("0x168187d729b31a84").unwrap(), - miden_objects::notes::NoteType::OffChain, - ); - - let transaction_request = client.build_transaction_request(transaction_template).unwrap(); - - let transaction = client.new_transaction(transaction_request).unwrap(); - assert!(transaction.executed_transaction().account_delta().nonce().is_some()); -} diff --git a/wasm/Cargo.toml b/wasm/Cargo.toml new file mode 100644 index 000000000..f579b398e --- /dev/null +++ b/wasm/Cargo.toml @@ -0,0 +1,42 @@ +[package] +name = "miden-wasm" +version = "1.0.0" +description = "WebAssembly based toolkit for developing zero knowledge applications with Polygon Miden" +edition = "2021" +build = "build.rs" + +[lib] +crate-type = ["cdylib"] + +[features] +concurrent = ["miden-lib/concurrent", "miden-objects/concurrent", "miden-tx/concurrent"] +default = ["std"] +std = ["miden-objects/std"] +testing = ["miden-objects/testing", "miden-lib/testing"] +test_utils = ["miden-objects/testing"] + +[dependencies] +async-trait = "0.1" +base64 = "0.13" +futures = "0.3" +getrandom = { version = "0.2", features = ["js"] } +hex = { version = "0.4" } +miden-client = { path = "../../demox-miden-client", version = "0.3.0", default-features = false, features = ["wasm", "async"] } +miden-lib = { version = "0.3.1", default-features = false, features = ["testing"] } +miden-tx = { version = "0.3.1", default-features = false, features = ["async"] } +miden-objects = { version = "0.3.1", default-features = false, features = ["std", "serde", "testing"] } +prost = { version = "0.12.3" } +rand = { version = "0.8.5" } +serde = { version = "1.0", features = ["derive"] } +serde_json = "1.0" +serde-wasm-bindgen = "0.6.5" +tonic = { version="0.11.0", default-features = false, features=["prost","codegen"] } +tonic-web-wasm-client = "0.5" +wasm-bindgen = { version = "0.2", features = ["serde-serialize"] } +wasm-bindgen-futures = "0.4" +web-sys = { version = "0.3", features = ["console"] } +console_error_panic_hook = "0.1.6" + +[build-dependencies] +tonic-build = { version="0.11.0", default-features = false, features = ["prost"]} + diff --git a/wasm/LICENSE b/wasm/LICENSE new file mode 100644 index 000000000..7e84649f0 --- /dev/null +++ b/wasm/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2024 Polygon Miden + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/wasm/README.md b/wasm/README.md new file mode 100644 index 000000000..548513c67 --- /dev/null +++ b/wasm/README.md @@ -0,0 +1,251 @@ +# @demox-labs/miden-sdk +The @demox-labs/miden-sdk is a toolkit designed for interacting with the Miden virtual machine. It offers essential tools and functionalities for developers aiming to integrate or utilize Miden VM capabilities in their applications. + +## Installation +To install the package via npm, run the following command: + +```javascript +npm i @demox-labs/miden-sdk +``` + +For yarn: +```javascript +yarn add @demox-labs/miden-sdk +``` + +## Usage + +```typescript +import { WebClient } from "@demox-labs/miden-sdk"; + +const webClient = new WebClient(); +await webClient.create_client(); + +// Use webclient to create accounts, notes, transactions, etc. +// This will create a mutable, off-chain account and store it in IndexedDB +const accountId = await webClient.new_wallet("OffChain", true); +``` + +## Examples +### The WebClient +The WebClient is your gateway to creating and interacting with anything miden vm related. +Example: +```typescript +// Creates a new WebClient instance which can then be configured after +const webClient = new WebClient(); + +// Creates the internal client of a previously instantiated WebClient. +// Can provide `node_url` as an optional parameter. Defaults to "http://localhost:57291". +// See https://github.com/0xPolygonMiden/miden-node for setting up and running your own node locally +await webClient.create_client(); +``` +### Accounts +You can use the WebClient to create and retrieve account information. +```typescript +const webClient = new WebClient(); +await webClient.create_client(); + +/** + * Creates a new wallet account. + * + * @param storage_type String. Either "OffChain" or "OnChain". + * @param mutable Boolean. Whether the wallet code is mutable or not + * + * Returns: Wallet Id + */ +const walletId = await webClient.new_wallet("OffChain", true); + +/** + * Creates a new faucet account. + * + * @param storage_type String. Either "OffChain" or "OnChain". + * @param non_fungible Boolean. Whether the faucet is non_fungible or not. NOTE: Non-fungible faucets are not supported yet + * @param token_symbol String. Token symbol of the token the faucet creates + * @param decimals String. Decimal precision of token. + * @param max_supply String. Maximum token supply + */ +const faucetId = await webClient.new_faucet("OffChain", true, "TOK", 6, 1_000_000) + +// Returns all accounts. Both wallets and faucets. +const accounts = await webClient.get_accounts() + +// Gets a single account by id +const account = await webClient.get_account("0x9258fec00ad6d9bc"); +``` + +### Transactions +You can use the webClient to facilitate transactions between accounts. + +Let's mint some tokens for our wallet from our faucet: +```typescript +const webClient = new WebClient(); +await webClient.create_client(); +const walletId = await webClient.new_wallet("OffChain", true); +const faucetId = await webClient.new_faucet("OffChain", true, "TOK", 6, 1_000_000); + +// Syncs web client with node state. +await webClient.sync_state(); +// Caches faucet account auth. A workaround to allow for synchronicity in the transaction flow. 
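// The IndexedDB-backed store is asynchronous, while the auth lookup during the
// transaction flow is synchronous, so the faucet's authentication data is loaded
// into an in-memory cache first (see `fetchAndCacheAccountAuthByPubKey` in the JS store).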
+await webClient.fetch_and_cache_account_auth_by_pub_key(faucetId); + +// Mint 10_000 tokens for the previously created wallet via a Private Note +const newTxnResult = await webClient.new_mint_transaction(walletId, faucetId, "Private", 10_000) + +// Sync state again +await webClient.sync_state(); +``` + +## API Reference + +```typescript +/** + * @returns {Promise} + */ +get_accounts(): Promise; + +/** + * @param {string} account_id + * @returns {Promise} + */ +get_account(account_id: string): Promise; + +/** + * @param {any} pub_key_bytes + * @returns {any} + */ +get_account_auth_by_pub_key(pub_key_bytes: any): any; + +/** + * @param {string} account_id + * @returns {Promise} + */ +fetch_and_cache_account_auth_by_pub_key(account_id: string): Promise; + +/** + * @param {string} note_id + * @returns {Promise} + */ +export_note(note_id: string): Promise; + +/** + * @param {any} account_bytes + * @returns {Promise} + */ +import_account(account_bytes: any): Promise; + +/** + * @param {string} note_bytes + * @param {boolean} verify + * @returns {Promise} + */ +import_note(note_bytes: string, verify: boolean): Promise; + +/** + * @param {string} storage_type + * @param {boolean} mutable + * @returns {Promise} + */ +new_wallet(storage_type: string, mutable: boolean): Promise; + +/** + * @param {string} storage_type + * @param {boolean} non_fungible + * @param {string} token_symbol + * @param {string} decimals + * @param {string} max_supply + * @returns {Promise} + */ +new_faucet(storage_type: string, non_fungible: boolean, token_symbol: string, decimals: string, max_supply: string): Promise; + +/** + * @param {string} target_account_id + * @param {string} faucet_id + * @param {string} note_type + * @param {string} amount + * @returns {Promise} + */ +new_mint_transaction(target_account_id: string, faucet_id: string, note_type: string, amount: string): Promise; + +/** + * @param {string} sender_account_id + * @param {string} target_account_id + * @param {string} faucet_id + * @param {string} note_type + * @param {string} amount + * @param {string | undefined} [recall_height] + * @returns {Promise} + */ +new_send_transaction(sender_account_id: string, target_account_id: string, faucet_id: string, note_type: string, amount: string, recall_height?: string): Promise; + +/** + * @param {string} account_id + * @param {(string)[]} list_of_notes + * @returns {Promise} + */ +new_consume_transaction(account_id: string, list_of_notes: (string)[]): Promise; + +/** + * @param {string} sender_account_id + * @param {string} offered_asset_faucet_id + * @param {string} offered_asset_amount + * @param {string} requested_asset_faucet_id + * @param {string} requested_asset_amount + * @param {string} note_type + * @returns {Promise} + */ +new_swap_transaction(sender_account_id: string, offered_asset_faucet_id: string, offered_asset_amount: string, requested_asset_faucet_id: string, requested_asset_amount: string, note_type: string): Promise; + +/** + * @param {any} filter + * @returns {Promise} + */ +get_input_notes(filter: any): Promise; + +/** + * @param {string} note_id + * @returns {Promise} + */ +get_input_note(note_id: string): Promise; + +/** + * @param {any} filter + * @returns {Promise} + */ +get_output_notes(filter: any): Promise; + +/** + * @param {string} note_id + * @returns {Promise} + */ +get_output_note(note_id: string): Promise; + +/** + * @returns {Promise} + */ +sync_state(): Promise; + +/** + * @returns {Promise} + */ +get_transactions(): Promise; + +/** + * @param {string} tag + * @returns 
{Promise} + */ +add_tag(tag: string): Promise; + +/** + */ +constructor(); + +/** + * @param {string | undefined} [node_url] + * @returns {Promise} + */ +create_client(node_url?: string): Promise; +``` + +## License + +This project is licensed under the MIT License - see the LICENSE file for details. \ No newline at end of file diff --git a/wasm/build.rs b/wasm/build.rs new file mode 100644 index 000000000..9e7b766d3 --- /dev/null +++ b/wasm/build.rs @@ -0,0 +1,25 @@ + +use std::io; + +fn main() -> io::Result<()> { + // Compile the proto files into Rust code + tonic_build::configure() + .build_server(false) + .build_client(true) + .compile(&[ + "src/web_client/rpc/proto/account.proto", + "src/web_client/rpc/proto/block_header.proto", + "src/web_client/rpc/proto/block_producer.proto", + "src/web_client/rpc/proto/digest.proto", + "src/web_client/rpc/proto/merkle.proto", + "src/web_client/rpc/proto/mmr.proto", + "src/web_client/rpc/proto/note.proto", + "src/web_client/rpc/proto/requests.proto", + "src/web_client/rpc/proto/responses.proto", + "src/web_client/rpc/proto/rpc.proto", + "src/web_client/rpc/proto/smt.proto", + "src/web_client/rpc/proto/store.proto", + ], &[ + "src/web_client/rpc/proto" + ]) +} \ No newline at end of file diff --git a/wasm/config/envoy.yaml b/wasm/config/envoy.yaml new file mode 100644 index 000000000..2aca68ea2 --- /dev/null +++ b/wasm/config/envoy.yaml @@ -0,0 +1,56 @@ +static_resources: + listeners: + - name: listener_0 + address: + socket_address: + address: 0.0.0.0 + port_value: 8080 + filter_chains: + - filters: + - name: envoy.filters.network.http_connection_manager + typed_config: + "@type": type.googleapis.com/envoy.extensions.filters.network.http_connection_manager.v3.HttpConnectionManager + stat_prefix: grpc_web + route_config: + name: local_route + virtual_hosts: + - name: local_service + domains: ["*"] + routes: + - match: { prefix: "/" } + route: + cluster: grpc_server + cors: + allow_origin_string_match: + - safe_regex: + google_re2: {} + regex: "http://localhost:3000" + allow_methods: "GET, PUT, DELETE, POST, OPTIONS" + allow_headers: "content-type, x-grpc-web, x-user-agent" + max_age: "1728000" + expose_headers: "custom-header1, grpc-status, grpc-message" + http_filters: + - name: envoy.filters.http.cors + typed_config: + "@type": type.googleapis.com/envoy.extensions.filters.http.cors.v3.Cors + - name: envoy.filters.http.grpc_web + typed_config: + "@type": type.googleapis.com/envoy.extensions.filters.http.grpc_web.v3.GrpcWeb + - name: envoy.filters.http.router + typed_config: + "@type": type.googleapis.com/envoy.extensions.filters.http.router.v3.Router + clusters: + - name: grpc_server + connect_timeout: 0.25s + type: LOGICAL_DNS + lb_policy: ROUND_ROBIN + load_assignment: + cluster_name: grpc_server + endpoints: + - lb_endpoints: + - endpoint: + address: + socket_address: + address: 18.203.155.106 + port_value: 57291 + http2_protocol_options: {} \ No newline at end of file diff --git a/wasm/js/db/accounts.js b/wasm/js/db/accounts.js new file mode 100644 index 000000000..c9559e1f9 --- /dev/null +++ b/wasm/js/db/accounts.js @@ -0,0 +1,449 @@ +import { + accountCodes, + accountStorages, + accountVaults, + accountAuths, + accounts +} from './schema.js'; + +// GET FUNCTIONS +export async function getAccountIds() { + try { + let allIds = new Set(); // Use a Set to ensure uniqueness + + // Iterate over each account entry + await accounts.each(account => { + allIds.add(account.id); // Assuming 'account' has an 'id' property + }); + + return 
Array.from(allIds); // Convert back to array to return a list of unique IDs + } catch (error) { + console.error("Failed to retrieve account IDs: ", error); + throw error; // Or handle the error as fits your application's error handling strategy + } +} + +export async function getAllAccountStubs() { + try { + // Fetch all records + const allRecords = await accounts.toArray(); + + // Use a Map to track the latest record for each id based on nonce + const latestRecordsMap = new Map(); + + allRecords.forEach(record => { + const existingRecord = latestRecordsMap.get(record.id); + if (!existingRecord || BigInt(record.nonce) > BigInt(existingRecord.nonce)) { + latestRecordsMap.set(record.id, record); + } + }); + + // Extract the latest records from the Map + const latestRecords = Array.from(latestRecordsMap.values()); + + const resultObject = await Promise.all(latestRecords.map(async record => { + let accountSeedBase64 = null; + if (record.accountSeed) { + // Ensure accountSeed is processed as a Uint8Array and converted to Base64 + let accountSeedArrayBuffer = await record.accountSeed.arrayBuffer(); + let accountSeedArray = new Uint8Array(accountSeedArrayBuffer); + accountSeedBase64 = uint8ArrayToBase64(accountSeedArray); + } + + return { + id: record.id, + nonce: record.nonce, + vault_root: record.vaultRoot, + storage_root: record.storageRoot, + code_root: record.codeRoot, + account_seed: accountSeedBase64 // Now correctly formatted as Base64 + }; + })); + + return resultObject; + } catch (error) { + console.error('Error fetching all latest account stubs:', error); + throw error; + } +} + +export async function getAccountStub( + accountId +) { + try { + let allRecords = await accounts.toArray(); + // Fetch all records matching the given id + const allMatchingRecords = await accounts + .where('id') + .equals(accountId) + .toArray(); + + if (allMatchingRecords.length === 0) { + console.log('No records found for given ID.'); + return null; // No records found + } + + // Convert nonce to BigInt and sort + // Note: This assumes all nonces are valid BigInt strings. + const sortedRecords = allMatchingRecords.sort((a, b) => { + const bigIntA = BigInt(a.nonce); + const bigIntB = BigInt(b.nonce); + return bigIntA > bigIntB ? -1 : bigIntA < bigIntB ? 
1 : 0; + }); + + // The first record is the most recent one due to the sorting + const mostRecentRecord = sortedRecords[0]; + + let accountSeedBase64 = null; + if (mostRecentRecord.accountSeed) { + // Ensure accountSeed is processed as a Uint8Array and converted to Base64 + let accountSeedArrayBuffer = await mostRecentRecord.accountSeed.arrayBuffer(); + let accountSeedArray = new Uint8Array(accountSeedArrayBuffer); + accountSeedBase64 = uint8ArrayToBase64(accountSeedArray); + } + const accountStub = { + id: mostRecentRecord.id, + nonce: mostRecentRecord.nonce, + vault_root: mostRecentRecord.vaultRoot, + storage_root: mostRecentRecord.storageRoot, + code_root: mostRecentRecord.codeRoot, + account_seed: accountSeedBase64 + } + return accountStub; + } catch (error) { + console.error('Error fetching most recent account record:', error); + throw error; // Re-throw the error for further handling + } +} + +export async function getAccountCode( + codeRoot +) { + try { + // Fetch all records matching the given root + const allMatchingRecords = await accountCodes + .where('root') + .equals(codeRoot) + .toArray(); + + if (allMatchingRecords.length === 0) { + console.log('No records found for given code root.'); + return null; // No records found + } + + // The first record is the only one due to the uniqueness constraint + const codeRecord = allMatchingRecords[0]; + + // Convert the module Blob to an ArrayBuffer + const moduleArrayBuffer = await codeRecord.module.arrayBuffer(); + const moduleArray = new Uint8Array(moduleArrayBuffer); + const moduleBase64 = uint8ArrayToBase64(moduleArray); + return { + root: codeRecord.root, + procedures: codeRecord.procedures, + module: moduleBase64, + }; + } catch (error) { + console.error('Error fetching code record:', error); + throw error; // Re-throw the error for further handling + } +} + +export async function getAccountStorage( + storageRoot +) { + try { + // Fetch all records matching the given root + const allMatchingRecords = await accountStorages + .where('root') + .equals(storageRoot) + .toArray(); + + if (allMatchingRecords.length === 0) { + console.log('No records found for given storage root.'); + return null; // No records found + } + + // The first record is the only one due to the uniqueness constraint + const storageRecord = allMatchingRecords[0]; + + // Convert the module Blob to an ArrayBuffer + const storageArrayBuffer = await storageRecord.slots.arrayBuffer(); + const storageArray = new Uint8Array(storageArrayBuffer); + const storageBase64 = uint8ArrayToBase64(storageArray); + return { + root: storageRecord.root, + storage: storageBase64 + }; + } catch (error) { + console.error('Error fetching code record:', error); + throw error; // Re-throw the error for further handling + } +} + +export async function getAccountAssetVault( + vaultRoot +) { + try { + // Fetch all records matching the given root + const allMatchingRecords = await accountVaults + .where('root') + .equals(vaultRoot) + .toArray(); + + if (allMatchingRecords.length === 0) { + console.log('No records found for given vault root.'); + return null; // No records found + } + + // The first record is the only one due to the uniqueness constraint + const vaultRecord = allMatchingRecords[0]; + + return { + root: vaultRecord.root, + assets: vaultRecord.assets + }; + } catch (error) { + console.error('Error fetching code record:', error); + throw error; // Re-throw the error for further handling + } +} + +export async function getAccountAuth( + accountId +) { + try { + console.log('called 
getAccountAuth'); + // Fetch all records matching the given id + const allMatchingRecords = await accountAuths + .where('accountId') + .equals(accountId) + .toArray(); + + if (allMatchingRecords.length === 0) { + console.log('No records found for given account ID.'); + return null; // No records found + } + console.log('allMatchingRecords', allMatchingRecords); + + // The first record is the only one due to the uniqueness constraint + const authRecord = allMatchingRecords[0]; + console.log('authRecord', authRecord); + + // Convert the authInfo Blob to an ArrayBuffer + const authInfoArrayBuffer = await authRecord.authInfo.arrayBuffer(); + const authInfoArray = new Uint8Array(authInfoArrayBuffer); + const authInfoBase64 = uint8ArrayToBase64(authInfoArray); + console.log('authInfoBase64', authInfoBase64); + console.log('authRecord.accountId', authRecord.accountId); + return { + id: authRecord.accountId, + auth_info: authInfoBase64 + }; + } catch (err) { + console.error('Error fetching account auth:', err); + throw err; // Re-throw the error for further handling + } +} + +export function getAccountAuthByPubKey( + pubKey +) { + console.log('called getAccountAuthByPubKey'); + // Try to get the account auth from the cache + let pubKeyArray = new Uint8Array(pubKey); + let pubKeyBase64 = uint8ArrayToBase64(pubKeyArray); + let cachedAccountAuth = ACCOUNT_AUTH_MAP.get(pubKeyBase64); + console.log(cachedAccountAuth) + // Print the cache for debugging + console.log('PubKey', pubKeyBase64); + console.log('ACCOUNT_AUTH_MAP', ACCOUNT_AUTH_MAP); + console.log('account_info', cachedAccountAuth.auth_info) + + // If it's not in the cache, throw an error + if (!cachedAccountAuth) { + throw new Error('Account auth not found in cache.'); + } + + let data = { + id: cachedAccountAuth.id, + auth_info: cachedAccountAuth.auth_info + } + + return data; +} + +var ACCOUNT_AUTH_MAP = new Map(); +export async function fetchAndCacheAccountAuthByPubKey( + accountId +) { + try { + // Fetch all records matching the given id + const allMatchingRecords = await accountAuths + .where('accountId') + .equals(accountId) + .toArray(); + console.log('allMatchingRecords', allMatchingRecords); + + if (allMatchingRecords.length === 0) { + console.log('No records found for given account ID.'); + return null; // No records found + } + + // The first record is the only one due to the uniqueness constraint + const authRecord = allMatchingRecords[0]; + + // Convert the authInfo Blob to an ArrayBuffer + const authInfoArrayBuffer = await authRecord.authInfo.arrayBuffer(); + console.log('authInfoArrayBuffer', authInfoArrayBuffer); + const authInfoArray = new Uint8Array(authInfoArrayBuffer); + const authInfoBase64 = uint8ArrayToBase64(authInfoArray); + + // Store the auth info in the map + ACCOUNT_AUTH_MAP.set(authRecord.pubKey, { + id: authRecord.accountId, + auth_info: authInfoBase64 + }); + + console.log('ACCOUNT_AUTH_MAP', ACCOUNT_AUTH_MAP); + + return { + id: authRecord.accountId, + auth_info: authInfoBase64 + }; + } catch (err) { + console.error('Error fetching account auth by public key:', err); + throw err; // Re-throw the error for further handling + } +} + +// INSERT FUNCTIONS + +export async function insertAccountCode( + codeRoot, + code, + module +) { + try { + // Create a Blob from the ArrayBuffer + const moduleBlob = new Blob([new Uint8Array(module)]); + + // Prepare the data object to insert + const data = { + root: codeRoot, // Using codeRoot as the key + procedures: code, + module: moduleBlob, // Blob created from ArrayBuffer + 
}; + + // Perform the insert using Dexie + await accountCodes.add(data); + } catch (error) { + console.error(`Error inserting code with root: ${codeRoot}:`, error); + throw error; // Rethrow the error to handle it further up the call chain if needed + } +} + +export async function insertAccountStorage( + storageRoot, + storageSlots +) { + try { + const storageSlotsBlob = new Blob([new Uint8Array(storageSlots)]); + + // Prepare the data object to insert + const data = { + root: storageRoot, // Using storageRoot as the key + slots: storageSlotsBlob, // Blob created from ArrayBuffer + }; + + // Perform the insert using Dexie + await accountStorages.add(data); + } catch (error) { + console.error(`Error inserting storage with root: ${storageRoot}:`, error); + throw error; // Rethrow the error to handle it further up the call chain if needed + } +} + +export async function insertAccountAssetVault( + vaultRoot, + assets +) { + try { + // Prepare the data object to insert + const data = { + root: vaultRoot, // Using vaultRoot as the key + assets: assets, + }; + + // Perform the insert using Dexie + await accountVaults.add(data); + } catch (error) { + console.error(`Error inserting vault with root: ${vaultRoot}:`, error); + throw error; // Rethrow the error to handle it further up the call chain if needed + } +} + +export async function insertAccountRecord( + accountId, + code_root, + storage_root, + vault_root, + nonce, + committed, + account_seed +) { + try { + let accountSeedBlob = null; + if (account_seed) { + accountSeedBlob = new Blob([new Uint8Array(account_seed)]); + } + + + // Prepare the data object to insert + const data = { + id: accountId, // Using accountId as the key + codeRoot: code_root, + storageRoot: storage_root, + vaultRoot: vault_root, + nonce: nonce, + committed: committed, + accountSeed: accountSeedBlob, + }; + + // Perform the insert using Dexie + await accounts.add(data); + } catch (error) { + console.error(`Error inserting account: ${accountId}:`, error); + throw error; // Rethrow the error to handle it further up the call chain if needed + } +} + +export async function insertAccountAuth( + accountId, + authInfo, + pubKey +) { + try { + let authInfoBlob = new Blob([new Uint8Array(authInfo)]); + let pubKeyArray = new Uint8Array(pubKey); + let pubKeyBase64 = uint8ArrayToBase64(pubKeyArray); + + // Prepare the data object to insert + const data = { + accountId: accountId, // Using accountId as the key + authInfo: authInfoBlob, + pubKey: pubKeyBase64 + }; + + // Perform the insert using Dexie + await accountAuths.add(data); + } catch (error) { + console.error(`Error inserting auth for account: ${accountId}:`, error); + throw error; // Rethrow the error to handle it further up the call chain if needed + } +} + +function uint8ArrayToBase64(bytes) { + const binary = bytes.reduce((acc, byte) => acc + String.fromCharCode(byte), ''); + return btoa(binary); +} \ No newline at end of file diff --git a/wasm/js/db/chainData.js b/wasm/js/db/chainData.js new file mode 100644 index 000000000..8abeeee39 --- /dev/null +++ b/wasm/js/db/chainData.js @@ -0,0 +1,138 @@ +import { + blockHeaders, + chainMmrNodes +} from './schema.js' + +// INSERT FUNCTIONS +export async function insertBlockHeader( + blockNum, + header, + chainMmrPeaks, + hasClientNotes +) { + try { + const data = { + blockNum: blockNum, + header: header, + chainMmrPeaks: chainMmrPeaks, + hasClientNotes: hasClientNotes + }; + + await blockHeaders.add(data); + } catch (err) { + console.error("Failed to insert block header: ", err); 
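
The account getters and insert helpers above all move binary fields (account seed, code module, storage slots, auth info) through the same pattern: store the bytes as a Blob, read them back with `arrayBuffer()`, and hand them across the wasm boundary as a base64 string via `uint8ArrayToBase64`. A minimal sketch of the inverse direction is shown below; `base64ToUint8Array` is a hypothetical helper name and is not part of this diff.

```javascript
// Hypothetical inverse of uint8ArrayToBase64 (not included in this PR):
// decode a base64 string produced by the getters back into raw bytes.
function base64ToUint8Array(base64) {
    const binary = atob(base64);
    const bytes = new Uint8Array(binary.length);
    for (let i = 0; i < binary.length; i++) {
        bytes[i] = binary.charCodeAt(i);
    }
    return bytes;
}

// Round trip under that assumption:
// uint8ArrayToBase64(new Uint8Array([1, 2, 3])) === "AQID"
// base64ToUint8Array("AQID") yields Uint8Array [1, 2, 3]
```
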
+ throw err; + } +} + +export async function insertChainMmrNodes( + ids, + nodes +) { + try { + const data = nodes.map((node, index) => { + return { + id: ids[index], + node: node + } + }); + + await chainMmrNodes.bulkAdd(data); + } catch (err) { + console.error("Failed to insert chain mmr nodes: ", err); + throw err; + } +} + +// GET FUNCTIONS +export async function getBlockHeaders( + blockNumbers +) { + console.log("called getBlockHeaders with blockNumbers: ", blockNumbers); + try { + const blockHeaderPromises = blockNumbers.map(blockNum => + blockHeaders.get(blockNum) + ); + + const results = await Promise.all(blockHeaderPromises); + + console.log("blockNum map"); + console.log(results); + + // replace any undefined values with null + results.forEach((result, index) => { + if (result === undefined) { + results[index] = null; + } else { + results[index] = { + block_num: results[index].blockNum, + header: results[index].header, + chain_mmr: results[index].chainMmrPeaks, + has_client_notes: results[index].hasClientNotes + } + } + }); + + console.log("blockNum map after"); + console.log(results); + + return results + } catch (err) { + console.error("Failed to get block headers: ", err); + throw err; + } +} + +export async function getTrackedBlockHeaders() { + try { + // Fetch all records matching the given root + const allMatchingRecords = await blockHeaders + .where('hasClientNotes') + .equals(true) + .toArray(); + return allMatchingRecords; + } catch (err) { + console.error("Failed to get tracked block headers: ", err); + throw err; + } +} + +export async function getChainMmrPeaksByBlockNum( + blockNum +) { + try { + const blockHeader = await blockHeaders.get(blockNum); + return { + peaks: blockHeader.chainMmrPeaks + }; + } catch (err) { + console.error("Failed to get chain mmr peaks: ", err); + throw err; + } +} + +export async function getChainMmrNodesAll() { + try { + const chainMmrNodesAll = await chainMmrNodes.toArray(); + return chainMmrNodesAll; + } catch (err) { + console.error("Failed to get chain mmr nodes: ", err); + throw err; + } +} + +export async function getChainMmrNodes( + ids +) { + try { + const chainMmrNodesPromises = ids.map(id => + chainMmrNodes.get(id) + ); + + const results = await Promise.all(chainMmrNodesPromises); + return results; + } catch (err) { + console.error("Failed to get chain mmr nodes: ", err); + throw err; + } +} \ No newline at end of file diff --git a/wasm/js/db/notes.js b/wasm/js/db/notes.js new file mode 100644 index 000000000..7104a5cd9 --- /dev/null +++ b/wasm/js/db/notes.js @@ -0,0 +1,329 @@ +import { + db, + inputNotes, + outputNotes, + notesScripts, + transactions +} from './schema.js'; + +export async function getOutputNotes( + status +) { + console.log("called getOutputNotes"); + try { + let notes; + + // Fetch the records based on the filter + if (status === 'All') { + notes = await outputNotes.toArray(); + } else { + notes = await outputNotes.where('status').equals(status).toArray(); + } + + return await processOutputNotes(notes); + } catch (err) { + console.error("Failed to get input notes: ", err); + throw err; + } +} + +export async function getInputNotes( + status +) { + console.log("called getInputNotes"); + try { + let notes; + + // Fetch the records based on the filter + if (status === 'All') { + console.log("fetching all notes"); + notes = await inputNotes.toArray(); + } else { + console.log("fetching notes with status: ", status); + notes = await inputNotes.where('status').equals(status).toArray(); + } + console.log("notes: ", 
notes); + + return await processInputNotes(notes); + } catch (err) { + console.error("Failed to get input notes: ", err); + throw err; + } +} + +export async function getInputNotesFromIds( + noteIds +) { + console.log("called getInputNotesFromIds"); + console.log("noteIds: ", noteIds); + try { + let notes; + + // Fetch the records based on a list of IDs + notes = await inputNotes.where('noteId').anyOf(noteIds).toArray(); + + console.log("notes: ", notes); + + return await processInputNotes(notes); + } catch (err) { + console.error("Failed to get input notes: ", err); + throw err; + } +} + +export async function getOutputNotesFromIds( + noteIds +) { + try { + let notes; + + // Fetch the records based on a list of IDs + notes = await outputNotes.where('noteId').anyOf(noteIds).toArray(); + + return await processOutputNotes(notes); + } catch (err) { + console.error("Failed to get input notes: ", err); + throw err; + } +} + +export async function getUnspentInputNoteNullifiers() { + try { + const notes = await db.InputNotes.where('status').equals('Committed').toArray(); + const nullifiers = notes.map(note => JSON.parse(note.details).nullifier); + + return nullifiers; + } catch (err) { + console.error("Failed to get unspent input note nullifiers: ", err); + throw err; + } +} + +export async function insertInputNote( + noteId, + assets, + recipient, + status, + metadata, + details, + noteScriptHash, + serializedNoteScript, + inclusionProof +) { + return db.transaction('rw', inputNotes, notesScripts, async (tx) => { + try { + let assetsBlob = new Blob([new Uint8Array(assets)]); + + // Prepare the data object to insert + const data = { + noteId: noteId, + assets: assetsBlob, + recipient: recipient, + status: status, + metadata: metadata ? metadata : null, + details: details, + inclusionProof: inclusionProof ? JSON.stringify(inclusionProof) : null, + consumerTransactionId: null + }; + + // Perform the insert using Dexie + await tx.inputNotes.add(data); + + let serializedNoteScriptBlob = new Blob([new Uint8Array(serializedNoteScript)]); + + const noteScriptData = { + scriptHash: noteScriptHash, + serializedNoteScript: serializedNoteScriptBlob, + }; + + await tx.notesScripts.put(noteScriptData); + } catch { + console.error(`Error inserting note: ${noteId}:`, error); + throw error; // Rethrow the error to handle it further up the call chain if needed + } + }); +} + +export async function insertOutputNote( + noteId, + assets, + recipient, + status, + metadata, + details, + noteScriptHash, + serializedNoteScript, + inclusionProof +) { + console.log("insertOutputNote"); + console.log("noteId: ", noteId); + console.log('status', status); + return db.transaction('rw', outputNotes, notesScripts, async (tx) => { + try { + let assetsBlob = new Blob([new Uint8Array(assets)]); + + // Prepare the data object to insert + const data = { + noteId: noteId, + assets: assetsBlob, + recipient: recipient, + status: status, + metadata: metadata, + details: details ? details : null, + inclusionProof: inclusionProof ? 
JSON.stringify(inclusionProof) : null, + consumerTransactionId: null + }; + + // Perform the insert using Dexie + await tx.outputNotes.add(data); + + const exists = await tx.notesScripts.get(noteScriptHash); + if (!exists) { + let serializedNoteScriptBlob = null; + if (serializedNoteScript) { + serializedNoteScriptBlob = new Blob([new Uint8Array(serializedNoteScript)]); + } + + const data = { + scriptHash: noteScriptHash, + serializedNoteScript: serializedNoteScriptBlob, + }; + await tx.notesScripts.add(data); + } + } catch { + console.error(`Error inserting note: ${noteId}:`, error); + throw error; // Rethrow the error to handle it further up the call chain if needed + } + }); +} + +export async function updateNoteConsumerTxId(noteId, consumerTxId) { + try { + // Start a transaction that covers both tables + await db.transaction('rw', inputNotes, outputNotes, async (tx) => { + // Update input_notes where note_id matches + const updatedInputNotes = await tx.inputNotes + .where('noteId') + .equals(noteId) + .modify({ consumerTransactionId: consumerTxId }); + + // Update output_notes where note_id matches + const updatedOutputNotes = await tx.outputNotes + .where('noteId') + .equals(noteId) + .modify({ consumerTransactionId: consumerTxId }); + + // Log the count of updated entries in both tables (optional) + console.log(`Updated ${updatedInputNotes} input notes and ${updatedOutputNotes} output notes`); + }); + } catch (err) { + console.error("Failed to update note consumer transaction ID: ", err); + throw err; + } +} + +async function processInputNotes( + notes +) { + // Fetch all scripts from the scripts table for joining + const scripts = await notesScripts.toArray(); + const scriptMap = new Map(scripts.map(script => [script.scriptHash, script.serializedNoteScript])); + + const transactionRecords = await transactions.toArray(); + const transactionMap = new Map(transactionRecords.map(transaction => [transaction.id, transaction.accountId])); + + const processedNotes = await Promise.all(notes.map(async note => { + // Convert the assets blob to base64 + const assetsArrayBuffer = await note.assets.arrayBuffer(); + const assetsArray = new Uint8Array(assetsArrayBuffer); + const assetsBase64 = uint8ArrayToBase64(assetsArray); + note.assets = assetsBase64; + + // Convert the serialized note script blob to base64 + let serializedNoteScriptBase64 = null; + // Parse details JSON and perform a "join" + if (note.details) { + const details = JSON.parse(note.details); + if (details.script_hash) { + let serializedNoteScript = scriptMap.get(details.script_hash); + let serializedNoteScriptArrayBuffer = await serializedNoteScript.arrayBuffer(); + const serializedNoteScriptArray = new Uint8Array(serializedNoteScriptArrayBuffer); + serializedNoteScriptBase64 = uint8ArrayToBase64(serializedNoteScriptArray); + } + } + + // Perform a "join" with the transactions table + let consumerAccountId = null; + if (transactionMap.has(note.consumerTransactionId)) { + consumerAccountId = transactionMap.get(note.consumerTransactionId); + } + + return { + assets: note.assets, + details: note.details, + recipient: note.recipient, + status: note.status, + metadata: note.metadata ? note.metadata : null, + inclusion_proof: note.inclusionProof ? 
note.inclusionProof : null, + serialized_note_script: serializedNoteScriptBase64, + consumer_account_id: consumerAccountId + }; + })); + console.log("processedNotes: ", processedNotes); + return processedNotes; +} + +async function processOutputNotes( + notes +) { + console.log("called processOutputNotes"); + // Fetch all scripts from the scripts table for joining + const scripts = await notesScripts.toArray(); + const scriptMap = new Map(scripts.map(script => [script.scriptHash, script.serializedNoteScript])); + + const transactionRecords = await transactions.toArray(); + const transactionMap = new Map(transactionRecords.map(transaction => [transaction.id, transaction.accountId])); + + // Process each note to convert 'blobField' from Blob to Uint8Array + const processedNotes = await Promise.all(notes.map(async note => { + const assetsArrayBuffer = await note.assets.arrayBuffer(); + const assetsArray = new Uint8Array(assetsArrayBuffer); + const assetsBase64 = uint8ArrayToBase64(assetsArray); + note.assets = assetsBase64; + + let serializedNoteScriptBase64 = null; + // Parse details JSON and perform a "join" + if (note.details) { + const details = JSON.parse(note.details); + if (details.script_hash) { + let serializedNoteScript = scriptMap.get(details.script_hash); + let serializedNoteScriptArrayBuffer = await serializedNoteScript.arrayBuffer(); + const serializedNoteScriptArray = new Uint8Array(serializedNoteScriptArrayBuffer); + serializedNoteScriptBase64 = uint8ArrayToBase64(serializedNoteScriptArray); + } + } + + // Perform a "join" with the transactions table + let consumerAccountId = null; + if (transactionMap.has(note.consumerTransactionId)) { + consumerAccountId = transactionMap.get(note.consumerTransactionId); + } + + return { + assets: note.assets, + details: note.details ? note.details : null, + recipient: note.recipient, + status: note.status, + metadata: note.metadata, + inclusion_proof: note.inclusionProof ? 
note.inclusionProof : null, + serialized_note_script: serializedNoteScriptBase64, + consumer_account_id: consumerAccountId + }; + })); + return processedNotes; +} + +function uint8ArrayToBase64(bytes) { + const binary = bytes.reduce((acc, byte) => acc + String.fromCharCode(byte), ''); + return btoa(binary); +} \ No newline at end of file diff --git a/wasm/js/db/schema.js b/wasm/js/db/schema.js new file mode 100644 index 000000000..2c1e1cd7f --- /dev/null +++ b/wasm/js/db/schema.js @@ -0,0 +1,105 @@ +import Dexie from "dexie"; + +const DATABASE_NAME = 'MidenClientDB'; + +export async function openDatabase() { + console.log('Opening database...') + try { + await db.open(); + console.log("Database opened successfully"); + return true; + } catch (err) { + console.error("Failed to open database: ", err); + return false; + } +} + +export async function dropDatabase() { + console.log('Dropping database...') + try { + await db.delete(); + console.log("Database dropped successfully"); + return true; + } catch (err) { + console.error("Failed to drop database: ", err); + return false; + } + +} + +const Table = { + AccountCode: 'accountCode', + AccountStorage: 'accountStorage', + AccountVaults: 'accountVaults', + AccountAuth: 'accountAuth', + Accounts: 'accounts', + Transactions: 'transactions', + TransactionScripts: 'transactionScripts', + InputNotes: 'inputNotes', + OutputNotes: 'outputNotes', + NotesScripts: 'notesScripts', + StateSync: 'stateSync', + BlockHeaders: 'blockHeaders', + ChainMmrNodes: 'chainMmrNodes', + Greet: 'greets', +}; + +const db = new Dexie(DATABASE_NAME); +db.version(1).stores({ + [Table.AccountCode]: indexes('root'), + [Table.AccountStorage]: indexes('root'), + [Table.AccountVaults]: indexes('root'), + [Table.AccountAuth]: indexes('accountId', 'pubKey'), + [Table.Accounts]: indexes('[id+nonce]', 'codeRoot', 'storageRoot', 'vaultRoot'), + [Table.Transactions]: indexes('id'), + [Table.TransactionScripts]: indexes('scriptHash'), + [Table.InputNotes]: indexes('noteId', 'recipient', 'status'), + [Table.OutputNotes]: indexes('noteId', 'recipient', 'status'), + [Table.NotesScripts]: indexes('scriptHash'), + [Table.StateSync]: indexes('id'), + [Table.BlockHeaders]: indexes('blockNum'), + [Table.ChainMmrNodes]: indexes('id'), + [Table.Greet]: '++id', +}); + +function indexes(...items) { + return items.join(','); +} + +db.on('populate', () => { + // Populate the stateSync table with default values + db.stateSync.put({ id: 1, blockNum: "0", tags: [] }); +}); + +const accountCodes = db.table(Table.AccountCode); +const accountStorages = db.table(Table.AccountStorage); +const accountVaults = db.table(Table.AccountVaults); +const accountAuths = db.table(Table.AccountAuth); +const accounts = db.table(Table.Accounts); +const transactions = db.table(Table.Transactions); +const transactionScripts = db.table(Table.TransactionScripts); +const inputNotes = db.table(Table.InputNotes); +const outputNotes = db.table(Table.OutputNotes); +const notesScripts = db.table(Table.NotesScripts); +const stateSync = db.table(Table.StateSync); +const blockHeaders = db.table(Table.BlockHeaders); +const chainMmrNodes = db.table(Table.ChainMmrNodes); +const greets = db.table(Table.Greet); + +export { + db, + accountCodes, + accountStorages, + accountVaults, + accountAuths, + accounts, + transactions, + transactionScripts, + inputNotes, + outputNotes, + notesScripts, + stateSync, + blockHeaders, + chainMmrNodes, + greets, +}; diff --git a/wasm/js/db/sync.js b/wasm/js/db/sync.js new file mode 100644 index 
000000000..135103675 --- /dev/null +++ b/wasm/js/db/sync.js @@ -0,0 +1,291 @@ +import { + db, + stateSync, + inputNotes, + outputNotes, + transactions, + blockHeaders, + chainMmrNodes, +} from './schema.js'; + +export async function getNoteTags() { + console.log('Getting note tags'); + try { + const record = await stateSync.get(1); // Since id is the primary key and always 1 + if (record) { + let data = null; + if (record.tags.length === 0) { + data = { + tags: JSON.stringify(record.tags) + } + } else { + data = { + tags: record.tags + } + }; + console.log('Note tags: ', data.tags); + return data; + } else { + return null; + } + } catch (error) { + console.error('Error fetching record:', error); + return null; + } +} + +export async function getSyncHeight() { + try { + const record = await stateSync.get(1); // Since id is the primary key and always 1 + if (record) { + let data = { + block_num: record.blockNum + }; + return data; + } else { + return null; + } + } catch (error) { + console.error('Error fetching record:', error); + return null; + } +} + +export async function addNoteTag( + tags +) { + console.log('Adding note tag: ', tags); + try { + await stateSync.update(1, { tags: tags }); + } catch { + console.error("Failed to add note tag: ", err); + throw err; + } +} + +export async function applyStateSync( + blockNum, + nullifiers, + blockHeader, + chainMmrPeaks, + hasClientNotes, + nodeIndices, + nodes, + outputNoteIds, + outputNoteInclusionProofs, + inputNoteIds, + inputNoteInluclusionProofs, + inputeNoteMetadatas, + transactionIds, +) { + return db.transaction('rw', stateSync, inputNotes, outputNotes, transactions, blockHeaders, chainMmrNodes, async (tx) => { + await updateSyncHeight(tx, blockNum); + await updateSpentNotes(tx, nullifiers); + await updateBlockHeader(tx, blockNum, blockHeader, chainMmrPeaks, hasClientNotes); + await updateChainMmrNodes(tx, nodeIndices, nodes); + await updateCommittedNotes(tx, outputNoteIds, outputNoteInclusionProofs, inputNoteIds, inputNoteInluclusionProofs, inputeNoteMetadatas); + await updateCommittedTransactions(tx, blockNum, transactionIds); + }); +} + +async function updateSyncHeight( + tx, + blockNum +) { + try { + await tx.stateSync.update(1, { blockNum: blockNum }); + } catch (error) { + console.error("Failed to update sync height: ", error); + throw error; + } +} + +async function updateSpentNotes( + tx, + nullifiers +) { + try { + // Fetch all notes + const inputNotes = await tx.inputNotes.toArray(); + const outputNotes = await tx.outputNotes.toArray(); + + // Pre-parse all details and store them with their respective note ids for quick access + const parsedInputNotes = inputNotes.map(note => ({ + noteId: note.noteId, + details: JSON.parse(note.details) // Parse the JSON string into an object + })); + + // Iterate through each parsed note and check against the list of nullifiers + for (const note of parsedInputNotes) { + if (nullifiers.includes(note.details.nullifier)) { + // If the nullifier is in the list, update the note's status + await tx.inputNotes.update(note.noteId, { status: 'Consumed' }); + } + } + + // Pre-parse all details and store them with their respective note ids for quick access + const parsedOutputNotes = outputNotes.map(note => ({ + noteId: note.noteId, + details: JSON.parse(note.details) // Parse the JSON string into an object + })); + + // Iterate through each parsed note and check against the list of nullifiers + for (const note of parsedOutputNotes) { + if (nullifiers.includes(note.details.nullifier)) { + // If the 
nullifier is in the list, update the note's status + await tx.outputNotes.update(note.noteId, { status: 'Consumed' }); + } + } + + } catch (error) { + console.error("Error updating input notes:", error); + throw error; + } +} + +async function updateBlockHeader( + tx, + blockNum, + blockHeader, + chainMmrPeaks, + hasClientNotes +) { + try { + const data = { + blockNum: blockNum, + header: blockHeader, + chainMmrPeaks: chainMmrPeaks, + hasClientNotes: hasClientNotes + }; + + await tx.blockHeaders.add(data); + } catch (err) { + console.error("Failed to insert block header: ", err); + throw error; + } +} + +async function updateChainMmrNodes( + tx, + nodeIndices, + nodes +) { + try { + // Check if the arrays are not of the same length + if (nodeIndices.length !== nodes.length) { + throw new Error("nodeIndices and nodes arrays must be of the same length"); + } + + if (nodeIndices.length === 0) { + return; + } + + // Create the updates array with objects matching the structure expected by your IndexedDB schema + const updates = nodeIndices.map((index, i) => ({ + id: index, // Assuming 'index' is the primary key or part of it + node: nodes[i] // Other attributes of the object + })); + + // Perform bulk update or insertion; assumes tx.chainMmrNodes is a valid table reference in a transaction + await tx.chainMmrNodes.bulkAdd(updates); + } catch (err) { + console.error("Failed to update chain mmr nodes: ", err); + throw error; + } +} + +async function updateCommittedNotes( + tx, + outputNoteIds, + outputNoteInclusionProofs, + inputNoteIds, + inputNoteInclusionProofs, + inputNoteMetadatas +) { + console.log("Updating committed notes"); + console.log("Output note ids: ", outputNoteIds); + console.log("Output note inclusion proofs: ", outputNoteInclusionProofs); + console.log('Input note ids: ', inputNoteIds); + console.log('Input note inclusion proofs: ', inputNoteInclusionProofs); + console.log('Input note metadatas: ', inputNoteMetadatas); + try { + // if (outputNoteIds.length === 0 || inputNoteIds.length === 0) { + // return; + // } + + if (outputNoteIds.length !== outputNoteInclusionProofs.length) { + throw new Error("Arrays outputNoteIds and outputNoteInclusionProofs must be of the same length"); + } + + if ( + inputNoteIds.length !== inputNoteInclusionProofs.length && + inputNoteIds.length !== inputNoteMetadatas.length && + inputNoteInclusionProofs.length !== inputNoteMetadatas.length + ) { + console.log('Errored out here'); + throw new Error("Arrays inputNoteIds and inputNoteInclusionProofs and inputNoteMetadatas must be of the same length"); + } + + console.log('output note ids length: ', outputNoteIds.length) + for (let i = 0; i < outputNoteIds.length; i++) { + const noteId = outputNoteIds[i]; + const inclusionProof = outputNoteInclusionProofs[i]; + + // Update output notes + await tx.outputNotes.where({ noteId: noteId }).modify({ + status: 'Committed', + inclusionProof: inclusionProof + }); + } + + console.log('input note ids length: ', inputNoteIds.length); + for (let i = 0; i < inputNoteIds.length; i++) { + const noteId = inputNoteIds[i]; + console.log('Note id: ', noteId); + const inclusionProof = inputNoteInclusionProofs[i]; + console.log('Inclusion proof: ', inclusionProof); + const metadata = inputNoteMetadatas[i]; + console.log('Metadata: ', metadata); + + // Update input notes + await tx.inputNotes.where({ noteId: noteId }).modify({ + status: 'Committed', + inclusionProof: inclusionProof, + metadata: metadata + }); + + console.log('Updated input notes'); + 
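
`applyStateSync` above runs every per-table update (sync height, spent notes, block header, chain MMR nodes, committed notes and transactions) inside a single Dexie read-write transaction, so a failure in any step rolls the whole sync back. A minimal sketch of that atomicity pattern follows, reusing the tables from schema.js with illustrative values.

```javascript
import { db, stateSync, blockHeaders } from './schema.js';

// Sketch only: two writes that either both persist or both roll back.
async function exampleAtomicSyncStep(blockNum, header, peaks) {
    await db.transaction('rw', stateSync, blockHeaders, async (tx) => {
        // Same style as applyStateSync: tables are accessed through `tx`.
        await tx.stateSync.update(1, { blockNum: blockNum });
        await tx.blockHeaders.add({
            blockNum: blockNum,
            header: header,
            chainMmrPeaks: peaks,
            hasClientNotes: false
        });
        // If either call rejects, Dexie aborts the transaction and neither
        // the sync height nor the block header is persisted.
    });
}
```
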
console.log(tx.inputNotes.toArray()); + } + } catch (error) { + console.error("Error updating committed notes:", error); + throw error; + } +} + +async function updateCommittedTransactions( + tx, + blockNum, + transactionIds +) { + try { + if (transactionIds.length === 0) { + return; + } + + // Fetch existing records + const existingRecords = await tx.transactions.where('id').anyOf(transactionIds).toArray(); + + // Create updates by merging existing records with the new values + const updates = existingRecords.map(record => ({ + ...record, // Spread existing fields + commitHeight: blockNum // Update specific field + })); + + // Perform the update + await tx.transactions.bulkPut(updates); + } catch (err) { + console.error("Failed to mark transactions as committed: ", err); + throw err; + } +} \ No newline at end of file diff --git a/wasm/js/db/transactions.js b/wasm/js/db/transactions.js new file mode 100644 index 000000000..4b8b6f73b --- /dev/null +++ b/wasm/js/db/transactions.js @@ -0,0 +1,186 @@ +import { + transactions, + transactionScripts, +} from './schema.js' + +export async function getTransactions( + filter +) { + let transactionRecords; + + try { + if (filter === 'Uncomitted') { + transactionRecords = await transactions.filter(tx => tx.commitHeight === undefined || tx.commitHeight === null).toArray(); + } else { + transactionRecords = await transactions.toArray(); + } + + if (transactionRecords.length === 0) { + return []; + } + + const scriptHashes = transactionRecords.map(transactionRecord => { + return transactionRecord.scriptHash + }); + + const scripts = await transactionScripts.where("scriptHash").anyOf(scriptHashes).toArray(); + + // Create a map of scriptHash to script for quick lookup + const scriptMap = new Map(); + scripts.forEach(script => { + scriptMap.set(script.scriptHash, script.program); + }); + + const processedTransactions = await Promise.all(transactionRecords.map(async transactionRecord => { + let scriptProgramBase64 = null; + + if (transactionRecord.scriptHash) { + const scriptProgram = scriptMap.get(transactionRecord.scriptHash); + + if (scriptProgram) { + let scriptProgramArrayBuffer = await scriptProgram.arrayBuffer(); + let scriptProgramArray = new Uint8Array(scriptProgramArrayBuffer); + scriptProgramBase64 = uint8ArrayToBase64(scriptProgramArray); + } + } + + let outputNotesArrayBuffer = await transactionRecord.outputNotes.arrayBuffer(); + let outputNotesArray = new Uint8Array(outputNotesArrayBuffer); + let outputNotesBase64 = uint8ArrayToBase64(outputNotesArray); + + transactionRecord.outputNotes = outputNotesBase64; + + let data = { + id: transactionRecord.id, + account_id: transactionRecord.accountId, + init_account_state: transactionRecord.initAccountState, + final_account_state: transactionRecord.finalAccountState, + input_notes: transactionRecord.inputNotes, + output_notes: transactionRecord.outputNotes, + script_hash: transactionRecord.scriptHash ? transactionRecord.scriptHash : null, + script_program: scriptProgramBase64, + script_inputs: transactionRecord.scriptInputs ? transactionRecord.scriptInputs : null, + block_num: transactionRecord.blockNum, + commit_height: transactionRecord.commitHeight ? 
transactionRecord.commitHeight : null + } + + return data; + })); + + return processedTransactions + } catch { + console.error("Failed to get transactions: ", err); + throw err; + } +} + +export async function insertTransactionScript( + scriptHash, + scriptProgram +) { + try { + // check if script hash already exists + let record = await transactionScripts.where("scriptHash").equals(scriptHash).first(); + + if (record) { + return; + } + + if (!scriptHash) { + throw new Error("Script hash must be provided"); + } + + let scriptHashArray = new Uint8Array(scriptHash); + let scriptHashBase64 = uint8ArrayToBase64(scriptHashArray); + let scriptProgramBlob = null; + + if (scriptProgram ) { + scriptProgramBlob = new Blob([new Uint8Array(scriptProgram)]); + } + + const data = { + scriptHash: scriptHashBase64, + program: scriptProgramBlob + } + + await transactionScripts.add(data); + } catch (error) { + // Check if the error is because the record already exists + if (error.name === 'ConstraintError') { + } else { + // Re-throw the error if it's not a constraint error + throw error; + } + } +} + +export async function insertProvenTransactionData( + transactionId, + accountId, + initAccountState, + finalAccountState, + inputNotes, + outputNotes, + scriptHash, + scriptInputs, + blockNum, + committed +) { + try { + let scriptHashBase64 = null; + let outputNotesBlob = new Blob([new Uint8Array(outputNotes)]); + if (scriptHash !== null) { + let scriptHashArray = new Uint8Array(scriptHash); + scriptHashBase64 = uint8ArrayToBase64(scriptHashArray); + } + + const data = { + id: transactionId, + accountId: accountId, + initAccountState: initAccountState, + finalAccountState: finalAccountState, + inputNotes: inputNotes, + outputNotes: outputNotesBlob, + scriptHash: scriptHashBase64, + scriptInputs: scriptInputs ? scriptInputs : null, + blockNum: blockNum, + commitHeight: committed ? 
committed : null + } + + await transactions.add(data); + } catch (err) { + console.error("Failed to insert proven transaction data: ", err); + throw err; + } +} + +export async function markTransactionsAsCommitted( + blockNum, + transactionIds +) { + try { + if (transactionIds.length === 0) { + return; + } + + // Fetch existing records + const existingRecords = await tx.transactions.where('id').anyOf(transactionIds).toArray(); + + // Create updates by merging existing records with the new values + const updates = existingRecords.map(record => ({ + ...record, // Spread existing fields + commitHeight: blockNum // Update specific field + })); + + // Perform the update + await tx.transactions.bulkPut(updates); + } catch (err) { + console.error("Failed to mark transactions as committed: ", err); + throw err; + } +} + +function uint8ArrayToBase64(bytes) { + const binary = bytes.reduce((acc, byte) => acc + String.fromCharCode(byte), ''); + return btoa(binary); +} \ No newline at end of file diff --git a/wasm/js/index.js b/wasm/js/index.js new file mode 100644 index 000000000..8253837c2 --- /dev/null +++ b/wasm/js/index.js @@ -0,0 +1,13 @@ +import wasm from "../dist/wasm.js"; + +const { + WebClient +} = await wasm({ + importHook: () => { + return new URL("assets/miden_wasm.wasm", import.meta.url); + }, +}); + +export { + WebClient, +}; \ No newline at end of file diff --git a/wasm/js/types/index.d.ts b/wasm/js/types/index.d.ts new file mode 100644 index 000000000..b45c840d8 --- /dev/null +++ b/wasm/js/types/index.d.ts @@ -0,0 +1,10 @@ +export { + WebClient, + // Account models + AssetInfo, + SerializedAccount, + SerializedAccountStub, + // Transaction models + NewTransactionResult, + NewSwapTransactionResult +} from "./crates/miden_wasm"; \ No newline at end of file diff --git a/wasm/js/wasm.js b/wasm/js/wasm.js new file mode 100644 index 000000000..5c4a23b42 --- /dev/null +++ b/wasm/js/wasm.js @@ -0,0 +1 @@ +export { default } from "../Cargo.toml"; \ No newline at end of file diff --git a/wasm/js/web-rpc-client.js b/wasm/js/web-rpc-client.js new file mode 100644 index 000000000..7e7894046 --- /dev/null +++ b/wasm/js/web-rpc-client.js @@ -0,0 +1,8 @@ +export async function testRpc(endpoint) { + try { + console.log(`Calling ${endpoint}`); + await fetch(endpoint); + } catch (error) { + console.log('Failed to call RPC endpoint'); + } +} \ No newline at end of file diff --git a/wasm/miden-client.toml b/wasm/miden-client.toml new file mode 100644 index 000000000..5971457cb --- /dev/null +++ b/wasm/miden-client.toml @@ -0,0 +1,11 @@ +## USAGE: +## ================================================================================================ +# [rpc]: Settings for the RPC client used to communicate with the node +# - endpoint: tuple indicating the protocol (http, https), the host, and the port where the node is listening. 
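
For a quick manual check that the endpoint configured here is reachable from the browser, the `testRpc` helper from wasm/js/web-rpc-client.js can be pointed at the same URL. The value below mirrors the default `[rpc]` endpoint in this config; the import path (relative to the wasm/ package root) is an assumption.

```javascript
import { testRpc } from "./js/web-rpc-client.js";

// Fires a plain fetch against the node's RPC endpoint and logs on failure;
// protocol, host, and port mirror the [rpc] section of miden-client.toml.
await testRpc("http://localhost:57291");
```
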
+# [store]: Settings for the client's Store +# - database_filepath: path for the sqlite's database +[rpc] +endpoint = { protocol = "http", host = "localhost", port = 57291 } + +[store] +database_filepath = "store.sqlite3" \ No newline at end of file diff --git a/wasm/package.json b/wasm/package.json new file mode 100644 index 000000000..27d7a52cc --- /dev/null +++ b/wasm/package.json @@ -0,0 +1,34 @@ +{ + "name": "@demox-labs/miden-sdk", + "version": "0.0.3", + "description": "Polygon Miden Wasm SDK", + "collaborators": [ + "Polygon Miden", + "Demox Labs " + ], + "type": "module", + "main": "./dist/index.js", + "browser": "./dist/index.js", + "types": "./dist/index.d.ts", + "exports": { + ".": "./dist/index.js" + }, + "files": [ + "dist", + "../LICENSE.md" + ], + "scripts": { + "build": "rimraf dist && rollup -c rollup.config.js && cpr js/types dist && rimraf dist/wasm*" + }, + "devDependencies": { + "@rollup/plugin-commonjs": "^25.0.7", + "@rollup/plugin-node-resolve": "^15.2.3", + "@wasm-tool/rollup-plugin-rust": "^2.4.5", + "cpr": "^3.0.1", + "rimraf": "^5.0.1", + "rollup": "^3.27.2" + }, + "dependencies": { + "dexie": "^4.0.1" + } +} diff --git a/wasm/rollup.config.js b/wasm/rollup.config.js new file mode 100644 index 000000000..0c3a1bd00 --- /dev/null +++ b/wasm/rollup.config.js @@ -0,0 +1,46 @@ +import rust from "@wasm-tool/rollup-plugin-rust"; +import resolve from "@rollup/plugin-node-resolve"; +import commonjs from "@rollup/plugin-commonjs"; + +export default [ + { + input: { + wasm: "./js/wasm.js", + }, + output: { + dir: `dist`, + format: "es", + sourcemap: true, + assetFileNames: "assets/[name][extname]", + }, + plugins: [ + rust({ + // debug: true, + cargoArgs: [ + // This enables multi-threading + // "--features", "testing", // Add this line to include the concurrent feature + // "--config", `build.rustflags=["-C", "target-feature=+atomics,+bulk-memory,+mutable-globals", "-C", "link-arg=--max-memory=4294967296"]`, + "--config", `build.rustflags=["-C", "target-feature=+atomics,+bulk-memory,+mutable-globals", "-C", "link-arg=--max-memory=4294967296", "-C", "debuginfo=2"]`, + "--no-default-features", + "-Z", "build-std=panic_abort,std", + ], + + experimental: { + typescriptDeclarationDir: "dist/crates", + }, + }), + resolve(), // Add this + commonjs(), // And this, if you have CommonJS modules + ], + }, + { + input: { + index: "./js/index.js", + }, + output: { + dir: `dist`, + format: "es", + sourcemap: true, + }, + } +]; diff --git a/wasm/src/lib.rs b/wasm/src/lib.rs new file mode 100644 index 000000000..88baff2f1 --- /dev/null +++ b/wasm/src/lib.rs @@ -0,0 +1,9 @@ +extern crate alloc; + +pub mod web_client; + +use wasm_bindgen::prelude::*; +use wasm_bindgen_futures::*; +use web_sys::console; +use serde_wasm_bindgen::Serializer; +use serde::Serialize; \ No newline at end of file diff --git a/wasm/src/web_client/account.rs b/wasm/src/web_client/account.rs new file mode 100644 index 000000000..e2c92b2a7 --- /dev/null +++ b/wasm/src/web_client/account.rs @@ -0,0 +1,156 @@ +use super::WebClient; +use crate::web_client::models::accounts::SerializedAccountStub; +use crate::web_client::models::accounts::AssetInfo; +use crate::web_client::models::accounts::SerializedAccount; + +use base64::encode; +use miden_objects::{accounts::{AccountData, AccountId}, assets::{self, Asset, TokenSymbol}, notes::NoteId}; +use miden_tx::utils::{Deserializable, Serializable}; + +use miden_client::client::accounts; +use miden_client::client::rpc::NodeRpcClient; +use miden_client::store::Store; + +use 
miden_objects::accounts::AccountStub; + +use serde::{Serialize, Deserialize}; +use serde_wasm_bindgen::from_value; +use wasm_bindgen::prelude::*; +use web_sys::console; +use std::panic; +use serde_wasm_bindgen::Serializer; +use console_error_panic_hook; + +#[derive(Serialize, Deserialize)] +#[serde(tag = "type")] +pub enum AccountTemplate { + BasicImmutable { + storage_mode: String // AccountStorageMode + }, + BasicMutable { + storage_mode: String // AccountStorageMode + }, + FungibleFaucet { + token_symbol: String, + decimals: String, // u8 + max_supply: String, // u64 + storage_mode: String + }, + NonFungibleFaucet { + storage_mode: String + }, +} + +// Account functions to be exposed to the JavaScript environment +// For now, just a simple function that calls an underlying store method +// and inserts a string to the indexedDB store. Also tests out a simple +// RPC call. +#[wasm_bindgen] +impl WebClient { + pub async fn get_accounts( + &mut self + ) -> Result { + console_error_panic_hook::set_once(); + if let Some(client) = self.get_mut_inner() { + let account_tuples = client.get_account_stubs().await.unwrap(); + let accounts: Vec = account_tuples.into_iter().map(|(account, _)| { + SerializedAccountStub::new( + account.id().to_string(), + account.nonce().to_string(), + account.vault_root().to_string(), + account.storage_root().to_string(), + account.code_root().to_string(), + format!("{:?}", account.id().account_type()), + account.id().is_faucet(), + account.id().is_regular_account(), + account.id().is_on_chain() + ) + }).collect(); + + let accounts_as_js_value = serde_wasm_bindgen::to_value(&accounts) + .unwrap_or_else(|_| wasm_bindgen::throw_val(JsValue::from_str("Serialization error"))); + + Ok(accounts_as_js_value) + } else { + Err(JsValue::from_str("Client not initialized")) + } + } + + pub async fn get_account( + &mut self, + account_id: String + ) -> Result { + web_sys::console::log_1(&JsValue::from_str("get_account called")); + if let Some(client) = self.get_mut_inner() { + let native_account_id = AccountId::from_hex(&account_id).unwrap(); + + let result = client.get_account(native_account_id).await.unwrap(); + let asset_infos: Vec = result.0.vault().assets().map(|asset| { + match asset { + assets::Asset::Fungible(fungible) => AssetInfo::new( + asset.is_fungible(), + fungible.amount().to_string(), + asset.faucet_id().to_string(), + ), + assets::Asset::NonFungible(non_fungible) => AssetInfo::new( + asset.is_fungible(), + "0".to_string(), + asset.faucet_id().to_string(), + ) + } + }).collect(); + let account_stub: AccountStub = (&result.0).into(); + + Ok(SerializedAccount::new( + result.0.id().to_string(), + result.0.nonce().to_string(), + account_stub.vault_root().to_string(), + account_stub.storage_root().to_string(), + account_stub.code_root().to_string(), + format!("{:?}", result.0.id().account_type()), + result.0.id().is_faucet(), + result.0.id().is_regular_account(), + result.0.id().is_on_chain(), + asset_infos + )) + // serde_wasm_bindgen::to_value(&result.0.id().to_string()) + // .map_err(|e| JsValue::from_str(&e.to_string())) + } else { + Err(JsValue::from_str("Client not initialized")) + } + } + + pub fn get_account_auth_by_pub_key( + &mut self, + pub_key_bytes: JsValue + ) -> Result { + use miden_objects::Word; + web_sys::console::log_1(&JsValue::from_str("get_account_auth_by_pub_key called")); + if let Some(client) = self.get_mut_inner() { + let pub_key_bytes_result: Vec = from_value(pub_key_bytes).unwrap(); + let pub_key_as_word = 
Word::read_from_bytes(pub_key_bytes_result.as_slice()).unwrap(); + + let result = client.store().get_account_auth_by_pub_key(pub_key_as_word).unwrap(); + + Ok(JsValue::from_str("Okay, it worked")) + } else { + Err(JsValue::from_str("Client not initialized")) + } + } + + pub async fn fetch_and_cache_account_auth_by_pub_key( + &mut self, + account_id: String + ) -> Result { + use miden_objects::Word; + web_sys::console::log_1(&JsValue::from_str("fetch_and_cache_account_auth_by_pub_key called")); + if let Some(client) = self.get_mut_inner() { + + let result = client.store().fetch_and_cache_account_auth_by_pub_key(account_id).await.unwrap(); + + Ok(JsValue::from_str("Okay, it worked")) + } else { + Err(JsValue::from_str("Client not initialized")) + } + } +} \ No newline at end of file diff --git a/wasm/src/web_client/export.rs b/wasm/src/web_client/export.rs new file mode 100644 index 000000000..b9d90758d --- /dev/null +++ b/wasm/src/web_client/export.rs @@ -0,0 +1,50 @@ +use wasm_bindgen::*; +use wasm_bindgen::prelude::*; + +use miden_client::store::{InputNoteRecord, NoteFilter}; +use miden_objects::{ + utils::Serializable, + Digest +}; + +use crate::web_client::WebClient; + +use web_sys::console; + +#[wasm_bindgen] +impl WebClient { + pub async fn export_note( + &mut self, + note_id: String + ) -> Result { + web_sys::console::log_1(&JsValue::from_str("export_note called")); + if let Some(client) = self.get_mut_inner() { + let note_id = Digest::try_from(note_id) + .map_err(|err| format!("Failed to parse input note id: {}", err))? + .into(); + web_sys::console::log_1(&JsValue::from_str("export_note 1")); + + let output_note = client + .get_output_notes(NoteFilter::Unique(note_id)).await.unwrap() + .pop().unwrap(); + web_sys::console::log_1(&JsValue::from_str("export_note 2")); + + // Convert output note into InputNoteRecord before exporting + let input_note: InputNoteRecord = output_note + .try_into() + .map_err(|_err| format!("Can't export note with ID {}", note_id.to_hex()))?; + web_sys::console::log_1(&JsValue::from_str("export_note 3")); + + let input_note_bytes = input_note.to_bytes(); + web_sys::console::log_1(&JsValue::from_str("export_note 4")); + + let serialized_input_note_bytes = serde_wasm_bindgen::to_value(&input_note_bytes) + .unwrap_or_else(|_| wasm_bindgen::throw_val(JsValue::from_str("Serialization error"))); + web_sys::console::log_1(&JsValue::from_str("export_note 5")); + + Ok(serialized_input_note_bytes) + } else { + Err(JsValue::from_str("Client not initialized")) + } + } +} \ No newline at end of file diff --git a/wasm/src/web_client/import.rs b/wasm/src/web_client/import.rs new file mode 100644 index 000000000..403bd82ed --- /dev/null +++ b/wasm/src/web_client/import.rs @@ -0,0 +1,72 @@ +use serde_wasm_bindgen::from_value; +use wasm_bindgen::*; +use wasm_bindgen::prelude::*; + +use miden_objects::{ + accounts::AccountData, + utils::Deserializable +}; +use miden_client::store::InputNoteRecord; + +use crate::web_client::WebClient; + +use web_sys::console; +use base64::decode; + +#[wasm_bindgen] +impl WebClient { + pub async fn import_account( + &mut self, + account_bytes: JsValue + ) -> Result { + if let Some(client) = self.get_mut_inner() { + let account_bytes_result: Vec = from_value(account_bytes).unwrap(); + let account_data = AccountData::read_from_bytes(&account_bytes_result).map_err(|err| err.to_string())?; + let account_id = account_data.account.id().to_string(); + + match client.import_account(account_data).await { + Ok(_) => { + let message = format!("Imported 
account with ID: {}", account_id); + Ok(JsValue::from_str(&message)) + }, + Err(err) => { + let error_message = format!("Failed to import account: {:?}", err); + Err(JsValue::from_str(&error_message)) + } + } + } else { + Err(JsValue::from_str("Client not initialized")) + } + } + + pub async fn import_note( + &mut self, + note_bytes: JsValue, + verify: bool + ) -> Result { + web_sys::console::log_1(&JsValue::from_str("import_note called")); + if let Some(client) = self.get_mut_inner() { + let note_bytes_result: Vec = from_value(note_bytes).unwrap(); + // let note_bytes_result = decode(note_bytes).unwrap(); + web_sys::console::log_1(&JsValue::from_str("import_note 2")); + let input_note_record = + InputNoteRecord::read_from_bytes(¬e_bytes_result).map_err(|err| err.to_string())?; + web_sys::console::log_1(&JsValue::from_str("import_note 3")); + let note_id = input_note_record.id(); + web_sys::console::log_1(&JsValue::from_str("import_note 4")); + + match client.import_input_note(input_note_record, verify).await { + Ok(_) => { + Ok(JsValue::from_str(note_id.to_string().as_str())) + }, + Err(err) => { + let error_message = format!("Failed to import note: {:?}", err); + Err(JsValue::from_str(&error_message)) + + } + } + } else { + Err(JsValue::from_str("Client not initialized")) + } + } +} \ No newline at end of file diff --git a/wasm/src/web_client/mod.rs b/wasm/src/web_client/mod.rs new file mode 100644 index 000000000..685b59cce --- /dev/null +++ b/wasm/src/web_client/mod.rs @@ -0,0 +1,77 @@ +use alloc::rc::Rc; +use wasm_bindgen::prelude::*; +use miden_objects::{ utils::Deserializable, crypto::rand::RpoRandomCoin }; +use miden_tx::TransactionAuthenticator; +use miden_client::client::{Client, get_random_coin, store_authenticator::StoreAuthenticator}; + +pub mod account; +pub mod export; +pub mod import; +pub mod new_account; +pub mod new_transactions; +pub mod notes; +pub mod sync; +pub mod transactions; +pub mod tags; +pub mod store; +pub mod rpc; +pub mod models; + +use store::WebStore; +use rpc::WebRpcClient; + +// My strategy here is to create a WebClient struct that has methods exposed +// to the browser environment. When these methods are called, they will +// use the inner client to execute the proper code and store methods. 
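
From the JavaScript side, the strategy described in the comment below looks roughly like this sketch: construct the exported `WebClient`, initialize the inner client with `create_client`, then call the exposed methods. The package name comes from wasm/package.json and the argument shapes mirror the Rust signatures in this diff, but the concrete values are illustrative only.

```javascript
import { WebClient } from "@demox-labs/miden-sdk";

const webClient = new WebClient();
// create_client builds the WebStore/WebRpcClient-backed inner client;
// omitting the URL falls back to "http://localhost:57291".
await webClient.create_client("http://localhost:57291");

// wasm-bindgen exposes the snake_case method names as-is.
const walletId = await webClient.new_wallet("OffChain", true); // storage type, mutable code
const accounts = await webClient.get_accounts();               // serialized account stubs
console.log("new wallet:", walletId, "accounts:", accounts);
```
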
+ +#[wasm_bindgen] +pub struct WebClient { + inner: Option>>, + tx_authenticator: Option>, +} + +#[wasm_bindgen] +impl WebClient { + #[wasm_bindgen(constructor)] + pub fn new() -> Self { + WebClient { inner: None, tx_authenticator: None } + } + + // Getter for the inner client, used internally for operations + pub(crate) fn get_mut_inner(&mut self) -> Option<&mut Client>> { + self.inner.as_mut() + } + + // Exposed method to JS to create an internal client + pub async fn create_client( + &mut self, + node_url: Option, + ) -> Result { + web_sys::console::log_1(&JsValue::from_str("create_client called")); + let rng = get_random_coin(); + let web_store: WebStore = WebStore::new().await.map_err(|_| JsValue::from_str("Failed to initialize WebStore"))?; + let web_store = Rc::new(web_store); + let authenticator: StoreAuthenticator = StoreAuthenticator::new_with_rng(web_store.clone(), rng); + let web_rpc_client = WebRpcClient::new(&node_url.unwrap_or_else(|| "http://localhost:57291".to_string())); + + // self.tx_authenticator = Some(authenticator); + self.inner = Some(Client::new(web_rpc_client, rng, web_store, authenticator, false)); + + Ok(JsValue::from_str("Client created successfully")) + } + + // pub async fn set_account_auth_by_pub_key( + // &mut self, + // pub_key: String + // ) -> Result { + // web_sys::console::log_1(&JsValue::from_str("set_account_auth_by_pub_key called")); + // if let Some(client) = self.get_mut_inner() { + // let pub_key = hex::decode(pub_key).map_err(|_| JsValue::from_str("Failed to decode public key"))?; + // let pub_key = miden_objects::Word::read_from_bytes(&pub_key).unwrap(); + // self.tx_authenticator.as_mut().unwrap().set_auth_secret_key(pub_key).await.map_err(|_| JsValue::from_str("Failed to set auth secret key"))?; + // Ok(JsValue::from_str("Auth secret key set successfully")) + // } else { + // Err(JsValue::from_str("Client not initialized")) + // } + // } +} \ No newline at end of file diff --git a/wasm/src/web_client/models/accounts.rs b/wasm/src/web_client/models/accounts.rs new file mode 100644 index 000000000..6a44deed3 --- /dev/null +++ b/wasm/src/web_client/models/accounts.rs @@ -0,0 +1,217 @@ +use wasm_bindgen::prelude::*; +use serde::{Serialize, Deserialize}; +use serde_wasm_bindgen::to_value; + +#[wasm_bindgen] +#[derive(Serialize, Deserialize)] +pub struct AssetInfo { + is_fungible: bool, + amount: String, + faucet_id: String +} + +#[wasm_bindgen] +impl AssetInfo { + pub fn new(is_fungible: bool, amount: String, faucet_id: String) -> AssetInfo { + AssetInfo { + is_fungible, + amount, + faucet_id + } + } + + #[wasm_bindgen(getter)] + pub fn is_fungible(&self) -> bool { + self.is_fungible.clone() + } + + #[wasm_bindgen(getter)] + pub fn amount(&self) -> String { + self.amount.clone() + } + + #[wasm_bindgen(getter)] + pub fn faucet_id(&self) -> String { + self.faucet_id.clone() + } +} + +#[wasm_bindgen] +#[derive(Serialize, Deserialize)] +pub struct SerializedAccount { + id: String, + nonce: String, + vault_root: String, + storage_root: String, + code_root: String, + account_type: String, + is_faucet: bool, + is_regular_account: bool, + is_on_chain: bool, + assets: Vec +} + +#[wasm_bindgen] +impl SerializedAccount { + pub fn new( + id: String, + nonce: String, + vault_root: String, + storage_root: String, + code_root: String, + account_type: String, + is_faucet: bool, + is_regular_account: bool, + is_on_chain: bool, + assets: Vec + ) -> SerializedAccount { + SerializedAccount { + id, + nonce, + vault_root, + storage_root, + code_root, + account_type, + 
is_faucet, + is_regular_account, + is_on_chain, + assets + } + } + + #[wasm_bindgen(getter)] + pub fn id(&self) -> String { + self.id.clone() + } + + #[wasm_bindgen(getter)] + pub fn nonce(&self) -> String { + self.nonce.clone() + } + + #[wasm_bindgen(getter)] + pub fn vault_root(&self) -> String { + self.vault_root.clone() + } + + #[wasm_bindgen(getter)] + pub fn storage_root(&self) -> String { + self.storage_root.clone() + } + + #[wasm_bindgen(getter)] + pub fn code_root(&self) -> String { + self.code_root.clone() + } + + #[wasm_bindgen(getter)] + pub fn account_type(&self) -> String { + self.account_type.clone() + } + + #[wasm_bindgen(getter)] + pub fn is_faucet(&self) -> bool { + self.is_faucet.clone() + } + + #[wasm_bindgen(getter)] + pub fn is_regular_account(&self) -> bool { + self.is_regular_account.clone() + } + + #[wasm_bindgen(getter)] + pub fn is_on_chain(&self) -> bool { + self.is_on_chain.clone() + } + + #[wasm_bindgen(getter)] + pub fn assets(&self) -> JsValue { + to_value(&self.assets).unwrap() + } +} + +#[wasm_bindgen] +#[derive(Serialize, Deserialize)] +pub struct SerializedAccountStub { + id: String, + nonce: String, + vault_root: String, + storage_root: String, + code_root: String, + account_type: String, + is_faucet: bool, + is_regular_account: bool, + is_on_chain: bool +} + +#[wasm_bindgen] +impl SerializedAccountStub { + pub fn new( + id: String, + nonce: String, + vault_root: String, + storage_root: String, + code_root: String, + account_type: String, + is_faucet: bool, + is_regular_account: bool, + is_on_chain: bool + ) -> SerializedAccountStub { + SerializedAccountStub { + id, + nonce, + vault_root, + storage_root, + code_root, + account_type, + is_faucet, + is_regular_account, + is_on_chain + } + } + + #[wasm_bindgen(getter)] + pub fn id(&self) -> String { + self.id.clone() + } + + #[wasm_bindgen(getter)] + pub fn nonce(&self) -> String { + self.nonce.clone() + } + + #[wasm_bindgen(getter)] + pub fn vault_root(&self) -> String { + self.vault_root.clone() + } + + #[wasm_bindgen(getter)] + pub fn storage_root(&self) -> String { + self.storage_root.clone() + } + + #[wasm_bindgen(getter)] + pub fn code_root(&self) -> String { + self.code_root.clone() + } + + #[wasm_bindgen(getter)] + pub fn account_type(&self) -> String { + self.account_type.clone() + } + + #[wasm_bindgen(getter)] + pub fn is_faucet(&self) -> bool { + self.is_faucet.clone() + } + + #[wasm_bindgen(getter)] + pub fn is_regular_account(&self) -> bool { + self.is_regular_account.clone() + } + + #[wasm_bindgen(getter)] + pub fn is_on_chain(&self) -> bool { + self.is_on_chain.clone() + } +} diff --git a/wasm/src/web_client/models/mod.rs b/wasm/src/web_client/models/mod.rs new file mode 100644 index 000000000..8718233fb --- /dev/null +++ b/wasm/src/web_client/models/mod.rs @@ -0,0 +1,2 @@ +pub mod accounts; +pub mod transactions; \ No newline at end of file diff --git a/wasm/src/web_client/models/transactions.rs b/wasm/src/web_client/models/transactions.rs new file mode 100644 index 000000000..b26ce2f73 --- /dev/null +++ b/wasm/src/web_client/models/transactions.rs @@ -0,0 +1,80 @@ +use wasm_bindgen::prelude::*; +use wasm_bindgen::JsValue; + +#[wasm_bindgen] +pub struct NewTransactionResult { + transaction_id: String, + created_note_ids: Vec, +} + +#[wasm_bindgen] +impl NewTransactionResult { + pub fn new( + transaction_id: String, + created_note_ids: Vec + ) -> NewTransactionResult { + NewTransactionResult { + transaction_id, + created_note_ids + } + } + + #[wasm_bindgen(getter)] + pub fn 
transaction_id(&self) -> String { + self.transaction_id.clone() + } + + #[wasm_bindgen(getter)] + pub fn created_note_ids(&self) -> JsValue { + JsValue::from_serde(&self.created_note_ids).unwrap() + } +} + +#[wasm_bindgen] +pub struct NewSwapTransactionResult { + transaction_id: String, + expected_output_note_ids: Vec, + expected_partial_note_ids: Vec, + payback_note_tag: String, +} + +#[wasm_bindgen] +impl NewSwapTransactionResult { + pub fn new( + transaction_id: String, + expected_output_note_ids: Vec, + expected_partial_note_ids: Vec, + payback_note_tag: Option, + ) -> NewSwapTransactionResult { + NewSwapTransactionResult { + transaction_id, + expected_output_note_ids, + expected_partial_note_ids, + payback_note_tag: payback_note_tag.unwrap_or_else(|| "".to_string()), // Use default value if None + } + } + + pub fn setNoteTag(&mut self, payback_note_tag: String) { + self.payback_note_tag = payback_note_tag + } + + #[wasm_bindgen(getter)] + pub fn transaction_id(&self) -> String { + self.transaction_id.clone() + } + + #[wasm_bindgen(getter)] + pub fn expected_output_note_ids(&self) -> JsValue { + JsValue::from_serde(&self.expected_output_note_ids).unwrap() + } + + #[wasm_bindgen(getter)] + pub fn expected_partial_note_ids(&self) -> JsValue { + JsValue::from_serde(&self.expected_partial_note_ids).unwrap() + } + + #[wasm_bindgen(getter)] + pub fn payback_note_tag(&self) -> String { + self.payback_note_tag.clone() + } +} \ No newline at end of file diff --git a/wasm/src/web_client/new_account.rs b/wasm/src/web_client/new_account.rs new file mode 100644 index 000000000..846136009 --- /dev/null +++ b/wasm/src/web_client/new_account.rs @@ -0,0 +1,87 @@ +use wasm_bindgen::*; +use wasm_bindgen::prelude::*; +use super::WebClient; + +use miden_objects::{accounts::AccountStorageType, assets::TokenSymbol}; +use miden_client::client::accounts::AccountTemplate; + +#[wasm_bindgen] +impl WebClient { + pub async fn new_wallet( + &mut self, + storage_type: String, + mutable: bool + ) -> Result { + web_sys::console::log_1(&JsValue::from_str("new_wallet called")); + if let Some(client) = self.get_mut_inner() { + let client_template = AccountTemplate::BasicWallet { + mutable_code: mutable, + storage_type: match storage_type.as_str() { + "OffChain" => AccountStorageType::OffChain, + "OnChain" => AccountStorageType::OnChain, + _ => return Err(JsValue::from_str("Invalid storage mode")) + }, + }; + + match client.new_account(client_template).await { + Ok((account, _)) => { + // Create a struct or tuple to hold both values + // Convert directly to JsValue + serde_wasm_bindgen::to_value(&account.id().to_string()) + .map_err(|e| JsValue::from_str(&e.to_string())) + }, + Err(err) => { + let error_message = format!("Failed to create new account: {:?}", err); + Err(JsValue::from_str(&error_message)) + } + } + } else { + Err(JsValue::from_str("Client not initialized")) + } + } + + pub async fn new_faucet( + &mut self, + storage_type: String, + non_fungible: bool, + token_symbol: String, + decimals: String, + max_supply: String + ) -> Result { + web_sys::console::log_1(&JsValue::from_str("new_faucet called")); + if non_fungible { + return Err(JsValue::from_str("Non-fungible faucets are not supported yet")); + } + + if let Some(client) = self.get_mut_inner() { + let client_template = AccountTemplate::FungibleFaucet { + token_symbol: TokenSymbol::new(&token_symbol) + .map_err(|e| JsValue::from_str(&e.to_string()))?, + decimals: decimals.parse::() + .map_err(|e| JsValue::from_str(&e.to_string()))?, + max_supply: 
max_supply.parse::() + .map_err(|e| JsValue::from_str(&e.to_string()))?, + storage_type: match storage_type.as_str() { // Note: Fixed typo in variable name + "OffChain" => AccountStorageType::OffChain, + "OnChain" => AccountStorageType::OnChain, + _ => return Err(JsValue::from_str("Invalid storage mode")), + }, + }; + + match client.new_account(client_template).await { + Ok((account, _)) => { + // Create a struct or tuple to hold both values + // Convert directly to JsValue + serde_wasm_bindgen::to_value(&account.id().to_string()) + .map_err(|e| JsValue::from_str(&e.to_string())) + }, + Err(err) => { + let error_message = format!("Failed to create new account: {:?}", err); + Err(JsValue::from_str(&error_message)) + } + } + } else { + Err(JsValue::from_str("Client not initialized")) + } + } +} \ No newline at end of file diff --git a/wasm/src/web_client/new_transactions.rs b/wasm/src/web_client/new_transactions.rs new file mode 100644 index 000000000..8a93df2bc --- /dev/null +++ b/wasm/src/web_client/new_transactions.rs @@ -0,0 +1,254 @@ +use wasm_bindgen::*; +use wasm_bindgen::prelude::*; + +use miden_objects::{ + accounts::AccountId, + assets::{ + FungibleAsset, + Asset::Fungible + }, + notes::{NoteId, NoteType as MidenNoteType} +}; +use miden_client::client::{ + build_swap_tag, + get_input_note_with_id_prefix, + transactions::transaction_request::{PaymentTransactionData, SwapTransactionData, TransactionTemplate} + }; + +use super::WebClient; +use crate::web_client::models::transactions::{NewTransactionResult, NewSwapTransactionResult}; + +#[wasm_bindgen] +impl WebClient { + pub async fn new_mint_transaction( + &mut self, + target_account_id: String, + faucet_id: String, + note_type: String, + amount: String, + ) -> Result { + web_sys::console::log_1(&JsValue::from_str("new_mint_transaction called")); + if let Some(client) = self.get_mut_inner() { + // log all inputs + web_sys::console::log_1(&JsValue::from_str("new_mint_transaction 1")); + web_sys::console::log_1(&JsValue::from_str(&target_account_id)); + web_sys::console::log_1(&JsValue::from_str(&faucet_id)); + web_sys::console::log_1(&JsValue::from_str(¬e_type)); + web_sys::console::log_1(&JsValue::from_str(&amount)); + let target_account_id = AccountId::from_hex(&target_account_id).unwrap(); + let faucet_id = AccountId::from_hex(&faucet_id).unwrap(); + let amount_as_u64: u64 = amount.parse::().map_err(|err| err.to_string())?; + let fungible_asset = FungibleAsset::new(faucet_id, amount_as_u64).map_err(|err| err.to_string())?; + let note_type = match note_type.as_str() { + "Public" => MidenNoteType::Public, + "Private" => MidenNoteType::OffChain, + _ => MidenNoteType::OffChain + }; + + let mint_transaction_template = TransactionTemplate::MintFungibleAsset( + fungible_asset, + target_account_id, + note_type + ); + + web_sys::console::log_1(&JsValue::from_str("new_mint_transaction 2")); + + let mint_transaction_request = client.build_transaction_request(mint_transaction_template.clone()).await.unwrap(); + web_sys::console::log_1(&JsValue::from_str("new_mint_transaction 3")); + // log the mint_transaction_request account id + web_sys::console::log_1(&JsValue::from_str(&mint_transaction_request.account_id().to_string())); + // log the mint_transaction_request expected_output_notes + web_sys::console::log_1(&JsValue::from_str(&mint_transaction_request.expected_output_notes().iter().map(|note| note.id().to_string()).collect::>().join(", "))); + let mint_transaction_execution_result = 
client.new_transaction(mint_transaction_request).await.unwrap(); + web_sys::console::log_1(&JsValue::from_str("new_mint_transaction 4")); + let result = NewTransactionResult::new( + mint_transaction_execution_result.executed_transaction().id().to_string(), + mint_transaction_execution_result.created_notes().iter().map(|note| note.id().to_string()).collect() + ); + web_sys::console::log_1(&JsValue::from_str("new_mint_transaction 5")); + let proven_transaction = client.prove_transaction(mint_transaction_execution_result.executed_transaction().clone()).unwrap(); + web_sys::console::log_1(&JsValue::from_str("new_mint_transaction 6")); + client.submit_transaction(mint_transaction_execution_result, proven_transaction).await.unwrap(); + web_sys::console::log_1(&JsValue::from_str("new_mint_transaction 7")); + + Ok(result) + } else { + Err(JsValue::from_str("Client not initialized")) + } + } + + pub async fn new_send_transaction( + &mut self, + sender_account_id: String, + target_account_id: String, + faucet_id: String, + note_type: String, + amount: String, + recall_height: Option + ) -> Result { + web_sys::console::log_1(&JsValue::from_str("new_send_transaction called")); + if let Some(client) = self.get_mut_inner() { + let sender_account_id = AccountId::from_hex(&sender_account_id).unwrap(); + let target_account_id = AccountId::from_hex(&target_account_id).unwrap(); + + let faucet_id = AccountId::from_hex(&faucet_id).unwrap(); + let amount_as_u64: u64 = amount.parse::().map_err(|err| err.to_string())?; + let fungible_asset = FungibleAsset::new(faucet_id, amount_as_u64) + .map_err(|err| err.to_string())? + .into(); + + let note_type = match note_type.as_str() { + "Public" => MidenNoteType::Public, + "Private" => MidenNoteType::OffChain, + _ => MidenNoteType::OffChain + }; + let payment_transaction = PaymentTransactionData::new(fungible_asset, sender_account_id, target_account_id); + + let send_transaction_template: TransactionTemplate; + if let Some(recall_height) = recall_height { + let recall_height_as_u32: u32 = recall_height.parse::().map_err(|err| err.to_string())?; + send_transaction_template = TransactionTemplate::PayToIdWithRecall( + payment_transaction, + recall_height_as_u32, + note_type, + ); + } else { + send_transaction_template = TransactionTemplate::PayToId( + payment_transaction, + note_type, + ); + } + + let send_transaction_request = client.build_transaction_request(send_transaction_template.clone()).await.unwrap(); + let send_transaction_execution_result = client.new_transaction(send_transaction_request).await.unwrap(); + let result = NewTransactionResult::new( + send_transaction_execution_result.executed_transaction().id().to_string(), + send_transaction_execution_result.created_notes().iter().map(|note| note.id().to_string()).collect() + ); + + let proven_transaction = client.prove_transaction(send_transaction_execution_result.executed_transaction().clone()).unwrap(); + client.submit_transaction(send_transaction_execution_result, proven_transaction).await.unwrap(); + + Ok(result) + } else { + Err(JsValue::from_str("Client not initialized")) + } + } + + pub async fn new_consume_transaction( + &mut self, + account_id: String, + list_of_notes: Vec, + ) -> Result { + web_sys::console::log_1(&JsValue::from_str("new_consume_transaction called")); + if let Some(client) = self.get_mut_inner() { + let account_id = AccountId::from_hex(&account_id).unwrap(); + let mut result = Vec::new(); + for note_id in list_of_notes { + match get_input_note_with_id_prefix(client, ¬e_id).await { + 
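+                    // Resolve each note ID (possibly given as a shortened hex prefix) against
+                    // the local store, bailing out on the first ID that cannot be resolved.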
Ok(note_record) => result.push(note_record.id()), + Err(err) => return Err(JsValue::from_str(&err.to_string())), + } + } + // let list_of_notes = list_of_notes + // .iter() + // .map(|note_id| { + // get_input_note_with_id_prefix(client, note_id).await + // .map(|note_record| note_record.id()) + // .map_err(|err| err.to_string()) + // }) + // .collect::, _>>()?; + + let consume_transaction_template = TransactionTemplate::ConsumeNotes(account_id, result); + + let consume_transaction_request = client.build_transaction_request(consume_transaction_template.clone()).await.unwrap(); + let consume_transaction_execution_result = client.new_transaction(consume_transaction_request).await.unwrap(); + let result = NewTransactionResult::new( + consume_transaction_execution_result.executed_transaction().id().to_string(), + consume_transaction_execution_result.created_notes().iter().map(|note| note.id().to_string()).collect() + ); + let proven_transaction = client.prove_transaction(consume_transaction_execution_result.executed_transaction().clone()).unwrap(); + client.submit_transaction(consume_transaction_execution_result, proven_transaction).await.unwrap(); + + Ok(result) + } else { + Err(JsValue::from_str("Client not initialized")) + } + } + + pub async fn new_swap_transaction( + &mut self, + sender_account_id: String, + offered_asset_faucet_id: String, + offered_asset_amount: String, + requested_asset_faucet_id: String, + requested_asset_amount: String, + note_type: String, + ) -> Result { + web_sys::console::log_1(&JsValue::from_str("new_swap_transaction called")); + if let Some(client) = self.get_mut_inner() { + let sender_account_id = AccountId::from_hex(&sender_account_id).unwrap(); + + let offered_asset_faucet_id = AccountId::from_hex(&offered_asset_faucet_id).unwrap(); + let offered_asset_amount_as_u64: u64 = offered_asset_amount.parse::().map_err(|err| err.to_string())?; + let offered_fungible_asset = FungibleAsset::new(offered_asset_faucet_id, offered_asset_amount_as_u64) + .map_err(|err| err.to_string())? + .into(); + + let requested_asset_faucet_id = AccountId::from_hex(&requested_asset_faucet_id).unwrap(); + let requested_asset_amount_as_u64: u64 = requested_asset_amount.parse::().map_err(|err| err.to_string())?; + let requested_fungible_asset = FungibleAsset::new(requested_asset_faucet_id, requested_asset_amount_as_u64) + .map_err(|err| err.to_string())? 
+ .into(); + + let note_type = match note_type.as_str() { + "Public" => MidenNoteType::Public, + "Private" => MidenNoteType::OffChain, + _ => MidenNoteType::OffChain + }; + + let swap_transaction = SwapTransactionData::new( + sender_account_id, + offered_fungible_asset, + requested_fungible_asset, + ); + + let swap_transaction_template = TransactionTemplate::Swap(swap_transaction, note_type); + + let swap_transaction_request = client.build_transaction_request(swap_transaction_template.clone()).await.unwrap(); + let swap_transaction_execution_result = client.new_transaction(swap_transaction_request.clone()).await.unwrap(); + let mut result = NewSwapTransactionResult::new( + swap_transaction_execution_result.executed_transaction().id().to_string(), + swap_transaction_request.expected_output_notes().iter().map(|note| note.id().to_string()).collect(), + swap_transaction_request.expected_partial_notes().iter().map(|note| note.id().to_string()).collect(), + None + ); + let proven_transaction = client.prove_transaction(swap_transaction_execution_result.executed_transaction().clone()).unwrap(); + client.submit_transaction(swap_transaction_execution_result, proven_transaction).await.unwrap(); + + if let TransactionTemplate::Swap(swap_data, note_type) = swap_transaction_template { + let payback_note_tag = build_swap_tag( + note_type, + swap_data.offered_asset().faucet_id(), + swap_data.requested_asset().faucet_id(), + ) + .map_err(|err| err.to_string()).unwrap(); + + let payback_note_tag_u32: u32 = build_swap_tag( + note_type, + swap_data.offered_asset().faucet_id(), + swap_data.requested_asset().faucet_id(), + ) + .map_err(|err| err.to_string())? + .into(); + + result.setNoteTag(payback_note_tag_u32.to_string()); + // client.add_note_tag(payback_note_tag).await.unwrap(); + } + + Ok(result) + } else { + Err(JsValue::from_str("Client not initialized")) + } + } +} \ No newline at end of file diff --git a/wasm/src/web_client/notes.rs b/wasm/src/web_client/notes.rs new file mode 100644 index 000000000..c143dbf3b --- /dev/null +++ b/wasm/src/web_client/notes.rs @@ -0,0 +1,121 @@ +use miden_objects::{notes::NoteId, utils::Serializable, Digest}; +use miden_tx::utils::Deserializable; +use wasm_bindgen::prelude::*; +use serde::{Serialize, Deserialize}; +use serde_wasm_bindgen::from_value; +use web_sys::console; + +use miden_client::store::{InputNoteRecord, OutputNoteRecord}; +use miden_client::store::NoteFilter; + +use super::WebClient; + +#[derive(Serialize, Deserialize)] +pub enum WebClientNoteFilter { + All, + Pending, + Committed, + Consumed, +} + +#[wasm_bindgen] +impl WebClient { + pub async fn get_input_notes( + &mut self, + filter: JsValue + ) -> Result { + if let Some(client) = self.get_mut_inner() { + let filter: WebClientNoteFilter = from_value(filter).unwrap(); + let native_filter = match filter { + WebClientNoteFilter::Pending => NoteFilter::Pending, + WebClientNoteFilter::Committed => NoteFilter::Committed, + WebClientNoteFilter::Consumed => NoteFilter::Consumed, + WebClientNoteFilter::All => NoteFilter::All + }; + + let notes: Vec = client.get_input_notes(native_filter).await.unwrap(); + let note_ids = notes.iter().map(|note| + note.id().to_string() + ).collect::>(); + + // Convert the Vec to JsValue + serde_wasm_bindgen::to_value(¬e_ids).map_err(|e| JsValue::from_str(&e.to_string())) + } else { + Err(JsValue::from_str("Client not initialized")) + } + } + + pub async fn get_input_note( + &mut self, + note_id: String + ) -> Result { + if let Some(client) = self.get_mut_inner() { + let note_id: 
NoteId = Digest::try_from(note_id) + .map_err(|err| format!("Failed to parse input note id: {}", err))? + .into(); + let note: InputNoteRecord = client.get_input_note(note_id).await.unwrap(); + + serde_wasm_bindgen::to_value(¬e.id().to_string()).map_err(|e| JsValue::from_str(&e.to_string())) + } else { + Err(JsValue::from_str("Client not initialized")) + } + } + + pub async fn get_output_notes( + &mut self, + filter: JsValue + ) -> Result { + web_sys::console::log_1(&JsValue::from_str("get_output_notes called")); + if let Some(client) = self.get_mut_inner() { + let filter: WebClientNoteFilter = from_value(filter).unwrap(); + let native_filter = match filter { + WebClientNoteFilter::Pending => NoteFilter::Pending, + WebClientNoteFilter::Committed => NoteFilter::Committed, + WebClientNoteFilter::Consumed => NoteFilter::Consumed, + WebClientNoteFilter::All => NoteFilter::All + }; + + let notes: Vec = client.get_output_notes(native_filter).await.unwrap(); + let note_ids = notes.iter().map(|note| + note.id().to_string() + ).collect::>(); + + // Convert the Vec to JsValue + serde_wasm_bindgen::to_value(¬e_ids).map_err(|e| JsValue::from_str(&e.to_string())) + } else { + Err(JsValue::from_str("Client not initialized")) + } + } + + pub async fn get_output_note( + &mut self, + note_id: String + ) -> Result { + if let Some(client) = self.get_mut_inner() { + let note_id: NoteId = Digest::try_from(note_id) + .map_err(|err| format!("Failed to parse output note id: {}", err))? + .into(); + let note: OutputNoteRecord = client.get_output_note(note_id).await.unwrap(); + + serde_wasm_bindgen::to_value(¬e.id().to_string()).map_err(|e| JsValue::from_str(&e.to_string())) + } else { + Err(JsValue::from_str("Client not initialized")) + } + } + + pub async fn get_output_note_status( + &mut self, + note_id: String + ) -> Result { + if let Some(client) = self.get_mut_inner() { + let note_id: NoteId = Digest::try_from(note_id) + .map_err(|err| format!("Failed to parse output note id: {}", err))? 
+ .into(); + let note: OutputNoteRecord = client.get_output_note(note_id).await.unwrap(); + + serde_wasm_bindgen::to_value(&format!("{:?}", note.status())).map_err(|e| JsValue::from_str(&e.to_string())) + } else { + Err(JsValue::from_str("Client not initialized")) + } + } +} \ No newline at end of file diff --git a/wasm/src/web_client/rpc/client_grpc.rs b/wasm/src/web_client/rpc/client_grpc.rs new file mode 100644 index 000000000..62208f70d --- /dev/null +++ b/wasm/src/web_client/rpc/client_grpc.rs @@ -0,0 +1,47 @@ +pub mod rpc { + tonic::include_proto!("rpc"); +} + +pub mod block_producer { + tonic::include_proto!("block_producer"); +} + +pub mod store { + tonic::include_proto!("store"); +} + +pub mod responses { + tonic::include_proto!("responses"); +} + +pub mod requests { + tonic::include_proto!("requests"); +} + +pub mod block_header { + tonic::include_proto!("block_header"); +} + +pub mod note { + tonic::include_proto!("note"); +} + +pub mod account { + tonic::include_proto!("account"); +} + +pub mod mmr { + tonic::include_proto!("mmr"); +} + +pub mod smt { + tonic::include_proto!("smt"); +} + +pub mod merkle { + tonic::include_proto!("merkle"); +} + +pub mod digest { + tonic::include_proto!("digest"); +} \ No newline at end of file diff --git a/wasm/src/web_client/rpc/domain/accounts.rs b/wasm/src/web_client/rpc/domain/accounts.rs new file mode 100644 index 000000000..80581490b --- /dev/null +++ b/wasm/src/web_client/rpc/domain/accounts.rs @@ -0,0 +1,206 @@ +use std::fmt::{Debug, Display, Formatter}; + +use super::format_opt; +use miden_objects::{ + accounts::{Account, AccountId}, + crypto::{hash::rpo::RpoDigest, merkle::MerklePath}, + utils::Serializable, + Digest, +}; + +use miden_client::errors::{ConversionError, MissingFieldHelper}; +use crate::web_client::rpc::client_grpc::{ + account::{ + AccountId as AccountIdPb, + AccountInfo as AccountInfoPb, + AccountSummary as AccountSummaryPb, + }, + responses::{AccountBlockInputRecord, AccountTransactionInputRecord}, +}; + +// ACCOUNT ID +// ================================================================================================ + +impl Display for AccountIdPb { + fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { + f.write_fmt(format_args!("0x{:x}", self.id)) + } +} + +// impl Debug for AccountIdPb { +// fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { +// Display::fmt(self, f) +// } +// } + +// INTO PROTO ACCOUNT ID +// ------------------------------------------------------------------------------------------------ + +impl From for AccountIdPb { + fn from(value: u64) -> Self { + AccountIdPb { id: value } + } +} + +impl From<&AccountId> for AccountIdPb { + fn from(account_id: &AccountId) -> Self { + (*account_id).into() + } +} + +impl From for AccountIdPb { + fn from(account_id: AccountId) -> Self { + Self { id: account_id.into() } + } +} + +// FROM PROTO ACCOUNT ID +// ------------------------------------------------------------------------------------------------ + +impl From for u64 { + fn from(value: AccountIdPb) -> Self { + value.id + } +} + +impl TryFrom for AccountId { + type Error = ConversionError; + + fn try_from(account_id: AccountIdPb) -> Result { + account_id.id.try_into().map_err(|_| ConversionError::NotAValidFelt) + } +} + +// ACCOUNT UPDATE +// ================================================================================================ + +#[derive(Debug, PartialEq)] +pub struct AccountSummary { + pub account_id: AccountId, + pub account_hash: RpoDigest, + pub block_num: u32, +} + +impl 
From<&AccountSummary> for AccountSummaryPb { + fn from(update: &AccountSummary) -> Self { + Self { + account_id: Some(update.account_id.into()), + account_hash: Some(update.account_hash.into()), + block_num: update.block_num, + } + } +} + +#[derive(Debug, PartialEq)] +pub struct AccountInfo { + pub summary: AccountSummary, + pub details: Option, +} + +impl From<&AccountInfo> for AccountInfoPb { + fn from(AccountInfo { summary, details }: &AccountInfo) -> Self { + Self { + summary: Some(summary.into()), + details: details.as_ref().map(|account| account.to_bytes()), + } + } +} + +// ACCOUNT INPUT RECORD +// ================================================================================================ + +#[derive(Clone, Debug)] +pub struct AccountInputRecord { + pub account_id: AccountId, + pub account_hash: Digest, + pub proof: MerklePath, +} + +impl From for AccountBlockInputRecord { + fn from(from: AccountInputRecord) -> Self { + Self { + account_id: Some(from.account_id.into()), + account_hash: Some(from.account_hash.into()), + proof: Some(from.proof.into()), + } + } +} + +impl TryFrom for AccountInputRecord { + type Error = ConversionError; + + fn try_from(account_input_record: AccountBlockInputRecord) -> Result { + Ok(Self { + account_id: account_input_record + .account_id + .ok_or(AccountBlockInputRecord::missing_field(stringify!(account_id)))? + .try_into()?, + account_hash: account_input_record + .account_hash + .ok_or(AccountBlockInputRecord::missing_field(stringify!(account_hash)))? + .try_into()?, + proof: account_input_record + .proof + .ok_or(AccountBlockInputRecord::missing_field(stringify!(proof)))? + .try_into()?, + }) + } +} + +// ACCOUNT STATE +// ================================================================================================ + +/// Information needed from the store to verify account in transaction. +#[derive(Debug)] +pub struct AccountState { + /// Account ID + pub account_id: AccountId, + /// The account hash in the store corresponding to tx's account ID + pub account_hash: Option, +} + +impl Display for AccountState { + fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { + f.write_fmt(format_args!( + "{{ account_id: {}, account_hash: {} }}", + self.account_id, + format_opt(self.account_hash.as_ref()), + )) + } +} + +impl From for AccountTransactionInputRecord { + fn from(from: AccountState) -> Self { + Self { + account_id: Some(from.account_id.into()), + account_hash: from.account_hash.map(Into::into), + } + } +} + +impl TryFrom for AccountState { + type Error = ConversionError; + + fn try_from(from: AccountTransactionInputRecord) -> Result { + let account_id = from + .account_id + .clone() + .ok_or(AccountTransactionInputRecord::missing_field(stringify!(account_id)))? + .try_into()?; + + let account_hash = from + .account_hash + .ok_or(AccountTransactionInputRecord::missing_field(stringify!(account_hash)))? + .try_into()?; + + // If the hash is equal to `Digest::default()`, it signifies that this is a new account + // which is not yet present in the Store. 
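+        // Normalizing the default hash to `None` lets callers treat "no state recorded by the
+        // node yet" uniformly through the `Option` in `AccountState::account_hash`.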
+ let account_hash = if account_hash == Digest::default() { + None + } else { + Some(account_hash) + }; + + Ok(Self { account_id, account_hash }) + } +} diff --git a/wasm/src/web_client/rpc/domain/blocks.rs b/wasm/src/web_client/rpc/domain/blocks.rs new file mode 100644 index 000000000..f33ee8370 --- /dev/null +++ b/wasm/src/web_client/rpc/domain/blocks.rs @@ -0,0 +1,78 @@ +use miden_objects::BlockHeader; + +use miden_client::errors::{ConversionError, MissingFieldHelper}; +use crate::web_client::rpc::client_grpc::block_header; + +// BLOCK HEADER +// ================================================================================================ + +impl From<&BlockHeader> for block_header::BlockHeader { + fn from(header: &BlockHeader) -> Self { + Self { + version: header.version(), + prev_hash: Some(header.prev_hash().into()), + block_num: header.block_num(), + chain_root: Some(header.chain_root().into()), + account_root: Some(header.account_root().into()), + nullifier_root: Some(header.nullifier_root().into()), + note_root: Some(header.note_root().into()), + batch_root: Some(header.batch_root().into()), + proof_hash: Some(header.proof_hash().into()), + timestamp: header.timestamp(), + } + } +} + +impl From for block_header::BlockHeader { + fn from(header: BlockHeader) -> Self { + (&header).into() + } +} + +impl TryFrom<&block_header::BlockHeader> for BlockHeader { + type Error = ConversionError; + + fn try_from(value: &block_header::BlockHeader) -> Result { + value.clone().try_into() + } +} + +impl TryFrom for BlockHeader { + type Error = ConversionError; + + fn try_from(value: block_header::BlockHeader) -> Result { + Ok(BlockHeader::new( + value.version, + value + .prev_hash + .ok_or(block_header::BlockHeader::missing_field(stringify!(prev_hash)))? + .try_into()?, + value.block_num, + value + .chain_root + .ok_or(block_header::BlockHeader::missing_field(stringify!(chain_root)))? + .try_into()?, + value + .account_root + .ok_or(block_header::BlockHeader::missing_field(stringify!(account_root)))? + .try_into()?, + value + .nullifier_root + .ok_or(block_header::BlockHeader::missing_field(stringify!(nullifier_root)))? + .try_into()?, + value + .note_root + .ok_or(block_header::BlockHeader::missing_field(stringify!(note_root)))? + .try_into()?, + value + .batch_root + .ok_or(block_header::BlockHeader::missing_field(stringify!(batch_root)))? + .try_into()?, + value + .proof_hash + .ok_or(block_header::BlockHeader::missing_field(stringify!(proof_hash)))? 
+ .try_into()?, + value.timestamp, + )) + } +} diff --git a/wasm/src/web_client/rpc/domain/digest.rs b/wasm/src/web_client/rpc/domain/digest.rs new file mode 100644 index 000000000..718d17cb7 --- /dev/null +++ b/wasm/src/web_client/rpc/domain/digest.rs @@ -0,0 +1,245 @@ +use std::fmt::{Debug, Display, Formatter}; + +use hex::{FromHex, ToHex}; +use miden_objects::{notes::NoteId, Digest, Felt, StarkField}; + +use miden_client::errors::ConversionError; +use crate::web_client::rpc::client_grpc::digest; + +// CONSTANTS +// ================================================================================================ + +pub const DIGEST_DATA_SIZE: usize = 32; + +// FORMATTING +// ================================================================================================ + +impl Display for digest::Digest { + fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { + f.write_str(&self.encode_hex::()) + } +} + +// impl Debug for digest::Digest { +// fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { +// Display::fmt(self, f) +// } +// } + +impl ToHex for &digest::Digest { + fn encode_hex>(&self) -> T { + (*self).encode_hex() + } + + fn encode_hex_upper>(&self) -> T { + (*self).encode_hex_upper() + } +} + +impl ToHex for digest::Digest { + fn encode_hex>(&self) -> T { + let mut data: Vec = Vec::with_capacity(DIGEST_DATA_SIZE); + data.extend(format!("{:016x}", self.d0).chars()); + data.extend(format!("{:016x}", self.d1).chars()); + data.extend(format!("{:016x}", self.d2).chars()); + data.extend(format!("{:016x}", self.d3).chars()); + data.into_iter().collect() + } + + fn encode_hex_upper>(&self) -> T { + let mut data: Vec = Vec::with_capacity(DIGEST_DATA_SIZE); + data.extend(format!("{:016X}", self.d0).chars()); + data.extend(format!("{:016X}", self.d1).chars()); + data.extend(format!("{:016X}", self.d2).chars()); + data.extend(format!("{:016X}", self.d3).chars()); + data.into_iter().collect() + } +} + +impl FromHex for digest::Digest { + type Error = ConversionError; + + fn from_hex>(hex: T) -> Result { + let data = hex::decode(hex)?; + + match data.len() { + size if size < DIGEST_DATA_SIZE => { + Err(ConversionError::InsufficientData { expected: DIGEST_DATA_SIZE, got: size }) + }, + size if size > DIGEST_DATA_SIZE => { + Err(ConversionError::TooMuchData { expected: DIGEST_DATA_SIZE, got: size }) + }, + _ => { + let d0 = u64::from_be_bytes(data[..8].try_into().unwrap()); + let d1 = u64::from_be_bytes(data[8..16].try_into().unwrap()); + let d2 = u64::from_be_bytes(data[16..24].try_into().unwrap()); + let d3 = u64::from_be_bytes(data[24..32].try_into().unwrap()); + + Ok(digest::Digest { d0, d1, d2, d3 }) + }, + } + } +} + +// INTO +// ================================================================================================ + +impl From<[u64; 4]> for digest::Digest { + fn from(value: [u64; 4]) -> Self { + Self { + d0: value[0], + d1: value[1], + d2: value[2], + d3: value[3], + } + } +} + +impl From<&[u64; 4]> for digest::Digest { + fn from(value: &[u64; 4]) -> Self { + (*value).into() + } +} + +impl From<[Felt; 4]> for digest::Digest { + fn from(value: [Felt; 4]) -> Self { + Self { + d0: value[0].as_int(), + d1: value[1].as_int(), + d2: value[2].as_int(), + d3: value[3].as_int(), + } + } +} + +impl From<&[Felt; 4]> for digest::Digest { + fn from(value: &[Felt; 4]) -> Self { + (*value).into() + } +} + +impl From for digest::Digest { + fn from(value: Digest) -> Self { + Self { + d0: value[0].as_int(), + d1: value[1].as_int(), + d2: value[2].as_int(), + d3: value[3].as_int(), + } 
+ } +} + +impl From<&Digest> for digest::Digest { + fn from(value: &Digest) -> Self { + (*value).into() + } +} + +impl From<&NoteId> for digest::Digest { + fn from(value: &NoteId) -> Self { + (*value).inner().into() + } +} + +impl From for digest::Digest { + fn from(value: NoteId) -> Self { + value.inner().into() + } +} + +// FROM DIGEST +// ================================================================================================ + +impl From for [u64; 4] { + fn from(value: digest::Digest) -> Self { + [value.d0, value.d1, value.d2, value.d3] + } +} + +impl TryFrom for [Felt; 4] { + type Error = ConversionError; + + fn try_from(value: digest::Digest) -> Result { + if ![value.d0, value.d1, value.d2, value.d3] + .iter() + .all(|v| *v < ::MODULUS) + { + Err(ConversionError::NotAValidFelt) + } else { + Ok([ + Felt::new(value.d0), + Felt::new(value.d1), + Felt::new(value.d2), + Felt::new(value.d3), + ]) + } + } +} + +impl TryFrom for Digest { + type Error = ConversionError; + + fn try_from(value: digest::Digest) -> Result { + Ok(Self::new(value.try_into()?)) + } +} + +impl TryFrom<&digest::Digest> for [Felt; 4] { + type Error = ConversionError; + + fn try_from(value: &digest::Digest) -> Result { + value.clone().try_into() + } +} + +impl TryFrom<&digest::Digest> for Digest { + type Error = ConversionError; + + fn try_from(value: &digest::Digest) -> Result { + value.clone().try_into() + } +} + +// TESTS +// ================================================================================================ + +#[cfg(test)] +mod test { + use hex::{FromHex, ToHex}; + use proptest::prelude::*; + + use crate::generated::digest::Digest; + + #[test] + fn test_hex_digest() { + let digest = Digest { + d0: 3488802789098113751, + d1: 5271242459988994564, + d2: 17816570245237064784, + d3: 10910963388447438895, + }; + let encoded: String = ToHex::encode_hex(&digest); + let round_trip: Result = FromHex::from_hex::<&[u8]>(encoded.as_ref()); + assert_eq!(digest, round_trip.unwrap()); + + let digest = Digest { d0: 0, d1: 0, d2: 0, d3: 0 }; + let encoded: String = ToHex::encode_hex(&digest); + let round_trip: Result = FromHex::from_hex::<&[u8]>(encoded.as_ref()); + assert_eq!(digest, round_trip.unwrap()); + } + + proptest! 
{ + #[test] + fn test_encode_decode( + d0: u64, + d1: u64, + d2: u64, + d3: u64, + ) { + let digest = Digest { d0, d1, d2, d3 }; + let encoded: String = ToHex::encode_hex(&digest); + let round_trip: Result = FromHex::from_hex::<&[u8]>(encoded.as_ref()); + assert_eq!(digest, round_trip.unwrap()); + } + } +} diff --git a/wasm/src/web_client/rpc/domain/merkle.rs b/wasm/src/web_client/rpc/domain/merkle.rs new file mode 100644 index 000000000..6fc8f0f9a --- /dev/null +++ b/wasm/src/web_client/rpc/domain/merkle.rs @@ -0,0 +1,157 @@ +use miden_objects::{ + crypto::merkle::{LeafIndex, MerklePath, MmrDelta, SmtLeaf, SmtProof}, + Digest, Word, +}; + +use miden_client::errors::{ConversionError, MissingFieldHelper}; + +use super::{convert, try_convert}; + +use crate::web_client::rpc::client_grpc; + +// MERKLE PATH +// ================================================================================================ + +impl From for client_grpc::merkle::MerklePath { + fn from(value: MerklePath) -> Self { + let siblings = value.nodes().iter().map(client_grpc::digest::Digest::from).collect(); + client_grpc::merkle::MerklePath { siblings } + } +} + +impl TryFrom for MerklePath { + type Error = ConversionError; + + fn try_from(merkle_path: client_grpc::merkle::MerklePath) -> Result { + merkle_path.siblings.into_iter().map(Digest::try_from).collect() + } +} + +// MMR DELTA +// ================================================================================================ + +impl From for client_grpc::mmr::MmrDelta { + fn from(value: MmrDelta) -> Self { + let data = value.data.into_iter().map(client_grpc::digest::Digest::from).collect(); + client_grpc::mmr::MmrDelta { forest: value.forest as u64, data } + } +} + +impl TryFrom for MmrDelta { + type Error = ConversionError; + + fn try_from(value: client_grpc::mmr::MmrDelta) -> Result { + let data: Result, ConversionError> = + value.data.into_iter().map(Digest::try_from).collect(); + + Ok(MmrDelta { + forest: value.forest as usize, + data: data?, + }) + } +} + +// SPARSE MERKLE TREE +// ================================================================================================ + +// SMT LEAF +// ------------------------------------------------------------------------------------------------ + +impl TryFrom for SmtLeaf { + type Error = ConversionError; + + fn try_from(value: client_grpc::smt::SmtLeaf) -> Result { + let leaf = value.leaf.ok_or(client_grpc::smt::SmtLeaf::missing_field(stringify!(leaf)))?; + + match leaf { + client_grpc::smt::smt_leaf::Leaf::Empty(leaf_index) => { + Ok(Self::new_empty(LeafIndex::new_max_depth(leaf_index))) + }, + client_grpc::smt::smt_leaf::Leaf::Single(entry) => { + let (key, value): (Digest, Word) = entry.try_into()?; + + Ok(SmtLeaf::new_single(key, value)) + }, + client_grpc::smt::smt_leaf::Leaf::Multiple(entries) => { + let domain_entries: Vec<(Digest, Word)> = try_convert(entries.entries)?; + + Ok(SmtLeaf::new_multiple(domain_entries)?) 
+ }, + } + } +} + +impl From for client_grpc::smt::SmtLeaf { + fn from(smt_leaf: SmtLeaf) -> Self { + use client_grpc::smt::smt_leaf::Leaf; + + let leaf = match smt_leaf { + SmtLeaf::Empty(leaf_index) => Leaf::Empty(leaf_index.value()), + SmtLeaf::Single(entry) => Leaf::Single(entry.into()), + SmtLeaf::Multiple(entries) => { + Leaf::Multiple(client_grpc::smt::SmtLeafEntries { entries: convert(entries) }) + }, + }; + + Self { leaf: Some(leaf) } + } +} + +// SMT LEAF ENTRY +// ------------------------------------------------------------------------------------------------ + +impl TryFrom for (Digest, Word) { + type Error = ConversionError; + + fn try_from(entry: client_grpc::smt::SmtLeafEntry) -> Result { + let key: Digest = entry + .key + .ok_or(client_grpc::smt::SmtLeafEntry::missing_field(stringify!(key)))? + .try_into()?; + let value: Word = entry + .value + .ok_or(client_grpc::smt::SmtLeafEntry::missing_field(stringify!(value)))? + .try_into()?; + + Ok((key, value)) + } +} + +impl From<(Digest, Word)> for client_grpc::smt::SmtLeafEntry { + fn from((key, value): (Digest, Word)) -> Self { + Self { + key: Some(key.into()), + value: Some(value.into()), + } + } +} + +// SMT PROOF +// ------------------------------------------------------------------------------------------------ + +impl TryFrom for SmtProof { + type Error = ConversionError; + + fn try_from(opening: client_grpc::smt::SmtOpening) -> Result { + let path: MerklePath = opening + .path + .ok_or(client_grpc::smt::SmtOpening::missing_field(stringify!(path)))? + .try_into()?; + let leaf: SmtLeaf = opening + .leaf + .ok_or(client_grpc::smt::SmtOpening::missing_field(stringify!(leaf)))? + .try_into()?; + + Ok(SmtProof::new(path, leaf)?) + } +} + +impl From for client_grpc::smt::SmtOpening { + fn from(proof: SmtProof) -> Self { + let (path, leaf) = proof.into_parts(); + Self { + path: Some(path.into()), + leaf: Some(leaf.into()), + } + } +} diff --git a/wasm/src/web_client/rpc/domain/mod.rs b/wasm/src/web_client/rpc/domain/mod.rs new file mode 100644 index 000000000..8181ba584 --- /dev/null +++ b/wasm/src/web_client/rpc/domain/mod.rs @@ -0,0 +1,31 @@ +use std::fmt::Display; + +pub mod accounts; +pub mod blocks; +pub mod digest; +pub mod merkle; +pub mod nullifiers; +pub mod notes; + +// UTILITIES +// ================================================================================================ + +pub fn convert(from: T) -> Vec +where + T: IntoIterator, + From: Into, +{ + from.into_iter().map(|e| e.into()).collect() +} + +pub fn try_convert(from: T) -> Result, E> +where + T: IntoIterator, + From: TryInto, +{ + from.into_iter().map(|e| e.try_into()).collect() +} + +pub fn format_opt(opt: Option<&T>) -> String { + opt.map(ToString::to_string).unwrap_or("None".to_owned()) +} \ No newline at end of file diff --git a/wasm/src/web_client/rpc/domain/notes.rs b/wasm/src/web_client/rpc/domain/notes.rs new file mode 100644 index 000000000..1871d2930 --- /dev/null +++ b/wasm/src/web_client/rpc/domain/notes.rs @@ -0,0 +1,32 @@ +use miden_objects::{ + notes::{NoteMetadata, NoteTag, NoteType}, + Felt, +}; +use miden_client::errors::{ConversionError, MissingFieldHelper}; + +impl TryFrom for NoteMetadata { + type Error = ConversionError; + + fn try_from(value: crate::web_client::rpc::client_grpc::note::NoteMetadata) -> Result { + let sender = value + .sender + .ok_or_else(|| crate::web_client::rpc::client_grpc::note::NoteMetadata::missing_field("Sender"))? 
+ .try_into()?; + let note_type = NoteType::try_from(value.note_type as u64)?; + let tag = NoteTag::from(value.tag); + let aux = Felt::try_from(value.aux).map_err(|_| ConversionError::NotAValidFelt)?; + + Ok(NoteMetadata::new(sender, note_type, tag, aux)?) + } +} + +impl From for crate::web_client::rpc::client_grpc::note::NoteMetadata { + fn from(val: NoteMetadata) -> Self { + let sender = Some(val.sender().into()); + let note_type = val.note_type() as u32; + let tag = val.tag().into(); + let aux = val.aux().into(); + + crate::web_client::rpc::client_grpc::note::NoteMetadata { sender, note_type, tag, aux } + } +} \ No newline at end of file diff --git a/wasm/src/web_client/rpc/domain/nullifiers.rs b/wasm/src/web_client/rpc/domain/nullifiers.rs new file mode 100644 index 000000000..db513fa86 --- /dev/null +++ b/wasm/src/web_client/rpc/domain/nullifiers.rs @@ -0,0 +1,69 @@ +use miden_objects::{ + crypto::{hash::rpo::RpoDigest, merkle::SmtProof}, + notes::Nullifier, +}; +use miden_client::errors::{ConversionError, MissingFieldHelper}; + +use crate::web_client::rpc::client_grpc::{digest::Digest, responses::NullifierBlockInputRecord}; + +// FROM NULLIFIER +// ================================================================================================ + +impl From<&Nullifier> for Digest { + fn from(value: &Nullifier) -> Self { + (*value).inner().into() + } +} + +impl From for Digest { + fn from(value: Nullifier) -> Self { + value.inner().into() + } +} + +// INTO NULLIFIER +// ================================================================================================ + +impl TryFrom for Nullifier { + type Error = ConversionError; + + fn try_from(value: Digest) -> Result { + let digest: RpoDigest = value.try_into()?; + Ok(digest.into()) + } +} + +// NULLIFIER INPUT RECORD +// ================================================================================================ + +#[derive(Clone, Debug)] +pub struct NullifierWitness { + pub nullifier: Nullifier, + pub proof: SmtProof, +} + +impl TryFrom for NullifierWitness { + type Error = ConversionError; + + fn try_from(nullifier_input_record: NullifierBlockInputRecord) -> Result { + Ok(Self { + nullifier: nullifier_input_record + .nullifier + .ok_or(NullifierBlockInputRecord::missing_field(stringify!(nullifier)))? + .try_into()?, + proof: nullifier_input_record + .opening + .ok_or(NullifierBlockInputRecord::missing_field(stringify!(opening)))? 
+ .try_into()?, + }) + } +} + +impl From for NullifierBlockInputRecord { + fn from(value: NullifierWitness) -> Self { + Self { + nullifier: Some(value.nullifier.into()), + opening: Some(value.proof.into()), + } + } +} diff --git a/src/client/rpc/tonic_client.rs b/wasm/src/web_client/rpc/mod.rs similarity index 59% rename from src/client/rpc/tonic_client.rs rename to wasm/src/web_client/rpc/mod.rs index 56a720d94..766212531 100644 --- a/src/client/rpc/tonic_client.rs +++ b/wasm/src/web_client/rpc/mod.rs @@ -1,76 +1,83 @@ use async_trait::async_trait; -use miden_node_proto::{ - errors::ConversionError, - generated::{ - requests::{ - GetAccountDetailsRequest, GetBlockHeaderByNumberRequest, GetNotesByIdRequest, - SubmitProvenTransactionRequest, SyncStateRequest, - }, - responses::SyncStateResponse, - rpc::api_client::ApiClient, - }, -}; +use core::fmt; +use tonic::Response; +use tonic_web_wasm_client::Client; +use wasm_bindgen::prelude::*; +use wasm_bindgen_futures::*; + use miden_objects::{ accounts::{Account, AccountId}, - notes::{Note, NoteId, NoteMetadata, NoteTag, NoteType}, + crypto::merkle::{MerklePath, MmrProof}, + notes::{ + NoteMetadata, + NoteId, + Note, NoteTag, NoteType + }, transaction::ProvenTransaction, utils::Deserializable, BlockHeader, Digest, Felt, }; use miden_tx::utils::Serializable; -use tonic::transport::Channel; -use super::{ - CommittedNote, NodeRpcClient, NodeRpcClientEndpoint, NoteDetails, NoteInclusionDetails, - StateSyncInfo, +use miden_client::{ + client::rpc::{AccountUpdateSummary, AccountDetails, CommittedNote, NodeRpcClient, NodeRpcClientEndpoint, NoteDetails, NoteInclusionDetails, StateSyncInfo}, + errors::{ConversionError, NodeRpcClientError}, +}; +// use crate::native_code::{ +// errors::{ConversionError, NodeRpcClientError}, +// rpc::{ +// CommittedNote, NodeRpcClient, NodeRpcClientEndpoint, NoteDetails, NoteInclusionDetails, +// StateSyncInfo +// }, +// }; + +use client_grpc::{ + requests::{ + GetAccountDetailsRequest, GetBlockHeaderByNumberRequest, GetNotesByIdRequest, + SubmitProvenTransactionRequest, SyncStateRequest, + }, + responses::SyncStateResponse, + rpc::api_client::ApiClient }; -use crate::errors::NodeRpcClientError; -// TONIC RPC CLIENT -// ================================================================================================ +pub mod client_grpc; +pub mod domain; -/// Client for the Node RPC API using tonic -/// -/// Wraps the ApiClient which defers establishing a connection with a node until necessary -pub struct TonicRpcClient { - rpc_api: Option>, - endpoint: String, +#[wasm_bindgen(module = "/js/web-rpc-client.js")] +extern "C" { + #[wasm_bindgen(js_name = testRpc)] + fn test_rpc(endpoint: String) -> js_sys::Promise; } -impl TonicRpcClient { - /// Returns a new instance of [TonicRpcClient] that'll do calls the `config_endpoint` provided - pub fn new(config_endpoint: &str) -> TonicRpcClient { - TonicRpcClient { - rpc_api: None, - endpoint: config_endpoint.to_string(), +pub struct WebRpcClient { + endpoint: String +} + +impl WebRpcClient { + pub fn new(endpoint: &str) -> Self { + Self { + endpoint: endpoint.to_string() } } - /// Takes care of establishing the RPC connection if not connected yet and returns a reference - /// to the inner ApiClient - async fn rpc_api(&mut self) -> Result<&mut ApiClient, NodeRpcClientError> { - if self.rpc_api.is_some() { - Ok(self.rpc_api.as_mut().unwrap()) - } else { - let rpc_api = ApiClient::connect(self.endpoint.clone()) - .await - .map_err(|err| 
NodeRpcClientError::ConnectionError(err.to_string()))?; - Ok(self.rpc_api.insert(rpc_api)) - } + pub fn build_api_client(&self) -> ApiClient { + let wasm_client = Client::new(self.endpoint.clone()); + ApiClient::new(wasm_client) } } -#[async_trait] -impl NodeRpcClient for TonicRpcClient { +impl NodeRpcClient for WebRpcClient { async fn submit_proven_transaction( &mut self, proven_transaction: ProvenTransaction, ) -> Result<(), NodeRpcClientError> { + let mut query_client = self.build_api_client(); + let request = SubmitProvenTransactionRequest { transaction: proven_transaction.to_bytes(), }; - let rpc_api = self.rpc_api().await?; - rpc_api.submit_proven_transaction(request).await.map_err(|err| { + + let response = query_client.submit_proven_transaction(request).await.map_err(|err| { NodeRpcClientError::RequestError( NodeRpcClientEndpoint::SubmitProvenTx.to_string(), err.to_string(), @@ -83,33 +90,69 @@ impl NodeRpcClient for TonicRpcClient { async fn get_block_header_by_number( &mut self, block_num: Option, - ) -> Result { - let request = GetBlockHeaderByNumberRequest { block_num }; - let rpc_api = self.rpc_api().await?; - let api_response = rpc_api.get_block_header_by_number(request).await.map_err(|err| { + include_mmr_proof: bool, + ) -> Result<(BlockHeader, Option), NodeRpcClientError> { + let mut query_client = self.build_api_client(); + + let request = GetBlockHeaderByNumberRequest { + block_num, + include_mmr_proof: Some(include_mmr_proof), + }; + + // Attempt to send the request and process the response + let api_response = query_client.get_block_header_by_number(request).await.map_err(|err| { + // log to console all the properties of block header NodeRpcClientError::RequestError( NodeRpcClientEndpoint::GetBlockHeaderByNumber.to_string(), err.to_string(), ) })?; - api_response - .into_inner() + let response = api_response.into_inner(); + + let block_header: BlockHeader = response .block_header .ok_or(NodeRpcClientError::ExpectedFieldMissing("BlockHeader".into()))? .try_into() - .map_err(|err: ConversionError| NodeRpcClientError::ConversionFailure(err.to_string())) + .map_err(|err: ConversionError| { + NodeRpcClientError::ConversionFailure(err.to_string()) + })?; + + let mmr_proof = if include_mmr_proof { + let forest = response + .chain_length + .ok_or(NodeRpcClientError::ExpectedFieldMissing("ChainLength".into()))?; + let merkle_path: MerklePath = response + .mmr_path + .ok_or(NodeRpcClientError::ExpectedFieldMissing("MmrPath".into()))? 
+ .try_into() + .map_err(|err: ConversionError| { + NodeRpcClientError::ConversionFailure(err.to_string()) + })?; + + Some(MmrProof { + forest: forest as usize, + position: block_header.block_num() as usize, + merkle_path, + }) + } else { + None + }; + + Ok((block_header, mmr_proof)) } async fn get_notes_by_id( &mut self, note_ids: &[NoteId], ) -> Result, NodeRpcClientError> { + let mut query_client = self.build_api_client(); + let request = GetNotesByIdRequest { note_ids: note_ids.iter().map(|id| id.inner().into()).collect(), }; - let rpc_api = self.rpc_api().await?; - let api_response = rpc_api.get_notes_by_id(request).await.map_err(|err| { + + let api_response = query_client.get_notes_by_id(request).await.map_err(|err| { NodeRpcClientError::RequestError( NodeRpcClientEndpoint::GetBlockHeaderByNumber.to_string(), err.to_string(), @@ -119,8 +162,11 @@ impl NodeRpcClient for TonicRpcClient { let rpc_notes = api_response.into_inner().notes; let mut response_notes = Vec::with_capacity(rpc_notes.len()); for note in rpc_notes { - let sender_id = - note.sender.ok_or(NodeRpcClientError::ExpectedFieldMissing("Sender".into()))?; + let sender_id = note + .metadata + .clone() + .and_then(|metadata| metadata.sender) + .ok_or(NodeRpcClientError::ExpectedFieldMissing("Metadata.Sender".into()))?; let inclusion_details = { let merkle_path = note @@ -140,7 +186,11 @@ impl NodeRpcClient for TonicRpcClient { }, // Off-chain notes do not have details None => { - let note_tag = NoteTag::from(note.tag).validate(NoteType::OffChain)?; + let tag = note + .metadata + .ok_or(NodeRpcClientError::ExpectedFieldMissing("Metadata".into()))? + .tag; + let note_tag = NoteTag::from(tag).validate(NoteType::OffChain)?; let note_metadata = NoteMetadata::new( sender_id.try_into()?, NoteType::OffChain, @@ -169,10 +219,10 @@ impl NodeRpcClient for TonicRpcClient { note_tags: &[NoteTag], nullifiers_tags: &[u16], ) -> Result { - let account_ids = account_ids.iter().map(|acc| (*acc).into()).collect(); + let mut query_client = self.build_api_client(); + let account_ids = account_ids.iter().map(|acc| (*acc).into()).collect(); let nullifiers = nullifiers_tags.iter().map(|&nullifier| nullifier as u32).collect(); - let note_tags = note_tags.iter().map(|¬e_tag| note_tag.into()).collect(); let request = SyncStateRequest { @@ -182,8 +232,7 @@ impl NodeRpcClient for TonicRpcClient { nullifiers, }; - let rpc_api = self.rpc_api().await?; - let response = rpc_api.sync_state(request).await.map_err(|err| { + let response = query_client.sync_state(request).await.map_err(|err| { NodeRpcClientError::RequestError( NodeRpcClientEndpoint::SyncState.to_string(), err.to_string(), @@ -191,7 +240,7 @@ impl NodeRpcClient for TonicRpcClient { })?; response.into_inner().try_into() } - + /// Sends a [GetAccountDetailsRequest] to the Miden node, and extracts an [Account] from the /// `GetAccountDetailsResponse` response. 
/// @@ -206,38 +255,48 @@ impl NodeRpcClient for TonicRpcClient { /// - There is an error during [Account] deserialization async fn get_account_update( &mut self, - account_id: AccountId, - ) -> Result { - if !account_id.is_on_chain() { - return Err(NodeRpcClientError::InvalidAccountReceived( - "should only get updates for offchain accounts".to_string(), - )); - } + account_id: AccountId + ) -> Result { + let mut query_client = self.build_api_client(); - let account_id = account_id.into(); - let request = GetAccountDetailsRequest { account_id: Some(account_id) }; + let request = GetAccountDetailsRequest { account_id: Some(account_id.into()) }; - let rpc_api = self.rpc_api().await?; - - let response = rpc_api.get_account_details(request).await.map_err(|err| { + let response = query_client.get_account_details(request).await.map_err(|err| { NodeRpcClientError::RequestError( NodeRpcClientEndpoint::GetAccountDetails.to_string(), err.to_string(), ) })?; + let response = response.into_inner(); let account_info = response.account.ok_or(NodeRpcClientError::ExpectedFieldMissing( "GetAccountDetails response should have an `account`".to_string(), ))?; - let details_bytes = - account_info.details.ok_or(NodeRpcClientError::ExpectedFieldMissing( - "GetAccountDetails response's account should have `details`".to_string(), + let account_summary = + account_info.summary.ok_or(NodeRpcClientError::ExpectedFieldMissing( + "GetAccountDetails response's account should have a `summary`".to_string(), ))?; - let details = Account::read_from_bytes(&details_bytes)?; + let hash = account_summary.account_hash.ok_or(NodeRpcClientError::ExpectedFieldMissing( + "GetAccountDetails response's account should have an `account_hash`".to_string(), + ))?; + + let hash = hash.try_into()?; + + let update_summary = AccountUpdateSummary::new(hash, account_summary.block_num); + if account_id.is_on_chain() { + let details_bytes = + account_info.details.ok_or(NodeRpcClientError::ExpectedFieldMissing( + "GetAccountDetails response's account should have `details`".to_string(), + ))?; - Ok(details) + let account = Account::read_from_bytes(&details_bytes)?; + + Ok(AccountDetails::Public(account, update_summary)) + } else { + Ok(AccountDetails::OffChain(account_id, update_summary)) + } } } @@ -296,17 +355,26 @@ impl TryFrom for StateSyncInfo { .try_into()?; let sender_account_id = note - .sender - .ok_or(NodeRpcClientError::ExpectedFieldMissing("Notes.Sender".into()))? + .metadata + .clone() + .and_then(|m| m.sender) + .ok_or(NodeRpcClientError::ExpectedFieldMissing("Notes.Metadata.Sender".into()))? .try_into()?; - let note_type = NoteType::try_from(Felt::new(note.note_type.into()))?; - let metadata = NoteMetadata::new( - sender_account_id, - note_type, - note.tag.into(), - Default::default(), - )?; + let tag = note + .metadata + .clone() + .ok_or(NodeRpcClientError::ExpectedFieldMissing("Notes.Metadata".into()))? + .tag; + + let note_type = note + .metadata + .ok_or(NodeRpcClientError::ExpectedFieldMissing("Notes.Metadata".into()))? 
+ .note_type; + + let note_type = NoteType::try_from(note_type)?; + let metadata = + NoteMetadata::new(sender_account_id, note_type, tag.into(), Default::default())?; let committed_note = CommittedNote::new(note_id, note.note_index, merkle_path, metadata); @@ -338,4 +406,4 @@ impl TryFrom for StateSyncInfo { nullifiers, }) } -} +} \ No newline at end of file diff --git a/wasm/src/web_client/rpc/proto/account.proto b/wasm/src/web_client/rpc/proto/account.proto new file mode 100644 index 000000000..f73c5dac7 --- /dev/null +++ b/wasm/src/web_client/rpc/proto/account.proto @@ -0,0 +1,22 @@ +syntax = "proto3"; +package account; + +import "digest.proto"; + +message AccountId { + // A miden account is defined with a little bit of proof-of-work, the id itself is defined as + // the first word of a hash digest. For this reason account ids can be considered as random + // values, because of that the encoding bellow uses fixed 64 bits, instead of zig-zag encoding. + fixed64 id = 1; +} + +message AccountSummary { + AccountId account_id = 1; + digest.Digest account_hash = 2; + uint32 block_num = 3; +} + +message AccountInfo { + AccountSummary summary = 1; + optional bytes details = 2; +} diff --git a/wasm/src/web_client/rpc/proto/block_header.proto b/wasm/src/web_client/rpc/proto/block_header.proto new file mode 100644 index 000000000..bd9d4ad9e --- /dev/null +++ b/wasm/src/web_client/rpc/proto/block_header.proto @@ -0,0 +1,27 @@ +syntax = "proto3"; +package block_header; + +import "digest.proto"; + +message BlockHeader { + // specifies the version of the protocol. + uint32 version = 1; + // the hash of the previous blocks header. + digest.Digest prev_hash = 2; + // a unique sequential number of the current block. + fixed32 block_num = 3; + // a commitment to an MMR of the entire chain where each block is a leaf. + digest.Digest chain_root = 4; + // a commitment to account database. + digest.Digest account_root = 5; + // a commitment to the nullifier database. + digest.Digest nullifier_root = 6; + // a commitment to all notes created in the current block. + digest.Digest note_root = 7; + // a commitment to a set of transaction batches executed as a part of this block. + digest.Digest batch_root = 8; + // a hash of a STARK proof attesting to the correct state transition. + digest.Digest proof_hash = 9; + // the time when the block was created. + fixed32 timestamp = 10; +} \ No newline at end of file diff --git a/wasm/src/web_client/rpc/proto/block_producer.proto b/wasm/src/web_client/rpc/proto/block_producer.proto new file mode 100644 index 000000000..d4f2c0062 --- /dev/null +++ b/wasm/src/web_client/rpc/proto/block_producer.proto @@ -0,0 +1,11 @@ +// Specification of the user facing gRPC API. +syntax = "proto3"; +package block_producer; + +import "requests.proto"; +import "responses.proto"; + +service Api { + rpc SubmitProvenTransaction(requests.SubmitProvenTransactionRequest) returns (responses.SubmitProvenTransactionResponse) {} +} + diff --git a/wasm/src/web_client/rpc/proto/digest.proto b/wasm/src/web_client/rpc/proto/digest.proto new file mode 100644 index 000000000..e6a8f066c --- /dev/null +++ b/wasm/src/web_client/rpc/proto/digest.proto @@ -0,0 +1,10 @@ +syntax = "proto3"; +package digest; + +// A hash digest, the result of a hash function. 
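+// Split into four 64-bit limbs (d0..d3); each limb must be a valid field element, which the
+// conversions in `web_client/rpc/domain/digest.rs` enforce when decoding.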
+message Digest {
+    fixed64 d0 = 1;
+    fixed64 d1 = 2;
+    fixed64 d2 = 3;
+    fixed64 d3 = 4;
+}
diff --git a/wasm/src/web_client/rpc/proto/merkle.proto b/wasm/src/web_client/rpc/proto/merkle.proto
new file mode 100644
index 000000000..abded7231
--- /dev/null
+++ b/wasm/src/web_client/rpc/proto/merkle.proto
@@ -0,0 +1,8 @@
+syntax = "proto3";
+package merkle;
+
+import "digest.proto";
+
+message MerklePath {
+    repeated digest.Digest siblings = 1;
+}
diff --git a/wasm/src/web_client/rpc/proto/mmr.proto b/wasm/src/web_client/rpc/proto/mmr.proto
new file mode 100644
index 000000000..baaced2c9
--- /dev/null
+++ b/wasm/src/web_client/rpc/proto/mmr.proto
@@ -0,0 +1,9 @@
+syntax = "proto3";
+package mmr;
+
+import "digest.proto";
+
+message MmrDelta {
+    uint64 forest = 1;
+    repeated digest.Digest data = 2;
+}
diff --git a/wasm/src/web_client/rpc/proto/note.proto b/wasm/src/web_client/rpc/proto/note.proto
new file mode 100644
index 000000000..9139b2fa4
--- /dev/null
+++ b/wasm/src/web_client/rpc/proto/note.proto
@@ -0,0 +1,31 @@
+syntax = "proto3";
+package note;
+
+import "digest.proto";
+import "merkle.proto";
+import "account.proto";
+
+message NoteMetadata {
+    account.AccountId sender = 1;
+    uint32 note_type = 2;
+    fixed32 tag = 3;
+    fixed64 aux = 4;
+}
+
+message Note {
+    fixed32 block_num = 1;
+    uint32 note_index = 2;
+    digest.Digest note_id = 3;
+    NoteMetadata metadata = 4;
+    merkle.MerklePath merkle_path = 5;
+    // This field will be present when the note is on-chain.
+    // `details` contains the `Note` in a serialized format.
+    optional bytes details = 6;
+}
+
+message NoteSyncRecord {
+    uint32 note_index = 1;
+    digest.Digest note_id = 2;
+    NoteMetadata metadata = 3;
+    merkle.MerklePath merkle_path = 4;
+}
\ No newline at end of file
diff --git a/wasm/src/web_client/rpc/proto/requests.proto b/wasm/src/web_client/rpc/proto/requests.proto
new file mode 100644
index 000000000..d44c04350
--- /dev/null
+++ b/wasm/src/web_client/rpc/proto/requests.proto
@@ -0,0 +1,99 @@
+syntax = "proto3";
+package requests;
+
+import "account.proto";
+import "block_header.proto";
+import "digest.proto";
+import "note.proto";
+
+message ApplyBlockRequest {
+    bytes block = 1;
+}
+
+message CheckNullifiersRequest {
+    repeated digest.Digest nullifiers = 1;
+}
+
+// Returns the block header corresponding to the requested block number, as well as the Merkle
+// path and current forest which validate the block's inclusion in the chain.
+//
+// The Merkle path is an MMR proof for the block's leaf, based on the current chain length.
+message GetBlockHeaderByNumberRequest {
+    // The block number of the target block.
+    //
+    // If not provided, means the latest known block.
+    optional uint32 block_num = 1;
+    // Whether or not to return authentication data for the block header.
+    optional bool include_mmr_proof = 2;
+}
+
+// State synchronization request.
+//
+// Specifies the state updates the client is interested in. The server will return the first block
+// which contains a note matching `note_tags`, or the chain tip, together with the corresponding
+// updates to `nullifiers` and `account_ids` for that block range.
+message SyncStateRequest {
+    // Last block known by the client. The response will contain data starting from the next block,
+    // until the first block which contains a note matching the requested tags, or the chain tip
+    // if there are no such notes.
+    fixed32 block_num = 1;
+
+    // Account hashes to include in the response.
+    //
+    // An account hash will be included if-and-only-if it is the latest update.
+    // That is, there may have been an update to the account within the given range, but if it is
+    // not the latest one, it won't be included in the response.
+    repeated account.AccountId account_ids = 2;
+
+    // Determines the tags the client is interested in. These are only the 16 high bits of the
+    // note's complete tag.
+    //
+    // This means it is not possible to request a specific note, only a "note family"; this is
+    // done to increase the privacy of the client by hiding which exact notes the client is
+    // interested in.
+    repeated uint32 note_tags = 3;
+
+    // Determines the nullifiers the client is interested in.
+    //
+    // Similarly to the note_tags, this determines only the 16 high bits of the target nullifier.
+    repeated uint32 nullifiers = 4;
+}
+
+message GetBlockInputsRequest {
+    // IDs of the accounts against which a transaction is executed.
+    repeated account.AccountId account_ids = 1;
+    // Array of nullifiers for all notes consumed by a transaction.
+    repeated digest.Digest nullifiers = 2;
+}
+
+message GetTransactionInputsRequest {
+    account.AccountId account_id = 1;
+    repeated digest.Digest nullifiers = 2;
+}
+
+message SubmitProvenTransactionRequest {
+    // Transaction encoded using miden's native format
+    bytes transaction = 1;
+}
+
+message GetNotesByIdRequest {
+    // List of NoteIds to be queried from the database
+    repeated digest.Digest note_ids = 1;
+}
+
+message ListNullifiersRequest {}
+
+message ListAccountsRequest {}
+
+message ListNotesRequest {}
+
+// Returns the latest state of an account with the specified ID.
+message GetAccountDetailsRequest {
+    // Account ID to get details for.
+    account.AccountId account_id = 1;
+}
+
+message GetBlockByNumberRequest {
+    // The block number of the target block.
+    fixed32 block_num = 1;
+}
\ No newline at end of file
diff --git a/wasm/src/web_client/rpc/proto/responses.proto b/wasm/src/web_client/rpc/proto/responses.proto
new file mode 100644
index 000000000..b7df73977
--- /dev/null
+++ b/wasm/src/web_client/rpc/proto/responses.proto
@@ -0,0 +1,131 @@
+syntax = "proto3";
+package responses;
+
+import "account.proto";
+import "block_header.proto";
+import "digest.proto";
+import "merkle.proto";
+import "mmr.proto";
+import "note.proto";
+import "smt.proto";
+
+message ApplyBlockResponse {}
+
+message CheckNullifiersResponse {
+    // Each requested nullifier has its corresponding nullifier proof at the same position.
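+    // Each proof is an opening in the nullifier SMT (see the `SmtOpening` message in smt.proto
+    // below), presumably against the tree committed to by `BlockHeader.nullifier_root`.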
+    repeated smt.SmtOpening proofs = 1;
+}
+
+message GetBlockHeaderByNumberResponse {
+    // The requested block header
+    block_header.BlockHeader block_header = 1;
+
+    // Merkle path to verify the block's inclusion in the MMR at the returned `chain_length`
+    optional merkle.MerklePath mmr_path = 2;
+
+    // Current chain length
+    optional fixed32 chain_length = 3;
+}
+
+message NullifierUpdate {
+    digest.Digest nullifier = 1;
+    fixed32 block_num = 2;
+}
+
+message SyncStateResponse {
+    // Number of the latest block in the chain
+    fixed32 chain_tip = 1;
+
+    // Block header of the block with the first note matching the specified criteria
+    block_header.BlockHeader block_header = 2;
+
+    // Data needed to update the partial MMR from `block_num + 1` to `block_header.block_num`
+    mmr.MmrDelta mmr_delta = 3;
+
+    // List of account hashes updated after `block_num + 1` but not after `block_header.block_num`
+    repeated account.AccountSummary accounts = 5;
+
+    // List of all notes together with the Merkle paths from `block_header.note_root`
+    repeated note.NoteSyncRecord notes = 6;
+
+    // List of nullifiers created between `block_num + 1` and `block_header.block_num`
+    repeated NullifierUpdate nullifiers = 7;
+}
+
+// An account returned as a response to the GetBlockInputs request
+message AccountBlockInputRecord {
+    account.AccountId account_id = 1;
+    digest.Digest account_hash = 2;
+    merkle.MerklePath proof = 3;
+}
+
+// A nullifier returned as a response to the GetBlockInputs request
+message NullifierBlockInputRecord {
+    digest.Digest nullifier = 1;
+    smt.SmtOpening opening = 2;
+}
+
+message GetBlockInputsResponse {
+    // The latest block header
+    block_header.BlockHeader block_header = 1;
+
+    // Peaks of the above block's MMR; the `forest` value is equal to the block number.
+    repeated digest.Digest mmr_peaks = 2;
+
+    // The hashes of the requested accounts and their authentication paths
+    repeated AccountBlockInputRecord account_states = 3;
+
+    // The requested nullifiers and their authentication paths
+    repeated NullifierBlockInputRecord nullifiers = 4;
+}
+
+// An account returned as a response to the GetTransactionInputs request
+message AccountTransactionInputRecord {
+    account.AccountId account_id = 1;
+    // The latest account hash, zero hash if the account doesn't exist.
+    digest.Digest account_hash = 2;
+}
+
+// A nullifier returned as a response to the GetTransactionInputs request
+message NullifierTransactionInputRecord {
+    digest.Digest nullifier = 1;
+    // The block at which the nullifier has been consumed, zero if not consumed.
+    fixed32 block_num = 2;
+}
+
+message GetTransactionInputsResponse {
+    AccountTransactionInputRecord account_state = 1;
+    repeated NullifierTransactionInputRecord nullifiers = 2;
+}
+
+message SubmitProvenTransactionResponse {}
+
+message GetNotesByIdResponse {
+    // Lists the Notes returned by the database
+    repeated note.Note notes = 1;
+}
+
+message ListNullifiersResponse {
+    // Lists all nullifiers of the current chain
+    repeated smt.SmtLeafEntry nullifiers = 1;
+}
+
+message ListAccountsResponse {
+    // Lists all accounts of the current chain
+    repeated account.AccountInfo accounts = 1;
+}
+
+message ListNotesResponse {
+    // Lists all notes of the current chain
+    repeated note.Note notes = 1;
+}
+
+message GetAccountDetailsResponse {
+    // Account info (with details for on-chain accounts)
+    account.AccountInfo account = 1;
+}
+
+message GetBlockByNumberResponse {
+    // The requested `Block` data encoded using miden's native format
+    optional bytes block = 1;
+}
\ No newline at end of file
diff --git a/wasm/src/web_client/rpc/proto/rpc.proto b/wasm/src/web_client/rpc/proto/rpc.proto
new file mode 100644
index 000000000..428336b19
--- /dev/null
+++ b/wasm/src/web_client/rpc/proto/rpc.proto
@@ -0,0 +1,16 @@
+// Specification of the user-facing gRPC API.
+syntax = "proto3";
+package rpc;
+
+import "requests.proto";
+import "responses.proto";
+
+service Api {
+    rpc CheckNullifiers(requests.CheckNullifiersRequest) returns (responses.CheckNullifiersResponse) {}
+    rpc GetAccountDetails(requests.GetAccountDetailsRequest) returns (responses.GetAccountDetailsResponse) {}
+    rpc GetBlockByNumber(requests.GetBlockByNumberRequest) returns (responses.GetBlockByNumberResponse) {}
+    rpc GetBlockHeaderByNumber(requests.GetBlockHeaderByNumberRequest) returns (responses.GetBlockHeaderByNumberResponse) {}
+    rpc GetNotesById(requests.GetNotesByIdRequest) returns (responses.GetNotesByIdResponse) {}
+    rpc SubmitProvenTransaction(requests.SubmitProvenTransactionRequest) returns (responses.SubmitProvenTransactionResponse) {}
+    rpc SyncState(requests.SyncStateRequest) returns (responses.SyncStateResponse) {}
+}
\ No newline at end of file
diff --git a/wasm/src/web_client/rpc/proto/smt.proto b/wasm/src/web_client/rpc/proto/smt.proto
new file mode 100644
index 000000000..ec9522a7d
--- /dev/null
+++ b/wasm/src/web_client/rpc/proto/smt.proto
@@ -0,0 +1,32 @@
+// Message definitions related to Sparse Merkle Trees (SMT).
+
+syntax = "proto3";
+package smt;
+
+import "digest.proto";
+import "merkle.proto";
+
+// An entry in a leaf.
+message SmtLeafEntry {
+    digest.Digest key = 1;
+    digest.Digest value = 2;
+}
+
+message SmtLeafEntries {
+    repeated SmtLeafEntry entries = 1;
+}
+
+// A leaf in an SMT, sitting at depth 64. A leaf can contain 0, 1 or multiple leaf entries.
+message SmtLeaf {
+    oneof leaf {
+        uint64 empty = 1;
+        SmtLeafEntry single = 2;
+        SmtLeafEntries multiple = 3;
+    }
+}
+
+// The opening of a leaf in an SMT.
+message SmtOpening {
+    merkle.MerklePath path = 1;
+    SmtLeaf leaf = 2;
+}
\ No newline at end of file
diff --git a/wasm/src/web_client/rpc/proto/store.proto b/wasm/src/web_client/rpc/proto/store.proto
new file mode 100644
index 000000000..142729cac
--- /dev/null
+++ b/wasm/src/web_client/rpc/proto/store.proto
@@ -0,0 +1,23 @@
+// Specification of the store RPC.
+//
+// This provides access to the rollup data for the other nodes.
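+//
+// Note: the web client itself presumably only calls the user-facing `rpc.Api` service defined in
+// rpc.proto above; this file is vendored so the full set of shared request/response types can be
+// generated together.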
+syntax = "proto3"; +package store; + +import "requests.proto"; +import "responses.proto"; + +service Api { + rpc ApplyBlock(requests.ApplyBlockRequest) returns (responses.ApplyBlockResponse) {} + rpc CheckNullifiers(requests.CheckNullifiersRequest) returns (responses.CheckNullifiersResponse) {} + rpc GetAccountDetails(requests.GetAccountDetailsRequest) returns (responses.GetAccountDetailsResponse) {} + rpc GetBlockByNumber(requests.GetBlockByNumberRequest) returns (responses.GetBlockByNumberResponse) {} + rpc GetBlockHeaderByNumber(requests.GetBlockHeaderByNumberRequest) returns (responses.GetBlockHeaderByNumberResponse) {} + rpc GetBlockInputs(requests.GetBlockInputsRequest) returns (responses.GetBlockInputsResponse) {} + rpc GetNotesById(requests.GetNotesByIdRequest) returns (responses.GetNotesByIdResponse) {} + rpc GetTransactionInputs(requests.GetTransactionInputsRequest) returns (responses.GetTransactionInputsResponse) {} + rpc ListAccounts(requests.ListAccountsRequest) returns (responses.ListAccountsResponse) {} + rpc ListNotes(requests.ListNotesRequest) returns (responses.ListNotesResponse) {} + rpc ListNullifiers(requests.ListNullifiersRequest) returns (responses.ListNullifiersResponse) {} + rpc SyncState(requests.SyncStateRequest) returns (responses.SyncStateResponse) {} +} \ No newline at end of file diff --git a/wasm/src/web_client/store/accounts/js_bindings.rs b/wasm/src/web_client/store/accounts/js_bindings.rs new file mode 100644 index 000000000..93264eec6 --- /dev/null +++ b/wasm/src/web_client/store/accounts/js_bindings.rs @@ -0,0 +1,89 @@ +use wasm_bindgen::prelude::*; +use wasm_bindgen_futures::*; + +// Account IndexedDB Operations +#[wasm_bindgen(module = "/js/db/accounts.js")] +extern "C" { + // GETS + // ================================================================================================ + #[wasm_bindgen(js_name = getAccountIds)] + pub fn idxdb_get_account_ids() -> js_sys::Promise; + + #[wasm_bindgen(js_name = getAllAccountStubs)] + pub fn idxdb_get_account_stubs() -> js_sys::Promise; + + #[wasm_bindgen(js_name = getAccountStub)] + pub fn idxdb_get_account_stub( + account_id: String + ) -> js_sys::Promise; + + #[wasm_bindgen(js_name = getAccountCode)] + pub fn idxdb_get_account_code( + code_root: String + ) -> js_sys::Promise; + + #[wasm_bindgen(js_name = getAccountStorage)] + pub fn idxdb_get_account_storage( + storage_root: String + ) -> js_sys::Promise; + + #[wasm_bindgen(js_name = getAccountAssetVault)] + pub fn idxdb_get_account_asset_vault( + vault_root: String + ) -> js_sys::Promise; + + #[wasm_bindgen(js_name = getAccountAuth)] + pub fn idxdb_get_account_auth( + account_id: String + ) -> js_sys::Promise; + + #[wasm_bindgen(js_name = getAccountAuthByPubKey)] + pub fn idxdb_get_account_auth_by_pub_key( + pub_key: Vec + ) -> JsValue; + + #[wasm_bindgen(js_name = fetchAndCacheAccountAuthByPubKey)] + pub fn idxdb_fetch_and_cache_account_auth_by_pub_key( + account_id: String + ) -> js_sys::Promise; + + // INSERTS + // ================================================================================================ + + #[wasm_bindgen(js_name = insertAccountCode)] + pub fn idxdb_insert_account_code( + code_root: String, + code: String, + module: Vec + ) -> js_sys::Promise; + + #[wasm_bindgen(js_name = insertAccountStorage)] + pub fn idxdb_insert_account_storage( + storage_root: String, + storage_slots: Vec + ) -> js_sys::Promise; + + #[wasm_bindgen(js_name = insertAccountAssetVault)] + pub fn idxdb_insert_account_asset_vault( + vault_root: String, + assets: 
String + ) -> js_sys::Promise; + + #[wasm_bindgen(js_name = insertAccountRecord)] + pub fn idxdb_insert_account_record( + id: String, + code_root: String, + storage_root: String, + vault_root: String, + nonce: String, + committed: bool, + account_seed: Option> + ) -> js_sys::Promise; + + #[wasm_bindgen(js_name = insertAccountAuth)] + pub fn idxdb_insert_account_auth( + id: String, + auth_info: Vec, + pub_key: Vec + ) -> js_sys::Promise; +} \ No newline at end of file diff --git a/wasm/src/web_client/store/accounts/mod.rs b/wasm/src/web_client/store/accounts/mod.rs new file mode 100644 index 000000000..2e90c243f --- /dev/null +++ b/wasm/src/web_client/store/accounts/mod.rs @@ -0,0 +1,199 @@ +use serde::{Serialize, Deserialize}; +use serde_wasm_bindgen::from_value; +use wasm_bindgen_futures::*; +use wasm_bindgen::prelude::*; +use web_sys::console; + +use miden_lib::transaction::TransactionKernel; +use miden_objects::{ + accounts::{Account, AccountCode, AccountId, AccountStorage, AccountStub, AuthSecretKey}, + assembly::ModuleAst, + assets::{Asset, AssetVault}, + Digest, Felt, Word +}; +use miden_tx::utils::{Deserializable, Serializable}; +use miden_client::errors::StoreError; + +// use crate::native_code::{errors::StoreError, store::{NoteFilter, Store}}; + +use super::WebStore; + +mod js_bindings; +use js_bindings::*; + +mod models; +use models::*; + +pub(crate) mod utils; +use utils::*; + +impl WebStore { + pub(super) async fn get_account_ids( + &self + ) -> Result, StoreError> { + let promise = idxdb_get_account_ids(); + let js_value = JsFuture::from(promise).await.unwrap(); + let account_ids_as_strings: Vec = from_value(js_value).unwrap(); + + let native_account_ids: Vec = account_ids_as_strings.into_iter().map(|id| { + AccountId::from_hex(&id).unwrap() + }).collect(); + + Ok(native_account_ids) + } + + pub(super) async fn get_account_stubs( + &self + ) -> Result)>, StoreError> { + let promise = idxdb_get_account_stubs(); + let js_value = JsFuture::from(promise).await.unwrap(); + let account_stubs_idxdb: Vec = from_value(js_value).unwrap(); + + let account_stubs: Result)>, StoreError> = account_stubs_idxdb + .into_iter() + .map(|stub| parse_account_record_idxdb_object(stub)) + .collect(); // Collect results into a single Result + + account_stubs + } + + pub(crate) async fn get_account_stub( + &self, + account_id: AccountId, + ) -> Result<(AccountStub, Option), StoreError> { + let account_id_str = account_id.to_string(); + + let promise = idxdb_get_account_stub(account_id_str); + let js_value = JsFuture::from(promise).await.unwrap(); + let account_stub_idxdb: AccountRecordIdxdbOjbect = from_value(js_value).unwrap(); + + parse_account_record_idxdb_object(account_stub_idxdb) + } + + pub(crate) async fn get_account( + &self, + account_id: AccountId + ) -> Result<(Account, Option), StoreError> { + let (account_stub, seed) = self.get_account_stub(account_id).await.unwrap(); + let (_procedures, module_ast) = self.get_account_code(account_stub.code_root()).await.unwrap(); + let account_code = AccountCode::new(module_ast, &TransactionKernel::assembler()).unwrap(); + let account_storage = self.get_account_storage(account_stub.storage_root()).await.unwrap(); + let account_vault = self.get_vault_assets(account_stub.vault_root()).await.unwrap(); + let account_vault = AssetVault::new(&account_vault).unwrap(); + + let account = Account::new( + account_stub.id(), + account_vault, + account_storage, + account_code, + account_stub.nonce(), + ); + + Ok((account, seed)) + } + + pub(super) async fn 
get_account_code( + &self, + root: Digest + ) -> Result<(Vec, ModuleAst), StoreError> { + let root_serialized = root.to_string(); + + let promise = idxdb_get_account_code(root_serialized); + let js_value = JsFuture::from(promise).await.unwrap(); + let account_code_idxdb: AccountCodeIdxdbObject = from_value(js_value).unwrap(); + + let procedures = + serde_json::from_str(&account_code_idxdb.procedures).unwrap(); + + let module = ModuleAst::from_bytes(&account_code_idxdb.module).unwrap(); + + Ok((procedures, module)) + } + + pub(super) async fn get_account_storage( + &self, + root: Digest + ) -> Result { + let root_serialized = root.to_string(); + + let promise = idxdb_get_account_storage(root_serialized); + let js_value = JsFuture::from(promise).await.unwrap(); + let account_storage_idxdb: AccountStorageIdxdbObject = from_value(js_value).unwrap(); + + let storage = AccountStorage::read_from_bytes(&account_storage_idxdb.storage).unwrap(); + Ok(storage) + } + + pub(super) async fn get_vault_assets( + &self, + root: Digest + ) -> Result, StoreError> { + let root_serialized = serde_json::to_string(&root.to_string()).unwrap(); + + let promise = idxdb_get_account_asset_vault(root_serialized); + let js_value = JsFuture::from(promise).await.unwrap(); + let vault_assets_idxdb: AccountVaultIdxdbObject = from_value(js_value).unwrap(); + + let assets = serde_json::from_str(&vault_assets_idxdb.assets).unwrap(); + Ok(assets) + } + + pub(crate) async fn get_account_auth( + &self, + account_id: AccountId + ) -> Result { + let account_id_str = account_id.to_string(); + + let promise = idxdb_get_account_auth(account_id_str); + let js_value = JsFuture::from(promise).await.unwrap(); + let auth_info_idxdb: AccountAuthIdxdbObject = from_value(js_value).unwrap(); + + // Convert the auth_info to the appropriate AuthInfo enum variant + let auth_info = AuthSecretKey::read_from_bytes(&auth_info_idxdb.auth_info)?; + + Ok(auth_info) + } + + pub(crate) async fn insert_account( + &self, + account: &Account, + account_seed: Option, + auth_info: &AuthSecretKey, + ) -> Result<(), StoreError> { + insert_account_code(account.code()).await.unwrap(); + insert_account_storage(account.storage()).await.unwrap(); + insert_account_asset_vault(account.vault()).await.unwrap(); + insert_account_record(account, account_seed).await.unwrap(); + insert_account_auth(account.id(), auth_info).await.unwrap(); + + Ok(()) + } + + /// Returns an [AuthSecretKey] by a public key represented by a [Word] + pub(crate) fn get_account_auth_by_pub_key(&self, pub_key: Word) -> Result { + let pub_key_bytes = pub_key.to_bytes(); + + let js_value = idxdb_get_account_auth_by_pub_key(pub_key_bytes); + let account_auth_idxdb: AccountAuthIdxdbObject = from_value(js_value).unwrap(); + + // Convert the auth_info to the appropriate AuthInfo enum variant + let auth_info = AuthSecretKey::read_from_bytes(&account_auth_idxdb.auth_info)?; + + Ok(auth_info) + } + + /// Fetches an [AuthSecretKey] by a public key represented by a [Word] and caches it in the store + pub(crate) async fn fetch_and_cache_account_auth_by_pub_key(&self, account_id: String) -> Result { + // Print to console for debugging + console::log_1(&JsValue::from_str("fetch_and_cache_account_auth_by_pub_key called inner")); + + let promise = idxdb_fetch_and_cache_account_auth_by_pub_key(account_id); + let js_value = JsFuture::from(promise).await.unwrap(); + let account_auth_idxdb: AccountAuthIdxdbObject = from_value(js_value).unwrap(); + + // Convert the auth_info to the appropriate AuthInfo enum variant + 
let auth_info = AuthSecretKey::read_from_bytes(&account_auth_idxdb.auth_info)?; + + Ok(auth_info) + } +} \ No newline at end of file diff --git a/wasm/src/web_client/store/accounts/models.rs b/wasm/src/web_client/store/accounts/models.rs new file mode 100644 index 000000000..0f406bffd --- /dev/null +++ b/wasm/src/web_client/store/accounts/models.rs @@ -0,0 +1,63 @@ +use base64::{decode as base64_decode, DecodeError}; +use serde::{Deserialize, Deserializer, Serialize}; +use serde::de::{Error, Unexpected}; + +#[derive(Serialize, Deserialize)] +pub struct AccountCodeIdxdbObject { + pub root: String, + pub procedures: String, + #[serde(deserialize_with = "base64_to_vec_u8_required", default)] + pub module: Vec, +} + +#[derive(Serialize, Deserialize)] +pub struct AccountAuthIdxdbObject { + pub id: String, + #[serde(deserialize_with = "base64_to_vec_u8_required", default)] + pub auth_info: Vec, +} + +#[derive(Serialize, Deserialize)] +pub struct AccountStorageIdxdbObject { + pub root: String, + #[serde(deserialize_with = "base64_to_vec_u8_required", default)] + pub storage: Vec, +} + +#[derive(Serialize, Deserialize)] +pub struct AccountVaultIdxdbObject { + pub root: String, + pub assets: String, +} + +#[derive(Serialize, Deserialize)] +pub struct AccountRecordIdxdbOjbect { + pub id: String, + pub nonce: String, + pub vault_root: String, + pub storage_root: String, + pub code_root: String, + #[serde(deserialize_with = "base64_to_vec_u8_optional", default)] + pub account_seed: Option>, +} + +fn base64_to_vec_u8_required<'de, D>(deserializer: D) -> Result, D::Error> +where + D: Deserializer<'de>, +{ + let base64_str: String = Deserialize::deserialize(deserializer)?; + base64_decode(&base64_str).map_err(|e| Error::custom(format!("Base64 decode error: {}", e))) +} + +fn base64_to_vec_u8_optional<'de, D>(deserializer: D) -> Result>, D::Error> +where + D: Deserializer<'de>, +{ + let base64_str: Option = Option::deserialize(deserializer)?; + match base64_str { + Some(str) => base64_decode(&str) + .map(Some) + .map_err(|e| Error::custom(format!("Base64 decode error: {}", e))), + None => Ok(None), + } +} \ No newline at end of file diff --git a/wasm/src/web_client/store/accounts/utils.rs b/wasm/src/web_client/store/accounts/utils.rs new file mode 100644 index 000000000..d8bc2af48 --- /dev/null +++ b/wasm/src/web_client/store/accounts/utils.rs @@ -0,0 +1,115 @@ +use wasm_bindgen_futures::*; +use wasm_bindgen::prelude::*; +use web_sys::console; + +use miden_objects::{ + accounts::{Account, AccountCode, AccountId, AccountStorage, AccountStub, AuthSecretKey}, + assembly::AstSerdeOptions, + assets::{Asset, AssetVault}, + utils::Deserializable, + Felt, Digest, Word +}; +use miden_tx::utils::Serializable; +use miden_client::errors::StoreError; + +use super::js_bindings::*; +use super::models::*; + +pub async fn insert_account_code( + account_code: &AccountCode +) -> Result<(), ()> { + let root = account_code.root().to_string(); + let procedures = serde_json::to_string(account_code.procedures()).unwrap(); + let module = account_code.module().to_bytes(AstSerdeOptions { serialize_imports: true }); + + let promise = idxdb_insert_account_code(root, procedures, module); + let js_value = JsFuture::from(promise).await; + + Ok(()) +} + +pub async fn insert_account_storage( + account_storage: &AccountStorage +) -> Result<(), ()> { + let root = account_storage.root().to_string(); + let storage = account_storage.to_bytes(); + + let promise = idxdb_insert_account_storage(root, storage); + let js_value = 
JsFuture::from(promise).await; + + Ok(()) +} + +pub async fn insert_account_asset_vault( + asset_vault: &AssetVault +) -> Result<(), ()> { + let root = serde_json::to_string(&asset_vault.commitment()).unwrap(); + let assets: Vec = asset_vault.assets().collect(); + let assets_as_str = serde_json::to_string(&assets).unwrap(); + + let promise = idxdb_insert_account_asset_vault(root, assets_as_str); + let js_value = JsFuture::from(promise).await; + Ok(()) +} + +pub async fn insert_account_auth( + account_id: AccountId, + auth_info: &AuthSecretKey, +) -> Result<(), ()> { + let pub_key = match auth_info { + AuthSecretKey::RpoFalcon512(secret) => Word::from(secret.public_key()), + } + .to_bytes(); + + let account_id_str = account_id.to_string(); + let auth_info = auth_info.to_bytes(); + + let promise = idxdb_insert_account_auth(account_id_str, auth_info, pub_key); + let js_value = JsFuture::from(promise).await; + + Ok(()) +} + +pub async fn insert_account_record( + account: &Account, + account_seed: Option, +) -> Result<(), ()> { + let account_id_str = account.id().to_string(); + let code_root = account.code().root().to_string(); + let storage_root = account.storage().root().to_string(); + let vault_root = serde_json::to_string(&account.vault().commitment()).unwrap(); + let committed = account.is_on_chain(); + let nonce = account.nonce().to_string(); + let account_seed = account_seed.map(|seed| seed.to_bytes()); + + let promise = idxdb_insert_account_record( + account_id_str, + code_root, + storage_root, + vault_root, + nonce, + committed, + account_seed, + ); + let js_value = JsFuture::from(promise).await; + + Ok(()) +} + +pub fn parse_account_record_idxdb_object( + account_stub_idxdb: AccountRecordIdxdbOjbect +) -> Result<(AccountStub, Option), StoreError> { + let native_account_id: AccountId = AccountId::from_hex(&account_stub_idxdb.id).unwrap(); + let native_nonce: u64 = account_stub_idxdb.nonce.parse::().map_err(|err| StoreError::ParsingError(err.to_string()))?; + let account_seed = account_stub_idxdb.account_seed.map(|seed| Word::read_from_bytes(&seed)).transpose()?; + + let account_stub = AccountStub::new( + native_account_id, + Felt::new(native_nonce), + serde_json::from_str(&account_stub_idxdb.vault_root).map_err(StoreError::InputSerializationError)?, + Digest::try_from(&account_stub_idxdb.storage_root)?, + Digest::try_from(&account_stub_idxdb.code_root)?, + ); + + Ok((account_stub, account_seed)) +} \ No newline at end of file diff --git a/wasm/src/web_client/store/chain_data/js_bindings.rs b/wasm/src/web_client/store/chain_data/js_bindings.rs new file mode 100644 index 000000000..4ae35fec2 --- /dev/null +++ b/wasm/src/web_client/store/chain_data/js_bindings.rs @@ -0,0 +1,47 @@ +use wasm_bindgen::prelude::*; +use wasm_bindgen_futures::*; + +// Account IndexedDB Operations +#[wasm_bindgen(module = "/js/db/chainData.js")] +extern "C" { + // GETS + // ================================================================================================ + + #[wasm_bindgen(js_name = getBlockHeaders)] + pub fn idxdb_get_block_headers( + block_numbers: Vec + ) -> js_sys::Promise; + + #[wasm_bindgen(js_name = getTrackedBlockHeaders)] + pub fn idxdb_get_tracked_block_headers() -> js_sys::Promise; + + #[wasm_bindgen(js_name = getChainMmrNodesAll)] + pub fn idxdb_get_chain_mmr_nodes_all() -> js_sys::Promise; + + #[wasm_bindgen(js_name = getChainMmrNodes)] + pub fn idxdb_get_chain_mmr_nodes( + ids: Vec + ) -> js_sys::Promise; + + #[wasm_bindgen(js_name = getChainMmrPeaksByBlockNum)] + pub fn 
idxdb_get_chain_mmr_peaks_by_block_num( + block_num: String + ) -> js_sys::Promise; + + // INSERTS + // ================================================================================================ + + #[wasm_bindgen(js_name = insertBlockHeader)] + pub fn idxdb_insert_block_header( + block_num: String, + header: String, + chain_mmr_peaks: String, + has_client_notes: bool + ) -> js_sys::Promise; + + #[wasm_bindgen(js_name = insertChainMmrNodes)] + pub fn idxdb_insert_chain_mmr_nodes( + ids: Vec, + nodes: Vec + ) -> js_sys::Promise; +} \ No newline at end of file diff --git a/wasm/src/web_client/store/chain_data/mod.rs b/wasm/src/web_client/store/chain_data/mod.rs new file mode 100644 index 000000000..0b6f46327 --- /dev/null +++ b/wasm/src/web_client/store/chain_data/mod.rs @@ -0,0 +1,205 @@ +use std::{collections::BTreeMap, num::NonZeroUsize}; + +use wasm_bindgen_futures::JsFuture; +use serde_wasm_bindgen::from_value; + +use miden_client::{ + errors::StoreError, + store::ChainMmrNodeFilter +}; + +// use crate::native_code::{ +// errors::StoreError, +// store::ChainMmrNodeFilter +// }; + +use miden_objects::{crypto::merkle::{InOrderIndex, MmrPeaks}, BlockHeader, Digest}; + +use super::WebStore; + +mod js_bindings; +use js_bindings::*; + +mod models; +use models::*; + +pub mod utils; +use utils::*; + +impl WebStore { + pub(crate) async fn insert_block_header( + &self, + block_header: BlockHeader, + chain_mmr_peaks: MmrPeaks, + has_client_notes: bool + ) -> Result<(), StoreError> { + let chain_mmr_peaks = chain_mmr_peaks.peaks().to_vec(); + let (block_num, header, chain_mmr, has_client_notes) = + serialize_block_header(block_header, chain_mmr_peaks, has_client_notes)?; + + let promise = idxdb_insert_block_header( + block_num, + header, + chain_mmr, + has_client_notes + ); + JsFuture::from(promise).await.unwrap(); + + Ok(()) + } + + pub(crate) async fn get_block_headers( + &self, + block_numbers: &[u32], + ) -> Result, StoreError> { + let formatted_block_numbers_list: Vec = block_numbers + .iter() + .map(|block_number| (*block_number as i64).to_string()) + .collect(); + + let promise = idxdb_get_block_headers(formatted_block_numbers_list); + let js_value = JsFuture::from(promise).await.unwrap(); + let block_headers_idxdb: Vec> = from_value(js_value).unwrap(); + + // Transform the list of Option to a list of results + let results: Result, StoreError> = block_headers_idxdb.into_iter() + .enumerate() // Adding enumerate for better error tracking/logging + .filter_map(|(index, record_option)| { + match record_option { + Some(record) => Some(Ok(record)), + None => { + None // Skip over missing records instead of erroring out + }, + } + }) + .map(|record_result: Result |{ + let record = record_result?; + let block_header = serde_json::from_str(&record.header) + .map_err(StoreError::JsonDataDeserializationError)?; + let has_client_notes = record.has_client_notes; + + Ok((block_header, has_client_notes)) + }) + .collect(); // Collects into Result, StoreError> + + return results; + } + + pub(crate) async fn get_tracked_block_headers( + &self + ) -> Result, StoreError> { + let promise = idxdb_get_tracked_block_headers(); + let js_value = JsFuture::from(promise).await.unwrap(); + let block_headers_idxdb: Vec = from_value(js_value).unwrap(); + + let results:Result, StoreError> = block_headers_idxdb.into_iter().map(|record| { + let block_header = serde_json::from_str(&record.header).unwrap(); + + Ok(block_header) + }).collect(); + + return results; + } + + pub(crate) async fn get_chain_mmr_nodes<'a>( + 
&'a self, + filter: ChainMmrNodeFilter<'a>, + ) -> Result, StoreError> { + match filter { + ChainMmrNodeFilter::All => { + let promise = idxdb_get_chain_mmr_nodes_all(); + let js_value = JsFuture::from(promise).await.unwrap(); + let chain_mmr_nodes_idxdb: Vec = from_value(js_value).unwrap(); + + let results:Result, StoreError> = chain_mmr_nodes_idxdb.into_iter().map(|record| { + let id_as_u64: u64 = record.id.parse::().unwrap(); + let id = InOrderIndex::new(NonZeroUsize::new(id_as_u64 as usize).unwrap()); + let node: Digest = + serde_json::from_str(&record.node).map_err(StoreError::JsonDataDeserializationError)?; + Ok((id, node)) + }).collect(); + + return results; + }, + ChainMmrNodeFilter::List(ids) => { + let formatted_list: Vec = ids + .iter() + .map(|id| (Into::::into(*id)).to_string()) + .collect(); + + let promise = idxdb_get_chain_mmr_nodes(formatted_list); + let js_value = JsFuture::from(promise).await.unwrap(); + let chain_mmr_nodes_idxdb: Vec = from_value(js_value).unwrap(); + + let results:Result, StoreError> = chain_mmr_nodes_idxdb.into_iter().map(|record| { + let id_as_u64: u64 = record.id.parse::().unwrap(); + let id = InOrderIndex::new(NonZeroUsize::new(id_as_u64 as usize).unwrap()); + let node: Digest = + serde_json::from_str(&record.node).map_err(StoreError::JsonDataDeserializationError)?; + Ok((id, node)) + }).collect(); + + return results; + } + } + } + + pub(crate) async fn get_chain_mmr_peaks_by_block_num( + &self, + block_num: u32, + ) -> Result { + let block_num_as_str = block_num.to_string(); + + let promise = idxdb_get_chain_mmr_peaks_by_block_num(block_num_as_str); + let js_value = JsFuture::from(promise).await.unwrap(); + let mmr_peaks_idxdb: MmrPeaksIdxdbObject = from_value(js_value).unwrap(); + + if let Some(peaks) = mmr_peaks_idxdb.peaks { + let mmr_peaks_nodes: Vec = + serde_json::from_str(&peaks).map_err(StoreError::JsonDataDeserializationError)?; + + return MmrPeaks::new(block_num as usize, mmr_peaks_nodes).map_err(StoreError::MmrError) + } + + return Ok(MmrPeaks::new(0, vec![])?); + } + + pub(crate) async fn insert_chain_mmr_nodes( + &self, + nodes: &[(InOrderIndex, Digest)], + ) -> Result<(), StoreError> { + let mut serialized_node_ids = Vec::new(); + let mut serialized_nodes = Vec::new(); + for (id, node) in nodes.iter() { + let (serialized_id, serialized_node) = serialize_chain_mmr_node(*id, *node)?; + serialized_node_ids.push(serialized_id); + serialized_nodes.push(serialized_node); + }; + + let promise = idxdb_insert_chain_mmr_nodes(serialized_node_ids, serialized_nodes); + JsFuture::from(promise).await.unwrap(); + + Ok(()) + } + + // Without self? 
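+    // Presumably an associated function (no `&self`) so that sync/transaction code can persist a
+    // block header without holding a `WebStore` reference; the body mirrors `insert_block_header` above.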
+ pub(crate) async fn insert_block_header_tx( + block_header: BlockHeader, + chain_mmr_peaks: MmrPeaks, + has_client_notes: bool, + ) -> Result<(), StoreError> { + let chain_mmr_peaks = chain_mmr_peaks.peaks().to_vec(); + let (block_num, header, chain_mmr, has_client_notes) = + serialize_block_header(block_header, chain_mmr_peaks, has_client_notes)?; + + let promise = idxdb_insert_block_header( + block_num, + header, + chain_mmr, + has_client_notes + ); + JsFuture::from(promise).await.unwrap(); + + Ok(()) + } +} \ No newline at end of file diff --git a/wasm/src/web_client/store/chain_data/models.rs b/wasm/src/web_client/store/chain_data/models.rs new file mode 100644 index 000000000..e8130f6e3 --- /dev/null +++ b/wasm/src/web_client/store/chain_data/models.rs @@ -0,0 +1,20 @@ +use serde::{Deserialize, Serialize}; + +#[derive(Serialize, Deserialize)] +pub struct BlockHeaderIdxdbObject { + pub block_num: String, + pub header: String, + pub chain_mmr: String, + pub has_client_notes: bool +} + +#[derive(Serialize, Deserialize)] +pub struct ChainMmrNodeIdxdbObject { + pub id: String, + pub node: String +} + +#[derive(Serialize, Deserialize)] +pub struct MmrPeaksIdxdbObject { + pub peaks: Option +} \ No newline at end of file diff --git a/wasm/src/web_client/store/chain_data/utils.rs b/wasm/src/web_client/store/chain_data/utils.rs new file mode 100644 index 000000000..69afce7b2 --- /dev/null +++ b/wasm/src/web_client/store/chain_data/utils.rs @@ -0,0 +1,31 @@ +use miden_objects::{crypto::merkle::InOrderIndex, BlockHeader, Digest}; + +use miden_client::errors::StoreError; +// use crate::native_code::errors::StoreError; + +type SerializedBlockHeaderData = (String, String, String, bool); +type SerializedChainMmrNodeData = (String, String); + +pub fn serialize_block_header( + block_header: BlockHeader, + chain_mmr_peaks: Vec, + has_client_notes: bool, +) -> Result { + let block_num = block_header.block_num().to_string(); + let header = + serde_json::to_string(&block_header).map_err(StoreError::InputSerializationError)?; + let chain_mmr_peaks = + serde_json::to_string(&chain_mmr_peaks).map_err(StoreError::InputSerializationError)?; + + Ok((block_num, header, chain_mmr_peaks, has_client_notes)) +} + +pub fn serialize_chain_mmr_node( + id: InOrderIndex, + node: Digest, +) -> Result { + let id: u64 = id.into(); + let id_as_str = id.to_string(); + let node = serde_json::to_string(&node).map_err(StoreError::InputSerializationError)?; + Ok((id_as_str, node)) +} \ No newline at end of file diff --git a/wasm/src/web_client/store/mod.rs b/wasm/src/web_client/store/mod.rs new file mode 100644 index 000000000..57224e03f --- /dev/null +++ b/wasm/src/web_client/store/mod.rs @@ -0,0 +1,231 @@ +use std::collections::BTreeMap; + +use async_trait::async_trait; +use wasm_bindgen::prelude::*; +use wasm_bindgen_futures::*; + +use miden_objects::{ + accounts::{Account, AccountId, AccountStub, AuthSecretKey}, + crypto::merkle::{InOrderIndex, MmrPeaks}, + notes::{NoteId, NoteInclusionProof, NoteTag}, + transaction::TransactionId, + BlockHeader, Digest, Word +}; +use miden_client::{ + errors::StoreError, + store::{ChainMmrNodeFilter, InputNoteRecord, NoteFilter, OutputNoteRecord, Store, TransactionFilter}, + client::{ + sync::StateSyncUpdate, + transactions::{TransactionRecord, TransactionResult} + } +}; + +// use crate::native_code::{ +// errors::{ClientError, StoreError}, +// store::{ +// note_record::{InputNoteRecord, OutputNoteRecord}, +// ChainMmrNodeFilter, NoteFilter, Store, TransactionFilter +// }, +// 
sync::SyncedNewNotes, +// transactions::{TransactionRecord, TransactionResult} +// }; + +pub mod accounts; +pub mod notes; +pub mod transactions; +pub mod sync; +pub mod chain_data; + +// Initialize IndexedDB +#[wasm_bindgen(module = "/js/db/schema.js")] +extern "C" { + #[wasm_bindgen(js_name = openDatabase)] + fn setup_indexed_db() -> js_sys::Promise; +} + +pub struct WebStore {} + +impl WebStore { + pub async fn new() -> Result { + JsFuture::from(setup_indexed_db()).await; + Ok(WebStore {}) + } +} + +// #[async_trait(?Send)] +impl Store for WebStore { + // SYNC + // -------------------------------------------------------------------------------------------- + + async fn get_note_tags( + &self + ) -> Result, StoreError> { + self.get_note_tags().await + } + + async fn add_note_tag( + &self, + tag: NoteTag, + ) -> Result { + self.add_note_tag(tag).await + } + + async fn remove_note_tag( + &self, + tag: NoteTag + ) -> Result { + self.remove_note_tag(tag).await + } + + async fn get_sync_height( + &self + ) -> Result { + self.get_sync_height().await + } + + async fn apply_state_sync( + &self, + state_sync_update: StateSyncUpdate, + ) -> Result<(), StoreError> { + self.apply_state_sync(state_sync_update).await + } + + // TRANSACTIONS + // -------------------------------------------------------------------------------------------- + + async fn get_transactions( + &self, + transaction_filter: TransactionFilter, + ) -> Result, StoreError> { + self.get_transactions(transaction_filter).await + } + + async fn apply_transaction( + &self, + tx_result: TransactionResult, + ) -> Result<(), StoreError> { + self.apply_transaction(tx_result).await + } + + // NOTES + // -------------------------------------------------------------------------------------------- + + async fn get_input_notes( + &self, + filter: NoteFilter<'_>, + ) -> Result, StoreError> { + self.get_input_notes(filter).await + } + + async fn get_output_notes( + &self, + note_filter: NoteFilter<'_>, + ) -> Result, StoreError> { + self.get_output_notes(note_filter).await + } + + async fn insert_input_note( + &self, + note: &InputNoteRecord, + ) -> Result<(), StoreError> { + self.insert_input_note(note).await + } + + // CHAIN DATA + // -------------------------------------------------------------------------------------------- + + async fn insert_block_header( + &self, + block_header: BlockHeader, + chain_mmr_peaks: MmrPeaks, + has_client_notes: bool, + ) -> Result<(), StoreError> { + self.insert_block_header(block_header, chain_mmr_peaks, has_client_notes).await + } + + async fn get_block_headers( + &self, + block_numbers: &[u32], + ) -> Result, StoreError> { + self.get_block_headers(block_numbers).await + } + + async fn get_tracked_block_headers( + &self + ) -> Result, StoreError> { + self.get_tracked_block_headers().await + } + + async fn get_chain_mmr_nodes<'a>( + &self, + filter: ChainMmrNodeFilter<'a>, + ) -> Result, StoreError> { + self.get_chain_mmr_nodes(filter).await + } + + async fn insert_chain_mmr_nodes( + &self, + nodes: &[(InOrderIndex, Digest)] + ) -> Result<(), StoreError> { + self.insert_chain_mmr_nodes(nodes).await + } + + async fn get_chain_mmr_peaks_by_block_num( + &self, + block_num: u32 + ) -> Result { + self.get_chain_mmr_peaks_by_block_num(block_num).await + } + + // ACCOUNTS + // -------------------------------------------------------------------------------------------- + + async fn insert_account( + &self, + account: &Account, + account_seed: Option, + auth_info: &AuthSecretKey, + ) -> Result<(), StoreError> { + 
self.insert_account(account, account_seed, auth_info).await + } + + async fn get_account_ids( + &self + ) -> Result, StoreError> { + self.get_account_ids().await + } + + async fn get_account_stubs( + &self + ) -> Result)>, StoreError> { + self.get_account_stubs().await + } + + async fn get_account_stub( + &self, + account_id: AccountId, + ) -> Result<(AccountStub, Option), StoreError> { + self.get_account_stub(account_id).await + } + + async fn get_account( + &self, + account_id: AccountId, + ) -> Result<(Account, Option), StoreError> { + self.get_account(account_id).await + } + + async fn get_account_auth( + &self, + account_id: AccountId, + ) -> Result { + self.get_account_auth(account_id).await + } + + fn get_account_auth_by_pub_key( + &self, + pub_key: Word + ) -> Result { + self.get_account_auth_by_pub_key(pub_key) + } +} \ No newline at end of file diff --git a/wasm/src/web_client/store/notes/js_bindings.rs b/wasm/src/web_client/store/notes/js_bindings.rs new file mode 100644 index 000000000..60089cda1 --- /dev/null +++ b/wasm/src/web_client/store/notes/js_bindings.rs @@ -0,0 +1,67 @@ +use wasm_bindgen::prelude::*; +use wasm_bindgen_futures::*; + +// Account IndexedDB Operations +#[wasm_bindgen(module = "/js/db/notes.js")] +extern "C" { + // GETS + // ================================================================================================ + + #[wasm_bindgen(js_name = getInputNotes)] + pub fn idxdb_get_input_notes( + status: String + ) -> js_sys::Promise; + + #[wasm_bindgen(js_name = getInputNotesFromIds)] + pub fn idxdb_get_input_notes_from_ids( + note_ids: Vec + ) -> js_sys::Promise; + + #[wasm_bindgen(js_name = getOutputNotes)] + pub fn idxdb_get_output_notes( + status: String + ) -> js_sys::Promise; + + #[wasm_bindgen(js_name = getOutputNotesFromIds)] + pub fn idxdb_get_output_notes_from_ids( + note_ids: Vec + ) -> js_sys::Promise; + + #[wasm_bindgen(js_name = getUnspentInputNoteNullifiers)] + pub fn idxdb_get_unspent_input_note_nullifiers() -> js_sys::Promise; + + // INSERTS + // ================================================================================================ + + #[wasm_bindgen(js_name = insertInputNote)] + pub fn idxdb_insert_input_note( + note_id: String, + assets: Vec, + recipient: String, + status: String, + metadata: Option, + details: String, + note_script_hash: String, + serialized_note_script: Vec, + inclusion_proof: Option + ) -> js_sys::Promise; + + #[wasm_bindgen(js_name = insertOutputNote)] + pub fn idxdb_insert_output_note( + note_id: String, + assets: Vec, + recipient: String, + status: String, + metadata: String, + details: Option, + note_script_hash: Option, + serialized_note_script: Option>, + inclusion_proof: Option + ) -> js_sys::Promise; + + #[wasm_bindgen(js_name = updateNoteConsumerTxId)] + pub fn idxdb_update_note_consumer_tx_id( + note_id: String, + consumer_tx_id: String + ) -> js_sys::Promise; +} \ No newline at end of file diff --git a/wasm/src/web_client/store/notes/mod.rs b/wasm/src/web_client/store/notes/mod.rs new file mode 100644 index 000000000..c1ff675ee --- /dev/null +++ b/wasm/src/web_client/store/notes/mod.rs @@ -0,0 +1,174 @@ +use miden_objects::notes::{NoteAssets, NoteId, NoteInclusionProof, NoteInputs, NoteMetadata, NoteScript, Nullifier}; +use miden_objects::Digest; +use miden_tx::utils::Deserializable; +use serde_wasm_bindgen::from_value; +use wasm_bindgen_futures::*; + +use super::WebStore; +use miden_client::{ + errors::StoreError, + store::{InputNoteRecord, NoteFilter, OutputNoteRecord} +}; +// use 
crate::native_code::errors::StoreError; +// use crate::native_code::store::note_record::{ +// InputNoteRecord, +// NoteRecordDetails, +// NoteStatus, +// OutputNoteRecord +// }; +// use crate::native_code::store::NoteFilter; +use crate::web_client::notes::WebClientNoteFilter; + +mod js_bindings; +use js_bindings::*; + +mod models; +use models::*; + +pub(crate) mod utils; +use utils::*; + +use web_sys::console; +use wasm_bindgen::*; + +impl WebStore { + pub(crate) async fn get_input_notes( + &self, + filter: NoteFilter<'_> + ) -> Result, StoreError> { + let promise = match filter { + NoteFilter::All | NoteFilter::Consumed | NoteFilter::Committed | NoteFilter::Pending => { + let filter_as_str = match filter { + NoteFilter::All => "All", + NoteFilter::Consumed => "Consumed", + NoteFilter::Committed => "Committed", + NoteFilter::Pending => "Pending", + _ => unreachable!(), // Safety net, should never be reached + }; + + // Assuming `js_fetch_notes` is your JavaScript function that handles simple string filters + idxdb_get_input_notes(filter_as_str.to_string()) + }, + NoteFilter::List(ids) => { + let note_ids_as_str: Vec = ids.into_iter().map(|id| id.inner().to_string()).collect(); + idxdb_get_input_notes_from_ids(note_ids_as_str) + }, + NoteFilter::Unique(id) => { + let note_id_as_str = id.inner().to_string(); + let note_ids = vec![note_id_as_str]; + idxdb_get_input_notes_from_ids(note_ids) + } + }; + + let js_value = JsFuture::from(promise).await.unwrap(); + let input_notes_idxdb: Vec = from_value(js_value).unwrap(); + + let native_input_notes: Result, StoreError> = input_notes_idxdb + .into_iter() + .map(parse_input_note_idxdb_object) // Simplified closure + .collect::, _>>(); // Collect results into a single Result + + match native_input_notes { + Ok(ref notes) => match filter { + NoteFilter::Unique(note_id) if notes.is_empty() => { + return Err(StoreError::NoteNotFound(note_id)); + }, + NoteFilter::List(note_ids) if note_ids.len() != notes.len() => { + let missing_note_id = note_ids + .iter() + .find(|&¬e_id| !notes.iter().any(|note_record| note_record.id() == note_id)) + .expect("should find one note id that wasn't retrieved by the db"); + return Err(StoreError::NoteNotFound(*missing_note_id)); + }, + _ => {}, + }, + Err(e) => return Err(e), + } + + native_input_notes + } + + pub(crate) async fn get_output_notes( + &self, + filter: NoteFilter<'_> + ) -> Result, StoreError> { + web_sys::console::log_1(&JsValue::from_str("get_output_notes called")); + let promise = match filter { + NoteFilter::All | NoteFilter::Consumed | NoteFilter::Committed | NoteFilter::Pending => { + let filter_as_str = match filter { + NoteFilter::All => "All", + NoteFilter::Consumed => "Consumed", + NoteFilter::Committed => "Committed", + NoteFilter::Pending => "Pending", + _ => unreachable!(), // Safety net, should never be reached + }; + + // Assuming `js_fetch_notes` is your JavaScript function that handles simple string filters + web_sys::console::log_1(&JsValue::from_str("get_output_notes 2")); + idxdb_get_output_notes(filter_as_str.to_string()) + }, + NoteFilter::List(ids) => { + let note_ids_as_str: Vec = ids.into_iter().map(|id| id.inner().to_string()).collect(); + idxdb_get_output_notes_from_ids(note_ids_as_str) + }, + NoteFilter::Unique(id) => { + let note_id_as_str = id.inner().to_string(); + let note_ids = vec![note_id_as_str]; + idxdb_get_output_notes_from_ids(note_ids) + } + }; + + web_sys::console::log_1(&JsValue::from_str("get_output_notes 3")); + let js_value = JsFuture::from(promise).await.unwrap(); 
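+        // NOTE: `unwrap` will panic if the JS promise rejects; the same pattern is used for the
+        // other IndexedDB calls in this store.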
+ web_sys::console::log_1(&JsValue::from_str("get_output_notes 4")); + let output_notes_idxdb: Vec = from_value(js_value).unwrap(); + web_sys::console::log_1(&JsValue::from_str("get_output_notes 5")); + + let native_output_notes: Result, StoreError> = output_notes_idxdb + .into_iter() + .map(parse_output_note_idxdb_object) // Simplified closure + .collect::, _>>(); // Collect results into a single Result + web_sys::console::log_1(&JsValue::from_str("get_output_notes 6")); + + match native_output_notes { + Ok(ref notes) => match filter { + NoteFilter::Unique(note_id) if notes.is_empty() => { + return Err(StoreError::NoteNotFound(note_id)); + }, + NoteFilter::List(note_ids) if note_ids.len() != notes.len() => { + let missing_note_id = note_ids + .iter() + .find(|&¬e_id| !notes.iter().any(|note_record| note_record.id() == note_id)) + .expect("should find one note id that wasn't retrieved by the db"); + return Err(StoreError::NoteNotFound(*missing_note_id)); + }, + _ => {}, + }, + Err(e) => return Err(e), + } + web_sys::console::log_1(&JsValue::from_str("get_output_notes 7")); + + native_output_notes + } + + pub(crate) async fn insert_input_note( + &self, + note: &InputNoteRecord + ) -> Result<(), StoreError> { + insert_input_note_tx(note).await + } + + pub(crate) async fn get_unspent_input_note_nullifiers( + &self + ) -> Result, StoreError>{ + let promise = idxdb_get_unspent_input_note_nullifiers(); + let js_value = JsFuture::from(promise).await.unwrap(); + let nullifiers_as_str: Vec = from_value(js_value).unwrap(); + + let nullifiers = nullifiers_as_str.into_iter().map(|s| { + Digest::try_from(s).map(Nullifier::from).map_err(StoreError::HexParseError) + }).collect::, _>>(); + + return nullifiers; + } +} diff --git a/wasm/src/web_client/store/notes/models.rs b/wasm/src/web_client/store/notes/models.rs new file mode 100644 index 000000000..b40716287 --- /dev/null +++ b/wasm/src/web_client/store/notes/models.rs @@ -0,0 +1,52 @@ +use base64::{decode as base64_decode, DecodeError}; +use serde::{Deserialize, Deserializer, Serialize}; +use serde::de::{Error, Unexpected}; + +#[derive(Serialize, Deserialize)] +pub struct InputNoteIdxdbObject { + #[serde(deserialize_with = "base64_to_vec_u8_required", default)] + pub assets: Vec, + pub details: String, + pub recipient: String, + pub status: String, + pub metadata: Option, + pub inclusion_proof: Option, + #[serde(deserialize_with = "base64_to_vec_u8_required", default)] + pub serialized_note_script: Vec, + pub consumer_account_id: Option, +} + +#[derive(Serialize, Deserialize)] +pub struct OutputNoteIdxdbObject { + #[serde(deserialize_with = "base64_to_vec_u8_required", default)] + pub assets: Vec, + pub details: Option, + pub recipient: String, + pub status: String, + pub metadata: String, + pub inclusion_proof: Option, + #[serde(deserialize_with = "base64_to_vec_u8_optional", default)] + pub serialized_note_script: Option>, + pub consumer_account_id: Option +} + +fn base64_to_vec_u8_required<'de, D>(deserializer: D) -> Result, D::Error> +where + D: Deserializer<'de>, +{ + let base64_str: String = Deserialize::deserialize(deserializer)?; + base64_decode(&base64_str).map_err(|e| Error::custom(format!("Base64 decode error: {}", e))) +} + +fn base64_to_vec_u8_optional<'de, D>(deserializer: D) -> Result>, D::Error> +where + D: Deserializer<'de>, +{ + let base64_str: Option = Option::deserialize(deserializer)?; + match base64_str { + Some(str) => base64_decode(&str) + .map(Some) + .map_err(|e| Error::custom(format!("Base64 decode error: {}", e))), + None 
=> Ok(None), + } +} \ No newline at end of file diff --git a/wasm/src/web_client/store/notes/utils.rs b/wasm/src/web_client/store/notes/utils.rs new file mode 100644 index 000000000..c3b31c8d0 --- /dev/null +++ b/wasm/src/web_client/store/notes/utils.rs @@ -0,0 +1,379 @@ +use miden_objects::{ + accounts::AccountId, + notes::{NoteAssets, NoteId, NoteInclusionProof, NoteMetadata, NoteScript}, + transaction::TransactionId, + utils::Deserializable, + Digest +}; +use miden_tx::utils::Serializable; +use wasm_bindgen_futures::*; + +use miden_client::{ + store::{NoteStatus, NoteRecordDetails, InputNoteRecord, OutputNoteRecord}, + errors::StoreError +}; +// use crate::native_code::{errors::StoreError, store::note_record::{InputNoteRecord, NoteStatus, OutputNoteRecord}}; + +use crate::web_client::store::notes::{InputNoteIdxdbObject, OutputNoteIdxdbObject}; +use super::js_bindings::*; + +use wasm_bindgen::*; +use web_sys::console; + +// TYPES +// ================================================================================================ + +type SerializedInputNoteData = ( + String, + Vec, + String, + String, + Option, + String, + String, + Vec, + Option +); + +type SerializedOutputNoteData = ( + String, + Vec, + String, + String, + String, + Option, + Option, + Option>, + Option, +); + +// ================================================================================================ + +pub(crate) async fn update_note_consumer_tx_id( + note_id: NoteId, + consumer_tx_id: TransactionId, +) -> Result<(), StoreError> { + let serialized_note_id = note_id.inner().to_string(); + let serialized_consumer_tx_id = consumer_tx_id.to_string(); + + let promise = idxdb_update_note_consumer_tx_id(serialized_note_id, serialized_consumer_tx_id); + let result = JsFuture::from(promise).await.unwrap(); + + Ok(()) +} + +pub(crate) fn serialize_input_note( + note: &InputNoteRecord +) -> Result { + let note_id = note.id().inner().to_string(); + let note_assets = note.assets().to_bytes(); + + let (inclusion_proof, status) = match note.inclusion_proof() { + Some(proof) => { + let block_num = proof.origin().block_num; + let node_index = proof.origin().node_index.value(); + let sub_hash = proof.sub_hash(); + let note_root = proof.note_root(); + + let inclusion_proof = serde_json::to_string(&NoteInclusionProof::new( + block_num, + sub_hash, + note_root, + node_index, + proof.note_path().clone(), + )?) + .map_err(StoreError::InputSerializationError)?; + + let status = serde_json::to_string(&NoteStatus::Committed) + .map_err(StoreError::InputSerializationError)? + .replace('\"', ""); + (Some(inclusion_proof), status) + }, + None => { + let status = serde_json::to_string(&NoteStatus::Pending) + .map_err(StoreError::InputSerializationError)? + .replace('\"', ""); + + (None, status) + }, + }; + let recipient = note.recipient().to_hex(); + + let metadata = if let Some(metadata) = note.metadata() { + Some(serde_json::to_string(metadata).map_err(StoreError::InputSerializationError)?) 
+ } else { + None + }; + + let details = + serde_json::to_string(¬e.details()).map_err(StoreError::InputSerializationError)?; + let note_script_hash = note.details().script_hash().to_hex(); + let serialized_note_script = note.details().script().to_bytes(); + + Ok(( + note_id, + note_assets, + recipient, + status, + metadata, + details, + note_script_hash, + serialized_note_script, + inclusion_proof, + )) +} + +pub async fn insert_input_note_tx( + note: &InputNoteRecord +) -> Result<(), StoreError> { + let ( + note_id, + assets, + recipient, + status, + metadata, + details, + note_script_hash, + serialized_note_script, + inclusion_proof + ) = serialize_input_note(note)?; + + let promise = idxdb_insert_input_note( + note_id, + assets, + recipient, + status, + metadata, + details, + note_script_hash, + serialized_note_script, + inclusion_proof + ); + JsFuture::from(promise).await.unwrap(); + + Ok(()) +} + +pub(crate) fn serialize_output_note( + note: &OutputNoteRecord, +) -> Result { + let note_id = note.id().inner().to_string(); + let note_assets = note.assets().to_bytes(); + let (inclusion_proof, status) = match note.inclusion_proof() { + Some(proof) => { + let block_num = proof.origin().block_num; + let node_index = proof.origin().node_index.value(); + let sub_hash = proof.sub_hash(); + let note_root = proof.note_root(); + + let inclusion_proof = serde_json::to_string(&NoteInclusionProof::new( + block_num, + sub_hash, + note_root, + node_index, + proof.note_path().clone(), + )?) + .map_err(StoreError::InputSerializationError)?; + + let status = serde_json::to_string(&NoteStatus::Committed) + .map_err(StoreError::InputSerializationError)? + .replace('\"', ""); + + (Some(inclusion_proof), status) + }, + None => { + let status = serde_json::to_string(&NoteStatus::Pending) + .map_err(StoreError::InputSerializationError)? + .replace('\"', ""); + + (None, status) + }, + }; + let recipient = note.recipient().to_hex(); + + let metadata = + serde_json::to_string(note.metadata()).map_err(StoreError::InputSerializationError)?; + + let details = if let Some(details) = note.details() { + Some(serde_json::to_string(&details).map_err(StoreError::InputSerializationError)?) 
+ } else { + None + }; + let note_script_hash = note.details().map(|details| details.script_hash().to_hex()); + let serialized_note_script = note.details().map(|details| details.script().to_bytes()); + + Ok(( + note_id, + note_assets, + recipient, + status, + metadata, + details, + note_script_hash, + serialized_note_script, + inclusion_proof, + )) +} + +pub async fn insert_output_note_tx( + note: &OutputNoteRecord +) -> Result<(), StoreError> { + let ( + note_id, + assets, + recipient, + status, + metadata, + details, + note_script_hash, + serialized_note_script, + inclusion_proof + ) = serialize_output_note(note)?; + + let result = JsFuture::from(idxdb_insert_output_note( + note_id, + assets, + recipient, + status, + metadata, + details, + note_script_hash, + serialized_note_script, + inclusion_proof + )).await; + match result { + Ok(_) => Ok(()), + Err(_) => Err(StoreError::QueryError("Failed to insert output note".to_string())), + } +} + +pub fn parse_input_note_idxdb_object( + note_idxdb: InputNoteIdxdbObject +) -> Result { + // Merge the info that comes from the input notes table and the notes script table + let note_script = NoteScript::read_from_bytes(¬e_idxdb.serialized_note_script)?; + let note_details: NoteRecordDetails = + serde_json::from_str(¬e_idxdb.details).map_err(StoreError::JsonDataDeserializationError)?; + let note_details = NoteRecordDetails::new( + note_details.nullifier().to_string(), + note_script, + note_details.inputs().clone(), + note_details.serial_num(), + ); + + let note_metadata: Option = if let Some(metadata_as_json_str) = note_idxdb.metadata { + Some( + serde_json::from_str(&metadata_as_json_str) + .map_err(StoreError::JsonDataDeserializationError)?, + ) + } else { + None + }; + + let note_assets = NoteAssets::read_from_bytes(¬e_idxdb.assets)?; + + let inclusion_proof = match note_idxdb.inclusion_proof { + Some(note_inclusion_proof) => { + let note_inclusion_proof: NoteInclusionProof = + serde_json::from_str(¬e_inclusion_proof) + .map_err(StoreError::JsonDataDeserializationError)?; + + Some(note_inclusion_proof) + }, + _ => None, + }; + + let recipient = Digest::try_from(note_idxdb.recipient)?; + let id = NoteId::new(recipient, note_assets.commitment()); + let status: NoteStatus = serde_json::from_str(&format!("\"{0}\"", note_idxdb.status)) + .map_err(StoreError::JsonDataDeserializationError)?; + let consumer_account_id: Option = match note_idxdb.consumer_account_id { + Some(account_id) => Some(AccountId::from_hex(&account_id)?), + None => None, + }; + + Ok(InputNoteRecord::new( + id, + recipient, + note_assets, + status, + note_metadata, + inclusion_proof, + note_details, + consumer_account_id + )) +} + +pub fn parse_output_note_idxdb_object( + note_idxdb: OutputNoteIdxdbObject +) -> Result { + web_sys::console::log_1(&JsValue::from_str("parse_output_note_idxdb_object called")); + let note_details: Option = if let Some(details_as_json_str) = note_idxdb.details { + web_sys::console::log_1(&JsValue::from_str("parse_output_note_idxdb_object 1")); + // Merge the info that comes from the input notes table and the notes script table + let serialized_note_script = note_idxdb.serialized_note_script + .expect("Has note details so it should have the serialized script"); + web_sys::console::log_1(&JsValue::from_str("parse_output_note_idxdb_object 2")); + let note_script = NoteScript::read_from_bytes(&serialized_note_script)?; + web_sys::console::log_1(&JsValue::from_str("parse_output_note_idxdb_object 3")); + let note_details: NoteRecordDetails = 
serde_json::from_str(&details_as_json_str) + .map_err(StoreError::JsonDataDeserializationError)?; + web_sys::console::log_1(&JsValue::from_str("parse_output_note_idxdb_object 4")); + let note_details = NoteRecordDetails::new( + note_details.nullifier().to_string(), + note_script, + note_details.inputs().clone(), + note_details.serial_num(), + ); + web_sys::console::log_1(&JsValue::from_str("parse_output_note_idxdb_object 5")); + + Some(note_details) + } else { + web_sys::console::log_1(&JsValue::from_str("parse_output_note_idxdb_object 6")); + None + }; + web_sys::console::log_1(&JsValue::from_str("parse_output_note_idxdb_object 7")); + let note_metadata: NoteMetadata = + serde_json::from_str(¬e_idxdb.metadata).map_err(StoreError::JsonDataDeserializationError)?; + web_sys::console::log_1(&JsValue::from_str("parse_output_note_idxdb_object 8")); + + let note_assets = NoteAssets::read_from_bytes(¬e_idxdb.assets)?; + web_sys::console::log_1(&JsValue::from_str("parse_output_note_idxdb_object 9")); + + let inclusion_proof = match note_idxdb.inclusion_proof { + Some(note_inclusion_proof) => { + let note_inclusion_proof: NoteInclusionProof = + serde_json::from_str(¬e_inclusion_proof) + .map_err(StoreError::JsonDataDeserializationError)?; + + Some(note_inclusion_proof) + }, + _ => None, + }; + web_sys::console::log_1(&JsValue::from_str("parse_output_note_idxdb_object 10")); + + let recipient = Digest::try_from(note_idxdb.recipient)?; + web_sys::console::log_1(&JsValue::from_str("parse_output_note_idxdb_object 11")); + let id = NoteId::new(recipient, note_assets.commitment()); + web_sys::console::log_1(&JsValue::from_str("parse_output_note_idxdb_object 12")); + let status: NoteStatus = serde_json::from_str(&format!("\"{0}\"", note_idxdb.status)) + .map_err(StoreError::JsonDataDeserializationError)?; + web_sys::console::log_1(&JsValue::from_str("parse_output_note_idxdb_object 13")); + + let consumer_account_id: Option = match note_idxdb.consumer_account_id { + Some(account_id) => Some(AccountId::from_hex(&account_id)?), + None => None, + }; + web_sys::console::log_1(&JsValue::from_str("parse_output_note_idxdb_object 14")); + + Ok(OutputNoteRecord::new( + id, + recipient, + note_assets, + status, + note_metadata, + inclusion_proof, + note_details, + consumer_account_id, + )) +} \ No newline at end of file diff --git a/wasm/src/web_client/store/sync/js_bindings.rs b/wasm/src/web_client/store/sync/js_bindings.rs new file mode 100644 index 000000000..7a69d4fca --- /dev/null +++ b/wasm/src/web_client/store/sync/js_bindings.rs @@ -0,0 +1,40 @@ +use wasm_bindgen::prelude::*; +use wasm_bindgen_futures::*; + +// Account IndexedDB Operations +#[wasm_bindgen(module = "/js/db/sync.js")] +extern "C" { + // GETS + // ================================================================================================ + + #[wasm_bindgen(js_name = getSyncHeight)] + pub fn idxdb_get_sync_height() -> js_sys::Promise; + + #[wasm_bindgen(js_name = getNoteTags)] + pub fn idxdb_get_note_tags() -> js_sys::Promise; + + // INSERTS + // ================================================================================================ + + #[wasm_bindgen(js_name = addNoteTag)] + pub fn idxdb_add_note_tag( + tags: String + ) -> js_sys::Promise; + + #[wasm_bindgen(js_name = applyStateSync)] + pub fn idxdb_apply_state_sync( + block_num: String, + nullifiers: Vec, + block_header: String, + chain_mmr_peaks: String, + has_client_notes: bool, + serialized_node_ids: Vec, + serialized_nodes: Vec, + output_note_ids: Vec, + 
output_note_inclusion_proofs: Vec<String>, + input_note_ids: Vec<String>, + input_note_inclusion_proofs: Vec<String>, + input_note_metadatas: Vec<String>, + transactions_to_commit: Vec<String>, + ) -> js_sys::Promise; +} \ No newline at end of file diff --git a/wasm/src/web_client/store/sync/mod.rs b/wasm/src/web_client/store/sync/mod.rs new file mode 100644 index 000000000..3e3b89657 --- /dev/null +++ b/wasm/src/web_client/store/sync/mod.rs @@ -0,0 +1,183 @@ +use miden_objects::{ + accounts::Account, + crypto::merkle::{ + InOrderIndex, MmrPeaks + }, + notes::{NoteInclusionProof, NoteTag}, + transaction::TransactionId, + BlockHeader, Digest +}; +use wasm_bindgen_futures::*; +use serde_wasm_bindgen::from_value; + +use miden_client::{ + client::sync::StateSyncUpdate, + errors::StoreError +}; +// use crate::native_code::{errors::StoreError, sync::SyncedNewNotes}; + +use super::{chain_data::utils::serialize_chain_mmr_node, notes::utils::insert_input_note_tx, transactions::utils::update_account, WebStore}; + +mod js_bindings; +use js_bindings::*; + +mod models; +use models::*; + +impl WebStore { + pub(crate) async fn get_note_tags( + &self + ) -> Result<Vec<NoteTag>, StoreError> { + let promise = idxdb_get_note_tags(); + let js_value = JsFuture::from(promise).await.unwrap(); + let tags_idxdb: NoteTagsIdxdbObject = from_value(js_value).unwrap(); + + let tags: Vec<NoteTag> = serde_json::from_str(&tags_idxdb.tags).unwrap(); + + return Ok(tags); + } + + pub(super) async fn get_sync_height( + &self + ) -> Result<u32, StoreError> { + let promise = idxdb_get_sync_height(); + let js_value = JsFuture::from(promise).await.unwrap(); + let block_num_idxdb: SyncHeightIdxdbObject = from_value(js_value).unwrap(); + + let block_num_as_u32: u32 = block_num_idxdb.block_num.parse::<u32>().unwrap(); + return Ok(block_num_as_u32); + } + + pub(super) async fn add_note_tag( + &self, + tag: NoteTag + ) -> Result<bool, StoreError> { + let mut tags = self.get_note_tags().await.unwrap(); + if tags.contains(&tag) { + return Ok(false); + } + tags.push(tag); + let tags = serde_json::to_string(&tags).map_err(StoreError::InputSerializationError)?; + + let promise = idxdb_add_note_tag(tags); + JsFuture::from(promise).await.unwrap(); + return Ok(true); + } + + pub(super) async fn remove_note_tag( + &self, + tag: NoteTag + ) -> Result<bool, StoreError> { + let mut tags = self.get_note_tags().await?; + if let Some(index_of_tag) = tags.iter().position(|&tag_candidate| tag_candidate == tag) { + tags.remove(index_of_tag); + + let tags = serde_json::to_string(&tags).map_err(StoreError::InputSerializationError)?; + + let promise = idxdb_add_note_tag(tags); + JsFuture::from(promise).await.unwrap(); + return Ok(true); + } + + Ok(false) + } + + pub(super) async fn apply_state_sync( + &self, + state_sync_update: StateSyncUpdate, + ) -> Result<(), StoreError> { + let StateSyncUpdate { + block_header, + nullifiers, + synced_new_notes: committed_notes, + transactions_to_commit: committed_transactions, + new_mmr_peaks, + new_authentication_nodes, + updated_onchain_accounts, + block_has_relevant_notes, + } = state_sync_update; + + // Serialize data for updating state sync and block header + let block_num_as_str = block_header.block_num().to_string(); + + // Serialize data for updating spent notes + let nullifiers_as_str = nullifiers.iter().map(|nullifier| nullifier.to_hex()).collect(); + + // Serialize data for updating block header + let block_header_as_str = serde_json::to_string(&block_header).map_err(StoreError::InputSerializationError)?; + let new_mmr_peaks_as_str =
serde_json::to_string(&new_mmr_peaks.peaks().to_vec()).map_err(StoreError::InputSerializationError)?; + + // Serialize data for updating chain MMR nodes + let mut serialized_node_ids = Vec::new(); + let mut serialized_nodes = Vec::new(); + for (id, node) in new_authentication_nodes.iter() { + let (serialized_id, serialized_node) = serialize_chain_mmr_node(*id, *node)?; + serialized_node_ids.push(serialized_id); + serialized_nodes.push(serialized_node); + }; + + // Serialize data for updating committed notes + let output_note_ids_as_str: Vec = committed_notes.updated_output_notes().iter().map(|(note_id, _)| note_id.inner().to_hex()).collect(); + let output_note_inclusion_proofs_as_str: Vec = committed_notes.updated_output_notes().iter().map(|(_, inclusion_proof)| { + let block_num = inclusion_proof.origin().block_num; + let sub_hash = inclusion_proof.sub_hash(); + let note_root = inclusion_proof.note_root(); + let note_index = inclusion_proof.origin().node_index.value(); + + // Create a NoteInclusionProof and serialize it to JSON, handle errors with `?` + let proof = NoteInclusionProof::new( + block_num, + sub_hash, + note_root, + note_index, + inclusion_proof.note_path().clone(), + ).unwrap(); + + serde_json::to_string(&proof).unwrap() + }).collect(); + + let input_note_ids_as_str: Vec = committed_notes.updated_input_notes().iter().map(|(input_note)| input_note.id().inner().to_hex()).collect(); + let input_note_inclusion_proofs_as_str: Vec = committed_notes.updated_input_notes().iter().map(|(input_note)| { + let inclusion_proof = input_note.proof(); + serde_json::to_string(inclusion_proof).unwrap() + }).collect(); + let input_note_metadatas_as_str: Vec = committed_notes.updated_input_notes().iter().map(|(input_note)| { + let metadata = input_note.note().metadata(); + serde_json::to_string(metadata).unwrap() + }).collect(); + + // TODO: LOP INTO idxdb_apply_state_sync call + // Commit new public notes + for note in committed_notes.new_public_notes() { + insert_input_note_tx(¬e.clone().into()).await.unwrap(); + } + + // Serialize data for updating committed transactions + let transactions_to_commit_as_str: Vec = committed_transactions.iter().map(|tx_id| tx_id.to_string()).collect(); + + // TODO: LOP INTO idxdb_apply_state_sync call + // Update onchain accounts on the db that have been updated onchain + for account in updated_onchain_accounts { + update_account(&account.clone()).await.unwrap(); + } + + let promise = idxdb_apply_state_sync( + block_num_as_str, + nullifiers_as_str, + block_header_as_str, + new_mmr_peaks_as_str, + block_has_relevant_notes, + serialized_node_ids, + serialized_nodes, + output_note_ids_as_str, + output_note_inclusion_proofs_as_str, + input_note_ids_as_str, + input_note_inclusion_proofs_as_str, + input_note_metadatas_as_str, + transactions_to_commit_as_str, + ); + JsFuture::from(promise).await.unwrap(); + + Ok(()) + } +} \ No newline at end of file diff --git a/wasm/src/web_client/store/sync/models.rs b/wasm/src/web_client/store/sync/models.rs new file mode 100644 index 000000000..4bfdbd09b --- /dev/null +++ b/wasm/src/web_client/store/sync/models.rs @@ -0,0 +1,11 @@ +use serde::{Deserialize, Serialize}; + +#[derive(Serialize, Deserialize)] +pub struct SyncHeightIdxdbObject { + pub block_num: String +} + +#[derive(Serialize, Deserialize)] +pub struct NoteTagsIdxdbObject { + pub tags: String +} \ No newline at end of file diff --git a/wasm/src/web_client/store/transactions/js_bindings.rs b/wasm/src/web_client/store/transactions/js_bindings.rs new file mode 100644 
index 000000000..8e00d4688 --- /dev/null +++ b/wasm/src/web_client/store/transactions/js_bindings.rs @@ -0,0 +1,39 @@ +use wasm_bindgen::prelude::*; +use wasm_bindgen_futures::*; + +#[wasm_bindgen(module = "/js/db/transactions.js")] +extern "C" { + // GETS + // ================================================================================================ + + #[wasm_bindgen(js_name = getTransactions)] + pub fn idxdb_get_transactions( + filter: String + ) -> js_sys::Promise; + + #[wasm_bindgen(js_name = insertTransactionScript)] + pub fn idxdb_insert_transaction_script( + script_hash: Vec, + script_program: Option> + ) -> js_sys::Promise; + + #[wasm_bindgen(js_name = insertProvenTransactionData)] + pub fn idxdb_insert_proven_transaction_data( + transaction_id: String, + account_id: String, + init_account_state: String, + final_account_state: String, + input_notes: String, + output_notes: Vec, + script_hash: Option>, + script_inputs: Option, + block_num: String, + committed: Option + ) -> js_sys::Promise; + + #[wasm_bindgen(js_name = markTransactionsAsCommitted)] + pub fn idxdb_mark_transactions_as_committed( + block_num: String, + transaction_ids: Vec + ) -> js_sys::Promise; +} \ No newline at end of file diff --git a/wasm/src/web_client/store/transactions/mod.rs b/wasm/src/web_client/store/transactions/mod.rs new file mode 100644 index 000000000..f118c8788 --- /dev/null +++ b/wasm/src/web_client/store/transactions/mod.rs @@ -0,0 +1,182 @@ +use std::collections::BTreeMap; + +use miden_client::{ + client::transactions::{notes_from_output, TransactionRecord, TransactionResult, TransactionStatus}, + errors::StoreError, + store::{OutputNoteRecord, TransactionFilter}, +}; +// use crate::native_code::{ +// errors::StoreError, +// store::{ +// note_record::{ +// InputNoteRecord, +// OutputNoteRecord +// }, TransactionFilter +// }, transactions::{ +// TransactionRecord, +// TransactionResult, TransactionStatus +// } +// }; +use crate::web_client::store::notes::utils::{ + insert_input_note_tx, + insert_output_note_tx, + update_note_consumer_tx_id +}; + +use miden_objects::{accounts::AccountId, assembly::ProgramAst, transaction::{OutputNotes, TransactionId, TransactionScript}, Digest, Felt}; +use miden_tx::utils::Deserializable; +use wasm_bindgen_futures::*; +use serde_wasm_bindgen::from_value; + +use super::WebStore; + +mod js_bindings; +use js_bindings::*; + +mod models; +use models::*; + +pub mod utils; +use utils::*; + +impl WebStore { + pub async fn get_transactions( + &self, + filter: TransactionFilter + ) -> Result, StoreError> { + let filter_as_str = match filter { + TransactionFilter::All => "All", + TransactionFilter::Uncomitted => "Uncomitted", + }; + + let promise = idxdb_get_transactions(filter_as_str.to_string()); + let js_value = JsFuture::from(promise).await.unwrap(); + let transactions_idxdb: Vec = from_value(js_value).unwrap(); + + let transaction_records: Result, StoreError> = transactions_idxdb.into_iter().map(|tx_idxdb| { + let native_account_id = AccountId::from_hex(&tx_idxdb.account_id).unwrap(); + let block_num_as_u32: u32 = tx_idxdb.block_num.parse::().unwrap(); + let commit_height_as_u32: Option = match tx_idxdb.commit_height { + Some(height) => Some(height.parse::().unwrap()), + _ => None, + }; + + let id: Digest = tx_idxdb.id.try_into()?; + let init_account_state: Digest = tx_idxdb.init_account_state.try_into()?; + + let final_account_state: Digest = tx_idxdb.final_account_state.try_into()?; + + let input_note_nullifiers: Vec = + 
serde_json::from_str(&tx_idxdb.input_notes).map_err(StoreError::JsonDataDeserializationError)?; + + let output_notes = OutputNotes::read_from_bytes(&tx_idxdb.output_notes)?; + + let transaction_script: Option = if tx_idxdb.script_hash.is_some() { + let script_hash = tx_idxdb.script_hash + .map(|hash| Digest::read_from_bytes(&hash)) + .transpose()? + .expect("Script hash should be included in the row"); + + let script_program = tx_idxdb.script_program + .map(|program| ProgramAst::from_bytes(&program)) + .transpose()? + .expect("Script program should be included in the row"); + + let script_inputs = tx_idxdb.script_inputs + .map(|hash| serde_json::from_str::>>(&hash)) + .transpose() + .map_err(StoreError::JsonDataDeserializationError)? + .expect("Script inputs should be included in the row"); + + let tx_script = TransactionScript::from_parts( + script_program, + script_hash, + script_inputs.into_iter().map(|(k, v)| (k.into(), v)), + )?; + + Some(tx_script) + } else { + None + }; + + let transaction_status = + commit_height_as_u32.map_or(TransactionStatus::Pending, TransactionStatus::Committed); + + Ok(TransactionRecord { + id: id.into(), + account_id: native_account_id, + init_account_state, + final_account_state, + input_note_nullifiers, + output_notes, + transaction_script, + block_num: block_num_as_u32, + transaction_status, + }) + }).collect(); + + transaction_records + } + + pub async fn apply_transaction( + &self, + tx_result: TransactionResult + ) -> Result<(), StoreError> { + let transaction_id = tx_result.executed_transaction().id(); + let account_id = tx_result.executed_transaction().account_id(); + let account_delta = tx_result.account_delta(); + + let (mut account, _seed) = self.get_account(account_id).await.unwrap(); + + account.apply_delta(account_delta).map_err(StoreError::AccountError)?; + + // Save only input notes that we care for (based on the note screener assessment) + let created_input_notes = tx_result.relevant_notes().to_vec(); + + // Save all output notes + let created_output_notes = notes_from_output(tx_result.created_notes()) + .cloned() + .map(OutputNoteRecord::from) + .collect::>(); + + let consumed_note_ids = + tx_result.consumed_notes().iter().map(|note| note.id()).collect::>(); + + // Transaction Data + insert_proven_transaction_data(tx_result).await.unwrap(); + + // Account Data + update_account(&account).await.unwrap(); + + // Updates for notes + for note in created_input_notes { + insert_input_note_tx(¬e).await?; + } + + for note in &created_output_notes { + insert_output_note_tx(note).await?; + } + + for note_id in consumed_note_ids { + update_note_consumer_tx_id(note_id, transaction_id).await?; + } + + Ok(()) + } + + pub(crate) async fn mark_transactions_as_committed( + &self, + block_num: u32, + transactions_to_commit: &[TransactionId] + ) -> Result { + let block_num_as_str = block_num.to_string(); + let transactions_to_commit_as_str: Vec = transactions_to_commit.iter().map(|tx_id| tx_id.inner().into()).collect(); + + let promise = idxdb_mark_transactions_as_committed(block_num_as_str, transactions_to_commit_as_str); + let js_value = JsFuture::from(promise).await.unwrap(); + let result: usize = from_value(js_value).unwrap(); + + Ok(result) + } +} + diff --git a/wasm/src/web_client/store/transactions/models.rs b/wasm/src/web_client/store/transactions/models.rs new file mode 100644 index 000000000..9a9b30b6c --- /dev/null +++ b/wasm/src/web_client/store/transactions/models.rs @@ -0,0 +1,42 @@ +use base64::{decode as base64_decode, DecodeError}; +use 
serde::{Deserialize, Deserializer, Serialize}; +use serde::de::{Error, Unexpected}; + +#[derive(Serialize, Deserialize)] +pub struct TransactionIdxdbObject { + pub id: String, + pub account_id: String, // usually i64 + pub init_account_state: String, + pub final_account_state: String, + pub input_notes: String, + #[serde(deserialize_with = "base64_to_vec_u8_required", default)] + pub output_notes: Vec, + #[serde(deserialize_with = "base64_to_vec_u8_optional", default)] + pub script_hash: Option>, + #[serde(deserialize_with = "base64_to_vec_u8_optional", default)] + pub script_program: Option>, + pub script_inputs: Option, + pub block_num: String, // usually u32 + pub commit_height: Option // usually Option +} + +fn base64_to_vec_u8_required<'de, D>(deserializer: D) -> Result, D::Error> +where + D: Deserializer<'de>, +{ + let base64_str: String = Deserialize::deserialize(deserializer)?; + base64_decode(&base64_str).map_err(|e| Error::custom(format!("Base64 decode error: {}", e))) +} + +fn base64_to_vec_u8_optional<'de, D>(deserializer: D) -> Result>, D::Error> +where + D: Deserializer<'de>, +{ + let base64_str: Option = Option::deserialize(deserializer)?; + match base64_str { + Some(str) => base64_decode(&str) + .map(Some) + .map_err(|e| Error::custom(format!("Base64 decode error: {}", e))), + None => Ok(None), + } +} \ No newline at end of file diff --git a/wasm/src/web_client/store/transactions/utils.rs b/wasm/src/web_client/store/transactions/utils.rs new file mode 100644 index 000000000..5cf5a7058 --- /dev/null +++ b/wasm/src/web_client/store/transactions/utils.rs @@ -0,0 +1,136 @@ +use miden_objects::{accounts::Account, assembly::AstSerdeOptions, transaction::ToNullifier, Digest}; +use miden_tx::utils::Serializable; +use wasm_bindgen_futures::*; + +use miden_client::{ + client::transactions::TransactionResult, + errors::StoreError +}; +// use crate::native_code::{errors::StoreError, transactions::TransactionResult}; + +use crate::web_client::store::accounts::utils::{ + insert_account_asset_vault, + insert_account_storage, + insert_account_record +}; +use super::js_bindings::*; + +// TYPES +// ================================================================================================ + +type SerializedTransactionData = ( + String, + String, + String, + String, + String, + Vec, + Option>, + Option>, + Option, + String, + Option, +); + +// ================================================================================================ + +pub async fn insert_proven_transaction_data( + transaction_result: TransactionResult +) -> Result<(), StoreError> { + let ( + transaction_id, + account_id, + init_account_state, + final_account_state, + input_notes, + output_notes, + script_program, + script_hash, + script_inputs, + block_num, + committed + ) = serialize_transaction_data(transaction_result)?; + + if let Some(hash) = script_hash.clone() { + let promise = idxdb_insert_transaction_script(hash, script_program.clone()); + JsFuture::from(promise).await.unwrap(); + } + + let promise = idxdb_insert_proven_transaction_data( + transaction_id, + account_id, + init_account_state, + final_account_state, + input_notes, + output_notes, + script_hash.clone(), + script_inputs.clone(), + block_num, + committed + ); + JsFuture::from(promise).await.unwrap(); + + Ok(()) +} + +pub(super) fn serialize_transaction_data( + transaction_result: TransactionResult +) -> Result { + let executed_transaction = transaction_result.executed_transaction(); + let transaction_id: String = 
executed_transaction.id().inner().into(); + + let account_id_as_str: String = executed_transaction.account_id().to_string(); + let init_account_state = &executed_transaction.initial_account().hash().to_string(); + let final_account_state = &executed_transaction.final_account().hash().to_string(); + + // TODO: Double check if saving nullifiers as input notes is enough + let nullifiers: Vec = executed_transaction + .input_notes() + .iter() + .map(|x| x.nullifier().inner()) + .collect(); + + let input_notes = + serde_json::to_string(&nullifiers).map_err(StoreError::InputSerializationError)?; + + let output_notes = executed_transaction.output_notes(); + + // TODO: Scripts should be in their own tables and only identifiers should be stored here + let transaction_args = transaction_result.transaction_arguments(); + let mut script_program = None; + let mut script_hash = None; + let mut script_inputs = None; + + if let Some(tx_script) = transaction_args.tx_script() { + script_program = Some(tx_script.code().to_bytes(AstSerdeOptions { + serialize_imports: true, + })); + script_hash = Some(tx_script.hash().to_bytes()); + script_inputs = Some( + serde_json::to_string(&tx_script.inputs()) + .map_err(StoreError::InputSerializationError)?, + ); + } + + Ok(( + transaction_id, + account_id_as_str, + init_account_state.to_owned(), + final_account_state.to_owned(), + input_notes, + output_notes.to_bytes(), + script_program, + script_hash, + script_inputs, + transaction_result.block_num().to_string(), + None, + )) +} + +pub async fn update_account( + new_account_state: &Account, +) -> Result<(), ()> { + insert_account_storage(new_account_state.storage()).await; + insert_account_asset_vault(new_account_state.vault()).await; + insert_account_record(new_account_state, None).await +} \ No newline at end of file diff --git a/wasm/src/web_client/sync.rs b/wasm/src/web_client/sync.rs new file mode 100644 index 000000000..8cc3e0ccc --- /dev/null +++ b/wasm/src/web_client/sync.rs @@ -0,0 +1,19 @@ +use wasm_bindgen::prelude::*; + +use super::WebClient; + +#[wasm_bindgen] +impl WebClient { + pub async fn sync_state( + &mut self + ) -> Result { + web_sys::console::log_1(&JsValue::from_str("sync_state called")); + if let Some(client) = self.get_mut_inner() { + let sync_summary = client.sync_state().await.unwrap(); + + Ok(JsValue::from_f64(sync_summary.block_num as f64)) + } else { + Err(JsValue::from_str("Client not initialized")) + } + } +} \ No newline at end of file diff --git a/wasm/src/web_client/tags.rs b/wasm/src/web_client/tags.rs new file mode 100644 index 000000000..ebc774fc8 --- /dev/null +++ b/wasm/src/web_client/tags.rs @@ -0,0 +1,25 @@ +use super::WebClient; + +use miden_objects::notes::NoteTag; + +use wasm_bindgen::prelude::*; +use web_sys::console; + +#[wasm_bindgen] +impl WebClient { + pub async fn add_tag( + &mut self, + tag: String, + ) -> Result { + web_sys::console::log_1(&JsValue::from_str("add_tag called")); + if let Some(client) = self.get_mut_inner() { + let note_tag_as_u32 = tag.parse::().unwrap(); + let note_tag: NoteTag = note_tag_as_u32.into(); + client.add_note_tag(note_tag).await.unwrap(); + + Ok(JsValue::from_str("Okay, it worked")) + } else { + Err(JsValue::from_str("Client not initialized")) + } + } +} \ No newline at end of file diff --git a/wasm/src/web_client/transactions.rs b/wasm/src/web_client/transactions.rs new file mode 100644 index 000000000..dbc29db31 --- /dev/null +++ b/wasm/src/web_client/transactions.rs @@ -0,0 +1,47 @@ +use miden_objects::{ + accounts::AccountId, + 
assets::FungibleAsset, + crypto::rand::FeltRng, + notes::{ + NoteId, NoteType as MidenNoteType + } +}; + +use super::WebClient; +use crate::web_client::models::transactions::NewTransactionResult; + +use miden_client::{ + client::Client, + client::transactions::TransactionRecord, + store::TransactionFilter +}; + +use serde::{Serialize, Deserialize}; +use serde_wasm_bindgen::from_value; + +use wasm_bindgen::prelude::*; +use web_sys::console; + +#[wasm_bindgen] +impl WebClient { + pub async fn get_transactions( + &mut self + ) -> Result<JsValue, JsValue> { + web_sys::console::log_1(&JsValue::from_str("get_transactions called")); + if let Some(client) = self.get_mut_inner() { + + let transactions: Vec<TransactionRecord> = client + .get_transactions(TransactionFilter::All) + .await + .map_err(|e| JsValue::from_str(&format!("Error fetching transactions: {:?}", e)))?; + + let transaction_ids: Vec<String> = transactions.iter().map(|transaction| { + transaction.id.to_string() + }).collect(); + + serde_wasm_bindgen::to_value(&transaction_ids).map_err(|e| JsValue::from_str(&e.to_string())) + } else { + Err(JsValue::from_str("Client not initialized")) + } + } +} \ No newline at end of file diff --git a/wasm/test.html b/wasm/test.html new file mode 100644 index 000000000..4d8aad108 --- /dev/null +++ b/wasm/test.html @@ -0,0 +1,1158 @@ + + + + + WASM Example + + + + + + + + + + \ No newline at end of file diff --git a/wasm/test_accounts/account0.mac b/wasm/test_accounts/account0.mac new file mode 100644 index 000000000..96129e38b Binary files /dev/null and b/wasm/test_accounts/account0.mac differ diff --git a/wasm/test_accounts/account1.mac b/wasm/test_accounts/account1.mac new file mode 100644 index 000000000..64d367103 Binary files /dev/null and b/wasm/test_accounts/account1.mac differ diff --git a/wasm/yarn.lock b/wasm/yarn.lock new file mode 100644 index 000000000..52b663416 --- /dev/null +++ b/wasm/yarn.lock @@ -0,0 +1,651 @@ +# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY.
+# yarn lockfile v1 + + +"@iarna/toml@^2.2.5": + version "2.2.5" + resolved "https://registry.yarnpkg.com/@iarna/toml/-/toml-2.2.5.tgz#b32366c89b43c6f8cefbdefac778b9c828e3ba8c" + integrity sha512-trnsAYxU3xnS1gPHPyU961coFyLkh4gAD/0zQ5mymY4yOZ+CYvsPqUbOFSw0aDM4y0tV7tiFxL/1XfXPNC6IPg== + +"@isaacs/cliui@^8.0.2": + version "8.0.2" + resolved "https://registry.yarnpkg.com/@isaacs/cliui/-/cliui-8.0.2.tgz#b37667b7bc181c168782259bab42474fbf52b550" + integrity sha512-O8jcjabXaleOG9DQ0+ARXWZBTfnP4WNAqzuiJK7ll44AmxGKv/J2M4TPjxjY3znBCfvBXFzucm1twdyFybFqEA== + dependencies: + string-width "^5.1.2" + string-width-cjs "npm:string-width@^4.2.0" + strip-ansi "^7.0.1" + strip-ansi-cjs "npm:strip-ansi@^6.0.1" + wrap-ansi "^8.1.0" + wrap-ansi-cjs "npm:wrap-ansi@^7.0.0" + +"@jridgewell/sourcemap-codec@^1.4.15": + version "1.4.15" + resolved "https://registry.yarnpkg.com/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.4.15.tgz#d7c6e6755c78567a951e04ab52ef0fd26de59f32" + integrity sha512-eF2rxCRulEKXHTRiDrDy6erMYWqNw4LPdQ8UQA4huuxaQsVeRPFl2oM8oDGxMFhJUWZf9McpLtJasDDZb/Bpeg== + +"@pkgjs/parseargs@^0.11.0": + version "0.11.0" + resolved "https://registry.yarnpkg.com/@pkgjs/parseargs/-/parseargs-0.11.0.tgz#a77ea742fab25775145434eb1d2328cf5013ac33" + integrity sha512-+1VkjdD0QBLPodGrJUeqarH8VAIvQODIbwh9XpP5Syisf7YoQgsJKPNFoqqLQlu+VQ/tVSshMR6loPMn8U+dPg== + +"@rollup/plugin-commonjs@^25.0.7": + version "25.0.8" + resolved "https://registry.yarnpkg.com/@rollup/plugin-commonjs/-/plugin-commonjs-25.0.8.tgz#c77e608ab112a666b7f2a6bea625c73224f7dd34" + integrity sha512-ZEZWTK5n6Qde0to4vS9Mr5x/0UZoqCxPVR9KRUjU4kA2sO7GEUn1fop0DAwpO6z0Nw/kJON9bDmSxdWxO/TT1A== + dependencies: + "@rollup/pluginutils" "^5.0.1" + commondir "^1.0.1" + estree-walker "^2.0.2" + glob "^8.0.3" + is-reference "1.2.1" + magic-string "^0.30.3" + +"@rollup/plugin-node-resolve@^15.2.3": + version "15.2.3" + resolved "https://registry.yarnpkg.com/@rollup/plugin-node-resolve/-/plugin-node-resolve-15.2.3.tgz#e5e0b059bd85ca57489492f295ce88c2d4b0daf9" + integrity sha512-j/lym8nf5E21LwBT4Df1VD6hRO2L2iwUeUmP7litikRsVp1H6NWx20NEp0Y7su+7XGc476GnXXc4kFeZNGmaSQ== + dependencies: + "@rollup/pluginutils" "^5.0.1" + "@types/resolve" "1.20.2" + deepmerge "^4.2.2" + is-builtin-module "^3.2.1" + is-module "^1.0.0" + resolve "^1.22.1" + +"@rollup/pluginutils@^5.0.1", "@rollup/pluginutils@^5.0.2": + version "5.1.0" + resolved "https://registry.yarnpkg.com/@rollup/pluginutils/-/pluginutils-5.1.0.tgz#7e53eddc8c7f483a4ad0b94afb1f7f5fd3c771e0" + integrity sha512-XTIWOPPcpvyKI6L1NHo0lFlCyznUEyPmPY1mc3KpPVDYulHSTvyeLNVW00QTLIAFNhR3kYnJTQHeGqU4M3n09g== + dependencies: + "@types/estree" "^1.0.0" + estree-walker "^2.0.2" + picomatch "^2.3.1" + +"@types/estree@*", "@types/estree@^1.0.0": + version "1.0.5" + resolved "https://registry.yarnpkg.com/@types/estree/-/estree-1.0.5.tgz#a6ce3e556e00fd9895dd872dd172ad0d4bd687f4" + integrity sha512-/kYRxGDLWzHOB7q+wtSUQlFrtcdUccpfy+X+9iMBpHK8QLLhx2wIPYuS5DYtR9Wa/YlZAbIovy7qVdB1Aq6Lyw== + +"@types/resolve@1.20.2": + version "1.20.2" + resolved "https://registry.yarnpkg.com/@types/resolve/-/resolve-1.20.2.tgz#97d26e00cd4a0423b4af620abecf3e6f442b7975" + integrity sha512-60BCwRFOZCQhDncwQdxxeOEEkbc5dIMccYLwbxsS4TUNeVECQ/pBJ0j09mrHOl/JJvpRPGwO9SvE4nR2Nb/a4Q== + +"@wasm-tool/rollup-plugin-rust@^2.4.5": + version "2.4.5" + resolved "https://registry.yarnpkg.com/@wasm-tool/rollup-plugin-rust/-/rollup-plugin-rust-2.4.5.tgz#203e0be9196ad278533b0996bf29c6dae5b468ec" + integrity 
sha512-rrgaHm/TmiOCKkt9mz8LMQMzigyn2xLHNZDtJAAv8HDrQt9QbksUkf+mYmsnQDF7gFmWtOEEJE5/7lfYuL0fEQ== + dependencies: + "@iarna/toml" "^2.2.5" + "@rollup/pluginutils" "^5.0.2" + binaryen "^111.0.0" + chalk "^4.0.0" + glob "^10.2.2" + node-fetch "^2.0.0" + rimraf "^5.0.0" + tar "^6.1.11" + +ansi-regex@^5.0.1: + version "5.0.1" + resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-5.0.1.tgz#082cb2c89c9fe8659a311a53bd6a4dc5301db304" + integrity sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ== + +ansi-regex@^6.0.1: + version "6.0.1" + resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-6.0.1.tgz#3183e38fae9a65d7cb5e53945cd5897d0260a06a" + integrity sha512-n5M855fKb2SsfMIiFFoVrABHJC8QtHwVx+mHWP3QcEqBHYienj5dHSgjbxtC0WEZXYt4wcD6zrQElDPhFuZgfA== + +ansi-styles@^4.0.0, ansi-styles@^4.1.0: + version "4.3.0" + resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-4.3.0.tgz#edd803628ae71c04c85ae7a0906edad34b648937" + integrity sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg== + dependencies: + color-convert "^2.0.1" + +ansi-styles@^6.1.0: + version "6.2.1" + resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-6.2.1.tgz#0e62320cf99c21afff3b3012192546aacbfb05c5" + integrity sha512-bN798gFfQX+viw3R7yrGWRqnrN2oRkEkUjjl4JNn4E8GxxbjtG3FbrEIIY3l8/hrwUwIeCZvi4QuOTP4MErVug== + +balanced-match@^1.0.0: + version "1.0.2" + resolved "https://registry.yarnpkg.com/balanced-match/-/balanced-match-1.0.2.tgz#e83e3a7e3f300b34cb9d87f615fa0cbf357690ee" + integrity sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw== + +binaryen@^111.0.0: + version "111.0.0" + resolved "https://registry.yarnpkg.com/binaryen/-/binaryen-111.0.0.tgz#dd970a11d8fe61959f77d609dfee3c19ad80b80a" + integrity sha512-PEXOSHFO85aj1aP4t+KGzvxQ00qXbjCysWlsDjlGkP1e9owNiYdpEkLej21Ax8LDD7xJ01rEmJDqZ/JPoW2GXw== + +brace-expansion@^1.1.7: + version "1.1.11" + resolved "https://registry.yarnpkg.com/brace-expansion/-/brace-expansion-1.1.11.tgz#3c7fcbf529d87226f3d2f52b966ff5271eb441dd" + integrity sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA== + dependencies: + balanced-match "^1.0.0" + concat-map "0.0.1" + +brace-expansion@^2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/brace-expansion/-/brace-expansion-2.0.1.tgz#1edc459e0f0c548486ecf9fc99f2221364b9a0ae" + integrity sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA== + dependencies: + balanced-match "^1.0.0" + +builtin-modules@^3.3.0: + version "3.3.0" + resolved "https://registry.yarnpkg.com/builtin-modules/-/builtin-modules-3.3.0.tgz#cae62812b89801e9656336e46223e030386be7b6" + integrity sha512-zhaCDicdLuWN5UbN5IMnFqNMhNfo919sH85y2/ea+5Yg9TsTkeZxpL+JLbp6cgYFS4sRLp3YV4S6yDuqVWHYOw== + +chalk@^4.0.0: + version "4.1.2" + resolved "https://registry.yarnpkg.com/chalk/-/chalk-4.1.2.tgz#aac4e2b7734a740867aeb16bf02aad556a1e7a01" + integrity sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA== + dependencies: + ansi-styles "^4.1.0" + supports-color "^7.1.0" + +chownr@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/chownr/-/chownr-2.0.0.tgz#15bfbe53d2eab4cf70f18a8cd68ebe5b3cb1dece" + integrity sha512-bIomtDF5KGpdogkLd9VspvFzk9KfpyyGlS8YFVZl7TGPBHL5snIOnxeshwVgPteQ9b4Eydl+pVbIyE1DcvCWgQ== + +color-convert@^2.0.1: + version "2.0.1" + resolved 
"https://registry.yarnpkg.com/color-convert/-/color-convert-2.0.1.tgz#72d3a68d598c9bdb3af2ad1e84f21d896abd4de3" + integrity sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ== + dependencies: + color-name "~1.1.4" + +color-name@~1.1.4: + version "1.1.4" + resolved "https://registry.yarnpkg.com/color-name/-/color-name-1.1.4.tgz#c2a09a87acbde69543de6f63fa3995c826c536a2" + integrity sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA== + +commondir@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/commondir/-/commondir-1.0.1.tgz#ddd800da0c66127393cca5950ea968a3aaf1253b" + integrity sha512-W9pAhw0ja1Edb5GVdIF1mjZw/ASI0AlShXM83UUGe2DVr5TdAPEA1OA8m/g8zWp9x6On7gqufY+FatDbC3MDQg== + +concat-map@0.0.1: + version "0.0.1" + resolved "https://registry.yarnpkg.com/concat-map/-/concat-map-0.0.1.tgz#d8a96bd77fd68df7793a73036a3ba0d5405d477b" + integrity sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg== + +cpr@^3.0.1: + version "3.0.1" + resolved "https://registry.yarnpkg.com/cpr/-/cpr-3.0.1.tgz#b9a55038b7cd81a35c17b9761895bd8496aef1e5" + integrity sha512-Xch4PXQ/KC8lJ+KfJ9JI6eG/nmppLrPPWg5Q+vh65Qr9EjuJEubxh/H/Le1TmCZ7+Xv7iJuNRqapyOFZB+wsxA== + dependencies: + graceful-fs "^4.1.5" + minimist "^1.2.0" + mkdirp "~0.5.1" + rimraf "^2.5.4" + +cross-spawn@^7.0.0: + version "7.0.3" + resolved "https://registry.yarnpkg.com/cross-spawn/-/cross-spawn-7.0.3.tgz#f73a85b9d5d41d045551c177e2882d4ac85728a6" + integrity sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w== + dependencies: + path-key "^3.1.0" + shebang-command "^2.0.0" + which "^2.0.1" + +deepmerge@^4.2.2: + version "4.3.1" + resolved "https://registry.yarnpkg.com/deepmerge/-/deepmerge-4.3.1.tgz#44b5f2147cd3b00d4b56137685966f26fd25dd4a" + integrity sha512-3sUqbMEc77XqpdNO7FRyRog+eW3ph+GYCbj+rK+uYyRMuwsVy0rMiVtPn+QJlKFvWP/1PYpapqYn0Me2knFn+A== + +dexie@^4.0.1: + version "4.0.7" + resolved "https://registry.yarnpkg.com/dexie/-/dexie-4.0.7.tgz#c92e5032245fc075de58c636238a82ee3ff9fedb" + integrity sha512-M+Lo6rk4pekIfrc2T0o2tvVJwL6EAAM/B78DNfb8aaxFVoI1f8/rz5KTxuAnApkwqTSuxx7T5t0RKH7qprapGg== + +eastasianwidth@^0.2.0: + version "0.2.0" + resolved "https://registry.yarnpkg.com/eastasianwidth/-/eastasianwidth-0.2.0.tgz#696ce2ec0aa0e6ea93a397ffcf24aa7840c827cb" + integrity sha512-I88TYZWc9XiYHRQ4/3c5rjjfgkjhLyW2luGIheGERbNQ6OY7yTybanSpDXZa8y7VUP9YmDcYa+eyq4ca7iLqWA== + +emoji-regex@^8.0.0: + version "8.0.0" + resolved "https://registry.yarnpkg.com/emoji-regex/-/emoji-regex-8.0.0.tgz#e818fd69ce5ccfcb404594f842963bf53164cc37" + integrity sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A== + +emoji-regex@^9.2.2: + version "9.2.2" + resolved "https://registry.yarnpkg.com/emoji-regex/-/emoji-regex-9.2.2.tgz#840c8803b0d8047f4ff0cf963176b32d4ef3ed72" + integrity sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg== + +estree-walker@^2.0.2: + version "2.0.2" + resolved "https://registry.yarnpkg.com/estree-walker/-/estree-walker-2.0.2.tgz#52f010178c2a4c117a7757cfe942adb7d2da4cac" + integrity sha512-Rfkk/Mp/DL7JVje3u18FxFujQlTNR2q6QfMSMB7AvCBx91NGj/ba3kCfza0f6dVDbw7YlRf/nDrn7pQrCCyQ/w== + +foreground-child@^3.1.0: + version "3.1.1" + resolved "https://registry.yarnpkg.com/foreground-child/-/foreground-child-3.1.1.tgz#1d173e776d75d2772fed08efe4a0de1ea1b12d0d" + integrity 
sha512-TMKDUnIte6bfb5nWv7V/caI169OHgvwjb7V4WkeUvbQQdjr5rWKqHFiKWb/fcOwB+CzBT+qbWjvj+DVwRskpIg== + dependencies: + cross-spawn "^7.0.0" + signal-exit "^4.0.1" + +fs-minipass@^2.0.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/fs-minipass/-/fs-minipass-2.1.0.tgz#7f5036fdbf12c63c169190cbe4199c852271f9fb" + integrity sha512-V/JgOLFCS+R6Vcq0slCuaeWEdNC3ouDlJMNIsacH2VtALiu9mV4LPrHc5cDl8k5aw6J8jwgWWpiTo5RYhmIzvg== + dependencies: + minipass "^3.0.0" + +fs.realpath@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/fs.realpath/-/fs.realpath-1.0.0.tgz#1504ad2523158caa40db4a2787cb01411994ea4f" + integrity sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw== + +fsevents@~2.3.2: + version "2.3.3" + resolved "https://registry.yarnpkg.com/fsevents/-/fsevents-2.3.3.tgz#cac6407785d03675a2a5e1a5305c697b347d90d6" + integrity sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw== + +function-bind@^1.1.2: + version "1.1.2" + resolved "https://registry.yarnpkg.com/function-bind/-/function-bind-1.1.2.tgz#2c02d864d97f3ea6c8830c464cbd11ab6eab7a1c" + integrity sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA== + +glob@^10.2.2, glob@^10.3.7: + version "10.4.1" + resolved "https://registry.yarnpkg.com/glob/-/glob-10.4.1.tgz#0cfb01ab6a6b438177bfe6a58e2576f6efe909c2" + integrity sha512-2jelhlq3E4ho74ZyVLN03oKdAZVUa6UDZzFLVH1H7dnoax+y9qyaq8zBkfDIggjniU19z0wU18y16jMB2eyVIw== + dependencies: + foreground-child "^3.1.0" + jackspeak "^3.1.2" + minimatch "^9.0.4" + minipass "^7.1.2" + path-scurry "^1.11.1" + +glob@^7.1.3: + version "7.2.3" + resolved "https://registry.yarnpkg.com/glob/-/glob-7.2.3.tgz#b8df0fb802bbfa8e89bd1d938b4e16578ed44f2b" + integrity sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q== + dependencies: + fs.realpath "^1.0.0" + inflight "^1.0.4" + inherits "2" + minimatch "^3.1.1" + once "^1.3.0" + path-is-absolute "^1.0.0" + +glob@^8.0.3: + version "8.1.0" + resolved "https://registry.yarnpkg.com/glob/-/glob-8.1.0.tgz#d388f656593ef708ee3e34640fdfb99a9fd1c33e" + integrity sha512-r8hpEjiQEYlF2QU0df3dS+nxxSIreXQS1qRhMJM0Q5NDdR386C7jb7Hwwod8Fgiuex+k0GFjgft18yvxm5XoCQ== + dependencies: + fs.realpath "^1.0.0" + inflight "^1.0.4" + inherits "2" + minimatch "^5.0.1" + once "^1.3.0" + +graceful-fs@^4.1.5: + version "4.2.11" + resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.2.11.tgz#4183e4e8bf08bb6e05bbb2f7d2e0c8f712ca40e3" + integrity sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ== + +has-flag@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/has-flag/-/has-flag-4.0.0.tgz#944771fd9c81c81265c4d6941860da06bb59479b" + integrity sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ== + +hasown@^2.0.0: + version "2.0.2" + resolved "https://registry.yarnpkg.com/hasown/-/hasown-2.0.2.tgz#003eaf91be7adc372e84ec59dc37252cedb80003" + integrity sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ== + dependencies: + function-bind "^1.1.2" + +inflight@^1.0.4: + version "1.0.6" + resolved "https://registry.yarnpkg.com/inflight/-/inflight-1.0.6.tgz#49bd6331d7d02d0c09bc910a1075ba8165b56df9" + integrity sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA== + dependencies: + once "^1.3.0" + wrappy "1" + +inherits@2: + version 
"2.0.4" + resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.4.tgz#0fa2c64f932917c3433a0ded55363aae37416b7c" + integrity sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ== + +is-builtin-module@^3.2.1: + version "3.2.1" + resolved "https://registry.yarnpkg.com/is-builtin-module/-/is-builtin-module-3.2.1.tgz#f03271717d8654cfcaf07ab0463faa3571581169" + integrity sha512-BSLE3HnV2syZ0FK0iMA/yUGplUeMmNz4AW5fnTunbCIqZi4vG3WjJT9FHMy5D69xmAYBHXQhJdALdpwVxV501A== + dependencies: + builtin-modules "^3.3.0" + +is-core-module@^2.13.0: + version "2.13.1" + resolved "https://registry.yarnpkg.com/is-core-module/-/is-core-module-2.13.1.tgz#ad0d7532c6fea9da1ebdc82742d74525c6273384" + integrity sha512-hHrIjvZsftOsvKSn2TRYl63zvxsgE0K+0mYMoH6gD4omR5IWB2KynivBQczo3+wF1cCkjzvptnI9Q0sPU66ilw== + dependencies: + hasown "^2.0.0" + +is-fullwidth-code-point@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz#f116f8064fe90b3f7844a38997c0b75051269f1d" + integrity sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg== + +is-module@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/is-module/-/is-module-1.0.0.tgz#3258fb69f78c14d5b815d664336b4cffb6441591" + integrity sha512-51ypPSPCoTEIN9dy5Oy+h4pShgJmPCygKfyRCISBI+JoWT/2oJvK8QPxmwv7b/p239jXrm9M1mlQbyKJ5A152g== + +is-reference@1.2.1: + version "1.2.1" + resolved "https://registry.yarnpkg.com/is-reference/-/is-reference-1.2.1.tgz#8b2dac0b371f4bc994fdeaba9eb542d03002d0b7" + integrity sha512-U82MsXXiFIrjCK4otLT+o2NA2Cd2g5MLoOVXUZjIOhLurrRxpEXzI8O0KZHr3IjLvlAH1kTPYSuqer5T9ZVBKQ== + dependencies: + "@types/estree" "*" + +isexe@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/isexe/-/isexe-2.0.0.tgz#e8fbf374dc556ff8947a10dcb0572d633f2cfa10" + integrity sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw== + +jackspeak@^3.1.2: + version "3.2.0" + resolved "https://registry.yarnpkg.com/jackspeak/-/jackspeak-3.2.0.tgz#54e5abaa1c673bace97873d0599f3dab0e22b850" + integrity sha512-eXIwN9gutMuB1AMW241gIHSEeaSMafWnxWXb/JGYWqifway4QgqBJLl7nYlmhGrxnHQ3wNc/QYFZ95aDtHHzpA== + dependencies: + "@isaacs/cliui" "^8.0.2" + optionalDependencies: + "@pkgjs/parseargs" "^0.11.0" + +lru-cache@^10.2.0: + version "10.2.2" + resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-10.2.2.tgz#48206bc114c1252940c41b25b41af5b545aca878" + integrity sha512-9hp3Vp2/hFQUiIwKo8XCeFVnrg8Pk3TYNPIR7tJADKi5YfcF7vEaK7avFHTlSy3kOKYaJQaalfEo6YuXdceBOQ== + +magic-string@^0.30.3: + version "0.30.10" + resolved "https://registry.yarnpkg.com/magic-string/-/magic-string-0.30.10.tgz#123d9c41a0cb5640c892b041d4cfb3bd0aa4b39e" + integrity sha512-iIRwTIf0QKV3UAnYK4PU8uiEc4SRh5jX0mwpIwETPpHdhVM4f53RSwS/vXvN1JhGX+Cs7B8qIq3d6AH49O5fAQ== + dependencies: + "@jridgewell/sourcemap-codec" "^1.4.15" + +minimatch@^3.1.1: + version "3.1.2" + resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-3.1.2.tgz#19cd194bfd3e428f049a70817c038d89ab4be35b" + integrity sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw== + dependencies: + brace-expansion "^1.1.7" + +minimatch@^5.0.1: + version "5.1.6" + resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-5.1.6.tgz#1cfcb8cf5522ea69952cd2af95ae09477f122a96" + integrity sha512-lKwV/1brpG6mBUFHtb7NUmtABCb2WZZmm2wNiOA5hAb8VdCS4B3dtMWyvcoViccwAW/COERjXLt0zP1zXUN26g== + dependencies: + 
brace-expansion "^2.0.1" + +minimatch@^9.0.4: + version "9.0.4" + resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-9.0.4.tgz#8e49c731d1749cbec05050ee5145147b32496a51" + integrity sha512-KqWh+VchfxcMNRAJjj2tnsSJdNbHsVgnkBhTNrW7AjVo6OvLtxw8zfT9oLw1JSohlFzJ8jCoTgaoXvJ+kHt6fw== + dependencies: + brace-expansion "^2.0.1" + +minimist@^1.2.0, minimist@^1.2.6: + version "1.2.8" + resolved "https://registry.yarnpkg.com/minimist/-/minimist-1.2.8.tgz#c1a464e7693302e082a075cee0c057741ac4772c" + integrity sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA== + +minipass@^3.0.0: + version "3.3.6" + resolved "https://registry.yarnpkg.com/minipass/-/minipass-3.3.6.tgz#7bba384db3a1520d18c9c0e5251c3444e95dd94a" + integrity sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw== + dependencies: + yallist "^4.0.0" + +minipass@^5.0.0: + version "5.0.0" + resolved "https://registry.yarnpkg.com/minipass/-/minipass-5.0.0.tgz#3e9788ffb90b694a5d0ec94479a45b5d8738133d" + integrity sha512-3FnjYuehv9k6ovOEbyOswadCDPX1piCfhV8ncmYtHOjuPwylVWsghTLo7rabjC3Rx5xD4HDx8Wm1xnMF7S5qFQ== + +"minipass@^5.0.0 || ^6.0.2 || ^7.0.0", minipass@^7.1.2: + version "7.1.2" + resolved "https://registry.yarnpkg.com/minipass/-/minipass-7.1.2.tgz#93a9626ce5e5e66bd4db86849e7515e92340a707" + integrity sha512-qOOzS1cBTWYF4BH8fVePDBOO9iptMnGUEZwNc/cMWnTV2nVLZ7VoNWEPHkYczZA0pdoA7dl6e7FL659nX9S2aw== + +minizlib@^2.1.1: + version "2.1.2" + resolved "https://registry.yarnpkg.com/minizlib/-/minizlib-2.1.2.tgz#e90d3466ba209b932451508a11ce3d3632145931" + integrity sha512-bAxsR8BVfj60DWXHE3u30oHzfl4G7khkSuPW+qvpd7jFRHm7dLxOjUk1EHACJ/hxLY8phGJ0YhYHZo7jil7Qdg== + dependencies: + minipass "^3.0.0" + yallist "^4.0.0" + +mkdirp@^1.0.3: + version "1.0.4" + resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-1.0.4.tgz#3eb5ed62622756d79a5f0e2a221dfebad75c2f7e" + integrity sha512-vVqVZQyf3WLx2Shd0qJ9xuvqgAyKPLAiqITEtqW0oIUjzo3PePDd6fW9iFz30ef7Ysp/oiWqbhszeGWW2T6Gzw== + +mkdirp@~0.5.1: + version "0.5.6" + resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-0.5.6.tgz#7def03d2432dcae4ba1d611445c48396062255f6" + integrity sha512-FP+p8RB8OWpF3YZBCrP5gtADmtXApB5AMLn+vdyA+PyxCjrCs00mjyUozssO33cwDeT3wNGdLxJ5M//YqtHAJw== + dependencies: + minimist "^1.2.6" + +node-fetch@^2.0.0: + version "2.7.0" + resolved "https://registry.yarnpkg.com/node-fetch/-/node-fetch-2.7.0.tgz#d0f0fa6e3e2dc1d27efcd8ad99d550bda94d187d" + integrity sha512-c4FRfUm/dbcWZ7U+1Wq0AwCyFL+3nt2bEw05wfxSz+DWpWsitgmSgYmy2dQdWyKC1694ELPqMs/YzUSNozLt8A== + dependencies: + whatwg-url "^5.0.0" + +once@^1.3.0: + version "1.4.0" + resolved "https://registry.yarnpkg.com/once/-/once-1.4.0.tgz#583b1aa775961d4b113ac17d9c50baef9dd76bd1" + integrity sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w== + dependencies: + wrappy "1" + +path-is-absolute@^1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/path-is-absolute/-/path-is-absolute-1.0.1.tgz#174b9268735534ffbc7ace6bf53a5a9e1b5c5f5f" + integrity sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg== + +path-key@^3.1.0: + version "3.1.1" + resolved "https://registry.yarnpkg.com/path-key/-/path-key-3.1.1.tgz#581f6ade658cbba65a0d3380de7753295054f375" + integrity sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q== + +path-parse@^1.0.7: + version "1.0.7" + resolved 
"https://registry.yarnpkg.com/path-parse/-/path-parse-1.0.7.tgz#fbc114b60ca42b30d9daf5858e4bd68bbedb6735" + integrity sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw== + +path-scurry@^1.11.1: + version "1.11.1" + resolved "https://registry.yarnpkg.com/path-scurry/-/path-scurry-1.11.1.tgz#7960a668888594a0720b12a911d1a742ab9f11d2" + integrity sha512-Xa4Nw17FS9ApQFJ9umLiJS4orGjm7ZzwUrwamcGQuHSzDyth9boKDaycYdDcZDuqYATXw4HFXgaqWTctW/v1HA== + dependencies: + lru-cache "^10.2.0" + minipass "^5.0.0 || ^6.0.2 || ^7.0.0" + +picomatch@^2.3.1: + version "2.3.1" + resolved "https://registry.yarnpkg.com/picomatch/-/picomatch-2.3.1.tgz#3ba3833733646d9d3e4995946c1365a67fb07a42" + integrity sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA== + +resolve@^1.22.1: + version "1.22.8" + resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.22.8.tgz#b6c87a9f2aa06dfab52e3d70ac8cde321fa5a48d" + integrity sha512-oKWePCxqpd6FlLvGV1VU0x7bkPmmCNolxzjMf4NczoDnQcIWrAF+cPtZn5i6n+RfD2d9i0tzpKnG6Yk168yIyw== + dependencies: + is-core-module "^2.13.0" + path-parse "^1.0.7" + supports-preserve-symlinks-flag "^1.0.0" + +rimraf@^2.5.4: + version "2.7.1" + resolved "https://registry.yarnpkg.com/rimraf/-/rimraf-2.7.1.tgz#35797f13a7fdadc566142c29d4f07ccad483e3ec" + integrity sha512-uWjbaKIK3T1OSVptzX7Nl6PvQ3qAGtKEtVRjRuazjfL3Bx5eI409VZSqgND+4UNnmzLVdPj9FqFJNPqBZFve4w== + dependencies: + glob "^7.1.3" + +rimraf@^5.0.0, rimraf@^5.0.1: + version "5.0.7" + resolved "https://registry.yarnpkg.com/rimraf/-/rimraf-5.0.7.tgz#27bddf202e7d89cb2e0381656380d1734a854a74" + integrity sha512-nV6YcJo5wbLW77m+8KjH8aB/7/rxQy9SZ0HY5shnwULfS+9nmTtVXAJET5NdZmCzA4fPI/Hm1wo/Po/4mopOdg== + dependencies: + glob "^10.3.7" + +rollup@^3.27.2: + version "3.29.4" + resolved "https://registry.yarnpkg.com/rollup/-/rollup-3.29.4.tgz#4d70c0f9834146df8705bfb69a9a19c9e1109981" + integrity sha512-oWzmBZwvYrU0iJHtDmhsm662rC15FRXmcjCk1xD771dFDx5jJ02ufAQQTn0etB2emNk4J9EZg/yWKpsn9BWGRw== + optionalDependencies: + fsevents "~2.3.2" + +shebang-command@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/shebang-command/-/shebang-command-2.0.0.tgz#ccd0af4f8835fbdc265b82461aaf0c36663f34ea" + integrity sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA== + dependencies: + shebang-regex "^3.0.0" + +shebang-regex@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/shebang-regex/-/shebang-regex-3.0.0.tgz#ae16f1644d873ecad843b0307b143362d4c42172" + integrity sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A== + +signal-exit@^4.0.1: + version "4.1.0" + resolved "https://registry.yarnpkg.com/signal-exit/-/signal-exit-4.1.0.tgz#952188c1cbd546070e2dd20d0f41c0ae0530cb04" + integrity sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw== + +"string-width-cjs@npm:string-width@^4.2.0": + version "4.2.3" + resolved "https://registry.yarnpkg.com/string-width/-/string-width-4.2.3.tgz#269c7117d27b05ad2e536830a8ec895ef9c6d010" + integrity sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g== + dependencies: + emoji-regex "^8.0.0" + is-fullwidth-code-point "^3.0.0" + strip-ansi "^6.0.1" + +string-width@^4.1.0: + version "4.2.3" + resolved "https://registry.yarnpkg.com/string-width/-/string-width-4.2.3.tgz#269c7117d27b05ad2e536830a8ec895ef9c6d010" + integrity 
sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g== + dependencies: + emoji-regex "^8.0.0" + is-fullwidth-code-point "^3.0.0" + strip-ansi "^6.0.1" + +string-width@^5.0.1, string-width@^5.1.2: + version "5.1.2" + resolved "https://registry.yarnpkg.com/string-width/-/string-width-5.1.2.tgz#14f8daec6d81e7221d2a357e668cab73bdbca794" + integrity sha512-HnLOCR3vjcY8beoNLtcjZ5/nxn2afmME6lhrDrebokqMap+XbeW8n9TXpPDOqdGK5qcI3oT0GKTW6wC7EMiVqA== + dependencies: + eastasianwidth "^0.2.0" + emoji-regex "^9.2.2" + strip-ansi "^7.0.1" + +"strip-ansi-cjs@npm:strip-ansi@^6.0.1": + version "6.0.1" + resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-6.0.1.tgz#9e26c63d30f53443e9489495b2105d37b67a85d9" + integrity sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A== + dependencies: + ansi-regex "^5.0.1" + +strip-ansi@^6.0.0, strip-ansi@^6.0.1: + version "6.0.1" + resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-6.0.1.tgz#9e26c63d30f53443e9489495b2105d37b67a85d9" + integrity sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A== + dependencies: + ansi-regex "^5.0.1" + +strip-ansi@^7.0.1: + version "7.1.0" + resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-7.1.0.tgz#d5b6568ca689d8561370b0707685d22434faff45" + integrity sha512-iq6eVVI64nQQTRYq2KtEg2d2uU7LElhTJwsH4YzIHZshxlgZms/wIc4VoDQTlG/IvVIrBKG06CrZnp0qv7hkcQ== + dependencies: + ansi-regex "^6.0.1" + +supports-color@^7.1.0: + version "7.2.0" + resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-7.2.0.tgz#1b7dcdcb32b8138801b3e478ba6a51caa89648da" + integrity sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw== + dependencies: + has-flag "^4.0.0" + +supports-preserve-symlinks-flag@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz#6eda4bd344a3c94aea376d4cc31bc77311039e09" + integrity sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w== + +tar@^6.1.11: + version "6.2.1" + resolved "https://registry.yarnpkg.com/tar/-/tar-6.2.1.tgz#717549c541bc3c2af15751bea94b1dd068d4b03a" + integrity sha512-DZ4yORTwrbTj/7MZYq2w+/ZFdI6OZ/f9SFHR+71gIVUZhOQPHzVCLpvRnPgyaMpfWxxk/4ONva3GQSyNIKRv6A== + dependencies: + chownr "^2.0.0" + fs-minipass "^2.0.0" + minipass "^5.0.0" + minizlib "^2.1.1" + mkdirp "^1.0.3" + yallist "^4.0.0" + +tr46@~0.0.3: + version "0.0.3" + resolved "https://registry.yarnpkg.com/tr46/-/tr46-0.0.3.tgz#8184fd347dac9cdc185992f3a6622e14b9d9ab6a" + integrity sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw== + +webidl-conversions@^3.0.0: + version "3.0.1" + resolved "https://registry.yarnpkg.com/webidl-conversions/-/webidl-conversions-3.0.1.tgz#24534275e2a7bc6be7bc86611cc16ae0a5654871" + integrity sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ== + +whatwg-url@^5.0.0: + version "5.0.0" + resolved "https://registry.yarnpkg.com/whatwg-url/-/whatwg-url-5.0.0.tgz#966454e8765462e37644d3626f6742ce8b70965d" + integrity sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw== + dependencies: + tr46 "~0.0.3" + webidl-conversions "^3.0.0" + +which@^2.0.1: + version "2.0.2" + resolved "https://registry.yarnpkg.com/which/-/which-2.0.2.tgz#7c6a8dd0a636a0327e10b59c9286eee93f3f51b1" + 
integrity sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA== + dependencies: + isexe "^2.0.0" + +"wrap-ansi-cjs@npm:wrap-ansi@^7.0.0": + version "7.0.0" + resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-7.0.0.tgz#67e145cff510a6a6984bdf1152911d69d2eb9e43" + integrity sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q== + dependencies: + ansi-styles "^4.0.0" + string-width "^4.1.0" + strip-ansi "^6.0.0" + +wrap-ansi@^8.1.0: + version "8.1.0" + resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-8.1.0.tgz#56dc22368ee570face1b49819975d9b9a5ead214" + integrity sha512-si7QWI6zUMq56bESFvagtmzMdGOtoxfR+Sez11Mobfc7tm+VkUckk9bW2UeffTGVUbOksxmSw0AA2gs8g71NCQ== + dependencies: + ansi-styles "^6.1.0" + string-width "^5.0.1" + strip-ansi "^7.0.1" + +wrappy@1: + version "1.0.2" + resolved "https://registry.yarnpkg.com/wrappy/-/wrappy-1.0.2.tgz#b5243d8f3ec1aa35f1364605bc0d1036e30ab69f" + integrity sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ== + +yallist@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/yallist/-/yallist-4.0.0.tgz#9bb92790d9c0effec63be73519e11a35019a3a72" + integrity sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A== diff --git a/website/.eslintrc.json b/website/.eslintrc.json new file mode 100644 index 000000000..bffb357a7 --- /dev/null +++ b/website/.eslintrc.json @@ -0,0 +1,3 @@ +{ + "extends": "next/core-web-vitals" +} diff --git a/website/.gitignore b/website/.gitignore new file mode 100644 index 000000000..fd3dbb571 --- /dev/null +++ b/website/.gitignore @@ -0,0 +1,36 @@ +# See https://help.github.com/articles/ignoring-files/ for more about ignoring files. + +# dependencies +/node_modules +/.pnp +.pnp.js +.yarn/install-state.gz + +# testing +/coverage + +# next.js +/.next/ +/out/ + +# production +/build + +# misc +.DS_Store +*.pem + +# debug +npm-debug.log* +yarn-debug.log* +yarn-error.log* + +# local env files +.env*.local + +# vercel +.vercel + +# typescript +*.tsbuildinfo +next-env.d.ts diff --git a/website/README.md b/website/README.md new file mode 100644 index 000000000..c4033664f --- /dev/null +++ b/website/README.md @@ -0,0 +1,36 @@ +This is a [Next.js](https://nextjs.org/) project bootstrapped with [`create-next-app`](https://github.com/vercel/next.js/tree/canary/packages/create-next-app). + +## Getting Started + +First, run the development server: + +```bash +npm run dev +# or +yarn dev +# or +pnpm dev +# or +bun dev +``` + +Open [http://localhost:3000](http://localhost:3000) with your browser to see the result. + +You can start editing the page by modifying `app/page.tsx`. The page auto-updates as you edit the file. + +This project uses [`next/font`](https://nextjs.org/docs/basic-features/font-optimization) to automatically optimize and load Inter, a custom Google Font. + +## Learn More + +To learn more about Next.js, take a look at the following resources: + +- [Next.js Documentation](https://nextjs.org/docs) - learn about Next.js features and API. +- [Learn Next.js](https://nextjs.org/learn) - an interactive Next.js tutorial. + +You can check out [the Next.js GitHub repository](https://github.com/vercel/next.js/) - your feedback and contributions are welcome! 
+ +## Deploy on Vercel + +The easiest way to deploy your Next.js app is to use the [Vercel Platform](https://vercel.com/new?utm_medium=default-template&filter=next.js&utm_source=create-next-app&utm_campaign=create-next-app-readme) from the creators of Next.js. + +Check out our [Next.js deployment documentation](https://nextjs.org/docs/deployment) for more details. diff --git a/website/next.config.js b/website/next.config.js new file mode 100644 index 000000000..f6c3100b3 --- /dev/null +++ b/website/next.config.js @@ -0,0 +1,19 @@ +/** @type {import('next').NextConfig} */ +const nextConfig = { + reactStrictMode: false, + webpack: (config, { isServer }) => { + if (!isServer) { + config.experiments = { + ...config.experiments, + asyncWebAssembly: true, + syncWebAssembly: true, + topLevelAwait: true, + layers: true, + } + } + + return config; + } +}; + +module.exports = nextConfig; diff --git a/website/package.json b/website/package.json new file mode 100644 index 000000000..20e4100ca --- /dev/null +++ b/website/package.json @@ -0,0 +1,42 @@ +{ + "name": "website", + "version": "0.1.0", + "private": true, + "scripts": { + "build:workers": "tsc -p tsconfig.worker.json", + "watch:workers": "tsc -p tsconfig.worker.json -watch", + "dev": "next dev", + "build": "next build", + "start": "next start", + "lint": "next lint" + }, + "dependencies": { + "@demox-labs/miden-sdk": "file:../wasm/dist", + "@tailwindcss/forms": "^0.5.7", + "@tailwindcss/typography": "^0.5.13", + "classnames": "^2.5.1", + "dexie-react-hooks": "^1.1.7", + "framer-motion": "^11.2.0", + "jotai": "^2.8.0", + "next": "^14.2.3", + "next-themes": "^0.3.0", + "overlayscrollbars": "^2.8.0", + "overlayscrollbars-react": "^0.5.6", + "react": "^18", + "react-dom": "^18", + "react-use": "^17.5.0", + "swiper": "^11.1.3", + "wasm": "file:../wasm/pkg" + }, + "devDependencies": { + "@types/node": "^20", + "@types/react": "^18", + "@types/react-dom": "^18", + "autoprefixer": "^10.0.1", + "eslint": "^8", + "eslint-config-next": "14.1.4", + "postcss": "^8", + "tailwindcss": "^3.3.0", + "typescript": "^5" + } +} diff --git a/website/postcss.config.js b/website/postcss.config.js new file mode 100644 index 000000000..12a703d90 --- /dev/null +++ b/website/postcss.config.js @@ -0,0 +1,6 @@ +module.exports = { + plugins: { + tailwindcss: {}, + autoprefixer: {}, + }, +}; diff --git a/website/src/app/accounts/[accountId]/page.tsx b/website/src/app/accounts/[accountId]/page.tsx new file mode 100644 index 000000000..329f3829b --- /dev/null +++ b/website/src/app/accounts/[accountId]/page.tsx @@ -0,0 +1,128 @@ +'use client' +import { AssetInfo, SerializedAccount } from "@demox-labs/miden-sdk"; +import { useLayoutEffect, useRef, useState } from "react"; + +export interface IJSSerializedAccount { + id: string + nonce: string + vault_root: string + storage_root: string + code_root: string + account_type: string, + is_faucet: boolean, + is_regular_account: boolean, + is_on_chain: boolean, + assets: IJSAssetInfo[] +} + +interface IJSAssetInfo { + is_fungible: boolean, + amount: string, + faucet_id: string +} + +export class JSSerializedAccount implements IJSSerializedAccount { + id: string + nonce: string + vault_root: string + storage_root: string + code_root: string + account_type: string + is_faucet: boolean + is_regular_account: boolean + is_on_chain: boolean + assets: IJSAssetInfo[] + + constructor(account: SerializedAccount) { + this.id = account.id + this.nonce = account.nonce + this.vault_root = account.vault_root + this.storage_root = account.storage_root 
+ this.code_root = account.code_root + this.account_type = account.account_type + this.is_faucet = account.is_faucet + this.is_regular_account = account.is_regular_account + this.is_on_chain = account.is_on_chain + this.assets = account.assets.map((asset: AssetInfo) => { + return { + is_fungible: asset.is_fungible, + amount: asset.amount, + faucet_id: asset.faucet_id + } as IJSAssetInfo + }) + } + +} + +const AccountDetailsPage = ({ params }: { params: { accountId: string } }) => { + const workerRef = useRef() + const [account, setAccount] = useState(null); + + const createWorkerAndGetAccount = async () => { + return new Promise((resolve, reject) => { + workerRef.current = new Worker(new URL('../../../workers/accounts.ts', import.meta.url), { type : "module" }); + + workerRef.current.onmessage = function(event) { + switch (event.data.type) { + case "ready": + console.log('Worker is ready. Sending message...'); + workerRef.current?.postMessage({ type: "getAccount", params: { accountId: params.accountId } }); + break; + case "getAccount": + console.log('get account worker finished', event.data.account) + setAccount(event.data.account as JSSerializedAccount); + break; + default: + console.log('invalid message:', event.data); + break; + } + }; + + workerRef.current.onerror = function(error) { + reject(error); + }; + }); + } + + useLayoutEffect(() => { + createWorkerAndGetAccount() + + return () => { + workerRef.current?.terminate(); + } + }, []) + + if (!account) return
Loading...
; + + return ( +
+
+

Account Details for account {params.accountId}

+
+

Nonce: {account.nonce}

+

Vault root: {account.vault_root}

+

Storage root: {account.storage_root}

+

Code root: {account.code_root}

+

Account Type: {account.account_type}

+

Is Faucet?: {account.is_faucet.toString()}

+

Is regular account?: {account.is_regular_account.toString()}

+

Is on chain?: {account.is_on_chain.toString()}

+
+ +

Assets:

+
+ {account.assets.map((asset, idx) => +
+

Asset {idx}

+

Asset faucet Id: {asset.faucet_id}

+

Asset amount: {asset.amount}

+

Asset is fungible: {asset.is_fungible.toString()}

+
) + } +
+
+
+ ); +}; + +export default AccountDetailsPage; \ No newline at end of file diff --git a/website/src/app/accounts/page.tsx b/website/src/app/accounts/page.tsx new file mode 100644 index 000000000..76fbacbca --- /dev/null +++ b/website/src/app/accounts/page.tsx @@ -0,0 +1,164 @@ +'use client' + +import DashboardLayout from '@/layouts/dashboard/_dashboard'; +import { MutableRefObject, ReactElement, useLayoutEffect, useRef } from 'react'; + +import { useState } from 'react' +import Loader from '@/components/ui/loader'; +import { SerializedAccount } from '@demox-labs/miden-sdk'; +import Link from 'next/link'; + +export interface Account { + id: string + nonce: string + vault_root: string + storage_root: string + code_root: string + account_type: string, + is_faucet: boolean, + is_regular_account: boolean, + is_on_chain: boolean, +} + +function AccountsTable({ accounts, isLoading, worker }: { accounts: Account[], isLoading: boolean, worker: MutableRefObject }) { + + const getAccount = (accountId: string) => { + worker.current?.postMessage({ type: "getAccount", params: { accountId } }) + } + + return ( +
+
+

Wallets

+
+ + + + + + + + + {isLoading + ? + : accounts.map((account) => ( + { getAccount(account.id) }}> + + + + )) + } + +
IdNonce
+ + {account.id} + + {account.nonce}
+
+ ) +} + +export default function Accounts() { + const workerRef = useRef() + const [walletStorageType, setWalletStorageType] = useState('OffChain') + const [walletMutable, setWalletMutable] = useState(true) + const [createWalletLoading, setCreateWalletLoading] = useState(false) + const [fetchAccountsLoading, setFetchAccountsLoading] = useState(true) + const [accounts, setAccounts] = useState([]) + + function createWorkerAndSendMessage(message: object) { + return new Promise((resolve, reject) => { + workerRef.current = new Worker(new URL('../../workers/accounts.ts', import.meta.url), { type : "module" }); + + workerRef.current.onmessage = function(event) { + switch (event.data.type) { + case "ready": + console.log('Worker is ready. Sending message...'); + workerRef.current?.postMessage(message); + break; + case "createWallet": + console.log('create wallet worker finished') + workerRef.current?.postMessage({ type: "fetchAccounts" }) + setCreateWalletLoading(false) + break; + case "fetchAccounts": + console.log('fetch accounts worker finished', event.data.accounts) + setFetchAccountsLoading(false) + setAccounts(event.data.accounts) + break; + case "getAccount": + const account = event.data.account as SerializedAccount + console.log('get account worker finished', account) + break; + default: + console.log('invalid message:', event.data); + break; + } + }; + + workerRef.current.onerror = function(error) { + reject(error); + }; + }); + } + + useLayoutEffect(() => { + createWorkerAndSendMessage({ type: "fetchAccounts" }) + + return () => { + workerRef.current?.terminate(); + } + }, []) + + async function createWallet() { + try { + setCreateWalletLoading(true) + workerRef.current?.postMessage({ type: "createWallet", params: { storageType: walletStorageType, mutable: walletMutable } }) + } catch (error) { + console.error('Failed to call create account:', error); + } + } + + return ( +
+
+
+
+ + +
+ +
+ + +
+
+ + +
+ account.is_regular_account)} + isLoading={fetchAccountsLoading} + worker={workerRef} /> +
+ ) +} + +Accounts.getLayout = function getLayout(page: ReactElement) { + return ( + + {page} + + ) +} \ No newline at end of file diff --git a/website/src/app/faucets/page.tsx b/website/src/app/faucets/page.tsx new file mode 100644 index 000000000..711a3570e --- /dev/null +++ b/website/src/app/faucets/page.tsx @@ -0,0 +1,159 @@ +'use client' + +import DashboardLayout from '@/layouts/dashboard/_dashboard'; +import { ReactElement, useLayoutEffect, useRef } from 'react'; + +import { useState } from 'react' +import Loader from '@/components/ui/loader'; +import { Account } from '../accounts/page'; +import Link from 'next/link'; + + +function FaucetsTable({ accounts, isLoading }: { accounts: Account[], isLoading: boolean }) { + + return ( +
+
+

Faucets

+
+ + + + + + + + + {isLoading + ? + : accounts.map((account) => ( + + + + + )) + } + +
IdNonce
+ + {account.id} + + {account.nonce}
+
+ ) +} + +export default function Faucets() { + const workerRef = useRef() + const [faucetStorageType, setFaucetStorageType] = useState('OffChain') + const [tokenSymbol, setTokenSymbol] = useState("TOK") + const [decimals, setDecimals] = useState("6") + const [maxSupply, setMaxSupply] = useState("1000000") + const [createAccountLoading, setCreateAccountLoading] = useState(false) + const [fetchAccountsLoading, setFetchAccountsLoading] = useState(true) + const [accounts, setAccounts] = useState([]) + const [recentFaucetId, setRecentFaucetId] = useState("") + + function createWorkerAndSendMessage(message: object) { + return new Promise((resolve, reject) => { + workerRef.current = new Worker(new URL('../../workers/accounts.ts', import.meta.url), { type : "module" }); + + workerRef.current.onmessage = function(event) { + switch (event.data.type) { + case "ready": + workerRef.current?.postMessage(message); + break; + case "createFaucet": + workerRef.current?.postMessage({ type: "fetchAccounts" }) + setRecentFaucetId(event.data.faucetId) + setCreateAccountLoading(false) + break; + case "fetchAccounts": + setFetchAccountsLoading(false) + setAccounts(event.data.accounts) + break; + default: + console.log('invalid message:', event.data); + break; + } + }; + + workerRef.current.onerror = function(error) { + reject(error); + }; + }); + } + + useLayoutEffect(() => { + createWorkerAndSendMessage({ type: "fetchAccounts" }) + + return () => { + workerRef.current?.terminate(); + } + }, []) + + async function createFaucet() { + try { + setCreateAccountLoading(true) + workerRef.current?.postMessage({ + type: "createFaucet", + params: { + storageType: faucetStorageType, + nonFungible: false, // Only support fungible tokens for now + tokenSymbol: tokenSymbol, + decimals: decimals, + maxSupply: maxSupply + } + }) + } catch (error) { + console.error('Failed to call create account:', error); + } + } + + return ( +
+
+
+
+ + +
+ +
+ + setTokenSymbol(e.target.value)} className="text-sm bg-gray-700 text-white rounded-md h-10 w-32 mr-4 cursor-pointer" /> +
+ +
+ + setDecimals(e.target.value)} className="text-sm bg-gray-700 text-white rounded-md h-10 w-32 mr-4 cursor-pointer" /> +
+ +
+ + setMaxSupply(e.target.value)} className="text-sm bg-gray-700 text-white rounded-md h-10 w-32 mr-4 cursor-pointer" /> +
+
+ + +
+ account.is_faucet)} isLoading={fetchAccountsLoading} /> +
+ ) +} + +Faucets.getLayout = function getLayout(page: ReactElement) { + return ( + + {page} + + ) +} \ No newline at end of file diff --git a/website/src/app/favicon.ico b/website/src/app/favicon.ico new file mode 100644 index 000000000..718d6fea4 Binary files /dev/null and b/website/src/app/favicon.ico differ diff --git a/website/src/app/globals.css b/website/src/app/globals.css new file mode 100644 index 000000000..875c01e81 --- /dev/null +++ b/website/src/app/globals.css @@ -0,0 +1,33 @@ +@tailwind base; +@tailwind components; +@tailwind utilities; + +:root { + --foreground-rgb: 0, 0, 0; + --background-start-rgb: 214, 219, 220; + --background-end-rgb: 255, 255, 255; +} + +@media (prefers-color-scheme: dark) { + :root { + --foreground-rgb: 255, 255, 255; + --background-start-rgb: 0, 0, 0; + --background-end-rgb: 0, 0, 0; + } +} + +body { + color: rgb(var(--foreground-rgb)); + background: linear-gradient( + to bottom, + transparent, + rgb(var(--background-end-rgb)) + ) + rgb(var(--background-start-rgb)); +} + +@layer utilities { + .text-balance { + text-wrap: balance; + } +} diff --git a/website/src/app/layout.tsx b/website/src/app/layout.tsx new file mode 100644 index 000000000..564ae5184 --- /dev/null +++ b/website/src/app/layout.tsx @@ -0,0 +1,82 @@ +import type { Metadata } from "next"; +import { Inter } from "next/font/google"; +// base css file +import 'swiper/css'; +import '@/assets/css/scrollbar.css'; +import '@/assets/css/globals.css'; +import '@/assets/css/range-slider.css'; +import Sidebar from "@/layouts/dashboard/_sidebar"; +import { ThemeProvider } from "next-themes"; +import { Header } from "@/layouts/dashboard/_dashboard"; + +const inter = Inter({ subsets: ["latin"] }); + +export const metadata: Metadata = { + title: "Create Next App", + description: "Generated by create next app", +}; + +export default function RootLayout({ + children, +}: Readonly<{ + children: React.ReactNode; +}>) { + return ( + + { +
+ {/* */} +
+
+
+ +
+ +
+ {children} +
+ +
+
+ {/*
*/} +
+ } + + ); +} + +// export default function RootLayout({ +// children, +// }: Readonly<{ +// children: React.ReactNode; +// }>) { +// return ( +// +// { +//
+// +//
+// +//
+// {children} +//
+// +//
+// } +// +// ); +// } diff --git a/website/src/app/notes/page.tsx b/website/src/app/notes/page.tsx new file mode 100644 index 000000000..8bd89ff38 --- /dev/null +++ b/website/src/app/notes/page.tsx @@ -0,0 +1,120 @@ +'use client' + +import { ReactElement, useEffect, useLayoutEffect, useRef, useState } from 'react'; +import DashboardLayout from '@/layouts/dashboard/_dashboard'; +import Loader from '@/components/ui/loader'; + +function NotesTable({ inputNotes, outputNotes, inputNotesLoading, outputNotesLoading }: { inputNotes: string[], outputNotes: string[], inputNotesLoading: boolean, outputNotesLoading: boolean }) { + + return ( +
+
+
+

Input Notes

+
+ + + + + + + + {inputNotesLoading + ? + : inputNotes.map((noteId) => ( + + + + )) + } + +
Id
{noteId}
+
+
+
+

Output Notes

+
+ + + + + + + + {outputNotesLoading + ? + : outputNotes.map((noteId) => ( + + + + )) + } + +
Id
{noteId}
+
+
+ ) +} + +export default function Notes() { + const workerRef = useRef() + const [inputNotes, setInputNotes] = useState([]) + const [fetchInputNotes, setFetchInputNotesLoading] = useState(true) + const [outputNotes, setOutputNotes] = useState([]) + const [fetchOutputNotes, setFetchOutputNotesLoading] = useState(true) + + function createWorkerAndFetchNotes() { + return new Promise((resolve, reject) => { + workerRef.current = new Worker(new URL('../../workers/accounts.ts', import.meta.url), { type : "module" }); + + workerRef.current.onmessage = function(event) { + switch (event.data.type) { + case "ready": + console.log('Worker is ready. Sending message...'); + workerRef.current?.postMessage({ type: "fetchInputNotes", params: { noteFilter: "All" } }); + break; + case "fetchInputNotes": + console.log('fetch input notes worker finished', event.data.inputNotes) + setFetchInputNotesLoading(false) + setInputNotes(event.data.inputNotes) + workerRef.current?.postMessage({ type: "fetchOutputNotes", params: { noteFilter: "All" } }); + break; + case "fetchOutputNotes": + console.log('fetch output notes worker finished', event.data.outputNotes) + setFetchOutputNotesLoading(false) + setOutputNotes(event.data.outputNotes) + break; + default: + console.log('invalid message:', event.data); + break; + } + }; + + workerRef.current.onerror = function(error) { + reject(error); + }; + }); + } + + useLayoutEffect(() => { + createWorkerAndFetchNotes() + + return () => { + workerRef.current?.terminate(); + } + }, []) + + return ( +
+ +
+ ) +} + +Notes.getLayout = function getLayout(page: ReactElement) { + return ( + + {page} + + ) +} \ No newline at end of file diff --git a/website/src/app/page.tsx b/website/src/app/page.tsx new file mode 100644 index 000000000..5613ae0e9 --- /dev/null +++ b/website/src/app/page.tsx @@ -0,0 +1,9 @@ +'use client' + +export default function Home() { + return ( +
+

This website will serve as a testing site and demo of the Miden WASM

+
+ ); +} diff --git a/website/src/app/transactions/page.tsx b/website/src/app/transactions/page.tsx new file mode 100644 index 000000000..ef535956b --- /dev/null +++ b/website/src/app/transactions/page.tsx @@ -0,0 +1,476 @@ +'use client' + +import Loader from '@/components/ui/loader'; +import DashboardLayout from '@/layouts/dashboard/_dashboard'; +import { MutableRefObject, ReactElement, use, useLayoutEffect, useRef, useState } from "react" +import { Account } from '../accounts/page'; + +interface ConsumeParams { + targetAccountId: string, + listOfNotes: string[] +} + +interface TransactionResult { + transactionId: string, + createdNoteIds: string[], +} + +interface SwapTransactionResult { + transactionId: string, + expectedOutputNoteIds: string[], + expectedPartialNoteIds: string[], + paybackNoteTag: string +} + +interface MintTransactionProps { + accounts: Account[], + fetchAccountsLoading: boolean, + worker: MutableRefObject, + mintLoading: boolean, + setMintLoading: React.Dispatch>, + mintedTransaction: TransactionResult | null + consumeLoading: boolean, + setConsumeLoading: React.Dispatch>, +} + +interface SendTransactionProps { + accounts: Account[], + fetchAccountsLoading: boolean, + worker: MutableRefObject, + sendLoading: boolean, + setSendLoading: React.Dispatch>, + sentTransaction: TransactionResult | null, + consumeLoading: boolean, + setConsumeLoading: React.Dispatch> +} + +interface SwapTransactionProps { + accounts: Account[], + fetchAccountsLoading: boolean, + worker: MutableRefObject, + swapLoading: boolean, + setSwapLoading: React.Dispatch>, + swappedTransaction: SwapTransactionResult | null, + swapANotes: string[] | null, + swapBNotes: string[] | null, + consumeALoading: boolean, + setConsumeALoading: React.Dispatch>, + consumeBLoading: boolean, + setConsumeBLoading: React.Dispatch>, +} + +interface ConsumeTransactionProps { + consumeLoading: boolean, + setConsumeLoading: React.Dispatch>, + worker: MutableRefObject, + transactionId?: string, + consumeParams: ConsumeParams | null, + consumeType: string, + consumeTitle?: string +} + +function ConsumeTransaction({consumeLoading, setConsumeLoading, worker, transactionId, consumeParams, consumeType, consumeTitle="Consume Note" }: ConsumeTransactionProps) { + const DoComsume = async () => { + setConsumeLoading(true) + worker.current?.postMessage({ type: "consumeTransaction", params: { + targetAccountId: consumeParams?.targetAccountId, + noteIds: consumeParams?.listOfNotes, + consumeType: consumeType + } }) + } + + return( +
+
+

{consumeTitle}

+
+
+ {consumeParams + ?
+

Created Transaction Id: {transactionId}

+

Created Note Ids: {consumeParams.listOfNotes.join(", ")}

+
+ +
+
+ :

Create a note to consume it here!

} +
+
+ ) +} + +function MintTransaction({ accounts, fetchAccountsLoading, worker, mintLoading, setMintLoading, mintedTransaction, consumeLoading, setConsumeLoading }: MintTransactionProps) { + const wallets = accounts.filter(account => account.is_regular_account) + const faucets = accounts.filter(account => account.is_faucet) + const [selectedWalletId, setSelectedWallet] = useState("Select a wallet") + const [selectedFaucetId, setSelectedFaucet] = useState("Select a faucet") + + const DoMint = async () => { + setMintLoading(true) + worker.current?.postMessage({ type: "mintTransaction", params: { + faucetId: selectedFaucetId, + walletId: selectedWalletId, + noteType: "Private", + amount: "5", + } }); + } + + return fetchAccountsLoading ?
: ( +
+
+

Mint 5 tokens

+
+
+
+
+
+ +
+
+ +
+
+ +
+
+
+ +
+
+ ) +} + +function SendTransaction(props: SendTransactionProps) { + const wallets = props.accounts.filter(account => account.is_regular_account) + const faucets = props.accounts.filter(account => account.is_faucet) + const [senderId, setSenderId] = useState("Select a sender") + const [receiverId, setReceiverId] = useState("Select a recipient") + const [faucetId, setFaucetId] = useState("Select a faucet") + + const parametersSelected = (): boolean => { + return senderId != "Select a sender" && receiverId != "Select a recipient" && faucetId != "Select a faucet" + } + + const DoSend = async () => { + props.setSendLoading(true) + props.worker.current?.postMessage({ type: "sendTransaction", params: { + senderAccountId: senderId, + targetAccountId: receiverId, + faucetId: faucetId, + noteType: "Private", + amount: "1", + recallHeight: null + } }); + } + + return props.fetchAccountsLoading ?
: ( +
+
+

Send 1 token

+
+
+
+
+
+ +
+
+ +
+
+ +
+
+ +
+
+
+ +
+
+ ) +} + +function SwapTransaction(props: SwapTransactionProps) { + const wallets = props.accounts.filter(account => account.is_regular_account) + const faucets = props.accounts.filter(account => account.is_faucet) + const [walletA, setWalletA] = useState("Select wallet A") + const [faucetA, setFaucetA] = useState("Select faucet A") + const [walletB, setWalletB] = useState("Select wallet B") + const [faucetB, setFaucetB] = useState("Select faucet B") + + const parametersSelected = (): boolean => { + return walletA != "Select wallet A" && faucetA != "Select faucet A" && walletB != "Select wallet B" && faucetB != "Select faucet B" + } + + const DoSwap = async () => { + props.setSwapLoading(true) + props.worker.current?.postMessage({ type: "swapTransaction", params: { + walletA: walletA, + faucetA: faucetA, + amountOfA: "1", + faucetB: faucetB, + amountOfB: "1", + noteType: "Public" + } }); + } + + return props.fetchAccountsLoading ?
: ( +
+
+

Swap assets

+
+
+
+
+
+ +
+
+ +
+
+ +
+
+ +
+
+ +
+
+
+ +
+
+ +
+
+ ) +} + +function TransactionsTable({ transactions, isLoading }: { transactions: string[], isLoading: boolean }) { + return ( +
+
+

Transactions

+
+ + + + + + + + {isLoading + ? + : transactions.map((transactionId) => ( + + + + )) + } + +
Id
{transactionId}
+
+ ) +} + +export default function Transactions() { + const workerRef = useRef() + const [fetchTransactionsLoading, setFetchTransactionsLoading] = useState(true) + const [transactionIds, setTransactionIds] = useState([]) + const [fetchAccountsLoading, setFetchAccountsLoading] = useState(true) + const [accounts, setAccounts] = useState([]) + // Mint state + const [mintLoading, setMintLoading] = useState(false) + const [mintedTransaction, setMintedTransacton] = useState(null) + // Send State + const [sendLoading, setSendLoading] = useState(false) + const [sentTransaction, setSentTransaction] = useState(null) + // Swap State + const [swapLoading, setSwapLoading] = useState(false) + const [swappedTransaction, setSwappedTransaction] = useState(null) + // Consume state + const [mintConsumeLoading, setMintConsumeLoading] = useState(false) + const [sendConsumeLoading, setSendConsumeLoading] = useState(false) + const [swapAConsumeLoading, setSwapAConsumeLoading] = useState(false) + const [swapBConsumeLoading, setSwapBConsumeLoading] = useState(false) + const [swapANotes, setSwapANotes] = useState(null) + const [swapBNotes, setSwapBNotes] = useState(null) + + function createWorkerAndFetchTransactions() { + return new Promise((resolve, reject) => { + workerRef.current = new Worker(new URL('../../workers/accounts.ts', import.meta.url), { type : "module" }); + + workerRef.current.onmessage = function(event) { + switch (event.data.type) { + case "ready": + console.log('Worker is ready. Sending message...'); + workerRef.current?.postMessage({ type: "fetchTransactions" }); + break; + case "fetchTransactions": + console.log('fetch transaction ids worker finished', event.data.inputNotes) + setFetchTransactionsLoading(false) + setTransactionIds(event.data.transactions) + workerRef.current?.postMessage({ type: "fetchAccounts" }); + break; + case "fetchAccounts": + console.log('fetch accounts worker finished', event.data.accounts) + setFetchAccountsLoading(false) + setAccounts(event.data.accounts) + break; + case "mintTransaction": + console.log('mint transaction worker finished', event.data.mintResult.transactionId, event.data.mintResult.createdNoteIds) + setMintLoading(false) + setMintedTransacton(event.data.mintResult as TransactionResult) + workerRef.current?.postMessage({ type: "fetchTransactions" }) + break; + case "sendTransaction": + console.log('send transaction worker finished', event.data) + setSendLoading(false) + setSentTransaction(event.data.sendResult as TransactionResult) + workerRef.current?.postMessage({ type: "fetchTransactions" }) + break; + case "swapTransaction": + console.log('swap transaction worker finished', event.data) + setSwapLoading(false) + const swapResult = event.data.swapResult as SwapTransactionResult + setSwappedTransaction(swapResult) + setSwapANotes(swapResult.expectedPartialNoteIds) + setSwapBNotes(swapResult.expectedOutputNoteIds) + workerRef.current?.postMessage({ type: "fetchTransactions" }) + break; + case "consumeTransaction": + console.log('consume transaction worker finished', event.data) + setMintConsumeLoading(false) + if (event.data.consumeType == "mint") setMintedTransacton(null) + if (event.data.consumeType == "send") setSentTransaction(null) + if (event.data.consumeType == "swapA") { + setSwapAConsumeLoading(false) + setSwapANotes(null) + } + if (event.data.consumeType == "swapB") { + setSwapBConsumeLoading(false) + setSwapBNotes(null) + } + workerRef.current?.postMessage({ type: "fetchTransactions" }) + break; + default: + console.log('invalid message:', 
event.data); + break; + } + }; + + workerRef.current.onerror = function(error) { + reject(error); + }; + }); + } + + useLayoutEffect(() => { + createWorkerAndFetchTransactions() + + return () => { + workerRef.current?.terminate(); + } + }, []) + + return ( +
+ + + + +
+ ) +} + +Transactions.getLayout = function getLayout(page: ReactElement) { + return ( + + {page} + + ) +} \ No newline at end of file diff --git a/website/src/assets/css/globals.css b/website/src/assets/css/globals.css new file mode 100644 index 000000000..0d36e4ac3 --- /dev/null +++ b/website/src/assets/css/globals.css @@ -0,0 +1,126 @@ +@tailwind base; +@tailwind components; +@tailwind utilities; +@layer base { + :root { + --color-brand: 17 24 39; + } + html, + body { + @apply antialiased bg-body font-body dark:bg-dark dark:text-white; + } + .dynamic-html h6 { + @apply font-medium uppercase; + } + .dynamic-html p { + @apply mb-3; + } +} + +@layer utilities { + .text-case-inherit { + text-transform: inherit; + } + .letter-space-inherit { + letter-spacing: inherit; + } + .word-break-all { + word-break: break-all; + } + .animation-delay-200 { + animation-delay: 200ms; + } + .animation-delay-500 { + animation-delay: 500ms; + } + .animation-delay-700 { + animation-delay: 700ms; + } + .text-vertical { + writing-mode: tb; + text-orientation: upright; + } +} + +/* Hide spin button for input type number */ +input.spin-button-hidden::-webkit-outer-spin-button, +input.spin-button-hidden::-webkit-inner-spin-button { + -webkit-appearance: none; + margin: 0; +} + +/* html, +body { + padding: 0; + margin: 0; + font-family: -apple-system, BlinkMacSystemFont, Segoe UI, Roboto, Oxygen, + Ubuntu, Cantarell, Fira Sans, Droid Sans, Helvetica Neue, sans-serif; +} + +a { + color: inherit; + text-decoration: none; +} + +* { + box-sizing: border-box; +} */ + +/*================================================================ +Styles for Swiper slider +================================================================*/ +.swiper { + @apply flex flex-col; +} + +.swiper-wrapper { + @apply order-first; +} + +.swiper-scrollbar { + @apply relative bg-gray-200 rounded dark:bg-gray-600; +} + +.swiper-horizontal > .swiper-scrollbar { + @apply z-10 mt-4 mb-0 h-0.5 w-full sm:mt-6; +} + +.swiper-vertical > .swiper-scrollbar { + @apply absolute top-0 right-1 z-10 h-full w-0.5; +} + +.swiper-scrollbar-drag { + @apply relative bg-gray-900 rounded dark:bg-light-dark; +} + +.swiper-horizontal .swiper-scrollbar-drag { + @apply top-[-1px] left-0 h-[3px] w-full shadow-sm; +} + +.swiper-vertical .swiper-scrollbar-drag { + @apply left-[-1px] top-0 h-full w-[3px] shadow-sm; +} + +.swiper-scrollbar-cursor-drag { + @apply cursor-move; +} + +.swiper-scrollbar-lock { + @apply hidden; +} + +.transaction-table { + border-spacing: 0 12px; + margin-top: -12px; +} +.transaction-table > thead tr > th { + text-align: left; +} +.text-heading-style { + @apply text-sm font-medium tracking-wider; +} + +/* React Share button custom styles */ +.product-share .react-share__ShareButton { + @apply flex flex-col items-center justify-center; +} diff --git a/website/src/assets/css/range-slider.css b/website/src/assets/css/range-slider.css new file mode 100644 index 000000000..e096295d3 --- /dev/null +++ b/website/src/assets/css/range-slider.css @@ -0,0 +1,252 @@ +.rc-slider { + position: relative; + width: 100%; + height: 14px; + padding: 5px 0; + border-radius: 6px; + touch-action: none; + box-sizing: border-box; + -webkit-tap-highlight-color: rgba(0, 0, 0, 0); +} +.rc-slider * { + box-sizing: border-box; + -webkit-tap-highlight-color: rgba(0, 0, 0, 0); +} +.rc-slider-rail { + position: absolute; + width: 100%; + height: 4px; + border-radius: 6px; + @apply bg-gray-300; +} +.rc-slider-track { + position: absolute; + height: 4px; + border-radius: 6px; + 
@apply bg-brand; +} +.rc-slider-handle { + position: absolute; + width: 14px; + height: 14px; + margin-top: -5px; + @apply bg-white dark:bg-gray-700; + @apply border-2 border-solid border-gray-900 dark:border-gray-500; + border-radius: 50%; + cursor: pointer; + cursor: -webkit-grab; + cursor: grab; + touch-action: pan-x; +} +.rc-slider-handle-dragging.rc-slider-handle-dragging.rc-slider-handle-dragging { + @apply border-gray-900 dark:border-gray-500; + box-shadow: 0 0 0 5px #111827; +} +.rc-slider-handle:focus { + outline: none; + box-shadow: none; +} +.rc-slider-handle:focus-visible { + @apply border-gray-900 dark:border-gray-500; + box-shadow: 0 0 0 3px #111827; +} +.rc-slider-handle-click-focused:focus { + @apply border-gray-900 dark:border-gray-500; + box-shadow: unset; +} +.rc-slider-handle:hover { + @apply border-gray-900 dark:border-gray-500; +} +.rc-slider-handle:active { + @apply border-gray-900 dark:border-gray-500; + box-shadow: 0 0 5px #111827; + cursor: -webkit-grabbing; + cursor: grabbing; +} +.rc-slider-mark { + position: absolute; + top: 18px; + left: 0; + width: 100%; + font-size: 12px; +} +.rc-slider-mark-text { + position: absolute; + display: inline-block; + color: #999; + text-align: center; + vertical-align: middle; + cursor: pointer; +} +.rc-slider-mark-text-active { + color: #666; +} +.rc-slider-step { + position: absolute; + width: 100%; + height: 4px; + background: transparent; + pointer-events: none; +} +.rc-slider-dot { + position: absolute; + bottom: -2px; + width: 8px; + height: 8px; + vertical-align: middle; + @apply bg-white dark:bg-gray-700; + border: 2px solid #e9e9e9; + border-radius: 50%; + cursor: pointer; +} +.rc-slider-dot-active { + @apply border-gray-900 dark:border-gray-500; +} +.rc-slider-dot-reverse { + margin-right: -4px; +} +.rc-slider-disabled { + background-color: #e9e9e9; +} +.rc-slider-disabled .rc-slider-track { + background-color: #ccc; +} +.rc-slider-disabled .rc-slider-handle, +.rc-slider-disabled .rc-slider-dot { + @apply bg-white dark:bg-gray-700; + border-color: #ccc; + box-shadow: none; + cursor: not-allowed; +} +.rc-slider-disabled .rc-slider-mark-text, +.rc-slider-disabled .rc-slider-dot { + cursor: not-allowed !important; +} +.rc-slider-vertical { + width: 14px; + height: 100%; + padding: 0 5px; +} +.rc-slider-vertical .rc-slider-rail { + width: 4px; + height: 100%; +} +.rc-slider-vertical .rc-slider-track { + bottom: 0; + left: 5px; + width: 4px; +} +.rc-slider-vertical .rc-slider-handle { + margin-top: 0; + margin-left: -5px; + touch-action: pan-y; +} +.rc-slider-vertical .rc-slider-mark { + top: 0; + left: 18px; + height: 100%; +} +.rc-slider-vertical .rc-slider-step { + width: 4px; + height: 100%; +} +.rc-slider-vertical .rc-slider-dot { + margin-left: -2px; +} +.rc-slider-tooltip-zoom-down-enter, +.rc-slider-tooltip-zoom-down-appear { + display: block !important; + animation-duration: 0.3s; + animation-fill-mode: both; + animation-play-state: paused; +} +.rc-slider-tooltip-zoom-down-leave { + display: block !important; + animation-duration: 0.3s; + animation-fill-mode: both; + animation-play-state: paused; +} +.rc-slider-tooltip-zoom-down-enter.rc-slider-tooltip-zoom-down-enter-active, +.rc-slider-tooltip-zoom-down-appear.rc-slider-tooltip-zoom-down-appear-active { + animation-name: rcSliderTooltipZoomDownIn; + animation-play-state: running; +} +.rc-slider-tooltip-zoom-down-leave.rc-slider-tooltip-zoom-down-leave-active { + animation-name: rcSliderTooltipZoomDownOut; + animation-play-state: running; +} 
+.rc-slider-tooltip-zoom-down-enter, +.rc-slider-tooltip-zoom-down-appear { + transform: scale(0, 0); + animation-timing-function: cubic-bezier(0.23, 1, 0.32, 1); +} +.rc-slider-tooltip-zoom-down-leave { + animation-timing-function: cubic-bezier(0.755, 0.05, 0.855, 0.06); +} +@keyframes rcSliderTooltipZoomDownIn { + 0% { + transform: scale(0, 0); + transform-origin: 50% 100%; + opacity: 0; + } + 100% { + transform: scale(1, 1); + transform-origin: 50% 100%; + } +} +@keyframes rcSliderTooltipZoomDownOut { + 0% { + transform: scale(1, 1); + transform-origin: 50% 100%; + } + 100% { + transform: scale(0, 0); + transform-origin: 50% 100%; + opacity: 0; + } +} +.rc-slider-tooltip { + position: absolute; + top: -9999px; + left: -9999px; + visibility: visible; + box-sizing: border-box; + -webkit-tap-highlight-color: rgba(0, 0, 0, 0); +} +.rc-slider-tooltip * { + box-sizing: border-box; + -webkit-tap-highlight-color: rgba(0, 0, 0, 0); +} +.rc-slider-tooltip-hidden { + display: none; +} +.rc-slider-tooltip-placement-top { + padding: 4px 0 8px 0; +} +.rc-slider-tooltip-inner { + min-width: 24px; + height: 24px; + padding: 6px 2px; + color: #fff; + font-size: 12px; + line-height: 1; + text-align: center; + text-decoration: none; + background-color: #6c6c6c; + border-radius: 6px; + box-shadow: 0 0 4px #d9d9d9; +} +.rc-slider-tooltip-arrow { + position: absolute; + width: 0; + height: 0; + border-color: transparent; + border-style: solid; +} +.rc-slider-tooltip-placement-top .rc-slider-tooltip-arrow { + bottom: 4px; + left: 50%; + margin-left: -4px; + border-width: 4px 4px 0; + border-top-color: #6c6c6c; +} diff --git a/website/src/assets/css/scrollbar.css b/website/src/assets/css/scrollbar.css new file mode 100644 index 000000000..12e260798 --- /dev/null +++ b/website/src/assets/css/scrollbar.css @@ -0,0 +1,146 @@ +/* Custom scrollbar design starts from here */ +.os-theme-thin > .os-scrollbar-vertical { + bottom: 14px; + width: 14px; + padding: 6px 0; +} + +.os-theme-thin > .os-scrollbar-horizontal { + right: 14px; + height: 14px; + padding: 0px 6px; +} + +.os-theme-thin + > .os-scrollbar-horizontal + > .os-scrollbar-track + > .os-scrollbar-handle { + height: 100%; + min-width: 30px; +} + +.os-theme-thin.os-host-transition + > .os-scrollbar-horizontal + > .os-scrollbar-track + > .os-scrollbar-handle:before { + transition: height 0.3s, margin-top 0.3s, background 0.2s; +} + +.os-theme-thin + > .os-scrollbar-horizontal + > .os-scrollbar-track + > .os-scrollbar-handle:hover:before, +.os-theme-thin + > .os-scrollbar-horizontal + > .os-scrollbar-track + > .os-scrollbar-handle.active:before { + height: 4px; + margin-top: -2px; +} + +.os-theme-thin > .os-scrollbar-horizontal > .os-scrollbar-track:before, +.os-theme-thin + > .os-scrollbar-horizontal + > .os-scrollbar-track + > .os-scrollbar-handle:before { + left: 0; + right: 0; + height: 2px; + top: 50%; + margin-top: -1px; +} + +.os-theme-thin > .os-scrollbar > .os-scrollbar-track { + background: transparent; +} + +.os-theme-thin > .os-scrollbar-horizontal > .os-scrollbar-track:before, +.os-theme-thin > .os-scrollbar-vertical > .os-scrollbar-track:before { + content: ''; + display: block; + position: absolute; + @apply bg-black/10 dark:bg-gray-500; +} + +.os-theme-thin > .os-scrollbar-vertical > .os-scrollbar-track:before, +.os-theme-thin + > .os-scrollbar-vertical + > .os-scrollbar-track + > .os-scrollbar-handle:before { + top: 0; + bottom: 0; + width: 2px; + left: 50%; + margin-left: -1px; +} + +.os-theme-thin + > .os-scrollbar-vertical + > 
.os-scrollbar-track + > .os-scrollbar-handle { + width: 100%; + min-height: 15px; +} + +.os-theme-thin > .os-scrollbar-vertical > .os-scrollbar-track:before, +.os-theme-thin + > .os-scrollbar-vertical + > .os-scrollbar-track + > .os-scrollbar-handle:before { + top: 0; + bottom: 0; + width: 2px; + left: 50%; + margin-left: -1px; + border-radius: 15px; + @apply bg-black/10 dark:bg-gray-600; +} +.os-theme-thin + > .os-scrollbar + > .os-scrollbar-track + > .os-scrollbar-handle:before { + content: ''; + display: block; + position: absolute; + border-radius: 10px; + @apply bg-black/30 dark:bg-gray-800; +} + +.os-theme-thin + > .os-scrollbar-vertical + > .os-scrollbar-track + > .os-scrollbar-handle:hover:before, +.os-theme-thin + > .os-scrollbar-vertical + > .os-scrollbar-track + > .os-scrollbar-handle.active:before { + width: 2px; + margin-left: -1px; + background: #666; +} + +.os-theme-thin.os-host-transition + > .os-scrollbar-vertical + > .os-scrollbar-track + > .os-scrollbar-handle:before { + -webkit-transition: width 0.3s, margin-left 0.3s, background 0.2s; + transition: width 0.3s, margin-left 0.3s, background 0.2s; +} + +/* ScrollBar Height Calc */ +.cart-scrollbar { + -webkit-overflow-scrolling: touch; + max-height: calc(100vh - 160px); +} +/* .table-scrollbar { + background-color: green; +} */ +.table-scrollbar::-webkit-scrollbar { + -webkit-appearance: none; +} + +.table-scrollbar::-webkit-scrollbar-thumb { + background-color: rgba(0, 0, 0, 0.075); + border-radius: 10px; + border: 2px solid red; +} diff --git a/website/src/assets/images/404-dark.svg b/website/src/assets/images/404-dark.svg new file mode 100644 index 000000000..8e244f9a3 --- /dev/null +++ b/website/src/assets/images/404-dark.svg @@ -0,0 +1,69 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/website/src/assets/images/404-light.svg b/website/src/assets/images/404-light.svg new file mode 100644 index 000000000..9ec1518bc --- /dev/null +++ b/website/src/assets/images/404-light.svg @@ -0,0 +1,69 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/website/src/assets/images/anchor.svg b/website/src/assets/images/anchor.svg new file mode 100644 index 000000000..f873c4cfd --- /dev/null +++ b/website/src/assets/images/anchor.svg @@ -0,0 +1,6 @@ + + + + + + diff --git a/website/src/assets/images/author.jpg b/website/src/assets/images/author.jpg new file mode 100644 index 000000000..a59251643 Binary files /dev/null and b/website/src/assets/images/author.jpg differ diff --git a/website/src/assets/images/avalanche.svg b/website/src/assets/images/avalanche.svg new file mode 100644 index 000000000..261f2f4b4 --- /dev/null +++ b/website/src/assets/images/avalanche.svg @@ -0,0 +1,4 @@ + + + + diff --git a/website/src/assets/images/avatar/1.png b/website/src/assets/images/avatar/1.png new file mode 100644 index 000000000..d83a7a67f Binary files /dev/null and b/website/src/assets/images/avatar/1.png differ diff --git a/website/src/assets/images/avatar/10.jpg b/website/src/assets/images/avatar/10.jpg new file mode 100644 index 000000000..df643d318 Binary files /dev/null and b/website/src/assets/images/avatar/10.jpg differ diff --git a/website/src/assets/images/avatar/11.jpg b/website/src/assets/images/avatar/11.jpg new file mode 100644 index 000000000..1c69019ed Binary files /dev/null and b/website/src/assets/images/avatar/11.jpg differ 
diff --git a/website/src/assets/images/avatar/2.png b/website/src/assets/images/avatar/2.png new file mode 100644 index 000000000..b818a312f Binary files /dev/null and b/website/src/assets/images/avatar/2.png differ diff --git a/website/src/assets/images/avatar/3.png b/website/src/assets/images/avatar/3.png new file mode 100644 index 000000000..eb6419722 Binary files /dev/null and b/website/src/assets/images/avatar/3.png differ diff --git a/website/src/assets/images/avatar/4.png b/website/src/assets/images/avatar/4.png new file mode 100644 index 000000000..23baa98b3 Binary files /dev/null and b/website/src/assets/images/avatar/4.png differ diff --git a/website/src/assets/images/avatar/5.png b/website/src/assets/images/avatar/5.png new file mode 100644 index 000000000..f62a15d7f Binary files /dev/null and b/website/src/assets/images/avatar/5.png differ diff --git a/website/src/assets/images/avatar/6.png b/website/src/assets/images/avatar/6.png new file mode 100644 index 000000000..841e06d8b Binary files /dev/null and b/website/src/assets/images/avatar/6.png differ diff --git a/website/src/assets/images/avatar/7.png b/website/src/assets/images/avatar/7.png new file mode 100644 index 000000000..a734fed8e Binary files /dev/null and b/website/src/assets/images/avatar/7.png differ diff --git a/website/src/assets/images/avatar/8.jpg b/website/src/assets/images/avatar/8.jpg new file mode 100644 index 000000000..258ba83f6 Binary files /dev/null and b/website/src/assets/images/avatar/8.jpg differ diff --git a/website/src/assets/images/avatar/9.jpg b/website/src/assets/images/avatar/9.jpg new file mode 100644 index 000000000..4d50707f8 Binary files /dev/null and b/website/src/assets/images/avatar/9.jpg differ diff --git a/website/src/assets/images/bank.svg b/website/src/assets/images/bank.svg new file mode 100644 index 000000000..ae7b8bfb6 --- /dev/null +++ b/website/src/assets/images/bank.svg @@ -0,0 +1,3 @@ + + + diff --git a/website/src/assets/images/cappasity.svg b/website/src/assets/images/cappasity.svg new file mode 100644 index 000000000..536dbaed5 --- /dev/null +++ b/website/src/assets/images/cappasity.svg @@ -0,0 +1,10 @@ + + + + + + + + + + diff --git a/website/src/assets/images/coin/binance.svg b/website/src/assets/images/coin/binance.svg new file mode 100644 index 000000000..ff659f126 --- /dev/null +++ b/website/src/assets/images/coin/binance.svg @@ -0,0 +1,7 @@ + + + + + + + diff --git a/website/src/assets/images/coin/bitcoin.svg b/website/src/assets/images/coin/bitcoin.svg new file mode 100644 index 000000000..5d3c7fcf6 --- /dev/null +++ b/website/src/assets/images/coin/bitcoin.svg @@ -0,0 +1,3 @@ + + + diff --git a/website/src/assets/images/coin/cardano.svg b/website/src/assets/images/coin/cardano.svg new file mode 100644 index 000000000..71ca7205e --- /dev/null +++ b/website/src/assets/images/coin/cardano.svg @@ -0,0 +1,3 @@ + + + diff --git a/website/src/assets/images/coin/tether.svg b/website/src/assets/images/coin/tether.svg new file mode 100644 index 000000000..0424aa6d6 --- /dev/null +++ b/website/src/assets/images/coin/tether.svg @@ -0,0 +1,3 @@ + + + diff --git a/website/src/assets/images/coin/usd-coin.svg b/website/src/assets/images/coin/usd-coin.svg new file mode 100644 index 000000000..b51fba8b5 --- /dev/null +++ b/website/src/assets/images/coin/usd-coin.svg @@ -0,0 +1,6 @@ + + + + + + diff --git a/website/src/assets/images/collection/collection-1.jpg b/website/src/assets/images/collection/collection-1.jpg new file mode 100644 index 000000000..d596b3664 Binary files 
/dev/null and b/website/src/assets/images/collection/collection-1.jpg differ diff --git a/website/src/assets/images/collection/collection-2.jpg b/website/src/assets/images/collection/collection-2.jpg new file mode 100644 index 000000000..a99a92789 Binary files /dev/null and b/website/src/assets/images/collection/collection-2.jpg differ diff --git a/website/src/assets/images/collection/collection-3.jpg b/website/src/assets/images/collection/collection-3.jpg new file mode 100644 index 000000000..f000fba7a Binary files /dev/null and b/website/src/assets/images/collection/collection-3.jpg differ diff --git a/website/src/assets/images/collection/collection-4.jpg b/website/src/assets/images/collection/collection-4.jpg new file mode 100644 index 000000000..d72423e98 Binary files /dev/null and b/website/src/assets/images/collection/collection-4.jpg differ diff --git a/website/src/assets/images/collection/collection-5.jpg b/website/src/assets/images/collection/collection-5.jpg new file mode 100644 index 000000000..e7c0da59b Binary files /dev/null and b/website/src/assets/images/collection/collection-5.jpg differ diff --git a/website/src/assets/images/collection/collection-6.jpg b/website/src/assets/images/collection/collection-6.jpg new file mode 100644 index 000000000..eff7ad9ef Binary files /dev/null and b/website/src/assets/images/collection/collection-6.jpg differ diff --git a/website/src/assets/images/collection/grid/1.jpg b/website/src/assets/images/collection/grid/1.jpg new file mode 100644 index 000000000..21508797b Binary files /dev/null and b/website/src/assets/images/collection/grid/1.jpg differ diff --git a/website/src/assets/images/collection/grid/2.jpg b/website/src/assets/images/collection/grid/2.jpg new file mode 100644 index 000000000..6e7246c49 Binary files /dev/null and b/website/src/assets/images/collection/grid/2.jpg differ diff --git a/website/src/assets/images/collection/grid/3.jpg b/website/src/assets/images/collection/grid/3.jpg new file mode 100644 index 000000000..ee8156c50 Binary files /dev/null and b/website/src/assets/images/collection/grid/3.jpg differ diff --git a/website/src/assets/images/collection/grid/4.jpg b/website/src/assets/images/collection/grid/4.jpg new file mode 100644 index 000000000..9dada3d41 Binary files /dev/null and b/website/src/assets/images/collection/grid/4.jpg differ diff --git a/website/src/assets/images/collection/grid/5.jpg b/website/src/assets/images/collection/grid/5.jpg new file mode 100644 index 000000000..273732850 Binary files /dev/null and b/website/src/assets/images/collection/grid/5.jpg differ diff --git a/website/src/assets/images/collection/grid/6.jpg b/website/src/assets/images/collection/grid/6.jpg new file mode 100644 index 000000000..f0493db1f Binary files /dev/null and b/website/src/assets/images/collection/grid/6.jpg differ diff --git a/website/src/assets/images/collection/grid/7.jpg b/website/src/assets/images/collection/grid/7.jpg new file mode 100644 index 000000000..4aa4f0d8e Binary files /dev/null and b/website/src/assets/images/collection/grid/7.jpg differ diff --git a/website/src/assets/images/collection/grid/8.jpg b/website/src/assets/images/collection/grid/8.jpg new file mode 100644 index 000000000..412c38927 Binary files /dev/null and b/website/src/assets/images/collection/grid/8.jpg differ diff --git a/website/src/assets/images/currency/bitcoin.svg b/website/src/assets/images/currency/bitcoin.svg new file mode 100644 index 000000000..4473a885c --- /dev/null +++ b/website/src/assets/images/currency/bitcoin.svg @@ 
-0,0 +1,4 @@ + + + + diff --git a/website/src/assets/images/currency/ethereum.svg b/website/src/assets/images/currency/ethereum.svg new file mode 100644 index 000000000..b2269dc82 --- /dev/null +++ b/website/src/assets/images/currency/ethereum.svg @@ -0,0 +1,9 @@ + + + + + + + + + diff --git a/website/src/assets/images/discord.svg b/website/src/assets/images/discord.svg new file mode 100644 index 000000000..4bbbbc9d1 --- /dev/null +++ b/website/src/assets/images/discord.svg @@ -0,0 +1,3 @@ + + + diff --git a/website/src/assets/images/forum.svg b/website/src/assets/images/forum.svg new file mode 100644 index 000000000..16903b86c --- /dev/null +++ b/website/src/assets/images/forum.svg @@ -0,0 +1,3 @@ + + + diff --git a/website/src/assets/images/gas-station 1.svg b/website/src/assets/images/gas-station 1.svg new file mode 100644 index 000000000..14b604764 --- /dev/null +++ b/website/src/assets/images/gas-station 1.svg @@ -0,0 +1,10 @@ + + + + + + + + + + diff --git a/website/src/assets/images/icon-128.png b/website/src/assets/images/icon-128.png new file mode 100644 index 000000000..3bd20930b Binary files /dev/null and b/website/src/assets/images/icon-128.png differ diff --git a/website/src/assets/images/icon-16.png b/website/src/assets/images/icon-16.png new file mode 100644 index 000000000..059aa6fb7 Binary files /dev/null and b/website/src/assets/images/icon-16.png differ diff --git a/website/src/assets/images/icon-19.png b/website/src/assets/images/icon-19.png new file mode 100644 index 000000000..1e1c2341f Binary files /dev/null and b/website/src/assets/images/icon-19.png differ diff --git a/website/src/assets/images/icon-38.png b/website/src/assets/images/icon-38.png new file mode 100644 index 000000000..e75f56fa2 Binary files /dev/null and b/website/src/assets/images/icon-38.png differ diff --git a/website/src/assets/images/lion_slug.jpeg b/website/src/assets/images/lion_slug.jpeg new file mode 100644 index 000000000..f96d4fa88 Binary files /dev/null and b/website/src/assets/images/lion_slug.jpeg differ diff --git a/website/src/assets/images/lion_slug2.jpeg b/website/src/assets/images/lion_slug2.jpeg new file mode 100644 index 000000000..05241c4b5 Binary files /dev/null and b/website/src/assets/images/lion_slug2.jpeg differ diff --git a/website/src/assets/images/lion_slug3.jpeg b/website/src/assets/images/lion_slug3.jpeg new file mode 100644 index 000000000..4b94a4ef4 Binary files /dev/null and b/website/src/assets/images/lion_slug3.jpeg differ diff --git a/website/src/assets/images/logo-white.svg b/website/src/assets/images/logo-white.svg new file mode 100644 index 000000000..c3184bc50 --- /dev/null +++ b/website/src/assets/images/logo-white.svg @@ -0,0 +1,13 @@ + + + + + + + + + + + + + diff --git a/website/src/assets/images/logo.svg b/website/src/assets/images/logo.svg new file mode 100644 index 000000000..ac7e8a943 --- /dev/null +++ b/website/src/assets/images/logo.svg @@ -0,0 +1,13 @@ + + + + + + + + + + + + + diff --git a/website/src/assets/images/metamask.svg b/website/src/assets/images/metamask.svg new file mode 100644 index 000000000..6cb41ba98 --- /dev/null +++ b/website/src/assets/images/metamask.svg @@ -0,0 +1,61 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/website/src/assets/images/mirror.svg b/website/src/assets/images/mirror.svg new file mode 100644 index 000000000..7bc88ae73 --- /dev/null +++ b/website/src/assets/images/mirror.svg @@ -0,0 +1,9 @@ + + + + + + + + + diff --git 
a/website/src/assets/images/nft/nft-1.jpg b/website/src/assets/images/nft/nft-1.jpg new file mode 100644 index 000000000..14e2fb709 Binary files /dev/null and b/website/src/assets/images/nft/nft-1.jpg differ diff --git a/website/src/assets/images/nft/nft-2.jpg b/website/src/assets/images/nft/nft-2.jpg new file mode 100644 index 000000000..f75ea729b Binary files /dev/null and b/website/src/assets/images/nft/nft-2.jpg differ diff --git a/website/src/assets/images/nft/nft-3.jpg b/website/src/assets/images/nft/nft-3.jpg new file mode 100644 index 000000000..af26b0c9b Binary files /dev/null and b/website/src/assets/images/nft/nft-3.jpg differ diff --git a/website/src/assets/images/nft/nft-4.jpg b/website/src/assets/images/nft/nft-4.jpg new file mode 100644 index 000000000..5997c39e0 Binary files /dev/null and b/website/src/assets/images/nft/nft-4.jpg differ diff --git a/website/src/assets/images/pages/collection.png b/website/src/assets/images/pages/collection.png new file mode 100644 index 000000000..71491f80e Binary files /dev/null and b/website/src/assets/images/pages/collection.png differ diff --git a/website/src/assets/images/pages/create-nft.png b/website/src/assets/images/pages/create-nft.png new file mode 100644 index 000000000..fc62cc35f Binary files /dev/null and b/website/src/assets/images/pages/create-nft.png differ diff --git a/website/src/assets/images/pages/create-proposal.png b/website/src/assets/images/pages/create-proposal.png new file mode 100644 index 000000000..438c7a76c Binary files /dev/null and b/website/src/assets/images/pages/create-proposal.png differ diff --git a/website/src/assets/images/pages/farm.png b/website/src/assets/images/pages/farm.png new file mode 100644 index 000000000..90d8229c3 Binary files /dev/null and b/website/src/assets/images/pages/farm.png differ diff --git a/website/src/assets/images/pages/history.png b/website/src/assets/images/pages/history.png new file mode 100644 index 000000000..2707b4eb4 Binary files /dev/null and b/website/src/assets/images/pages/history.png differ diff --git a/website/src/assets/images/pages/home.png b/website/src/assets/images/pages/home.png new file mode 100644 index 000000000..a1e1e3197 Binary files /dev/null and b/website/src/assets/images/pages/home.png differ diff --git a/website/src/assets/images/pages/liquidity-position.png b/website/src/assets/images/pages/liquidity-position.png new file mode 100644 index 000000000..dc233b9ea Binary files /dev/null and b/website/src/assets/images/pages/liquidity-position.png differ diff --git a/website/src/assets/images/pages/liquidity.png b/website/src/assets/images/pages/liquidity.png new file mode 100644 index 000000000..26d15f421 Binary files /dev/null and b/website/src/assets/images/pages/liquidity.png differ diff --git a/website/src/assets/images/pages/nft-details.png b/website/src/assets/images/pages/nft-details.png new file mode 100644 index 000000000..2bcf32049 Binary files /dev/null and b/website/src/assets/images/pages/nft-details.png differ diff --git a/website/src/assets/images/pages/portfolio.png b/website/src/assets/images/pages/portfolio.png new file mode 100644 index 000000000..a71777693 Binary files /dev/null and b/website/src/assets/images/pages/portfolio.png differ diff --git a/website/src/assets/images/pages/proposal.png b/website/src/assets/images/pages/proposal.png new file mode 100644 index 000000000..d9e4b8cfd Binary files /dev/null and b/website/src/assets/images/pages/proposal.png differ diff --git a/website/src/assets/images/pages/search.png 
b/website/src/assets/images/pages/search.png new file mode 100644 index 000000000..2bb792278 Binary files /dev/null and b/website/src/assets/images/pages/search.png differ diff --git a/website/src/assets/images/pages/swap.png b/website/src/assets/images/pages/swap.png new file mode 100644 index 000000000..a86eb020b Binary files /dev/null and b/website/src/assets/images/pages/swap.png differ diff --git a/website/src/assets/images/pages/vote.png b/website/src/assets/images/pages/vote.png new file mode 100644 index 000000000..36262f350 Binary files /dev/null and b/website/src/assets/images/pages/vote.png differ diff --git a/website/src/assets/images/pancake-bunny.svg b/website/src/assets/images/pancake-bunny.svg new file mode 100644 index 000000000..c13dd09b5 --- /dev/null +++ b/website/src/assets/images/pancake-bunny.svg @@ -0,0 +1,4 @@ diff --git a/website/src/assets/images/portfolio/claimable.svg b/website/src/assets/images/portfolio/claimable.svg new file mode 100644 index 000000000..8aa36e2de --- /dev/null +++ b/website/src/assets/images/portfolio/claimable.svg @@ -0,0 +1,8 @@ diff --git a/website/src/assets/images/portfolio/curve.svg b/website/src/assets/images/portfolio/curve.svg new file mode 100644 index 000000000..ace729fe5 --- /dev/null +++ b/website/src/assets/images/portfolio/curve.svg @@ -0,0 +1,1531 @@ diff --git a/website/src/assets/images/portfolio/deposit.svg b/website/src/assets/images/portfolio/deposit.svg new file mode 100644 index 000000000..ad10aaef6 --- /dev/null +++ b/website/src/assets/images/portfolio/deposit.svg @@ -0,0 +1,8 @@ diff --git a/website/src/assets/images/portfolio/nft.svg b/website/src/assets/images/portfolio/nft.svg new file mode 100644 index 000000000..13fc340b9 --- /dev/null +++ b/website/src/assets/images/portfolio/nft.svg @@ -0,0 +1,7 @@ diff --git a/website/src/assets/images/portfolio/pancake.svg b/website/src/assets/images/portfolio/pancake.svg new file mode 100644 index 000000000..0ae8b52c6 --- /dev/null +++ b/website/src/assets/images/portfolio/pancake.svg @@ -0,0 +1,8 @@ diff --git a/website/src/assets/images/portfolio/poolto-gether.svg b/website/src/assets/images/portfolio/poolto-gether.svg new file mode 100644 index 000000000..f517f8f34 --- /dev/null +++ b/website/src/assets/images/portfolio/poolto-gether.svg @@ -0,0 +1,10 @@ diff --git a/website/src/assets/images/portfolio/uniswap.svg b/website/src/assets/images/portfolio/uniswap.svg new file mode 100644 index 000000000..3416c2987 --- /dev/null +++ b/website/src/assets/images/portfolio/uniswap.svg @@ -0,0 +1,19 @@ diff --git a/website/src/assets/images/portfolio/wallet.svg b/website/src/assets/images/portfolio/wallet.svg new file mode 100644 index 000000000..c2dfc0275 --- /dev/null +++ b/website/src/assets/images/portfolio/wallet.svg @@ -0,0 +1,11 @@ diff --git a/website/src/assets/images/profile-cover.jpg b/website/src/assets/images/profile-cover.jpg new file mode 100644 index 000000000..a0145691a Binary files /dev/null and b/website/src/assets/images/profile-cover.jpg differ diff --git a/website/src/assets/images/short-kings-logo.png b/website/src/assets/images/short-kings-logo.png new file mode 100644 index 000000000..bb8ad5ac7 Binary files /dev/null and b/website/src/assets/images/short-kings-logo.png differ diff --git a/website/src/assets/images/single-nft.jpg b/website/src/assets/images/single-nft.jpg new file mode 100644 index 000000000..42a4aa7d1 Binary files /dev/null and b/website/src/assets/images/single-nft.jpg differ diff --git a/website/src/assets/images/slug_lion4.jpeg b/website/src/assets/images/slug_lion4.jpeg new file mode 100644 index 000000000..ae83691f8 Binary files /dev/null and b/website/src/assets/images/slug_lion4.jpeg differ diff --git a/website/src/assets/images/user-1.png b/website/src/assets/images/user-1.png new file mode 100644 index 000000000..3014d980b Binary files /dev/null and
b/website/src/assets/images/user-1.png differ diff --git a/website/src/assets/images/user-2.png b/website/src/assets/images/user-2.png new file mode 100644 index 000000000..034c5910d Binary files /dev/null and b/website/src/assets/images/user-2.png differ diff --git a/website/src/assets/images/user-3.png b/website/src/assets/images/user-3.png new file mode 100644 index 000000000..0759fc6a7 Binary files /dev/null and b/website/src/assets/images/user-3.png differ diff --git a/website/src/assets/images/user-4.png b/website/src/assets/images/user-4.png new file mode 100644 index 000000000..7bf722315 Binary files /dev/null and b/website/src/assets/images/user-4.png differ diff --git a/website/src/assets/images/vote-pool.svg b/website/src/assets/images/vote-pool.svg new file mode 100644 index 000000000..f72b59055 --- /dev/null +++ b/website/src/assets/images/vote-pool.svg @@ -0,0 +1,11 @@ diff --git a/website/src/components/defaultLayout.tsx b/website/src/components/defaultLayout.tsx new file mode 100644 index 000000000..759f370ba --- /dev/null +++ b/website/src/components/defaultLayout.tsx @@ -0,0 +1,10 @@ +import Sidebar from "../layouts/dashboard/_sidebar"; + +const DefaultLayout = ({ children }: {children: React.ReactNode}) => ( +
+ +
{children}
+
+); + +export default DefaultLayout; \ No newline at end of file diff --git a/website/src/components/drawer-views/container.tsx b/website/src/components/drawer-views/container.tsx new file mode 100644 index 000000000..51e7613ff --- /dev/null +++ b/website/src/components/drawer-views/container.tsx @@ -0,0 +1,65 @@ +import { Fragment, useEffect } from 'react'; +import dynamic from 'next/dynamic'; +import { useRouter } from 'next/router'; +import { Dialog } from '@/components/ui/dialog'; +import { Transition } from '@/components/ui/transition'; +import { DRAWER_VIEW, useDrawer } from '@/components/drawer-views/context'; + +const Sidebar = dynamic(() => import('@/layouts/dashboard/_sidebar')); +const DrawerMenu = dynamic(() => import('@/layouts/_layout-menu')); + +function renderDrawerContent(view: DRAWER_VIEW | string) { + switch (view) { + case 'DASHBOARD_SIDEBAR': + return ; + default: + return ; + } +} + +export default function DrawersContainer() { + const router = useRouter(); + const { view, isOpen, closeDrawer } = useDrawer(); + useEffect(() => { + // close search modal when route change + router.events.on('routeChangeStart', closeDrawer); + return () => { + router.events.off('routeChangeStart', closeDrawer); + }; + // eslint-disable-next-line react-hooks/exhaustive-deps + }, []); + return ( + + + + + + +
+ {view && renderDrawerContent(view)} +
+
+
+
+ ); +} diff --git a/website/src/components/drawer-views/context.tsx b/website/src/components/drawer-views/context.tsx new file mode 100644 index 000000000..8da9ee00c --- /dev/null +++ b/website/src/components/drawer-views/context.tsx @@ -0,0 +1,17 @@ +'use client' +import { atom, useAtom } from 'jotai'; + +export type DRAWER_VIEW = 'DASHBOARD_SIDEBAR' | 'DRAWER_MENU' | 'DRAWER_SEARCH'; +const drawerAtom = atom({ isOpen: false, view: 'DASHBOARD_SIDEBAR' }); + +export function useDrawer() { + const [state, setState] = useAtom(drawerAtom); + const openDrawer = (view: DRAWER_VIEW) => + setState({ ...state, isOpen: true, view }); + const closeDrawer = () => setState({ ...state, isOpen: false }); + return { + ...state, + openDrawer, + closeDrawer, + }; +} diff --git a/website/src/components/hooks/useWasm.ts b/website/src/components/hooks/useWasm.ts new file mode 100644 index 000000000..de6f37891 --- /dev/null +++ b/website/src/components/hooks/useWasm.ts @@ -0,0 +1,15 @@ +import { useEffect, useState } from "react"; + +export const useWasm = () => { + const [wasm, setWasm] = useState(null); + + useEffect(() => { + if (wasm === null) { + import("wasm").then((module) => { + module.default() + return setWasm(module) + }); + } + }, []); // eslint-disable-line react-hooks/exhaustive-deps + return wasm; +}; \ No newline at end of file diff --git a/website/src/components/icons/check.tsx b/website/src/components/icons/check.tsx new file mode 100644 index 000000000..020be2b34 --- /dev/null +++ b/website/src/components/icons/check.tsx @@ -0,0 +1,10 @@ +export const Check: React.FC> = (props) => { + return ( + + + + ); +}; diff --git a/website/src/components/icons/chevron-down.tsx b/website/src/components/icons/chevron-down.tsx new file mode 100644 index 000000000..7b33d6d4d --- /dev/null +++ b/website/src/components/icons/chevron-down.tsx @@ -0,0 +1,19 @@ +export function ChevronDown(props: React.SVGAttributes<{}>) { + return ( + + + + ); +} diff --git a/website/src/components/icons/chevron-forward.tsx b/website/src/components/icons/chevron-forward.tsx new file mode 100644 index 000000000..341c5c0b0 --- /dev/null +++ b/website/src/components/icons/chevron-forward.tsx @@ -0,0 +1,19 @@ +export const ChevronForward: React.FC> = (props) => { + return ( + + + + ); +}; diff --git a/website/src/components/icons/close.tsx b/website/src/components/icons/close.tsx new file mode 100644 index 000000000..8ee6374a5 --- /dev/null +++ b/website/src/components/icons/close.tsx @@ -0,0 +1,25 @@ +export function Close(props: React.SVGAttributes<{}>) { + return ( + + + + + ); +} diff --git a/website/src/components/icons/copy.tsx b/website/src/components/icons/copy.tsx new file mode 100644 index 000000000..303242933 --- /dev/null +++ b/website/src/components/icons/copy.tsx @@ -0,0 +1,14 @@ +export const Copy: React.FC> = (props) => { + return ( + + + + + ); +}; diff --git a/website/src/components/icons/exchange.tsx b/website/src/components/icons/exchange.tsx new file mode 100644 index 000000000..49261c4d4 --- /dev/null +++ b/website/src/components/icons/exchange.tsx @@ -0,0 +1,21 @@ +export function ExchangeIcon(props: React.SVGAttributes<{}>) { + return ( + + + + + ); +} diff --git a/website/src/components/icons/export-icon.tsx b/website/src/components/icons/export-icon.tsx new file mode 100644 index 000000000..388b52701 --- /dev/null +++ b/website/src/components/icons/export-icon.tsx @@ -0,0 +1,19 @@ +export function ExportIcon(props: React.SVGAttributes<{}>) { + return ( + + + + + ); +} diff --git 
a/website/src/components/icons/farm.tsx b/website/src/components/icons/farm.tsx new file mode 100644 index 000000000..9164303dc --- /dev/null +++ b/website/src/components/icons/farm.tsx @@ -0,0 +1,35 @@ +export function FarmIcon(props: React.SVGAttributes<{}>) { + return ( + + + + + + + + ); +} diff --git a/website/src/components/icons/home.tsx b/website/src/components/icons/home.tsx new file mode 100644 index 000000000..a942ee1a8 --- /dev/null +++ b/website/src/components/icons/home.tsx @@ -0,0 +1,19 @@ +export const HomeIcon: React.FC> = (props) => { + return ( + + + + ); +}; diff --git a/website/src/components/icons/left-align.tsx b/website/src/components/icons/left-align.tsx new file mode 100644 index 000000000..80df99b7a --- /dev/null +++ b/website/src/components/icons/left-align.tsx @@ -0,0 +1,25 @@ +export function LeftAlign(props: React.SVGAttributes<{}>) { + return ( + + + + + + ); +} diff --git a/website/src/components/icons/moon.tsx b/website/src/components/icons/moon.tsx new file mode 100644 index 000000000..8e3f9d6cb --- /dev/null +++ b/website/src/components/icons/moon.tsx @@ -0,0 +1,24 @@ +export function Moon(props: React.SVGAttributes<{}>) { + return ( + + + + + + + + + + + ); +} diff --git a/website/src/components/icons/option.tsx b/website/src/components/icons/option.tsx new file mode 100644 index 000000000..afe283901 --- /dev/null +++ b/website/src/components/icons/option.tsx @@ -0,0 +1,37 @@ +export function OptionIcon(props: React.SVGAttributes<{}>) { + return ( + + + + + + + + + ); +} diff --git a/website/src/components/icons/oven.tsx b/website/src/components/icons/oven.tsx new file mode 100644 index 000000000..4625e2c6a --- /dev/null +++ b/website/src/components/icons/oven.tsx @@ -0,0 +1,25 @@ +export function OvenIcon(props: React.SVGAttributes<{}>) { + return ( + + + + + + ); +} diff --git a/website/src/components/icons/range-icon.tsx b/website/src/components/icons/range-icon.tsx new file mode 100644 index 000000000..37be1db2f --- /dev/null +++ b/website/src/components/icons/range-icon.tsx @@ -0,0 +1,25 @@ +export function RangeIcon(props: React.SVGAttributes<{}>) { + return ( + + + + + ); +} diff --git a/website/src/components/icons/right-align.tsx b/website/src/components/icons/right-align.tsx new file mode 100644 index 000000000..74470237d --- /dev/null +++ b/website/src/components/icons/right-align.tsx @@ -0,0 +1,25 @@ +export function RightAlign(props: React.SVGAttributes<{}>) { + return ( + + + + + + ); +} diff --git a/website/src/components/icons/search.tsx b/website/src/components/icons/search.tsx new file mode 100644 index 000000000..a38060e1c --- /dev/null +++ b/website/src/components/icons/search.tsx @@ -0,0 +1,19 @@ +export const SearchIcon: React.FC> = (props) => { + return ( + + + + ); +}; diff --git a/website/src/components/icons/sun.tsx b/website/src/components/icons/sun.tsx new file mode 100644 index 000000000..c285332b4 --- /dev/null +++ b/website/src/components/icons/sun.tsx @@ -0,0 +1,24 @@ +export function Sun(props: React.SVGAttributes<{}>) { + return ( + + + + + + + + + + + ); +} diff --git a/website/src/components/icons/unlocked.tsx b/website/src/components/icons/unlocked.tsx new file mode 100644 index 000000000..a9b20dc3a --- /dev/null +++ b/website/src/components/icons/unlocked.tsx @@ -0,0 +1,23 @@ +export function Unlocked(props: React.SVGAttributes<{}>) { + return ( + + + + + ); +} diff --git a/website/src/components/ui/base.tsx b/website/src/components/ui/base.tsx new file mode 100644 index 000000000..b4b97e78e --- /dev/null +++ 
b/website/src/components/ui/base.tsx @@ -0,0 +1,129 @@ +// @ts-nocheck +import { useState, useEffect } from 'react'; +import dynamic from 'next/dynamic'; +import { useRouter } from 'next/router'; +import { motion, AnimatePresence } from 'framer-motion'; +import cn from 'classnames'; +import routes from '@/config/routes'; +import ActiveLink from '@/components/ui/links/active-link'; +import AnchorLink from '@/components/ui/links/anchor-link'; +import { RangeIcon } from '@/components/icons/range-icon'; +import { ExportIcon } from '@/components/icons/export-icon'; +import { useBreakpoint } from '@/lib/hooks/use-breakpoint'; +import { useIsMounted } from '@/lib/hooks/use-is-mounted'; +import { fadeInBottom } from '@/lib/framer-motion/fade-in-bottom'; +// dynamic import +const Listbox = dynamic(() => import('@/components/ui/list-box')); + +const baseMenu = [ + { + name: 'Sign', + value: routes.sign, + }, + { + name: 'Decrypt', + value: routes.decrypt, + }, + { + name: 'Records', + value: routes.records, + }, + { + name: 'Transfer', + value: routes.transfer, + }, + { + name: 'Execute', + value: routes.execute, + }, + { + name: 'Deploy', + value: routes.deploy, + }, +]; + +function ActiveNavLink({ href, title, isActive, className }: any) { + return ( + + {title} + {isActive && ( + + )} + + ); +} + +export default function Base({ children }: React.PropsWithChildren<{}>) { + const router = useRouter(); + const isMounted = useIsMounted(); + const breakpoint = useBreakpoint(); + const currentPath = baseMenu.findIndex( + (item) => item.value === router.pathname + ); + let [selectedMenuItem, setSelectedMenuItem] = useState(baseMenu[0]); + function handleRouteOnSelect(path: string) { + router.push(path); + } + useEffect(() => { + setSelectedMenuItem(baseMenu[currentPath]); + }, [currentPath]); + return ( +
+
+ + + + {children} + + +
+
+ ); +} diff --git a/website/src/components/ui/button/button-drip.tsx b/website/src/components/ui/button/button-drip.tsx new file mode 100644 index 000000000..19f9a200b --- /dev/null +++ b/website/src/components/ui/button/button-drip.tsx @@ -0,0 +1,53 @@ +import { useEffect, useRef } from 'react'; +import cn from 'classnames'; + +interface ButtonDripTypes { + x: number; + y: number; + color: string; + fullWidth?: boolean; + onCompleted: () => void; +} + +export default function ButtonDrip({ + x = 0, + y = 0, + color, + fullWidth, + onCompleted, +}: ButtonDripTypes) { + const dripRef = useRef(null); + let top = Number.isNaN(+y) ? 0 : y - 10; + let left = Number.isNaN(+x) ? 0 : x - 10; + useEffect(() => { + if (!dripRef.current) return; + dripRef.current.addEventListener('animationend', onCompleted); + return () => { + if (!dripRef.current) return; + // eslint-disable-next-line react-hooks/exhaustive-deps + dripRef.current.removeEventListener('animationend', onCompleted); + }; + }); + return ( + + + + + + + + + + ); +} + +ButtonDrip.displayName = 'ButtonDrip'; diff --git a/website/src/components/ui/button/button-loader.tsx b/website/src/components/ui/button/button-loader.tsx new file mode 100644 index 000000000..421c56eec --- /dev/null +++ b/website/src/components/ui/button/button-loader.tsx @@ -0,0 +1,25 @@ +import Loader, { + LoaderSizeTypes, + LoaderVariantTypes, +} from '@/components/ui/loader'; + +export default function ButtonLoader({ + size, + variant, +}: { + size: LoaderSizeTypes; + variant: LoaderVariantTypes; +}) { + return ( + + + + ); +} + +ButtonLoader.displayName = 'ButtonLoader'; diff --git a/website/src/components/ui/button/button.tsx b/website/src/components/ui/button/button.tsx new file mode 100644 index 000000000..c934fb05a --- /dev/null +++ b/website/src/components/ui/button/button.tsx @@ -0,0 +1,174 @@ +'use client' +import { useState, useRef, forwardRef, useImperativeHandle } from 'react'; +import cn from 'classnames'; +import ButtonDrip from '@/components/ui/button/button-drip'; +import ButtonLoader from '@/components/ui/button/button-loader'; +import { LoaderSizeTypes, LoaderVariantTypes } from '@/components/ui/loader'; +type ShapeNames = 'rounded' | 'pill' | 'circle'; +type VariantNames = 'ghost' | 'solid' | 'transparent'; +type ColorNames = + | 'primary' + | 'white' + | 'gray' + | 'success' + | 'info' + | 'warning' + | 'danger'; +type SizeNames = 'large' | 'medium' | 'small' | 'mini'; + +const shapes: Record = { + rounded: ['rounded-md sm:rounded-lg'], + pill: ['rounded-full'], + circle: ['rounded-full'], +}; +const variants: Record = { + ghost: ['bg-transparent'], + solid: ['text-white'], + transparent: ['bg-transparent hover:bg-gray-50 dark:hover:bg-gray-800'], +}; +const colors: Record = { + primary: ['text-brand', 'bg-brand', 'border-brand'], + white: ['text-gray-900', 'bg-white', 'border-white'], + gray: ['text-gray-900', 'bg-gray-100', 'border-gray-100'], + success: ['text-green-500', 'bg-green-500', 'border-green-500'], + info: ['text-blue-500', 'bg-blue-500', 'border-blue-500'], + warning: ['text-yellow-500', 'bg-yellow-500', 'border-yellow-500'], + danger: ['text-red-500', 'bg-red-500', 'border-red-500'], +}; +const sizes: Record = { + large: ['px-7 sm:px-9 h-11 sm:h-13', 'w-11 h-11 sm:w-13 sm:h-13'], + medium: ['px-5 sm:px-8 h-10 sm:h-12', 'h-10 w-10 sm:w-12 sm:h-12'], + small: ['px-7 h-10', 'w-10 h-10'], + mini: ['px-4 h-8', 'w-8 h-8'], +}; + +export interface ButtonProps + extends React.ButtonHTMLAttributes { + isLoading?: boolean; + disabled?: boolean; 
+ shape?: ShapeNames; + variant?: VariantNames; + color?: ColorNames; + size?: SizeNames; + fullWidth?: boolean; + loaderSize?: LoaderSizeTypes; + loaderVariant?: LoaderVariantTypes; + onClick?: React.MouseEventHandler; +} + +const Button = forwardRef( + ( + { + children, + className, + isLoading, + disabled, + fullWidth, + shape = 'pill', + variant = 'solid', + color = 'primary', + size = 'medium', + loaderSize = 'small', + loaderVariant = 'scaleUp', + onClick, + ...buttonProps + }, + ref: React.Ref + ) => { + let [dripShow, setDripShow] = useState(false); + let [dripX, setDripX] = useState(0); + let [dripY, setDripY] = useState(0); + const colorClassNames = colors[color]; + const sizeClassNames = sizes[size]; + const buttonRef = useRef(null); + useImperativeHandle(ref, () => buttonRef.current); + function dripCompletedHandle() { + setDripShow(false); + setDripX(0); + setDripY(0); + } + const clickHandler = (event: React.MouseEvent) => { + if (!isLoading && buttonRef.current) { + const rect = buttonRef.current.getBoundingClientRect(); + setDripShow(true); + setDripX(event.clientX - rect.left); + setDripY(event.clientY - rect.top); + } + onClick && onClick(event); + }; + + let buttonColorClassNames = ''; + let buttonDripColor = ''; + switch (variant) { + case 'ghost': + buttonColorClassNames = `border-2 border-solid ${colorClassNames[0]} ${colorClassNames[2]}`; + buttonDripColor = 'rgba(0, 0, 0, 0.1)'; + break; + + case 'transparent': + buttonColorClassNames = `${colorClassNames[0]} ${ + disabled || isLoading + ? '' + : 'hover:bg-gray-100 dark:hover:bg-gray-800 focus:bg-gray-100 dark:focus:bg-gray-800' + } `; + buttonDripColor = 'rgba(0, 0, 0, 0.1)'; + break; + + default: + buttonColorClassNames = `${colorClassNames[1]} ${colorClassNames[2]}`; + buttonDripColor = 'rgba(255, 255, 255, 0.3)'; + break; + } + + return ( + + ); + } +); + +Button.displayName = 'Button'; +export default Button; diff --git a/website/src/components/ui/button/index.ts b/website/src/components/ui/button/index.ts new file mode 100644 index 000000000..156d97121 --- /dev/null +++ b/website/src/components/ui/button/index.ts @@ -0,0 +1,2 @@ +export { default } from '@/components/ui/button/button'; +export type { ButtonProps } from '@/components/ui/button/button'; diff --git a/website/src/components/ui/collapsible-menu.tsx b/website/src/components/ui/collapsible-menu.tsx new file mode 100644 index 000000000..0af522c2c --- /dev/null +++ b/website/src/components/ui/collapsible-menu.tsx @@ -0,0 +1,110 @@ +'use client' +import { useState, useEffect } from 'react'; +import { usePathname } from 'next/navigation'; +import cn from 'classnames'; +import { motion } from 'framer-motion'; +import { useMeasure } from '@/lib/hooks/use-measure'; +import ActiveLink from '@/components/ui/links/active-link'; +import { ChevronDown } from '@/components/icons/chevron-down'; + +type MenuItemProps = { + name: string; + icon: React.ReactNode; + href: string; + dropdownItems?: DropdownItemProps[]; +}; + +type DropdownItemProps = { + name: string; + href: string; +}; + +export function MenuItem({ name, icon, href, dropdownItems }: MenuItemProps) { + let [isOpen, setIsOpen] = useState(false); + let [ref, { height }] = useMeasure(); + let pathname = usePathname(); + + let isChildrenActive = + dropdownItems && dropdownItems.some((item) => item.href === pathname); + + useEffect(() => { + if (isChildrenActive) { + setIsOpen(true); + } + // eslint-disable-next-line react-hooks/exhaustive-deps + }, []); + + return ( +
+ {dropdownItems?.length ? ( + <> +
setIsOpen(!isOpen)} + > + + {icon} + {name} + + + + + + {isChildrenActive && ( + + )} +
+ +
+
+ + {dropdownItems.map((item, index) => (
+ + {item.name} + +
+ ))}
+
+ + ) : ( + + {icon} + {name} + + {href === pathname && ( + + )} + + )} +
+ ); +} diff --git a/website/src/components/ui/dialog.tsx b/website/src/components/ui/dialog.tsx new file mode 100644 index 000000000..0c59af1fc --- /dev/null +++ b/website/src/components/ui/dialog.tsx @@ -0,0 +1 @@ +export { Dialog } from '@headlessui/react'; diff --git a/website/src/components/ui/hamburger.tsx b/website/src/components/ui/hamburger.tsx new file mode 100644 index 000000000..f5eb264f8 --- /dev/null +++ b/website/src/components/ui/hamburger.tsx @@ -0,0 +1,48 @@ +import Button, { ButtonProps } from '@/components/ui/button'; +interface HamburgerProps extends ButtonProps { + isOpen?: boolean; +} + +export default function Hamburger({ isOpen, ...props }: HamburgerProps) { + return ( + + ); +} diff --git a/website/src/components/ui/image.tsx b/website/src/components/ui/image.tsx new file mode 100644 index 000000000..893b13295 --- /dev/null +++ b/website/src/components/ui/image.tsx @@ -0,0 +1 @@ +export { default } from 'next/image'; diff --git a/website/src/components/ui/links/active-link.tsx b/website/src/components/ui/links/active-link.tsx new file mode 100644 index 000000000..954bfe586 --- /dev/null +++ b/website/src/components/ui/links/active-link.tsx @@ -0,0 +1,24 @@ +import type { LinkProps } from 'next/link'; +import { usePathname } from 'next/navigation'; +import cn from 'classnames'; +import AnchorLink from '@/components/ui/links/anchor-link'; + +interface ActiveLinkProps extends LinkProps { + activeClassName?: string; +} +const ActiveLink: React.FC< + ActiveLinkProps & Omit, 'href'> +> = ({ href, className, activeClassName = 'active', ...props }) => { + const pathname = usePathname(); + return ( + + ); +}; + +export default ActiveLink; diff --git a/website/src/components/ui/links/anchor-link.tsx b/website/src/components/ui/links/anchor-link.tsx new file mode 100644 index 000000000..a1c8c22cb --- /dev/null +++ b/website/src/components/ui/links/anchor-link.tsx @@ -0,0 +1,14 @@ +import type { LinkProps } from 'next/link'; +import NextLink from 'next/link'; + +const AnchorLink: React.FC< + LinkProps & Omit, 'href'> +> = ({ href, ...props }) => { + return ( + + + + ); +}; + +export default AnchorLink; diff --git a/website/src/components/ui/list-box.tsx b/website/src/components/ui/list-box.tsx new file mode 100644 index 000000000..45830418d --- /dev/null +++ b/website/src/components/ui/list-box.tsx @@ -0,0 +1,81 @@ +import { Fragment } from 'react'; +import { Listbox as HeadlessListbox } from '@headlessui/react'; +import cn from 'classnames'; +import { Transition } from '@/components/ui/transition'; +import { ChevronDown } from '@/components/icons/chevron-down'; + +export type ListboxOption = { + name: string; + value: string; +}; + +interface ListboxTypes { + options: ListboxOption[]; + selectedOption: ListboxOption; + onChange: React.Dispatch>; + children?: React.ReactNode; + onSelect?: (value: string) => void; + variant?: 'ghost' | 'solid' | 'transparent'; + className?: string; +} + +const listboxVariantClasses = { + ghost: + 'transition-shadow border border-gray-200 bg-white text-gray-900 hover:border-gray-900 hover:ring-1 hover:ring-gray-900 dark:border-gray-700 dark:bg-gray-800 dark:text-gray-100 dark:hover:border-gray-600 dark:hover:ring-gray-600', + solid: + 'transition-colors bg-gray-100 hover:bg-gray-200/70 dark:bg-gray-800 dark:hover:bg-gray-700', + transparent: '', +}; + +export default function Listbox({ + options, + onChange, + onSelect, + variant = 'ghost', + selectedOption, + className, + children, +}: ListboxTypes) { + return ( +
+ + +
{selectedOption?.name}
+ +
+ + + {options.map((option) => ( + + {({ selected }) => ( +
onSelect && onSelect(option.value)} + className={`flex cursor-pointer items-center rounded-md px-3 py-2 text-sm text-gray-900 transition dark:text-gray-100 ${ + selected + ? 'bg-gray-200/70 font-medium dark:bg-gray-600/60' + : 'hover:bg-gray-100 dark:hover:bg-gray-700/70' + }`} + > + {option.name} +
+ )} +
+ ))} + {/* any custom / external link or element */} + {children && children} +
+
+
+
+ ); +} diff --git a/website/src/components/ui/loader.tsx b/website/src/components/ui/loader.tsx new file mode 100644 index 000000000..7c0960a15 --- /dev/null +++ b/website/src/components/ui/loader.tsx @@ -0,0 +1,85 @@ +import cn from 'classnames'; +export type LoaderSizeTypes = 'large' | 'medium' | 'small'; +export type LoaderVariantTypes = 'blink' | 'scaleUp' | 'moveUp'; +export interface LoaderTypes + extends React.HTMLAttributes { + tag?: 'div' | 'span'; + size?: LoaderSizeTypes; + variant?: LoaderVariantTypes; + showOnlyThreeDots?: boolean; + className?: string; +} + +const variants = { + blink: 'animate-blink', + scaleUp: 'animate-scale-up', + moveUp: 'animate-move-up', +}; +const sizes = { + small: 'w-1.5 h-1.5', + medium: 'w-2.5 h-2.5', + large: 'w-3 h-3', +}; + +function handleLoaderPosition(size: LoaderSizeTypes) { + return size === 'small' ? 'relative top-1.5' : 'relative top-3'; +} + +function handleVariantClasses( + variant: LoaderVariantTypes, + size: LoaderSizeTypes +) { + return variant === 'moveUp' && size === 'small' + ? 'animate-move-up-small' + : variants[variant]; +} + +export default function Loader({ + tag = 'div', + size = 'medium', + variant = 'moveUp', + showOnlyThreeDots, + className, +}: LoaderTypes) { + let Component = tag; + return ( + + + + + {variant === 'moveUp' && !showOnlyThreeDots ? ( + + ) : null} + + ); +} diff --git a/website/src/components/ui/logo.tsx b/website/src/components/ui/logo.tsx new file mode 100644 index 000000000..86d4f7edb --- /dev/null +++ b/website/src/components/ui/logo.tsx @@ -0,0 +1,29 @@ +import Image from '@/components/ui/image'; +import AnchorLink from '@/components/ui/links/anchor-link'; +import { useIsMounted } from '@/lib/hooks/use-is-mounted'; +import { useIsDarkMode } from '@/lib/hooks/use-is-dark-mode'; +import logo from '@/assets/images/icon-128.png'; + +const Logo: React.FC> = (props) => { + const isMounted = useIsMounted(); + const { isDarkMode } = useIsDarkMode(); + + return ( + + + {isMounted && isDarkMode && ( + Leo Wallet + )} + {isMounted && !isDarkMode && ( + Leo Wallet + )} + + + ); +}; + +export default Logo; diff --git a/website/src/components/ui/menu.tsx b/website/src/components/ui/menu.tsx new file mode 100644 index 000000000..eded8c878 --- /dev/null +++ b/website/src/components/ui/menu.tsx @@ -0,0 +1 @@ +export { Menu } from '@headlessui/react'; diff --git a/website/src/components/ui/radio-group.tsx b/website/src/components/ui/radio-group.tsx new file mode 100644 index 000000000..8c18e71aa --- /dev/null +++ b/website/src/components/ui/radio-group.tsx @@ -0,0 +1 @@ +export { RadioGroup } from '@headlessui/react'; diff --git a/website/src/components/ui/scrollbar.tsx b/website/src/components/ui/scrollbar.tsx new file mode 100644 index 000000000..e70b620bd --- /dev/null +++ b/website/src/components/ui/scrollbar.tsx @@ -0,0 +1,32 @@ +import cn from 'classnames'; +import { + OverlayScrollbarsComponent, + OverlayScrollbarsComponentProps, +} from 'overlayscrollbars-react'; +// import 'overlayscrollbars/css/OverlayScrollbars.css'; + +interface ScrollbarProps extends OverlayScrollbarsComponentProps { + style?: React.CSSProperties; + className?: string; +} + +export default function Scrollbar({ + options, + style, + className, + ...props +}: React.PropsWithChildren) { + return ( + + ); +} diff --git a/website/src/components/ui/transition.tsx b/website/src/components/ui/transition.tsx new file mode 100644 index 000000000..29419fc5a --- /dev/null +++ b/website/src/components/ui/transition.tsx @@ -0,0 +1 @@ +export { 
Transition } from '@headlessui/react'; diff --git a/website/src/context/wasm-context.tsx b/website/src/context/wasm-context.tsx new file mode 100644 index 000000000..bbd799194 --- /dev/null +++ b/website/src/context/wasm-context.tsx @@ -0,0 +1,31 @@ +// 'use client' +// import { createContext, useContext, useState, useEffect } from 'react'; +// import { initializeWasmAndWebClient } from '../lib/wasm'; +// import * as w from 'wasm'; + +// const WasmContext = createContext(null); + +// export function WasmProvider({ children }: { children: React.ReactNode }): React.ReactNode { +// const [wasm, setWasm] = useState(null); + +// useEffect(() => { +// (async () => { +// const wasmModule = await initializeWasmAndWebClient(); +// setWasm(wasmModule); +// })(); +// }, []); + +// return ( +// +// {children} +// +// ); +// } + +// export function useWasm() { +// const context = useContext(WasmContext); +// if (context === undefined) { +// throw new Error('useWasm must be used within a WasmProvider'); +// } +// return context; +// } diff --git a/website/src/layouts/_layout.tsx b/website/src/layouts/_layout.tsx new file mode 100644 index 000000000..732f63d30 --- /dev/null +++ b/website/src/layouts/_layout.tsx @@ -0,0 +1,96 @@ +import { useWindowScroll } from '@/lib/hooks/use-window-scroll'; +import Logo from '@/components/ui/logo'; +import SearchButton from '@/components/search/button'; +import { useBreakpoint } from '@/lib/hooks/use-breakpoint'; +import { useIsMounted } from '@/lib/hooks/use-is-mounted'; +import { useDrawer } from '@/components/drawer-views/context'; +import Hamburger from '@/components/ui/hamburger'; +import { MenuItems } from '@/layouts/_layout-menu'; +import React from 'react'; +// import { WalletMultiButton } from '@demox-labs/aleo-wallet-adapter-reactui/'; + +// require('@demox-labs/aleo-wallet-adapter/dist/ui/styles.css'); + +function HeaderRightArea() { + const isMounted = useIsMounted(); + const breakpoint = useBreakpoint(); + const { openDrawer, isOpen } = useDrawer(); + return ( +
+
+ +
+ +
+ {isMounted && ['xs', 'sm', 'md', 'lg'].indexOf(breakpoint) == -1 && ( +
+ +
+ )} + {/* */} +
+ +
+ openDrawer('DRAWER_MENU')} + color="white" + className="shadow-main dark:border dark:border-solid dark:border-gray-700 dark:bg-light-dark dark:text-white" + /> +
+
+ ); +} + +export function Header() { + const windowScroll = useWindowScroll(); + const breakpoint = useBreakpoint(); + const isMounted = useIsMounted(); + const { openDrawer, isOpen } = useDrawer(); + + return ( + + ); +} + +interface LayoutProps {} + +export default function Layout({ + children, +}: React.PropsWithChildren) { + return ( +
+
+
+ {children} +
+
+ ); +} diff --git a/website/src/layouts/dashboard/_dashboard.tsx b/website/src/layouts/dashboard/_dashboard.tsx new file mode 100644 index 000000000..9fbb31e49 --- /dev/null +++ b/website/src/layouts/dashboard/_dashboard.tsx @@ -0,0 +1,65 @@ +'use client' +import { useState } from 'react'; +import cn from 'classnames'; +import { useWindowScroll } from '@/lib/hooks/use-window-scroll'; +import Hamburger from '@/components/ui/hamburger'; +import { useIsMounted } from '@/lib/hooks/use-is-mounted'; +import { useDrawer } from '@/components/drawer-views/context'; +import Sidebar from '@/layouts/dashboard/_sidebar'; +import React, { FC, useMemo } from 'react'; + +export function Header() { + const { openDrawer } = useDrawer(); + const isMounted = useIsMounted(); + let windowScroll = useWindowScroll(); + let [isOpen, setIsOpen] = useState(false); + + return ( + + ); +} + +interface DashboardLayoutProps { + contentClassName?: string; +} + +export default function Layout({ + children, + contentClassName, +}: React.PropsWithChildren) { + return ( +
+
+ +
+ {children} +
+
+ ); +} diff --git a/website/src/layouts/dashboard/_sidebar.tsx b/website/src/layouts/dashboard/_sidebar.tsx new file mode 100644 index 000000000..2c2621580 --- /dev/null +++ b/website/src/layouts/dashboard/_sidebar.tsx @@ -0,0 +1,112 @@ +'use client' +import cn from 'classnames'; +import { ChevronForward } from "../../components/icons/chevron-forward"; +import { useDrawer } from "../../components/drawer-views/context"; +import Logo from "../../components/ui/logo"; +import Button from "../../components/ui/button"; +import { Close } from "../../components/icons/close"; +import Scrollbar from "../../components/ui/scrollbar"; +import { MenuItem } from "../../components/ui/collapsible-menu"; + +const routes = { + gettingStarted: '/', + accounts: '/accounts', + faucets: '/faucets', + notes: '/notes', + transactions: '/transactions' +}; + +const menuItems = [ + { + name: 'Getting Started', + icon: , + href: routes.gettingStarted, + }, + { + name: 'Wallets', + icon: , + href: routes.accounts, + }, + { + name: 'Faucets', + icon: , + href: routes.faucets, + }, + { + name: 'Transactions', + icon: , + href: routes.transactions, + }, + { + name: 'Notes', + icon: , + href: routes.notes, + } +]; + +type SidebarProps = { + className?: string; +}; + +export default function Sidebar({ className }: SidebarProps) { + const { closeDrawer } = useDrawer(); + return ( + + ); +} + +// export default function Sidebar({ className }: SidebarProps) { +// return ( +// +// ); +// } \ No newline at end of file diff --git a/website/src/lib/db.ts b/website/src/lib/db.ts new file mode 100644 index 000000000..e8a528ec0 --- /dev/null +++ b/website/src/lib/db.ts @@ -0,0 +1,50 @@ +// import Dexie from "dexie"; +// import { IAccount, IAccountAuth, IAccountCode, IAccountStorage, IAccountVault, ITransaction, ITransactionScript, IInputNote, IOutputNote, IStateSync, IBlockHeader, IChainMmrNode } from "./dbTypes"; + +// export enum Table { +// AccountCode = 'accountCode', +// AccountStorage = 'accountStorage', +// AccountVaults = 'accountVaults', +// AccountAuth = 'accountAuth', +// Accounts = 'accounts', +// Transactions = 'transactions', +// TransactionScripts = 'transactionScripts', +// InputNotes = 'inputNotes', +// OutputNotes = 'outputNotes', +// StateSync = 'stateSync', +// BlockHeaders = 'blockHeaders', +// ChainMmrNodes = 'chainMmrNodes', +// } + +// export const db = new Dexie('MidenClientDB') +// db.version(1).stores({ +// [Table.AccountCode]: indexes('root'), +// [Table.AccountStorage]: indexes('root'), +// [Table.AccountVaults]: indexes('root'), +// [Table.AccountAuth]: indexes('accountId'), +// [Table.Accounts]: indexes('[id+nonce]', 'id', 'committed', 'codeRoot', 'storageRoot', 'vaultRoot'), +// [Table.Transactions]: indexes('id', 'scriptHash', 'blockNum', 'commitHeight'), +// [Table.TransactionScripts]: indexes('scriptHash'), +// [Table.InputNotes]: indexes('noteId', 'recipient', 'status'), +// [Table.OutputNotes]: indexes('noteId', 'recipient', 'status'), +// [Table.StateSync]: indexes('blockNum'), +// [Table.BlockHeaders]: indexes('blockNum', 'hasClientNotes'), +// [Table.ChainMmrNodes]: indexes('id') +// }); + +// function indexes(...items: string[]) { +// return items.join(','); +// } + +// export const accountCodes = db.table(Table.AccountCode); +// export const accountStorages = db.table(Table.AccountStorage); +// export const accountVaults = db.table(Table.AccountVaults); +// export const accountAuths = db.table(Table.AccountAuth); +// export const accounts = db.table(Table.Accounts); +// export const 
transactions = db.table(Table.Transactions); +// export const transactionScripts = db.table(Table.TransactionScripts); +// export const inputNotes = db.table(Table.InputNotes); +// export const outputNotes = db.table(Table.OutputNotes); +// export const stateSync = db.table(Table.StateSync); +// export const blockHeaders = db.table(Table.BlockHeaders); +// export const chainMmrNodes = db.table(Table.ChainMmrNodes); diff --git a/website/src/lib/dbTypes.ts b/website/src/lib/dbTypes.ts new file mode 100644 index 000000000..d280f5b0a --- /dev/null +++ b/website/src/lib/dbTypes.ts @@ -0,0 +1,85 @@ +export interface IAccountCode { + root: Blob; + procedures: Blob; + module: Blob; +} + +export interface IAccountStorage { + root: Blob; + slots: Blob; +} + +export interface IAccountVault { + root: Blob; + assets: Blob; +} + +export interface IAccountAuth { + accountId: bigint; + authInfo: Blob; +} + +export interface IAccount { + id: bigint; + codeRoot: Blob; + storageRoot: Blob; + vaultRoot: Blob; + nonce: bigint; + committed: boolean; + accountSeed: Blob; +} + +export interface ITransaction { + id: Blob; + accountId: bigint; + initAccountState: Blob; + finalAccountState: Blob; + inputNotes: Blob; + outputNotes: Blob; + scriptHash: Blob; + scriptInputs: Blob; + blockNum: bigint; + commitHeight: bigint; +} + +export interface ITransactionScript { + scriptHash: Blob; + program: Blob; +} + +export interface IInputNote { + noteId: Blob; + recipient: Blob; + assets: Blob; + status: string; + inclusionProof: string; + metadata: Blob; + details: string; +} + +export interface IOutputNote { + noteId: Blob; + recipient: Blob; + assets: Blob; + status: string; + inclusionProof: string; + metadata: Blob; + details: string; +} + +export interface IStateSync { + blockNum: bigint; + tags: Blob; +} + +export interface IBlockHeader { + blockNum: bigint; + header: Blob; + chainMmrPeaks: Blob; + hasClientNotes: boolean; +} + +export interface IChainMmrNode { + id: bigint; + node: Blob; +} \ No newline at end of file diff --git a/website/src/lib/hooks/use-breakpoint.ts b/website/src/lib/hooks/use-breakpoint.ts new file mode 100644 index 000000000..79003cf00 --- /dev/null +++ b/website/src/lib/hooks/use-breakpoint.ts @@ -0,0 +1,14 @@ +import createBreakpoint from 'react-use/lib/factory/createBreakpoint'; + +const breakPoints = { + xs: 480, + sm: 640, + md: 768, + lg: 1024, + xl: 1280, + '2xl': 1440, + '3xl': 1780, + '4xl': 2160, +}; + +export const useBreakpoint = createBreakpoint(breakPoints); diff --git a/website/src/lib/hooks/use-click-away.ts b/website/src/lib/hooks/use-click-away.ts new file mode 100644 index 000000000..71070f1d3 --- /dev/null +++ b/website/src/lib/hooks/use-click-away.ts @@ -0,0 +1 @@ +export { default as useClickAway } from 'react-use/lib/useClickAway'; diff --git a/website/src/lib/hooks/use-copy-to-clipboard.ts b/website/src/lib/hooks/use-copy-to-clipboard.ts new file mode 100644 index 000000000..5c4e21531 --- /dev/null +++ b/website/src/lib/hooks/use-copy-to-clipboard.ts @@ -0,0 +1 @@ +export { default as useCopyToClipboard } from 'react-use/lib/useCopyToClipboard'; diff --git a/website/src/lib/hooks/use-direction.ts b/website/src/lib/hooks/use-direction.ts new file mode 100644 index 000000000..b69c09e04 --- /dev/null +++ b/website/src/lib/hooks/use-direction.ts @@ -0,0 +1,7 @@ +import { useEffect } from 'react'; + +export function useDirection(layout: string) { + useEffect(() => { + document.documentElement.dir = layout; + }, [layout]); +} diff --git 
a/website/src/lib/hooks/use-is-dark-mode.ts b/website/src/lib/hooks/use-is-dark-mode.ts new file mode 100644 index 000000000..c3b0ee600 --- /dev/null +++ b/website/src/lib/hooks/use-is-dark-mode.ts @@ -0,0 +1,9 @@ +import { useTheme } from 'next-themes'; + +export function useIsDarkMode() { + const { resolvedTheme } = useTheme(); + + return { + isDarkMode: resolvedTheme === 'dark', + }; +} diff --git a/website/src/lib/hooks/use-is-mounted.ts b/website/src/lib/hooks/use-is-mounted.ts new file mode 100644 index 000000000..abcb44ef0 --- /dev/null +++ b/website/src/lib/hooks/use-is-mounted.ts @@ -0,0 +1,8 @@ +'use client' +import { useEffect, useState } from 'react'; + +export function useIsMounted() { + const [mounted, setMounted] = useState(false); + useEffect(() => setMounted(true), []); + return mounted; +} diff --git a/website/src/lib/hooks/use-local-storage.ts b/website/src/lib/hooks/use-local-storage.ts new file mode 100644 index 000000000..a0dbcac61 --- /dev/null +++ b/website/src/lib/hooks/use-local-storage.ts @@ -0,0 +1 @@ +export { default as useLocalStorage } from 'react-use/lib/useLocalStorage'; diff --git a/website/src/lib/hooks/use-measure.ts b/website/src/lib/hooks/use-measure.ts new file mode 100644 index 000000000..acf58cda9 --- /dev/null +++ b/website/src/lib/hooks/use-measure.ts @@ -0,0 +1 @@ +export { default as useMeasure } from 'react-use/lib/useMeasure'; diff --git a/website/src/lib/hooks/use-state-callback.ts b/website/src/lib/hooks/use-state-callback.ts new file mode 100644 index 000000000..dd17eb5dc --- /dev/null +++ b/website/src/lib/hooks/use-state-callback.ts @@ -0,0 +1,25 @@ +import { useCallback, useEffect, useRef, useState } from "react"; + + +export function useStateCallback( + initialState: T +): [T, (state: T, cb?: (state: T) => void) => void] { + const [state, setState] = useState(initialState); + const cbRef = useRef<((state: T) => void) | undefined>(undefined); // init mutable ref container for callbacks + + const setStateCallback = useCallback((state: T, cb?: (state: T) => void) => { + cbRef.current = cb; // store current, passed callback in ref + setState(state); + }, []); // keep object reference stable, exactly like `useState` + + useEffect(() => { + // cb.current is `undefined` on initial render, + // so we only invoke callback on state *updates* + if (cbRef.current) { + cbRef.current(state); + cbRef.current = undefined; // reset callback after execution + } + }, [state]); + + return [state, setStateCallback]; +} \ No newline at end of file diff --git a/website/src/lib/hooks/use-theme-color.ts b/website/src/lib/hooks/use-theme-color.ts new file mode 100644 index 000000000..d2387c403 --- /dev/null +++ b/website/src/lib/hooks/use-theme-color.ts @@ -0,0 +1,17 @@ +import { useEffect } from 'react'; + +function hexToRGB(hex: string) { + const r = parseInt(hex.slice(1, 3), 16); + const g = parseInt(hex.slice(3, 5), 16); + const b = parseInt(hex.slice(5, 7), 16); + + return `${r} ${g} ${b}`; +} + +export function useThemeColor(color: string) { + useEffect(() => { + const rgbColor = hexToRGB(color); + + document.documentElement.style.setProperty('--color-brand', rgbColor); + }, [color]); +} diff --git a/website/src/lib/hooks/use-window-scroll.ts b/website/src/lib/hooks/use-window-scroll.ts new file mode 100644 index 000000000..30b6c292d --- /dev/null +++ b/website/src/lib/hooks/use-window-scroll.ts @@ -0,0 +1 @@ +export { default as useWindowScroll } from 'react-use/lib/useWindowScroll'; diff --git a/website/src/lib/polygon-worker/accounts.ts 
b/website/src/lib/polygon-worker/accounts.ts new file mode 100644 index 000000000..1cfebf1d0 --- /dev/null +++ b/website/src/lib/polygon-worker/accounts.ts @@ -0,0 +1,8 @@ +// import { AccountsBuilder } from '@/workers/accounts'; +// import { spawn, Thread, Worker } from 'threads'; + +export const createBasicAccount = async () => { + const worker = new Worker('src/workers/accounts.ts', { type : "module" }) + const num = worker.postMessage(2) + return num +} \ No newline at end of file diff --git a/website/src/lib/wasm.ts b/website/src/lib/wasm.ts new file mode 100644 index 000000000..4d52f5a5e --- /dev/null +++ b/website/src/lib/wasm.ts @@ -0,0 +1,11 @@ +// import init, * as wasm from 'wasm'; + +// let wasmModule: typeof wasm; + +// export async function initializeWasmAndWebClient() { +// if (!wasmModule) { +// await init(); +// wasmModule = wasm; +// } +// return wasmModule +// } diff --git a/website/src/workers/accounts.ts b/website/src/workers/accounts.ts new file mode 100644 index 000000000..756a08a24 --- /dev/null +++ b/website/src/workers/accounts.ts @@ -0,0 +1,175 @@ +import { JSSerializedAccount } from "@/app/accounts/[accountId]/page"; +import { WebClient } from "@demox-labs/miden-sdk"; + +console.log('Worker is setting up...'); +const webClient = new WebClient(); +await webClient.create_client(); +postMessage({ type: "ready" }); + +async function pollUntilComitted(webClient: WebClient, outputNoteId: String) { + let status; + while (status !== 'Committed') { + await webClient.sync_state(); + status = await webClient.get_output_note_status(outputNoteId); + console.log('STATUS', status) + if (status !== 'Committed') { + await new Promise(resolve => setTimeout(resolve, 1000)); + } + } +} + +addEventListener('message', async (event) => { + console.log('worker received message', event.data) + const params = event.data.params + switch (event.data.type) { + case "createWallet": + console.log('creating wallet', params) + const accountId = await webClient.new_wallet(params.storageType, params.mutable); + console.log('account created', accountId); + postMessage({ type: "createWallet", accountId }); + break; + + case "createFaucet": + console.log('creating faucet', params) + const faucetId = await webClient.new_faucet( + params.storageType, + params.nonFungible, + params.tokenSymbol, + params.decimals, + params.maxSupply + ); + console.log('faucet created', faucetId); + postMessage({ type: "createFaucet", faucetId }); + break; + + case "fetchAccounts": + const accounts = await webClient.get_accounts(); + console.log('accounts fetched', accounts); + postMessage({ type: "fetchAccounts", accounts: accounts }); + break; + + case "fetchInputNotes": + console.log('fetching input notes', params) + const inputNotes = await webClient.get_input_notes(params.noteFilter); + console.log('input notes fetched', inputNotes); + postMessage({ type: "fetchInputNotes", inputNotes: inputNotes }); + break; + + case "fetchOutputNotes": + console.log('fetching output notes', params) + const outputNotes = await webClient.get_output_notes(params.noteFilter); + console.log('output notes fetched', outputNotes); + postMessage({ type: "fetchOutputNotes", outputNotes: outputNotes }); + break; + + case "fetchTransactions": + console.log('fetching transactions') + const transactions = await webClient.get_transactions(); + console.log('transactions fetched', transactions); + postMessage({ type: "fetchTransactions", transactions: transactions }); + break; + + case "getAccount": + console.log('getting account', params) + const 
account = await webClient.get_account(params.accountId); + console.log('account fetched', account); + postMessage({ type: "getAccount", account: new JSSerializedAccount(account) }); + break; + + case "importNote": + console.log('importing note', params) + break; + + case "exportNote": + console.log('exporting note', params) + const noteAsBytes = await webClient.export_note(params.noteId); + console.log('note exported', noteAsBytes); + postMessage({ type: "exportNote", noteAsBytes: noteAsBytes }); + break; + + case "importAccount": + console.log('importing account', params) + const result = await webClient.import_account(params.accountAsBytes); + console.log('account imported', result); + postMessage({ type: "importAccount", result }); + break; + + case "mintTransaction": + console.log('doing a mint transaction', params) + await webClient.sync_state(); + await webClient.fetch_and_cache_account_auth_by_pub_key(params.faucetId); + const mintResult = await webClient.new_mint_transaction(params.walletId, params.faucetId, params.noteType, params.amount); + await new Promise(resolve => setTimeout(resolve, 2000)); + await webClient.sync_state(); + postMessage({ + type: "mintTransaction", + mintResult: { transactionId: mintResult.transaction_id, createdNoteIds: mintResult.created_note_ids } + }); + break; + + case "sendTransaction": + console.log('doing a send transaction', params) + await webClient.sync_state(); + await webClient.fetch_and_cache_account_auth_by_pub_key(params.senderAccountId); + const sendResult = await webClient.new_send_transaction( + params.senderAccountId, + params.targetAccountId, + params.faucetId, + params.noteType, + params.amount, + params.recallHeight + ); + await new Promise(resolve => setTimeout(resolve, 2000)); + await webClient.sync_state(); + postMessage({ type: "sendTransaction", sendResult: { transactionId: sendResult.transaction_id, createdNoteIds: sendResult.created_note_ids } }); + break; + + case "swapTransaction": + console.log('doing a swap transaction', params) + await webClient.sync_state(); + await webClient.fetch_and_cache_account_auth_by_pub_key(params.walletA); + const swapResult = await webClient.new_swap_transaction( + params.walletA, + params.faucetA, + params.amountOfA, + params.faucetB, + params.amountOfB, + params.noteType + ); + await new Promise(resolve => setTimeout(resolve, 2000)); + await webClient.sync_state(); + + await webClient.add_tag(swapResult.payback_note_tag) + await new Promise(resolve => setTimeout(resolve, 10000)); + await webClient.sync_state(); + + postMessage({ type: "swapTransaction", swapResult: { + transactionId: swapResult.transaction_id, + expectedOutputNoteIds: swapResult.expected_output_note_ids, + expectedPartialNoteIds: swapResult.expected_partial_note_ids, + paybackNoteTag: swapResult.payback_note_tag + } }); + break; + + case "consumeTransaction": + console.log('doing a consume transaction', params) + await new Promise(resolve => setTimeout(resolve, 2000)); + await pollUntilComitted(webClient, params.noteIds[0]); + await webClient.sync_state(); + await webClient.fetch_and_cache_account_auth_by_pub_key(params.targetAccountId); + const consumeResult = await webClient.new_consume_transaction(params.targetAccountId, params.noteIds); + await new Promise(resolve => setTimeout(resolve, 2000)); + await webClient.sync_state(); + postMessage({ + type: "consumeTransaction", + consumeResult: { transactionId: consumeResult.transaction_id, createdNoteIds: consumeResult.created_note_ids }, + consumeType: params.consumeType + }); + 
break; + + default: + console.log('invalid message:', event.data); + postMessage({ type: 'invalid' }); + break; + } +}) \ No newline at end of file diff --git a/website/tailwind.config.js b/website/tailwind.config.js new file mode 100644 index 000000000..f08b3e259 --- /dev/null +++ b/website/tailwind.config.js @@ -0,0 +1,125 @@ +module.exports = { + content: [ + './src/app/**/*.{js,ts,jsx,tsx}', + './src/components/**/*.{js,ts,jsx,tsx}', + './src/layouts/**/*.{js,ts,jsx,tsx}', + ], + darkMode: 'class', + theme: { + screens: { + xs: '500px', + sm: '640px', + md: '768px', + lg: '1024px', + xl: '1280px', + '2xl': '1440px', + '3xl': '1780px', + '4xl': '2160px', // only need to control product grid mode in ultra 4k device + }, + extend: { + colors: { + brand: 'rgb(var(--color-brand) / )', + body: '#fcfcfc', + dark: '#0D1321', + 'light-dark': '#171e2e', + }, + spacing: { + 13: '3.375rem', + }, + margin: { + '1/2': '50%', + }, + padding: { + full: '100%', + }, + width: { + 'calc-320': 'calc(100% - 320px)', + 'calc-358': 'calc(100% - 358px)', + }, + fontFamily: { + body: ['Fira Code', 'monospace'], + }, + fontSize: { + '13px': ['13px', '18px'], + }, + borderWidth: { + 3: '3px', + }, + boxShadow: { + main: '0px 6px 18px rgba(0, 0, 0, 0.04)', + light: '0px 4px 4px rgba(0, 0, 0, 0.08)', + large: '0px 8px 16px rgba(17, 24, 39, 0.1)', + card: '0px 2px 6px rgba(0, 0, 0, 0.06)', + transaction: '0px 8px 16px rgba(17, 24, 39, 0.06)', + button: + '0px 2px 4px rgba(0, 0, 0, 0.06), 0px 4px 6px rgba(0, 0, 0, 0.1)', + }, + dropShadow: { + main: '0px 4px 8px rgba(0, 0, 0, 0.08)', + }, + backgroundImage: { + 'gradient-radial': 'radial-gradient(var(--tw-gradient-stops))', + }, + animation: { + blink: 'blink 1.4s infinite both;', + 'move-up': 'moveUp 500ms infinite alternate', + 'scale-up': 'scaleUp 500ms infinite alternate', + 'drip-expand': 'expand 500ms ease-in forwards', + 'drip-expand-large': 'expand-large 600ms ease-in forwards', + 'move-up-small': 'moveUpSmall 500ms infinite alternate', + }, + keyframes: { + blink: { + '0%': { opacity: 0.2 }, + '20%': { opacity: 1 }, + '100%': { opacity: 0.2 }, + }, + expand: { + '0%': { + opacity: 0, + transform: 'scale(1)', + }, + '30%': { + opacity: 1, + }, + '80%': { + opacity: 0.5, + }, + '100%': { + transform: 'scale(30)', + opacity: 0, + }, + }, + 'expand-large': { + '0%': { + opacity: 0, + transform: 'scale(1)', + }, + '30%': { + opacity: 1, + }, + '80%': { + opacity: 0.5, + }, + '100%': { + transform: 'scale(96)', + opacity: 0, + }, + }, + moveUp: { + '0%': { transform: 'translateY(0)' }, + '100%': { transform: 'translateY(-20px)' }, + }, + moveUpSmall: { + '0%': { transform: 'translateY(0)' }, + '100%': { transform: 'translateY(-10px)' }, + }, + scaleUp: { + '0%': { transform: 'scale(0)' }, + '100%': { transform: 'scale(1)' }, + }, + }, + }, + }, + plugins: [require('@tailwindcss/typography'), require('@tailwindcss/forms')], +}; diff --git a/website/tsconfig.json b/website/tsconfig.json new file mode 100644 index 000000000..956bbe5f9 --- /dev/null +++ b/website/tsconfig.json @@ -0,0 +1,40 @@ +{ + "compilerOptions": { + "target": "esnext", + "lib": [ + "dom", + "dom.iterable", + "esnext" + ], + "allowJs": true, + "skipLibCheck": true, + "strict": true, + "noEmit": true, + "esModuleInterop": true, + "module": "esnext", + "resolveJsonModule": true, + "isolatedModules": true, + "jsx": "preserve", + "incremental": true, + "plugins": [ + { + "name": "next" + } + ], + "paths": { + "@/*": [ + "./src/*" + ] + }, + "moduleResolution": "node" + }, + "include": [ + 
"next-env.d.ts", + "**/*.ts", + "**/*.tsx", + ".next/types/**/*.ts" + ], + "exclude": [ + "node_modules" + ] +} diff --git a/website/tsconfig.worker.json b/website/tsconfig.worker.json new file mode 100644 index 000000000..0ad4010d2 --- /dev/null +++ b/website/tsconfig.worker.json @@ -0,0 +1,11 @@ +{ + "extends": "./tsconfig.json", + "compilerOptions": { + "outDir": "./public/workers", + "module": "ES6", + "noEmit": false + }, + "include": [ + "src/workers/**/*.ts" + ] +} \ No newline at end of file diff --git a/website/yarn.lock b/website/yarn.lock new file mode 100644 index 000000000..631083c95 --- /dev/null +++ b/website/yarn.lock @@ -0,0 +1,3120 @@ +# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY. +# yarn lockfile v1 + + +"@alloc/quick-lru@^5.2.0": + version "5.2.0" + resolved "https://registry.yarnpkg.com/@alloc/quick-lru/-/quick-lru-5.2.0.tgz#7bf68b20c0a350f936915fcae06f58e32007ce30" + integrity sha512-UrcABB+4bUrFABwbluTIBErXwvbsU/V7TZWfmbgJfbkwiBuziS9gxdODUyuiecfdGQ85jglMW6juS3+z5TsKLw== + +"@babel/runtime@^7.1.2": + version "7.24.7" + resolved "https://registry.yarnpkg.com/@babel/runtime/-/runtime-7.24.7.tgz#f4f0d5530e8dbdf59b3451b9b3e594b6ba082e12" + integrity sha512-UwgBRMjJP+xv857DCngvqXI3Iq6J4v0wXmwc6sapg+zyhbwmQX67LUEFrkK5tbyJ30jGuG3ZvWpBiB9LCy1kWw== + dependencies: + regenerator-runtime "^0.14.0" + +"@demox-labs/miden-sdk@file:../wasm/dist": + version "0.0.0" + +"@eslint-community/eslint-utils@^4.2.0": + version "4.4.0" + resolved "https://registry.yarnpkg.com/@eslint-community/eslint-utils/-/eslint-utils-4.4.0.tgz#a23514e8fb9af1269d5f7788aa556798d61c6b59" + integrity sha512-1/sA4dwrzBAyeUoQ6oxahHKmrZvsnLCg4RfxW3ZFGGmQkSNQPFNLV9CUEFQP1x9EYXHTo5p6xdhZM1Ne9p/AfA== + dependencies: + eslint-visitor-keys "^3.3.0" + +"@eslint-community/regexpp@^4.6.1": + version "4.11.0" + resolved "https://registry.yarnpkg.com/@eslint-community/regexpp/-/regexpp-4.11.0.tgz#b0ffd0312b4a3fd2d6f77237e7248a5ad3a680ae" + integrity sha512-G/M/tIiMrTAxEWRfLfQJMmGNX28IxBg4PBz8XqQhqUHLFI6TL2htpIB1iQCj144V5ee/JaKyT9/WZ0MGZWfA7A== + +"@eslint/eslintrc@^2.1.4": + version "2.1.4" + resolved "https://registry.yarnpkg.com/@eslint/eslintrc/-/eslintrc-2.1.4.tgz#388a269f0f25c1b6adc317b5a2c55714894c70ad" + integrity sha512-269Z39MS6wVJtsoUl10L60WdkhJVdPG24Q4eZTH3nnF6lpvSShEK3wQjDX9JRWAUPvPh7COouPpU9IrqaZFvtQ== + dependencies: + ajv "^6.12.4" + debug "^4.3.2" + espree "^9.6.0" + globals "^13.19.0" + ignore "^5.2.0" + import-fresh "^3.2.1" + js-yaml "^4.1.0" + minimatch "^3.1.2" + strip-json-comments "^3.1.1" + +"@eslint/js@8.57.0": + version "8.57.0" + resolved "https://registry.yarnpkg.com/@eslint/js/-/js-8.57.0.tgz#a5417ae8427873f1dd08b70b3574b453e67b5f7f" + integrity sha512-Ys+3g2TaW7gADOJzPt83SJtCDhMjndcDMFVQ/Tj9iA1BfJzFKD9mAUXT3OenpuPHbI6P/myECxRJrofUsDx/5g== + +"@humanwhocodes/config-array@^0.11.14": + version "0.11.14" + resolved "https://registry.yarnpkg.com/@humanwhocodes/config-array/-/config-array-0.11.14.tgz#d78e481a039f7566ecc9660b4ea7fe6b1fec442b" + integrity sha512-3T8LkOmg45BV5FICb15QQMsyUSWrQ8AygVfC7ZG32zOalnqrilm018ZVCw0eapXux8FtA33q8PSRSstjee3jSg== + dependencies: + "@humanwhocodes/object-schema" "^2.0.2" + debug "^4.3.1" + minimatch "^3.0.5" + +"@humanwhocodes/module-importer@^1.0.1": + version "1.0.1" + resolved "https://registry.yarnpkg.com/@humanwhocodes/module-importer/-/module-importer-1.0.1.tgz#af5b2691a22b44be847b0ca81641c5fb6ad0172c" + integrity sha512-bxveV4V8v5Yb4ncFTT3rPSgZBOpCkjfK0y4oVVVJwIuDVBRMDXrPyXRL988i5ap9m9bnyEEjWfm5WkBmtffLfA== + 
+"@humanwhocodes/object-schema@^2.0.2": + version "2.0.3" + resolved "https://registry.yarnpkg.com/@humanwhocodes/object-schema/-/object-schema-2.0.3.tgz#4a2868d75d6d6963e423bcf90b7fd1be343409d3" + integrity sha512-93zYdMES/c1D69yZiKDBj0V24vqNzB/koF26KPaagAfd3P/4gUlh3Dys5ogAK+Exi9QyzlD8x/08Zt7wIKcDcA== + +"@isaacs/cliui@^8.0.2": + version "8.0.2" + resolved "https://registry.yarnpkg.com/@isaacs/cliui/-/cliui-8.0.2.tgz#b37667b7bc181c168782259bab42474fbf52b550" + integrity sha512-O8jcjabXaleOG9DQ0+ARXWZBTfnP4WNAqzuiJK7ll44AmxGKv/J2M4TPjxjY3znBCfvBXFzucm1twdyFybFqEA== + dependencies: + string-width "^5.1.2" + string-width-cjs "npm:string-width@^4.2.0" + strip-ansi "^7.0.1" + strip-ansi-cjs "npm:strip-ansi@^6.0.1" + wrap-ansi "^8.1.0" + wrap-ansi-cjs "npm:wrap-ansi@^7.0.0" + +"@jridgewell/gen-mapping@^0.3.2": + version "0.3.5" + resolved "https://registry.yarnpkg.com/@jridgewell/gen-mapping/-/gen-mapping-0.3.5.tgz#dcce6aff74bdf6dad1a95802b69b04a2fcb1fb36" + integrity sha512-IzL8ZoEDIBRWEzlCcRhOaCupYyN5gdIK+Q6fbFdPDg6HqX6jpkItn7DFIpW9LQzXG6Df9sA7+OKnq0qlz/GaQg== + dependencies: + "@jridgewell/set-array" "^1.2.1" + "@jridgewell/sourcemap-codec" "^1.4.10" + "@jridgewell/trace-mapping" "^0.3.24" + +"@jridgewell/resolve-uri@^3.1.0": + version "3.1.2" + resolved "https://registry.yarnpkg.com/@jridgewell/resolve-uri/-/resolve-uri-3.1.2.tgz#7a0ee601f60f99a20c7c7c5ff0c80388c1189bd6" + integrity sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw== + +"@jridgewell/set-array@^1.2.1": + version "1.2.1" + resolved "https://registry.yarnpkg.com/@jridgewell/set-array/-/set-array-1.2.1.tgz#558fb6472ed16a4c850b889530e6b36438c49280" + integrity sha512-R8gLRTZeyp03ymzP/6Lil/28tGeGEzhx1q2k703KGWRAI1VdvPIXdG70VJc2pAMw3NA6JKL5hhFu1sJX0Mnn/A== + +"@jridgewell/sourcemap-codec@^1.4.10", "@jridgewell/sourcemap-codec@^1.4.14", "@jridgewell/sourcemap-codec@^1.4.15": + version "1.5.0" + resolved "https://registry.yarnpkg.com/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.5.0.tgz#3188bcb273a414b0d215fd22a58540b989b9409a" + integrity sha512-gv3ZRaISU3fjPAgNsriBRqGWQL6quFx04YMPW/zD8XMLsU32mhCCbfbO6KZFLjvYpCZ8zyDEgqsgf+PwPaM7GQ== + +"@jridgewell/trace-mapping@^0.3.24": + version "0.3.25" + resolved "https://registry.yarnpkg.com/@jridgewell/trace-mapping/-/trace-mapping-0.3.25.tgz#15f190e98895f3fc23276ee14bc76b675c2e50f0" + integrity sha512-vNk6aEwybGtawWmy/PzwnGDOjCkLWSD2wqvjGGAgOAwCGWySYXfYoxt00IJkTF+8Lb57DwOb3Aa0o9CApepiYQ== + dependencies: + "@jridgewell/resolve-uri" "^3.1.0" + "@jridgewell/sourcemap-codec" "^1.4.14" + +"@next/env@14.2.5": + version "14.2.5" + resolved "https://registry.yarnpkg.com/@next/env/-/env-14.2.5.tgz#1d9328ab828711d3517d0a1d505acb55e5ef7ad0" + integrity sha512-/zZGkrTOsraVfYjGP8uM0p6r0BDT6xWpkjdVbcz66PJVSpwXX3yNiRycxAuDfBKGWBrZBXRuK/YVlkNgxHGwmA== + +"@next/eslint-plugin-next@14.1.4": + version "14.1.4" + resolved "https://registry.yarnpkg.com/@next/eslint-plugin-next/-/eslint-plugin-next-14.1.4.tgz#d7372b5ffede0e466af8af2ff534386418827fc8" + integrity sha512-n4zYNLSyCo0Ln5b7qxqQeQ34OZKXwgbdcx6kmkQbywr+0k6M3Vinft0T72R6CDAcDrne2IAgSud4uWCzFgc5HA== + dependencies: + glob "10.3.10" + +"@next/swc-darwin-arm64@14.2.5": + version "14.2.5" + resolved "https://registry.yarnpkg.com/@next/swc-darwin-arm64/-/swc-darwin-arm64-14.2.5.tgz#d0a160cf78c18731c51cc0bff131c706b3e9bb05" + integrity sha512-/9zVxJ+K9lrzSGli1///ujyRfon/ZneeZ+v4ptpiPoOU+GKZnm8Wj8ELWU1Pm7GHltYRBklmXMTUqM/DqQ99FQ== + +"@next/swc-darwin-x64@14.2.5": + version "14.2.5" + resolved 
"https://registry.yarnpkg.com/@next/swc-darwin-x64/-/swc-darwin-x64-14.2.5.tgz#eb832a992407f6e6352eed05a073379f1ce0589c" + integrity sha512-vXHOPCwfDe9qLDuq7U1OYM2wUY+KQ4Ex6ozwsKxp26BlJ6XXbHleOUldenM67JRyBfVjv371oneEvYd3H2gNSA== + +"@next/swc-linux-arm64-gnu@14.2.5": + version "14.2.5" + resolved "https://registry.yarnpkg.com/@next/swc-linux-arm64-gnu/-/swc-linux-arm64-gnu-14.2.5.tgz#098fdab57a4664969bc905f5801ef5a89582c689" + integrity sha512-vlhB8wI+lj8q1ExFW8lbWutA4M2ZazQNvMWuEDqZcuJJc78iUnLdPPunBPX8rC4IgT6lIx/adB+Cwrl99MzNaA== + +"@next/swc-linux-arm64-musl@14.2.5": + version "14.2.5" + resolved "https://registry.yarnpkg.com/@next/swc-linux-arm64-musl/-/swc-linux-arm64-musl-14.2.5.tgz#243a1cc1087fb75481726dd289c7b219fa01f2b5" + integrity sha512-NpDB9NUR2t0hXzJJwQSGu1IAOYybsfeB+LxpGsXrRIb7QOrYmidJz3shzY8cM6+rO4Aojuef0N/PEaX18pi9OA== + +"@next/swc-linux-x64-gnu@14.2.5": + version "14.2.5" + resolved "https://registry.yarnpkg.com/@next/swc-linux-x64-gnu/-/swc-linux-x64-gnu-14.2.5.tgz#b8a2e436387ee4a52aa9719b718992e0330c4953" + integrity sha512-8XFikMSxWleYNryWIjiCX+gU201YS+erTUidKdyOVYi5qUQo/gRxv/3N1oZFCgqpesN6FPeqGM72Zve+nReVXQ== + +"@next/swc-linux-x64-musl@14.2.5": + version "14.2.5" + resolved "https://registry.yarnpkg.com/@next/swc-linux-x64-musl/-/swc-linux-x64-musl-14.2.5.tgz#cb8a9adad5fb8df86112cfbd363aab5c6d32757b" + integrity sha512-6QLwi7RaYiQDcRDSU/os40r5o06b5ue7Jsk5JgdRBGGp8l37RZEh9JsLSM8QF0YDsgcosSeHjglgqi25+m04IQ== + +"@next/swc-win32-arm64-msvc@14.2.5": + version "14.2.5" + resolved "https://registry.yarnpkg.com/@next/swc-win32-arm64-msvc/-/swc-win32-arm64-msvc-14.2.5.tgz#81f996c1c38ea0900d4e7719cc8814be8a835da0" + integrity sha512-1GpG2VhbspO+aYoMOQPQiqc/tG3LzmsdBH0LhnDS3JrtDx2QmzXe0B6mSZZiN3Bq7IOMXxv1nlsjzoS1+9mzZw== + +"@next/swc-win32-ia32-msvc@14.2.5": + version "14.2.5" + resolved "https://registry.yarnpkg.com/@next/swc-win32-ia32-msvc/-/swc-win32-ia32-msvc-14.2.5.tgz#f61c74ce823e10b2bc150e648fc192a7056422e0" + integrity sha512-Igh9ZlxwvCDsu6438FXlQTHlRno4gFpJzqPjSIBZooD22tKeI4fE/YMRoHVJHmrQ2P5YL1DoZ0qaOKkbeFWeMg== + +"@next/swc-win32-x64-msvc@14.2.5": + version "14.2.5" + resolved "https://registry.yarnpkg.com/@next/swc-win32-x64-msvc/-/swc-win32-x64-msvc-14.2.5.tgz#ed199a920efb510cfe941cd75ed38a7be21e756f" + integrity sha512-tEQ7oinq1/CjSG9uSTerca3v4AZ+dFa+4Yu6ihaG8Ud8ddqLQgFGcnwYls13H5X5CPDPZJdYxyeMui6muOLd4g== + +"@nodelib/fs.scandir@2.1.5": + version "2.1.5" + resolved "https://registry.yarnpkg.com/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz#7619c2eb21b25483f6d167548b4cfd5a7488c3d5" + integrity sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g== + dependencies: + "@nodelib/fs.stat" "2.0.5" + run-parallel "^1.1.9" + +"@nodelib/fs.stat@2.0.5", "@nodelib/fs.stat@^2.0.2": + version "2.0.5" + resolved "https://registry.yarnpkg.com/@nodelib/fs.stat/-/fs.stat-2.0.5.tgz#5bd262af94e9d25bd1e71b05deed44876a222e8b" + integrity sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A== + +"@nodelib/fs.walk@^1.2.3", "@nodelib/fs.walk@^1.2.8": + version "1.2.8" + resolved "https://registry.yarnpkg.com/@nodelib/fs.walk/-/fs.walk-1.2.8.tgz#e95737e8bb6746ddedf69c556953494f196fe69a" + integrity sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg== + dependencies: + "@nodelib/fs.scandir" "2.1.5" + fastq "^1.6.0" + +"@pkgjs/parseargs@^0.11.0": + version "0.11.0" + resolved 
"https://registry.yarnpkg.com/@pkgjs/parseargs/-/parseargs-0.11.0.tgz#a77ea742fab25775145434eb1d2328cf5013ac33" + integrity sha512-+1VkjdD0QBLPodGrJUeqarH8VAIvQODIbwh9XpP5Syisf7YoQgsJKPNFoqqLQlu+VQ/tVSshMR6loPMn8U+dPg== + +"@rushstack/eslint-patch@^1.3.3": + version "1.10.3" + resolved "https://registry.yarnpkg.com/@rushstack/eslint-patch/-/eslint-patch-1.10.3.tgz#391d528054f758f81e53210f1a1eebcf1a8b1d20" + integrity sha512-qC/xYId4NMebE6w/V33Fh9gWxLgURiNYgVNObbJl2LZv0GUUItCcCqC5axQSwRaAgaxl2mELq1rMzlswaQ0Zxg== + +"@swc/counter@^0.1.3": + version "0.1.3" + resolved "https://registry.yarnpkg.com/@swc/counter/-/counter-0.1.3.tgz#cc7463bd02949611c6329596fccd2b0ec782b0e9" + integrity sha512-e2BR4lsJkkRlKZ/qCHPw9ZaSxc0MVUd7gtbtaB7aMvHeJVYe8sOB8DBZkP2DtISHGSku9sCK6T6cnY0CtXrOCQ== + +"@swc/helpers@0.5.5": + version "0.5.5" + resolved "https://registry.yarnpkg.com/@swc/helpers/-/helpers-0.5.5.tgz#12689df71bfc9b21c4f4ca00ae55f2f16c8b77c0" + integrity sha512-KGYxvIOXcceOAbEk4bi/dVLEK9z8sZ0uBB3Il5b1rhfClSpcX0yfRO0KmTkqR2cnQDymwLB+25ZyMzICg/cm/A== + dependencies: + "@swc/counter" "^0.1.3" + tslib "^2.4.0" + +"@tailwindcss/forms@^0.5.7": + version "0.5.7" + resolved "https://registry.yarnpkg.com/@tailwindcss/forms/-/forms-0.5.7.tgz#db5421f062a757b5f828bc9286ba626c6685e821" + integrity sha512-QE7X69iQI+ZXwldE+rzasvbJiyV/ju1FGHH0Qn2W3FKbuYtqp8LKcy6iSw79fVUT5/Vvf+0XgLCeYVG+UV6hOw== + dependencies: + mini-svg-data-uri "^1.2.3" + +"@tailwindcss/typography@^0.5.13": + version "0.5.13" + resolved "https://registry.yarnpkg.com/@tailwindcss/typography/-/typography-0.5.13.tgz#cd788a4fa4d0ca2506e242d512f377b22c1f7932" + integrity sha512-ADGcJ8dX21dVVHIwTRgzrcunY6YY9uSlAHHGVKvkA+vLc5qLwEszvKts40lx7z0qc4clpjclwLeK5rVCV2P/uw== + dependencies: + lodash.castarray "^4.4.0" + lodash.isplainobject "^4.0.6" + lodash.merge "^4.6.2" + postcss-selector-parser "6.0.10" + +"@types/js-cookie@^2.2.6": + version "2.2.7" + resolved "https://registry.yarnpkg.com/@types/js-cookie/-/js-cookie-2.2.7.tgz#226a9e31680835a6188e887f3988e60c04d3f6a3" + integrity sha512-aLkWa0C0vO5b4Sr798E26QgOkss68Un0bLjs7u9qxzPT5CG+8DuNTffWES58YzJs3hrVAOs1wonycqEBqNJubA== + +"@types/json5@^0.0.29": + version "0.0.29" + resolved "https://registry.yarnpkg.com/@types/json5/-/json5-0.0.29.tgz#ee28707ae94e11d2b827bcbe5270bcea7f3e71ee" + integrity sha512-dRLjCWHYg4oaA77cxO64oO+7JwCwnIzkZPdrrC71jQmQtlhM556pwKo5bUzqvZndkVbeFLIIi+9TC40JNF5hNQ== + +"@types/node@^20": + version "20.14.10" + resolved "https://registry.yarnpkg.com/@types/node/-/node-20.14.10.tgz#a1a218290f1b6428682e3af044785e5874db469a" + integrity sha512-MdiXf+nDuMvY0gJKxyfZ7/6UFsETO7mGKF54MVD/ekJS6HdFtpZFBgrh6Pseu64XTb2MLyFPlbW6hj8HYRQNOQ== + dependencies: + undici-types "~5.26.4" + +"@types/prop-types@*": + version "15.7.12" + resolved "https://registry.yarnpkg.com/@types/prop-types/-/prop-types-15.7.12.tgz#12bb1e2be27293c1406acb6af1c3f3a1481d98c6" + integrity sha512-5zvhXYtRNRluoE/jAp4GVsSduVUzNWKkOZrCDBWYtE7biZywwdC2AcEzg+cSMLFRfVgeAFqpfNabiPjxFddV1Q== + +"@types/react-dom@^18": + version "18.3.0" + resolved "https://registry.yarnpkg.com/@types/react-dom/-/react-dom-18.3.0.tgz#0cbc818755d87066ab6ca74fbedb2547d74a82b0" + integrity sha512-EhwApuTmMBmXuFOikhQLIBUn6uFg81SwLMOAUgodJF14SOBOCMdU04gDoYi0WOJJHD144TL32z4yDqCW3dnkQg== + dependencies: + "@types/react" "*" + +"@types/react@*", "@types/react@^18": + version "18.3.3" + resolved "https://registry.yarnpkg.com/@types/react/-/react-18.3.3.tgz#9679020895318b0915d7a3ab004d92d33375c45f" + integrity 
sha512-hti/R0pS0q1/xx+TsI73XIqk26eBsISZ2R0wUijXIngRK9R/e7Xw/cXVxQK7R5JjW+SV4zGcn5hXjudkN/pLIw== + dependencies: + "@types/prop-types" "*" + csstype "^3.0.2" + +"@typescript-eslint/parser@^5.4.2 || ^6.0.0": + version "6.21.0" + resolved "https://registry.yarnpkg.com/@typescript-eslint/parser/-/parser-6.21.0.tgz#af8fcf66feee2edc86bc5d1cf45e33b0630bf35b" + integrity sha512-tbsV1jPne5CkFQCgPBcDOt30ItF7aJoZL997JSF7MhGQqOeT3svWRYxiqlfA5RUdlHN6Fi+EI9bxqbdyAUZjYQ== + dependencies: + "@typescript-eslint/scope-manager" "6.21.0" + "@typescript-eslint/types" "6.21.0" + "@typescript-eslint/typescript-estree" "6.21.0" + "@typescript-eslint/visitor-keys" "6.21.0" + debug "^4.3.4" + +"@typescript-eslint/scope-manager@6.21.0": + version "6.21.0" + resolved "https://registry.yarnpkg.com/@typescript-eslint/scope-manager/-/scope-manager-6.21.0.tgz#ea8a9bfc8f1504a6ac5d59a6df308d3a0630a2b1" + integrity sha512-OwLUIWZJry80O99zvqXVEioyniJMa+d2GrqpUTqi5/v5D5rOrppJVBPa0yKCblcigC0/aYAzxxqQ1B+DS2RYsg== + dependencies: + "@typescript-eslint/types" "6.21.0" + "@typescript-eslint/visitor-keys" "6.21.0" + +"@typescript-eslint/types@6.21.0": + version "6.21.0" + resolved "https://registry.yarnpkg.com/@typescript-eslint/types/-/types-6.21.0.tgz#205724c5123a8fef7ecd195075fa6e85bac3436d" + integrity sha512-1kFmZ1rOm5epu9NZEZm1kckCDGj5UJEf7P1kliH4LKu/RkwpsfqqGmY2OOcUs18lSlQBKLDYBOGxRVtrMN5lpg== + +"@typescript-eslint/typescript-estree@6.21.0": + version "6.21.0" + resolved "https://registry.yarnpkg.com/@typescript-eslint/typescript-estree/-/typescript-estree-6.21.0.tgz#c47ae7901db3b8bddc3ecd73daff2d0895688c46" + integrity sha512-6npJTkZcO+y2/kr+z0hc4HwNfrrP4kNYh57ek7yCNlrBjWQ1Y0OS7jiZTkgumrvkX5HkEKXFZkkdFNkaW2wmUQ== + dependencies: + "@typescript-eslint/types" "6.21.0" + "@typescript-eslint/visitor-keys" "6.21.0" + debug "^4.3.4" + globby "^11.1.0" + is-glob "^4.0.3" + minimatch "9.0.3" + semver "^7.5.4" + ts-api-utils "^1.0.1" + +"@typescript-eslint/visitor-keys@6.21.0": + version "6.21.0" + resolved "https://registry.yarnpkg.com/@typescript-eslint/visitor-keys/-/visitor-keys-6.21.0.tgz#87a99d077aa507e20e238b11d56cc26ade45fe47" + integrity sha512-JJtkDduxLi9bivAB+cYOVMtbkqdPOhZ+ZI5LC47MIRrDV4Yn2o+ZnW10Nkmr28xRpSpdJ6Sm42Hjf2+REYXm0A== + dependencies: + "@typescript-eslint/types" "6.21.0" + eslint-visitor-keys "^3.4.1" + +"@ungap/structured-clone@^1.2.0": + version "1.2.0" + resolved "https://registry.yarnpkg.com/@ungap/structured-clone/-/structured-clone-1.2.0.tgz#756641adb587851b5ccb3e095daf27ae581c8406" + integrity sha512-zuVdFrMJiuCDQUMCzQaD6KL28MjnqqN8XnAqiEq9PNm/hCPTSGfrXCOfwj1ow4LFb/tNymJPwsNbVePc1xFqrQ== + +"@xobotyi/scrollbar-width@^1.9.5": + version "1.9.5" + resolved "https://registry.yarnpkg.com/@xobotyi/scrollbar-width/-/scrollbar-width-1.9.5.tgz#80224a6919272f405b87913ca13b92929bdf3c4d" + integrity sha512-N8tkAACJx2ww8vFMneJmaAgmjAG1tnVBZJRLRcx061tmsLRZHSEZSLuGWnwPtunsSLvSqXQ2wfp7Mgqg1I+2dQ== + +acorn-jsx@^5.3.2: + version "5.3.2" + resolved "https://registry.yarnpkg.com/acorn-jsx/-/acorn-jsx-5.3.2.tgz#7ed5bb55908b3b2f1bc55c6af1653bada7f07937" + integrity sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ== + +acorn@^8.9.0: + version "8.12.1" + resolved "https://registry.yarnpkg.com/acorn/-/acorn-8.12.1.tgz#71616bdccbe25e27a54439e0046e89ca76df2248" + integrity sha512-tcpGyI9zbizT9JbV6oYE477V6mTlXvvi0T0G3SNIYE2apm/G5huBa1+K89VGeovbg+jycCrfhl3ADxErOuO6Jg== + +ajv@^6.12.4: + version "6.12.6" + resolved 
"https://registry.yarnpkg.com/ajv/-/ajv-6.12.6.tgz#baf5a62e802b07d977034586f8c3baf5adf26df4" + integrity sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g== + dependencies: + fast-deep-equal "^3.1.1" + fast-json-stable-stringify "^2.0.0" + json-schema-traverse "^0.4.1" + uri-js "^4.2.2" + +ansi-regex@^5.0.1: + version "5.0.1" + resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-5.0.1.tgz#082cb2c89c9fe8659a311a53bd6a4dc5301db304" + integrity sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ== + +ansi-regex@^6.0.1: + version "6.0.1" + resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-6.0.1.tgz#3183e38fae9a65d7cb5e53945cd5897d0260a06a" + integrity sha512-n5M855fKb2SsfMIiFFoVrABHJC8QtHwVx+mHWP3QcEqBHYienj5dHSgjbxtC0WEZXYt4wcD6zrQElDPhFuZgfA== + +ansi-styles@^4.0.0, ansi-styles@^4.1.0: + version "4.3.0" + resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-4.3.0.tgz#edd803628ae71c04c85ae7a0906edad34b648937" + integrity sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg== + dependencies: + color-convert "^2.0.1" + +ansi-styles@^6.1.0: + version "6.2.1" + resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-6.2.1.tgz#0e62320cf99c21afff3b3012192546aacbfb05c5" + integrity sha512-bN798gFfQX+viw3R7yrGWRqnrN2oRkEkUjjl4JNn4E8GxxbjtG3FbrEIIY3l8/hrwUwIeCZvi4QuOTP4MErVug== + +any-promise@^1.0.0: + version "1.3.0" + resolved "https://registry.yarnpkg.com/any-promise/-/any-promise-1.3.0.tgz#abc6afeedcea52e809cdc0376aed3ce39635d17f" + integrity sha512-7UvmKalWRt1wgjL1RrGxoSJW/0QZFIegpeGvZG9kjp8vrRu55XTHbwnqq2GpXm9uLbcuhxm3IqX9OB4MZR1b2A== + +anymatch@~3.1.2: + version "3.1.3" + resolved "https://registry.yarnpkg.com/anymatch/-/anymatch-3.1.3.tgz#790c58b19ba1720a84205b57c618d5ad8524973e" + integrity sha512-KMReFUr0B4t+D+OBkjR3KYqvocp2XaSzO55UcB6mgQMd3KbcE+mWTyvVV7D/zsdEbNnV6acZUutkiHQXvTr1Rw== + dependencies: + normalize-path "^3.0.0" + picomatch "^2.0.4" + +arg@^5.0.2: + version "5.0.2" + resolved "https://registry.yarnpkg.com/arg/-/arg-5.0.2.tgz#c81433cc427c92c4dcf4865142dbca6f15acd59c" + integrity sha512-PYjyFOLKQ9y57JvQ6QLo8dAgNqswh8M1RMJYdQduT6xbWSgK36P/Z/v+p888pM69jMMfS8Xd8F6I1kQ/I9HUGg== + +argparse@^2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/argparse/-/argparse-2.0.1.tgz#246f50f3ca78a3240f6c997e8a9bd1eac49e4b38" + integrity sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q== + +aria-query@~5.1.3: + version "5.1.3" + resolved "https://registry.yarnpkg.com/aria-query/-/aria-query-5.1.3.tgz#19db27cd101152773631396f7a95a3b58c22c35e" + integrity sha512-R5iJ5lkuHybztUfuOAznmboyjWq8O6sqNqtK7CLOqdydi54VNbORp49mb14KbWgG1QD3JFO9hJdZ+y4KutfdOQ== + dependencies: + deep-equal "^2.0.5" + +array-buffer-byte-length@^1.0.0, array-buffer-byte-length@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/array-buffer-byte-length/-/array-buffer-byte-length-1.0.1.tgz#1e5583ec16763540a27ae52eed99ff899223568f" + integrity sha512-ahC5W1xgou+KTXix4sAO8Ki12Q+jf4i0+tmk3sC+zgcynshkHxzpXdImBehiUYKKKDwvfFiJl1tZt6ewscS1Mg== + dependencies: + call-bind "^1.0.5" + is-array-buffer "^3.0.4" + +array-includes@^3.1.6, array-includes@^3.1.7, array-includes@^3.1.8: + version "3.1.8" + resolved "https://registry.yarnpkg.com/array-includes/-/array-includes-3.1.8.tgz#5e370cbe172fdd5dd6530c1d4aadda25281ba97d" + integrity 
sha512-itaWrbYbqpGXkGhZPGUulwnhVf5Hpy1xiCFsGqyIGglbBxmG5vSjxQen3/WGOjPpNEv1RtBLKxbmVXm8HpJStQ== + dependencies: + call-bind "^1.0.7" + define-properties "^1.2.1" + es-abstract "^1.23.2" + es-object-atoms "^1.0.0" + get-intrinsic "^1.2.4" + is-string "^1.0.7" + +array-union@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/array-union/-/array-union-2.1.0.tgz#b798420adbeb1de828d84acd8a2e23d3efe85e8d" + integrity sha512-HGyxoOTYUyCM6stUe6EJgnd4EoewAI7zMdfqO+kGjnlZmBDz/cR5pf8r/cR4Wq60sL/p0IkcjUEEPwS3GFrIyw== + +array.prototype.findlast@^1.2.5: + version "1.2.5" + resolved "https://registry.yarnpkg.com/array.prototype.findlast/-/array.prototype.findlast-1.2.5.tgz#3e4fbcb30a15a7f5bf64cf2faae22d139c2e4904" + integrity sha512-CVvd6FHg1Z3POpBLxO6E6zr+rSKEQ9L6rZHAaY7lLfhKsWYUBBOuMs0e9o24oopj6H+geRCX0YJ+TJLBK2eHyQ== + dependencies: + call-bind "^1.0.7" + define-properties "^1.2.1" + es-abstract "^1.23.2" + es-errors "^1.3.0" + es-object-atoms "^1.0.0" + es-shim-unscopables "^1.0.2" + +array.prototype.findlastindex@^1.2.3: + version "1.2.5" + resolved "https://registry.yarnpkg.com/array.prototype.findlastindex/-/array.prototype.findlastindex-1.2.5.tgz#8c35a755c72908719453f87145ca011e39334d0d" + integrity sha512-zfETvRFA8o7EiNn++N5f/kaCw221hrpGsDmcpndVupkPzEc1Wuf3VgC0qby1BbHs7f5DVYjgtEU2LLh5bqeGfQ== + dependencies: + call-bind "^1.0.7" + define-properties "^1.2.1" + es-abstract "^1.23.2" + es-errors "^1.3.0" + es-object-atoms "^1.0.0" + es-shim-unscopables "^1.0.2" + +array.prototype.flat@^1.3.1, array.prototype.flat@^1.3.2: + version "1.3.2" + resolved "https://registry.yarnpkg.com/array.prototype.flat/-/array.prototype.flat-1.3.2.tgz#1476217df8cff17d72ee8f3ba06738db5b387d18" + integrity sha512-djYB+Zx2vLewY8RWlNCUdHjDXs2XOgm602S9E7P/UpHgfeHL00cRiIF+IN/G/aUJ7kGPb6yO/ErDI5V2s8iycA== + dependencies: + call-bind "^1.0.2" + define-properties "^1.2.0" + es-abstract "^1.22.1" + es-shim-unscopables "^1.0.0" + +array.prototype.flatmap@^1.3.2: + version "1.3.2" + resolved "https://registry.yarnpkg.com/array.prototype.flatmap/-/array.prototype.flatmap-1.3.2.tgz#c9a7c6831db8e719d6ce639190146c24bbd3e527" + integrity sha512-Ewyx0c9PmpcsByhSW4r+9zDU7sGjFc86qf/kKtuSCRdhfbk0SNLLkaT5qvcHnRGgc5NP/ly/y+qkXkqONX54CQ== + dependencies: + call-bind "^1.0.2" + define-properties "^1.2.0" + es-abstract "^1.22.1" + es-shim-unscopables "^1.0.0" + +array.prototype.toreversed@^1.1.2: + version "1.1.2" + resolved "https://registry.yarnpkg.com/array.prototype.toreversed/-/array.prototype.toreversed-1.1.2.tgz#b989a6bf35c4c5051e1dc0325151bf8088954eba" + integrity sha512-wwDCoT4Ck4Cz7sLtgUmzR5UV3YF5mFHUlbChCzZBQZ+0m2cl/DH3tKgvphv1nKgFsJ48oCSg6p91q2Vm0I/ZMA== + dependencies: + call-bind "^1.0.2" + define-properties "^1.2.0" + es-abstract "^1.22.1" + es-shim-unscopables "^1.0.0" + +array.prototype.tosorted@^1.1.4: + version "1.1.4" + resolved "https://registry.yarnpkg.com/array.prototype.tosorted/-/array.prototype.tosorted-1.1.4.tgz#fe954678ff53034e717ea3352a03f0b0b86f7ffc" + integrity sha512-p6Fx8B7b7ZhL/gmUsAy0D15WhvDccw3mnGNbZpi3pmeJdxtWsj2jEaI4Y6oo3XiHfzuSgPwKc04MYt6KgvC/wA== + dependencies: + call-bind "^1.0.7" + define-properties "^1.2.1" + es-abstract "^1.23.3" + es-errors "^1.3.0" + es-shim-unscopables "^1.0.2" + +arraybuffer.prototype.slice@^1.0.3: + version "1.0.3" + resolved "https://registry.yarnpkg.com/arraybuffer.prototype.slice/-/arraybuffer.prototype.slice-1.0.3.tgz#097972f4255e41bc3425e37dc3f6421cf9aefde6" + integrity 
sha512-bMxMKAjg13EBSVscxTaYA4mRc5t1UAXa2kXiGTNfZ079HIWXEkKmkgFrh/nJqamaLSrXO5H4WFFkPEaLJWbs3A== + dependencies: + array-buffer-byte-length "^1.0.1" + call-bind "^1.0.5" + define-properties "^1.2.1" + es-abstract "^1.22.3" + es-errors "^1.2.1" + get-intrinsic "^1.2.3" + is-array-buffer "^3.0.4" + is-shared-array-buffer "^1.0.2" + +ast-types-flow@^0.0.8: + version "0.0.8" + resolved "https://registry.yarnpkg.com/ast-types-flow/-/ast-types-flow-0.0.8.tgz#0a85e1c92695769ac13a428bb653e7538bea27d6" + integrity sha512-OH/2E5Fg20h2aPrbe+QL8JZQFko0YZaF+j4mnQ7BGhfavO7OpSLa8a0y9sBwomHdSbkhTS8TQNayBfnW5DwbvQ== + +autoprefixer@^10.0.1: + version "10.4.19" + resolved "https://registry.yarnpkg.com/autoprefixer/-/autoprefixer-10.4.19.tgz#ad25a856e82ee9d7898c59583c1afeb3fa65f89f" + integrity sha512-BaENR2+zBZ8xXhM4pUaKUxlVdxZ0EZhjvbopwnXmxRUfqDmwSpC2lAi/QXvx7NRdPCo1WKEcEF6mV64si1z4Ew== + dependencies: + browserslist "^4.23.0" + caniuse-lite "^1.0.30001599" + fraction.js "^4.3.7" + normalize-range "^0.1.2" + picocolors "^1.0.0" + postcss-value-parser "^4.2.0" + +available-typed-arrays@^1.0.7: + version "1.0.7" + resolved "https://registry.yarnpkg.com/available-typed-arrays/-/available-typed-arrays-1.0.7.tgz#a5cc375d6a03c2efc87a553f3e0b1522def14846" + integrity sha512-wvUjBtSGN7+7SjNpq/9M2Tg350UZD3q62IFZLbRAR1bSMlCo1ZaeW+BJ+D090e4hIIZLBcTDWe4Mh4jvUDajzQ== + dependencies: + possible-typed-array-names "^1.0.0" + +axe-core@^4.9.1: + version "4.9.1" + resolved "https://registry.yarnpkg.com/axe-core/-/axe-core-4.9.1.tgz#fcd0f4496dad09e0c899b44f6c4bb7848da912ae" + integrity sha512-QbUdXJVTpvUTHU7871ppZkdOLBeGUKBQWHkHrvN2V9IQWGMt61zf3B45BtzjxEJzYuj0JBjBZP/hmYS/R9pmAw== + +axobject-query@~3.1.1: + version "3.1.1" + resolved "https://registry.yarnpkg.com/axobject-query/-/axobject-query-3.1.1.tgz#3b6e5c6d4e43ca7ba51c5babf99d22a9c68485e1" + integrity sha512-goKlv8DZrK9hUh975fnHzhNIO4jUnFCfv/dszV5VwUGDFjI6vQ2VwoyjYjYNEbBE8AH87TduWP5uyDR1D+Iteg== + dependencies: + deep-equal "^2.0.5" + +balanced-match@^1.0.0: + version "1.0.2" + resolved "https://registry.yarnpkg.com/balanced-match/-/balanced-match-1.0.2.tgz#e83e3a7e3f300b34cb9d87f615fa0cbf357690ee" + integrity sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw== + +binary-extensions@^2.0.0: + version "2.3.0" + resolved "https://registry.yarnpkg.com/binary-extensions/-/binary-extensions-2.3.0.tgz#f6e14a97858d327252200242d4ccfe522c445522" + integrity sha512-Ceh+7ox5qe7LJuLHoY0feh3pHuUDHAcRUeyL2VYghZwfpkNIy/+8Ocg0a3UuSoYzavmylwuLWQOf3hl0jjMMIw== + +brace-expansion@^1.1.7: + version "1.1.11" + resolved "https://registry.yarnpkg.com/brace-expansion/-/brace-expansion-1.1.11.tgz#3c7fcbf529d87226f3d2f52b966ff5271eb441dd" + integrity sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA== + dependencies: + balanced-match "^1.0.0" + concat-map "0.0.1" + +brace-expansion@^2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/brace-expansion/-/brace-expansion-2.0.1.tgz#1edc459e0f0c548486ecf9fc99f2221364b9a0ae" + integrity sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA== + dependencies: + balanced-match "^1.0.0" + +braces@^3.0.3, braces@~3.0.2: + version "3.0.3" + resolved "https://registry.yarnpkg.com/braces/-/braces-3.0.3.tgz#490332f40919452272d55a8480adc0c441358789" + integrity sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA== + dependencies: + fill-range "^7.1.1" + +browserslist@^4.23.0: + 
version "4.23.2" + resolved "https://registry.yarnpkg.com/browserslist/-/browserslist-4.23.2.tgz#244fe803641f1c19c28c48c4b6ec9736eb3d32ed" + integrity sha512-qkqSyistMYdxAcw+CzbZwlBy8AGmS/eEWs+sEV5TnLRGDOL+C5M2EnH6tlZyg0YoAxGJAFKh61En9BR941GnHA== + dependencies: + caniuse-lite "^1.0.30001640" + electron-to-chromium "^1.4.820" + node-releases "^2.0.14" + update-browserslist-db "^1.1.0" + +busboy@1.6.0: + version "1.6.0" + resolved "https://registry.yarnpkg.com/busboy/-/busboy-1.6.0.tgz#966ea36a9502e43cdb9146962523b92f531f6893" + integrity sha512-8SFQbg/0hQ9xy3UNTB0YEnsNBbWfhf7RtnzpL7TkBiTBRfrQ9Fxcnz7VJsleJpyp6rVLvXiuORqjlHi5q+PYuA== + dependencies: + streamsearch "^1.1.0" + +call-bind@^1.0.2, call-bind@^1.0.5, call-bind@^1.0.6, call-bind@^1.0.7: + version "1.0.7" + resolved "https://registry.yarnpkg.com/call-bind/-/call-bind-1.0.7.tgz#06016599c40c56498c18769d2730be242b6fa3b9" + integrity sha512-GHTSNSYICQ7scH7sZ+M2rFopRoLh8t2bLSW6BbgrtLsahOIB5iyAVJf9GjWK3cYTDaMj4XdBpM1cA6pIS0Kv2w== + dependencies: + es-define-property "^1.0.0" + es-errors "^1.3.0" + function-bind "^1.1.2" + get-intrinsic "^1.2.4" + set-function-length "^1.2.1" + +callsites@^3.0.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/callsites/-/callsites-3.1.0.tgz#b3630abd8943432f54b3f0519238e33cd7df2f73" + integrity sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ== + +camelcase-css@^2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/camelcase-css/-/camelcase-css-2.0.1.tgz#ee978f6947914cc30c6b44741b6ed1df7f043fd5" + integrity sha512-QOSvevhslijgYwRx6Rv7zKdMF8lbRmx+uQGx2+vDc+KI/eBnsy9kit5aj23AgGu3pa4t9AgwbnXWqS+iOY+2aA== + +caniuse-lite@^1.0.30001579, caniuse-lite@^1.0.30001599, caniuse-lite@^1.0.30001640: + version "1.0.30001641" + resolved "https://registry.yarnpkg.com/caniuse-lite/-/caniuse-lite-1.0.30001641.tgz#3572862cd18befae3f637f2a1101cc033c6782ac" + integrity sha512-Phv5thgl67bHYo1TtMY/MurjkHhV4EDaCosezRXgZ8jzA/Ub+wjxAvbGvjoFENStinwi5kCyOYV3mi5tOGykwA== + +chalk@^4.0.0: + version "4.1.2" + resolved "https://registry.yarnpkg.com/chalk/-/chalk-4.1.2.tgz#aac4e2b7734a740867aeb16bf02aad556a1e7a01" + integrity sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA== + dependencies: + ansi-styles "^4.1.0" + supports-color "^7.1.0" + +chokidar@^3.5.3: + version "3.6.0" + resolved "https://registry.yarnpkg.com/chokidar/-/chokidar-3.6.0.tgz#197c6cc669ef2a8dc5e7b4d97ee4e092c3eb0d5b" + integrity sha512-7VT13fmjotKpGipCW9JEQAusEPE+Ei8nl6/g4FBAmIm0GOOLMua9NDDo/DWp0ZAxCr3cPq5ZpBqmPAQgDda2Pw== + dependencies: + anymatch "~3.1.2" + braces "~3.0.2" + glob-parent "~5.1.2" + is-binary-path "~2.1.0" + is-glob "~4.0.1" + normalize-path "~3.0.0" + readdirp "~3.6.0" + optionalDependencies: + fsevents "~2.3.2" + +classnames@^2.5.1: + version "2.5.1" + resolved "https://registry.yarnpkg.com/classnames/-/classnames-2.5.1.tgz#ba774c614be0f016da105c858e7159eae8e7687b" + integrity sha512-saHYOzhIQs6wy2sVxTM6bUDsQO4F50V9RQ22qBpEdCW+I+/Wmke2HOl6lS6dTpdxVhb88/I6+Hs+438c3lfUow== + +client-only@0.0.1: + version "0.0.1" + resolved "https://registry.yarnpkg.com/client-only/-/client-only-0.0.1.tgz#38bba5d403c41ab150bff64a95c85013cf73bca1" + integrity sha512-IV3Ou0jSMzZrd3pZ48nLkT9DA7Ag1pnPzaiQhpW7c3RbcqqzvzzVu+L8gfqMp/8IM2MQtSiqaCxrrcfu8I8rMA== + +color-convert@^2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/color-convert/-/color-convert-2.0.1.tgz#72d3a68d598c9bdb3af2ad1e84f21d896abd4de3" + integrity 
sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ== + dependencies: + color-name "~1.1.4" + +color-name@~1.1.4: + version "1.1.4" + resolved "https://registry.yarnpkg.com/color-name/-/color-name-1.1.4.tgz#c2a09a87acbde69543de6f63fa3995c826c536a2" + integrity sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA== + +commander@^4.0.0: + version "4.1.1" + resolved "https://registry.yarnpkg.com/commander/-/commander-4.1.1.tgz#9fd602bd936294e9e9ef46a3f4d6964044b18068" + integrity sha512-NOKm8xhkzAjzFx8B2v5OAHT+u5pRQc2UCa2Vq9jYL/31o2wi9mxBA7LIFs3sV5VSC49z6pEhfbMULvShKj26WA== + +concat-map@0.0.1: + version "0.0.1" + resolved "https://registry.yarnpkg.com/concat-map/-/concat-map-0.0.1.tgz#d8a96bd77fd68df7793a73036a3ba0d5405d477b" + integrity sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg== + +copy-to-clipboard@^3.3.1: + version "3.3.3" + resolved "https://registry.yarnpkg.com/copy-to-clipboard/-/copy-to-clipboard-3.3.3.tgz#55ac43a1db8ae639a4bd99511c148cdd1b83a1b0" + integrity sha512-2KV8NhB5JqC3ky0r9PMCAZKbUHSwtEo4CwCs0KXgruG43gX5PMqDEBbVU4OUzw2MuAWUfsuFmWvEKG5QRfSnJA== + dependencies: + toggle-selection "^1.0.6" + +cross-spawn@^7.0.0, cross-spawn@^7.0.2: + version "7.0.3" + resolved "https://registry.yarnpkg.com/cross-spawn/-/cross-spawn-7.0.3.tgz#f73a85b9d5d41d045551c177e2882d4ac85728a6" + integrity sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w== + dependencies: + path-key "^3.1.0" + shebang-command "^2.0.0" + which "^2.0.1" + +css-in-js-utils@^3.1.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/css-in-js-utils/-/css-in-js-utils-3.1.0.tgz#640ae6a33646d401fc720c54fc61c42cd76ae2bb" + integrity sha512-fJAcud6B3rRu+KHYk+Bwf+WFL2MDCJJ1XG9x137tJQ0xYxor7XziQtuGFbWNdqrvF4Tk26O3H73nfVqXt/fW1A== + dependencies: + hyphenate-style-name "^1.0.3" + +css-tree@^1.1.2: + version "1.1.3" + resolved "https://registry.yarnpkg.com/css-tree/-/css-tree-1.1.3.tgz#eb4870fb6fd7707327ec95c2ff2ab09b5e8db91d" + integrity sha512-tRpdppF7TRazZrjJ6v3stzv93qxRcSsFmW6cX0Zm2NVKpxE1WV1HblnghVv9TreireHkqI/VDEsfolRF1p6y7Q== + dependencies: + mdn-data "2.0.14" + source-map "^0.6.1" + +cssesc@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/cssesc/-/cssesc-3.0.0.tgz#37741919903b868565e1c09ea747445cd18983ee" + integrity sha512-/Tb/JcjK111nNScGob5MNtsntNM1aCNUDipB/TkwZFhyDrrE47SOx/18wF2bbjgc3ZzCSKW1T5nt5EbFoAz/Vg== + +csstype@^3.0.2, csstype@^3.1.2: + version "3.1.3" + resolved "https://registry.yarnpkg.com/csstype/-/csstype-3.1.3.tgz#d80ff294d114fb0e6ac500fbf85b60137d7eff81" + integrity sha512-M1uQkMl8rQK/szD0LNhtqxIPLpimGm8sOBwU7lLnCpSbTyY3yeU1Vc7l4KT5zT4s/yOxHH5O7tIuuLOCnLADRw== + +damerau-levenshtein@^1.0.8: + version "1.0.8" + resolved "https://registry.yarnpkg.com/damerau-levenshtein/-/damerau-levenshtein-1.0.8.tgz#b43d286ccbd36bc5b2f7ed41caf2d0aba1f8a6e7" + integrity sha512-sdQSFB7+llfUcQHUQO3+B8ERRj0Oa4w9POWMI/puGtuf7gFywGmkaLCElnudfTiKZV+NvHqL0ifzdrI8Ro7ESA== + +data-view-buffer@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/data-view-buffer/-/data-view-buffer-1.0.1.tgz#8ea6326efec17a2e42620696e671d7d5a8bc66b2" + integrity sha512-0lht7OugA5x3iJLOWFhWK/5ehONdprk0ISXqVFn/NFrDu+cuc8iADFrGQz5BnRK7LLU3JmkbXSxaqX+/mXYtUA== + dependencies: + call-bind "^1.0.6" + es-errors "^1.3.0" + is-data-view "^1.0.1" + +data-view-byte-length@^1.0.1: + version "1.0.1" + resolved 
"https://registry.yarnpkg.com/data-view-byte-length/-/data-view-byte-length-1.0.1.tgz#90721ca95ff280677eb793749fce1011347669e2" + integrity sha512-4J7wRJD3ABAzr8wP+OcIcqq2dlUKp4DVflx++hs5h5ZKydWMI6/D/fAot+yh6g2tHh8fLFTvNOaVN357NvSrOQ== + dependencies: + call-bind "^1.0.7" + es-errors "^1.3.0" + is-data-view "^1.0.1" + +data-view-byte-offset@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/data-view-byte-offset/-/data-view-byte-offset-1.0.0.tgz#5e0bbfb4828ed2d1b9b400cd8a7d119bca0ff18a" + integrity sha512-t/Ygsytq+R995EJ5PZlD4Cu56sWa8InXySaViRzw9apusqsOO2bQP+SbYzAhR0pFKoB+43lYy8rWban9JSuXnA== + dependencies: + call-bind "^1.0.6" + es-errors "^1.3.0" + is-data-view "^1.0.1" + +debug@^3.2.7: + version "3.2.7" + resolved "https://registry.yarnpkg.com/debug/-/debug-3.2.7.tgz#72580b7e9145fb39b6676f9c5e5fb100b934179a" + integrity sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ== + dependencies: + ms "^2.1.1" + +debug@^4.3.1, debug@^4.3.2, debug@^4.3.4: + version "4.3.5" + resolved "https://registry.yarnpkg.com/debug/-/debug-4.3.5.tgz#e83444eceb9fedd4a1da56d671ae2446a01a6e1e" + integrity sha512-pt0bNEmneDIvdL1Xsd9oDQ/wrQRkXDT4AUWlNZNPKvW5x/jyO9VFXkJUP07vQ2upmw5PlaITaPKc31jK13V+jg== + dependencies: + ms "2.1.2" + +deep-equal@^2.0.5: + version "2.2.3" + resolved "https://registry.yarnpkg.com/deep-equal/-/deep-equal-2.2.3.tgz#af89dafb23a396c7da3e862abc0be27cf51d56e1" + integrity sha512-ZIwpnevOurS8bpT4192sqAowWM76JDKSHYzMLty3BZGSswgq6pBaH3DhCSW5xVAZICZyKdOBPjwww5wfgT/6PA== + dependencies: + array-buffer-byte-length "^1.0.0" + call-bind "^1.0.5" + es-get-iterator "^1.1.3" + get-intrinsic "^1.2.2" + is-arguments "^1.1.1" + is-array-buffer "^3.0.2" + is-date-object "^1.0.5" + is-regex "^1.1.4" + is-shared-array-buffer "^1.0.2" + isarray "^2.0.5" + object-is "^1.1.5" + object-keys "^1.1.1" + object.assign "^4.1.4" + regexp.prototype.flags "^1.5.1" + side-channel "^1.0.4" + which-boxed-primitive "^1.0.2" + which-collection "^1.0.1" + which-typed-array "^1.1.13" + +deep-is@^0.1.3: + version "0.1.4" + resolved "https://registry.yarnpkg.com/deep-is/-/deep-is-0.1.4.tgz#a6f2dce612fadd2ef1f519b73551f17e85199831" + integrity sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ== + +define-data-property@^1.0.1, define-data-property@^1.1.4: + version "1.1.4" + resolved "https://registry.yarnpkg.com/define-data-property/-/define-data-property-1.1.4.tgz#894dc141bb7d3060ae4366f6a0107e68fbe48c5e" + integrity sha512-rBMvIzlpA8v6E+SJZoo++HAYqsLrkg7MSfIinMPFhmkorw7X+dOXVJQs+QT69zGkzMyfDnIMN2Wid1+NbL3T+A== + dependencies: + es-define-property "^1.0.0" + es-errors "^1.3.0" + gopd "^1.0.1" + +define-properties@^1.1.3, define-properties@^1.2.0, define-properties@^1.2.1: + version "1.2.1" + resolved "https://registry.yarnpkg.com/define-properties/-/define-properties-1.2.1.tgz#10781cc616eb951a80a034bafcaa7377f6af2b6c" + integrity sha512-8QmQKqEASLd5nx0U1B1okLElbUuuttJ/AnYmRXbbbGDWh6uS208EjD4Xqq/I9wK7u0v6O08XhTWnt5XtEbR6Dg== + dependencies: + define-data-property "^1.0.1" + has-property-descriptors "^1.0.0" + object-keys "^1.1.1" + +dexie-react-hooks@^1.1.7: + version "1.1.7" + resolved "https://registry.yarnpkg.com/dexie-react-hooks/-/dexie-react-hooks-1.1.7.tgz#75bd92a609a7c3dc3643e2fd21e7db5df6df923b" + integrity sha512-Lwv5W0Hk+uOW3kGnsU9GZoR1er1B7WQ5DSdonoNG+focTNeJbHW6vi6nBoX534VKI3/uwHebYzSw1fwY6a7mTw== + +dexie@^4.0.1: + version "4.0.8" + resolved 
"https://registry.yarnpkg.com/dexie/-/dexie-4.0.8.tgz#21fca70686bdaa1d86fad45b6b19316f6a084a1d" + integrity sha512-1G6cJevS17KMDK847V3OHvK2zei899GwpDiqfEXHP1ASvme6eWJmAp9AU4s1son2TeGkWmC0g3y8ezOBPnalgQ== + +didyoumean@^1.2.2: + version "1.2.2" + resolved "https://registry.yarnpkg.com/didyoumean/-/didyoumean-1.2.2.tgz#989346ffe9e839b4555ecf5666edea0d3e8ad037" + integrity sha512-gxtyfqMg7GKyhQmb056K7M3xszy/myH8w+B4RT+QXBQsvAOdc3XymqDDPHx1BgPgsdAA5SIifona89YtRATDzw== + +dir-glob@^3.0.1: + version "3.0.1" + resolved "https://registry.yarnpkg.com/dir-glob/-/dir-glob-3.0.1.tgz#56dbf73d992a4a93ba1584f4534063fd2e41717f" + integrity sha512-WkrWp9GR4KXfKGYzOLmTuGVi1UWFfws377n9cc55/tb6DuqyF6pcQ5AbiHEshaDpY9v6oaSr2XCDidGmMwdzIA== + dependencies: + path-type "^4.0.0" + +dlv@^1.1.3: + version "1.1.3" + resolved "https://registry.yarnpkg.com/dlv/-/dlv-1.1.3.tgz#5c198a8a11453596e751494d49874bc7732f2e79" + integrity sha512-+HlytyjlPKnIG8XuRG8WvmBP8xs8P71y+SKKS6ZXWoEgLuePxtDoUEiH7WkdePWrQ5JBpE6aoVqfZfJUQkjXwA== + +doctrine@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/doctrine/-/doctrine-2.1.0.tgz#5cd01fc101621b42c4cd7f5d1a66243716d3f39d" + integrity sha512-35mSku4ZXK0vfCuHEDAwt55dg2jNajHZ1odvF+8SSr82EsZY4QmXfuWso8oEd8zRhVObSN18aM0CjSdoBX7zIw== + dependencies: + esutils "^2.0.2" + +doctrine@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/doctrine/-/doctrine-3.0.0.tgz#addebead72a6574db783639dc87a121773973961" + integrity sha512-yS+Q5i3hBf7GBkd4KG8a7eBNNWNGLTaEwwYWUijIYM7zrlYDM0BFXHjjPWlWZ1Rg7UaddZeIDmi9jF3HmqiQ2w== + dependencies: + esutils "^2.0.2" + +eastasianwidth@^0.2.0: + version "0.2.0" + resolved "https://registry.yarnpkg.com/eastasianwidth/-/eastasianwidth-0.2.0.tgz#696ce2ec0aa0e6ea93a397ffcf24aa7840c827cb" + integrity sha512-I88TYZWc9XiYHRQ4/3c5rjjfgkjhLyW2luGIheGERbNQ6OY7yTybanSpDXZa8y7VUP9YmDcYa+eyq4ca7iLqWA== + +electron-to-chromium@^1.4.820: + version "1.4.823" + resolved "https://registry.yarnpkg.com/electron-to-chromium/-/electron-to-chromium-1.4.823.tgz#38587f7aa55bed14930f04091dfc65c39a3d8bd7" + integrity sha512-4h+oPeAiGQOHFyUJOqpoEcPj/xxlicxBzOErVeYVMMmAiXUXsGpsFd0QXBMaUUbnD8hhSfLf9uw+MlsoIA7j5w== + +emoji-regex@^8.0.0: + version "8.0.0" + resolved "https://registry.yarnpkg.com/emoji-regex/-/emoji-regex-8.0.0.tgz#e818fd69ce5ccfcb404594f842963bf53164cc37" + integrity sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A== + +emoji-regex@^9.2.2: + version "9.2.2" + resolved "https://registry.yarnpkg.com/emoji-regex/-/emoji-regex-9.2.2.tgz#840c8803b0d8047f4ff0cf963176b32d4ef3ed72" + integrity sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg== + +enhanced-resolve@^5.12.0: + version "5.17.0" + resolved "https://registry.yarnpkg.com/enhanced-resolve/-/enhanced-resolve-5.17.0.tgz#d037603789dd9555b89aaec7eb78845c49089bc5" + integrity sha512-dwDPwZL0dmye8Txp2gzFmA6sxALaSvdRDjPH0viLcKrtlOL3tw62nWWweVD1SdILDTJrbrL6tdWVN58Wo6U3eA== + dependencies: + graceful-fs "^4.2.4" + tapable "^2.2.0" + +error-stack-parser@^2.0.6: + version "2.1.4" + resolved "https://registry.yarnpkg.com/error-stack-parser/-/error-stack-parser-2.1.4.tgz#229cb01cdbfa84440bfa91876285b94680188286" + integrity sha512-Sk5V6wVazPhq5MhpO+AUxJn5x7XSXGl1R93Vn7i+zS15KDVxQijejNCrz8340/2bgLBjR9GtEG8ZVKONDjcqGQ== + dependencies: + stackframe "^1.3.4" + +es-abstract@^1.17.5, es-abstract@^1.22.1, es-abstract@^1.22.3, es-abstract@^1.23.0, es-abstract@^1.23.1, es-abstract@^1.23.2, es-abstract@^1.23.3: + version "1.23.3" + 
resolved "https://registry.yarnpkg.com/es-abstract/-/es-abstract-1.23.3.tgz#8f0c5a35cd215312573c5a27c87dfd6c881a0aa0" + integrity sha512-e+HfNH61Bj1X9/jLc5v1owaLYuHdeHHSQlkhCBiTK8rBvKaULl/beGMxwrMXjpYrv4pz22BlY570vVePA2ho4A== + dependencies: + array-buffer-byte-length "^1.0.1" + arraybuffer.prototype.slice "^1.0.3" + available-typed-arrays "^1.0.7" + call-bind "^1.0.7" + data-view-buffer "^1.0.1" + data-view-byte-length "^1.0.1" + data-view-byte-offset "^1.0.0" + es-define-property "^1.0.0" + es-errors "^1.3.0" + es-object-atoms "^1.0.0" + es-set-tostringtag "^2.0.3" + es-to-primitive "^1.2.1" + function.prototype.name "^1.1.6" + get-intrinsic "^1.2.4" + get-symbol-description "^1.0.2" + globalthis "^1.0.3" + gopd "^1.0.1" + has-property-descriptors "^1.0.2" + has-proto "^1.0.3" + has-symbols "^1.0.3" + hasown "^2.0.2" + internal-slot "^1.0.7" + is-array-buffer "^3.0.4" + is-callable "^1.2.7" + is-data-view "^1.0.1" + is-negative-zero "^2.0.3" + is-regex "^1.1.4" + is-shared-array-buffer "^1.0.3" + is-string "^1.0.7" + is-typed-array "^1.1.13" + is-weakref "^1.0.2" + object-inspect "^1.13.1" + object-keys "^1.1.1" + object.assign "^4.1.5" + regexp.prototype.flags "^1.5.2" + safe-array-concat "^1.1.2" + safe-regex-test "^1.0.3" + string.prototype.trim "^1.2.9" + string.prototype.trimend "^1.0.8" + string.prototype.trimstart "^1.0.8" + typed-array-buffer "^1.0.2" + typed-array-byte-length "^1.0.1" + typed-array-byte-offset "^1.0.2" + typed-array-length "^1.0.6" + unbox-primitive "^1.0.2" + which-typed-array "^1.1.15" + +es-define-property@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/es-define-property/-/es-define-property-1.0.0.tgz#c7faefbdff8b2696cf5f46921edfb77cc4ba3845" + integrity sha512-jxayLKShrEqqzJ0eumQbVhTYQM27CfT1T35+gCgDFoL82JLsXqTJ76zv6A0YLOgEnLUMvLzsDsGIrl8NFpT2gQ== + dependencies: + get-intrinsic "^1.2.4" + +es-errors@^1.2.1, es-errors@^1.3.0: + version "1.3.0" + resolved "https://registry.yarnpkg.com/es-errors/-/es-errors-1.3.0.tgz#05f75a25dab98e4fb1dcd5e1472c0546d5057c8f" + integrity sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw== + +es-get-iterator@^1.1.3: + version "1.1.3" + resolved "https://registry.yarnpkg.com/es-get-iterator/-/es-get-iterator-1.1.3.tgz#3ef87523c5d464d41084b2c3c9c214f1199763d6" + integrity sha512-sPZmqHBe6JIiTfN5q2pEi//TwxmAFHwj/XEuYjTuse78i8KxaqMTTzxPoFKuzRpDpTJ+0NAbpfenkmH2rePtuw== + dependencies: + call-bind "^1.0.2" + get-intrinsic "^1.1.3" + has-symbols "^1.0.3" + is-arguments "^1.1.1" + is-map "^2.0.2" + is-set "^2.0.2" + is-string "^1.0.7" + isarray "^2.0.5" + stop-iteration-iterator "^1.0.0" + +es-iterator-helpers@^1.0.19: + version "1.0.19" + resolved "https://registry.yarnpkg.com/es-iterator-helpers/-/es-iterator-helpers-1.0.19.tgz#117003d0e5fec237b4b5c08aded722e0c6d50ca8" + integrity sha512-zoMwbCcH5hwUkKJkT8kDIBZSz9I6mVG//+lDCinLCGov4+r7NIy0ld8o03M0cJxl2spVf6ESYVS6/gpIfq1FFw== + dependencies: + call-bind "^1.0.7" + define-properties "^1.2.1" + es-abstract "^1.23.3" + es-errors "^1.3.0" + es-set-tostringtag "^2.0.3" + function-bind "^1.1.2" + get-intrinsic "^1.2.4" + globalthis "^1.0.3" + has-property-descriptors "^1.0.2" + has-proto "^1.0.3" + has-symbols "^1.0.3" + internal-slot "^1.0.7" + iterator.prototype "^1.1.2" + safe-array-concat "^1.1.2" + +es-object-atoms@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/es-object-atoms/-/es-object-atoms-1.0.0.tgz#ddb55cd47ac2e240701260bc2a8e31ecb643d941" + integrity 
sha512-MZ4iQ6JwHOBQjahnjwaC1ZtIBH+2ohjamzAO3oaHcXYup7qxjF2fixyH+Q71voWHeOkI2q/TnJao/KfXYIZWbw== + dependencies: + es-errors "^1.3.0" + +es-set-tostringtag@^2.0.3: + version "2.0.3" + resolved "https://registry.yarnpkg.com/es-set-tostringtag/-/es-set-tostringtag-2.0.3.tgz#8bb60f0a440c2e4281962428438d58545af39777" + integrity sha512-3T8uNMC3OQTHkFUsFq8r/BwAXLHvU/9O9mE0fBc/MY5iq/8H7ncvO947LmYA6ldWw9Uh8Yhf25zu6n7nML5QWQ== + dependencies: + get-intrinsic "^1.2.4" + has-tostringtag "^1.0.2" + hasown "^2.0.1" + +es-shim-unscopables@^1.0.0, es-shim-unscopables@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/es-shim-unscopables/-/es-shim-unscopables-1.0.2.tgz#1f6942e71ecc7835ed1c8a83006d8771a63a3763" + integrity sha512-J3yBRXCzDu4ULnQwxyToo/OjdMx6akgVC7K6few0a7F/0wLtmKKN7I73AH5T2836UuXRqN7Qg+IIUw/+YJksRw== + dependencies: + hasown "^2.0.0" + +es-to-primitive@^1.2.1: + version "1.2.1" + resolved "https://registry.yarnpkg.com/es-to-primitive/-/es-to-primitive-1.2.1.tgz#e55cd4c9cdc188bcefb03b366c736323fc5c898a" + integrity sha512-QCOllgZJtaUo9miYBcLChTUaHNjJF3PYs1VidD7AwiEj1kYxKeQTctLAezAOH5ZKRH0g2IgPn6KwB4IT8iRpvA== + dependencies: + is-callable "^1.1.4" + is-date-object "^1.0.1" + is-symbol "^1.0.2" + +escalade@^3.1.2: + version "3.1.2" + resolved "https://registry.yarnpkg.com/escalade/-/escalade-3.1.2.tgz#54076e9ab29ea5bf3d8f1ed62acffbb88272df27" + integrity sha512-ErCHMCae19vR8vQGe50xIsVomy19rg6gFu3+r3jkEO46suLMWBksvVyoGgQV+jOfl84ZSOSlmv6Gxa89PmTGmA== + +escape-string-regexp@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz#14ba83a5d373e3d311e5afca29cf5bfad965bf34" + integrity sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA== + +eslint-config-next@14.1.4: + version "14.1.4" + resolved "https://registry.yarnpkg.com/eslint-config-next/-/eslint-config-next-14.1.4.tgz#22f2ba4c0993e991249d863656a64c204bae542c" + integrity sha512-cihIahbhYAWwXJwZkAaRPpUi5t9aOi/HdfWXOjZeUOqNWXHD8X22kd1KG58Dc3MVaRx3HoR/oMGk2ltcrqDn8g== + dependencies: + "@next/eslint-plugin-next" "14.1.4" + "@rushstack/eslint-patch" "^1.3.3" + "@typescript-eslint/parser" "^5.4.2 || ^6.0.0" + eslint-import-resolver-node "^0.3.6" + eslint-import-resolver-typescript "^3.5.2" + eslint-plugin-import "^2.28.1" + eslint-plugin-jsx-a11y "^6.7.1" + eslint-plugin-react "^7.33.2" + eslint-plugin-react-hooks "^4.5.0 || 5.0.0-canary-7118f5dd7-20230705" + +eslint-import-resolver-node@^0.3.6, eslint-import-resolver-node@^0.3.9: + version "0.3.9" + resolved "https://registry.yarnpkg.com/eslint-import-resolver-node/-/eslint-import-resolver-node-0.3.9.tgz#d4eaac52b8a2e7c3cd1903eb00f7e053356118ac" + integrity sha512-WFj2isz22JahUv+B788TlO3N6zL3nNJGU8CcZbPZvVEkBPaJdCV4vy5wyghty5ROFbCRnm132v8BScu5/1BQ8g== + dependencies: + debug "^3.2.7" + is-core-module "^2.13.0" + resolve "^1.22.4" + +eslint-import-resolver-typescript@^3.5.2: + version "3.6.1" + resolved "https://registry.yarnpkg.com/eslint-import-resolver-typescript/-/eslint-import-resolver-typescript-3.6.1.tgz#7b983680edd3f1c5bce1a5829ae0bc2d57fe9efa" + integrity sha512-xgdptdoi5W3niYeuQxKmzVDTATvLYqhpwmykwsh7f6HIOStGWEIL9iqZgQDF9u9OEzrRwR8no5q2VT+bjAujTg== + dependencies: + debug "^4.3.4" + enhanced-resolve "^5.12.0" + eslint-module-utils "^2.7.4" + fast-glob "^3.3.1" + get-tsconfig "^4.5.0" + is-core-module "^2.11.0" + is-glob "^4.0.3" + +eslint-module-utils@^2.7.4, eslint-module-utils@^2.8.0: + version "2.8.1" + resolved 
"https://registry.yarnpkg.com/eslint-module-utils/-/eslint-module-utils-2.8.1.tgz#52f2404300c3bd33deece9d7372fb337cc1d7c34" + integrity sha512-rXDXR3h7cs7dy9RNpUlQf80nX31XWJEyGq1tRMo+6GsO5VmTe4UTwtmonAD4ZkAsrfMVDA2wlGJ3790Ys+D49Q== + dependencies: + debug "^3.2.7" + +eslint-plugin-import@^2.28.1: + version "2.29.1" + resolved "https://registry.yarnpkg.com/eslint-plugin-import/-/eslint-plugin-import-2.29.1.tgz#d45b37b5ef5901d639c15270d74d46d161150643" + integrity sha512-BbPC0cuExzhiMo4Ff1BTVwHpjjv28C5R+btTOGaCRC7UEz801up0JadwkeSk5Ued6TG34uaczuVuH6qyy5YUxw== + dependencies: + array-includes "^3.1.7" + array.prototype.findlastindex "^1.2.3" + array.prototype.flat "^1.3.2" + array.prototype.flatmap "^1.3.2" + debug "^3.2.7" + doctrine "^2.1.0" + eslint-import-resolver-node "^0.3.9" + eslint-module-utils "^2.8.0" + hasown "^2.0.0" + is-core-module "^2.13.1" + is-glob "^4.0.3" + minimatch "^3.1.2" + object.fromentries "^2.0.7" + object.groupby "^1.0.1" + object.values "^1.1.7" + semver "^6.3.1" + tsconfig-paths "^3.15.0" + +eslint-plugin-jsx-a11y@^6.7.1: + version "6.9.0" + resolved "https://registry.yarnpkg.com/eslint-plugin-jsx-a11y/-/eslint-plugin-jsx-a11y-6.9.0.tgz#67ab8ff460d4d3d6a0b4a570e9c1670a0a8245c8" + integrity sha512-nOFOCaJG2pYqORjK19lqPqxMO/JpvdCZdPtNdxY3kvom3jTvkAbOvQvD8wuD0G8BYR0IGAGYDlzqWJOh/ybn2g== + dependencies: + aria-query "~5.1.3" + array-includes "^3.1.8" + array.prototype.flatmap "^1.3.2" + ast-types-flow "^0.0.8" + axe-core "^4.9.1" + axobject-query "~3.1.1" + damerau-levenshtein "^1.0.8" + emoji-regex "^9.2.2" + es-iterator-helpers "^1.0.19" + hasown "^2.0.2" + jsx-ast-utils "^3.3.5" + language-tags "^1.0.9" + minimatch "^3.1.2" + object.fromentries "^2.0.8" + safe-regex-test "^1.0.3" + string.prototype.includes "^2.0.0" + +"eslint-plugin-react-hooks@^4.5.0 || 5.0.0-canary-7118f5dd7-20230705": + version "4.6.2" + resolved "https://registry.yarnpkg.com/eslint-plugin-react-hooks/-/eslint-plugin-react-hooks-4.6.2.tgz#c829eb06c0e6f484b3fbb85a97e57784f328c596" + integrity sha512-QzliNJq4GinDBcD8gPB5v0wh6g8q3SUi6EFF0x8N/BL9PoVs0atuGc47ozMRyOWAKdwaZ5OnbOEa3WR+dSGKuQ== + +eslint-plugin-react@^7.33.2: + version "7.34.3" + resolved "https://registry.yarnpkg.com/eslint-plugin-react/-/eslint-plugin-react-7.34.3.tgz#9965f27bd1250a787b5d4cfcc765e5a5d58dcb7b" + integrity sha512-aoW4MV891jkUulwDApQbPYTVZmeuSyFrudpbTAQuj5Fv8VL+o6df2xIGpw8B0hPjAaih1/Fb0om9grCdyFYemA== + dependencies: + array-includes "^3.1.8" + array.prototype.findlast "^1.2.5" + array.prototype.flatmap "^1.3.2" + array.prototype.toreversed "^1.1.2" + array.prototype.tosorted "^1.1.4" + doctrine "^2.1.0" + es-iterator-helpers "^1.0.19" + estraverse "^5.3.0" + jsx-ast-utils "^2.4.1 || ^3.0.0" + minimatch "^3.1.2" + object.entries "^1.1.8" + object.fromentries "^2.0.8" + object.hasown "^1.1.4" + object.values "^1.2.0" + prop-types "^15.8.1" + resolve "^2.0.0-next.5" + semver "^6.3.1" + string.prototype.matchall "^4.0.11" + +eslint-scope@^7.2.2: + version "7.2.2" + resolved "https://registry.yarnpkg.com/eslint-scope/-/eslint-scope-7.2.2.tgz#deb4f92563390f32006894af62a22dba1c46423f" + integrity sha512-dOt21O7lTMhDM+X9mB4GX+DZrZtCUJPL/wlcTqxyrx5IvO0IYtILdtrQGQp+8n5S0gwSVmOf9NQrjMOgfQZlIg== + dependencies: + esrecurse "^4.3.0" + estraverse "^5.2.0" + +eslint-visitor-keys@^3.3.0, eslint-visitor-keys@^3.4.1, eslint-visitor-keys@^3.4.3: + version "3.4.3" + resolved "https://registry.yarnpkg.com/eslint-visitor-keys/-/eslint-visitor-keys-3.4.3.tgz#0cd72fe8550e3c2eae156a96a4dddcd1c8ac5800" + integrity 
sha512-wpc+LXeiyiisxPlEkUzU6svyS1frIO3Mgxj1fdy7Pm8Ygzguax2N3Fa/D/ag1WqbOprdI+uY6wMUl8/a2G+iag== + +eslint@^8: + version "8.57.0" + resolved "https://registry.yarnpkg.com/eslint/-/eslint-8.57.0.tgz#c786a6fd0e0b68941aaf624596fb987089195668" + integrity sha512-dZ6+mexnaTIbSBZWgou51U6OmzIhYM2VcNdtiTtI7qPNZm35Akpr0f6vtw3w1Kmn5PYo+tZVfh13WrhpS6oLqQ== + dependencies: + "@eslint-community/eslint-utils" "^4.2.0" + "@eslint-community/regexpp" "^4.6.1" + "@eslint/eslintrc" "^2.1.4" + "@eslint/js" "8.57.0" + "@humanwhocodes/config-array" "^0.11.14" + "@humanwhocodes/module-importer" "^1.0.1" + "@nodelib/fs.walk" "^1.2.8" + "@ungap/structured-clone" "^1.2.0" + ajv "^6.12.4" + chalk "^4.0.0" + cross-spawn "^7.0.2" + debug "^4.3.2" + doctrine "^3.0.0" + escape-string-regexp "^4.0.0" + eslint-scope "^7.2.2" + eslint-visitor-keys "^3.4.3" + espree "^9.6.1" + esquery "^1.4.2" + esutils "^2.0.2" + fast-deep-equal "^3.1.3" + file-entry-cache "^6.0.1" + find-up "^5.0.0" + glob-parent "^6.0.2" + globals "^13.19.0" + graphemer "^1.4.0" + ignore "^5.2.0" + imurmurhash "^0.1.4" + is-glob "^4.0.0" + is-path-inside "^3.0.3" + js-yaml "^4.1.0" + json-stable-stringify-without-jsonify "^1.0.1" + levn "^0.4.1" + lodash.merge "^4.6.2" + minimatch "^3.1.2" + natural-compare "^1.4.0" + optionator "^0.9.3" + strip-ansi "^6.0.1" + text-table "^0.2.0" + +espree@^9.6.0, espree@^9.6.1: + version "9.6.1" + resolved "https://registry.yarnpkg.com/espree/-/espree-9.6.1.tgz#a2a17b8e434690a5432f2f8018ce71d331a48c6f" + integrity sha512-oruZaFkjorTpF32kDSI5/75ViwGeZginGGy2NoOSg3Q9bnwlnmDm4HLnkl0RE3n+njDXR037aY1+x58Z/zFdwQ== + dependencies: + acorn "^8.9.0" + acorn-jsx "^5.3.2" + eslint-visitor-keys "^3.4.1" + +esquery@^1.4.2: + version "1.6.0" + resolved "https://registry.yarnpkg.com/esquery/-/esquery-1.6.0.tgz#91419234f804d852a82dceec3e16cdc22cf9dae7" + integrity sha512-ca9pw9fomFcKPvFLXhBKUK90ZvGibiGOvRJNbjljY7s7uq/5YO4BOzcYtJqExdx99rF6aAcnRxHmcUHcz6sQsg== + dependencies: + estraverse "^5.1.0" + +esrecurse@^4.3.0: + version "4.3.0" + resolved "https://registry.yarnpkg.com/esrecurse/-/esrecurse-4.3.0.tgz#7ad7964d679abb28bee72cec63758b1c5d2c9921" + integrity sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag== + dependencies: + estraverse "^5.2.0" + +estraverse@^5.1.0, estraverse@^5.2.0, estraverse@^5.3.0: + version "5.3.0" + resolved "https://registry.yarnpkg.com/estraverse/-/estraverse-5.3.0.tgz#2eea5290702f26ab8fe5370370ff86c965d21123" + integrity sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA== + +esutils@^2.0.2: + version "2.0.3" + resolved "https://registry.yarnpkg.com/esutils/-/esutils-2.0.3.tgz#74d2eb4de0b8da1293711910d50775b9b710ef64" + integrity sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g== + +fast-deep-equal@^3.1.1, fast-deep-equal@^3.1.3: + version "3.1.3" + resolved "https://registry.yarnpkg.com/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz#3a7d56b559d6cbc3eb512325244e619a65c6c525" + integrity sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q== + +fast-glob@^3.2.9, fast-glob@^3.3.0, fast-glob@^3.3.1: + version "3.3.2" + resolved "https://registry.yarnpkg.com/fast-glob/-/fast-glob-3.3.2.tgz#a904501e57cfdd2ffcded45e99a54fef55e46129" + integrity sha512-oX2ruAFQwf/Orj8m737Y5adxDQO0LAB7/S5MnxCdTNDd4p6BsyIVsv9JQsATbTSq8KHRpLwIHbVlUNatxd+1Ow== + dependencies: + "@nodelib/fs.stat" "^2.0.2" + "@nodelib/fs.walk" "^1.2.3" + glob-parent "^5.1.2" + merge2 
"^1.3.0" + micromatch "^4.0.4" + +fast-json-stable-stringify@^2.0.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz#874bf69c6f404c2b5d99c481341399fd55892633" + integrity sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw== + +fast-levenshtein@^2.0.6: + version "2.0.6" + resolved "https://registry.yarnpkg.com/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz#3d8a5c66883a16a30ca8643e851f19baa7797917" + integrity sha512-DCXu6Ifhqcks7TZKY3Hxp3y6qphY5SJZmrWMDrKcERSOXWQdMhU9Ig/PYrzyw/ul9jOIyh0N4M0tbC5hodg8dw== + +fast-shallow-equal@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/fast-shallow-equal/-/fast-shallow-equal-1.0.0.tgz#d4dcaf6472440dcefa6f88b98e3251e27f25628b" + integrity sha512-HPtaa38cPgWvaCFmRNhlc6NG7pv6NUHqjPgVAkWGoB9mQMwYB27/K0CvOM5Czy+qpT3e8XJ6Q4aPAnzpNpzNaw== + +fastest-stable-stringify@^2.0.2: + version "2.0.2" + resolved "https://registry.yarnpkg.com/fastest-stable-stringify/-/fastest-stable-stringify-2.0.2.tgz#3757a6774f6ec8de40c4e86ec28ea02417214c76" + integrity sha512-bijHueCGd0LqqNK9b5oCMHc0MluJAx0cwqASgbWMvkO01lCYgIhacVRLcaDz3QnyYIRNJRDwMb41VuT6pHJ91Q== + +fastq@^1.6.0: + version "1.17.1" + resolved "https://registry.yarnpkg.com/fastq/-/fastq-1.17.1.tgz#2a523f07a4e7b1e81a42b91b8bf2254107753b47" + integrity sha512-sRVD3lWVIXWg6By68ZN7vho9a1pQcN/WBFaAAsDDFzlJjvoGx0P8z7V1t72grFJfJhu3YPZBuu25f7Kaw2jN1w== + dependencies: + reusify "^1.0.4" + +file-entry-cache@^6.0.1: + version "6.0.1" + resolved "https://registry.yarnpkg.com/file-entry-cache/-/file-entry-cache-6.0.1.tgz#211b2dd9659cb0394b073e7323ac3c933d522027" + integrity sha512-7Gps/XWymbLk2QLYK4NzpMOrYjMhdIxXuIvy2QBsLE6ljuodKvdkWs/cpyJJ3CVIVpH0Oi1Hvg1ovbMzLdFBBg== + dependencies: + flat-cache "^3.0.4" + +fill-range@^7.1.1: + version "7.1.1" + resolved "https://registry.yarnpkg.com/fill-range/-/fill-range-7.1.1.tgz#44265d3cac07e3ea7dc247516380643754a05292" + integrity sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg== + dependencies: + to-regex-range "^5.0.1" + +find-up@^5.0.0: + version "5.0.0" + resolved "https://registry.yarnpkg.com/find-up/-/find-up-5.0.0.tgz#4c92819ecb7083561e4f4a240a86be5198f536fc" + integrity sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng== + dependencies: + locate-path "^6.0.0" + path-exists "^4.0.0" + +flat-cache@^3.0.4: + version "3.2.0" + resolved "https://registry.yarnpkg.com/flat-cache/-/flat-cache-3.2.0.tgz#2c0c2d5040c99b1632771a9d105725c0115363ee" + integrity sha512-CYcENa+FtcUKLmhhqyctpclsq7QF38pKjZHsGNiSQF5r4FtoKDWabFDl3hzaEQMvT1LHEysw5twgLvpYYb4vbw== + dependencies: + flatted "^3.2.9" + keyv "^4.5.3" + rimraf "^3.0.2" + +flatted@^3.2.9: + version "3.3.1" + resolved "https://registry.yarnpkg.com/flatted/-/flatted-3.3.1.tgz#21db470729a6734d4997002f439cb308987f567a" + integrity sha512-X8cqMLLie7KsNUDSdzeN8FYK9rEt4Dt67OsG/DNGnYTSDBG4uFAJFBnUeiV+zCVAvwFy56IjM9sH51jVaEhNxw== + +for-each@^0.3.3: + version "0.3.3" + resolved "https://registry.yarnpkg.com/for-each/-/for-each-0.3.3.tgz#69b447e88a0a5d32c3e7084f3f1710034b21376e" + integrity sha512-jqYfLp7mo9vIyQf8ykW2v7A+2N4QjeCeI5+Dz9XraiO1ign81wjiH7Fb9vSOWvQfNtmSa4H2RoQTrrXivdUZmw== + dependencies: + is-callable "^1.1.3" + +foreground-child@^3.1.0: + version "3.2.1" + resolved "https://registry.yarnpkg.com/foreground-child/-/foreground-child-3.2.1.tgz#767004ccf3a5b30df39bed90718bab43fe0a59f7" + integrity 
sha512-PXUUyLqrR2XCWICfv6ukppP96sdFwWbNEnfEMt7jNsISjMsvaLNinAHNDYyvkyU+SZG2BTSbT5NjG+vZslfGTA== + dependencies: + cross-spawn "^7.0.0" + signal-exit "^4.0.1" + +fraction.js@^4.3.7: + version "4.3.7" + resolved "https://registry.yarnpkg.com/fraction.js/-/fraction.js-4.3.7.tgz#06ca0085157e42fda7f9e726e79fefc4068840f7" + integrity sha512-ZsDfxO51wGAXREY55a7la9LScWpwv9RxIrYABrlvOFBlH/ShPnrtsXeuUIfXKKOVicNxQ+o8JTbJvjS4M89yew== + +framer-motion@^11.2.0: + version "11.3.0" + resolved "https://registry.yarnpkg.com/framer-motion/-/framer-motion-11.3.0.tgz#dd2fe041577c520b2bf1b136b3ed31f866308f54" + integrity sha512-hjYjMUQaWuqilwRr5kC0CunHZFVMtKWHy/IdL/LPRBD0C491DKTvYwQRJ5qRXEAOT+Rth7Vi4XBe4TA4bFOn3A== + dependencies: + tslib "^2.4.0" + +fs.realpath@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/fs.realpath/-/fs.realpath-1.0.0.tgz#1504ad2523158caa40db4a2787cb01411994ea4f" + integrity sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw== + +fsevents@~2.3.2: + version "2.3.3" + resolved "https://registry.yarnpkg.com/fsevents/-/fsevents-2.3.3.tgz#cac6407785d03675a2a5e1a5305c697b347d90d6" + integrity sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw== + +function-bind@^1.1.2: + version "1.1.2" + resolved "https://registry.yarnpkg.com/function-bind/-/function-bind-1.1.2.tgz#2c02d864d97f3ea6c8830c464cbd11ab6eab7a1c" + integrity sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA== + +function.prototype.name@^1.1.5, function.prototype.name@^1.1.6: + version "1.1.6" + resolved "https://registry.yarnpkg.com/function.prototype.name/-/function.prototype.name-1.1.6.tgz#cdf315b7d90ee77a4c6ee216c3c3362da07533fd" + integrity sha512-Z5kx79swU5P27WEayXM1tBi5Ze/lbIyiNgU3qyXUOf9b2rgXYyF9Dy9Cx+IQv/Lc8WCG6L82zwUPpSS9hGehIg== + dependencies: + call-bind "^1.0.2" + define-properties "^1.2.0" + es-abstract "^1.22.1" + functions-have-names "^1.2.3" + +functions-have-names@^1.2.3: + version "1.2.3" + resolved "https://registry.yarnpkg.com/functions-have-names/-/functions-have-names-1.2.3.tgz#0404fe4ee2ba2f607f0e0ec3c80bae994133b834" + integrity sha512-xckBUXyTIqT97tq2x2AMb+g163b5JFysYk0x4qxNFwbfQkmNZoiRHb6sPzI9/QV33WeuvVYBUIiD4NzNIyqaRQ== + +get-intrinsic@^1.1.3, get-intrinsic@^1.2.1, get-intrinsic@^1.2.2, get-intrinsic@^1.2.3, get-intrinsic@^1.2.4: + version "1.2.4" + resolved "https://registry.yarnpkg.com/get-intrinsic/-/get-intrinsic-1.2.4.tgz#e385f5a4b5227d449c3eabbad05494ef0abbeadd" + integrity sha512-5uYhsJH8VJBTv7oslg4BznJYhDoRI6waYCxMmCdnTrcCrHA/fCFKoTFz2JKKE0HdDFUF7/oQuhzumXJK7paBRQ== + dependencies: + es-errors "^1.3.0" + function-bind "^1.1.2" + has-proto "^1.0.1" + has-symbols "^1.0.3" + hasown "^2.0.0" + +get-symbol-description@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/get-symbol-description/-/get-symbol-description-1.0.2.tgz#533744d5aa20aca4e079c8e5daf7fd44202821f5" + integrity sha512-g0QYk1dZBxGwk+Ngc+ltRH2IBp2f7zBkBMBJZCDerh6EhlhSR6+9irMCuT/09zD6qkarHUSn529sK/yL4S27mg== + dependencies: + call-bind "^1.0.5" + es-errors "^1.3.0" + get-intrinsic "^1.2.4" + +get-tsconfig@^4.5.0: + version "4.7.5" + resolved "https://registry.yarnpkg.com/get-tsconfig/-/get-tsconfig-4.7.5.tgz#5e012498579e9a6947511ed0cd403272c7acbbaf" + integrity sha512-ZCuZCnlqNzjb4QprAzXKdpp/gh6KTxSJuw3IBsPnV/7fV4NxC9ckB+vPTt8w7fJA0TaSD7c55BR47JD6MEDyDw== + dependencies: + resolve-pkg-maps "^1.0.0" + +glob-parent@^5.1.2, glob-parent@~5.1.2: + version "5.1.2" + resolved 
"https://registry.yarnpkg.com/glob-parent/-/glob-parent-5.1.2.tgz#869832c58034fe68a4093c17dc15e8340d8401c4" + integrity sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow== + dependencies: + is-glob "^4.0.1" + +glob-parent@^6.0.2: + version "6.0.2" + resolved "https://registry.yarnpkg.com/glob-parent/-/glob-parent-6.0.2.tgz#6d237d99083950c79290f24c7642a3de9a28f9e3" + integrity sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A== + dependencies: + is-glob "^4.0.3" + +glob@10.3.10: + version "10.3.10" + resolved "https://registry.yarnpkg.com/glob/-/glob-10.3.10.tgz#0351ebb809fd187fe421ab96af83d3a70715df4b" + integrity sha512-fa46+tv1Ak0UPK1TOy/pZrIybNNt4HCv7SDzwyfiOZkvZLEbjsZkJBPtDHVshZjbecAoAGSC20MjLDG/qr679g== + dependencies: + foreground-child "^3.1.0" + jackspeak "^2.3.5" + minimatch "^9.0.1" + minipass "^5.0.0 || ^6.0.2 || ^7.0.0" + path-scurry "^1.10.1" + +glob@^10.3.10: + version "10.4.5" + resolved "https://registry.yarnpkg.com/glob/-/glob-10.4.5.tgz#f4d9f0b90ffdbab09c9d77f5f29b4262517b0956" + integrity sha512-7Bv8RF0k6xjo7d4A/PxYLbUCfb6c+Vpd2/mB2yRDlew7Jb5hEXiCD9ibfO7wpk8i4sevK6DFny9h7EYbM3/sHg== + dependencies: + foreground-child "^3.1.0" + jackspeak "^3.1.2" + minimatch "^9.0.4" + minipass "^7.1.2" + package-json-from-dist "^1.0.0" + path-scurry "^1.11.1" + +glob@^7.1.3: + version "7.2.3" + resolved "https://registry.yarnpkg.com/glob/-/glob-7.2.3.tgz#b8df0fb802bbfa8e89bd1d938b4e16578ed44f2b" + integrity sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q== + dependencies: + fs.realpath "^1.0.0" + inflight "^1.0.4" + inherits "2" + minimatch "^3.1.1" + once "^1.3.0" + path-is-absolute "^1.0.0" + +globals@^13.19.0: + version "13.24.0" + resolved "https://registry.yarnpkg.com/globals/-/globals-13.24.0.tgz#8432a19d78ce0c1e833949c36adb345400bb1171" + integrity sha512-AhO5QUcj8llrbG09iWhPU2B204J1xnPeL8kQmVorSsy+Sjj1sk8gIyh6cUocGmH4L0UuhAJy+hJMRA4mgA4mFQ== + dependencies: + type-fest "^0.20.2" + +globalthis@^1.0.3: + version "1.0.4" + resolved "https://registry.yarnpkg.com/globalthis/-/globalthis-1.0.4.tgz#7430ed3a975d97bfb59bcce41f5cabbafa651236" + integrity sha512-DpLKbNU4WylpxJykQujfCcwYWiV/Jhm50Goo0wrVILAv5jOr9d+H+UR3PhSCD2rCCEIg0uc+G+muBTwD54JhDQ== + dependencies: + define-properties "^1.2.1" + gopd "^1.0.1" + +globby@^11.1.0: + version "11.1.0" + resolved "https://registry.yarnpkg.com/globby/-/globby-11.1.0.tgz#bd4be98bb042f83d796f7e3811991fbe82a0d34b" + integrity sha512-jhIXaOzy1sb8IyocaruWSn1TjmnBVs8Ayhcy83rmxNJ8q2uWKCAj3CnJY+KpGSXCueAPc0i05kVvVKtP1t9S3g== + dependencies: + array-union "^2.1.0" + dir-glob "^3.0.1" + fast-glob "^3.2.9" + ignore "^5.2.0" + merge2 "^1.4.1" + slash "^3.0.0" + +gopd@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/gopd/-/gopd-1.0.1.tgz#29ff76de69dac7489b7c0918a5788e56477c332c" + integrity sha512-d65bNlIadxvpb/A2abVdlqKqV563juRnZ1Wtk6s1sIR8uNsXR70xqIzVqxVf1eTqDunwT2MkczEeaezCKTZhwA== + dependencies: + get-intrinsic "^1.1.3" + +graceful-fs@^4.2.11, graceful-fs@^4.2.4: + version "4.2.11" + resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.2.11.tgz#4183e4e8bf08bb6e05bbb2f7d2e0c8f712ca40e3" + integrity sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ== + +graphemer@^1.4.0: + version "1.4.0" + resolved "https://registry.yarnpkg.com/graphemer/-/graphemer-1.4.0.tgz#fb2f1d55e0e3a1849aeffc90c4fa0dd53a0e66c6" + integrity 
sha512-EtKwoO6kxCL9WO5xipiHTZlSzBm7WLT627TqC/uVRd0HKmq8NXyebnNYxDoBi7wt8eTWrUrKXCOVaFq9x1kgag== + +has-bigints@^1.0.1, has-bigints@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/has-bigints/-/has-bigints-1.0.2.tgz#0871bd3e3d51626f6ca0966668ba35d5602d6eaa" + integrity sha512-tSvCKtBr9lkF0Ex0aQiP9N+OpV4zi2r/Nee5VkRDbaqv35RLYMzbwQfFSZZH0kR+Rd6302UJZ2p/bJCEoR3VoQ== + +has-flag@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/has-flag/-/has-flag-4.0.0.tgz#944771fd9c81c81265c4d6941860da06bb59479b" + integrity sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ== + +has-property-descriptors@^1.0.0, has-property-descriptors@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/has-property-descriptors/-/has-property-descriptors-1.0.2.tgz#963ed7d071dc7bf5f084c5bfbe0d1b6222586854" + integrity sha512-55JNKuIW+vq4Ke1BjOTjM2YctQIvCT7GFzHwmfZPGo5wnrgkid0YQtnAleFSqumZm4az3n2BS+erby5ipJdgrg== + dependencies: + es-define-property "^1.0.0" + +has-proto@^1.0.1, has-proto@^1.0.3: + version "1.0.3" + resolved "https://registry.yarnpkg.com/has-proto/-/has-proto-1.0.3.tgz#b31ddfe9b0e6e9914536a6ab286426d0214f77fd" + integrity sha512-SJ1amZAJUiZS+PhsVLf5tGydlaVB8EdFpaSO4gmiUKUOxk8qzn5AIy4ZeJUmh22znIdk/uMAUT2pl3FxzVUH+Q== + +has-symbols@^1.0.2, has-symbols@^1.0.3: + version "1.0.3" + resolved "https://registry.yarnpkg.com/has-symbols/-/has-symbols-1.0.3.tgz#bb7b2c4349251dce87b125f7bdf874aa7c8b39f8" + integrity sha512-l3LCuF6MgDNwTDKkdYGEihYjt5pRPbEg46rtlmnSPlUbgmB8LOIrKJbYYFBSbnPaJexMKtiPO8hmeRjRz2Td+A== + +has-tostringtag@^1.0.0, has-tostringtag@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/has-tostringtag/-/has-tostringtag-1.0.2.tgz#2cdc42d40bef2e5b4eeab7c01a73c54ce7ab5abc" + integrity sha512-NqADB8VjPFLM2V0VvHUewwwsw0ZWBaIdgo+ieHtK3hasLz4qeCRjYcqfB6AQrBggRKppKF8L52/VqdVsO47Dlw== + dependencies: + has-symbols "^1.0.3" + +hasown@^2.0.0, hasown@^2.0.1, hasown@^2.0.2: + version "2.0.2" + resolved "https://registry.yarnpkg.com/hasown/-/hasown-2.0.2.tgz#003eaf91be7adc372e84ec59dc37252cedb80003" + integrity sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ== + dependencies: + function-bind "^1.1.2" + +hyphenate-style-name@^1.0.3: + version "1.1.0" + resolved "https://registry.yarnpkg.com/hyphenate-style-name/-/hyphenate-style-name-1.1.0.tgz#1797bf50369588b47b72ca6d5e65374607cf4436" + integrity sha512-WDC/ui2VVRrz3jOVi+XtjqkDjiVjTtFaAGiW37k6b+ohyQ5wYDOGkvCZa8+H0nx3gyvv0+BST9xuOgIyGQ00gw== + +ignore@^5.2.0: + version "5.3.1" + resolved "https://registry.yarnpkg.com/ignore/-/ignore-5.3.1.tgz#5073e554cd42c5b33b394375f538b8593e34d4ef" + integrity sha512-5Fytz/IraMjqpwfd34ke28PTVMjZjJG2MPn5t7OE4eUCUNf8BAa7b5WUS9/Qvr6mwOQS7Mk6vdsMno5he+T8Xw== + +import-fresh@^3.2.1: + version "3.3.0" + resolved "https://registry.yarnpkg.com/import-fresh/-/import-fresh-3.3.0.tgz#37162c25fcb9ebaa2e6e53d5b4d88ce17d9e0c2b" + integrity sha512-veYYhQa+D1QBKznvhUHxb8faxlrwUnxseDAbAp457E0wLNio2bOSKnjYDhMj+YiAq61xrMGhQk9iXVk5FzgQMw== + dependencies: + parent-module "^1.0.0" + resolve-from "^4.0.0" + +imurmurhash@^0.1.4: + version "0.1.4" + resolved "https://registry.yarnpkg.com/imurmurhash/-/imurmurhash-0.1.4.tgz#9218b9b2b928a238b13dc4fb6b6d576f231453ea" + integrity sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA== + +inflight@^1.0.4: + version "1.0.6" + resolved 
"https://registry.yarnpkg.com/inflight/-/inflight-1.0.6.tgz#49bd6331d7d02d0c09bc910a1075ba8165b56df9" + integrity sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA== + dependencies: + once "^1.3.0" + wrappy "1" + +inherits@2: + version "2.0.4" + resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.4.tgz#0fa2c64f932917c3433a0ded55363aae37416b7c" + integrity sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ== + +inline-style-prefixer@^7.0.0: + version "7.0.1" + resolved "https://registry.yarnpkg.com/inline-style-prefixer/-/inline-style-prefixer-7.0.1.tgz#9310f3cfa2c6f3901d1480f373981c02691781e8" + integrity sha512-lhYo5qNTQp3EvSSp3sRvXMbVQTLrvGV6DycRMJ5dm2BLMiJ30wpXKdDdgX+GmJZ5uQMucwRKHamXSst3Sj/Giw== + dependencies: + css-in-js-utils "^3.1.0" + +internal-slot@^1.0.4, internal-slot@^1.0.7: + version "1.0.7" + resolved "https://registry.yarnpkg.com/internal-slot/-/internal-slot-1.0.7.tgz#c06dcca3ed874249881007b0a5523b172a190802" + integrity sha512-NGnrKwXzSms2qUUih/ILZ5JBqNTSa1+ZmP6flaIp6KmSElgE9qdndzS3cqjrDovwFdmwsGsLdeFgB6suw+1e9g== + dependencies: + es-errors "^1.3.0" + hasown "^2.0.0" + side-channel "^1.0.4" + +is-arguments@^1.1.1: + version "1.1.1" + resolved "https://registry.yarnpkg.com/is-arguments/-/is-arguments-1.1.1.tgz#15b3f88fda01f2a97fec84ca761a560f123efa9b" + integrity sha512-8Q7EARjzEnKpt/PCD7e1cgUS0a6X8u5tdSiMqXhojOdoV9TsMsiO+9VLC5vAmO8N7/GmXn7yjR8qnA6bVAEzfA== + dependencies: + call-bind "^1.0.2" + has-tostringtag "^1.0.0" + +is-array-buffer@^3.0.2, is-array-buffer@^3.0.4: + version "3.0.4" + resolved "https://registry.yarnpkg.com/is-array-buffer/-/is-array-buffer-3.0.4.tgz#7a1f92b3d61edd2bc65d24f130530ea93d7fae98" + integrity sha512-wcjaerHw0ydZwfhiKbXJWLDY8A7yV7KhjQOpb83hGgGfId/aQa4TOvwyzn2PuswW2gPCYEL/nEAiSVpdOj1lXw== + dependencies: + call-bind "^1.0.2" + get-intrinsic "^1.2.1" + +is-async-function@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/is-async-function/-/is-async-function-2.0.0.tgz#8e4418efd3e5d3a6ebb0164c05ef5afb69aa9646" + integrity sha512-Y1JXKrfykRJGdlDwdKlLpLyMIiWqWvuSd17TvZk68PLAOGOoF4Xyav1z0Xhoi+gCYjZVeC5SI+hYFOfvXmGRCA== + dependencies: + has-tostringtag "^1.0.0" + +is-bigint@^1.0.1: + version "1.0.4" + resolved "https://registry.yarnpkg.com/is-bigint/-/is-bigint-1.0.4.tgz#08147a1875bc2b32005d41ccd8291dffc6691df3" + integrity sha512-zB9CruMamjym81i2JZ3UMn54PKGsQzsJeo6xvN3HJJ4CAsQNB6iRutp2To77OfCNuoxspsIhzaPoO1zyCEhFOg== + dependencies: + has-bigints "^1.0.1" + +is-binary-path@~2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/is-binary-path/-/is-binary-path-2.1.0.tgz#ea1f7f3b80f064236e83470f86c09c254fb45b09" + integrity sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw== + dependencies: + binary-extensions "^2.0.0" + +is-boolean-object@^1.1.0: + version "1.1.2" + resolved "https://registry.yarnpkg.com/is-boolean-object/-/is-boolean-object-1.1.2.tgz#5c6dc200246dd9321ae4b885a114bb1f75f63719" + integrity sha512-gDYaKHJmnj4aWxyj6YHyXVpdQawtVLHU5cb+eztPGczf6cjuTdwve5ZIEfgXqH4e57An1D1AKf8CZ3kYrQRqYA== + dependencies: + call-bind "^1.0.2" + has-tostringtag "^1.0.0" + +is-callable@^1.1.3, is-callable@^1.1.4, is-callable@^1.2.7: + version "1.2.7" + resolved "https://registry.yarnpkg.com/is-callable/-/is-callable-1.2.7.tgz#3bc2a85ea742d9e36205dcacdd72ca1fdc51b055" + integrity sha512-1BC0BVFhS/p0qtw6enp8e+8OD0UrK0oFLztSjNzhcKA3WDuJxxAPXzPuPtKkjEY9UUoEWlX/8fgKeu2S8i9JTA== + 
+is-core-module@^2.11.0, is-core-module@^2.13.0, is-core-module@^2.13.1: + version "2.14.0" + resolved "https://registry.yarnpkg.com/is-core-module/-/is-core-module-2.14.0.tgz#43b8ef9f46a6a08888db67b1ffd4ec9e3dfd59d1" + integrity sha512-a5dFJih5ZLYlRtDc0dZWP7RiKr6xIKzmn/oAYCDvdLThadVgyJwlaoQPmRtMSpz+rk0OGAgIu+TcM9HUF0fk1A== + dependencies: + hasown "^2.0.2" + +is-data-view@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/is-data-view/-/is-data-view-1.0.1.tgz#4b4d3a511b70f3dc26d42c03ca9ca515d847759f" + integrity sha512-AHkaJrsUVW6wq6JS8y3JnM/GJF/9cf+k20+iDzlSaJrinEo5+7vRiteOSwBhHRiAyQATN1AmY4hwzxJKPmYf+w== + dependencies: + is-typed-array "^1.1.13" + +is-date-object@^1.0.1, is-date-object@^1.0.5: + version "1.0.5" + resolved "https://registry.yarnpkg.com/is-date-object/-/is-date-object-1.0.5.tgz#0841d5536e724c25597bf6ea62e1bd38298df31f" + integrity sha512-9YQaSxsAiSwcvS33MBk3wTCVnWK+HhF8VZR2jRxehM16QcVOdHqPn4VPHmRK4lSr38n9JriurInLcP90xsYNfQ== + dependencies: + has-tostringtag "^1.0.0" + +is-extglob@^2.1.1: + version "2.1.1" + resolved "https://registry.yarnpkg.com/is-extglob/-/is-extglob-2.1.1.tgz#a88c02535791f02ed37c76a1b9ea9773c833f8c2" + integrity sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ== + +is-finalizationregistry@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/is-finalizationregistry/-/is-finalizationregistry-1.0.2.tgz#c8749b65f17c133313e661b1289b95ad3dbd62e6" + integrity sha512-0by5vtUJs8iFQb5TYUHHPudOR+qXYIMKtiUzvLIZITZUjknFmziyBJuLhVRc+Ds0dREFlskDNJKYIdIzu/9pfw== + dependencies: + call-bind "^1.0.2" + +is-fullwidth-code-point@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz#f116f8064fe90b3f7844a38997c0b75051269f1d" + integrity sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg== + +is-generator-function@^1.0.10: + version "1.0.10" + resolved "https://registry.yarnpkg.com/is-generator-function/-/is-generator-function-1.0.10.tgz#f1558baf1ac17e0deea7c0415c438351ff2b3c72" + integrity sha512-jsEjy9l3yiXEQ+PsXdmBwEPcOxaXWLspKdplFUVI9vq1iZgIekeC0L167qeu86czQaxed3q/Uzuw0swL0irL8A== + dependencies: + has-tostringtag "^1.0.0" + +is-glob@^4.0.0, is-glob@^4.0.1, is-glob@^4.0.3, is-glob@~4.0.1: + version "4.0.3" + resolved "https://registry.yarnpkg.com/is-glob/-/is-glob-4.0.3.tgz#64f61e42cbbb2eec2071a9dac0b28ba1e65d5084" + integrity sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg== + dependencies: + is-extglob "^2.1.1" + +is-map@^2.0.2, is-map@^2.0.3: + version "2.0.3" + resolved "https://registry.yarnpkg.com/is-map/-/is-map-2.0.3.tgz#ede96b7fe1e270b3c4465e3a465658764926d62e" + integrity sha512-1Qed0/Hr2m+YqxnM09CjA2d/i6YZNfF6R2oRAOj36eUdS6qIV/huPJNSEpKbupewFs+ZsJlxsjjPbc0/afW6Lw== + +is-negative-zero@^2.0.3: + version "2.0.3" + resolved "https://registry.yarnpkg.com/is-negative-zero/-/is-negative-zero-2.0.3.tgz#ced903a027aca6381b777a5743069d7376a49747" + integrity sha512-5KoIu2Ngpyek75jXodFvnafB6DJgr3u8uuK0LEZJjrU19DrMD3EVERaR8sjz8CCGgpZvxPl9SuE1GMVPFHx1mw== + +is-number-object@^1.0.4: + version "1.0.7" + resolved "https://registry.yarnpkg.com/is-number-object/-/is-number-object-1.0.7.tgz#59d50ada4c45251784e9904f5246c742f07a42fc" + integrity sha512-k1U0IRzLMo7ZlYIfzRu23Oh6MiIFasgpb9X76eqfFZAqwH44UI4KTBvBYIZ1dSL9ZzChTB9ShHfLkR4pdW5krQ== + dependencies: + has-tostringtag "^1.0.0" + +is-number@^7.0.0: + version "7.0.0" + resolved 
"https://registry.yarnpkg.com/is-number/-/is-number-7.0.0.tgz#7535345b896734d5f80c4d06c50955527a14f12b" + integrity sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng== + +is-path-inside@^3.0.3: + version "3.0.3" + resolved "https://registry.yarnpkg.com/is-path-inside/-/is-path-inside-3.0.3.tgz#d231362e53a07ff2b0e0ea7fed049161ffd16283" + integrity sha512-Fd4gABb+ycGAmKou8eMftCupSir5lRxqf4aD/vd0cD2qc4HL07OjCeuHMr8Ro4CoMaeCKDB0/ECBOVWjTwUvPQ== + +is-regex@^1.1.4: + version "1.1.4" + resolved "https://registry.yarnpkg.com/is-regex/-/is-regex-1.1.4.tgz#eef5663cd59fa4c0ae339505323df6854bb15958" + integrity sha512-kvRdxDsxZjhzUX07ZnLydzS1TU/TJlTUHHY4YLL87e37oUA49DfkLqgy+VjFocowy29cKvcSiu+kIv728jTTVg== + dependencies: + call-bind "^1.0.2" + has-tostringtag "^1.0.0" + +is-set@^2.0.2, is-set@^2.0.3: + version "2.0.3" + resolved "https://registry.yarnpkg.com/is-set/-/is-set-2.0.3.tgz#8ab209ea424608141372ded6e0cb200ef1d9d01d" + integrity sha512-iPAjerrse27/ygGLxw+EBR9agv9Y6uLeYVJMu+QNCoouJ1/1ri0mGrcWpfCqFZuzzx3WjtwxG098X+n4OuRkPg== + +is-shared-array-buffer@^1.0.2, is-shared-array-buffer@^1.0.3: + version "1.0.3" + resolved "https://registry.yarnpkg.com/is-shared-array-buffer/-/is-shared-array-buffer-1.0.3.tgz#1237f1cba059cdb62431d378dcc37d9680181688" + integrity sha512-nA2hv5XIhLR3uVzDDfCIknerhx8XUKnstuOERPNNIinXG7v9u+ohXF67vxm4TPTEPU6lm61ZkwP3c9PCB97rhg== + dependencies: + call-bind "^1.0.7" + +is-string@^1.0.5, is-string@^1.0.7: + version "1.0.7" + resolved "https://registry.yarnpkg.com/is-string/-/is-string-1.0.7.tgz#0dd12bf2006f255bb58f695110eff7491eebc0fd" + integrity sha512-tE2UXzivje6ofPW7l23cjDOMa09gb7xlAqG6jG5ej6uPV32TlWP3NKPigtaGeHNu9fohccRYvIiZMfOOnOYUtg== + dependencies: + has-tostringtag "^1.0.0" + +is-symbol@^1.0.2, is-symbol@^1.0.3: + version "1.0.4" + resolved "https://registry.yarnpkg.com/is-symbol/-/is-symbol-1.0.4.tgz#a6dac93b635b063ca6872236de88910a57af139c" + integrity sha512-C/CPBqKWnvdcxqIARxyOh4v1UUEOCHpgDa0WYgpKDFMszcrPcffg5uhwSgPCLD2WWxmq6isisz87tzT01tuGhg== + dependencies: + has-symbols "^1.0.2" + +is-typed-array@^1.1.13: + version "1.1.13" + resolved "https://registry.yarnpkg.com/is-typed-array/-/is-typed-array-1.1.13.tgz#d6c5ca56df62334959322d7d7dd1cca50debe229" + integrity sha512-uZ25/bUAlUY5fR4OKT4rZQEBrzQWYV9ZJYGGsUmEJ6thodVJ1HX64ePQ6Z0qPWP+m+Uq6e9UugrE38jeYsDSMw== + dependencies: + which-typed-array "^1.1.14" + +is-weakmap@^2.0.2: + version "2.0.2" + resolved "https://registry.yarnpkg.com/is-weakmap/-/is-weakmap-2.0.2.tgz#bf72615d649dfe5f699079c54b83e47d1ae19cfd" + integrity sha512-K5pXYOm9wqY1RgjpL3YTkF39tni1XajUIkawTLUo9EZEVUFga5gSQJF8nNS7ZwJQ02y+1YCNYcMh+HIf1ZqE+w== + +is-weakref@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/is-weakref/-/is-weakref-1.0.2.tgz#9529f383a9338205e89765e0392efc2f100f06f2" + integrity sha512-qctsuLZmIQ0+vSSMfoVvyFe2+GSEvnmZ2ezTup1SBse9+twCCeial6EEi3Nc2KFcf6+qz2FBPnjXsk8xhKSaPQ== + dependencies: + call-bind "^1.0.2" + +is-weakset@^2.0.3: + version "2.0.3" + resolved "https://registry.yarnpkg.com/is-weakset/-/is-weakset-2.0.3.tgz#e801519df8c0c43e12ff2834eead84ec9e624007" + integrity sha512-LvIm3/KWzS9oRFHugab7d+M/GcBXuXX5xZkzPmN+NxihdQlZUQ4dWuSV1xR/sq6upL1TJEDrfBgRepHFdBtSNQ== + dependencies: + call-bind "^1.0.7" + get-intrinsic "^1.2.4" + +isarray@^2.0.5: + version "2.0.5" + resolved "https://registry.yarnpkg.com/isarray/-/isarray-2.0.5.tgz#8af1e4c1221244cc62459faf38940d4e644a5723" + integrity 
sha512-xHjhDr3cNBK0BzdUJSPXZntQUx/mwMS5Rw4A7lPJ90XGAO6ISP/ePDNuo0vhqOZU+UD5JoodwCAAoZQd3FeAKw== + +isexe@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/isexe/-/isexe-2.0.0.tgz#e8fbf374dc556ff8947a10dcb0572d633f2cfa10" + integrity sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw== + +iterator.prototype@^1.1.2: + version "1.1.2" + resolved "https://registry.yarnpkg.com/iterator.prototype/-/iterator.prototype-1.1.2.tgz#5e29c8924f01916cb9335f1ff80619dcff22b0c0" + integrity sha512-DR33HMMr8EzwuRL8Y9D3u2BMj8+RqSE850jfGu59kS7tbmPLzGkZmVSfyCFSDxuZiEY6Rzt3T2NA/qU+NwVj1w== + dependencies: + define-properties "^1.2.1" + get-intrinsic "^1.2.1" + has-symbols "^1.0.3" + reflect.getprototypeof "^1.0.4" + set-function-name "^2.0.1" + +jackspeak@^2.3.5: + version "2.3.6" + resolved "https://registry.yarnpkg.com/jackspeak/-/jackspeak-2.3.6.tgz#647ecc472238aee4b06ac0e461acc21a8c505ca8" + integrity sha512-N3yCS/NegsOBokc8GAdM8UcmfsKiSS8cipheD/nivzr700H+nsMOxJjQnvwOcRYVuFkdH0wGUvW2WbXGmrZGbQ== + dependencies: + "@isaacs/cliui" "^8.0.2" + optionalDependencies: + "@pkgjs/parseargs" "^0.11.0" + +jackspeak@^3.1.2: + version "3.4.3" + resolved "https://registry.yarnpkg.com/jackspeak/-/jackspeak-3.4.3.tgz#8833a9d89ab4acde6188942bd1c53b6390ed5a8a" + integrity sha512-OGlZQpz2yfahA/Rd1Y8Cd9SIEsqvXkLVoSw/cgwhnhFMDbsQFeZYoJJ7bIZBS9BcamUW96asq/npPWugM+RQBw== + dependencies: + "@isaacs/cliui" "^8.0.2" + optionalDependencies: + "@pkgjs/parseargs" "^0.11.0" + +jiti@^1.21.0: + version "1.21.6" + resolved "https://registry.yarnpkg.com/jiti/-/jiti-1.21.6.tgz#6c7f7398dd4b3142767f9a168af2f317a428d268" + integrity sha512-2yTgeWTWzMWkHu6Jp9NKgePDaYHbntiwvYuuJLbbN9vl7DC9DvXKOB2BC3ZZ92D3cvV/aflH0osDfwpHepQ53w== + +jotai@^2.8.0: + version "2.9.0" + resolved "https://registry.yarnpkg.com/jotai/-/jotai-2.9.0.tgz#240f37fb1bb8a8d4c8d178b423575f2da677814f" + integrity sha512-MioTpMvR78IGfJ+W8EwQj3kwTkb+u0reGnTyg3oJZMWK9rK9v8NBSC9Rhrg9jrrFYA6bGZtzJa96zsuAYF6W3w== + +js-cookie@^2.2.1: + version "2.2.1" + resolved "https://registry.yarnpkg.com/js-cookie/-/js-cookie-2.2.1.tgz#69e106dc5d5806894562902aa5baec3744e9b2b8" + integrity sha512-HvdH2LzI/EAZcUwA8+0nKNtWHqS+ZmijLA30RwZA0bo7ToCckjK5MkGhjED9KoRcXO6BaGI3I9UIzSA1FKFPOQ== + +"js-tokens@^3.0.0 || ^4.0.0": + version "4.0.0" + resolved "https://registry.yarnpkg.com/js-tokens/-/js-tokens-4.0.0.tgz#19203fb59991df98e3a287050d4647cdeaf32499" + integrity sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ== + +js-yaml@^4.1.0: + version "4.1.0" + resolved "https://registry.yarnpkg.com/js-yaml/-/js-yaml-4.1.0.tgz#c1fb65f8f5017901cdd2c951864ba18458a10602" + integrity sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA== + dependencies: + argparse "^2.0.1" + +json-buffer@3.0.1: + version "3.0.1" + resolved "https://registry.yarnpkg.com/json-buffer/-/json-buffer-3.0.1.tgz#9338802a30d3b6605fbe0613e094008ca8c05a13" + integrity sha512-4bV5BfR2mqfQTJm+V5tPPdf+ZpuhiIvTuAB5g8kcrXOZpTT/QwwVRWBywX1ozr6lEuPdbHxwaJlm9G6mI2sfSQ== + +json-schema-traverse@^0.4.1: + version "0.4.1" + resolved "https://registry.yarnpkg.com/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz#69f6a87d9513ab8bb8fe63bdb0979c448e684660" + integrity sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg== + +json-stable-stringify-without-jsonify@^1.0.1: + version "1.0.1" + resolved 
"https://registry.yarnpkg.com/json-stable-stringify-without-jsonify/-/json-stable-stringify-without-jsonify-1.0.1.tgz#9db7b59496ad3f3cfef30a75142d2d930ad72651" + integrity sha512-Bdboy+l7tA3OGW6FjyFHWkP5LuByj1Tk33Ljyq0axyzdk9//JSi2u3fP1QSmd1KNwq6VOKYGlAu87CisVir6Pw== + +json5@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/json5/-/json5-1.0.2.tgz#63d98d60f21b313b77c4d6da18bfa69d80e1d593" + integrity sha512-g1MWMLBiz8FKi1e4w0UyVL3w+iJceWAFBAaBnnGKOpNa5f8TLktkbre1+s6oICydWAm+HRUGTmI+//xv2hvXYA== + dependencies: + minimist "^1.2.0" + +"jsx-ast-utils@^2.4.1 || ^3.0.0", jsx-ast-utils@^3.3.5: + version "3.3.5" + resolved "https://registry.yarnpkg.com/jsx-ast-utils/-/jsx-ast-utils-3.3.5.tgz#4766bd05a8e2a11af222becd19e15575e52a853a" + integrity sha512-ZZow9HBI5O6EPgSJLUb8n2NKgmVWTwCvHGwFuJlMjvLFqlGG6pjirPhtdsseaLZjSibD8eegzmYpUZwoIlj2cQ== + dependencies: + array-includes "^3.1.6" + array.prototype.flat "^1.3.1" + object.assign "^4.1.4" + object.values "^1.1.6" + +keyv@^4.5.3: + version "4.5.4" + resolved "https://registry.yarnpkg.com/keyv/-/keyv-4.5.4.tgz#a879a99e29452f942439f2a405e3af8b31d4de93" + integrity sha512-oxVHkHR/EJf2CNXnWxRLW6mg7JyCCUcG0DtEGmL2ctUo1PNTin1PUil+r/+4r5MpVgC/fn1kjsx7mjSujKqIpw== + dependencies: + json-buffer "3.0.1" + +language-subtag-registry@^0.3.20: + version "0.3.23" + resolved "https://registry.yarnpkg.com/language-subtag-registry/-/language-subtag-registry-0.3.23.tgz#23529e04d9e3b74679d70142df3fd2eb6ec572e7" + integrity sha512-0K65Lea881pHotoGEa5gDlMxt3pctLi2RplBb7Ezh4rRdLEOtgi7n4EwK9lamnUCkKBqaeKRVebTq6BAxSkpXQ== + +language-tags@^1.0.9: + version "1.0.9" + resolved "https://registry.yarnpkg.com/language-tags/-/language-tags-1.0.9.tgz#1ffdcd0ec0fafb4b1be7f8b11f306ad0f9c08777" + integrity sha512-MbjN408fEndfiQXbFQ1vnd+1NoLDsnQW41410oQBXiyXDMYH5z505juWa4KUE1LqxRC7DgOgZDbKLxHIwm27hA== + dependencies: + language-subtag-registry "^0.3.20" + +levn@^0.4.1: + version "0.4.1" + resolved "https://registry.yarnpkg.com/levn/-/levn-0.4.1.tgz#ae4562c007473b932a6200d403268dd2fffc6ade" + integrity sha512-+bT2uH4E5LGE7h/n3evcS/sQlJXCpIp6ym8OWJ5eV6+67Dsql/LaaT7qJBAt2rzfoa/5QBGBhxDix1dMt2kQKQ== + dependencies: + prelude-ls "^1.2.1" + type-check "~0.4.0" + +lilconfig@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/lilconfig/-/lilconfig-2.1.0.tgz#78e23ac89ebb7e1bfbf25b18043de756548e7f52" + integrity sha512-utWOt/GHzuUxnLKxB6dk81RoOeoNeHgbrXiuGk4yyF5qlRz+iIVWu56E2fqGHFrXz0QNUhLB/8nKqvRH66JKGQ== + +lilconfig@^3.0.0: + version "3.1.2" + resolved "https://registry.yarnpkg.com/lilconfig/-/lilconfig-3.1.2.tgz#e4a7c3cb549e3a606c8dcc32e5ae1005e62c05cb" + integrity sha512-eop+wDAvpItUys0FWkHIKeC9ybYrTGbU41U5K7+bttZZeohvnY7M9dZ5kB21GNWiFT2q1OoPTvncPCgSOVO5ow== + +lines-and-columns@^1.1.6: + version "1.2.4" + resolved "https://registry.yarnpkg.com/lines-and-columns/-/lines-and-columns-1.2.4.tgz#eca284f75d2965079309dc0ad9255abb2ebc1632" + integrity sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg== + +locate-path@^6.0.0: + version "6.0.0" + resolved "https://registry.yarnpkg.com/locate-path/-/locate-path-6.0.0.tgz#55321eb309febbc59c4801d931a72452a681d286" + integrity sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw== + dependencies: + p-locate "^5.0.0" + +lodash.castarray@^4.4.0: + version "4.4.0" + resolved "https://registry.yarnpkg.com/lodash.castarray/-/lodash.castarray-4.4.0.tgz#c02513515e309daddd4c24c60cfddcf5976d9115" + integrity 
sha512-aVx8ztPv7/2ULbArGJ2Y42bG1mEQ5mGjpdvrbJcJFU3TbYybe+QlLS4pst9zV52ymy2in1KpFPiZnAOATxD4+Q== + +lodash.isplainobject@^4.0.6: + version "4.0.6" + resolved "https://registry.yarnpkg.com/lodash.isplainobject/-/lodash.isplainobject-4.0.6.tgz#7c526a52d89b45c45cc690b88163be0497f550cb" + integrity sha512-oSXzaWypCMHkPC3NvBEaPHf0KsA5mvPrOPgQWDsbg8n7orZ290M0BmC/jgRZ4vcJ6DTAhjrsSYgdsW/F+MFOBA== + +lodash.merge@^4.6.2: + version "4.6.2" + resolved "https://registry.yarnpkg.com/lodash.merge/-/lodash.merge-4.6.2.tgz#558aa53b43b661e1925a0afdfa36a9a1085fe57a" + integrity sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ== + +loose-envify@^1.1.0, loose-envify@^1.4.0: + version "1.4.0" + resolved "https://registry.yarnpkg.com/loose-envify/-/loose-envify-1.4.0.tgz#71ee51fa7be4caec1a63839f7e682d8132d30caf" + integrity sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q== + dependencies: + js-tokens "^3.0.0 || ^4.0.0" + +lru-cache@^10.2.0: + version "10.4.3" + resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-10.4.3.tgz#410fc8a17b70e598013df257c2446b7f3383f119" + integrity sha512-JNAzZcXrCt42VGLuYz0zfAzDfAvJWW6AfYlDBQyDV5DClI2m5sAmK+OIO7s59XfsRsWHp02jAJrRadPRGTt6SQ== + +mdn-data@2.0.14: + version "2.0.14" + resolved "https://registry.yarnpkg.com/mdn-data/-/mdn-data-2.0.14.tgz#7113fc4281917d63ce29b43446f701e68c25ba50" + integrity sha512-dn6wd0uw5GsdswPFfsgMp5NSB0/aDe6fK94YJV/AJDYXL6HVLWBsxeq7js7Ad+mU2K9LAlwpk6kN2D5mwCPVow== + +merge2@^1.3.0, merge2@^1.4.1: + version "1.4.1" + resolved "https://registry.yarnpkg.com/merge2/-/merge2-1.4.1.tgz#4368892f885e907455a6fd7dc55c0c9d404990ae" + integrity sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg== + +micromatch@^4.0.4, micromatch@^4.0.5: + version "4.0.7" + resolved "https://registry.yarnpkg.com/micromatch/-/micromatch-4.0.7.tgz#33e8190d9fe474a9895525f5618eee136d46c2e5" + integrity sha512-LPP/3KorzCwBxfeUuZmaR6bG2kdeHSbe0P2tY3FLRU4vYrjYz5hI4QZwV0njUx3jeuKe67YukQ1LSPZBKDqO/Q== + dependencies: + braces "^3.0.3" + picomatch "^2.3.1" + +mini-svg-data-uri@^1.2.3: + version "1.4.4" + resolved "https://registry.yarnpkg.com/mini-svg-data-uri/-/mini-svg-data-uri-1.4.4.tgz#8ab0aabcdf8c29ad5693ca595af19dd2ead09939" + integrity sha512-r9deDe9p5FJUPZAk3A59wGH7Ii9YrjjWw0jmw/liSbHl2CHiyXj6FcDXDu2K3TjVAXqiJdaw3xxwlZZr9E6nHg== + +minimatch@9.0.3: + version "9.0.3" + resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-9.0.3.tgz#a6e00c3de44c3a542bfaae70abfc22420a6da825" + integrity sha512-RHiac9mvaRw0x3AYRgDC1CxAP7HTcNrrECeA8YYJeWnpo+2Q5CegtZjaotWTWxDG3UeGA1coE05iH1mPjT/2mg== + dependencies: + brace-expansion "^2.0.1" + +minimatch@^3.0.5, minimatch@^3.1.1, minimatch@^3.1.2: + version "3.1.2" + resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-3.1.2.tgz#19cd194bfd3e428f049a70817c038d89ab4be35b" + integrity sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw== + dependencies: + brace-expansion "^1.1.7" + +minimatch@^9.0.1, minimatch@^9.0.4: + version "9.0.5" + resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-9.0.5.tgz#d74f9dd6b57d83d8e98cfb82133b03978bc929e5" + integrity sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow== + dependencies: + brace-expansion "^2.0.1" + +minimist@^1.2.0, minimist@^1.2.6: + version "1.2.8" + resolved 
"https://registry.yarnpkg.com/minimist/-/minimist-1.2.8.tgz#c1a464e7693302e082a075cee0c057741ac4772c" + integrity sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA== + +"minipass@^5.0.0 || ^6.0.2 || ^7.0.0", minipass@^7.1.2: + version "7.1.2" + resolved "https://registry.yarnpkg.com/minipass/-/minipass-7.1.2.tgz#93a9626ce5e5e66bd4db86849e7515e92340a707" + integrity sha512-qOOzS1cBTWYF4BH8fVePDBOO9iptMnGUEZwNc/cMWnTV2nVLZ7VoNWEPHkYczZA0pdoA7dl6e7FL659nX9S2aw== + +ms@2.1.2: + version "2.1.2" + resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.2.tgz#d09d1f357b443f493382a8eb3ccd183872ae6009" + integrity sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w== + +ms@^2.1.1: + version "2.1.3" + resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.3.tgz#574c8138ce1d2b5861f0b44579dbadd60c6615b2" + integrity sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA== + +mz@^2.7.0: + version "2.7.0" + resolved "https://registry.yarnpkg.com/mz/-/mz-2.7.0.tgz#95008057a56cafadc2bc63dde7f9ff6955948e32" + integrity sha512-z81GNO7nnYMEhrGh9LeymoE4+Yr0Wn5McHIZMK5cfQCl+NDX08sCZgUc9/6MHni9IWuFLm1Z3HTCXu2z9fN62Q== + dependencies: + any-promise "^1.0.0" + object-assign "^4.0.1" + thenify-all "^1.0.0" + +nano-css@^5.6.1: + version "5.6.1" + resolved "https://registry.yarnpkg.com/nano-css/-/nano-css-5.6.1.tgz#964120cb1af6cccaa6d0717a473ccd876b34c197" + integrity sha512-T2Mhc//CepkTa3X4pUhKgbEheJHYAxD0VptuqFhDbGMUWVV2m+lkNiW/Ieuj35wrfC8Zm0l7HvssQh7zcEttSw== + dependencies: + "@jridgewell/sourcemap-codec" "^1.4.15" + css-tree "^1.1.2" + csstype "^3.1.2" + fastest-stable-stringify "^2.0.2" + inline-style-prefixer "^7.0.0" + rtl-css-js "^1.16.1" + stacktrace-js "^2.0.2" + stylis "^4.3.0" + +nanoid@^3.3.6, nanoid@^3.3.7: + version "3.3.7" + resolved "https://registry.yarnpkg.com/nanoid/-/nanoid-3.3.7.tgz#d0c301a691bc8d54efa0a2226ccf3fe2fd656bd8" + integrity sha512-eSRppjcPIatRIMC1U6UngP8XFcz8MQWGQdt1MTBQ7NaAmvXDfvNxbvWV3x2y6CdEUciCSsDHDQZbhYaB8QEo2g== + +natural-compare@^1.4.0: + version "1.4.0" + resolved "https://registry.yarnpkg.com/natural-compare/-/natural-compare-1.4.0.tgz#4abebfeed7541f2c27acfb29bdbbd15c8d5ba4f7" + integrity sha512-OWND8ei3VtNC9h7V60qff3SVobHr996CTwgxubgyQYEpg290h9J0buyECNNJexkFm5sOajh5G116RYA1c8ZMSw== + +next-themes@^0.3.0: + version "0.3.0" + resolved "https://registry.yarnpkg.com/next-themes/-/next-themes-0.3.0.tgz#b4d2a866137a67d42564b07f3a3e720e2ff3871a" + integrity sha512-/QHIrsYpd6Kfk7xakK4svpDI5mmXP0gfvCoJdGpZQ2TOrQZmsW0QxjaiLn8wbIKjtm4BTSqLoix4lxYYOnLJ/w== + +next@^14.2.3: + version "14.2.5" + resolved "https://registry.yarnpkg.com/next/-/next-14.2.5.tgz#afe4022bb0b752962e2205836587a289270efbea" + integrity sha512-0f8aRfBVL+mpzfBjYfQuLWh2WyAwtJXCRfkPF4UJ5qd2YwrHczsrSzXU4tRMV0OAxR8ZJZWPFn6uhSC56UTsLA== + dependencies: + "@next/env" "14.2.5" + "@swc/helpers" "0.5.5" + busboy "1.6.0" + caniuse-lite "^1.0.30001579" + graceful-fs "^4.2.11" + postcss "8.4.31" + styled-jsx "5.1.1" + optionalDependencies: + "@next/swc-darwin-arm64" "14.2.5" + "@next/swc-darwin-x64" "14.2.5" + "@next/swc-linux-arm64-gnu" "14.2.5" + "@next/swc-linux-arm64-musl" "14.2.5" + "@next/swc-linux-x64-gnu" "14.2.5" + "@next/swc-linux-x64-musl" "14.2.5" + "@next/swc-win32-arm64-msvc" "14.2.5" + "@next/swc-win32-ia32-msvc" "14.2.5" + "@next/swc-win32-x64-msvc" "14.2.5" + +node-releases@^2.0.14: + version "2.0.14" + resolved 
"https://registry.yarnpkg.com/node-releases/-/node-releases-2.0.14.tgz#2ffb053bceb8b2be8495ece1ab6ce600c4461b0b" + integrity sha512-y10wOWt8yZpqXmOgRo77WaHEmhYQYGNA6y421PKsKYWEK8aW+cqAphborZDhqfyKrbZEN92CN1X2KbafY2s7Yw== + +normalize-path@^3.0.0, normalize-path@~3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/normalize-path/-/normalize-path-3.0.0.tgz#0dcd69ff23a1c9b11fd0978316644a0388216a65" + integrity sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA== + +normalize-range@^0.1.2: + version "0.1.2" + resolved "https://registry.yarnpkg.com/normalize-range/-/normalize-range-0.1.2.tgz#2d10c06bdfd312ea9777695a4d28439456b75942" + integrity sha512-bdok/XvKII3nUpklnV6P2hxtMNrCboOjAcyBuQnWEhO665FwrSNRxU+AqpsyvO6LgGYPspN+lu5CLtw4jPRKNA== + +object-assign@^4.0.1, object-assign@^4.1.1: + version "4.1.1" + resolved "https://registry.yarnpkg.com/object-assign/-/object-assign-4.1.1.tgz#2109adc7965887cfc05cbbd442cac8bfbb360863" + integrity sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg== + +object-hash@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/object-hash/-/object-hash-3.0.0.tgz#73f97f753e7baffc0e2cc9d6e079079744ac82e9" + integrity sha512-RSn9F68PjH9HqtltsSnqYC1XXoWe9Bju5+213R98cNGttag9q9yAOTzdbsqvIa7aNm5WffBZFpWYr2aWrklWAw== + +object-inspect@^1.13.1: + version "1.13.2" + resolved "https://registry.yarnpkg.com/object-inspect/-/object-inspect-1.13.2.tgz#dea0088467fb991e67af4058147a24824a3043ff" + integrity sha512-IRZSRuzJiynemAXPYtPe5BoI/RESNYR7TYm50MC5Mqbd3Jmw5y790sErYw3V6SryFJD64b74qQQs9wn5Bg/k3g== + +object-is@^1.1.5: + version "1.1.6" + resolved "https://registry.yarnpkg.com/object-is/-/object-is-1.1.6.tgz#1a6a53aed2dd8f7e6775ff870bea58545956ab07" + integrity sha512-F8cZ+KfGlSGi09lJT7/Nd6KJZ9ygtvYC0/UYYLI9nmQKLMnydpB9yvbv9K1uSkEu7FU9vYPmVwLg328tX+ot3Q== + dependencies: + call-bind "^1.0.7" + define-properties "^1.2.1" + +object-keys@^1.1.1: + version "1.1.1" + resolved "https://registry.yarnpkg.com/object-keys/-/object-keys-1.1.1.tgz#1c47f272df277f3b1daf061677d9c82e2322c60e" + integrity sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA== + +object.assign@^4.1.4, object.assign@^4.1.5: + version "4.1.5" + resolved "https://registry.yarnpkg.com/object.assign/-/object.assign-4.1.5.tgz#3a833f9ab7fdb80fc9e8d2300c803d216d8fdbb0" + integrity sha512-byy+U7gp+FVwmyzKPYhW2h5l3crpmGsxl7X2s8y43IgxvG4g3QZ6CffDtsNQy1WsmZpQbO+ybo0AlW7TY6DcBQ== + dependencies: + call-bind "^1.0.5" + define-properties "^1.2.1" + has-symbols "^1.0.3" + object-keys "^1.1.1" + +object.entries@^1.1.8: + version "1.1.8" + resolved "https://registry.yarnpkg.com/object.entries/-/object.entries-1.1.8.tgz#bffe6f282e01f4d17807204a24f8edd823599c41" + integrity sha512-cmopxi8VwRIAw/fkijJohSfpef5PdN0pMQJN6VC/ZKvn0LIknWD8KtgY6KlQdEc4tIjcQ3HxSMmnvtzIscdaYQ== + dependencies: + call-bind "^1.0.7" + define-properties "^1.2.1" + es-object-atoms "^1.0.0" + +object.fromentries@^2.0.7, object.fromentries@^2.0.8: + version "2.0.8" + resolved "https://registry.yarnpkg.com/object.fromentries/-/object.fromentries-2.0.8.tgz#f7195d8a9b97bd95cbc1999ea939ecd1a2b00c65" + integrity sha512-k6E21FzySsSK5a21KRADBd/NGneRegFO5pLHfdQLpRDETUNJueLXs3WCzyQ3tFRDYgbq3KHGXfTbi2bs8WQ6rQ== + dependencies: + call-bind "^1.0.7" + define-properties "^1.2.1" + es-abstract "^1.23.2" + es-object-atoms "^1.0.0" + +object.groupby@^1.0.1: + version "1.0.3" + resolved 
"https://registry.yarnpkg.com/object.groupby/-/object.groupby-1.0.3.tgz#9b125c36238129f6f7b61954a1e7176148d5002e" + integrity sha512-+Lhy3TQTuzXI5hevh8sBGqbmurHbbIjAi0Z4S63nthVLmLxfbj4T54a4CfZrXIrt9iP4mVAPYMo/v99taj3wjQ== + dependencies: + call-bind "^1.0.7" + define-properties "^1.2.1" + es-abstract "^1.23.2" + +object.hasown@^1.1.4: + version "1.1.4" + resolved "https://registry.yarnpkg.com/object.hasown/-/object.hasown-1.1.4.tgz#e270ae377e4c120cdcb7656ce66884a6218283dc" + integrity sha512-FZ9LZt9/RHzGySlBARE3VF+gE26TxR38SdmqOqliuTnl9wrKulaQs+4dee1V+Io8VfxqzAfHu6YuRgUy8OHoTg== + dependencies: + define-properties "^1.2.1" + es-abstract "^1.23.2" + es-object-atoms "^1.0.0" + +object.values@^1.1.6, object.values@^1.1.7, object.values@^1.2.0: + version "1.2.0" + resolved "https://registry.yarnpkg.com/object.values/-/object.values-1.2.0.tgz#65405a9d92cee68ac2d303002e0b8470a4d9ab1b" + integrity sha512-yBYjY9QX2hnRmZHAjG/f13MzmBzxzYgQhFrke06TTyKY5zSTEqkOeukBzIdVA3j3ulu8Qa3MbVFShV7T2RmGtQ== + dependencies: + call-bind "^1.0.7" + define-properties "^1.2.1" + es-object-atoms "^1.0.0" + +once@^1.3.0: + version "1.4.0" + resolved "https://registry.yarnpkg.com/once/-/once-1.4.0.tgz#583b1aa775961d4b113ac17d9c50baef9dd76bd1" + integrity sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w== + dependencies: + wrappy "1" + +optionator@^0.9.3: + version "0.9.4" + resolved "https://registry.yarnpkg.com/optionator/-/optionator-0.9.4.tgz#7ea1c1a5d91d764fb282139c88fe11e182a3a734" + integrity sha512-6IpQ7mKUxRcZNLIObR0hz7lxsapSSIYNZJwXPGeF0mTVqGKFIXj1DQcMoT22S3ROcLyY/rz0PWaWZ9ayWmad9g== + dependencies: + deep-is "^0.1.3" + fast-levenshtein "^2.0.6" + levn "^0.4.1" + prelude-ls "^1.2.1" + type-check "^0.4.0" + word-wrap "^1.2.5" + +overlayscrollbars-react@^0.5.6: + version "0.5.6" + resolved "https://registry.yarnpkg.com/overlayscrollbars-react/-/overlayscrollbars-react-0.5.6.tgz#e9779f9fc2c1a3288570a45c83f8e42518bfb8c1" + integrity sha512-E5To04bL5brn9GVCZ36SnfGanxa2I2MDkWoa4Cjo5wol7l+diAgi4DBc983V7l2nOk/OLJ6Feg4kySspQEGDBw== + +overlayscrollbars@^2.8.0: + version "2.9.2" + resolved "https://registry.yarnpkg.com/overlayscrollbars/-/overlayscrollbars-2.9.2.tgz#056020a3811742b58b754fab6f775d49bd109be9" + integrity sha512-iDT84r39i7oWP72diZN2mbJUsn/taCq568aQaIrc84S87PunBT7qtsVltAF2esk7ORTRjQDnfjVYoqqTzgs8QA== + +p-limit@^3.0.2: + version "3.1.0" + resolved "https://registry.yarnpkg.com/p-limit/-/p-limit-3.1.0.tgz#e1daccbe78d0d1388ca18c64fea38e3e57e3706b" + integrity sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ== + dependencies: + yocto-queue "^0.1.0" + +p-locate@^5.0.0: + version "5.0.0" + resolved "https://registry.yarnpkg.com/p-locate/-/p-locate-5.0.0.tgz#83c8315c6785005e3bd021839411c9e110e6d834" + integrity sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw== + dependencies: + p-limit "^3.0.2" + +package-json-from-dist@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/package-json-from-dist/-/package-json-from-dist-1.0.0.tgz#e501cd3094b278495eb4258d4c9f6d5ac3019f00" + integrity sha512-dATvCeZN/8wQsGywez1mzHtTlP22H8OEfPrVMLNr4/eGa+ijtLn/6M5f0dY8UKNrC2O9UCU6SSoG3qRKnt7STw== + +parent-module@^1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/parent-module/-/parent-module-1.0.1.tgz#691d2709e78c79fae3a156622452d00762caaaa2" + integrity sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g== + dependencies: + 
callsites "^3.0.0" + +path-exists@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/path-exists/-/path-exists-4.0.0.tgz#513bdbe2d3b95d7762e8c1137efa195c6c61b5b3" + integrity sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w== + +path-is-absolute@^1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/path-is-absolute/-/path-is-absolute-1.0.1.tgz#174b9268735534ffbc7ace6bf53a5a9e1b5c5f5f" + integrity sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg== + +path-key@^3.1.0: + version "3.1.1" + resolved "https://registry.yarnpkg.com/path-key/-/path-key-3.1.1.tgz#581f6ade658cbba65a0d3380de7753295054f375" + integrity sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q== + +path-parse@^1.0.7: + version "1.0.7" + resolved "https://registry.yarnpkg.com/path-parse/-/path-parse-1.0.7.tgz#fbc114b60ca42b30d9daf5858e4bd68bbedb6735" + integrity sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw== + +path-scurry@^1.10.1, path-scurry@^1.11.1: + version "1.11.1" + resolved "https://registry.yarnpkg.com/path-scurry/-/path-scurry-1.11.1.tgz#7960a668888594a0720b12a911d1a742ab9f11d2" + integrity sha512-Xa4Nw17FS9ApQFJ9umLiJS4orGjm7ZzwUrwamcGQuHSzDyth9boKDaycYdDcZDuqYATXw4HFXgaqWTctW/v1HA== + dependencies: + lru-cache "^10.2.0" + minipass "^5.0.0 || ^6.0.2 || ^7.0.0" + +path-type@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/path-type/-/path-type-4.0.0.tgz#84ed01c0a7ba380afe09d90a8c180dcd9d03043b" + integrity sha512-gDKb8aZMDeD/tZWs9P6+q0J9Mwkdl6xMV8TjnGP3qJVJ06bdMgkbBlLU8IdfOsIsFz2BW1rNVT3XuNEl8zPAvw== + +picocolors@^1.0.0, picocolors@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/picocolors/-/picocolors-1.0.1.tgz#a8ad579b571952f0e5d25892de5445bcfe25aaa1" + integrity sha512-anP1Z8qwhkbmu7MFP5iTt+wQKXgwzf7zTyGlcdzabySa9vd0Xt392U0rVmz9poOaBj0uHJKyyo9/upk0HrEQew== + +picomatch@^2.0.4, picomatch@^2.2.1, picomatch@^2.3.1: + version "2.3.1" + resolved "https://registry.yarnpkg.com/picomatch/-/picomatch-2.3.1.tgz#3ba3833733646d9d3e4995946c1365a67fb07a42" + integrity sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA== + +pify@^2.3.0: + version "2.3.0" + resolved "https://registry.yarnpkg.com/pify/-/pify-2.3.0.tgz#ed141a6ac043a849ea588498e7dca8b15330e90c" + integrity sha512-udgsAY+fTnvv7kI7aaxbqwWNb0AHiB0qBO89PZKPkoTmGOgdbrHDKD+0B2X4uTfJ/FT1R09r9gTsjUjNJotuog== + +pirates@^4.0.1: + version "4.0.6" + resolved "https://registry.yarnpkg.com/pirates/-/pirates-4.0.6.tgz#3018ae32ecfcff6c29ba2267cbf21166ac1f36b9" + integrity sha512-saLsH7WeYYPiD25LDuLRRY/i+6HaPYr6G1OUlN39otzkSTxKnubR9RTxS3/Kk50s1g2JTgFwWQDQyplC5/SHZg== + +possible-typed-array-names@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/possible-typed-array-names/-/possible-typed-array-names-1.0.0.tgz#89bb63c6fada2c3e90adc4a647beeeb39cc7bf8f" + integrity sha512-d7Uw+eZoloe0EHDIYoe+bQ5WXnGMOpmiZFTuMWCwpjzzkL2nTjcKiAk4hh8TjnGye2TwWOk3UXucZ+3rbmBa8Q== + +postcss-import@^15.1.0: + version "15.1.0" + resolved "https://registry.yarnpkg.com/postcss-import/-/postcss-import-15.1.0.tgz#41c64ed8cc0e23735a9698b3249ffdbf704adc70" + integrity sha512-hpr+J05B2FVYUAXHeK1YyI267J/dDDhMU6B6civm8hSY1jYJnBXxzKDKDswzJmtLHryrjhnDjqqp/49t8FALew== + dependencies: + postcss-value-parser "^4.0.0" + read-cache "^1.0.0" + resolve "^1.1.7" + +postcss-js@^4.0.1: + version "4.0.1" + resolved 
"https://registry.yarnpkg.com/postcss-js/-/postcss-js-4.0.1.tgz#61598186f3703bab052f1c4f7d805f3991bee9d2" + integrity sha512-dDLF8pEO191hJMtlHFPRa8xsizHaM82MLfNkUHdUtVEV3tgTp5oj+8qbEqYM57SLfc74KSbw//4SeJma2LRVIw== + dependencies: + camelcase-css "^2.0.1" + +postcss-load-config@^4.0.1: + version "4.0.2" + resolved "https://registry.yarnpkg.com/postcss-load-config/-/postcss-load-config-4.0.2.tgz#7159dcf626118d33e299f485d6afe4aff7c4a3e3" + integrity sha512-bSVhyJGL00wMVoPUzAVAnbEoWyqRxkjv64tUl427SKnPrENtq6hJwUojroMz2VB+Q1edmi4IfrAPpami5VVgMQ== + dependencies: + lilconfig "^3.0.0" + yaml "^2.3.4" + +postcss-nested@^6.0.1: + version "6.0.1" + resolved "https://registry.yarnpkg.com/postcss-nested/-/postcss-nested-6.0.1.tgz#f83dc9846ca16d2f4fa864f16e9d9f7d0961662c" + integrity sha512-mEp4xPMi5bSWiMbsgoPfcP74lsWLHkQbZc3sY+jWYd65CUwXrUaTp0fmNpa01ZcETKlIgUdFN/MpS2xZtqL9dQ== + dependencies: + postcss-selector-parser "^6.0.11" + +postcss-selector-parser@6.0.10: + version "6.0.10" + resolved "https://registry.yarnpkg.com/postcss-selector-parser/-/postcss-selector-parser-6.0.10.tgz#79b61e2c0d1bfc2602d549e11d0876256f8df88d" + integrity sha512-IQ7TZdoaqbT+LCpShg46jnZVlhWD2w6iQYAcYXfHARZ7X1t/UGhhceQDs5X0cGqKvYlHNOuv7Oa1xmb0oQuA3w== + dependencies: + cssesc "^3.0.0" + util-deprecate "^1.0.2" + +postcss-selector-parser@^6.0.11: + version "6.1.0" + resolved "https://registry.yarnpkg.com/postcss-selector-parser/-/postcss-selector-parser-6.1.0.tgz#49694cb4e7c649299fea510a29fa6577104bcf53" + integrity sha512-UMz42UD0UY0EApS0ZL9o1XnLhSTtvvvLe5Dc2H2O56fvRZi+KulDyf5ctDhhtYJBGKStV2FL1fy6253cmLgqVQ== + dependencies: + cssesc "^3.0.0" + util-deprecate "^1.0.2" + +postcss-value-parser@^4.0.0, postcss-value-parser@^4.2.0: + version "4.2.0" + resolved "https://registry.yarnpkg.com/postcss-value-parser/-/postcss-value-parser-4.2.0.tgz#723c09920836ba6d3e5af019f92bc0971c02e514" + integrity sha512-1NNCs6uurfkVbeXG4S8JFT9t19m45ICnif8zWLd5oPSZ50QnwMfK+H3jv408d4jw/7Bttv5axS5IiHoLaVNHeQ== + +postcss@8.4.31: + version "8.4.31" + resolved "https://registry.yarnpkg.com/postcss/-/postcss-8.4.31.tgz#92b451050a9f914da6755af352bdc0192508656d" + integrity sha512-PS08Iboia9mts/2ygV3eLpY5ghnUcfLV/EXTOW1E2qYxJKGGBUtNjN76FYHnMs36RmARn41bC0AZmn+rR0OVpQ== + dependencies: + nanoid "^3.3.6" + picocolors "^1.0.0" + source-map-js "^1.0.2" + +postcss@^8, postcss@^8.4.23: + version "8.4.39" + resolved "https://registry.yarnpkg.com/postcss/-/postcss-8.4.39.tgz#aa3c94998b61d3a9c259efa51db4b392e1bde0e3" + integrity sha512-0vzE+lAiG7hZl1/9I8yzKLx3aR9Xbof3fBHKunvMfOCYAtMhrsnccJY2iTURb9EZd5+pLuiNV9/c/GZJOHsgIw== + dependencies: + nanoid "^3.3.7" + picocolors "^1.0.1" + source-map-js "^1.2.0" + +prelude-ls@^1.2.1: + version "1.2.1" + resolved "https://registry.yarnpkg.com/prelude-ls/-/prelude-ls-1.2.1.tgz#debc6489d7a6e6b0e7611888cec880337d316396" + integrity sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g== + +prop-types@^15.8.1: + version "15.8.1" + resolved "https://registry.yarnpkg.com/prop-types/-/prop-types-15.8.1.tgz#67d87bf1a694f48435cf332c24af10214a3140b5" + integrity sha512-oj87CgZICdulUohogVAR7AjlC0327U4el4L6eAvOqCeudMDVU0NThNaV+b9Df4dXgSP1gXMTnPdhfe/2qDH5cg== + dependencies: + loose-envify "^1.4.0" + object-assign "^4.1.1" + react-is "^16.13.1" + +punycode@^2.1.0: + version "2.3.1" + resolved "https://registry.yarnpkg.com/punycode/-/punycode-2.3.1.tgz#027422e2faec0b25e1549c3e1bd8309b9133b6e5" + integrity sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg== 
+ +queue-microtask@^1.2.2: + version "1.2.3" + resolved "https://registry.yarnpkg.com/queue-microtask/-/queue-microtask-1.2.3.tgz#4929228bbc724dfac43e0efb058caf7b6cfb6243" + integrity sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A== + +react-dom@^18: + version "18.3.1" + resolved "https://registry.yarnpkg.com/react-dom/-/react-dom-18.3.1.tgz#c2265d79511b57d479b3dd3fdfa51536494c5cb4" + integrity sha512-5m4nQKp+rZRb09LNH59GM4BxTh9251/ylbKIbpe7TpGxfJ+9kv6BLkLBXIjjspbgbnIBNqlI23tRnTWT0snUIw== + dependencies: + loose-envify "^1.1.0" + scheduler "^0.23.2" + +react-is@^16.13.1: + version "16.13.1" + resolved "https://registry.yarnpkg.com/react-is/-/react-is-16.13.1.tgz#789729a4dc36de2999dc156dd6c1d9c18cea56a4" + integrity sha512-24e6ynE2H+OKt4kqsOvNd8kBpV65zoxbA4BVsEOB3ARVWQki/DHzaUoC5KuON/BiccDaCCTZBuOcfZs70kR8bQ== + +react-universal-interface@^0.6.2: + version "0.6.2" + resolved "https://registry.yarnpkg.com/react-universal-interface/-/react-universal-interface-0.6.2.tgz#5e8d438a01729a4dbbcbeeceb0b86be146fe2b3b" + integrity sha512-dg8yXdcQmvgR13RIlZbTRQOoUrDciFVoSBZILwjE2LFISxZZ8loVJKAkuzswl5js8BHda79bIb2b84ehU8IjXw== + +react-use@^17.5.0: + version "17.5.0" + resolved "https://registry.yarnpkg.com/react-use/-/react-use-17.5.0.tgz#1fae45638828a338291efa0f0c61862db7ee6442" + integrity sha512-PbfwSPMwp/hoL847rLnm/qkjg3sTRCvn6YhUZiHaUa3FA6/aNoFX79ul5Xt70O1rK+9GxSVqkY0eTwMdsR/bWg== + dependencies: + "@types/js-cookie" "^2.2.6" + "@xobotyi/scrollbar-width" "^1.9.5" + copy-to-clipboard "^3.3.1" + fast-deep-equal "^3.1.3" + fast-shallow-equal "^1.0.0" + js-cookie "^2.2.1" + nano-css "^5.6.1" + react-universal-interface "^0.6.2" + resize-observer-polyfill "^1.5.1" + screenfull "^5.1.0" + set-harmonic-interval "^1.0.1" + throttle-debounce "^3.0.1" + ts-easing "^0.2.0" + tslib "^2.1.0" + +react@^18: + version "18.3.1" + resolved "https://registry.yarnpkg.com/react/-/react-18.3.1.tgz#49ab892009c53933625bd16b2533fc754cab2891" + integrity sha512-wS+hAgJShR0KhEvPJArfuPVN1+Hz1t0Y6n5jLrGQbkb4urgPE/0Rve+1kMB1v/oWgHgm4WIcV+i7F2pTVj+2iQ== + dependencies: + loose-envify "^1.1.0" + +read-cache@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/read-cache/-/read-cache-1.0.0.tgz#e664ef31161166c9751cdbe8dbcf86b5fb58f774" + integrity sha512-Owdv/Ft7IjOgm/i0xvNDZ1LrRANRfew4b2prF3OWMQLxLfu3bS8FVhCsrSCMK4lR56Y9ya+AThoTpDCTxCmpRA== + dependencies: + pify "^2.3.0" + +readdirp@~3.6.0: + version "3.6.0" + resolved "https://registry.yarnpkg.com/readdirp/-/readdirp-3.6.0.tgz#74a370bd857116e245b29cc97340cd431a02a6c7" + integrity sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA== + dependencies: + picomatch "^2.2.1" + +reflect.getprototypeof@^1.0.4: + version "1.0.6" + resolved "https://registry.yarnpkg.com/reflect.getprototypeof/-/reflect.getprototypeof-1.0.6.tgz#3ab04c32a8390b770712b7a8633972702d278859" + integrity sha512-fmfw4XgoDke3kdI6h4xcUz1dG8uaiv5q9gcEwLS4Pnth2kxT+GZ7YehS1JTMGBQmtV7Y4GFGbs2re2NqhdozUg== + dependencies: + call-bind "^1.0.7" + define-properties "^1.2.1" + es-abstract "^1.23.1" + es-errors "^1.3.0" + get-intrinsic "^1.2.4" + globalthis "^1.0.3" + which-builtin-type "^1.1.3" + +regenerator-runtime@^0.14.0: + version "0.14.1" + resolved "https://registry.yarnpkg.com/regenerator-runtime/-/regenerator-runtime-0.14.1.tgz#356ade10263f685dda125100cd862c1db895327f" + integrity sha512-dYnhHh0nJoMfnkZs6GmmhFknAGRrLznOu5nc9ML+EJxGvrx6H7teuevqVqCuPcPK//3eDrrjQhehXVx9cnkGdw== + +regexp.prototype.flags@^1.5.1, 
regexp.prototype.flags@^1.5.2: + version "1.5.2" + resolved "https://registry.yarnpkg.com/regexp.prototype.flags/-/regexp.prototype.flags-1.5.2.tgz#138f644a3350f981a858c44f6bb1a61ff59be334" + integrity sha512-NcDiDkTLuPR+++OCKB0nWafEmhg/Da8aUPLPMQbK+bxKKCm1/S5he+AqYa4PlMCVBalb4/yxIRub6qkEx5yJbw== + dependencies: + call-bind "^1.0.6" + define-properties "^1.2.1" + es-errors "^1.3.0" + set-function-name "^2.0.1" + +resize-observer-polyfill@^1.5.1: + version "1.5.1" + resolved "https://registry.yarnpkg.com/resize-observer-polyfill/-/resize-observer-polyfill-1.5.1.tgz#0e9020dd3d21024458d4ebd27e23e40269810464" + integrity sha512-LwZrotdHOo12nQuZlHEmtuXdqGoOD0OhaxopaNFxWzInpEgaLWoVuAMbTzixuosCx2nEG58ngzW3vxdWoxIgdg== + +resolve-from@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/resolve-from/-/resolve-from-4.0.0.tgz#4abcd852ad32dd7baabfe9b40e00a36db5f392e6" + integrity sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g== + +resolve-pkg-maps@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/resolve-pkg-maps/-/resolve-pkg-maps-1.0.0.tgz#616b3dc2c57056b5588c31cdf4b3d64db133720f" + integrity sha512-seS2Tj26TBVOC2NIc2rOe2y2ZO7efxITtLZcGSOnHHNOQ7CkiUBfw0Iw2ck6xkIhPwLhKNLS8BO+hEpngQlqzw== + +resolve@^1.1.7, resolve@^1.22.2, resolve@^1.22.4: + version "1.22.8" + resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.22.8.tgz#b6c87a9f2aa06dfab52e3d70ac8cde321fa5a48d" + integrity sha512-oKWePCxqpd6FlLvGV1VU0x7bkPmmCNolxzjMf4NczoDnQcIWrAF+cPtZn5i6n+RfD2d9i0tzpKnG6Yk168yIyw== + dependencies: + is-core-module "^2.13.0" + path-parse "^1.0.7" + supports-preserve-symlinks-flag "^1.0.0" + +resolve@^2.0.0-next.5: + version "2.0.0-next.5" + resolved "https://registry.yarnpkg.com/resolve/-/resolve-2.0.0-next.5.tgz#6b0ec3107e671e52b68cd068ef327173b90dc03c" + integrity sha512-U7WjGVG9sH8tvjW5SmGbQuui75FiyjAX72HX15DwBBwF9dNiQZRQAg9nnPhYy+TUnE0+VcrttuvNI8oSxZcocA== + dependencies: + is-core-module "^2.13.0" + path-parse "^1.0.7" + supports-preserve-symlinks-flag "^1.0.0" + +reusify@^1.0.4: + version "1.0.4" + resolved "https://registry.yarnpkg.com/reusify/-/reusify-1.0.4.tgz#90da382b1e126efc02146e90845a88db12925d76" + integrity sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw== + +rimraf@^3.0.2: + version "3.0.2" + resolved "https://registry.yarnpkg.com/rimraf/-/rimraf-3.0.2.tgz#f1a5402ba6220ad52cc1282bac1ae3aa49fd061a" + integrity sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA== + dependencies: + glob "^7.1.3" + +rtl-css-js@^1.16.1: + version "1.16.1" + resolved "https://registry.yarnpkg.com/rtl-css-js/-/rtl-css-js-1.16.1.tgz#4b48b4354b0ff917a30488d95100fbf7219a3e80" + integrity sha512-lRQgou1mu19e+Ya0LsTvKrVJ5TYUbqCVPAiImX3UfLTenarvPUl1QFdvu5Z3PYmHT9RCcwIfbjRQBntExyj3Zg== + dependencies: + "@babel/runtime" "^7.1.2" + +run-parallel@^1.1.9: + version "1.2.0" + resolved "https://registry.yarnpkg.com/run-parallel/-/run-parallel-1.2.0.tgz#66d1368da7bdf921eb9d95bd1a9229e7f21a43ee" + integrity sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA== + dependencies: + queue-microtask "^1.2.2" + +safe-array-concat@^1.1.2: + version "1.1.2" + resolved "https://registry.yarnpkg.com/safe-array-concat/-/safe-array-concat-1.1.2.tgz#81d77ee0c4e8b863635227c721278dd524c20edb" + integrity sha512-vj6RsCsWBCf19jIeHEfkRMw8DPiBb+DMXklQ/1SGDHOMlHdPUkZXFQ2YdplS23zESTijAcurb1aSgJA3AgMu1Q== + dependencies: + call-bind "^1.0.7" + 
get-intrinsic "^1.2.4" + has-symbols "^1.0.3" + isarray "^2.0.5" + +safe-regex-test@^1.0.3: + version "1.0.3" + resolved "https://registry.yarnpkg.com/safe-regex-test/-/safe-regex-test-1.0.3.tgz#a5b4c0f06e0ab50ea2c395c14d8371232924c377" + integrity sha512-CdASjNJPvRa7roO6Ra/gLYBTzYzzPyyBXxIMdGW3USQLyjWEls2RgW5UBTXaQVp+OrpeCK3bLem8smtmheoRuw== + dependencies: + call-bind "^1.0.6" + es-errors "^1.3.0" + is-regex "^1.1.4" + +scheduler@^0.23.2: + version "0.23.2" + resolved "https://registry.yarnpkg.com/scheduler/-/scheduler-0.23.2.tgz#414ba64a3b282892e944cf2108ecc078d115cdc3" + integrity sha512-UOShsPwz7NrMUqhR6t0hWjFduvOzbtv7toDH1/hIrfRNIDBnnBWd0CwJTGvTpngVlmwGCdP9/Zl/tVrDqcuYzQ== + dependencies: + loose-envify "^1.1.0" + +screenfull@^5.1.0: + version "5.2.0" + resolved "https://registry.yarnpkg.com/screenfull/-/screenfull-5.2.0.tgz#6533d524d30621fc1283b9692146f3f13a93d1ba" + integrity sha512-9BakfsO2aUQN2K9Fdbj87RJIEZ82Q9IGim7FqM5OsebfoFC6ZHXgDq/KvniuLTPdeM8wY2o6Dj3WQ7KeQCj3cA== + +semver@^6.3.1: + version "6.3.1" + resolved "https://registry.yarnpkg.com/semver/-/semver-6.3.1.tgz#556d2ef8689146e46dcea4bfdd095f3434dffcb4" + integrity sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA== + +semver@^7.5.4: + version "7.6.2" + resolved "https://registry.yarnpkg.com/semver/-/semver-7.6.2.tgz#1e3b34759f896e8f14d6134732ce798aeb0c6e13" + integrity sha512-FNAIBWCx9qcRhoHcgcJ0gvU7SN1lYU2ZXuSfl04bSC5OpvDHFyJCjdNHomPXxjQlCBU67YW64PzY7/VIEH7F2w== + +set-function-length@^1.2.1: + version "1.2.2" + resolved "https://registry.yarnpkg.com/set-function-length/-/set-function-length-1.2.2.tgz#aac72314198eaed975cf77b2c3b6b880695e5449" + integrity sha512-pgRc4hJ4/sNjWCSS9AmnS40x3bNMDTknHgL5UaMBTMyJnU90EgWh1Rz+MC9eFu4BuN/UwZjKQuY/1v3rM7HMfg== + dependencies: + define-data-property "^1.1.4" + es-errors "^1.3.0" + function-bind "^1.1.2" + get-intrinsic "^1.2.4" + gopd "^1.0.1" + has-property-descriptors "^1.0.2" + +set-function-name@^2.0.1, set-function-name@^2.0.2: + version "2.0.2" + resolved "https://registry.yarnpkg.com/set-function-name/-/set-function-name-2.0.2.tgz#16a705c5a0dc2f5e638ca96d8a8cd4e1c2b90985" + integrity sha512-7PGFlmtwsEADb0WYyvCMa1t+yke6daIG4Wirafur5kcf+MhUnPms1UeR0CKQdTZD81yESwMHbtn+TR+dMviakQ== + dependencies: + define-data-property "^1.1.4" + es-errors "^1.3.0" + functions-have-names "^1.2.3" + has-property-descriptors "^1.0.2" + +set-harmonic-interval@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/set-harmonic-interval/-/set-harmonic-interval-1.0.1.tgz#e1773705539cdfb80ce1c3d99e7f298bb3995249" + integrity sha512-AhICkFV84tBP1aWqPwLZqFvAwqEoVA9kxNMniGEUvzOlm4vLmOFLiTT3UZ6bziJTy4bOVpzWGTfSCbmaayGx8g== + +shebang-command@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/shebang-command/-/shebang-command-2.0.0.tgz#ccd0af4f8835fbdc265b82461aaf0c36663f34ea" + integrity sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA== + dependencies: + shebang-regex "^3.0.0" + +shebang-regex@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/shebang-regex/-/shebang-regex-3.0.0.tgz#ae16f1644d873ecad843b0307b143362d4c42172" + integrity sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A== + +side-channel@^1.0.4, side-channel@^1.0.6: + version "1.0.6" + resolved "https://registry.yarnpkg.com/side-channel/-/side-channel-1.0.6.tgz#abd25fb7cd24baf45466406b1096b7831c9215f2" + integrity 
sha512-fDW/EZ6Q9RiO8eFG8Hj+7u/oW+XrPTIChwCOM2+th2A6OblDtYYIpve9m+KvI9Z4C9qSEXlaGR6bTEYHReuglA== + dependencies: + call-bind "^1.0.7" + es-errors "^1.3.0" + get-intrinsic "^1.2.4" + object-inspect "^1.13.1" + +signal-exit@^4.0.1: + version "4.1.0" + resolved "https://registry.yarnpkg.com/signal-exit/-/signal-exit-4.1.0.tgz#952188c1cbd546070e2dd20d0f41c0ae0530cb04" + integrity sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw== + +slash@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/slash/-/slash-3.0.0.tgz#6539be870c165adbd5240220dbe361f1bc4d4634" + integrity sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q== + +source-map-js@^1.0.2, source-map-js@^1.2.0: + version "1.2.0" + resolved "https://registry.yarnpkg.com/source-map-js/-/source-map-js-1.2.0.tgz#16b809c162517b5b8c3e7dcd315a2a5c2612b2af" + integrity sha512-itJW8lvSA0TXEphiRoawsCksnlf8SyvmFzIhltqAHluXd88pkCd+cXJVHTDwdCr0IzwptSm035IHQktUu1QUMg== + +source-map@0.5.6: + version "0.5.6" + resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.5.6.tgz#75ce38f52bf0733c5a7f0c118d81334a2bb5f412" + integrity sha512-MjZkVp0NHr5+TPihLcadqnlVoGIoWo4IBHptutGh9wI3ttUYvCG26HkSuDi+K6lsZ25syXJXcctwgyVCt//xqA== + +source-map@^0.6.1: + version "0.6.1" + resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.6.1.tgz#74722af32e9614e9c287a8d0bbde48b5e2f1a263" + integrity sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g== + +stack-generator@^2.0.5: + version "2.0.10" + resolved "https://registry.yarnpkg.com/stack-generator/-/stack-generator-2.0.10.tgz#8ae171e985ed62287d4f1ed55a1633b3fb53bb4d" + integrity sha512-mwnua/hkqM6pF4k8SnmZ2zfETsRUpWXREfA/goT8SLCV4iOFa4bzOX2nDipWAZFPTjLvQB82f5yaodMVhK0yJQ== + dependencies: + stackframe "^1.3.4" + +stackframe@^1.3.4: + version "1.3.4" + resolved "https://registry.yarnpkg.com/stackframe/-/stackframe-1.3.4.tgz#b881a004c8c149a5e8efef37d51b16e412943310" + integrity sha512-oeVtt7eWQS+Na6F//S4kJ2K2VbRlS9D43mAlMyVpVWovy9o+jfgH8O9agzANzaiLjclA0oYzUXEM4PurhSUChw== + +stacktrace-gps@^3.0.4: + version "3.1.2" + resolved "https://registry.yarnpkg.com/stacktrace-gps/-/stacktrace-gps-3.1.2.tgz#0c40b24a9b119b20da4525c398795338966a2fb0" + integrity sha512-GcUgbO4Jsqqg6RxfyTHFiPxdPqF+3LFmQhm7MgCuYQOYuWyqxo5pwRPz5d/u6/WYJdEnWfK4r+jGbyD8TSggXQ== + dependencies: + source-map "0.5.6" + stackframe "^1.3.4" + +stacktrace-js@^2.0.2: + version "2.0.2" + resolved "https://registry.yarnpkg.com/stacktrace-js/-/stacktrace-js-2.0.2.tgz#4ca93ea9f494752d55709a081d400fdaebee897b" + integrity sha512-Je5vBeY4S1r/RnLydLl0TBTi3F2qdfWmYsGvtfZgEI+SCprPppaIhQf5nGcal4gI4cGpCV/duLcAzT1np6sQqg== + dependencies: + error-stack-parser "^2.0.6" + stack-generator "^2.0.5" + stacktrace-gps "^3.0.4" + +stop-iteration-iterator@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/stop-iteration-iterator/-/stop-iteration-iterator-1.0.0.tgz#6a60be0b4ee757d1ed5254858ec66b10c49285e4" + integrity sha512-iCGQj+0l0HOdZ2AEeBADlsRC+vsnDsZsbdSiH1yNSjcfKM7fdpCMfqAL/dwF5BLiw/XhRft/Wax6zQbhq2BcjQ== + dependencies: + internal-slot "^1.0.4" + +streamsearch@^1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/streamsearch/-/streamsearch-1.1.0.tgz#404dd1e2247ca94af554e841a8ef0eaa238da764" + integrity sha512-Mcc5wHehp9aXz1ax6bZUyY5afg9u2rv5cqQI3mRrYkGC8rW2hM02jWuwjtL++LS5qinSyhj2QfLyNsuc+VsExg== + +"string-width-cjs@npm:string-width@^4.2.0": + version "4.2.3" + resolved 
"https://registry.yarnpkg.com/string-width/-/string-width-4.2.3.tgz#269c7117d27b05ad2e536830a8ec895ef9c6d010" + integrity sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g== + dependencies: + emoji-regex "^8.0.0" + is-fullwidth-code-point "^3.0.0" + strip-ansi "^6.0.1" + +string-width@^4.1.0: + version "4.2.3" + resolved "https://registry.yarnpkg.com/string-width/-/string-width-4.2.3.tgz#269c7117d27b05ad2e536830a8ec895ef9c6d010" + integrity sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g== + dependencies: + emoji-regex "^8.0.0" + is-fullwidth-code-point "^3.0.0" + strip-ansi "^6.0.1" + +string-width@^5.0.1, string-width@^5.1.2: + version "5.1.2" + resolved "https://registry.yarnpkg.com/string-width/-/string-width-5.1.2.tgz#14f8daec6d81e7221d2a357e668cab73bdbca794" + integrity sha512-HnLOCR3vjcY8beoNLtcjZ5/nxn2afmME6lhrDrebokqMap+XbeW8n9TXpPDOqdGK5qcI3oT0GKTW6wC7EMiVqA== + dependencies: + eastasianwidth "^0.2.0" + emoji-regex "^9.2.2" + strip-ansi "^7.0.1" + +string.prototype.includes@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/string.prototype.includes/-/string.prototype.includes-2.0.0.tgz#8986d57aee66d5460c144620a6d873778ad7289f" + integrity sha512-E34CkBgyeqNDcrbU76cDjL5JLcVrtSdYq0MEh/B10r17pRP4ciHLwTgnuLV8Ay6cgEMLkcBkFCKyFZ43YldYzg== + dependencies: + define-properties "^1.1.3" + es-abstract "^1.17.5" + +string.prototype.matchall@^4.0.11: + version "4.0.11" + resolved "https://registry.yarnpkg.com/string.prototype.matchall/-/string.prototype.matchall-4.0.11.tgz#1092a72c59268d2abaad76582dccc687c0297e0a" + integrity sha512-NUdh0aDavY2og7IbBPenWqR9exH+E26Sv8e0/eTe1tltDGZL+GtBkDAnnyBtmekfK6/Dq3MkcGtzXFEd1LQrtg== + dependencies: + call-bind "^1.0.7" + define-properties "^1.2.1" + es-abstract "^1.23.2" + es-errors "^1.3.0" + es-object-atoms "^1.0.0" + get-intrinsic "^1.2.4" + gopd "^1.0.1" + has-symbols "^1.0.3" + internal-slot "^1.0.7" + regexp.prototype.flags "^1.5.2" + set-function-name "^2.0.2" + side-channel "^1.0.6" + +string.prototype.trim@^1.2.9: + version "1.2.9" + resolved "https://registry.yarnpkg.com/string.prototype.trim/-/string.prototype.trim-1.2.9.tgz#b6fa326d72d2c78b6df02f7759c73f8f6274faa4" + integrity sha512-klHuCNxiMZ8MlsOihJhJEBJAiMVqU3Z2nEXWfWnIqjN0gEFS9J9+IxKozWWtQGcgoa1WUZzLjKPTr4ZHNFTFxw== + dependencies: + call-bind "^1.0.7" + define-properties "^1.2.1" + es-abstract "^1.23.0" + es-object-atoms "^1.0.0" + +string.prototype.trimend@^1.0.8: + version "1.0.8" + resolved "https://registry.yarnpkg.com/string.prototype.trimend/-/string.prototype.trimend-1.0.8.tgz#3651b8513719e8a9f48de7f2f77640b26652b229" + integrity sha512-p73uL5VCHCO2BZZ6krwwQE3kCzM7NKmis8S//xEC6fQonchbum4eP6kR4DLEjQFO3Wnj3Fuo8NM0kOSjVdHjZQ== + dependencies: + call-bind "^1.0.7" + define-properties "^1.2.1" + es-object-atoms "^1.0.0" + +string.prototype.trimstart@^1.0.8: + version "1.0.8" + resolved "https://registry.yarnpkg.com/string.prototype.trimstart/-/string.prototype.trimstart-1.0.8.tgz#7ee834dda8c7c17eff3118472bb35bfedaa34dde" + integrity sha512-UXSH262CSZY1tfu3G3Secr6uGLCFVPMhIqHjlgCUtCCcgihYc/xKs9djMTMUOb2j1mVSeU8EU6NWc/iQKU6Gfg== + dependencies: + call-bind "^1.0.7" + define-properties "^1.2.1" + es-object-atoms "^1.0.0" + +"strip-ansi-cjs@npm:strip-ansi@^6.0.1": + version "6.0.1" + resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-6.0.1.tgz#9e26c63d30f53443e9489495b2105d37b67a85d9" + integrity 
sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A== + dependencies: + ansi-regex "^5.0.1" + +strip-ansi@^6.0.0, strip-ansi@^6.0.1: + version "6.0.1" + resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-6.0.1.tgz#9e26c63d30f53443e9489495b2105d37b67a85d9" + integrity sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A== + dependencies: + ansi-regex "^5.0.1" + +strip-ansi@^7.0.1: + version "7.1.0" + resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-7.1.0.tgz#d5b6568ca689d8561370b0707685d22434faff45" + integrity sha512-iq6eVVI64nQQTRYq2KtEg2d2uU7LElhTJwsH4YzIHZshxlgZms/wIc4VoDQTlG/IvVIrBKG06CrZnp0qv7hkcQ== + dependencies: + ansi-regex "^6.0.1" + +strip-bom@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/strip-bom/-/strip-bom-3.0.0.tgz#2334c18e9c759f7bdd56fdef7e9ae3d588e68ed3" + integrity sha512-vavAMRXOgBVNF6nyEEmL3DBK19iRpDcoIwW+swQ+CbGiu7lju6t+JklA1MHweoWtadgt4ISVUsXLyDq34ddcwA== + +strip-json-comments@^3.1.1: + version "3.1.1" + resolved "https://registry.yarnpkg.com/strip-json-comments/-/strip-json-comments-3.1.1.tgz#31f1281b3832630434831c310c01cccda8cbe006" + integrity sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig== + +styled-jsx@5.1.1: + version "5.1.1" + resolved "https://registry.yarnpkg.com/styled-jsx/-/styled-jsx-5.1.1.tgz#839a1c3aaacc4e735fed0781b8619ea5d0009d1f" + integrity sha512-pW7uC1l4mBZ8ugbiZrcIsiIvVx1UmTfw7UkC3Um2tmfUq9Bhk8IiyEIPl6F8agHgjzku6j0xQEZbfA5uSgSaCw== + dependencies: + client-only "0.0.1" + +stylis@^4.3.0: + version "4.3.2" + resolved "https://registry.yarnpkg.com/stylis/-/stylis-4.3.2.tgz#8f76b70777dd53eb669c6f58c997bf0a9972e444" + integrity sha512-bhtUjWd/z6ltJiQwg0dUfxEJ+W+jdqQd8TbWLWyeIJHlnsqmGLRFFd8e5mA0AZi/zx90smXRlN66YMTcaSFifg== + +sucrase@^3.32.0: + version "3.35.0" + resolved "https://registry.yarnpkg.com/sucrase/-/sucrase-3.35.0.tgz#57f17a3d7e19b36d8995f06679d121be914ae263" + integrity sha512-8EbVDiu9iN/nESwxeSxDKe0dunta1GOlHufmSSXxMD2z2/tMZpDMpvXQGsc+ajGo8y2uYUmixaSRUc/QPoQ0GA== + dependencies: + "@jridgewell/gen-mapping" "^0.3.2" + commander "^4.0.0" + glob "^10.3.10" + lines-and-columns "^1.1.6" + mz "^2.7.0" + pirates "^4.0.1" + ts-interface-checker "^0.1.9" + +supports-color@^7.1.0: + version "7.2.0" + resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-7.2.0.tgz#1b7dcdcb32b8138801b3e478ba6a51caa89648da" + integrity sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw== + dependencies: + has-flag "^4.0.0" + +supports-preserve-symlinks-flag@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz#6eda4bd344a3c94aea376d4cc31bc77311039e09" + integrity sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w== + +swiper@^11.1.3: + version "11.1.4" + resolved "https://registry.yarnpkg.com/swiper/-/swiper-11.1.4.tgz#2f8e303e8bf9e5bc40a3885fc637ae60ff27996c" + integrity sha512-1n7kbYJB2dFEpUHRFszq7gys/ofIBrMNibwTiMvPHwneKND/t9kImnHt6CfGPScMHgI+dWMbGTycCKGMoOO1KA== + +tailwindcss@^3.3.0: + version "3.4.4" + resolved "https://registry.yarnpkg.com/tailwindcss/-/tailwindcss-3.4.4.tgz#351d932273e6abfa75ce7d226b5bf3a6cb257c05" + integrity sha512-ZoyXOdJjISB7/BcLTR6SEsLgKtDStYyYZVLsUtWChO4Ps20CBad7lfJKVDiejocV4ME1hLmyY0WJE3hSDcmQ2A== + dependencies: + "@alloc/quick-lru" "^5.2.0" + arg "^5.0.2" + chokidar 
"^3.5.3" + didyoumean "^1.2.2" + dlv "^1.1.3" + fast-glob "^3.3.0" + glob-parent "^6.0.2" + is-glob "^4.0.3" + jiti "^1.21.0" + lilconfig "^2.1.0" + micromatch "^4.0.5" + normalize-path "^3.0.0" + object-hash "^3.0.0" + picocolors "^1.0.0" + postcss "^8.4.23" + postcss-import "^15.1.0" + postcss-js "^4.0.1" + postcss-load-config "^4.0.1" + postcss-nested "^6.0.1" + postcss-selector-parser "^6.0.11" + resolve "^1.22.2" + sucrase "^3.32.0" + +tapable@^2.2.0: + version "2.2.1" + resolved "https://registry.yarnpkg.com/tapable/-/tapable-2.2.1.tgz#1967a73ef4060a82f12ab96af86d52fdb76eeca0" + integrity sha512-GNzQvQTOIP6RyTfE2Qxb8ZVlNmw0n88vp1szwWRimP02mnTsx3Wtn5qRdqY9w2XduFNUgvOwhNnQsjwCp+kqaQ== + +text-table@^0.2.0: + version "0.2.0" + resolved "https://registry.yarnpkg.com/text-table/-/text-table-0.2.0.tgz#7f5ee823ae805207c00af2df4a84ec3fcfa570b4" + integrity sha512-N+8UisAXDGk8PFXP4HAzVR9nbfmVJ3zYLAWiTIoqC5v5isinhr+r5uaO8+7r3BMfuNIufIsA7RdpVgacC2cSpw== + +thenify-all@^1.0.0: + version "1.6.0" + resolved "https://registry.yarnpkg.com/thenify-all/-/thenify-all-1.6.0.tgz#1a1918d402d8fc3f98fbf234db0bcc8cc10e9726" + integrity sha512-RNxQH/qI8/t3thXJDwcstUO4zeqo64+Uy/+sNVRBx4Xn2OX+OZ9oP+iJnNFqplFra2ZUVeKCSa2oVWi3T4uVmA== + dependencies: + thenify ">= 3.1.0 < 4" + +"thenify@>= 3.1.0 < 4": + version "3.3.1" + resolved "https://registry.yarnpkg.com/thenify/-/thenify-3.3.1.tgz#8932e686a4066038a016dd9e2ca46add9838a95f" + integrity sha512-RVZSIV5IG10Hk3enotrhvz0T9em6cyHBLkH/YAZuKqd8hRkKhSfCGIcP2KUY0EPxndzANBmNllzWPwak+bheSw== + dependencies: + any-promise "^1.0.0" + +throttle-debounce@^3.0.1: + version "3.0.1" + resolved "https://registry.yarnpkg.com/throttle-debounce/-/throttle-debounce-3.0.1.tgz#32f94d84dfa894f786c9a1f290e7a645b6a19abb" + integrity sha512-dTEWWNu6JmeVXY0ZYoPuH5cRIwc0MeGbJwah9KUNYSJwommQpCzTySTpEe8Gs1J23aeWEuAobe4Ag7EHVt/LOg== + +to-regex-range@^5.0.1: + version "5.0.1" + resolved "https://registry.yarnpkg.com/to-regex-range/-/to-regex-range-5.0.1.tgz#1648c44aae7c8d988a326018ed72f5b4dd0392e4" + integrity sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ== + dependencies: + is-number "^7.0.0" + +toggle-selection@^1.0.6: + version "1.0.6" + resolved "https://registry.yarnpkg.com/toggle-selection/-/toggle-selection-1.0.6.tgz#6e45b1263f2017fa0acc7d89d78b15b8bf77da32" + integrity sha512-BiZS+C1OS8g/q2RRbJmy59xpyghNBqrr6k5L/uKBGRsTfxmu3ffiRnd8mlGPUVayg8pvfi5urfnu8TU7DVOkLQ== + +ts-api-utils@^1.0.1: + version "1.3.0" + resolved "https://registry.yarnpkg.com/ts-api-utils/-/ts-api-utils-1.3.0.tgz#4b490e27129f1e8e686b45cc4ab63714dc60eea1" + integrity sha512-UQMIo7pb8WRomKR1/+MFVLTroIvDVtMX3K6OUir8ynLyzB8Jeriont2bTAtmNPa1ekAgN7YPDyf6V+ygrdU+eQ== + +ts-easing@^0.2.0: + version "0.2.0" + resolved "https://registry.yarnpkg.com/ts-easing/-/ts-easing-0.2.0.tgz#c8a8a35025105566588d87dbda05dd7fbfa5a4ec" + integrity sha512-Z86EW+fFFh/IFB1fqQ3/+7Zpf9t2ebOAxNI/V6Wo7r5gqiqtxmgTlQ1qbqQcjLKYeSHPTsEmvlJUDg/EuL0uHQ== + +ts-interface-checker@^0.1.9: + version "0.1.13" + resolved "https://registry.yarnpkg.com/ts-interface-checker/-/ts-interface-checker-0.1.13.tgz#784fd3d679722bc103b1b4b8030bcddb5db2a699" + integrity sha512-Y/arvbn+rrz3JCKl9C4kVNfTfSm2/mEp5FSz5EsZSANGPSlQrpRI5M4PKF+mJnE52jOO90PnPSc3Ur3bTQw0gA== + +tsconfig-paths@^3.15.0: + version "3.15.0" + resolved "https://registry.yarnpkg.com/tsconfig-paths/-/tsconfig-paths-3.15.0.tgz#5299ec605e55b1abb23ec939ef15edaf483070d4" + integrity 
sha512-2Ac2RgzDe/cn48GvOe3M+o82pEFewD3UPbyoUHHdKasHwJKjds4fLXWf/Ux5kATBKN20oaFGu+jbElp1pos0mg== + dependencies: + "@types/json5" "^0.0.29" + json5 "^1.0.2" + minimist "^1.2.6" + strip-bom "^3.0.0" + +tslib@^2.1.0, tslib@^2.4.0: + version "2.6.3" + resolved "https://registry.yarnpkg.com/tslib/-/tslib-2.6.3.tgz#0438f810ad7a9edcde7a241c3d80db693c8cbfe0" + integrity sha512-xNvxJEOUiWPGhUuUdQgAJPKOOJfGnIyKySOc09XkKsgdUV/3E2zvwZYdejjmRgPCgcym1juLH3226yA7sEFJKQ== + +type-check@^0.4.0, type-check@~0.4.0: + version "0.4.0" + resolved "https://registry.yarnpkg.com/type-check/-/type-check-0.4.0.tgz#07b8203bfa7056c0657050e3ccd2c37730bab8f1" + integrity sha512-XleUoc9uwGXqjWwXaUTZAmzMcFZ5858QA2vvx1Ur5xIcixXIP+8LnFDgRplU30us6teqdlskFfu+ae4K79Ooew== + dependencies: + prelude-ls "^1.2.1" + +type-fest@^0.20.2: + version "0.20.2" + resolved "https://registry.yarnpkg.com/type-fest/-/type-fest-0.20.2.tgz#1bf207f4b28f91583666cb5fbd327887301cd5f4" + integrity sha512-Ne+eE4r0/iWnpAxD852z3A+N0Bt5RN//NjJwRd2VFHEmrywxf5vsZlh4R6lixl6B+wz/8d+maTSAkN1FIkI3LQ== + +typed-array-buffer@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/typed-array-buffer/-/typed-array-buffer-1.0.2.tgz#1867c5d83b20fcb5ccf32649e5e2fc7424474ff3" + integrity sha512-gEymJYKZtKXzzBzM4jqa9w6Q1Jjm7x2d+sh19AdsD4wqnMPDYyvwpsIc2Q/835kHuo3BEQ7CjelGhfTsoBb2MQ== + dependencies: + call-bind "^1.0.7" + es-errors "^1.3.0" + is-typed-array "^1.1.13" + +typed-array-byte-length@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/typed-array-byte-length/-/typed-array-byte-length-1.0.1.tgz#d92972d3cff99a3fa2e765a28fcdc0f1d89dec67" + integrity sha512-3iMJ9q0ao7WE9tWcaYKIptkNBuOIcZCCT0d4MRvuuH88fEoEH62IuQe0OtraD3ebQEoTRk8XCBoknUNc1Y67pw== + dependencies: + call-bind "^1.0.7" + for-each "^0.3.3" + gopd "^1.0.1" + has-proto "^1.0.3" + is-typed-array "^1.1.13" + +typed-array-byte-offset@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/typed-array-byte-offset/-/typed-array-byte-offset-1.0.2.tgz#f9ec1acb9259f395093e4567eb3c28a580d02063" + integrity sha512-Ous0vodHa56FviZucS2E63zkgtgrACj7omjwd/8lTEMEPFFyjfixMZ1ZXenpgCFBBt4EC1J2XsyVS2gkG0eTFA== + dependencies: + available-typed-arrays "^1.0.7" + call-bind "^1.0.7" + for-each "^0.3.3" + gopd "^1.0.1" + has-proto "^1.0.3" + is-typed-array "^1.1.13" + +typed-array-length@^1.0.6: + version "1.0.6" + resolved "https://registry.yarnpkg.com/typed-array-length/-/typed-array-length-1.0.6.tgz#57155207c76e64a3457482dfdc1c9d1d3c4c73a3" + integrity sha512-/OxDN6OtAk5KBpGb28T+HZc2M+ADtvRxXrKKbUwtsLgdoxgX13hyy7ek6bFRl5+aBs2yZzB0c4CnQfAtVypW/g== + dependencies: + call-bind "^1.0.7" + for-each "^0.3.3" + gopd "^1.0.1" + has-proto "^1.0.3" + is-typed-array "^1.1.13" + possible-typed-array-names "^1.0.0" + +typescript@^5: + version "5.5.3" + resolved "https://registry.yarnpkg.com/typescript/-/typescript-5.5.3.tgz#e1b0a3c394190838a0b168e771b0ad56a0af0faa" + integrity sha512-/hreyEujaB0w76zKo6717l3L0o/qEUtRgdvUBvlkhoWeOVMjMuHNHk0BRBzikzuGDqNmPQbg5ifMEqsHLiIUcQ== + +unbox-primitive@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/unbox-primitive/-/unbox-primitive-1.0.2.tgz#29032021057d5e6cdbd08c5129c226dff8ed6f9e" + integrity sha512-61pPlCD9h51VoreyJ0BReideM3MDKMKnh6+V9L08331ipq6Q8OFXZYiqP6n/tbHx4s5I9uRhcye6BrbkizkBDw== + dependencies: + call-bind "^1.0.2" + has-bigints "^1.0.2" + has-symbols "^1.0.3" + which-boxed-primitive "^1.0.2" + +undici-types@~5.26.4: + version "5.26.5" + resolved 
"https://registry.yarnpkg.com/undici-types/-/undici-types-5.26.5.tgz#bcd539893d00b56e964fd2657a4866b221a65617" + integrity sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA== + +update-browserslist-db@^1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/update-browserslist-db/-/update-browserslist-db-1.1.0.tgz#7ca61c0d8650766090728046e416a8cde682859e" + integrity sha512-EdRAaAyk2cUE1wOf2DkEhzxqOQvFOoRJFNS6NeyJ01Gp2beMRpBAINjM2iDXE3KCuKhwnvHIQCJm6ThL2Z+HzQ== + dependencies: + escalade "^3.1.2" + picocolors "^1.0.1" + +uri-js@^4.2.2: + version "4.4.1" + resolved "https://registry.yarnpkg.com/uri-js/-/uri-js-4.4.1.tgz#9b1a52595225859e55f669d928f88c6c57f2a77e" + integrity sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg== + dependencies: + punycode "^2.1.0" + +util-deprecate@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/util-deprecate/-/util-deprecate-1.0.2.tgz#450d4dc9fa70de732762fbd2d4a28981419a0ccf" + integrity sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw== + +"wasm@file:../wasm/pkg": + version "1.0.0" + dependencies: + dexie "^4.0.1" + +which-boxed-primitive@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/which-boxed-primitive/-/which-boxed-primitive-1.0.2.tgz#13757bc89b209b049fe5d86430e21cf40a89a8e6" + integrity sha512-bwZdv0AKLpplFY2KZRX6TvyuN7ojjr7lwkg6ml0roIy9YeuSr7JS372qlNW18UQYzgYK9ziGcerWqZOmEn9VNg== + dependencies: + is-bigint "^1.0.1" + is-boolean-object "^1.1.0" + is-number-object "^1.0.4" + is-string "^1.0.5" + is-symbol "^1.0.3" + +which-builtin-type@^1.1.3: + version "1.1.3" + resolved "https://registry.yarnpkg.com/which-builtin-type/-/which-builtin-type-1.1.3.tgz#b1b8443707cc58b6e9bf98d32110ff0c2cbd029b" + integrity sha512-YmjsSMDBYsM1CaFiayOVT06+KJeXf0o5M/CAd4o1lTadFAtacTUM49zoYxr/oroopFDfhvN6iEcBxUyc3gvKmw== + dependencies: + function.prototype.name "^1.1.5" + has-tostringtag "^1.0.0" + is-async-function "^2.0.0" + is-date-object "^1.0.5" + is-finalizationregistry "^1.0.2" + is-generator-function "^1.0.10" + is-regex "^1.1.4" + is-weakref "^1.0.2" + isarray "^2.0.5" + which-boxed-primitive "^1.0.2" + which-collection "^1.0.1" + which-typed-array "^1.1.9" + +which-collection@^1.0.1: + version "1.0.2" + resolved "https://registry.yarnpkg.com/which-collection/-/which-collection-1.0.2.tgz#627ef76243920a107e7ce8e96191debe4b16c2a0" + integrity sha512-K4jVyjnBdgvc86Y6BkaLZEN933SwYOuBFkdmBu9ZfkcAbdVbpITnDmjvZ/aQjRXQrv5EPkTnD1s39GiiqbngCw== + dependencies: + is-map "^2.0.3" + is-set "^2.0.3" + is-weakmap "^2.0.2" + is-weakset "^2.0.3" + +which-typed-array@^1.1.13, which-typed-array@^1.1.14, which-typed-array@^1.1.15, which-typed-array@^1.1.9: + version "1.1.15" + resolved "https://registry.yarnpkg.com/which-typed-array/-/which-typed-array-1.1.15.tgz#264859e9b11a649b388bfaaf4f767df1f779b38d" + integrity sha512-oV0jmFtUky6CXfkqehVvBP/LSWJ2sy4vWMioiENyJLePrBO/yKyV9OyJySfAKosh+RYkIl5zJCNZ8/4JncrpdA== + dependencies: + available-typed-arrays "^1.0.7" + call-bind "^1.0.7" + for-each "^0.3.3" + gopd "^1.0.1" + has-tostringtag "^1.0.2" + +which@^2.0.1: + version "2.0.2" + resolved "https://registry.yarnpkg.com/which/-/which-2.0.2.tgz#7c6a8dd0a636a0327e10b59c9286eee93f3f51b1" + integrity sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA== + dependencies: + isexe "^2.0.0" + +word-wrap@^1.2.5: + version "1.2.5" + resolved 
"https://registry.yarnpkg.com/word-wrap/-/word-wrap-1.2.5.tgz#d2c45c6dd4fbce621a66f136cbe328afd0410b34" + integrity sha512-BN22B5eaMMI9UMtjrGd5g5eCYPpCPDUy0FJXbYsaT5zYxjFOckS53SQDE3pWkVoWpHXVb3BrYcEN4Twa55B5cA== + +"wrap-ansi-cjs@npm:wrap-ansi@^7.0.0": + version "7.0.0" + resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-7.0.0.tgz#67e145cff510a6a6984bdf1152911d69d2eb9e43" + integrity sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q== + dependencies: + ansi-styles "^4.0.0" + string-width "^4.1.0" + strip-ansi "^6.0.0" + +wrap-ansi@^8.1.0: + version "8.1.0" + resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-8.1.0.tgz#56dc22368ee570face1b49819975d9b9a5ead214" + integrity sha512-si7QWI6zUMq56bESFvagtmzMdGOtoxfR+Sez11Mobfc7tm+VkUckk9bW2UeffTGVUbOksxmSw0AA2gs8g71NCQ== + dependencies: + ansi-styles "^6.1.0" + string-width "^5.0.1" + strip-ansi "^7.0.1" + +wrappy@1: + version "1.0.2" + resolved "https://registry.yarnpkg.com/wrappy/-/wrappy-1.0.2.tgz#b5243d8f3ec1aa35f1364605bc0d1036e30ab69f" + integrity sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ== + +yaml@^2.3.4: + version "2.4.5" + resolved "https://registry.yarnpkg.com/yaml/-/yaml-2.4.5.tgz#60630b206dd6d84df97003d33fc1ddf6296cca5e" + integrity sha512-aBx2bnqDzVOyNKfsysjA2ms5ZlnjSAW2eG3/L5G/CSujfjLJTJsEw1bGw8kCf04KodQWk1pxlGnZ56CRxiawmg== + +yocto-queue@^0.1.0: + version "0.1.0" + resolved "https://registry.yarnpkg.com/yocto-queue/-/yocto-queue-0.1.0.tgz#0294eb3dee05028d31ee1a5fa2c556a6aaf10a1b" + integrity sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q== diff --git a/yarn.lock b/yarn.lock new file mode 100644 index 000000000..fb57ccd13 --- /dev/null +++ b/yarn.lock @@ -0,0 +1,4 @@ +# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY. +# yarn lockfile v1 + +