diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 0256a8299f..7674b9b8ef 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -15,13 +15,8 @@ jobs: aws_ecr_registry: 332405224602.dkr.ecr.eu-central-1.amazonaws.com aws_role_arn: arn:aws:iam::332405224602:role/ci aws_region: eu-central-1 - deployment_images: | - cat-data-service - fragment-exporter - migrations - voting-node + publish_docs: false secrets: - deployment_token: ${{ secrets.CI_BOT_TOKEN }} dockerhub_token: ${{ secrets.DOCKERHUB_TOKEN }} dockerhub_username: ${{ secrets.DOCKERHUB_USERNAME }} earthly_runner_address: ${{ secrets.EARTHLY_SATELLITE_ADDRESS }} diff --git a/.github/workflows/nix.yml b/.github/workflows/nix.yml deleted file mode 100644 index f366f1d913..0000000000 --- a/.github/workflows/nix.yml +++ /dev/null @@ -1,99 +0,0 @@ -name: Nix CI - -on: {} - -permissions: - id-token: write - contents: read - -concurrency: - group: ${{ github.sha }} - cancel-in-progress: true - -env: - AWS_REGION: eu-central-1 - AWS_ROLE_ARN: arn:aws:iam::332405224602:role/ci - ECR_REGISTRY: 332405224602.dkr.ecr.eu-central-1.amazonaws.com - S3_CACHE: s3://iog-catalyst-nix?region=eu-central-1 - -jobs: - discover: - outputs: - hits: ${{ steps.discovery.outputs.hits }} - nix_conf: ${{ steps.discovery.outputs.nix_conf }} - runs-on: ubuntu-latest - concurrency: - group: ${{ github.workflow }} - steps: - - name: Standard Discovery - uses: divnix/std-action/discover@v0.0.4 - id: discovery - build-packages: - needs: discover - strategy: - fail-fast: false - matrix: - target: ${{ fromJSON(needs.discover.outputs.hits).packages.build }} - name: ${{ matrix.target.cell }} - ${{ matrix.target.name }} - runs-on: ubuntu-latest - steps: - - name: Configure AWS Credentials - uses: aws-actions/configure-aws-credentials@v1.7.0 - with: - role-to-assume: ${{ env.AWS_ROLE_ARN }} - aws-region: ${{ env.AWS_REGION }} - - uses: divnix/std-action/run@v0.0.4 - with: - extra_nix_config: | - ${{ needs.discover.outputs.nix_conf }} - json: ${{ toJSON(matrix.target) }} - nix_key: ${{ secrets.NIX_SIGNING_KEY }} - cache: ${{ env.S3_CACHE }} - build-devshells: - needs: discover - strategy: - fail-fast: false - matrix: - target: ${{ fromJSON(needs.discover.outputs.hits).devshells.build }} - name: ${{ matrix.target.cell }} - ${{ matrix.target.name }} - runs-on: ubuntu-latest - steps: - - name: Configure AWS Credentials - uses: aws-actions/configure-aws-credentials@v1.7.0 - with: - role-to-assume: ${{ env.AWS_ROLE_ARN }} - aws-region: ${{ env.AWS_REGION }} - - uses: divnix/std-action/run@v0.0.4 - with: - extra_nix_config: | - ${{ needs.discover.outputs.nix_conf }} - json: ${{ toJSON(matrix.target) }} - nix_key: ${{ secrets.NIX_SIGNING_KEY }} - cache: ${{ env.S3_CACHE }} - publish-containers: - if: github.ref == 'refs/heads/main' - needs: - - discover - - build-packages - strategy: - fail-fast: false - matrix: - target: ${{ fromJSON(needs.discover.outputs.hits).containers.publish }} - name: ${{ matrix.target.cell }} - ${{ matrix.target.name }} - runs-on: ubuntu-latest - steps: - - name: Configure AWS Credentials - uses: aws-actions/configure-aws-credentials@v1.7.0 - with: - role-to-assume: ${{ env.AWS_ROLE_ARN }} - aws-region: ${{ env.AWS_REGION }} - - name: Configure Registry - run: | - aws ecr get-login-password --region eu-central-1 | docker login --username AWS --password-stdin "${{ env.ECR_REGISTRY }}" - - uses: divnix/std-action/run@v0.0.4 - with: - extra_nix_config: | - ${{ needs.discover.outputs.nix_conf }} - json: ${{ 
toJSON(matrix.target) }} - nix_key: ${{ secrets.NIX_SIGNING_KEY }} - cache: ${{ env.S3_CACHE }} diff --git a/.github/workflows/rust.yml b/.github/workflows/rust.yml index c0170711d5..c216289c24 100644 --- a/.github/workflows/rust.yml +++ b/.github/workflows/rust.yml @@ -226,10 +226,10 @@ jobs: uses: taiki-e/install-action@nextest - name: Install cargo-make - run: cargo install --force cargo-make + run: cargo install --force cargo-make --locked - name: Install refinery - run: cargo install refinery_cli + run: cargo install refinery_cli --version 0.8.7 --locked - name: Install dependencies run: @@ -268,16 +268,16 @@ jobs: --exclude wallet-uniffi \ --archive-file nextest-archive.tar.zst - - name: Run Catalyst Core tests - env: - TEST_DATABASE_URL: postgres://postgres:123456@localhost - EVENT_DB_URL: postgres://catalyst-event-dev:CHANGE_ME@localhost/CatalystEventDev + ## - name: Run Catalyst Core tests + ## env: + ## TEST_DATABASE_URL: postgres://postgres:123456@localhost + ## EVENT_DB_URL: postgres://catalyst-event-dev:CHANGE_ME@localhost/CatalystEventDev ## removing test ui_test because of bug https://github.com/rust-lang/cargo/issues/10352 - run: | - cargo nextest run \ - -E "not (test(ui_test))" \ - --archive-file nextest-archive.tar.zst --extract-to ${{ github.workspace }} \ - --extract-overwrite --partition hash:${{ matrix.partition }}/10 --profile ci + ## run: | + ## cargo nextest run \ + ## -E "not (test(ui_test))" \ + ## --archive-file nextest-archive.tar.zst --extract-to ${{ github.workspace }} \ + ## --extract-overwrite --partition hash:${{ matrix.partition }}/10 --profile ci test-results: if: always() diff --git a/.gitignore b/.gitignore index 6a888d4651..57771843ef 100644 --- a/.gitignore +++ b/.gitignore @@ -117,5 +117,8 @@ tests/tmp/ lefthook.yml treefmt.toml -# local earthly Environments -local/* \ No newline at end of file +# local earthly environments +local/* +tests/wallet-automation/typhon/usrdatadir/* +tests/wallet-automation/node_modules/* +tests/wallet-automation/typhon/extensions/* \ No newline at end of file diff --git a/Earthfile b/Earthfile index b762609895..6a3fe099a2 100644 --- a/Earthfile +++ b/Earthfile @@ -8,7 +8,7 @@ rust-toolchain: # Installs Cargo chef install-chef: FROM +rust-toolchain - RUN cargo install --debug cargo-chef + RUN cargo install --debug --version 0.1.59 cargo-chef --locked # Prepares the local cache prepare-cache: diff --git a/scripts/tally/private_offline.sh b/scripts/tally/private_offline.sh index b75827b3af..10da5b9cbf 100755 --- a/scripts/tally/private_offline.sh +++ b/scripts/tally/private_offline.sh @@ -2,16 +2,17 @@ set -exuo pipefail if [ "$#" -ne 1 ]; then - echo "Script is expecting voteplan id " - echo "./private.sh 9a278b6f788278e5cd8dfd6de8b8b8699a7f6b4847c680843de6c02d5b3169b2" + echo "Script is expecting voteplan index " + echo "./private.sh 0" exit -1 fi -VOTE_PLAN_ID=$1 +VOTE_PLAN_INDEX=$1 +VOTE_PLAN_ID=$(jq -r --arg VOTE_PLAN_INDEX "$VOTE_PLAN_INDEX" '.[$VOTE_PLAN_INDEX|tonumber].id' active_plans.json) COMMITTEE_KEY=committee_1 COMMITTEE_PK=$(jcli key to-public < "$COMMITTEE_KEY") MEMBER_SECRET_KEY=$(printf "./%s_committees/%s/member_secret_key.sk" $VOTE_PLAN_ID $COMMITTEE_PK) jcli "votes" "tally" "decryption-shares" "--vote-plan" "active_plans.json" "--vote-plan-id" "$VOTE_PLAN_ID" "--key" "$MEMBER_SECRET_KEY" > "$VOTE_PLAN_ID"_decryption_share.json jcli "votes" "tally" "merge-shares" $VOTE_PLAN_ID"_decryption_share.json" > "$VOTE_PLAN_ID"_shares.json -jcli "votes" "tally" "decrypt-results" "--vote-plan" 
"active_plans.json" "--vote-plan-id" "$VOTE_PLAN_ID" "--shares" $VOTE_PLAN_ID"_shares.json" "--threshold" "1" "--output-format" "json" > "$VOTE_PLAN_ID"_result.json +jcli "votes" "tally" "decrypt-results" "--vote-plan" "active_plans.json" "--vote-plan-id" "$VOTE_PLAN_ID" "--shares" $VOTE_PLAN_ID"_shares.json" "--threshold" "1" "--output-format" "json" > results"$VOTE_PLAN_INDEX".json diff --git a/services/voting-node/voting_node/importer.py b/services/voting-node/voting_node/importer.py index c4e17b4415..ec347c8be4 100644 --- a/services/voting-node/voting_node/importer.py +++ b/services/voting-node/voting_node/importer.py @@ -118,7 +118,6 @@ async def snapshot_import(self, event_id: int): network_ids=network_ids, snapshot_tool_path=os.environ.get("SNAPSHOT_TOOL_PATH", "snapshot_tool"), catalyst_toolbox_path=os.environ.get("CATALYST_TOOLBOX_PATH", "catalyst-toolbox"), - gvc_api_url=os.environ["GVC_API_URL"], ssh_config=ssh_config, ) try: diff --git a/src/catalyst-toolbox/catalyst-toolbox/scripts/python/proposers_rewards.py b/src/catalyst-toolbox/catalyst-toolbox/scripts/python/proposers_rewards.py index 039eb28509..a3dacc0d2e 100755 --- a/src/catalyst-toolbox/catalyst-toolbox/scripts/python/proposers_rewards.py +++ b/src/catalyst-toolbox/catalyst-toolbox/scripts/python/proposers_rewards.py @@ -1,5 +1,5 @@ # coding: utf-8 -from typing import Dict, Optional, List, Tuple, Generator, TextIO, Union, Any, Set +from typing import Dict, Optional, List, Tuple, Generator, TextIO, Union, Any, Set, Mapping import sys import asyncio @@ -16,6 +16,12 @@ import httpx import typer import yaml +import asyncio +import aiohttp +from rich import print +from asyncio import run as aiorun +from copy import deepcopy + # VIT servicing station models @@ -28,6 +34,14 @@ NOT_FUNDED_APPROVAL_THRESHOLD = "Not Funded - Approval Threshold" LOVELACE_FACTOR = 1000000 +class Challenge(pydantic.BaseModel): + id: int + challenge_type: str + title: str + description: str + rewards_total: int + fund_id: int + challenge_url: str class Proposal(pydantic.BaseModel): internal_id: int @@ -42,10 +56,42 @@ class Proposal(pydantic.BaseModel): fund_id: int challenge_id: int challenge_type: str + challenge: Challenge + + @pydantic.computed_field + @property + def ideascale_url(self) -> str: + return f"https://cardano.ideascale.com/c/idea/{self.proposal_id}" +class Author(pydantic.BaseModel): + """Represents an author.""" -# Jormungandr models + id: int + name: str + email: str + user_name: str = pydantic.Field(alias="userName") + +# Ideascale models +class IdeascaleProposal(pydantic.BaseModel): + id: int + title: str + authors: List[Author] = pydantic.Field(default=[]) + + @pydantic.model_validator(mode="before") + @classmethod + def assign_authors_if_any(cls, values): + """Assign proposers/co-proposers merging different ideascale fields.""" + authors = [] + if "authorInfo" in values: + authors.append(Author(**values["authorInfo"])) + if "contributors" in values: + for contributor in values["contributors"]: + authors.append(Author(**contributor)) + values["authors"] = authors + return values + +# Jormungandr models class Options(pydantic.BaseModel): start: int @@ -105,15 +151,143 @@ class VoteplanStatus(pydantic.BaseModel): proposals: List[ProposalStatus] -class Challenge(pydantic.BaseModel): - id: int - challenge_type: str - title: str - description: str - rewards_total: int - fund_id: int - challenge_url: str +class Result(pydantic.BaseModel): + internal_id: int + proposal_id: str + proposal: str + yes: int + abstain: Optional[int] = 
None + no: Optional[int] = None + meets_threshold: str + requested_funds: int + status: str + fund_depletion: int + not_funded_reason: str + website_url: str + ideascale_url: str + challenge_title: str + challenge_id: int + votes_cast: int + vote_result: Optional[int] = None +class Winner(pydantic.BaseModel): + internal_id: int + proposal_id: str + project_id: int + proposal_title: str + requested_funds: int + website_url: str + ideascale_url: str + challenge_title: str + challenge_id: int + milestone_qty: int + authors: List[Author] = pydantic.Field([]) + + def dict(self, **kwargs): + # Override std dict to list all authors in different columns + output = super().dict(**kwargs) + _output = {} + for k,v in output.items(): + if k == 'authors': + for idx, author in enumerate(v): + _output[f"{k}_{idx}"] = author['email'] + else: + _output[k] = v + return _output + +# Ideascale interface + +class JsonHttpClient: + """HTTP Client for JSON APIs.""" + + def __init__(self, api_url: str): + """Initialize a new instance of JsonHttpClient.""" + self.api_url = api_url + self.request_counter = 0 + + async def get(self, path: str, headers: Mapping[str, str] = {}): + """Execute a GET request against a service.""" + url = f"{self.api_url}{path}" + + async with aiohttp.ClientSession() as session: + async with session.get(url, headers=headers) as r: + content = b"" + + async for c, _ in r.content.iter_chunks(): + content += c + + if r.status == 200: + parsed_json = json.loads(content) + return parsed_json + else: + raise GetFailed(r.status, r.reason, content) + +class GetFailed(Exception): + """Raised when a request fails.""" + + def __init__(self, status, reason, content): + """Initialize a new instance of GetFailed.""" + super().__init__(f"{status} {reason}\n{content})") + +class IdeascaleImporter: + """Interface with IdeaScale API.""" + + def __init__(self, api_key: str, api_url: str = "https://temp-cardano-sandbox.ideascale.com"): + """Initialize entities.""" + self.api_key = api_key + self.api_url = api_url + self.inner = JsonHttpClient(self.api_url) + self.N_WORKERS = 3 + + self.proposals: List[IdeascaleProposal] = [] + + async def import_proposals(self, stage_ids: List[int], page_size: int = 50): + """Get all ideas from the stage with the given id. + + Pages are requested concurrently until the latest one fails + which signals that that are no more pages left. 
+ """ + + class WorkerData: + def __init__(self, stage_id): + self.stage_id = stage_id + + self.page: int = 0 + self.done: bool = False + self.proposals: List[IdeascaleProposal] = [] + + async def worker(d: WorkerData, stage_id: int): + while True: + if d.done: + break + + p = d.page + d.page += 1 + + res = await self._get(f"/a/rest/v1/stages/{stage_id}/ideas/{p}/{page_size}") + + res_proposals: List[IdeascaleProposal] = [] + for i in res: + if i["stageId"] == stage_id: + res_proposals.append(IdeascaleProposal(**i)) + + d.proposals.extend(res_proposals) + + if len(res_proposals) < page_size: + d.done = True + d = {} + for stage_id in stage_ids: + print(f"Start proposal requests for stage: {stage_id}") + d = WorkerData(stage_id) + worker_tasks = [asyncio.create_task(worker(d, stage_id)) for _ in range(self.N_WORKERS)] + for task in worker_tasks: + await task + self.proposals.extend(d.proposals) + + async def _get(self, path: str): + """Execute a GET request.""" + headers = {"api_token": self.api_key} + return await self.inner.get(path, headers) # File loaders @@ -123,9 +297,9 @@ def load_json_from_file(file_path: str) -> Dict: return json.load(f) -def get_proposals_from_file(proposals_file_path: str) -> Dict[str, Proposal]: +def get_proposals_from_file(proposals_file_path: str, challenges: Dict[int, Challenge]) -> Dict[str, Proposal]: proposals: Generator[Proposal, None, None] = ( - Proposal(**proposal_data) + Proposal(**proposal_data, challenge=challenges[proposal_data['challenge_id']]) for proposal_data in load_json_from_file(proposals_file_path) ) proposals_dict = {proposal.chain_proposal_id: proposal for proposal in proposals} @@ -161,10 +335,10 @@ def get_challenges_from_file(challenges_file_path: str) -> Dict[int, Challenge]: def get_proposals_voteplans_and_challenges_from_files( proposals_file_path: str, voteplan_file_path: str, challenges_file_path: str ) -> Tuple[Dict[str, Proposal], Dict[str, ProposalStatus], Dict[int, Challenge]]: - proposals = get_proposals_from_file(proposals_file_path) voteplan_proposals = get_voteplan_proposals_from_file(voteplan_file_path) - challeges = get_challenges_from_file(challenges_file_path) - return proposals, voteplan_proposals, challeges + challenges = get_challenges_from_file(challenges_file_path) + proposals = get_proposals_from_file(proposals_file_path, challenges) + return proposals, voteplan_proposals, challenges def get_excluded_proposals_from_file(excluded_proposals_path: str) -> List[str]: @@ -262,8 +436,11 @@ def sanity_check_data( # Analyse and compute needed data +class WinnerSelectionRule(enum.Enum): + YES_ONLY: str = "yes_only" + YES_NO_DIFF: str = "yes_no_diff" -def extract_yes_no_votes(proposal: Proposal, voteplan_proposal: ProposalStatus): +def extract_choices_votes(proposal: Proposal, voteplan_proposal: ProposalStatus): yes_index = int(proposal.chain_vote_options["yes"]) no_index = int(proposal.chain_vote_options["no"]) # we check before if tally is available, so it should be safe to direct access the data @@ -275,77 +452,64 @@ def extract_yes_no_votes(proposal: Proposal, voteplan_proposal: ProposalStatus): def calc_approval_threshold( proposal: Proposal, voteplan_proposal: ProposalStatus, - threshold: float, total_stake_threshold: float, + winner_selection_rule: WinnerSelectionRule, + relative_threshold: float ) -> Tuple[int, bool]: - yes_result, no_result = extract_yes_no_votes(proposal, voteplan_proposal) - total_stake = yes_result + no_result - pass_total_threshold = total_stake >= float(total_stake_threshold) - diff = 
yes_result - no_result - pass_relative_threshold = (yes_result / no_result) >= float(threshold) - success = pass_total_threshold and pass_relative_threshold - return diff, success - - -def calc_vote_difference_and_threshold_success( + yes_result, second_choice_result = extract_choices_votes(proposal, voteplan_proposal) + pass_relative_threshold = ((yes_result - second_choice_result) / (yes_result + second_choice_result)) >= float(relative_threshold) + if winner_selection_rule == WinnerSelectionRule.YES_ONLY: + vote_result = yes_result + pass_total_threshold = yes_result >= float(total_stake_threshold) + elif winner_selection_rule == WinnerSelectionRule.YES_NO_DIFF: + vote_result = yes_result - second_choice_result + pass_total_threshold = (yes_result + second_choice_result) >= float(total_stake_threshold) + threshold_rules = pass_total_threshold and pass_relative_threshold + return vote_result, threshold_rules + + +def calc_vote_value_and_threshold_success( proposals: Dict[str, Proposal], voteplan_proposals: Dict[str, ProposalStatus], - threshold: float, total_stake_threshold: float, + winner_selection_rule: WinnerSelectionRule, + relative_threshold: float ) -> Dict[str, Tuple[int, bool]]: full_ids = set(proposals.keys()) result = { proposal_id: calc_approval_threshold( proposals[proposal_id], voteplan_proposals[proposal_id], - threshold, total_stake_threshold, + winner_selection_rule, + relative_threshold ) for proposal_id in full_ids } return result -Result = namedtuple( - "Result", - ( - "internal_id", - "proposal_id", - "proposal", - "overall_score", - "yes", - "no", - "result", - "meets_approval_threshold", - "requested_dollars", - "status", - "fund_depletion", - "not_funded_reason", - "link_to_ideascale", - ), -) - - def calc_results( proposals: Dict[str, Proposal], voteplan_proposals: Dict[str, ProposalStatus], - fund: float, - threshold: float, + funds: float, total_stake_threshold: float, + winner_selection_rule: WinnerSelectionRule, + relative_threshold: float ) -> List[Result]: - success_results = calc_vote_difference_and_threshold_success( - proposals, voteplan_proposals, threshold, total_stake_threshold + success_results = calc_vote_value_and_threshold_success( + proposals, voteplan_proposals, total_stake_threshold, winner_selection_rule, relative_threshold ) sorted_ids = sorted( success_results.keys(), key=lambda x: success_results[x][0], reverse=True ) result_lst = [] - depletion = fund + depletion = funds for proposal_id in sorted_ids: proposal = proposals[proposal_id] voteplan_proposal = voteplan_proposals[proposal_id] - total_result, threshold_success = success_results[proposal_id] - yes_result, no_result = extract_yes_no_votes(proposal, voteplan_proposal) + vote_result, threshold_success = success_results[proposal_id] + yes_result, second_choice_result = extract_choices_votes(proposal, voteplan_proposal) funded = all( (threshold_success, depletion > 0, depletion >= proposal.proposal_funds) ) @@ -368,19 +532,27 @@ def calc_results( proposal=proposal.proposal_title, overall_score=proposal.proposal_impact_score / 100, yes=yes_result, - no=no_result, - result=total_result, - meets_approval_threshold=YES if threshold_success else NO, - requested_dollars=proposal.proposal_funds, + meets_threshold=YES if threshold_success else NO, + requested_funds=proposal.proposal_funds, status=FUNDED if funded else NOT_FUNDED, fund_depletion=depletion, not_funded_reason=not_funded_reason, - link_to_ideascale=proposal.proposal_url, + website_url=proposal.proposal_url, + 
ideascale_url=proposal.ideascale_url, + challenge_id=proposal.challenge.id, + challenge_title=proposal.challenge.title, + votes_cast=voteplan_proposal.votes_cast ) + if winner_selection_rule == WinnerSelectionRule.YES_ONLY: + result.abstain = second_choice_result + if winner_selection_rule == WinnerSelectionRule.YES_NO_DIFF: + result.vote_result = vote_result + result.no = second_choice_result + result_lst.append(result) - return result_lst + return result_lst, depletion def filter_data_by_challenge( @@ -423,15 +595,69 @@ def calculate_total_stake_from_block0_configuration( if fund["address"] not in [key for key in committee_keys] ) +def extract_relevant_choice(x, winner_selection_rule): + if winner_selection_rule == WinnerSelectionRule.YES_ONLY: + return x.yes + elif winner_selection_rule == WinnerSelectionRule.YES_NO_DIFF: + return x.vote_result + +def calc_leftovers(results, remaining_funds, excluded_categories, winner_selection_rule): + leftovers_candidates = sorted([ + result + for result in deepcopy(results) + if ( + result.status == NOT_FUNDED and + result.meets_threshold == YES and + result.challenge_id not in excluded_categories + ) + ], key=lambda x: extract_relevant_choice(x, winner_selection_rule), reverse=True) + + depletion = remaining_funds + for candidate in leftovers_candidates: + funded = depletion >= candidate.requested_funds + not_funded_reason = ( + "" + if funded + else NOT_FUNDED_OVER_BUDGET + ) + if funded: + depletion -= candidate.requested_funds + candidate.status = FUNDED if funded else NOT_FUNDED + candidate.fund_depletion = depletion + candidate.not_funded_reason = not_funded_reason + + return leftovers_candidates, depletion + +def pick_milestones_qty(winner, limits, qty): + idx = next((i for i, l in enumerate(limits) if winner.requested_funds > l), None) + return qty[idx] + +def generate_winners(results, fund_prefix, milestones_limit, milestones_qty, _ideascale_proposals): + ideascale_proposals = {p.id: p for p in _ideascale_proposals} + winners = [] + _winners = sorted([r for r in results if r.status == FUNDED], key=lambda r: r.proposal.lower()) + for idx, _winner in enumerate(_winners): + winner = Winner( + **_winner.dict(), + proposal_title=_winner.proposal, + project_id=fund_prefix + idx, + milestone_qty=pick_milestones_qty(_winner, milestones_limit, milestones_qty) + ) + if winner.internal_id in ideascale_proposals.keys(): + winner.authors = ideascale_proposals[winner.internal_id].authors + winners.append(winner) + return winners # Output results def output_csv(results: List[Result], f: TextIO): - fields = results[0]._fields - writer = csv.writer(f) - writer.writerow(fields) - writer.writerows(results) + elements = [r.dict(exclude_none=True) for r in results] + keys = max([e.keys() for e in elements], key=len) + fields = keys + writer = csv.DictWriter(f, fields) + writer.writeheader() + writer.writerows(elements) def output_json(results: List[Result], f: TextIO): @@ -440,29 +666,81 @@ def output_json(results: List[Result], f: TextIO): # CLI +class OutputFormat(enum.Enum): + CSV: str = "csv" + JSON: str = "json" -def build_path_for_challenge(file_path: str, challenge_name: str) -> str: + +def build_path_for_challenge(file_path: str, challenge_name: str, output_format: OutputFormat) -> str: path, suffix = os.path.splitext(file_path) - return f"{path}_{challenge_name}{suffix}" + suffix = 'json' if (output_format == OutputFormat.JSON) else 'csv' + return f"{path}_{challenge_name}.{suffix}" -class OutputFormat(enum.Enum): - CSV: str = "csv" - JSON: str = 
"json" +def save_results(output_path: str, title: str, output_format: OutputFormat, results: List[Results]): + challenge_output_file_path = build_path_for_challenge( + output_path, + re.sub( + r"(?u)[^-\w.]", "", title.replace(" ", "_").replace(":", "_") + ), + output_format + ) + + with open( + challenge_output_file_path, "w", encoding="utf-8", newline="" + ) as out_file: + if output_format == OutputFormat.JSON: + output_json(results, out_file) + elif output_format == OutputFormat.CSV: + output_csv(results, out_file) def calculate_rewards( output_file: str = typer.Option(...), block0_path: str = typer.Option(...), - total_stake_threshold: float = typer.Option(0.01), - approval_threshold: float = typer.Option(1.15), - output_format: OutputFormat = typer.Option("csv"), + total_stake_threshold: float = typer.Option( + 0.01, + help=""" + This value indicates the minimum percentage of voting needed by projects to be eligible for funding. + Voting choices considered for this depends by the winner rule. + """ + ), + relative_threshold: float = typer.Option( + 0, + help="This value indicates the relative threshold between Yes/No votes needed by projects to be eligible for funding." + ), + output_format: OutputFormat = typer.Option("csv", help="Output format"), + winner_selection_rule: WinnerSelectionRule = typer.Option( + "yes_only", + help=""" + The selection rule to apply to determine winner. + Possible choices are: + - `yes_only` Fuzzy threshold voting: only YES votes are considered for ranking. Only YES votes are considered to calculate thresholds. + - `yes_no_diff` Fuzzy threshold voting: YES/NO difference is considered for ranking. Sum of YES/NO is considered to calculate thresholds. + """ + ), proposals_path: Optional[str] = typer.Option(None), excluded_proposals_path: Optional[str] = typer.Option(None), active_voteplan_path: Optional[str] = typer.Option(None), challenges_path: Optional[str] = typer.Option(None), vit_station_url: str = typer.Option("https://servicing-station.vit.iohk.io"), committee_keys_path: Optional[str] = typer.Option(None), + fund_prefix: int = typer.Option(1100001, help="This number will be used to assign progressively project ids to winners."), + leftovers_excluded_categories: List[int] = typer.Option( + [], + help="List of categories IDs that are not considered in leftovers winners calculation." + ), + milestones_limit: List[int] = typer.Option( + [0, 75000, 150000, 300000], + help="Map of budgets to assign number of milestones. Lenght must coincide with `milestones_qty` parameter." + ), + milestones_qty: List[int] = typer.Option( + [3, 4, 5, 6], + help="Map of milestones qty to assign number of milestones. Lenght must coincide with `milestones_limit` parameter." + ), + ideascale_api_key: str = typer.Option(None, help="IdeaScale API key"), + ideascale_api_url: str = typer.Option("https://temp-cardano-sandbox.ideascale.com", help="IdeaScale API url"), + stage_ids: List[int] = typer.Option([], help="Stage IDs"), ): """ Calculate catalyst rewards after tallying process. 
@@ -512,33 +790,52 @@ def calculate_rewards( # minimum amount of stake needed for a proposal to be accepted total_stake_approval_threshold = float(total_stake_threshold) * float(total_stake) + total_remaining_funds = 0 + + all_results = [] + for challenge in challenges.values(): challenge_proposals, challenge_voteplan_proposals = filter_data_by_challenge( challenge.id, proposals, voteplan_proposals ) - results = calc_results( + results, remaining_funds = calc_results( challenge_proposals, challenge_voteplan_proposals, challenge.rewards_total, - approval_threshold, total_stake_approval_threshold, + winner_selection_rule, + relative_threshold ) - challenge_output_file_path = build_path_for_challenge( - output_file, - re.sub( - r"(?u)[^-\w.]", "", challenge.title.replace(" ", "_").replace(":", "_") - ), - ) + total_remaining_funds += remaining_funds + all_results += results + + save_results(output_file, challenge.title, output_format, results) + + leftover_results, final_remaining_funds = calc_leftovers(all_results, total_remaining_funds, leftovers_excluded_categories, winner_selection_rule) + save_results(output_file, 'leftovers', output_format, leftover_results) + + ideascale_proposals = [] + if (ideascale_api_key): + ideascale = IdeascaleImporter(ideascale_api_key, ideascale_api_url) + + async def _get_proposals(): + await ideascale.import_proposals(stage_ids=stage_ids) + + aiorun(_get_proposals()) + ideascale_proposals = ideascale.proposals - with open( - challenge_output_file_path, "w", encoding="utf-8", newline="" - ) as out_file: - if output_format == OutputFormat.JSON: - output_json(results, out_file) - elif output_format == OutputFormat.CSV: - output_csv(results, out_file) + milestones_limit.reverse() + milestones_qty.reverse() + winners = generate_winners(all_results + leftover_results, fund_prefix, milestones_limit, milestones_qty, ideascale_proposals) + save_results(output_file, 'winners', output_format, winners) + print("[bold green]Winners generated.[/bold green]") + print(f"Total Stake: {total_stake}") + print(f"Total Stake threshold: {total_stake_approval_threshold}") + print(f"Leftover budget: {total_remaining_funds}") + print(f"Unallocated budget: {final_remaining_funds}") + print(f"Funded projects: {len(winners)}") if __name__ == "__main__": typer.run(calculate_rewards) diff --git a/src/catalyst-toolbox/catalyst-toolbox/scripts/python/requirements.txt b/src/catalyst-toolbox/catalyst-toolbox/scripts/python/requirements.txt index 0f433ed3ab..6c655c0f37 100644 --- a/src/catalyst-toolbox/catalyst-toolbox/scripts/python/requirements.txt +++ b/src/catalyst-toolbox/catalyst-toolbox/scripts/python/requirements.txt @@ -1,4 +1,6 @@ -httpx==0.23.0 -pydantic==1.8.2 -typer==0.3.2 -pyYAML==6.0 +httpx==0.26.0 +pydantic==2.6.0 +typer==0.9.0 +pyYAML==6.0.1 +aiohttp==3.9.3 +rich==13.7.0 diff --git a/src/event-db/stage_data/dev/00001_fund11_event.sql b/src/event-db/stage_data/dev/00001_fund100_event.sql similarity index 73% rename from src/event-db/stage_data/dev/00001_fund11_event.sql rename to src/event-db/stage_data/dev/00001_fund100_event.sql index 691134a0e4..a7370041f5 100644 --- a/src/event-db/stage_data/dev/00001_fund11_event.sql +++ b/src/event-db/stage_data/dev/00001_fund100_event.sql @@ -1,4 +1,4 @@ --- F11 +-- F100 INSERT INTO event ( row_id, name, @@ -26,25 +26,25 @@ INSERT INTO event ( extra, cast_to ) VALUES ( - 11, - 'Fund 11', - 'Catalyst Testnet - Fund 11', - '2024-01-05 18:00:00', -- Registration Snapshot Time - '2024-01-05 18:30:00', -- Snapshot Start. 
-    450000000, -- Voting Power Threshold
+    100,
+    'Fund 100',
+    'Catalyst Testnet - Fund 100',
+    '2024-06-15 21:45:00', -- Registration Snapshot Time
+    '2024-06-15 22:15:00', -- Snapshot Start.
+    50000000, -- Voting Power Threshold
     1, -- Max Voting Power PCT
     NULL, -- Review Rewards
-    '2024-01-04 08:00:00', -- Start Time
-    '2023-12-30 18:00:00', -- End Time
-    '2024-01-04 08:00:00', -- Insight Sharing Start
-    '2024-01-04 08:00:00', -- Proposal Submission Start
-    '2024-01-04 08:00:00', -- Refine Proposals Start
-    '2024-01-04 08:00:00', -- Finalize Proposals Start
-    '2024-01-04 08:00:00', -- Proposal Assessment Start
-    '2024-01-04 08:00:00', -- Assessment QA Start
-    '2024-01-06 08:00:00', -- Voting Starts
-    '2024-01-09 20:00:00', -- Voting Ends
-    '2024-01-16 20:00:00', -- Tallying Ends
+    '2024-01-01 00:00:00', -- Start Time
+    '2024-12-31 00:00:00', -- End Time
+    '2024-07-28 04:00:00', -- Insight Sharing Start
+    '2024-07-28 04:00:00', -- Proposal Submission Start
+    '2024-07-28 04:00:00', -- Refine Proposals Start
+    '2024-07-28 04:00:00', -- Finalize Proposals Start
+    '2024-07-28 04:00:00', -- Proposal Assessment Start
+    '2024-07-28 04:00:00', -- Assessment QA Start
+    '2024-07-28 09:00:00', -- Voting Starts
+    '2024-08-13 13:00:00', -- Voting Ends
+    '2024-08-22 02:00:00', -- Tallying Ends
     NULL, -- Block 0 Data
     NULL, -- Block 0 Hash
     1, -- Committee Size
diff --git a/src/event-db/stage_data/prod/00002_fund11_params.sql b/src/event-db/stage_data/dev/00002_fund100_params.sql
similarity index 96%
rename from src/event-db/stage_data/prod/00002_fund11_params.sql
rename to src/event-db/stage_data/dev/00002_fund100_params.sql
index b73e66d499..3ac693b1f3 100644
--- a/src/event-db/stage_data/prod/00002_fund11_params.sql
+++ b/src/event-db/stage_data/dev/00002_fund100_params.sql
@@ -1,7 +1,7 @@
--- Define F11 IdeaScale parameters.
+-- Define F100 IdeaScale parameters.
 INSERT INTO config (id, id2, id3, value) VALUES (
-    'ideascale',
-    '11',
+    'ideascale',
+    '100',
     '',
     '{
         "group_id": 31051,
@@ -51,11 +51,11 @@ INSERT INTO config (id, id2, id3, value) VALUES (
 ) ON CONFLICT (id, id2, id3) DO UPDATE
 SET value = EXCLUDED.value;
 
--- Use F11 params for event with row_id = 11.
+-- Use F100 params for event with row_id = 100.
 INSERT INTO config (id, id2, id3, value) VALUES (
     'event',
     'ideascale_params',
-    '11',
-    '{"params_id": "F11"}'
+    '100',
+    '{"params_id": "F100"}'
 ) ON CONFLICT (id, id2, id3) DO UPDATE
SET value = EXCLUDED.value;
diff --git a/src/event-db/stage_data/prod/00001_fund11_event.sql b/src/event-db/stage_data/prod/00001_fund100_event.sql
similarity index 75%
rename from src/event-db/stage_data/prod/00001_fund11_event.sql
rename to src/event-db/stage_data/prod/00001_fund100_event.sql
index 192d4dc38d..9336480734 100644
--- a/src/event-db/stage_data/prod/00001_fund11_event.sql
+++ b/src/event-db/stage_data/prod/00001_fund100_event.sql
@@ -1,4 +1,4 @@
--- F11
+-- F100
 INSERT INTO event (
     row_id,
     name,
@@ -26,25 +26,25 @@ INSERT INTO event (
     extra,
     cast_to
 ) VALUES (
-    11,
-    'Fund 11',
-    'Catalyst Prod - Fund 11',
+    100,
+    'Fund 100',
+    'Catalyst Testnet - Fund 100',
     '2024-01-15 21:45:00', -- Registration Snapshot Time
-    '2024-01-20 22:15:00', -- Snapshot Start.
-    450000000, -- Voting Power Threshold
+    '2024-01-15 22:00:00', -- Snapshot Start.
+    50000000, -- Voting Power Threshold
     1, -- Max Voting Power PCT
     NULL, -- Review Rewards
-    '2023-11-23 11:00:00', -- Start Time
-    '2024-02-23 03:00:00', -- End Time
-    '2023-11-16 11:00:00', -- Insight Sharing Start
-    '2023-11-16 11:00:00', -- Proposal Submission Start
-    '2023-11-16 11:00:00', -- Refine Proposals Start
-    '2023-11-30 11:00:00', -- Finalize Proposals Start
-    '2023-12-14 11:00:00', -- Proposal Assessment Start
-    '2024-01-11 11:00:00', -- Assessment QA Start
-    '2024-01-25 11:00:00', -- Voting Starts
-    '2024-02-08 11:00:00', -- Voting Ends
-    '2024-02-23 03:00:00', -- Tallying Ends
+    '2024-02-28 04:00:00', -- Start Time
+    '2023-12-30 18:00:00', -- End Time
+    '2024-02-28 04:00:00', -- Insight Sharing Start
+    '2024-02-28 04:00:00', -- Proposal Submission Start
+    '2024-02-28 04:00:00', -- Refine Proposals Start
+    '2024-02-28 04:00:00', -- Finalize Proposals Start
+    '2024-02-28 04:00:00', -- Proposal Assessment Start
+    '2024-02-28 04:00:00', -- Assessment QA Start
+    '2024-02-28 09:00:00', -- Voting Starts
+    '2024-03-13 13:00:00', -- Voting Ends
+    '2024-03-22 02:00:00', -- Tallying Ends
     NULL, -- Block 0 Data
     NULL, -- Block 0 Hash
     1, -- Committee Size
diff --git a/src/event-db/stage_data/dev/00002_fund11_params.sql b/src/event-db/stage_data/prod/00002_fund100_params.sql
similarity index 96%
rename from src/event-db/stage_data/dev/00002_fund11_params.sql
rename to src/event-db/stage_data/prod/00002_fund100_params.sql
index b73e66d499..3ac693b1f3 100644
--- a/src/event-db/stage_data/dev/00002_fund11_params.sql
+++ b/src/event-db/stage_data/prod/00002_fund100_params.sql
@@ -1,7 +1,7 @@
--- Define F11 IdeaScale parameters.
+-- Define F100 IdeaScale parameters.
 INSERT INTO config (id, id2, id3, value) VALUES (
-    'ideascale',
-    '11',
+    'ideascale',
+    '100',
     '',
     '{
         "group_id": 31051,
@@ -51,11 +51,11 @@ INSERT INTO config (id, id2, id3, value) VALUES (
 ) ON CONFLICT (id, id2, id3) DO UPDATE
 SET value = EXCLUDED.value;
 
--- Use F11 params for event with row_id = 11.
+-- Use F100 params for event with row_id = 100.
 INSERT INTO config (id, id2, id3, value) VALUES (
     'event',
     'ideascale_params',
-    '11',
-    '{"params_id": "F11"}'
+    '100',
+    '{"params_id": "F100"}'
 ) ON CONFLICT (id, id2, id3) DO UPDATE
 SET value = EXCLUDED.value;
diff --git a/tests/wallet-automation/Earthfile b/tests/wallet-automation/Earthfile
new file mode 100644
index 0000000000..e179100ed6
--- /dev/null
+++ b/tests/wallet-automation/Earthfile
@@ -0,0 +1,31 @@
+VERSION 0.7
+
+# Define a base target for dependencies
+deps:
+    FROM mcr.microsoft.com/playwright:v1.41.0-jammy
+    WORKDIR /wallet-automation
+
+    # Consolidate RUN commands to reduce layers and ensure cleaner installation
+    RUN apt-get update && apt-get install -y \
+        libnss3 libatk-bridge2.0-0 libdrm-dev libxkbcommon-dev libgbm-dev libasound-dev libatspi2.0-0 libxshmfence-dev postgresql-client xvfb python3.11 python3-pip && \
+        apt-get clean && \
+        rm -rf /var/lib/apt/lists/*
+
+    COPY package.json .
+    COPY package-lock.json .
+
+    RUN npm install
+    ENV PATH "/wallet-automation/node_modules/.bin:$PATH"
+
+# Define a source target that builds upon deps
+src:
+    FROM +deps
+
+    COPY --dir typhon .
+    COPY playwright.config.ts .
+    COPY global-setup.ts .
+ +# Define a test target that builds upon deps +wallet-test: + FROM +src + RUN xvfb-run -a npx playwright test diff --git a/tests/wallet-automation/global-setup.ts b/tests/wallet-automation/global-setup.ts new file mode 100644 index 0000000000..d650994b4b --- /dev/null +++ b/tests/wallet-automation/global-setup.ts @@ -0,0 +1,64 @@ +import { test } from '@playwright/test'; +import * as fs from 'fs/promises'; +import * as path from 'path'; + +const typhonId = 'KFDNIEFADAANBJODLDOHAEDPHAFOFFOH'; +const url = `https://clients2.google.com/service/update2/crx?response=redirect&os=win&arch=x64&os_arch=x86_64&nacl_arch=x86-64&prod=chromiumcrx&prodchannel=beta&prodversion=79.0.3945.53&lang=ru&acceptformat=crx3&x=id%3D${typhonId}%26installsource%3Dondemand%26uc`; +const downloadPath = path.resolve(__dirname, 'typhon/extensions'); +const unzip = require("unzip-crx-3"); + +test('downloadFile test', async ({ page }) => { + await fs.mkdir(downloadPath, { recursive: true }); + + const downloadPromise = new Promise(async (resolve) => { + page.once('download', async (download) => { + const originalFilePath = path.join(downloadPath, download.suggestedFilename()); + await download.saveAs(originalFilePath); + console.log(`file has been downloaded to: ${originalFilePath}`); + + // new code: rename the downloaded file + const newFilePath = path.join(downloadPath, typhonId); + await fs.rename(originalFilePath, newFilePath); + console.log(`file has been renamed to: ${newFilePath}`); + + resolve(newFilePath); // resolve the promise with the new file path + }); + }); + + try { + await page.goto(url, { + waitUntil: 'domcontentloaded', + timeout: 10000 + }); + } catch (error) { + console.log('navigation caused an exception, likely due to immediate download:', 'directDownload'); + } + + // wait for the download and rename to complete + const downloadedFilePath = await downloadPromise; + + // verify the file exists + try { + await fs.access(downloadedFilePath as string); // type assertion to string + console.log('file verification succeeded, file exists.'); + } catch { + console.error('file verification failed, file does not exist.'); + throw new Error('downloaded file does not exist.'); + } + + // Assuming the rest of your setup remains the same... 
+ + // Unzip the renamed file + try { + // Create a directory for the unzipped contents if it doesn't exist + const extractPath = path.join(downloadPath, typhonId + "_unzipped"); + await fs.mkdir(extractPath, { recursive: true }); + + // Adjust the unzip call to specify the extraction directory + await unzip(downloadedFilePath, extractPath); // Specify where to unzip + console.log("Successfully unzipped your CRX file to:", extractPath); + } catch (error) { + console.error("Failed to unzip the CRX file:", error.message); + throw new Error('Failed to unzip the CRX file.'); + } +}); \ No newline at end of file diff --git a/tests/wallet-automation/package-lock.json b/tests/wallet-automation/package-lock.json new file mode 100644 index 0000000000..110b0c2fb9 --- /dev/null +++ b/tests/wallet-automation/package-lock.json @@ -0,0 +1,339 @@ +{ + "name": "catalyst-core", + "version": "1.0.0", + "lockfileVersion": 3, + "requires": true, + "packages": { + "": { + "name": "catalyst-core", + "version": "1.0.0", + "license": "ISC", + "dependencies": { + "dotenv": "^16.3.1", + "node-fetch": "^3.3.2", + "playwright": "^1.41.2", + "unzip-crx-3": "^0.2.0" + }, + "devDependencies": { + "@playwright/test": "^1.41.0", + "@types/node": "^20.11.4" + } + }, + "node_modules/@playwright/test": { + "version": "1.41.0", + "resolved": "https://registry.npmjs.org/@playwright/test/-/test-1.41.0.tgz", + "integrity": "sha512-Grvzj841THwtpBOrfiHOeYTJQxDRnKofMSzCiV8XeyLWu3o89qftQ4BCKfkziJhSUQRd0utKhrddtIsiraIwmw==", + "dev": true, + "dependencies": { + "playwright": "1.41.2" + }, + "bin": { + "playwright": "cli.js" + }, + "engines": { + "node": ">=16" + } + }, + "node_modules/@playwright/test/node_modules/playwright": { + "version": "1.41.0", + "resolved": "https://registry.npmjs.org/playwright/-/playwright-1.41.0.tgz", + "integrity": "sha512-XOsfl5ZtAik/T9oek4V0jAypNlaCNzuKOwVhqhgYT3os6kH34PzbRb74F0VWcLYa5WFdnmxl7qyAHBXvPv7lqQ==", + "dev": true, + "dependencies": { + "playwright-core": "1.41.0" + }, + "bin": { + "playwright": "cli.js" + }, + "engines": { + "node": ">=16" + }, + "optionalDependencies": { + "fsevents": "2.3.2" + } + }, + "node_modules/@playwright/test/node_modules/playwright-core": { + "version": "1.41.0", + "resolved": "https://registry.npmjs.org/playwright-core/-/playwright-core-1.41.0.tgz", + "integrity": "sha512-UGKASUhXmvqm2Lxa1fNr8sFwAtqjpgBRr9jQ7XBI8Rn5uFiEowGUGwrruUQsVPIom4bk7Lt+oLGpXobnXzrBIw==", + "dev": true, + "bin": { + "playwright-core": "cli.js" + }, + "engines": { + "node": ">=16" + } + }, + "node_modules/@types/node": { + "version": "20.11.4", + "resolved": "https://registry.npmjs.org/@types/node/-/node-20.11.4.tgz", + "integrity": "sha512-6I0fMH8Aoy2lOejL3s4LhyIYX34DPwY8bl5xlNjBvUEk8OHrcuzsFt+Ied4LvJihbtXPM+8zUqdydfIti86v9g==", + "dev": true, + "dependencies": { + "undici-types": "~5.26.4" + } + }, + "node_modules/core-util-is": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.3.tgz", + "integrity": "sha512-ZQBvi1DcpJ4GDqanjucZ2Hj3wEO5pZDS89BWbkcrvdxksJorwUDDZamX9ldFkp9aw2lmBDLgkObEA4DWNJ9FYQ==" + }, + "node_modules/data-uri-to-buffer": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/data-uri-to-buffer/-/data-uri-to-buffer-4.0.1.tgz", + "integrity": "sha512-0R9ikRb668HB7QDxT1vkpuUBtqc53YyAwMwGeUFKRojY/NWKvdZ+9UYtRfGmhqNbRkTSVpMbmyhXipFFv2cb/A==", + "engines": { + "node": ">= 12" + } + }, + "node_modules/dotenv": { + "version": "16.3.1", + "resolved": "https://registry.npmjs.org/dotenv/-/dotenv-16.3.1.tgz", + "integrity": 
"sha512-IPzF4w4/Rd94bA9imS68tZBaYyBWSCE47V1RGuMrB94iyTOIEwRmVL2x/4An+6mETpLrKJ5hQkB8W4kFAadeIQ==", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/motdotla/dotenv?sponsor=1" + } + }, + "node_modules/fetch-blob": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/fetch-blob/-/fetch-blob-3.2.0.tgz", + "integrity": "sha512-7yAQpD2UMJzLi1Dqv7qFYnPbaPx7ZfFK6PiIxQ4PfkGPyNyl2Ugx+a/umUonmKqjhM4DnfbMvdX6otXq83soQQ==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/jimmywarting" + }, + { + "type": "paypal", + "url": "https://paypal.me/jimmywarting" + } + ], + "dependencies": { + "node-domexception": "^1.0.0", + "web-streams-polyfill": "^3.0.3" + }, + "engines": { + "node": "^12.20 || >= 14.13" + } + }, + "node_modules/formdata-polyfill": { + "version": "4.0.10", + "resolved": "https://registry.npmjs.org/formdata-polyfill/-/formdata-polyfill-4.0.10.tgz", + "integrity": "sha512-buewHzMvYL29jdeQTVILecSaZKnt/RJWjoZCF5OW60Z67/GmSLBkOFM7qh1PI3zFNtJbaZL5eQu1vLfazOwj4g==", + "dependencies": { + "fetch-blob": "^3.1.2" + }, + "engines": { + "node": ">=12.20.0" + } + }, + "node_modules/fsevents": { + "version": "2.3.2", + "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.2.tgz", + "integrity": "sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA==", + "hasInstallScript": true, + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": "^8.16.0 || ^10.6.0 || >=11.0.0" + } + }, + "node_modules/immediate": { + "version": "3.0.6", + "resolved": "https://registry.npmjs.org/immediate/-/immediate-3.0.6.tgz", + "integrity": "sha512-XXOFtyqDjNDAQxVfYxuF7g9Il/IbWmmlQg2MYKOH8ExIT1qg6xc4zyS3HaEEATgs1btfzxq15ciUiY7gjSXRGQ==" + }, + "node_modules/inherits": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", + "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==" + }, + "node_modules/isarray": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz", + "integrity": "sha512-VLghIWNM6ELQzo7zwmcg0NmTVyWKYjvIeM83yjp0wRDTmUnrM678fQbcKBo6n2CJEF0szoG//ytg+TKla89ALQ==" + }, + "node_modules/jszip": { + "version": "3.10.1", + "resolved": "https://registry.npmjs.org/jszip/-/jszip-3.10.1.tgz", + "integrity": "sha512-xXDvecyTpGLrqFrvkrUSoxxfJI5AH7U8zxxtVclpsUtMCq4JQ290LY8AW5c7Ggnr/Y/oK+bQMbqK2qmtk3pN4g==", + "dependencies": { + "lie": "~3.3.0", + "pako": "~1.0.2", + "readable-stream": "~2.3.6", + "setimmediate": "^1.0.5" + } + }, + "node_modules/lie": { + "version": "3.3.0", + "resolved": "https://registry.npmjs.org/lie/-/lie-3.3.0.tgz", + "integrity": "sha512-UaiMJzeWRlEujzAuw5LokY1L5ecNQYZKfmyZ9L7wDHb/p5etKaxXhohBcrw0EYby+G/NA52vRSN4N39dxHAIwQ==", + "dependencies": { + "immediate": "~3.0.5" + } + }, + "node_modules/minimist": { + "version": "1.2.8", + "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.8.tgz", + "integrity": "sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA==", + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/mkdirp": { + "version": "0.5.6", + "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.6.tgz", + "integrity": "sha512-FP+p8RB8OWpF3YZBCrP5gtADmtXApB5AMLn+vdyA+PyxCjrCs00mjyUozssO33cwDeT3wNGdLxJ5M//YqtHAJw==", + "dependencies": { + "minimist": "^1.2.6" + }, + "bin": { + "mkdirp": "bin/cmd.js" + } + }, + 
"node_modules/node-domexception": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/node-domexception/-/node-domexception-1.0.0.tgz", + "integrity": "sha512-/jKZoMpw0F8GRwl4/eLROPA3cfcXtLApP0QzLmUT/HuPCZWyB7IY9ZrMeKw2O/nFIqPQB3PVM9aYm0F312AXDQ==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/jimmywarting" + }, + { + "type": "github", + "url": "https://paypal.me/jimmywarting" + } + ], + "engines": { + "node": ">=10.5.0" + } + }, + "node_modules/node-fetch": { + "version": "3.3.2", + "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-3.3.2.tgz", + "integrity": "sha512-dRB78srN/l6gqWulah9SrxeYnxeddIG30+GOqK/9OlLVyLg3HPnr6SqOWTWOXKRwC2eGYCkZ59NNuSgvSrpgOA==", + "dependencies": { + "data-uri-to-buffer": "^4.0.0", + "fetch-blob": "^3.1.4", + "formdata-polyfill": "^4.0.10" + }, + "engines": { + "node": "^12.20.0 || ^14.13.1 || >=16.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/node-fetch" + } + }, + "node_modules/pako": { + "version": "1.0.11", + "resolved": "https://registry.npmjs.org/pako/-/pako-1.0.11.tgz", + "integrity": "sha512-4hLB8Py4zZce5s4yd9XzopqwVv/yGNhV1Bl8NTmCq1763HeK2+EwVTv+leGeL13Dnh2wfbqowVPXCIO0z4taYw==" + }, + "node_modules/playwright": { + "version": "1.41.2", + "resolved": "https://registry.npmjs.org/playwright/-/playwright-1.41.2.tgz", + "integrity": "sha512-v0bOa6H2GJChDL8pAeLa/LZC4feoAMbSQm1/jF/ySsWWoaNItvrMP7GEkvEEFyCTUYKMxjQKaTSg5up7nR6/8A==", + "dependencies": { + "playwright-core": "1.41.2" + }, + "bin": { + "playwright": "cli.js" + }, + "engines": { + "node": ">=16" + }, + "optionalDependencies": { + "fsevents": "2.3.2" + } + }, + "node_modules/playwright-core": { + "version": "1.41.2", + "resolved": "https://registry.npmjs.org/playwright-core/-/playwright-core-1.41.2.tgz", + "integrity": "sha512-VaTvwCA4Y8kxEe+kfm2+uUUw5Lubf38RxF7FpBxLPmGe5sdNkSg5e3ChEigaGrX7qdqT3pt2m/98LiyvU2x6CA==", + "bin": { + "playwright-core": "cli.js" + }, + "engines": { + "node": ">=16" + } + }, + "node_modules/process-nextick-args": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-2.0.1.tgz", + "integrity": "sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag==" + }, + "node_modules/readable-stream": { + "version": "2.3.8", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.8.tgz", + "integrity": "sha512-8p0AUk4XODgIewSi0l8Epjs+EVnWiK7NoDIEGU0HhE7+ZyY8D1IMY7odu5lRrFXGg71L15KG8QrPmum45RTtdA==", + "dependencies": { + "core-util-is": "~1.0.0", + "inherits": "~2.0.3", + "isarray": "~1.0.0", + "process-nextick-args": "~2.0.0", + "safe-buffer": "~5.1.1", + "string_decoder": "~1.1.1", + "util-deprecate": "~1.0.1" + } + }, + "node_modules/safe-buffer": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", + "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==" + }, + "node_modules/setimmediate": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/setimmediate/-/setimmediate-1.0.5.tgz", + "integrity": "sha512-MATJdZp8sLqDl/68LfQmbP8zKPLQNV6BIZoIgrscFDQ+RsvK/BxeDQOgyxKKoh0y/8h3BqVFnCqQ/gd+reiIXA==" + }, + "node_modules/string_decoder": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", + "integrity": 
"sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", + "dependencies": { + "safe-buffer": "~5.1.0" + } + }, + "node_modules/undici-types": { + "version": "5.26.5", + "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-5.26.5.tgz", + "integrity": "sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA==", + "dev": true + }, + "node_modules/unzip-crx-3": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/unzip-crx-3/-/unzip-crx-3-0.2.0.tgz", + "integrity": "sha512-0+JiUq/z7faJ6oifVB5nSwt589v1KCduqIJupNVDoWSXZtWDmjDGO3RAEOvwJ07w90aoXoP4enKsR7ecMrJtWQ==", + "dependencies": { + "jszip": "^3.1.0", + "mkdirp": "^0.5.1", + "yaku": "^0.16.6" + } + }, + "node_modules/util-deprecate": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", + "integrity": "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==" + }, + "node_modules/web-streams-polyfill": { + "version": "3.3.2", + "resolved": "https://registry.npmjs.org/web-streams-polyfill/-/web-streams-polyfill-3.3.2.tgz", + "integrity": "sha512-3pRGuxRF5gpuZc0W+EpwQRmCD7gRqcDOMt688KmdlDAgAyaB1XlN0zq2njfDNm44XVdIouE7pZ6GzbdyH47uIQ==", + "engines": { + "node": ">= 8" + } + }, + "node_modules/yaku": { + "version": "0.16.7", + "resolved": "https://registry.npmjs.org/yaku/-/yaku-0.16.7.tgz", + "integrity": "sha512-Syu3IB3rZvKvYk7yTiyl1bo/jiEFaaStrgv1V2TIJTqYPStSMQVO8EQjg/z+DRzLq/4LIIharNT3iH1hylEIRw==" + } + } +} diff --git a/tests/wallet-automation/package.json b/tests/wallet-automation/package.json new file mode 100644 index 0000000000..1411f74378 --- /dev/null +++ b/tests/wallet-automation/package.json @@ -0,0 +1,33 @@ +{ + "name": "catalyst-core", + "version": "1.0.0", + "description": "