diff --git a/.changeset/mean-impalas-leave.md b/.changeset/mean-impalas-leave.md new file mode 100644 index 000000000..cb089e14e --- /dev/null +++ b/.changeset/mean-impalas-leave.md @@ -0,0 +1,6 @@ +--- +'@hyperlane-xyz/core': patch +'@hyperlane-xyz/helloworld': patch +--- + +fix: `TokenRouter.transferRemote` with hook overrides diff --git a/.changeset/olive-geckos-behave.md b/.changeset/olive-geckos-behave.md new file mode 100644 index 000000000..fef39310d --- /dev/null +++ b/.changeset/olive-geckos-behave.md @@ -0,0 +1,5 @@ +--- +'@hyperlane-xyz/sdk': patch +--- + +Do not consider xERC20 a collateral standard to fix fungibility checking logic while maintaining mint limit checking diff --git a/.changeset/pre.json b/.changeset/pre.json deleted file mode 100644 index 741a3c3c6..000000000 --- a/.changeset/pre.json +++ /dev/null @@ -1,20 +0,0 @@ -{ - "mode": "pre", - "tag": "alpha", - "initialVersions": { - "@hyperlane-xyz/core": "4.0.0-alpha.0", - "@hyperlane-xyz/ccip-server": "4.0.0-alpha.0", - "@hyperlane-xyz/cli": "4.0.0-alpha.0", - "@hyperlane-xyz/helloworld": "4.0.0-alpha.0", - "@hyperlane-xyz/infra": "4.0.0-alpha.0", - "@hyperlane-xyz/sdk": "4.0.0-alpha.0", - "@hyperlane-xyz/utils": "4.0.0-alpha.0" - }, - "changesets": [ - "bright-emus-double", - "five-baboons-smoke", - "late-rings-attack", - "sharp-geckos-wash", - "slimy-toys-argue" - ] -} diff --git a/.changeset/sixty-ducks-brush.md b/.changeset/sixty-ducks-brush.md new file mode 100644 index 000000000..603793cc3 --- /dev/null +++ b/.changeset/sixty-ducks-brush.md @@ -0,0 +1,6 @@ +--- +'@hyperlane-xyz/cli': patch +'@hyperlane-xyz/sdk': patch +--- + +Support priorityFee fetching from RPC and some better logging diff --git a/.github/workflows/agent-release-artifacts.yml b/.github/workflows/agent-release-artifacts.yml index 282712098..f54822817 100644 --- a/.github/workflows/agent-release-artifacts.yml +++ b/.github/workflows/agent-release-artifacts.yml @@ -43,7 +43,7 @@ jobs: runs-on: ${{ matrix.OS }} steps: - name: checkout - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: ubuntu setup if: ${{ matrix.OS == 'larger-runner' }} run: | @@ -74,7 +74,7 @@ jobs: run: chmod ug+x,-w relayer scraper validator working-directory: rust/target/${{ matrix.TARGET }}/release - name: upload binaries - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 with: name: ${{ matrix.TARGET }}-${{ needs.prepare.outputs.tag_sha }}-${{ needs.prepare.outputs.tag_date }} path: | diff --git a/.github/workflows/monorepo-docker.yml b/.github/workflows/monorepo-docker.yml index b9d90db2b..629e08acb 100644 --- a/.github/workflows/monorepo-docker.yml +++ b/.github/workflows/monorepo-docker.yml @@ -36,7 +36,7 @@ jobs: if: needs.check-env.outputs.gcloud-service-key == 'true' steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 with: ref: ${{ github.event.pull_request.head.sha || github.sha }} submodules: recursive @@ -48,7 +48,7 @@ jobs: echo "TAG_SHA=$(echo '${{ github.sha }}' | cut -b 1-7)" >> $GITHUB_OUTPUT - name: Docker meta id: meta - uses: docker/metadata-action@v3 + uses: docker/metadata-action@v5 with: # list of Docker images to use as base name for tags images: | @@ -59,15 +59,15 @@ jobs: type=ref,event=pr type=raw,value=${{ steps.taggen.outputs.TAG_SHA }}-${{ steps.taggen.outputs.TAG_DATE }} - name: Set up Docker Buildx - uses: docker/setup-buildx-action@v2 + uses: docker/setup-buildx-action@v3 - name: Login to GCR - uses: docker/login-action@v2 + uses: docker/login-action@v3 with: registry: gcr.io username: _json_key 
password: ${{ secrets.GCLOUD_SERVICE_KEY }} - name: Build and push - uses: docker/build-push-action@v3 + uses: docker/build-push-action@v5 with: context: ./ file: ./Dockerfile diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 6fc7eba09..dfedb241a 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -19,14 +19,14 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout Repo - uses: actions/checkout@v3 + uses: actions/checkout@v4 with: # check out full history fetch-depth: 0 submodules: recursive - name: Setup Node.js 18.x - uses: actions/setup-node@v3 + uses: actions/setup-node@v4 with: node-version: 18.x diff --git a/.github/workflows/rust-docker.yml b/.github/workflows/rust-docker.yml index 1463d11a1..0da80ff79 100644 --- a/.github/workflows/rust-docker.yml +++ b/.github/workflows/rust-docker.yml @@ -33,7 +33,7 @@ jobs: if: needs.check-env.outputs.gcloud-service-key == 'true' steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 with: ref: ${{ github.event.pull_request.head.sha || github.sha }} - name: Generate tag data @@ -43,7 +43,7 @@ jobs: echo "TAG_SHA=$(echo '${{ github.sha }}' | cut -b 1-7)" >> $GITHUB_OUTPUT - name: Docker meta id: meta - uses: docker/metadata-action@v3 + uses: docker/metadata-action@v5 with: # list of Docker images to use as base name for tags images: | @@ -54,15 +54,15 @@ jobs: type=ref,event=pr type=raw,value=${{ steps.taggen.outputs.TAG_SHA }}-${{ steps.taggen.outputs.TAG_DATE }} - name: Set up Docker Buildx - uses: docker/setup-buildx-action@v2 + uses: docker/setup-buildx-action@v3 - name: Login to GCR - uses: docker/login-action@v2 + uses: docker/login-action@v3 with: registry: gcr.io username: _json_key password: ${{ secrets.GCLOUD_SERVICE_KEY }} - name: Build and push - uses: docker/build-push-action@v3 + uses: docker/build-push-action@v5 with: context: ./rust file: ./rust/Dockerfile diff --git a/.github/workflows/rust-skipped.yml b/.github/workflows/rust-skipped.yml index 2d9fe8a5d..a854837a0 100644 --- a/.github/workflows/rust-skipped.yml +++ b/.github/workflows/rust-skipped.yml @@ -18,12 +18,10 @@ env: jobs: test-rs: runs-on: ubuntu-latest - steps: - run: 'echo "No test required" ' lint-rs: runs-on: ubuntu-latest - steps: - run: 'echo "No lint required" ' diff --git a/.github/workflows/rust.yml b/.github/workflows/rust.yml index a6e270a48..0a2001630 100644 --- a/.github/workflows/rust.yml +++ b/.github/workflows/rust.yml @@ -6,6 +6,7 @@ on: paths: - 'rust/**' - .github/workflows/rust.yml + - '!*.md' # Support for merge queues merge_group: # Allows you to run this workflow manually from the Actions tab @@ -28,7 +29,7 @@ jobs: runs-on: larger-runner steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 with: ref: ${{ github.event.pull_request.head.sha || github.sha }} - uses: actions-rs/toolchain@v1 @@ -38,8 +39,8 @@ jobs: - name: rust cache uses: Swatinem/rust-cache@v2 with: - prefix-key: "v2-rust" - shared-key: "test" + prefix-key: 'v2-rust' + shared-key: 'test' workspaces: | ./rust - name: Free disk space @@ -56,7 +57,7 @@ jobs: runs-on: larger-runner steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 with: ref: ${{ github.event.pull_request.head.sha || github.sha }} - uses: actions-rs/toolchain@v1 @@ -68,8 +69,8 @@ jobs: - name: rust cache uses: Swatinem/rust-cache@v2 with: - prefix-key: "v2-rust" - shared-key: "lint" + prefix-key: 'v2-rust' + shared-key: 'lint' workspaces: | ./rust - name: Free disk space diff --git 
a/.github/workflows/static-analysis.yml b/.github/workflows/static-analysis.yml index 881d8f9bd..a3eba2c88 100644 --- a/.github/workflows/static-analysis.yml +++ b/.github/workflows/static-analysis.yml @@ -18,13 +18,13 @@ jobs: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 with: ref: ${{ github.event.pull_request.head.sha || github.sha }} submodules: recursive - name: yarn-cache - uses: actions/cache@v3 + uses: actions/cache@v4 with: path: | **/node_modules @@ -35,13 +35,13 @@ jobs: run: yarn install - name: foundry-install - uses: onbjerg/foundry-toolchain@v1 + uses: foundry-rs/foundry-toolchain@v1 - name: forge-build run: cd solidity && forge build --build-info - name: Static analysis - uses: crytic/slither-action@v0.3.0 + uses: crytic/slither-action@v0.4.0 id: slither with: target: 'solidity/' @@ -51,6 +51,6 @@ jobs: ignore-compile: true - name: Upload SARIF file - uses: github/codeql-action/upload-sarif@v2 + uses: github/codeql-action/upload-sarif@v3 with: sarif_file: ${{ steps.slither.outputs.sarif }} diff --git a/.github/workflows/storage-analysis.yml b/.github/workflows/storage-analysis.yml index f50a17b4d..70e77f0dd 100644 --- a/.github/workflows/storage-analysis.yml +++ b/.github/workflows/storage-analysis.yml @@ -14,17 +14,17 @@ jobs: steps: # Checkout the PR branch - name: Checkout PR branch - uses: actions/checkout@v3 + uses: actions/checkout@v4 with: ref: ${{ github.event.pull_request.head.sha || github.sha }} submodules: recursive - - uses: actions/setup-node@v3 + - uses: actions/setup-node@v4 with: node-version: 18 - name: yarn-cache - uses: actions/cache@v3 + uses: actions/cache@v4 with: path: | **/node_modules @@ -35,7 +35,7 @@ jobs: run: yarn install - name: foundry-install - uses: onbjerg/foundry-toolchain@v1 + uses: foundry-rs/foundry-toolchain@v1 # Run the command on PR branch - name: Run command on PR branch diff --git a/.github/workflows/test-skipped.yml b/.github/workflows/test-skipped.yml new file mode 100644 index 000000000..3cff77784 --- /dev/null +++ b/.github/workflows/test-skipped.yml @@ -0,0 +1,109 @@ +name: test + +on: + push: + branches: [main] + pull_request: + branches: + - '*' + paths: + - '*.md' + - '!**/*' + merge_group: + +concurrency: + group: e2e-${{ github.ref }} + cancel-in-progress: ${{ github.ref_name != 'main' }} + +jobs: + yarn-install: + runs-on: ubuntu-latest + steps: + - name: Instant pass + run: echo "yarn-install job passed" + + yarn-build: + runs-on: ubuntu-latest + steps: + - name: Instant pass + run: echo "yarn-build job passed" + + checkout-registry: + runs-on: ubuntu-latest + steps: + - name: Instant pass + run: echo "checkout-registry job passed" + + lint-prettier: + runs-on: ubuntu-latest + steps: + - name: Instant pass + run: echo "lint-prettier job passed" + + yarn-test: + runs-on: ubuntu-latest + steps: + - name: Instant pass + run: echo "yarn-test job passed" + + agent-configs: + runs-on: ubuntu-latest + strategy: + fail-fast: false + matrix: + environment: [mainnet3, testnet4] + steps: + - name: Instant pass + run: echo "agent-configs job passed" + + e2e-matrix: + runs-on: ubuntu-latest + if: github.event_name == 'push' || (github.event_name == 'pull_request' && github.base_ref == 'main') || github.event_name == 'merge_group' + strategy: + matrix: + e2e-type: [cosmwasm, non-cosmwasm] + steps: + - name: Instant pass + run: echo "e2e-matrix job passed" + + e2e: + runs-on: ubuntu-latest + if: always() + steps: + - name: Instant pass + run: echo "e2e job passed" + + cli-e2e: + runs-on: 
ubuntu-latest + if: github.event_name == 'push' || (github.event_name == 'pull_request' && github.base_ref == 'main') || github.event_name == 'merge_group' + strategy: + matrix: + include: + - test-type: preset_hook_enabled + - test-type: configure_hook_enabled + - test-type: pi_with_core_chain + steps: + - name: Instant pass + run: echo "cli-e2e job passed" + + env-test: + runs-on: ubuntu-latest + strategy: + fail-fast: false + matrix: + environment: [mainnet3] + chain: [ethereum, arbitrum, optimism, inevm, viction] + module: [core, igp] + include: + - environment: testnet4 + chain: sepolia + module: core + steps: + - name: Instant pass + run: echo "env-test job passed" + + coverage: + runs-on: ubuntu-latest + steps: + - name: Instant pass + run: echo "coverage job passed" diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 7f4f8c9cb..b83bc9c35 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -7,6 +7,8 @@ on: pull_request: branches: - '*' # run against all branches + paths-ignore: + - '*.md' # Support for merge queues merge_group: # Allows you to run this workflow manually from the Actions tab @@ -28,17 +30,17 @@ jobs: yarn-install: runs-on: ubuntu-latest steps: - - uses: actions/setup-node@v3 + - uses: actions/setup-node@v4 with: node-version: 18 - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 with: ref: ${{ github.event.pull_request.head.sha || github.sha }} submodules: recursive - name: yarn-cache - uses: actions/cache@v3 + uses: actions/cache@v4 with: path: | **/node_modules @@ -59,14 +61,14 @@ jobs: runs-on: ubuntu-latest needs: [yarn-install] steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 with: ref: ${{ github.event.pull_request.head.sha || github.sha }} submodules: recursive fetch-depth: 0 - name: yarn-cache - uses: actions/cache@v3 + uses: actions/cache@v4 with: path: | **/node_modules @@ -74,7 +76,7 @@ jobs: key: ${{ runner.os }}-yarn-cache-${{ hashFiles('./yarn.lock') }} - name: build-cache - uses: actions/cache@v3 + uses: actions/cache@v4 with: path: | ./* @@ -87,7 +89,7 @@ jobs: checkout-registry: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 with: repository: hyperlane-xyz/hyperlane-registry ref: main @@ -103,7 +105,7 @@ jobs: - run: echo "REGISTRY_URI_ABSOLUTE=$(realpath $REGISTRY_URI)" >> $GITHUB_ENV - name: registry-cache - uses: actions/cache@v3 + uses: actions/cache@v4 with: path: | ${{ env.REGISTRY_URI_ABSOLUTE }} @@ -113,14 +115,14 @@ jobs: runs-on: ubuntu-latest needs: [yarn-install] steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 with: ref: ${{ github.event.pull_request.head.sha || github.sha }} # check out full history fetch-depth: 0 - name: yarn-cache - uses: actions/cache@v3 + uses: actions/cache@v4 with: path: | **/node_modules @@ -143,17 +145,17 @@ jobs: runs-on: ubuntu-latest needs: [yarn-build, checkout-registry] steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 with: ref: ${{ github.event.pull_request.head.sha || github.sha }} submodules: recursive fetch-depth: 0 - name: foundry-install - uses: onbjerg/foundry-toolchain@v1 + uses: foundry-rs/foundry-toolchain@v1 - name: build-cache - uses: actions/cache@v3 + uses: actions/cache@v4 with: path: | ./* @@ -165,7 +167,7 @@ jobs: - run: echo "REGISTRY_URI_ABSOLUTE=$(realpath $REGISTRY_URI)" >> $GITHUB_ENV - name: registry-cache - uses: actions/cache@v3 + uses: actions/cache@v4 with: path: | ${{ env.REGISTRY_URI_ABSOLUTE }} @@ -182,13 +184,13 @@ jobs: 
matrix: environment: [mainnet3, testnet4] steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 with: ref: ${{ github.event.pull_request.head.sha || github.sha }} fetch-depth: 0 - name: yarn-cache - uses: actions/cache@v3 + uses: actions/cache@v4 with: path: | **/node_modules @@ -196,7 +198,7 @@ jobs: key: ${{ runner.os }}-yarn-cache-${{ hashFiles('./yarn.lock') }} - name: build-cache - uses: actions/cache@v3 + uses: actions/cache@v4 with: path: | ./* @@ -208,7 +210,7 @@ jobs: - run: echo "REGISTRY_URI_ABSOLUTE=$(realpath $REGISTRY_URI)" >> $GITHUB_ENV - name: registry-cache - uses: actions/cache@v3 + uses: actions/cache@v4 with: path: | ${{ env.REGISTRY_URI_ABSOLUTE }} @@ -232,17 +234,17 @@ jobs: matrix: e2e-type: [cosmwasm, non-cosmwasm] steps: - - uses: actions/setup-node@v3 + - uses: actions/setup-node@v4 with: node-version: 18 - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 with: ref: ${{ github.event.pull_request.head.sha || github.sha }} submodules: recursive - name: foundry-install - uses: onbjerg/foundry-toolchain@v1 + uses: foundry-rs/foundry-toolchain@v1 - name: setup rust uses: actions-rs/toolchain@v1 @@ -265,7 +267,7 @@ jobs: make-default: true - name: yarn-cache - uses: actions/cache@v3 + uses: actions/cache@v4 with: path: | **/node_modules @@ -273,7 +275,7 @@ jobs: key: ${{ runner.os }}-yarn-cache-${{ hashFiles('./yarn.lock') }} - name: build-cache - uses: actions/cache@v3 + uses: actions/cache@v4 with: path: | ./* @@ -285,14 +287,14 @@ jobs: - run: echo "REGISTRY_URI_ABSOLUTE=$(realpath $REGISTRY_URI)" >> $GITHUB_ENV - name: registry-cache - uses: actions/cache@v3 + uses: actions/cache@v4 with: path: | ${{ env.REGISTRY_URI_ABSOLUTE }} key: hyperlane-registry-${{ github.event.pull_request.head.sha || github.sha }} - name: cargo-cache - uses: actions/cache@v3 + uses: actions/cache@v4 with: path: | ~/.cargo @@ -327,7 +329,7 @@ jobs: cli-e2e: runs-on: larger-runner - if: github.event_name == 'push' || (github.event_name == 'pull_request' && github.base_ref == 'main' || github.base_ref == 'cli-2.0') || github.event_name == 'merge_group' + if: github.event_name == 'push' || (github.event_name == 'pull_request' && github.base_ref == 'main') || github.event_name == 'merge_group' needs: [yarn-build, checkout-registry] strategy: matrix: @@ -336,17 +338,17 @@ jobs: - test-type: configure_hook_enabled - test-type: pi_with_core_chain steps: - - uses: actions/setup-node@v3 + - uses: actions/setup-node@v4 with: node-version: 18 - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 with: ref: ${{ github.event.pull_request.head.sha || github.sha }} submodules: recursive - name: foundry-install - uses: onbjerg/foundry-toolchain@v1 + uses: foundry-rs/foundry-toolchain@v1 - name: setup rust uses: actions-rs/toolchain@v1 @@ -369,7 +371,7 @@ jobs: make-default: true - name: yarn-cache - uses: actions/cache@v3 + uses: actions/cache@v4 with: path: | **/node_modules @@ -377,7 +379,7 @@ jobs: key: ${{ runner.os }}-yarn-cache-${{ hashFiles('./yarn.lock') }} - name: build-cache - uses: actions/cache@v3 + uses: actions/cache@v4 with: path: | ./* @@ -389,14 +391,14 @@ jobs: - run: echo "REGISTRY_URI_ABSOLUTE=$(realpath $REGISTRY_URI)" >> $GITHUB_ENV - name: registry-cache - uses: actions/cache@v3 + uses: actions/cache@v4 with: path: | ${{ env.REGISTRY_URI_ABSOLUTE }} key: hyperlane-registry-${{ github.event.pull_request.head.sha || github.sha }} - name: cargo-cache - uses: actions/cache@v3 + uses: actions/cache@v4 with: path: | ~/.cargo @@ -420,15 +422,15 @@ jobs: module: 
core steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 with: ref: ${{ github.event.pull_request.head.sha || github.sha }} - name: foundry-install - uses: onbjerg/foundry-toolchain@v1 + uses: foundry-rs/foundry-toolchain@v1 - name: build-cache - uses: actions/cache@v3 + uses: actions/cache@v4 with: path: | ./* @@ -440,7 +442,7 @@ jobs: - run: echo "REGISTRY_URI_ABSOLUTE=$(realpath $REGISTRY_URI)" >> $GITHUB_ENV - name: registry-cache - uses: actions/cache@v3 + uses: actions/cache@v4 with: path: | ${{ env.REGISTRY_URI_ABSOLUTE }} @@ -454,13 +456,13 @@ jobs: needs: [yarn-test] steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 with: ref: ${{ github.event.pull_request.head.sha || github.sha }} fetch-depth: 0 - name: yarn-cache - uses: actions/cache@v3 + uses: actions/cache@v4 with: path: | **/node_modules @@ -468,7 +470,7 @@ jobs: key: ${{ runner.os }}-yarn-cache-${{ hashFiles('./yarn.lock') }} - name: build-cache - uses: actions/cache@v3 + uses: actions/cache@v4 with: path: | ./* @@ -476,7 +478,7 @@ jobs: key: ${{ github.event.pull_request.head.sha || github.sha }} - name: foundry-install - uses: onbjerg/foundry-toolchain@v1 + uses: foundry-rs/foundry-toolchain@v1 - name: Run tests with coverage run: yarn coverage @@ -484,6 +486,6 @@ jobs: NODE_OPTIONS: --max_old_space_size=4096 - name: Upload coverage reports to Codecov with GitHub Action - uses: codecov/codecov-action@v3 + uses: codecov/codecov-action@v4 with: token: ${{ secrets.CODECOV_TOKEN }} diff --git a/CODE_OF_CONDUCT.md b/CODE_OF_CONDUCT.md index a88580d40..2466daf87 100644 --- a/CODE_OF_CONDUCT.md +++ b/CODE_OF_CONDUCT.md @@ -9,10 +9,10 @@ This CoC applies to all members of the Hyperlane Network's community including, **Code** 1. Never harass or bully anyone. Not verbally, not physically, not sexually. Harassment will not be tolerated. -2. Never discrimnate on the basis of personal characteristics or group membership. +2. Never discriminate on the basis of personal characteristics or group membership. 3. Treat your fellow contributors with respect, fairness, and professionalism, especially in situations of high pressure. -4. Seek, offer, and accept objective critism of yours and others work, strive to acknowledge the contributions of others. -5. Be transparent and honest about your qualifications and any potential conflicts of interest. Transparency is a key tenant of the Hyperlane project and we expect it from all contributors. +4. Seek, offer, and accept objective criticism of yours and others work, strive to acknowledge the contributions of others. +5. Be transparent and honest about your qualifications and any potential conflicts of interest. Transparency is a key tenet of the Hyperlane project and we expect it from all contributors. 6. Bring an open and curious mind, the Hyperlane project is designed to enable developers to express their curiosity, experiment, and build things we couldn't have imagined ourselves. 7. Stay on track - Do your best to avoid off-topic discussion and make sure you are posting to the correct channel and repositories. Distractions are costly and it is far too easy for work to go off track. 8. Step down properly - Think of your fellow contributors when you step down from the project. Contributors of open-source projects come and go. It is crucial that when you leave the project or reduce your contribution significantly you do so in a way that minimizes disruption and keeps continuity in mind. 
Concretely this means telling your fellow contributors you are leaving and taking the proper steps to enable a smooth transition for other contributors to pick up where you left off. diff --git a/README.md b/README.md index 04551feee..7ff7af932 100644 --- a/README.md +++ b/README.md @@ -103,3 +103,9 @@ See [`rust/README.md`](rust/README.md) - Create a summary of change highlights - Create a "breaking changes" section with any changes required - Deploy agents with the new image tag (if it makes sense to) + +### Releasing packages to NPM + +We use [changesets](https://github.com/changesets/changesets) to release to NPM. You can use the `release` script in `package.json` to publish. + +For an alpha or beta version, follow the directions [here](https://github.com/changesets/changesets/blob/main/docs/prereleases.md). diff --git a/funding.json b/funding.json new file mode 100644 index 000000000..7eca8c578 --- /dev/null +++ b/funding.json @@ -0,0 +1,5 @@ +{ + "opRetro": { + "projectId": "0xa47182d330bd0c5c69b1418462f3f742099138f09bff057189cdd19676a6acd1" + } +} diff --git a/package.json b/package.json index 3da986a51..36ea71a2a 100644 --- a/package.json +++ b/package.json @@ -6,7 +6,7 @@ "@trivago/prettier-plugin-sort-imports": "^4.2.1", "@typescript-eslint/eslint-plugin": "^7.4.0", "@typescript-eslint/parser": "^7.4.0", - "eslint": "^9.0.0", + "eslint": "^8.57.0", "eslint-config-prettier": "^9.1.0", "eslint-plugin-jest": "^28.2.0", "husky": "^8.0.0", diff --git a/rust/.vscode/extensions.json b/rust/.vscode/extensions.json index e38df3a9f..c8e7623ea 100644 --- a/rust/.vscode/extensions.json +++ b/rust/.vscode/extensions.json @@ -4,7 +4,7 @@ // List of extensions which should be recommended for users of this workspace. "recommendations": [ - "panicbit.cargo", + "rust-lang.rust-analyzer", "tamasfe.even-better-toml", "rust-lang.rust-analyzer", ], diff --git a/rust/Cargo.lock b/rust/Cargo.lock index f29c2c80e..f1c4f79b2 100644 --- a/rust/Cargo.lock +++ b/rust/Cargo.lock @@ -7275,7 +7275,9 @@ dependencies = [ "macro_rules_attribute", "maplit", "nix 0.26.4", + "once_cell", "regex", + "relayer", "ripemd", "serde", "serde_json", diff --git a/rust/agents/relayer/Cargo.toml b/rust/agents/relayer/Cargo.toml index 2df8f54d9..cf35092f7 100644 --- a/rust/agents/relayer/Cargo.toml +++ b/rust/agents/relayer/Cargo.toml @@ -38,7 +38,7 @@ tracing-futures.workspace = true tracing.workspace = true hyperlane-core = { path = "../../hyperlane-core", features = ["agent", "async"] } -hyperlane-base = { path = "../../hyperlane-base" } +hyperlane-base = { path = "../../hyperlane-base", features = ["test-utils"] } hyperlane-ethereum = { path = "../../chains/hyperlane-ethereum" } [dev-dependencies] diff --git a/rust/agents/relayer/src/lib.rs b/rust/agents/relayer/src/lib.rs new file mode 100644 index 000000000..62b896d62 --- /dev/null +++ b/rust/agents/relayer/src/lib.rs @@ -0,0 +1,10 @@ +mod merkle_tree; +mod msg; +mod processor; +mod prover; +mod relayer; +mod server; +mod settings; + +pub use msg::GAS_EXPENDITURE_LOG_MESSAGE; +pub use relayer::*; diff --git a/rust/agents/relayer/src/main.rs b/rust/agents/relayer/src/main.rs index 1223702f8..7d085f529 100644 --- a/rust/agents/relayer/src/main.rs +++ b/rust/agents/relayer/src/main.rs @@ -11,15 +11,7 @@ use eyre::Result; use hyperlane_base::agent_main; -use crate::relayer::Relayer; - -mod merkle_tree; -mod msg; -mod processor; -mod prover; -mod relayer; -mod server; -mod settings; +use relayer::Relayer; #[tokio::main(flavor = "multi_thread", worker_threads = 20)] async fn 
main() -> Result<()> { diff --git a/rust/agents/relayer/src/msg/gas_payment/mod.rs b/rust/agents/relayer/src/msg/gas_payment/mod.rs index cd9dd61c0..a07210391 100644 --- a/rust/agents/relayer/src/msg/gas_payment/mod.rs +++ b/rust/agents/relayer/src/msg/gas_payment/mod.rs @@ -19,6 +19,8 @@ use crate::{ mod policies; +pub const GAS_EXPENDITURE_LOG_MESSAGE: &str = "Recording gas expenditure for message"; + #[async_trait] pub trait GasPaymentPolicy: Debug + Send + Sync { /// Returns Some(gas_limit) if the policy has approved the transaction or @@ -132,6 +134,13 @@ impl GasPaymentEnforcer { } pub fn record_tx_outcome(&self, message: &HyperlaneMessage, outcome: TxOutcome) -> Result<()> { + // This log is required in E2E, hence the use of a `const` + debug!( + msg=%message, + ?outcome, + "{}", + GAS_EXPENDITURE_LOG_MESSAGE, + ); self.db.process_gas_expenditure(InterchainGasExpenditure { message_id: message.id(), gas_used: outcome.gas_used, diff --git a/rust/agents/relayer/src/msg/mod.rs b/rust/agents/relayer/src/msg/mod.rs index 60c2ce0c5..dd7bac22b 100644 --- a/rust/agents/relayer/src/msg/mod.rs +++ b/rust/agents/relayer/src/msg/mod.rs @@ -30,5 +30,6 @@ pub(crate) mod metadata; pub(crate) mod op_queue; pub(crate) mod op_submitter; pub(crate) mod pending_message; -pub(crate) mod pending_operation; pub(crate) mod processor; + +pub use gas_payment::GAS_EXPENDITURE_LOG_MESSAGE; diff --git a/rust/agents/relayer/src/msg/op_queue.rs b/rust/agents/relayer/src/msg/op_queue.rs index ef8c2ad2d..007208554 100644 --- a/rust/agents/relayer/src/msg/op_queue.rs +++ b/rust/agents/relayer/src/msg/op_queue.rs @@ -1,24 +1,20 @@ use std::{cmp::Reverse, collections::BinaryHeap, sync::Arc}; use derive_new::new; -use hyperlane_core::MpmcReceiver; +use hyperlane_core::{PendingOperation, QueueOperation}; use prometheus::{IntGauge, IntGaugeVec}; -use tokio::sync::Mutex; -use tracing::{info, instrument}; +use tokio::sync::{broadcast::Receiver, Mutex}; +use tracing::{debug, info, instrument}; use crate::server::MessageRetryRequest; -use super::pending_operation::PendingOperation; - -pub type QueueOperation = Box; - /// Queue of generic operations that can be submitted to a destination chain. /// Includes logic for maintaining queue metrics by the destination and `app_context` of an operation #[derive(Debug, Clone, new)] pub struct OpQueue { metrics: IntGaugeVec, queue_metrics_label: String, - retry_rx: MpmcReceiver, + retry_rx: Arc>>, #[new(default)] queue: Arc>>>, } @@ -41,7 +37,7 @@ impl OpQueue { } /// Pop multiple elements at once from the queue and update metrics - #[instrument(skip(self), ret, fields(queue_label=%self.queue_metrics_label), level = "debug")] + #[instrument(skip(self), fields(queue_label=%self.queue_metrics_label), level = "debug")] pub async fn pop_many(&mut self, limit: usize) -> Vec { self.process_retry_requests().await; let mut queue = self.queue.lock().await; @@ -55,6 +51,15 @@ impl OpQueue { break; } } + // This function is called very often by the op_submitter tasks, so only log when there are operations to pop + // to avoid spamming the logs + if !popped.is_empty() { + debug!( + queue_label = %self.queue_metrics_label, + operations = ?popped, + "Popped OpQueue operations" + ); + } popped } @@ -64,7 +69,7 @@ impl OpQueue { // The other consideration is whether to put the channel receiver in the OpQueue or in a dedicated task // that also holds an Arc to the Mutex. For simplicity, we'll put it in the OpQueue for now. 
let mut message_retry_requests = vec![]; - while let Ok(message_id) = self.retry_rx.receiver.try_recv() { + while let Ok(message_id) = self.retry_rx.lock().await.try_recv() { message_retry_requests.push(message_id); } if message_retry_requests.is_empty() { @@ -101,15 +106,15 @@ impl OpQueue { #[cfg(test)] mod test { use super::*; - use crate::msg::pending_operation::PendingOperationResult; use hyperlane_core::{ - HyperlaneDomain, HyperlaneMessage, KnownHyperlaneDomain, MpmcChannel, TryBatchAs, - TxOutcome, H256, + HyperlaneDomain, HyperlaneMessage, KnownHyperlaneDomain, PendingOperationResult, + TryBatchAs, TxOutcome, H256, U256, }; use std::{ collections::VecDeque, time::{Duration, Instant}, }; + use tokio::sync; #[derive(Debug, Clone)] struct MockPendingOperation { @@ -174,6 +179,10 @@ mod test { todo!() } + fn get_tx_cost_estimate(&self) -> Option { + todo!() + } + /// This will be called after the operation has been submitted and is /// responsible for checking if the operation has reached a point at /// which we consider it safe from reorgs. @@ -181,6 +190,14 @@ mod test { todo!() } + fn set_operation_outcome( + &mut self, + _submission_outcome: TxOutcome, + _submission_estimated_cost: U256, + ) { + todo!() + } + fn next_attempt_after(&self) -> Option { Some( Instant::now() @@ -212,13 +229,17 @@ mod test { #[tokio::test] async fn test_multiple_op_queues_message_id() { let (metrics, queue_metrics_label) = dummy_metrics_and_label(); - let mpmc_channel = MpmcChannel::new(100); + let broadcaster = sync::broadcast::Sender::new(100); let mut op_queue_1 = OpQueue::new( metrics.clone(), queue_metrics_label.clone(), - mpmc_channel.receiver(), + Arc::new(Mutex::new(broadcaster.subscribe())), + ); + let mut op_queue_2 = OpQueue::new( + metrics, + queue_metrics_label, + Arc::new(Mutex::new(broadcaster.subscribe())), ); - let mut op_queue_2 = OpQueue::new(metrics, queue_metrics_label, mpmc_channel.receiver()); // Add some operations to the queue with increasing `next_attempt_after` values let destination_domain: HyperlaneDomain = KnownHyperlaneDomain::Injective.into(); @@ -244,11 +265,10 @@ mod test { } // Retry by message ids - let mpmc_tx = mpmc_channel.sender(); - mpmc_tx + broadcaster .send(MessageRetryRequest::MessageId(op_ids[1])) .unwrap(); - mpmc_tx + broadcaster .send(MessageRetryRequest::MessageId(op_ids[2])) .unwrap(); @@ -278,11 +298,11 @@ mod test { #[tokio::test] async fn test_destination_domain() { let (metrics, queue_metrics_label) = dummy_metrics_and_label(); - let mpmc_channel = MpmcChannel::new(100); + let broadcaster = sync::broadcast::Sender::new(100); let mut op_queue = OpQueue::new( metrics.clone(), queue_metrics_label.clone(), - mpmc_channel.receiver(), + Arc::new(Mutex::new(broadcaster.subscribe())), ); // Add some operations to the queue with increasing `next_attempt_after` values @@ -304,8 +324,7 @@ mod test { } // Retry by domain - let mpmc_tx = mpmc_channel.sender(); - mpmc_tx + broadcaster .send(MessageRetryRequest::DestinationDomain( destination_domain_2.id(), )) diff --git a/rust/agents/relayer/src/msg/op_submitter.rs b/rust/agents/relayer/src/msg/op_submitter.rs index dc3091149..84731aa63 100644 --- a/rust/agents/relayer/src/msg/op_submitter.rs +++ b/rust/agents/relayer/src/msg/op_submitter.rs @@ -1,10 +1,14 @@ +use std::sync::Arc; use std::time::Duration; use derive_new::new; use futures::future::join_all; use futures_util::future::try_join_all; +use hyperlane_core::total_estimated_cost; use prometheus::{IntCounter, IntGaugeVec}; +use 
tokio::sync::broadcast::Sender; use tokio::sync::mpsc; +use tokio::sync::Mutex; use tokio::task::JoinHandle; use tokio::time::sleep; use tokio_metrics::TaskMonitor; @@ -14,14 +18,13 @@ use tracing::{info, warn}; use hyperlane_base::CoreMetrics; use hyperlane_core::{ BatchItem, ChainCommunicationError, ChainResult, HyperlaneDomain, HyperlaneDomainProtocol, - HyperlaneMessage, MpmcReceiver, TxOutcome, + HyperlaneMessage, PendingOperationResult, QueueOperation, TxOutcome, }; use crate::msg::pending_message::CONFIRM_DELAY; use crate::server::MessageRetryRequest; -use super::op_queue::{OpQueue, QueueOperation}; -use super::pending_operation::*; +use super::op_queue::OpQueue; /// SerialSubmitter accepts operations over a channel. It is responsible for /// executing the right strategy to deliver those messages to the destination @@ -77,7 +80,7 @@ pub struct SerialSubmitter { /// Receiver for new messages to submit. rx: mpsc::UnboundedReceiver, /// Receiver for retry requests. - retry_rx: MpmcReceiver, + retry_tx: Sender, /// Metrics for serial submitter. metrics: SerialSubmitterMetrics, /// Max batch size for submitting messages @@ -101,24 +104,24 @@ impl SerialSubmitter { domain, metrics, rx: rx_prepare, - retry_rx, + retry_tx, max_batch_size, task_monitor, } = self; let prepare_queue = OpQueue::new( metrics.submitter_queue_length.clone(), "prepare_queue".to_string(), - retry_rx.clone(), + Arc::new(Mutex::new(retry_tx.subscribe())), ); let submit_queue = OpQueue::new( metrics.submitter_queue_length.clone(), "submit_queue".to_string(), - retry_rx.clone(), + Arc::new(Mutex::new(retry_tx.subscribe())), ); let confirm_queue = OpQueue::new( metrics.submitter_queue_length.clone(), "confirm_queue".to_string(), - retry_rx, + Arc::new(Mutex::new(retry_tx.subscribe())), ); let tasks = [ @@ -241,6 +244,7 @@ async fn prepare_task( metrics.ops_dropped.inc(); } PendingOperationResult::Confirm => { + debug!(?op, "Pushing operation to confirm queue"); confirm_queue.push(op).await; } } @@ -425,11 +429,10 @@ impl OperationBatch { async fn submit(self, confirm_queue: &mut OpQueue, metrics: &SerialSubmitterMetrics) { match self.try_submit_as_batch(metrics).await { Ok(outcome) => { - // TODO: use the `tx_outcome` with the total gas expenditure - // We'll need to proportionally set `used_gas` based on the tx_outcome, so it can be updated in the confirm step - // which means we need to add a `set_transaction_outcome` fn to `PendingOperation` info!(outcome=?outcome, batch_size=self.operations.len(), batch=?self.operations, "Submitted transaction batch"); + let total_estimated_cost = total_estimated_cost(&self.operations); for mut op in self.operations { + op.set_operation_outcome(outcome.clone(), total_estimated_cost); op.set_next_attempt_after(CONFIRM_DELAY); confirm_queue.push(op).await; } @@ -459,8 +462,6 @@ impl OperationBatch { return Err(ChainCommunicationError::BatchIsEmpty); }; - // We use the estimated gas limit from the prior call to - // `process_estimate_costs` to avoid a second gas estimation. 
let outcome = first_item.mailbox.process_batch(&batch).await?; metrics.ops_submitted.inc_by(self.operations.len() as u64); Ok(outcome) diff --git a/rust/agents/relayer/src/msg/pending_message.rs b/rust/agents/relayer/src/msg/pending_message.rs index b2f8369d0..a0c373adc 100644 --- a/rust/agents/relayer/src/msg/pending_message.rs +++ b/rust/agents/relayer/src/msg/pending_message.rs @@ -9,8 +9,9 @@ use derive_new::new; use eyre::Result; use hyperlane_base::{db::HyperlaneRocksDB, CoreMetrics}; use hyperlane_core::{ - BatchItem, ChainCommunicationError, ChainResult, HyperlaneChain, HyperlaneDomain, - HyperlaneMessage, Mailbox, MessageSubmissionData, TryBatchAs, TxOutcome, H256, U256, + gas_used_by_operation, make_op_try, BatchItem, ChainCommunicationError, ChainResult, + HyperlaneChain, HyperlaneDomain, HyperlaneMessage, Mailbox, MessageSubmissionData, + PendingOperation, PendingOperationResult, TryBatchAs, TxOutcome, H256, U256, }; use prometheus::{IntCounter, IntGauge}; use tracing::{debug, error, info, instrument, trace, warn}; @@ -18,7 +19,6 @@ use tracing::{debug, error, info, instrument, trace, warn}; use super::{ gas_payment::GasPaymentEnforcer, metadata::{BaseMetadataBuilder, MessageMetadataBuilder, MetadataBuilder}, - pending_operation::*, }; pub const CONFIRM_DELAY: Duration = if cfg!(any(test, feature = "test-utils")) { @@ -259,7 +259,7 @@ impl PendingOperation for PendingMessage { let state = self .submission_data - .take() + .clone() .expect("Pending message must be prepared before it can be submitted"); // We use the estimated gas limit from the prior call to @@ -271,7 +271,7 @@ impl PendingOperation for PendingMessage { .await; match tx_outcome { Ok(outcome) => { - self.set_submission_outcome(outcome); + self.set_operation_outcome(outcome, state.gas_limit); } Err(e) => { error!(error=?e, "Error when processing message"); @@ -283,6 +283,10 @@ impl PendingOperation for PendingMessage { self.submission_outcome = Some(outcome); } + fn get_tx_cost_estimate(&self) -> Option { + self.submission_data.as_ref().map(|d| d.gas_limit) + } + async fn confirm(&mut self) -> PendingOperationResult { make_op_try!(|| { // Provider error; just try again later @@ -313,15 +317,6 @@ impl PendingOperation for PendingMessage { ); PendingOperationResult::Success } else { - if let Some(outcome) = &self.submission_outcome { - if let Err(e) = self - .ctx - .origin_gas_payment_enforcer - .record_tx_outcome(&self.message, outcome.clone()) - { - error!(error=?e, "Error when recording tx outcome"); - } - } warn!( tx_outcome=?self.submission_outcome, message_id=?self.message.id(), @@ -331,6 +326,50 @@ impl PendingOperation for PendingMessage { } } + fn set_operation_outcome( + &mut self, + submission_outcome: TxOutcome, + submission_estimated_cost: U256, + ) { + let Some(operation_estimate) = self.get_tx_cost_estimate() else { + warn!("Cannot set operation outcome without a cost estimate set previously"); + return; + }; + // calculate the gas used by the operation + let gas_used_by_operation = match gas_used_by_operation( + &submission_outcome, + submission_estimated_cost, + operation_estimate, + ) { + Ok(gas_used_by_operation) => gas_used_by_operation, + Err(e) => { + warn!(error = %e, "Error when calculating gas used by operation, falling back to charging the full cost of the tx. 
Are gas estimates enabled for this chain?"); + submission_outcome.gas_used + } + }; + let operation_outcome = TxOutcome { + gas_used: gas_used_by_operation, + ..submission_outcome + }; + // record it in the db, to subtract from the sender's igp allowance + if let Err(e) = self + .ctx + .origin_gas_payment_enforcer + .record_tx_outcome(&self.message, operation_outcome.clone()) + { + error!(error=?e, "Error when recording tx outcome"); + } + // set the outcome in `Self` as well, for later logging + self.set_submission_outcome(operation_outcome); + debug!( + actual_gas_for_message = ?gas_used_by_operation, + message_gas_estimate = ?operation_estimate, + submission_gas_estimate = ?submission_estimated_cost, + message = ?self.message, + "Gas used by message submission" + ); + } + fn next_attempt_after(&self) -> Option { self.next_attempt_after } @@ -343,7 +382,6 @@ impl PendingOperation for PendingMessage { self.reset_attempts(); } - #[cfg(test)] fn set_retries(&mut self, retries: u32) { self.set_retries(retries); } diff --git a/rust/agents/relayer/src/msg/processor.rs b/rust/agents/relayer/src/msg/processor.rs index 166ee6561..1c81c3017 100644 --- a/rust/agents/relayer/src/msg/processor.rs +++ b/rust/agents/relayer/src/msg/processor.rs @@ -13,12 +13,12 @@ use hyperlane_base::{ db::{HyperlaneRocksDB, ProcessMessage}, CoreMetrics, }; -use hyperlane_core::{HyperlaneDomain, HyperlaneMessage}; +use hyperlane_core::{HyperlaneDomain, HyperlaneMessage, QueueOperation}; use prometheus::IntGauge; use tokio::sync::mpsc::UnboundedSender; use tracing::{debug, instrument, trace}; -use super::{metadata::AppContextClassifier, op_queue::QueueOperation, pending_message::*}; +use super::{metadata::AppContextClassifier, pending_message::*}; use crate::{processor::ProcessorExt, settings::matching_list::MatchingList}; /// Finds unprocessed messages from an origin and submits then through a channel @@ -138,7 +138,10 @@ impl DirectionalNonceIterator { #[instrument] fn iterate(&mut self) { match self.direction { - NonceDirection::High => self.nonce = self.nonce.map(|n| n.saturating_add(1)), + NonceDirection::High => { + self.nonce = self.nonce.map(|n| n.saturating_add(1)); + debug!(?self, "Iterating high nonce"); + } NonceDirection::Low => { if let Some(nonce) = self.nonce { // once the message with nonce zero is processed, we should stop going backwards @@ -155,6 +158,7 @@ impl DirectionalNonceIterator { if let Some(message) = self.indexed_message_with_nonce()? { Self::update_max_nonce_gauge(&message, metrics); if !self.is_message_processed()? { + debug!(?message, iterator=?self, "Found processable message"); return Ok(MessageStatus::Processable(message)); } else { return Ok(MessageStatus::Processed); @@ -235,7 +239,11 @@ impl ProcessorExt for MessageProcessor { // nonce. // Scan until we find next nonce without delivery confirmation. if let Some(msg) = self.try_get_unprocessed_message().await? { - debug!(?msg, "Processor working on message"); + debug!( + ?msg, + cursor = ?self.nonce_iterator, + "Processor working on message" + ); let destination = msg.destination; // Skip if not whitelisted. 
diff --git a/rust/agents/relayer/src/relayer.rs b/rust/agents/relayer/src/relayer.rs index 0496e38ca..4206c0584 100644 --- a/rust/agents/relayer/src/relayer.rs +++ b/rust/agents/relayer/src/relayer.rs @@ -13,13 +13,15 @@ use hyperlane_base::{ metrics::{AgentMetrics, MetricsUpdater}, settings::ChainConf, BaseAgent, ChainMetrics, ContractSyncMetrics, ContractSyncer, CoreMetrics, HyperlaneAgentCore, + SyncOptions, }; use hyperlane_core::{ - HyperlaneDomain, HyperlaneMessage, InterchainGasPayment, MerkleTreeInsertion, MpmcChannel, - MpmcReceiver, U256, + HyperlaneDomain, HyperlaneMessage, InterchainGasPayment, MerkleTreeInsertion, QueueOperation, + H512, U256, }; use tokio::{ sync::{ + broadcast::{Receiver, Sender}, mpsc::{self, UnboundedReceiver, UnboundedSender}, RwLock, }, @@ -33,7 +35,6 @@ use crate::{ msg::{ gas_payment::GasPaymentEnforcer, metadata::{BaseMetadataBuilder, IsmAwareAppContextClassifier}, - op_queue::QueueOperation, op_submitter::{SerialSubmitter, SerialSubmitterMetrics}, pending_message::{MessageContext, MessageSubmissionMetrics}, processor::{MessageProcessor, MessageProcessorMetrics}, @@ -134,7 +135,7 @@ impl BaseAgent for Relayer { let contract_sync_metrics = Arc::new(ContractSyncMetrics::new(&core_metrics)); - let message_syncs = settings + let message_syncs: HashMap<_, Arc>> = settings .contract_syncs::( settings.origin_chains.iter(), &core_metrics, @@ -305,8 +306,8 @@ impl BaseAgent for Relayer { } // run server - let mpmc_channel = MpmcChannel::::new(ENDPOINT_MESSAGES_QUEUE_SIZE); - let custom_routes = relayer_server::routes(mpmc_channel.sender()); + let sender = Sender::::new(ENDPOINT_MESSAGES_QUEUE_SIZE); + let custom_routes = relayer_server::routes(sender.clone()); let server = self .core @@ -328,7 +329,7 @@ impl BaseAgent for Relayer { self.run_destination_submitter( dest_domain, receive_channel, - mpmc_channel.receiver(), + sender.clone(), // Default to submitting one message at a time if there is no batch config self.core.settings.chains[dest_domain.name()] .connection @@ -352,14 +353,26 @@ impl BaseAgent for Relayer { } for origin in &self.origin_chains { + let maybe_broadcaster = self + .message_syncs + .get(origin) + .and_then(|sync| sync.get_broadcaster()); tasks.push(self.run_message_sync(origin, task_monitor.clone()).await); tasks.push( - self.run_interchain_gas_payment_sync(origin, task_monitor.clone()) - .await, + self.run_interchain_gas_payment_sync( + origin, + maybe_broadcaster.clone().map(|b| b.subscribe()), + task_monitor.clone(), + ) + .await, ); tasks.push( - self.run_merkle_tree_hook_syncs(origin, task_monitor.clone()) - .await, + self.run_merkle_tree_hook_syncs( + origin, + maybe_broadcaster.map(|b| b.subscribe()), + task_monitor.clone(), + ) + .await, ); } @@ -394,7 +407,7 @@ impl Relayer { tokio::spawn(TaskMonitor::instrument(&task_monitor, async move { contract_sync .clone() - .sync("dispatched_messages", cursor) + .sync("dispatched_messages", cursor.into()) .await })) .instrument(info_span!("MessageSync")) @@ -403,6 +416,7 @@ impl Relayer { async fn run_interchain_gas_payment_sync( &self, origin: &HyperlaneDomain, + tx_id_receiver: Option>, task_monitor: TaskMonitor, ) -> Instrumented> { let index_settings = self.as_ref().settings.chains[origin.name()].index_settings(); @@ -413,7 +427,13 @@ impl Relayer { .clone(); let cursor = contract_sync.cursor(index_settings).await; tokio::spawn(TaskMonitor::instrument(&task_monitor, async move { - contract_sync.clone().sync("gas_payments", cursor).await + contract_sync + .clone() + .sync( + 
"gas_payments", + SyncOptions::new(Some(cursor), tx_id_receiver), + ) + .await })) .instrument(info_span!("IgpSync")) } @@ -421,13 +441,20 @@ impl Relayer { async fn run_merkle_tree_hook_syncs( &self, origin: &HyperlaneDomain, + tx_id_receiver: Option>, task_monitor: TaskMonitor, ) -> Instrumented> { let index_settings = self.as_ref().settings.chains[origin.name()].index.clone(); let contract_sync = self.merkle_tree_hook_syncs.get(origin).unwrap().clone(); let cursor = contract_sync.cursor(index_settings).await; tokio::spawn(TaskMonitor::instrument(&task_monitor, async move { - contract_sync.clone().sync("merkle_tree_hook", cursor).await + contract_sync + .clone() + .sync( + "merkle_tree_hook", + SyncOptions::new(Some(cursor), tx_id_receiver), + ) + .await })) .instrument(info_span!("MerkleTreeHookSync")) } @@ -498,7 +525,7 @@ impl Relayer { &self, destination: &HyperlaneDomain, receiver: UnboundedReceiver, - retry_receiver_channel: MpmcReceiver, + retry_receiver_channel: Sender, batch_size: u32, task_monitor: TaskMonitor, ) -> Instrumented> { diff --git a/rust/agents/relayer/src/server.rs b/rust/agents/relayer/src/server.rs index 9f6936a22..264ef0380 100644 --- a/rust/agents/relayer/src/server.rs +++ b/rust/agents/relayer/src/server.rs @@ -3,13 +3,11 @@ use axum::{ routing, Router, }; use derive_new::new; -use hyperlane_core::{ChainCommunicationError, H256}; +use hyperlane_core::{ChainCommunicationError, QueueOperation, H256}; use serde::Deserialize; use std::str::FromStr; use tokio::sync::broadcast::Sender; -use crate::msg::op_queue::QueueOperation; - const MESSAGE_RETRY_API_BASE: &str = "/message_retry"; pub const ENDPOINT_MESSAGES_QUEUE_SIZE: usize = 1_000; @@ -109,12 +107,12 @@ mod tests { use super::*; use axum::http::StatusCode; use ethers::utils::hex::ToHex; - use hyperlane_core::{MpmcChannel, MpmcReceiver}; use std::net::SocketAddr; + use tokio::sync::broadcast::{Receiver, Sender}; - fn setup_test_server() -> (SocketAddr, MpmcReceiver) { - let mpmc_channel = MpmcChannel::::new(ENDPOINT_MESSAGES_QUEUE_SIZE); - let message_retry_api = MessageRetryApi::new(mpmc_channel.sender()); + fn setup_test_server() -> (SocketAddr, Receiver) { + let broadcast_tx = Sender::::new(ENDPOINT_MESSAGES_QUEUE_SIZE); + let message_retry_api = MessageRetryApi::new(broadcast_tx.clone()); let (path, retry_router) = message_retry_api.get_route(); let app = Router::new().nest(path, retry_router); @@ -124,7 +122,7 @@ mod tests { let addr = server.local_addr(); tokio::spawn(server); - (addr, mpmc_channel.receiver()) + (addr, broadcast_tx.subscribe()) } #[tokio::test] @@ -148,7 +146,7 @@ mod tests { assert_eq!(response.status(), StatusCode::OK); assert_eq!( - rx.receiver.try_recv().unwrap(), + rx.try_recv().unwrap(), MessageRetryRequest::MessageId(message_id) ); } @@ -172,7 +170,7 @@ mod tests { assert_eq!(response.status(), StatusCode::OK); assert_eq!( - rx.receiver.try_recv().unwrap(), + rx.try_recv().unwrap(), MessageRetryRequest::DestinationDomain(destination_domain) ); } diff --git a/rust/agents/scraper/src/agent.rs b/rust/agents/scraper/src/agent.rs index d71343281..f33f00556 100644 --- a/rust/agents/scraper/src/agent.rs +++ b/rust/agents/scraper/src/agent.rs @@ -5,10 +5,13 @@ use derive_more::AsRef; use futures::future::try_join_all; use hyperlane_base::{ metrics::AgentMetrics, settings::IndexSettings, BaseAgent, ChainMetrics, ContractSyncMetrics, - ContractSyncer, CoreMetrics, HyperlaneAgentCore, MetricsUpdater, + ContractSyncer, CoreMetrics, HyperlaneAgentCore, MetricsUpdater, SyncOptions, +}; +use 
hyperlane_core::{Delivery, HyperlaneDomain, HyperlaneMessage, InterchainGasPayment, H512}; +use tokio::{ + sync::broadcast::{Receiver, Sender}, + task::JoinHandle, }; -use hyperlane_core::{Delivery, HyperlaneDomain, HyperlaneMessage, InterchainGasPayment}; -use tokio::task::JoinHandle; use tracing::{info_span, instrument::Instrumented, trace, Instrument}; use crate::{chain_scraper::HyperlaneSqlDb, db::ScraperDb, settings::ScraperSettings}; @@ -135,16 +138,16 @@ impl Scraper { let domain = scraper.domain.clone(); let mut tasks = Vec::with_capacity(2); - tasks.push( - self.build_message_indexer( + let (message_indexer, maybe_broadcaster) = self + .build_message_indexer( domain.clone(), self.core_metrics.clone(), self.contract_sync_metrics.clone(), db.clone(), index_settings.clone(), ) - .await, - ); + .await; + tasks.push(message_indexer); tasks.push( self.build_delivery_indexer( domain.clone(), @@ -152,6 +155,7 @@ impl Scraper { self.contract_sync_metrics.clone(), db.clone(), index_settings.clone(), + maybe_broadcaster.clone().map(|b| b.subscribe()), ) .await, ); @@ -162,6 +166,7 @@ impl Scraper { self.contract_sync_metrics.clone(), db, index_settings.clone(), + maybe_broadcaster.map(|b| b.subscribe()), ) .await, ); @@ -182,7 +187,7 @@ impl Scraper { contract_sync_metrics: Arc, db: HyperlaneSqlDb, index_settings: IndexSettings, - ) -> Instrumented> { + ) -> (Instrumented>, Option>) { let sync = self .as_ref() .settings @@ -195,9 +200,12 @@ impl Scraper { .await .unwrap(); let cursor = sync.cursor(index_settings.clone()).await; - tokio::spawn(async move { sync.sync("message_dispatch", cursor).await }).instrument( - info_span!("ChainContractSync", chain=%domain.name(), event="message_dispatch"), - ) + let maybe_broadcaser = sync.get_broadcaster(); + let task = tokio::spawn(async move { sync.sync("message_dispatch", cursor.into()).await }) + .instrument( + info_span!("ChainContractSync", chain=%domain.name(), event="message_dispatch"), + ); + (task, maybe_broadcaser) } async fn build_delivery_indexer( @@ -207,6 +215,7 @@ impl Scraper { contract_sync_metrics: Arc, db: HyperlaneSqlDb, index_settings: IndexSettings, + tx_id_receiver: Option>, ) -> Instrumented> { let sync = self .as_ref() @@ -222,8 +231,11 @@ impl Scraper { let label = "message_delivery"; let cursor = sync.cursor(index_settings.clone()).await; - tokio::spawn(async move { sync.sync(label, cursor).await }) - .instrument(info_span!("ChainContractSync", chain=%domain.name(), event=label)) + tokio::spawn(async move { + sync.sync(label, SyncOptions::new(Some(cursor), tx_id_receiver)) + .await + }) + .instrument(info_span!("ChainContractSync", chain=%domain.name(), event=label)) } async fn build_interchain_gas_payment_indexer( @@ -233,6 +245,7 @@ impl Scraper { contract_sync_metrics: Arc, db: HyperlaneSqlDb, index_settings: IndexSettings, + tx_id_receiver: Option>, ) -> Instrumented> { let sync = self .as_ref() @@ -248,7 +261,10 @@ impl Scraper { let label = "gas_payment"; let cursor = sync.cursor(index_settings.clone()).await; - tokio::spawn(async move { sync.sync(label, cursor).await }) - .instrument(info_span!("ChainContractSync", chain=%domain.name(), event=label)) + tokio::spawn(async move { + sync.sync(label, SyncOptions::new(Some(cursor), tx_id_receiver)) + .await + }) + .instrument(info_span!("ChainContractSync", chain=%domain.name(), event=label)) } } diff --git a/rust/agents/validator/src/validator.rs b/rust/agents/validator/src/validator.rs index 043ac9249..23e96aeb5 100644 --- a/rust/agents/validator/src/validator.rs +++ 
b/rust/agents/validator/src/validator.rs @@ -210,7 +210,10 @@ impl Validator { let contract_sync = self.merkle_tree_hook_sync.clone(); let cursor = contract_sync.cursor(index_settings).await; tokio::spawn(async move { - contract_sync.clone().sync("merkle_tree_hook", cursor).await; + contract_sync + .clone() + .sync("merkle_tree_hook", cursor.into()) + .await; }) .instrument(info_span!("MerkleTreeHookSyncer")) } diff --git a/rust/chains/hyperlane-cosmos/src/interchain_gas.rs b/rust/chains/hyperlane-cosmos/src/interchain_gas.rs index 4ba2ca87a..4444a56ea 100644 --- a/rust/chains/hyperlane-cosmos/src/interchain_gas.rs +++ b/rust/chains/hyperlane-cosmos/src/interchain_gas.rs @@ -202,7 +202,7 @@ impl CosmosInterchainGasPaymasterIndexer { #[async_trait] impl Indexer for CosmosInterchainGasPaymasterIndexer { - async fn fetch_logs( + async fn fetch_logs_in_range( &self, range: RangeInclusive, ) -> ChainResult, LogMeta)>> { diff --git a/rust/chains/hyperlane-cosmos/src/mailbox.rs b/rust/chains/hyperlane-cosmos/src/mailbox.rs index 7f686cb85..833b92b89 100644 --- a/rust/chains/hyperlane-cosmos/src/mailbox.rs +++ b/rust/chains/hyperlane-cosmos/src/mailbox.rs @@ -350,7 +350,7 @@ impl CosmosMailboxIndexer { #[async_trait] impl Indexer for CosmosMailboxIndexer { - async fn fetch_logs( + async fn fetch_logs_in_range( &self, range: RangeInclusive, ) -> ChainResult, LogMeta)>> { @@ -397,7 +397,7 @@ impl Indexer for CosmosMailboxIndexer { #[async_trait] impl Indexer for CosmosMailboxIndexer { - async fn fetch_logs( + async fn fetch_logs_in_range( &self, range: RangeInclusive, ) -> ChainResult, LogMeta)>> { diff --git a/rust/chains/hyperlane-cosmos/src/merkle_tree_hook.rs b/rust/chains/hyperlane-cosmos/src/merkle_tree_hook.rs index c8e798096..54acdf80f 100644 --- a/rust/chains/hyperlane-cosmos/src/merkle_tree_hook.rs +++ b/rust/chains/hyperlane-cosmos/src/merkle_tree_hook.rs @@ -283,7 +283,7 @@ impl CosmosMerkleTreeHookIndexer { #[async_trait] impl Indexer for CosmosMerkleTreeHookIndexer { /// Fetch list of logs between `range` of blocks - async fn fetch_logs( + async fn fetch_logs_in_range( &self, range: RangeInclusive, ) -> ChainResult, LogMeta)>> { diff --git a/rust/chains/hyperlane-ethereum/src/contracts/interchain_gas.rs b/rust/chains/hyperlane-ethereum/src/contracts/interchain_gas.rs index 8ed514c83..76345ec8f 100644 --- a/rust/chains/hyperlane-ethereum/src/contracts/interchain_gas.rs +++ b/rust/chains/hyperlane-ethereum/src/contracts/interchain_gas.rs @@ -10,12 +10,14 @@ use ethers::prelude::Middleware; use hyperlane_core::{ ChainCommunicationError, ChainResult, ContractLocator, HyperlaneAbi, HyperlaneChain, HyperlaneContract, HyperlaneDomain, HyperlaneProvider, Indexed, Indexer, - InterchainGasPaymaster, InterchainGasPayment, LogMeta, SequenceAwareIndexer, H160, H256, + InterchainGasPaymaster, InterchainGasPayment, LogMeta, SequenceAwareIndexer, H160, H256, H512, }; use tracing::instrument; +use super::utils::fetch_raw_logs_and_log_meta; use crate::interfaces::i_interchain_gas_paymaster::{ - IInterchainGasPaymaster as EthereumInterchainGasPaymasterInternal, IINTERCHAINGASPAYMASTER_ABI, + GasPaymentFilter, IInterchainGasPaymaster as EthereumInterchainGasPaymasterInternal, + IINTERCHAINGASPAYMASTER_ABI, }; use crate::{BuildableWithProvider, ConnectionConf, EthereumProvider}; @@ -86,7 +88,7 @@ where { /// Note: This call may return duplicates depending on the provider used #[instrument(err, skip(self))] - async fn fetch_logs( + async fn fetch_logs_in_range( &self, range: RangeInclusive, ) -> 
ChainResult, LogMeta)>> { @@ -124,6 +126,32 @@ where .as_u32() .saturating_sub(self.reorg_period)) } + + async fn fetch_logs_by_tx_hash( + &self, + tx_hash: H512, + ) -> ChainResult, LogMeta)>> { + let logs = fetch_raw_logs_and_log_meta::( + tx_hash, + self.provider.clone(), + self.contract.address(), + ) + .await? + .into_iter() + .map(|(log, log_meta)| { + ( + Indexed::new(InterchainGasPayment { + message_id: H256::from(log.message_id), + destination: log.destination_domain, + payment: log.payment.into(), + gas_amount: log.gas_amount.into(), + }), + log_meta, + ) + }) + .collect(); + Ok(logs) + } } #[async_trait] diff --git a/rust/chains/hyperlane-ethereum/src/contracts/mailbox.rs b/rust/chains/hyperlane-ethereum/src/contracts/mailbox.rs index d70c2bfc7..37933f5f4 100644 --- a/rust/chains/hyperlane-ethereum/src/contracts/mailbox.rs +++ b/rust/chains/hyperlane-ethereum/src/contracts/mailbox.rs @@ -11,6 +11,7 @@ use ethers::abi::{AbiEncode, Detokenize}; use ethers::prelude::Middleware; use ethers_contract::builders::ContractCall; use futures_util::future::join_all; +use hyperlane_core::H512; use tracing::instrument; use hyperlane_core::{ @@ -25,10 +26,12 @@ use crate::interfaces::arbitrum_node_interface::ArbitrumNodeInterface; use crate::interfaces::i_mailbox::{ IMailbox as EthereumMailboxInternal, ProcessCall, IMAILBOX_ABI, }; +use crate::interfaces::mailbox::DispatchFilter; use crate::tx::{call_with_lag, fill_tx_gas_params, report_tx}; use crate::{BuildableWithProvider, ConnectionConf, EthereumProvider, TransactionOverrides}; use super::multicall::{self, build_multicall}; +use super::utils::fetch_raw_logs_and_log_meta; impl std::fmt::Display for EthereumMailboxInternal where @@ -134,7 +137,7 @@ where /// Note: This call may return duplicates depending on the provider used #[instrument(err, skip(self))] - async fn fetch_logs( + async fn fetch_logs_in_range( &self, range: RangeInclusive, ) -> ChainResult, LogMeta)>> { @@ -157,6 +160,27 @@ where events.sort_by(|a, b| a.0.inner().nonce.cmp(&b.0.inner().nonce)); Ok(events) } + + async fn fetch_logs_by_tx_hash( + &self, + tx_hash: H512, + ) -> ChainResult, LogMeta)>> { + let logs = fetch_raw_logs_and_log_meta::( + tx_hash, + self.provider.clone(), + self.contract.address(), + ) + .await? 
+ .into_iter() + .map(|(log, log_meta)| { + ( + HyperlaneMessage::from(log.message.to_vec()).into(), + log_meta, + ) + }) + .collect(); + Ok(logs) + } } #[async_trait] @@ -183,7 +207,7 @@ where /// Note: This call may return duplicates depending on the provider used #[instrument(err, skip(self))] - async fn fetch_logs( + async fn fetch_logs_in_range( &self, range: RangeInclusive, ) -> ChainResult, LogMeta)>> { diff --git a/rust/chains/hyperlane-ethereum/src/contracts/merkle_tree_hook.rs b/rust/chains/hyperlane-ethereum/src/contracts/merkle_tree_hook.rs index a94ceff32..5836838ef 100644 --- a/rust/chains/hyperlane-ethereum/src/contracts/merkle_tree_hook.rs +++ b/rust/chains/hyperlane-ethereum/src/contracts/merkle_tree_hook.rs @@ -11,13 +11,17 @@ use tracing::instrument; use hyperlane_core::{ ChainCommunicationError, ChainResult, Checkpoint, ContractLocator, HyperlaneChain, HyperlaneContract, HyperlaneDomain, HyperlaneProvider, Indexed, Indexer, LogMeta, - MerkleTreeHook, MerkleTreeInsertion, SequenceAwareIndexer, H256, + MerkleTreeHook, MerkleTreeInsertion, SequenceAwareIndexer, H256, H512, }; -use crate::interfaces::merkle_tree_hook::{MerkleTreeHook as MerkleTreeHookContract, Tree}; +use crate::interfaces::merkle_tree_hook::{ + InsertedIntoTreeFilter, MerkleTreeHook as MerkleTreeHookContract, Tree, +}; use crate::tx::call_with_lag; use crate::{BuildableWithProvider, ConnectionConf, EthereumProvider}; +use super::utils::fetch_raw_logs_and_log_meta; + // We don't need the reverse of this impl, so it's ok to disable the clippy lint #[allow(clippy::from_over_into)] impl Into for Tree { @@ -108,7 +112,7 @@ where { /// Note: This call may return duplicates depending on the provider used #[instrument(err, skip(self))] - async fn fetch_logs( + async fn fetch_logs_in_range( &self, range: RangeInclusive, ) -> ChainResult, LogMeta)>> { @@ -142,6 +146,27 @@ where .as_u32() .saturating_sub(self.reorg_period)) } + + async fn fetch_logs_by_tx_hash( + &self, + tx_hash: H512, + ) -> ChainResult, LogMeta)>> { + let logs = fetch_raw_logs_and_log_meta::( + tx_hash, + self.provider.clone(), + self.contract.address(), + ) + .await? 
+ .into_iter() + .map(|(log, log_meta)| { + ( + MerkleTreeInsertion::new(log.index, H256::from(log.message_id)).into(), + log_meta, + ) + }) + .collect(); + Ok(logs) + } } #[async_trait] diff --git a/rust/chains/hyperlane-ethereum/src/contracts/mod.rs b/rust/chains/hyperlane-ethereum/src/contracts/mod.rs index 32ad5b953..1a39fae07 100644 --- a/rust/chains/hyperlane-ethereum/src/contracts/mod.rs +++ b/rust/chains/hyperlane-ethereum/src/contracts/mod.rs @@ -1,11 +1,8 @@ pub use {interchain_gas::*, mailbox::*, merkle_tree_hook::*, validator_announce::*}; mod interchain_gas; - mod mailbox; - mod merkle_tree_hook; - mod multicall; - +mod utils; mod validator_announce; diff --git a/rust/chains/hyperlane-ethereum/src/contracts/utils.rs b/rust/chains/hyperlane-ethereum/src/contracts/utils.rs new file mode 100644 index 000000000..bdf3e52f9 --- /dev/null +++ b/rust/chains/hyperlane-ethereum/src/contracts/utils.rs @@ -0,0 +1,48 @@ +use std::sync::Arc; + +use ethers::{ + abi::RawLog, + providers::Middleware, + types::{H160 as EthersH160, H256 as EthersH256}, +}; +use ethers_contract::{ContractError, EthEvent, LogMeta as EthersLogMeta}; +use hyperlane_core::{ChainResult, LogMeta, H512}; +use tracing::warn; + +pub async fn fetch_raw_logs_and_log_meta( + tx_hash: H512, + provider: Arc, + contract_address: EthersH160, +) -> ChainResult> +where + M: Middleware + 'static, +{ + let ethers_tx_hash: EthersH256 = tx_hash.into(); + let receipt = provider + .get_transaction_receipt(ethers_tx_hash) + .await + .map_err(|err| ContractError::::MiddlewareError(err))?; + let Some(receipt) = receipt else { + warn!(%tx_hash, "No receipt found for tx hash"); + return Ok(vec![]); + }; + + let logs: Vec<(T, LogMeta)> = receipt + .logs + .into_iter() + .filter_map(|log| { + // Filter out logs that aren't emitted by this contract + if log.address != contract_address { + return None; + } + let raw_log = RawLog { + topics: log.topics.clone(), + data: log.data.to_vec(), + }; + let log_meta: EthersLogMeta = (&log).into(); + let event_filter = T::decode_log(&raw_log).ok(); + event_filter.map(|log| (log, log_meta.into())) + }) + .collect(); + Ok(logs) +} diff --git a/rust/chains/hyperlane-fuel/src/interchain_gas.rs b/rust/chains/hyperlane-fuel/src/interchain_gas.rs index d969210a6..3385872c3 100644 --- a/rust/chains/hyperlane-fuel/src/interchain_gas.rs +++ b/rust/chains/hyperlane-fuel/src/interchain_gas.rs @@ -35,7 +35,7 @@ pub struct FuelInterchainGasPaymasterIndexer {} #[async_trait] impl Indexer for FuelInterchainGasPaymasterIndexer { - async fn fetch_logs( + async fn fetch_logs_in_range( &self, range: RangeInclusive, ) -> ChainResult, LogMeta)>> { diff --git a/rust/chains/hyperlane-fuel/src/mailbox.rs b/rust/chains/hyperlane-fuel/src/mailbox.rs index 035fe6e6d..5e8f0cf05 100644 --- a/rust/chains/hyperlane-fuel/src/mailbox.rs +++ b/rust/chains/hyperlane-fuel/src/mailbox.rs @@ -126,7 +126,7 @@ pub struct FuelMailboxIndexer {} #[async_trait] impl Indexer for FuelMailboxIndexer { - async fn fetch_logs( + async fn fetch_logs_in_range( &self, range: RangeInclusive, ) -> ChainResult, LogMeta)>> { @@ -140,7 +140,7 @@ impl Indexer for FuelMailboxIndexer { #[async_trait] impl Indexer for FuelMailboxIndexer { - async fn fetch_logs( + async fn fetch_logs_in_range( &self, range: RangeInclusive, ) -> ChainResult, LogMeta)>> { diff --git a/rust/chains/hyperlane-sealevel/src/interchain_gas.rs b/rust/chains/hyperlane-sealevel/src/interchain_gas.rs index 494583381..beebcb9db 100644 --- a/rust/chains/hyperlane-sealevel/src/interchain_gas.rs +++ 
b/rust/chains/hyperlane-sealevel/src/interchain_gas.rs @@ -246,7 +246,7 @@ impl SealevelInterchainGasPaymasterIndexer { #[async_trait] impl Indexer for SealevelInterchainGasPaymasterIndexer { #[instrument(err, skip(self))] - async fn fetch_logs( + async fn fetch_logs_in_range( &self, range: RangeInclusive, ) -> ChainResult, LogMeta)>> { diff --git a/rust/chains/hyperlane-sealevel/src/mailbox.rs b/rust/chains/hyperlane-sealevel/src/mailbox.rs index 3fc8393d1..beb4e86c3 100644 --- a/rust/chains/hyperlane-sealevel/src/mailbox.rs +++ b/rust/chains/hyperlane-sealevel/src/mailbox.rs @@ -646,7 +646,7 @@ impl SequenceAwareIndexer for SealevelMailboxIndexer { #[async_trait] impl Indexer for SealevelMailboxIndexer { - async fn fetch_logs( + async fn fetch_logs_in_range( &self, range: RangeInclusive, ) -> ChainResult, LogMeta)>> { @@ -670,7 +670,7 @@ impl Indexer for SealevelMailboxIndexer { #[async_trait] impl Indexer for SealevelMailboxIndexer { - async fn fetch_logs( + async fn fetch_logs_in_range( &self, _range: RangeInclusive, ) -> ChainResult, LogMeta)>> { diff --git a/rust/chains/hyperlane-sealevel/src/merkle_tree_hook.rs b/rust/chains/hyperlane-sealevel/src/merkle_tree_hook.rs index 9fe48053c..8c1132add 100644 --- a/rust/chains/hyperlane-sealevel/src/merkle_tree_hook.rs +++ b/rust/chains/hyperlane-sealevel/src/merkle_tree_hook.rs @@ -83,11 +83,11 @@ pub struct SealevelMerkleTreeHookIndexer(SealevelMailboxIndexer); #[async_trait] impl Indexer for SealevelMerkleTreeHookIndexer { - async fn fetch_logs( + async fn fetch_logs_in_range( &self, range: RangeInclusive, ) -> ChainResult, LogMeta)>> { - let messages = Indexer::::fetch_logs(&self.0, range).await?; + let messages = Indexer::::fetch_logs_in_range(&self.0, range).await?; let merkle_tree_insertions = messages .into_iter() .map(|(m, meta)| (message_to_merkle_tree_insertion(m.inner()).into(), meta)) diff --git a/rust/hyperlane-base/src/contract_sync/cursors/mod.rs b/rust/hyperlane-base/src/contract_sync/cursors/mod.rs index c7d7274d6..016454d04 100644 --- a/rust/hyperlane-base/src/contract_sync/cursors/mod.rs +++ b/rust/hyperlane-base/src/contract_sync/cursors/mod.rs @@ -13,8 +13,18 @@ pub enum CursorType { RateLimited, } +// H256 * 1M = 32MB per origin chain worst case +// With one such channel per origin chain. +const TX_ID_CHANNEL_CAPACITY: Option = Some(1_000_000); + pub trait Indexable { + /// Returns the configured cursor type of this type for the given domain (e.g. `SequenceAware` or `RateLimited`) fn indexing_cursor(domain: HyperlaneDomainProtocol) -> CursorType; + /// Indexing tasks may have channels open between them to share information that improves reliability (such as the txid where a message event was indexed). + /// By default this method returns None; it should return a channel capacity if this indexing task is to broadcast anything to other tasks.
+ fn broadcast_channel_size() -> Option { + None + } } impl Indexable for HyperlaneMessage { @@ -26,6 +36,11 @@ impl Indexable for HyperlaneMessage { HyperlaneDomainProtocol::Cosmos => CursorType::SequenceAware, } } + + // Only broadcast txids from the message indexing task + fn broadcast_channel_size() -> Option { + TX_ID_CHANNEL_CAPACITY + } } impl Indexable for InterchainGasPayment { diff --git a/rust/hyperlane-base/src/contract_sync/cursors/rate_limited.rs b/rust/hyperlane-base/src/contract_sync/cursors/rate_limited.rs index d85b3618f..242028acb 100644 --- a/rust/hyperlane-base/src/contract_sync/cursors/rate_limited.rs +++ b/rust/hyperlane-base/src/contract_sync/cursors/rate_limited.rs @@ -216,6 +216,16 @@ where } } +impl Debug for RateLimitedContractSyncCursor { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + f.debug_struct("RateLimitedContractSyncCursor") + .field("tip", &self.tip) + .field("last_tip_update", &self.last_tip_update) + .field("sync_state", &self.sync_state) + .finish() + } +} + #[cfg(test)] pub(crate) mod test { use super::*; @@ -234,7 +244,7 @@ pub(crate) mod test { #[async_trait] impl Indexer<()> for Indexer { - async fn fetch_logs(&self, range: RangeInclusive) -> ChainResult , LogMeta)>>; + async fn fetch_logs_in_range(&self, range: RangeInclusive) -> ChainResult , LogMeta)>>; async fn get_finalized_block_number(&self) -> ChainResult; } } diff --git a/rust/hyperlane-base/src/contract_sync/cursors/sequence_aware/backward.rs b/rust/hyperlane-base/src/contract_sync/cursors/sequence_aware/backward.rs index 3efd04a8d..2ef9e3f27 100644 --- a/rust/hyperlane-base/src/contract_sync/cursors/sequence_aware/backward.rs +++ b/rust/hyperlane-base/src/contract_sync/cursors/sequence_aware/backward.rs @@ -9,10 +9,13 @@ use hyperlane_core::{ HyperlaneSequenceAwareIndexerStoreReader, IndexMode, Indexed, LogMeta, SequenceIndexed, }; use itertools::Itertools; +use tokio::time::sleep; use tracing::{debug, instrument, warn}; use super::{LastIndexedSnapshot, TargetSnapshot}; +const MAX_BACKWARD_SYNC_BLOCKING_TIME: Duration = Duration::from_secs(5); + /// A sequence-aware cursor that syncs backward until there are no earlier logs to index. pub(crate) struct BackwardSequenceAwareSyncCursor { /// The max chunk size to query for logs. @@ -32,6 +35,17 @@ pub(crate) struct BackwardSequenceAwareSyncCursor { index_mode: IndexMode, } +impl Debug for BackwardSequenceAwareSyncCursor { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + f.debug_struct("BackwardSequenceAwareSyncCursor") + .field("chunk_size", &self.chunk_size) + .field("last_indexed_snapshot", &self.last_indexed_snapshot) + .field("current_indexing_snapshot", &self.current_indexing_snapshot) + .field("index_mode", &self.index_mode) + .finish() + } +} + impl BackwardSequenceAwareSyncCursor { #[instrument( skip(db), @@ -68,7 +82,11 @@ impl BackwardSequenceAwareSyncCursor { #[instrument(ret)] pub async fn get_next_range(&mut self) -> Result>> { // Skip any already indexed logs. - self.skip_indexed().await?; + tokio::select! { + res = self.skip_indexed() => res?, + // return early to allow the forward cursor to also make progress + _ = sleep(MAX_BACKWARD_SYNC_BLOCKING_TIME) => { return Ok(None); } + }; // If `self.current_indexing_snapshot` is None, we are synced and there are no more ranges to query. // Otherwise, we query the next range, searching for logs prior to and including the current indexing snapshot. 
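The `tokio::select!` added to `get_next_range` above bounds how long the backward cursor's `skip_indexed` step may block, so the forward cursor in the same sync loop keeps making progress. A minimal sketch of that pattern in isolation, assuming tokio (with the `time` and `macros` features) and an `eyre`-style error type; the `bounded` helper and `MAX_BLOCKING_TIME` constant are illustrative names, not part of this change:

use std::{future::Future, time::Duration};
use tokio::time::sleep;

// Illustrative bound, mirroring MAX_BACKWARD_SYNC_BLOCKING_TIME above.
const MAX_BLOCKING_TIME: Duration = Duration::from_secs(5);

/// Race a potentially slow step against a timeout; on timeout, return `None`
/// so the caller yields and a sibling task (e.g. the forward cursor) can run.
async fn bounded<F: Future<Output = eyre::Result<()>>>(slow_step: F) -> eyre::Result<Option<()>> {
    tokio::select! {
        res = slow_step => res?,
        _ = sleep(MAX_BLOCKING_TIME) => return Ok(None),
    };
    Ok(Some(()))
}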
diff --git a/rust/hyperlane-base/src/contract_sync/cursors/sequence_aware/forward.rs b/rust/hyperlane-base/src/contract_sync/cursors/sequence_aware/forward.rs index 374b4b797..5d22374b2 100644 --- a/rust/hyperlane-base/src/contract_sync/cursors/sequence_aware/forward.rs +++ b/rust/hyperlane-base/src/contract_sync/cursors/sequence_aware/forward.rs @@ -41,6 +41,18 @@ pub(crate) struct ForwardSequenceAwareSyncCursor { index_mode: IndexMode, } +impl Debug for ForwardSequenceAwareSyncCursor { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + f.debug_struct("ForwardSequenceAwareSyncCursor") + .field("chunk_size", &self.chunk_size) + .field("last_indexed_snapshot", &self.last_indexed_snapshot) + .field("current_indexing_snapshot", &self.current_indexing_snapshot) + .field("target_snapshot", &self.target_snapshot) + .field("index_mode", &self.index_mode) + .finish() + } +} + impl ForwardSequenceAwareSyncCursor { #[instrument( skip(db, latest_sequence_querier), @@ -493,7 +505,7 @@ pub(crate) mod test { where T: Sequenced + Debug, { - async fn fetch_logs( + async fn fetch_logs_in_range( &self, _range: RangeInclusive, ) -> ChainResult, LogMeta)>> { diff --git a/rust/hyperlane-base/src/contract_sync/cursors/sequence_aware/mod.rs b/rust/hyperlane-base/src/contract_sync/cursors/sequence_aware/mod.rs index d3abb4384..9303438b0 100644 --- a/rust/hyperlane-base/src/contract_sync/cursors/sequence_aware/mod.rs +++ b/rust/hyperlane-base/src/contract_sync/cursors/sequence_aware/mod.rs @@ -62,6 +62,7 @@ pub enum SyncDirection { /// A cursor that prefers to sync forward, but will sync backward if there is nothing to /// sync forward. +#[derive(Debug)] pub(crate) struct ForwardBackwardSequenceAwareSyncCursor { forward: ForwardSequenceAwareSyncCursor, backward: BackwardSequenceAwareSyncCursor, diff --git a/rust/hyperlane-base/src/contract_sync/mod.rs b/rust/hyperlane-base/src/contract_sync/mod.rs index 85bf36c1c..9c8ba75d6 100644 --- a/rust/hyperlane-base/src/contract_sync/mod.rs +++ b/rust/hyperlane-base/src/contract_sync/mod.rs @@ -10,9 +10,13 @@ use hyperlane_core::{ HyperlaneSequenceAwareIndexerStore, HyperlaneWatermarkedLogStore, Indexer, SequenceAwareIndexer, }; +use hyperlane_core::{Indexed, LogMeta, H512}; pub use metrics::ContractSyncMetrics; +use prometheus::core::{AtomicI64, AtomicU64, GenericCounter, GenericGauge}; +use tokio::sync::broadcast::error::TryRecvError; +use tokio::sync::broadcast::{Receiver as BroadcastReceiver, Sender as BroadcastSender}; use tokio::time::sleep; -use tracing::{debug, info, warn}; +use tracing::{debug, info, instrument, trace, warn}; use crate::settings::IndexSettings; @@ -27,17 +31,33 @@ const SLEEP_DURATION: Duration = Duration::from_secs(5); /// Entity that drives the syncing of an agent's db with on-chain data. /// Extracts chain-specific data (emitted checkpoints, messages, etc) from an /// `indexer` and fills the agent's db with this data. 
-#[derive(Debug, new, Clone)] -pub struct ContractSync, I: Indexer> { +#[derive(Debug)] +pub struct ContractSync, I: Indexer> { domain: HyperlaneDomain, db: D, indexer: I, metrics: ContractSyncMetrics, + broadcast_sender: Option>, _phantom: PhantomData, } +impl, I: Indexer> ContractSync { + /// Create a new ContractSync + pub fn new(domain: HyperlaneDomain, db: D, indexer: I, metrics: ContractSyncMetrics) -> Self { + Self { + domain, + db, + indexer, + metrics, + broadcast_sender: T::broadcast_channel_size().map(BroadcastSender::new), + _phantom: PhantomData, + } + } +} + impl ContractSync where + T: Indexable + Debug + Send + Sync + Clone + Eq + Hash + 'static, D: HyperlaneLogStore, I: Indexer + 'static, { @@ -45,82 +65,161 @@ where pub fn domain(&self) -> &HyperlaneDomain { &self.domain } -} -impl ContractSync -where - T: Debug + Send + Sync + Clone + Eq + Hash + 'static, - D: HyperlaneLogStore, - I: Indexer + 'static, -{ + fn get_broadcaster(&self) -> Option> { + self.broadcast_sender.clone() + } + /// Sync logs and write them to the LogStore - #[tracing::instrument(name = "ContractSync", fields(domain=self.domain().name()), skip(self, cursor))] - pub async fn sync(&self, label: &'static str, mut cursor: Box>) { + #[instrument(name = "ContractSync", fields(domain=self.domain().name()), skip(self, opts))] + pub async fn sync(&self, label: &'static str, mut opts: SyncOptions) { let chain_name = self.domain.as_ref(); - let indexed_height = self + let indexed_height_metric = self .metrics .indexed_height .with_label_values(&[label, chain_name]); - let stored_logs = self + let stored_logs_metric = self .metrics .stored_events .with_label_values(&[label, chain_name]); loop { - indexed_height.set(cursor.latest_queried_block() as i64); + if let Some(rx) = opts.tx_id_receiver.as_mut() { + self.fetch_logs_from_receiver(rx, &stored_logs_metric).await; + } + if let Some(cursor) = opts.cursor.as_mut() { + self.fetch_logs_with_cursor(cursor, &stored_logs_metric, &indexed_height_metric) + .await; + } + } + } - let (action, eta) = match cursor.next_action().await { - Ok((action, eta)) => (action, eta), - Err(err) => { - warn!(?err, "Error getting next action"); - sleep(SLEEP_DURATION).await; - continue; - } - }; - let sleep_duration = match action { - // Use `loop` but always break - this allows for returning a value - // from the loop (the sleep duration) - #[allow(clippy::never_loop)] - CursorAction::Query(range) => loop { - debug!(?range, "Looking for events in index range"); - - let logs = match self.indexer.fetch_logs(range.clone()).await { + #[instrument(fields(domain=self.domain().name()), skip(self, recv, stored_logs_metric))] + async fn fetch_logs_from_receiver( + &self, + recv: &mut BroadcastReceiver, + stored_logs_metric: &GenericCounter, + ) { + loop { + match recv.try_recv() { + Ok(tx_id) => { + let logs = match self.indexer.fetch_logs_by_tx_hash(tx_id).await { Ok(logs) => logs, Err(err) => { - warn!(?err, "Error fetching logs"); - break SLEEP_DURATION; + warn!(?err, ?tx_id, "Error fetching logs for tx id"); + continue; } }; - let deduped_logs = HashSet::<_>::from_iter(logs); - let logs = Vec::from_iter(deduped_logs); - + let logs = self.dedupe_and_store_logs(logs, stored_logs_metric).await; + let num_logs = logs.len() as u64; info!( - ?range, - num_logs = logs.len(), - estimated_time_to_sync = fmt_sync_time(eta), - "Found log(s) in index range" + num_logs, + ?tx_id, + sequences = ?logs.iter().map(|(log, _)| log.sequence).collect::>(), + "Found log(s) for tx id" ); - // Store deliveries - 
let stored = match self.db.store_logs(&logs).await { - Ok(stored) => stored, - Err(err) => { - warn!(?err, "Error storing logs in db"); - break SLEEP_DURATION; - } - }; - // Report amount of deliveries stored into db - stored_logs.inc_by(stored as u64); - // Update cursor - if let Err(err) = cursor.update(logs, range).await { - warn!(?err, "Error updating cursor"); + } + Err(TryRecvError::Empty) => { + trace!("No txid received"); + break; + } + Err(err) => { + warn!(?err, "Error receiving txid from channel"); + break; + } + } + } + } + + #[instrument(fields(domain=self.domain().name()), skip(self, stored_logs_metric, indexed_height_metric))] + async fn fetch_logs_with_cursor( + &self, + cursor: &mut Box>, + stored_logs_metric: &GenericCounter, + indexed_height_metric: &GenericGauge, + ) { + indexed_height_metric.set(cursor.latest_queried_block() as i64); + let (action, eta) = match cursor.next_action().await { + Ok((action, eta)) => (action, eta), + Err(err) => { + warn!(?err, "Error getting next action"); + sleep(SLEEP_DURATION).await; + return; + } + }; + let sleep_duration = match action { + // Use `loop` but always break - this allows for returning a value + // from the loop (the sleep duration) + #[allow(clippy::never_loop)] + CursorAction::Query(range) => loop { + debug!(?range, "Looking for events in index range"); + + let logs = match self.indexer.fetch_logs_in_range(range.clone()).await { + Ok(logs) => logs, + Err(err) => { + warn!(?err, ?range, "Error fetching logs in range"); break SLEEP_DURATION; - }; - break Default::default(); - }, - CursorAction::Sleep(duration) => duration, - }; - sleep(sleep_duration).await; + } + }; + + let logs = self.dedupe_and_store_logs(logs, stored_logs_metric).await; + let logs_found = logs.len() as u64; + info!( + ?range, + num_logs = logs_found, + estimated_time_to_sync = fmt_sync_time(eta), + sequences = ?logs.iter().map(|(log, _)| log.sequence).collect::>(), + cursor = ?cursor, + "Found log(s) in index range" + ); + + if let Some(tx) = self.broadcast_sender.as_ref() { + logs.iter().for_each(|(_, meta)| { + if let Err(err) = tx.send(meta.transaction_id) { + trace!(?err, "Error sending txid to receiver"); + } + }); + } + + // Update cursor + if let Err(err) = cursor.update(logs, range).await { + warn!(?err, "Error updating cursor"); + break SLEEP_DURATION; + }; + break Default::default(); + }, + CursorAction::Sleep(duration) => duration, + }; + sleep(sleep_duration).await + } + + async fn dedupe_and_store_logs( + &self, + logs: Vec<(Indexed, LogMeta)>, + stored_logs_metric: &GenericCounter, + ) -> Vec<(Indexed, LogMeta)> { + let deduped_logs = HashSet::<_>::from_iter(logs); + let logs = Vec::from_iter(deduped_logs); + + // Store deliveries + let stored = match self.db.store_logs(&logs).await { + Ok(stored) => stored, + Err(err) => { + warn!(?err, "Error storing logs in db"); + Default::default() + } + }; + if stored > 0 { + debug!( + domain = self.domain.as_ref(), + count = stored, + sequences = ?logs.iter().map(|(log, _)| log.sequence).collect::>(), + "Stored logs in db", + ); } + // Report amount of deliveries stored into db + stored_logs_metric.inc_by(stored as u64); + logs } } @@ -141,16 +240,38 @@ pub trait ContractSyncer: Send + Sync { async fn cursor(&self, index_settings: IndexSettings) -> Box>; /// Syncs events from the indexer using the provided cursor - async fn sync(&self, label: &'static str, cursor: Box>); + async fn sync(&self, label: &'static str, opts: SyncOptions); /// The domain of this syncer fn domain(&self) -> 
&HyperlaneDomain; + + /// If this syncer is also a broadcaster, return the channel to receive txids + fn get_broadcaster(&self) -> Option>; +} + +#[derive(new)] +/// Options for syncing events +pub struct SyncOptions { + // Keep as optional fields for now to run them simultaneously. + // Might want to refactor into an enum later, where we either index with a cursor or rely on receiving + // txids from a channel to other indexing tasks + cursor: Option>>, + tx_id_receiver: Option>, +} + +impl From>> for SyncOptions { + fn from(cursor: Box>) -> Self { + Self { + cursor: Some(cursor), + tx_id_receiver: None, + } + } } #[async_trait] impl ContractSyncer for WatermarkContractSync where - T: Debug + Send + Sync + Clone + Eq + Hash + 'static, + T: Indexable + Debug + Send + Sync + Clone + Eq + Hash + 'static, { /// Returns a new cursor to be used for syncing events from the indexer based on time async fn cursor(&self, index_settings: IndexSettings) -> Box> { @@ -172,13 +293,17 @@ where ) } - async fn sync(&self, label: &'static str, cursor: Box>) { - ContractSync::sync(self, label, cursor).await; + async fn sync(&self, label: &'static str, opts: SyncOptions) { + ContractSync::sync(self, label, opts).await } fn domain(&self) -> &HyperlaneDomain { ContractSync::domain(self) } + + fn get_broadcaster(&self) -> Option> { + ContractSync::get_broadcaster(self) + } } /// Log store for sequence aware cursors @@ -191,7 +316,7 @@ pub type SequencedDataContractSync = #[async_trait] impl ContractSyncer for SequencedDataContractSync where - T: Send + Sync + Debug + Clone + Eq + Hash + 'static, + T: Indexable + Send + Sync + Debug + Clone + Eq + Hash + 'static, { /// Returns a new cursor to be used for syncing dispatched messages from the indexer async fn cursor(&self, index_settings: IndexSettings) -> Box> { @@ -207,11 +332,15 @@ where ) } - async fn sync(&self, label: &'static str, cursor: Box>) { - ContractSync::sync(self, label, cursor).await; + async fn sync(&self, label: &'static str, opts: SyncOptions) { + ContractSync::sync(self, label, opts).await; } fn domain(&self) -> &HyperlaneDomain { ContractSync::domain(self) } + + fn get_broadcaster(&self) -> Option> { + ContractSync::get_broadcaster(self) + } } diff --git a/rust/hyperlane-base/src/db/rocks/hyperlane_db.rs b/rust/hyperlane-base/src/db/rocks/hyperlane_db.rs index 3d164ce26..b4323613a 100644 --- a/rust/hyperlane-base/src/db/rocks/hyperlane_db.rs +++ b/rust/hyperlane-base/src/db/rocks/hyperlane_db.rs @@ -242,10 +242,10 @@ impl HyperlaneRocksDB { &self, event: InterchainGasExpenditure, ) -> DbResult<()> { - let existing_payment = self.retrieve_gas_expenditure_by_message_id(event.message_id)?; - let total = existing_payment + event; + let existing_expenditure = self.retrieve_gas_expenditure_by_message_id(event.message_id)?; + let total = existing_expenditure + event; - debug!(?event, new_total_gas_payment=?total, "Storing gas payment"); + debug!(?event, new_total_gas_expenditure=?total, "Storing gas expenditure"); self.store_interchain_gas_expenditure_data_by_message_id( &total.message_id, &InterchainGasExpenditureData { diff --git a/rust/hyperlane-base/src/settings/base.rs b/rust/hyperlane-base/src/settings/base.rs index 59b8fa11a..6757a545e 100644 --- a/rust/hyperlane-base/src/settings/base.rs +++ b/rust/hyperlane-base/src/settings/base.rs @@ -160,7 +160,7 @@ impl Settings { db: Arc, ) -> eyre::Result>> where - T: Debug, + T: Indexable + Debug, SequenceIndexer: TryFromWithMetrics, D: HyperlaneLogStore + HyperlaneSequenceAwareIndexerStoreReader + 
'static, { @@ -184,7 +184,7 @@ impl Settings { db: Arc, ) -> eyre::Result>> where - T: Debug, + T: Indexable + Debug, SequenceIndexer: TryFromWithMetrics, D: HyperlaneLogStore + HyperlaneWatermarkedLogStore + 'static, { diff --git a/rust/hyperlane-core/Cargo.toml b/rust/hyperlane-core/Cargo.toml index 5f34bc209..21ee23235 100644 --- a/rust/hyperlane-core/Cargo.toml +++ b/rust/hyperlane-core/Cargo.toml @@ -49,7 +49,7 @@ uint.workspace = true tokio = { workspace = true, features = ["rt", "time"] } [features] -default = [] +default = ["strum"] float = [] test-utils = ["dep:config"] agent = ["ethers", "strum"] diff --git a/rust/hyperlane-core/src/chain.rs b/rust/hyperlane-core/src/chain.rs index 667d392ad..03f4dc44d 100644 --- a/rust/hyperlane-core/src/chain.rs +++ b/rust/hyperlane-core/src/chain.rs @@ -51,6 +51,7 @@ impl<'a> std::fmt::Display for ContractLocator<'a> { pub enum KnownHyperlaneDomain { Ethereum = 1, Sepolia = 11155111, + Holesky = 17000, Polygon = 137, @@ -82,6 +83,18 @@ pub enum KnownHyperlaneDomain { Injective = 6909546, InEvm = 2525, + Ancient8 = 888888888, + + Blast = 81457, + + Mode = 34443, + + Redstone = 690, + + Viction = 88, + + Zetachain = 7000, + PlumeTestnet = 161221135, // -- Local test chains -- @@ -215,10 +228,11 @@ impl KnownHyperlaneDomain { many_to_one!(match self { Mainnet: [ Ethereum, Avalanche, Arbitrum, Polygon, Optimism, BinanceSmartChain, Celo, - Moonbeam, Gnosis, MantaPacific, Neutron, Injective, InEvm + Moonbeam, Gnosis, MantaPacific, Neutron, Injective, InEvm, Ancient8, Blast, + Mode, Redstone, Viction, Zetachain ], Testnet: [ - Alfajores, MoonbaseAlpha, Sepolia, ScrollSepolia, Chiado, PlumeTestnet, Fuji, BinanceSmartChainTestnet + Alfajores, MoonbaseAlpha, Sepolia, ScrollSepolia, Chiado, PlumeTestnet, Fuji, BinanceSmartChainTestnet, Holesky ], LocalTestChain: [Test1, Test2, Test3, FuelTest1, SealevelTest1, SealevelTest2, CosmosTest99990, CosmosTest99991], }) @@ -229,10 +243,11 @@ impl KnownHyperlaneDomain { many_to_one!(match self { HyperlaneDomainProtocol::Ethereum: [ - Ethereum, Sepolia, Polygon, Avalanche, Fuji, Arbitrum, + Ethereum, Sepolia, Holesky, Polygon, Avalanche, Fuji, Arbitrum, Optimism, BinanceSmartChain, BinanceSmartChainTestnet, Celo, Gnosis, - Alfajores, Moonbeam, InEvm, MoonbaseAlpha, ScrollSepolia, - Chiado, MantaPacific, PlumeTestnet, Test1, Test2, Test3 + Alfajores, Moonbeam, InEvm, Ancient8, Blast, Mode, Redstone, Viction, + Zetachain, MoonbaseAlpha, ScrollSepolia, Chiado, MantaPacific, PlumeTestnet, + Test1, Test2, Test3 ], HyperlaneDomainProtocol::Fuel: [FuelTest1], HyperlaneDomainProtocol::Sealevel: [SealevelTest1, SealevelTest2], @@ -246,9 +261,10 @@ impl KnownHyperlaneDomain { many_to_one!(match self { HyperlaneDomainTechnicalStack::ArbitrumNitro: [Arbitrum, PlumeTestnet], HyperlaneDomainTechnicalStack::Other: [ - Ethereum, Sepolia, Polygon, Avalanche, Fuji, Optimism, + Ethereum, Sepolia, Holesky, Polygon, Avalanche, Fuji, Optimism, BinanceSmartChain, BinanceSmartChainTestnet, Celo, Gnosis, Alfajores, Moonbeam, MoonbaseAlpha, - ScrollSepolia, Chiado, MantaPacific, Neutron, Injective, InEvm, + ScrollSepolia, Chiado, MantaPacific, Neutron, Injective, InEvm, Ancient8, Blast, Mode, Redstone, + Viction, Zetachain, Test1, Test2, Test3, FuelTest1, SealevelTest1, SealevelTest2, CosmosTest99990, CosmosTest99991 ], }) diff --git a/rust/hyperlane-core/src/traits/cursor.rs b/rust/hyperlane-core/src/traits/cursor.rs index cfe92b8dc..b835b94df 100644 --- a/rust/hyperlane-core/src/traits/cursor.rs +++ 
b/rust/hyperlane-core/src/traits/cursor.rs @@ -1,4 +1,8 @@ -use std::{fmt, ops::RangeInclusive, time::Duration}; +use std::{ + fmt::{self, Debug}, + ops::RangeInclusive, + time::Duration, +}; use async_trait::async_trait; use auto_impl::auto_impl; @@ -9,7 +13,7 @@ use crate::{Indexed, LogMeta}; /// A cursor governs event indexing for a contract. #[async_trait] #[auto_impl(Box)] -pub trait ContractSyncCursor: Send + Sync + 'static { +pub trait ContractSyncCursor: Debug + Send + Sync + 'static { /// The next block range that should be queried. /// This method should be tolerant to being called multiple times in a row /// without any updates in between. diff --git a/rust/hyperlane-core/src/traits/indexer.rs b/rust/hyperlane-core/src/traits/indexer.rs index 3db7e4f57..1c05360ff 100644 --- a/rust/hyperlane-core/src/traits/indexer.rs +++ b/rust/hyperlane-core/src/traits/indexer.rs @@ -11,7 +11,7 @@ use async_trait::async_trait; use auto_impl::auto_impl; use serde::Deserialize; -use crate::{ChainResult, Indexed, LogMeta}; +use crate::{ChainResult, Indexed, LogMeta, H512}; /// Indexing mode. #[derive(Copy, Debug, Default, Deserialize, Clone)] @@ -29,13 +29,21 @@ pub enum IndexMode { #[auto_impl(&, Box, Arc,)] pub trait Indexer: Send + Sync + Debug { /// Fetch list of logs between blocks `from` and `to`, inclusive. - async fn fetch_logs( + async fn fetch_logs_in_range( &self, range: RangeInclusive, ) -> ChainResult, LogMeta)>>; /// Get the chain's latest block number that has reached finality async fn get_finalized_block_number(&self) -> ChainResult; + + /// Fetch list of logs emitted in a transaction with the given hash. + async fn fetch_logs_by_tx_hash( + &self, + _tx_hash: H512, + ) -> ChainResult, LogMeta)>> { + Ok(vec![]) + } } /// Interface for indexing data in sequence. diff --git a/rust/hyperlane-core/src/traits/mod.rs b/rust/hyperlane-core/src/traits/mod.rs index e85b04f4a..b168a1892 100644 --- a/rust/hyperlane-core/src/traits/mod.rs +++ b/rust/hyperlane-core/src/traits/mod.rs @@ -10,6 +10,7 @@ pub use interchain_security_module::*; pub use mailbox::*; pub use merkle_tree_hook::*; pub use multisig_ism::*; +pub use pending_operation::*; pub use provider::*; pub use routing_ism::*; pub use signing::*; @@ -29,6 +30,7 @@ mod interchain_security_module; mod mailbox; mod merkle_tree_hook; mod multisig_ism; +mod pending_operation; mod provider; mod routing_ism; mod signing; diff --git a/rust/agents/relayer/src/msg/pending_operation.rs b/rust/hyperlane-core/src/traits/pending_operation.rs similarity index 75% rename from rust/agents/relayer/src/msg/pending_operation.rs rename to rust/hyperlane-core/src/traits/pending_operation.rs index 206e062e2..c6d494467 100644 --- a/rust/agents/relayer/src/msg/pending_operation.rs +++ b/rust/hyperlane-core/src/traits/pending_operation.rs @@ -4,10 +4,16 @@ use std::{ time::{Duration, Instant}, }; +use crate::{ + ChainResult, FixedPointNumber, HyperlaneDomain, HyperlaneMessage, TryBatchAs, TxOutcome, H256, + U256, +}; use async_trait::async_trait; -use hyperlane_core::{HyperlaneDomain, HyperlaneMessage, TryBatchAs, TxOutcome, H256}; +use num::CheckedDiv; +use tracing::warn; -use super::op_queue::QueueOperation; +/// Boxed operation that can be stored in an operation queue +pub type QueueOperation = Box; /// A pending operation that will be run by the submitter and cause a /// transaction to be sent. 
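For context on the new `fetch_logs_by_tx_hash` method above: it ships with a provided body returning `Ok(vec![])`, so existing indexers compile unchanged and callers must treat an empty result as "no logs found for that transaction" rather than an error. A rough sketch of an indexer that simply relies on the default; the `NoopIndexer` type is hypothetical and generic bounds are abbreviated from the trait as shown above:

use std::ops::RangeInclusive;

use async_trait::async_trait;
use hyperlane_core::{ChainResult, Indexed, Indexer, LogMeta};

#[derive(Debug)]
struct NoopIndexer;

#[async_trait]
impl Indexer<u32> for NoopIndexer {
    async fn fetch_logs_in_range(
        &self,
        _range: RangeInclusive<u32>,
    ) -> ChainResult<Vec<(Indexed<u32>, LogMeta)>> {
        Ok(vec![])
    }

    async fn get_finalized_block_number(&self) -> ChainResult<u32> {
        Ok(0)
    }

    // `fetch_logs_by_tx_hash` is deliberately not overridden here; the provided
    // default (an empty vec) is what chains without tx-hash lookups fall back to.
}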
@@ -67,11 +73,21 @@ pub trait PendingOperation: Send + Sync + Debug + TryBatchAs { /// Set the outcome of the `submit` call fn set_submission_outcome(&mut self, outcome: TxOutcome); + /// Get the estimated cost of the `submit` call + fn get_tx_cost_estimate(&self) -> Option; + /// This will be called after the operation has been submitted and is /// responsible for checking if the operation has reached a point at /// which we consider it safe from reorgs. async fn confirm(&mut self) -> PendingOperationResult; + /// Record the outcome of the operation + fn set_operation_outcome( + &mut self, + submission_outcome: TxOutcome, + submission_estimated_cost: U256, + ); + /// Get the earliest instant at which this should next be attempted. /// /// This is only used for sorting, the functions are responsible for @@ -85,11 +101,41 @@ pub trait PendingOperation: Send + Sync + Debug + TryBatchAs { /// retried immediately. fn reset_attempts(&mut self); - #[cfg(test)] /// Set the number of times this operation has been retried. + #[cfg(any(test, feature = "test-utils"))] fn set_retries(&mut self, retries: u32); } +/// Utility fn to calculate the total estimated cost of an operation batch +pub fn total_estimated_cost(ops: &[Box]) -> U256 { + ops.iter() + .fold(U256::zero(), |acc, op| match op.get_tx_cost_estimate() { + Some(cost_estimate) => acc.saturating_add(cost_estimate), + None => { + warn!(operation=?op, "No cost estimate available for operation, defaulting to 0"); + acc + } + }) +} + +/// Calculate the gas used by an operation (either in a batch or single-submission), by looking at the total cost of the tx, +/// and the estimated cost of the operation compared to the sum of the estimates of all operations in the batch. +/// When using this for single-submission rather than a batch, +/// the `tx_estimated_cost` should be the same as the `operation_estimated_cost` +pub fn gas_used_by_operation( + tx_outcome: &TxOutcome, + tx_estimated_cost: U256, + operation_estimated_cost: U256, +) -> ChainResult { + let gas_used_by_tx = FixedPointNumber::try_from(tx_outcome.gas_used)?; + let operation_gas_estimate = FixedPointNumber::try_from(operation_estimated_cost)?; + let tx_gas_estimate = FixedPointNumber::try_from(tx_estimated_cost)?; + let gas_used_by_operation = (gas_used_by_tx * operation_gas_estimate) + .checked_div(&tx_gas_estimate) + .ok_or(eyre::eyre!("Division by zero"))?; + gas_used_by_operation.try_into() +} + impl Display for QueueOperation { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { write!( @@ -138,6 +184,7 @@ impl Ord for QueueOperation { } } +/// Possible outcomes of performing an action on a pending operation (such as `prepare`, `submit` or `confirm`). #[derive(Debug)] pub enum PendingOperationResult { /// Promote to the next step @@ -153,6 +200,7 @@ pub enum PendingOperationResult { } /// create a `op_try!` macro for the `on_retry` handler. +#[macro_export] macro_rules! make_op_try { ($on_retry:expr) => { /// Handle a result and either return early with retry or a critical failure on @@ -181,5 +229,3 @@ macro_rules! 
make_op_try { } }; } - -pub(super) use make_op_try; diff --git a/rust/hyperlane-core/src/types/channel.rs b/rust/hyperlane-core/src/types/channel.rs deleted file mode 100644 index 2a0bbb897..000000000 --- a/rust/hyperlane-core/src/types/channel.rs +++ /dev/null @@ -1,50 +0,0 @@ -use derive_new::new; -use tokio::sync::broadcast::{Receiver, Sender}; - -/// Multi-producer, multi-consumer channel -pub struct MpmcChannel { - sender: Sender, - receiver: MpmcReceiver, -} - -impl MpmcChannel { - /// Creates a new `MpmcChannel` with the specified capacity. - /// - /// # Arguments - /// - /// * `capacity` - The maximum number of messages that can be buffered in the channel. - pub fn new(capacity: usize) -> Self { - let (sender, receiver) = tokio::sync::broadcast::channel(capacity); - Self { - sender: sender.clone(), - receiver: MpmcReceiver::new(sender, receiver), - } - } - - /// Returns a clone of the sender end of the channel. - pub fn sender(&self) -> Sender { - self.sender.clone() - } - - /// Returns a clone of the receiver end of the channel. - pub fn receiver(&self) -> MpmcReceiver { - self.receiver.clone() - } -} - -/// Clonable receiving end of a multi-producer, multi-consumer channel -#[derive(Debug, new)] -pub struct MpmcReceiver { - sender: Sender, - /// The receiving end of the channel. - pub receiver: Receiver, -} - -impl Clone for MpmcReceiver { - fn clone(&self) -> Self { - Self { - sender: self.sender.clone(), - receiver: self.sender.subscribe(), - } - } -} diff --git a/rust/hyperlane-core/src/types/mod.rs b/rust/hyperlane-core/src/types/mod.rs index 59f20630b..c8b2ad346 100644 --- a/rust/hyperlane-core/src/types/mod.rs +++ b/rust/hyperlane-core/src/types/mod.rs @@ -8,8 +8,6 @@ pub use self::primitive_types::*; pub use ::primitive_types as ethers_core_types; pub use announcement::*; pub use chain_data::*; -#[cfg(feature = "async")] -pub use channel::*; pub use checkpoint::*; pub use indexing::*; pub use log_metadata::*; @@ -21,8 +19,6 @@ use crate::{Decode, Encode, HyperlaneProtocolError}; mod announcement; mod chain_data; -#[cfg(feature = "async")] -mod channel; mod checkpoint; mod indexing; mod log_metadata; diff --git a/rust/hyperlane-core/src/types/primitive_types.rs b/rust/hyperlane-core/src/types/primitive_types.rs index 2a3c53d40..c5636b3b9 100644 --- a/rust/hyperlane-core/src/types/primitive_types.rs +++ b/rust/hyperlane-core/src/types/primitive_types.rs @@ -3,11 +3,15 @@ #![allow(clippy::assign_op_pattern)] #![allow(clippy::reversed_empty_ranges)] -use std::{ops::Mul, str::FromStr}; +use std::{ + ops::{Div, Mul}, + str::FromStr, +}; use bigdecimal::{BigDecimal, RoundingMode}; use borsh::{BorshDeserialize, BorshSerialize}; use fixed_hash::impl_fixed_hash_conversions; +use num::CheckedDiv; use num_traits::Zero; use uint::construct_uint; @@ -421,6 +425,27 @@ where } } +impl Div for FixedPointNumber +where + T: Into, +{ + type Output = FixedPointNumber; + + fn div(self, rhs: T) -> Self::Output { + let rhs = rhs.into(); + Self(self.0 / rhs.0) + } +} + +impl CheckedDiv for FixedPointNumber { + fn checked_div(&self, v: &Self) -> Option { + if v.0.is_zero() { + return None; + } + Some(Self(self.0.clone() / v.0.clone())) + } +} + impl FromStr for FixedPointNumber { type Err = ChainCommunicationError; diff --git a/rust/utils/backtrace-oneline/src/lib.rs b/rust/utils/backtrace-oneline/src/lib.rs index 0c69ee374..61261f11d 100644 --- a/rust/utils/backtrace-oneline/src/lib.rs +++ b/rust/utils/backtrace-oneline/src/lib.rs @@ -118,7 +118,7 @@ impl BacktraceFrameFmt<'_, '_, '_> { 
symbol.name(), // TODO: this isn't great that we don't end up printing anything // with non-utf8 filenames. Thankfully almost everything is utf8 so - // this shouldn't be too too bad. + // this shouldn't be too bad. symbol .filename() .and_then(|p| Some(BytesOrWideString::Bytes(p.to_str()?.as_bytes()))), diff --git a/rust/utils/run-locally/Cargo.toml b/rust/utils/run-locally/Cargo.toml index 45c07d030..99b0e41c9 100644 --- a/rust/utils/run-locally/Cargo.toml +++ b/rust/utils/run-locally/Cargo.toml @@ -28,11 +28,13 @@ ethers-contract.workspace = true tokio.workspace = true maplit.workspace = true nix = { workspace = true, features = ["signal"], default-features = false } +once_cell.workspace = true tempfile.workspace = true ureq = { workspace = true, default-features = false } which.workspace = true macro_rules_attribute.workspace = true regex.workspace = true +relayer = { path = "../../agents/relayer"} hyperlane-cosmwasm-interface.workspace = true cosmwasm-schema.workspace = true diff --git a/rust/utils/run-locally/src/config.rs b/rust/utils/run-locally/src/config.rs index 7e1358dfd..476a10725 100644 --- a/rust/utils/run-locally/src/config.rs +++ b/rust/utils/run-locally/src/config.rs @@ -6,6 +6,7 @@ pub struct Config { pub ci_mode: bool, pub ci_mode_timeout: u64, pub kathy_messages: u64, + pub sealevel_enabled: bool, // TODO: Include count of sealevel messages in a field separate from `kathy_messages`? } @@ -26,6 +27,9 @@ impl Config { .map(|r| r.parse::().unwrap()); r.unwrap_or(16) }, + sealevel_enabled: env::var("SEALEVEL_ENABLED") + .map(|k| k.parse::().unwrap()) + .unwrap_or(true), }) } } diff --git a/rust/utils/run-locally/src/cosmos/cli.rs b/rust/utils/run-locally/src/cosmos/cli.rs index 4258f149c..934a3758a 100644 --- a/rust/utils/run-locally/src/cosmos/cli.rs +++ b/rust/utils/run-locally/src/cosmos/cli.rs @@ -152,7 +152,7 @@ impl OsmosisCLI { .arg("grpc.address", &endpoint.grpc_addr) // default is 0.0.0.0:9090 .arg("rpc.pprof_laddr", pprof_addr) // default is localhost:6060 .arg("log_level", "panic") - .spawn("COSMOS"); + .spawn("COSMOS", None); endpoint.wait_for_node(); diff --git a/rust/utils/run-locally/src/cosmos/mod.rs b/rust/utils/run-locally/src/cosmos/mod.rs index 1a3f1e7cd..48cc117e2 100644 --- a/rust/utils/run-locally/src/cosmos/mod.rs +++ b/rust/utils/run-locally/src/cosmos/mod.rs @@ -271,7 +271,7 @@ fn launch_cosmos_validator( .hyp_env("SIGNER_SIGNER_TYPE", "hexKey") .hyp_env("SIGNER_KEY", agent_config.signer.key) .hyp_env("TRACING_LEVEL", if debug { "debug" } else { "info" }) - .spawn("VAL"); + .spawn("VAL", None); validator } @@ -299,7 +299,7 @@ fn launch_cosmos_relayer( .hyp_env("TRACING_LEVEL", if debug { "debug" } else { "info" }) .hyp_env("GASPAYMENTENFORCEMENT", "[{\"type\": \"none\"}]") .hyp_env("METRICSPORT", metrics.to_string()) - .spawn("RLY"); + .spawn("RLY", None); relayer } diff --git a/rust/utils/run-locally/src/ethereum/mod.rs b/rust/utils/run-locally/src/ethereum/mod.rs index bebe06348..acdd3057d 100644 --- a/rust/utils/run-locally/src/ethereum/mod.rs +++ b/rust/utils/run-locally/src/ethereum/mod.rs @@ -36,7 +36,7 @@ pub fn start_anvil(config: Arc) -> AgentHandles { } log!("Launching anvil..."); let anvil_args = Program::new("anvil").flag("silent").filter_logs(|_| false); // for now do not keep any of the anvil logs - let anvil = anvil_args.spawn("ETH"); + let anvil = anvil_args.spawn("ETH", None); sleep(Duration::from_secs(10)); diff --git a/rust/utils/run-locally/src/invariants.rs b/rust/utils/run-locally/src/invariants.rs index 690021046..2191f2ac8 
100644 --- a/rust/utils/run-locally/src/invariants.rs +++ b/rust/utils/run-locally/src/invariants.rs @@ -1,14 +1,15 @@ -// use std::path::Path; - +use std::fs::File; use std::path::Path; use crate::config::Config; use crate::metrics::agent_balance_sum; +use crate::utils::get_matching_lines; use maplit::hashmap; +use relayer::GAS_EXPENDITURE_LOG_MESSAGE; use crate::logging::log; use crate::solana::solana_termination_invariants_met; -use crate::{fetch_metric, ZERO_MERKLE_INSERTION_KATHY_MESSAGES}; +use crate::{fetch_metric, AGENT_LOGGING_DIR, ZERO_MERKLE_INSERTION_KATHY_MESSAGES}; // This number should be even, so the messages can be split into two equal halves // sent before and after the relayer spins up, to avoid rounding errors. @@ -19,11 +20,16 @@ pub const SOL_MESSAGES_EXPECTED: u32 = 20; pub fn termination_invariants_met( config: &Config, starting_relayer_balance: f64, - solana_cli_tools_path: &Path, - solana_config_path: &Path, + solana_cli_tools_path: Option<&Path>, + solana_config_path: Option<&Path>, ) -> eyre::Result { let eth_messages_expected = (config.kathy_messages / 2) as u32 * 2; - let total_messages_expected = eth_messages_expected + SOL_MESSAGES_EXPECTED; + let sol_messages_expected = if config.sealevel_enabled { + SOL_MESSAGES_EXPECTED + } else { + 0 + }; + let total_messages_expected = eth_messages_expected + sol_messages_expected; let lengths = fetch_metric("9092", "hyperlane_submitter_queue_length", &hashmap! {})?; assert!(!lengths.is_empty(), "Could not find queue length metric"); @@ -55,6 +61,19 @@ pub fn termination_invariants_met( .iter() .sum::(); + let log_file_path = AGENT_LOGGING_DIR.join("RLY-output.log"); + let relayer_logfile = File::open(log_file_path)?; + let gas_expenditure_log_count = + get_matching_lines(&relayer_logfile, GAS_EXPENDITURE_LOG_MESSAGE) + .unwrap() + .len(); + + // Zero insertion messages don't reach `submit` stage where gas is spent, so we only expect these logs for the other messages. + assert_eq!( + gas_expenditure_log_count as u32, total_messages_expected, + "Didn't record gas payment for all delivered messages" + ); + let gas_payment_sealevel_events_count = fetch_metric( "9092", "hyperlane_contract_sync_stored_events", @@ -76,9 +95,13 @@ pub fn termination_invariants_met( return Ok(false); } - if !solana_termination_invariants_met(solana_cli_tools_path, solana_config_path) { - log!("Solana termination invariants not met"); - return Ok(false); + if let Some((solana_cli_tools_path, solana_config_path)) = + solana_cli_tools_path.zip(solana_config_path) + { + if !solana_termination_invariants_met(solana_cli_tools_path, solana_config_path) { + log!("Solana termination invariants not met"); + return Ok(false); + } } let dispatched_messages_scraped = fetch_metric( diff --git a/rust/utils/run-locally/src/main.rs b/rust/utils/run-locally/src/main.rs index a287b2bd1..1bf299075 100644 --- a/rust/utils/run-locally/src/main.rs +++ b/rust/utils/run-locally/src/main.rs @@ -11,12 +11,17 @@ //! the end conditions are met, the test is a failure. Defaults to 10 min. //! - `E2E_KATHY_MESSAGES`: Number of kathy messages to dispatch. Defaults to 16 if CI mode is enabled. //! else false. +//! - `SEALEVEL_ENABLED`: true/false, enables sealevel testing. Defaults to true. 
use std::{ - fs, + collections::HashMap, + fs::{self, File}, path::Path, process::{Child, ExitCode}, - sync::atomic::{AtomicBool, Ordering}, + sync::{ + atomic::{AtomicBool, Ordering}, + Arc, Mutex, + }, thread::sleep, time::{Duration, Instant}, }; @@ -24,6 +29,7 @@ use std::{ use ethers_contract::MULTICALL_ADDRESS; use logging::log; pub use metrics::fetch_metric; +use once_cell::sync::Lazy; use program::Program; use tempfile::tempdir; @@ -46,6 +52,12 @@ mod program; mod solana; mod utils; +pub static AGENT_LOGGING_DIR: Lazy<&Path> = Lazy::new(|| { + let dir = Path::new("/tmp/test_logs"); + fs::create_dir_all(dir).unwrap(); + dir +}); + /// These private keys are from hardhat/anvil's testing accounts. const RELAYER_KEYS: &[&str] = &[ // test1 @@ -61,17 +73,18 @@ const RELAYER_KEYS: &[&str] = &[ ]; /// These private keys are from hardhat/anvil's testing accounts. /// These must be consistent with the ISM config for the test. -const VALIDATOR_KEYS: &[&str] = &[ +const ETH_VALIDATOR_KEYS: &[&str] = &[ // eth "0x47e179ec197488593b187f80a00eb0da91f1b9d0b13f8733639f19c30a34926a", "0x8b3a350cf5c34c9194ca85829a2df0ec3153be0318b5e2d3348e872092edffba", "0x92db14e403b83dfe3df233f83dfa3a0d7096f21ca9b0d6d6b8d88b2b4ec1564e", +]; + +const SEALEVEL_VALIDATOR_KEYS: &[&str] = &[ // sealevel "0x59c6995e998f97a5a0044966f0945389dc9e86dae88c7a8412f4603b6b78690d", ]; -const VALIDATOR_ORIGIN_CHAINS: &[&str] = &["test1", "test2", "test3", "sealeveltest1"]; - const AGENT_BIN_PATH: &str = "target/debug"; const INFRA_PATH: &str = "../typescript/infra"; const MONOREPO_ROOT_PATH: &str = "../"; @@ -87,14 +100,15 @@ static SHUTDOWN: AtomicBool = AtomicBool::new(false); /// cleanup purposes at this time. #[derive(Default)] struct State { - agents: Vec<(String, Child)>, + #[allow(clippy::type_complexity)] + agents: HashMap>>)>, watchers: Vec>>, data: Vec>, } impl State { fn push_agent(&mut self, handles: AgentHandles) { - self.agents.push((handles.0, handles.1)); + self.agents.insert(handles.0, (handles.1, handles.5)); self.watchers.push(handles.2); self.watchers.push(handles.3); self.data.push(handles.4); @@ -105,9 +119,7 @@ impl Drop for State { fn drop(&mut self) { SHUTDOWN.store(true, Ordering::Relaxed); log!("Signaling children to stop..."); - // stop children in reverse order - self.agents.reverse(); - for (name, mut agent) in self.agents.drain(..) 
{ + for (name, (mut agent, _)) in self.agents.drain() { log!("Stopping child {}", name); stop_child(&mut agent); } @@ -122,6 +134,7 @@ impl Drop for State { drop(data) } fs::remove_dir_all(SOLANA_CHECKPOINT_LOCATION).unwrap_or_default(); + fs::remove_dir_all::<&Path>(AGENT_LOGGING_DIR.as_ref()).unwrap_or_default(); } } @@ -133,20 +146,27 @@ fn main() -> ExitCode { }) .unwrap(); - assert_eq!(VALIDATOR_ORIGIN_CHAINS.len(), VALIDATOR_KEYS.len()); - const VALIDATOR_COUNT: usize = VALIDATOR_KEYS.len(); - let config = Config::load(); - - let solana_checkpoint_path = Path::new(SOLANA_CHECKPOINT_LOCATION); - fs::remove_dir_all(solana_checkpoint_path).unwrap_or_default(); - let checkpoints_dirs: Vec = (0..VALIDATOR_COUNT - 1) + let mut validator_origin_chains = ["test1", "test2", "test3"].to_vec(); + let mut validator_keys = ETH_VALIDATOR_KEYS.to_vec(); + let mut validator_count: usize = validator_keys.len(); + let mut checkpoints_dirs: Vec = (0..validator_count) .map(|_| Box::new(tempdir().unwrap()) as DynPath) - .chain([Box::new(solana_checkpoint_path) as DynPath]) .collect(); + if config.sealevel_enabled { + validator_origin_chains.push("sealeveltest1"); + let mut sealevel_keys = SEALEVEL_VALIDATOR_KEYS.to_vec(); + validator_keys.append(&mut sealevel_keys); + let solana_checkpoint_path = Path::new(SOLANA_CHECKPOINT_LOCATION); + fs::remove_dir_all(solana_checkpoint_path).unwrap_or_default(); + checkpoints_dirs.push(Box::new(solana_checkpoint_path) as DynPath); + validator_count += 1; + } + assert_eq!(validator_origin_chains.len(), validator_keys.len()); + let rocks_db_dir = tempdir().unwrap(); let relayer_db = concat_path(&rocks_db_dir, "relayer"); - let validator_dbs = (0..VALIDATOR_COUNT) + let validator_dbs = (0..validator_count) .map(|i| concat_path(&rocks_db_dir, format!("validator{i}"))) .collect::>(); @@ -200,15 +220,6 @@ fn main() -> ExitCode { r#"[{ "type": "minimum", "payment": "1", - "matchingList": [ - { - "originDomain": ["13375","13376"], - "destinationDomain": ["13375","13376"] - } - ] - }, - { - "type": "none" }]"#, ) .arg( @@ -216,11 +227,15 @@ fn main() -> ExitCode { "http://127.0.0.1:8545,http://127.0.0.1:8545,http://127.0.0.1:8545", ) // default is used for TEST3 - .arg("defaultSigner.key", RELAYER_KEYS[2]) - .arg( + .arg("defaultSigner.key", RELAYER_KEYS[2]); + let relayer_env = if config.sealevel_enabled { + relayer_env.arg( "relayChains", "test1,test2,test3,sealeveltest1,sealeveltest2", - ); + ) + } else { + relayer_env.arg("relayChains", "test1,test2,test3") + }; let base_validator_env = common_agent_env .clone() @@ -242,14 +257,14 @@ fn main() -> ExitCode { .hyp_env("INTERVAL", "5") .hyp_env("CHECKPOINTSYNCER_TYPE", "localStorage"); - let validator_envs = (0..VALIDATOR_COUNT) + let validator_envs = (0..validator_count) .map(|i| { base_validator_env .clone() .hyp_env("METRICSPORT", (9094 + i).to_string()) .hyp_env("DB", validator_dbs[i].to_str().unwrap()) - .hyp_env("ORIGINCHAINNAME", VALIDATOR_ORIGIN_CHAINS[i]) - .hyp_env("VALIDATOR_KEY", VALIDATOR_KEYS[i]) + .hyp_env("ORIGINCHAINNAME", validator_origin_chains[i]) + .hyp_env("VALIDATOR_KEY", validator_keys[i]) .hyp_env( "CHECKPOINTSYNCER_PATH", (*checkpoints_dirs[i]).as_ref().to_str().unwrap(), @@ -283,7 +298,7 @@ fn main() -> ExitCode { .join(", ") ); log!("Relayer DB in {}", relayer_db.display()); - (0..VALIDATOR_COUNT).for_each(|i| { + (0..validator_count).for_each(|i| { log!("Validator {} DB in {}", i + 1, validator_dbs[i].display()); }); @@ -291,9 +306,14 @@ fn main() -> ExitCode { // Ready to run... 
// - let (solana_path, solana_path_tempdir) = install_solana_cli_tools().join(); - state.data.push(Box::new(solana_path_tempdir)); - let solana_program_builder = build_solana_programs(solana_path.clone()); + let solana_paths = if config.sealevel_enabled { + let (solana_path, solana_path_tempdir) = install_solana_cli_tools().join(); + state.data.push(Box::new(solana_path_tempdir)); + let solana_program_builder = build_solana_programs(solana_path.clone()); + Some((solana_program_builder.join(), solana_path)) + } else { + None + }; // this task takes a long time in the CI so run it in parallel log!("Building rust..."); @@ -303,15 +323,18 @@ fn main() -> ExitCode { .arg("bin", "relayer") .arg("bin", "validator") .arg("bin", "scraper") - .arg("bin", "init-db") - .arg("bin", "hyperlane-sealevel-client") + .arg("bin", "init-db"); + let build_rust = if config.sealevel_enabled { + build_rust.arg("bin", "hyperlane-sealevel-client") + } else { + build_rust + }; + let build_rust = build_rust .filter_logs(|l| !l.contains("workspace-inheritance")) .run(); let start_anvil = start_anvil(config.clone()); - let solana_program_path = solana_program_builder.join(); - log!("Running postgres db..."); let postgres = Program::new("docker") .cmd("run") @@ -320,24 +343,31 @@ fn main() -> ExitCode { .arg("env", "POSTGRES_PASSWORD=47221c18c610") .arg("publish", "5432:5432") .cmd("postgres:14") - .spawn("SQL"); + .spawn("SQL", None); state.push_agent(postgres); build_rust.join(); let solana_ledger_dir = tempdir().unwrap(); - let start_solana_validator = start_solana_test_validator( - solana_path.clone(), - solana_program_path, - solana_ledger_dir.as_ref().to_path_buf(), - ); + let solana_config_path = if let Some((solana_program_path, solana_path)) = solana_paths.clone() + { + let start_solana_validator = start_solana_test_validator( + solana_path.clone(), + solana_program_path, + solana_ledger_dir.as_ref().to_path_buf(), + ); + + let (solana_config_path, solana_validator) = start_solana_validator.join(); + state.push_agent(solana_validator); + Some(solana_config_path) + } else { + None + }; - let (solana_config_path, solana_validator) = start_solana_validator.join(); - state.push_agent(solana_validator); state.push_agent(start_anvil.join()); // spawn 1st validator before any messages have been sent to test empty mailbox - state.push_agent(validator_envs.first().unwrap().clone().spawn("VL1")); + state.push_agent(validator_envs.first().unwrap().clone().spawn("VL1", None)); sleep(Duration::from_secs(5)); @@ -345,7 +375,7 @@ fn main() -> ExitCode { Program::new(concat_path(AGENT_BIN_PATH, "init-db")) .run() .join(); - state.push_agent(scraper_env.spawn("SCR")); + state.push_agent(scraper_env.spawn("SCR", None)); // Send half the kathy messages before starting the rest of the agents let kathy_env_single_insertion = Program::new("yarn") @@ -378,22 +408,35 @@ fn main() -> ExitCode { .arg("required-hook", "merkleTreeHook"); kathy_env_double_insertion.clone().run().join(); - // Send some sealevel messages before spinning up the agents, to test the backward indexing cursor - for _i in 0..(SOL_MESSAGES_EXPECTED / 2) { - initiate_solana_hyperlane_transfer(solana_path.clone(), solana_config_path.clone()).join(); + if let Some((solana_config_path, (_, solana_path))) = + solana_config_path.clone().zip(solana_paths.clone()) + { + // Send some sealevel messages before spinning up the agents, to test the backward indexing cursor + for _i in 0..(SOL_MESSAGES_EXPECTED / 2) { + initiate_solana_hyperlane_transfer(solana_path.clone(), 
solana_config_path.clone()) + .join(); + } } // spawn the rest of the validators for (i, validator_env) in validator_envs.into_iter().enumerate().skip(1) { - let validator = validator_env.spawn(make_static(format!("VL{}", 1 + i))); + let validator = validator_env.spawn( + make_static(format!("VL{}", 1 + i)), + Some(AGENT_LOGGING_DIR.as_ref()), + ); state.push_agent(validator); } - state.push_agent(relayer_env.spawn("RLY")); + state.push_agent(relayer_env.spawn("RLY", Some(&AGENT_LOGGING_DIR))); - // Send some sealevel messages after spinning up the relayer, to test the forward indexing cursor - for _i in 0..(SOL_MESSAGES_EXPECTED / 2) { - initiate_solana_hyperlane_transfer(solana_path.clone(), solana_config_path.clone()).join(); + if let Some((solana_config_path, (_, solana_path))) = + solana_config_path.clone().zip(solana_paths.clone()) + { + // Send some sealevel messages after spinning up the relayer, to test the forward indexing cursor + for _i in 0..(SOL_MESSAGES_EXPECTED / 2) { + initiate_solana_hyperlane_transfer(solana_path.clone(), solana_config_path.clone()) + .join(); + } } log!("Setup complete! Agents running in background..."); @@ -402,7 +445,11 @@ fn main() -> ExitCode { // Send half the kathy messages after the relayer comes up kathy_env_double_insertion.clone().run().join(); kathy_env_zero_insertion.clone().run().join(); - state.push_agent(kathy_env_single_insertion.flag("mineforever").spawn("KTY")); + state.push_agent( + kathy_env_single_insertion + .flag("mineforever") + .spawn("KTY", None), + ); let loop_start = Instant::now(); // give things a chance to fully start. @@ -412,12 +459,14 @@ fn main() -> ExitCode { while !SHUTDOWN.load(Ordering::Relaxed) { if config.ci_mode { // for CI we have to look for the end condition. - // if termination_invariants_met(&config, starting_relayer_balance) if termination_invariants_met( &config, starting_relayer_balance, - &solana_path, - &solana_config_path, + solana_paths + .clone() + .map(|(_, solana_path)| solana_path) + .as_deref(), + solana_config_path.as_deref(), ) .unwrap_or(false) { @@ -432,7 +481,7 @@ fn main() -> ExitCode { } // verify long-running tasks are still running - for (name, child) in state.agents.iter_mut() { + for (name, (child, _)) in state.agents.iter_mut() { if let Some(status) = child.try_wait().unwrap() { if !status.success() { log!( diff --git a/rust/utils/run-locally/src/program.rs b/rust/utils/run-locally/src/program.rs index 5c2768ae1..3775ef8e9 100644 --- a/rust/utils/run-locally/src/program.rs +++ b/rust/utils/run-locally/src/program.rs @@ -2,14 +2,14 @@ use std::{ collections::BTreeMap, ffi::OsStr, fmt::{Debug, Display, Formatter}, - io::{BufRead, BufReader, Read}, + fs::{File, OpenOptions}, + io::{BufRead, BufReader, Read, Write}, path::{Path, PathBuf}, process::{Command, Stdio}, sync::{ atomic::{AtomicBool, Ordering}, - mpsc, - mpsc::Sender, - Arc, + mpsc::{self, Sender}, + Arc, Mutex, }, thread::{sleep, spawn}, time::Duration, @@ -240,8 +240,18 @@ impl Program { }) } - pub fn spawn(self, log_prefix: &'static str) -> AgentHandles { + pub fn spawn(self, log_prefix: &'static str, logs_dir: Option<&Path>) -> AgentHandles { + let mut command = self.create_command(); + let log_file = logs_dir.map(|logs_dir| { + let log_file_name = format!("{}-output.log", log_prefix); + let log_file_path = logs_dir.join(log_file_name); + let log_file = OpenOptions::new() + .append(true) + .create(true) + .open(log_file_path) + .expect("Failed to create a log file"); + Arc::new(Mutex::new(log_file)) + }); 
command.stdout(Stdio::piped()).stderr(Stdio::piped()); log!("Spawning {}...", &self); @@ -250,17 +260,35 @@ impl Program { .unwrap_or_else(|e| panic!("Failed to start {:?} with error: {e}", &self)); let child_stdout = child.stdout.take().unwrap(); let filter = self.get_filter(); - let stdout = - spawn(move || prefix_log(child_stdout, log_prefix, &RUN_LOG_WATCHERS, filter, None)); + let cloned_log_file = log_file.clone(); + let stdout = spawn(move || { + prefix_log( + child_stdout, + log_prefix, + &RUN_LOG_WATCHERS, + filter, + cloned_log_file, + None, + ) + }); let child_stderr = child.stderr.take().unwrap(); - let stderr = - spawn(move || prefix_log(child_stderr, log_prefix, &RUN_LOG_WATCHERS, filter, None)); + let stderr = spawn(move || { + prefix_log( + child_stderr, + log_prefix, + &RUN_LOG_WATCHERS, + filter, + None, + None, + ) + }); ( log_prefix.to_owned(), child, Box::new(SimpleTaskHandle(stdout)), Box::new(SimpleTaskHandle(stderr)), self.get_memory(), + log_file.clone(), ) } @@ -281,13 +309,13 @@ impl Program { let stdout = child.stdout.take().unwrap(); let name = self.get_bin_name(); let running = running.clone(); - spawn(move || prefix_log(stdout, &name, &running, filter, stdout_ch_tx)) + spawn(move || prefix_log(stdout, &name, &running, filter, None, stdout_ch_tx)) }; let stderr = { let stderr = child.stderr.take().unwrap(); let name = self.get_bin_name(); let running = running.clone(); - spawn(move || prefix_log(stderr, &name, &running, filter, None)) + spawn(move || prefix_log(stderr, &name, &running, filter, None, None)) }; let status = loop { @@ -321,6 +349,7 @@ fn prefix_log( prefix: &str, run_log_watcher: &AtomicBool, filter: Option, + file: Option>>, channel: Option>, ) { let mut reader = BufReader::new(output).lines(); @@ -340,6 +369,10 @@ fn prefix_log( } } println!("<{prefix}> {line}"); + if let Some(file) = &file { + let mut writer = file.lock().expect("Failed to acquire lock for log file"); + writeln!(writer, "{}", line).unwrap_or(()); + } if let Some(channel) = &channel { // ignore send errors channel.send(line).unwrap_or(()); diff --git a/rust/utils/run-locally/src/solana.rs b/rust/utils/run-locally/src/solana.rs index bf5b7d417..9b0fe41e4 100644 --- a/rust/utils/run-locally/src/solana.rs +++ b/rust/utils/run-locally/src/solana.rs @@ -202,7 +202,7 @@ pub fn start_solana_test_validator( concat_path(&solana_programs_path, lib).to_str().unwrap(), ); } - let validator = args.spawn("SOL"); + let validator = args.spawn("SOL", None); sleep(Duration::from_secs(5)); log!("Deploying the hyperlane programs to solana"); diff --git a/rust/utils/run-locally/src/utils.rs b/rust/utils/run-locally/src/utils.rs index 206b4bc69..531970174 100644 --- a/rust/utils/run-locally/src/utils.rs +++ b/rust/utils/run-locally/src/utils.rs @@ -1,5 +1,8 @@ +use std::fs::File; +use std::io::{self, BufRead}; use std::path::{Path, PathBuf}; use std::process::Child; +use std::sync::{Arc, Mutex}; use std::thread::JoinHandle; use nix::libc::pid_t; @@ -54,6 +57,8 @@ pub type AgentHandles = ( Box>, // data to drop once program exits Box, + // file with stdout logs + Option>>, ); pub type LogFilter = fn(&str) -> bool; @@ -112,3 +117,16 @@ pub fn stop_child(child: &mut Child) { } }; } + +pub fn get_matching_lines(file: &File, search_string: &str) -> io::Result> { + let reader = io::BufReader::new(file); + + // Read lines and collect those that contain the search string + let matching_lines: Vec = reader + .lines() + .map_while(Result::ok) + .filter(|line| line.contains(search_string)) + .collect(); + + 
Ok(matching_lines) +} diff --git a/solidity/CHANGELOG.md b/solidity/CHANGELOG.md index 49a969e8d..0763cef17 100644 --- a/solidity/CHANGELOG.md +++ b/solidity/CHANGELOG.md @@ -1,28 +1,22 @@ # @hyperlane-xyz/core -## 4.0.0-alpha.2 +## 3.15.0 -### Patch Changes - -- @hyperlane-xyz/utils@4.0.0-alpha.2 +### Minor Changes -## 4.0.0-alpha.0 +- 51bfff683: Mint/burn limit checking for xERC20 bridging + Corrects CLI output for HypXERC20 and HypXERC20Lockbox deployments ### Patch Changes -- @hyperlane-xyz/utils@4.0.0-alpha.0 - -## 4.0.0 - -### Major Changes +- @hyperlane-xyz/utils@3.15.0 -- 74c879fa1: Merge branch 'cli-2.0' into main. +## 3.14.0 ### Patch Changes -- Updated dependencies [341b8affd] -- Updated dependencies [74c879fa1] - - @hyperlane-xyz/utils@4.0.0-alpha +- a8a68f6f6: fix: make XERC20 and XERC20 Lockbox proxy-able + - @hyperlane-xyz/utils@3.14.0 ## 3.13.0 diff --git a/solidity/contracts/client/GasRouter.sol b/solidity/contracts/client/GasRouter.sol index 6f4b248e5..c5f9efd1c 100644 --- a/solidity/contracts/client/GasRouter.sol +++ b/solidity/contracts/client/GasRouter.sol @@ -43,13 +43,13 @@ abstract contract GasRouter is Router { */ function quoteGasPayment( uint32 _destinationDomain - ) external view returns (uint256 _gasPayment) { - return _quoteDispatch(_destinationDomain, ""); + ) external view returns (uint256) { + return _GasRouter_quoteDispatch(_destinationDomain, "", address(hook)); } - function _metadata( + function _GasRouter_hookMetadata( uint32 _destination - ) internal view virtual override returns (bytes memory) { + ) internal view returns (bytes memory) { return StandardHookMetadata.overrideGasLimit(destinationGas[_destination]); } @@ -57,4 +57,34 @@ abstract contract GasRouter is Router { function _setDestinationGas(uint32 domain, uint256 gas) internal { destinationGas[domain] = gas; } + + function _GasRouter_dispatch( + uint32 _destination, + uint256 _value, + bytes memory _messageBody, + address _hook + ) internal returns (bytes32) { + return + _Router_dispatch( + _destination, + _value, + _messageBody, + _GasRouter_hookMetadata(_destination), + _hook + ); + } + + function _GasRouter_quoteDispatch( + uint32 _destination, + bytes memory _messageBody, + address _hook + ) internal view returns (uint256) { + return + _Router_quoteDispatch( + _destination, + _messageBody, + _GasRouter_hookMetadata(_destination), + _hook + ); + } } diff --git a/solidity/contracts/client/MailboxClient.sol b/solidity/contracts/client/MailboxClient.sol index bf6b79c34..3e0b751e6 100644 --- a/solidity/contracts/client/MailboxClient.sol +++ b/solidity/contracts/client/MailboxClient.sol @@ -95,85 +95,4 @@ abstract contract MailboxClient is OwnableUpgradeable { function _isDelivered(bytes32 id) internal view returns (bool) { return mailbox.delivered(id); } - - function _metadata( - uint32 /*_destinationDomain*/ - ) internal view virtual returns (bytes memory) { - return ""; - } - - function _dispatch( - uint32 _destinationDomain, - bytes32 _recipient, - bytes memory _messageBody - ) internal virtual returns (bytes32) { - return - _dispatch(_destinationDomain, _recipient, msg.value, _messageBody); - } - - function _dispatch( - uint32 _destinationDomain, - bytes32 _recipient, - uint256 _value, - bytes memory _messageBody - ) internal virtual returns (bytes32) { - return - mailbox.dispatch{value: _value}( - _destinationDomain, - _recipient, - _messageBody, - _metadata(_destinationDomain), - hook - ); - } - - function _dispatch( - uint32 _destinationDomain, - bytes32 _recipient, - uint256 _value, - 
bytes memory _messageBody, - bytes memory _hookMetadata, - IPostDispatchHook _hook - ) internal virtual returns (bytes32) { - return - mailbox.dispatch{value: _value}( - _destinationDomain, - _recipient, - _messageBody, - _hookMetadata, - _hook - ); - } - - function _quoteDispatch( - uint32 _destinationDomain, - bytes32 _recipient, - bytes memory _messageBody - ) internal view virtual returns (uint256) { - return - mailbox.quoteDispatch( - _destinationDomain, - _recipient, - _messageBody, - _metadata(_destinationDomain), - hook - ); - } - - function _quoteDispatch( - uint32 _destinationDomain, - bytes32 _recipient, - bytes memory _messageBody, - bytes calldata _hookMetadata, - IPostDispatchHook _hook - ) internal view virtual returns (uint256) { - return - mailbox.quoteDispatch( - _destinationDomain, - _recipient, - _messageBody, - _hookMetadata, - _hook - ); - } } diff --git a/solidity/contracts/client/Router.sol b/solidity/contracts/client/Router.sol index ef14912f9..52f3fe537 100644 --- a/solidity/contracts/client/Router.sol +++ b/solidity/contracts/client/Router.sol @@ -167,28 +167,73 @@ abstract contract Router is MailboxClient, IMessageRecipient { ); } - function _dispatch( + function _Router_dispatch( uint32 _destinationDomain, - bytes memory _messageBody - ) internal virtual returns (bytes32) { - return _dispatch(_destinationDomain, msg.value, _messageBody); + uint256 _value, + bytes memory _messageBody, + bytes memory _hookMetadata, + address _hook + ) internal returns (bytes32) { + bytes32 _router = _mustHaveRemoteRouter(_destinationDomain); + return + mailbox.dispatch{value: _value}( + _destinationDomain, + _router, + _messageBody, + _hookMetadata, + IPostDispatchHook(_hook) + ); } + /** + * DEPRECATED: Use `_Router_dispatch` instead + * @dev For backward compatibility with v2 client contracts + */ function _dispatch( uint32 _destinationDomain, - uint256 _value, bytes memory _messageBody - ) internal virtual returns (bytes32) { + ) internal returns (bytes32) { + return + _Router_dispatch( + _destinationDomain, + msg.value, + _messageBody, + "", + address(hook) + ); + } + + function _Router_quoteDispatch( + uint32 _destinationDomain, + bytes memory _messageBody, + bytes memory _hookMetadata, + address _hook + ) internal view returns (uint256) { bytes32 _router = _mustHaveRemoteRouter(_destinationDomain); return - super._dispatch(_destinationDomain, _router, _value, _messageBody); + mailbox.quoteDispatch( + _destinationDomain, + _router, + _messageBody, + _hookMetadata, + IPostDispatchHook(_hook) + ); } + /** + * DEPRECATED: Use `_Router_quoteDispatch` instead + * @dev For backward compatibility with v2 client contracts + */ function _quoteDispatch( uint32 _destinationDomain, bytes memory _messageBody - ) internal view virtual returns (uint256) { - bytes32 _router = _mustHaveRemoteRouter(_destinationDomain); - return super._quoteDispatch(_destinationDomain, _router, _messageBody); + ) internal view returns (uint256) { + return + _Router_quoteDispatch( + _destinationDomain, + _messageBody, + "", + address(hook) + ); } } diff --git a/solidity/contracts/test/ERC20Test.sol b/solidity/contracts/test/ERC20Test.sol index 928a86051..b9e43a7ac 100644 --- a/solidity/contracts/test/ERC20Test.sol +++ b/solidity/contracts/test/ERC20Test.sol @@ -3,6 +3,7 @@ pragma solidity >=0.8.0; import "@openzeppelin/contracts/token/ERC20/ERC20.sol"; +import "../token/interfaces/IXERC20Lockbox.sol"; import "../token/interfaces/IXERC20.sol"; import "../token/interfaces/IFiatToken.sol"; @@ -66,15 +67,62 @@ 
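
Aside on the `Router` refactor in the hunk above: the renamed `_Router_dispatch` and `_Router_quoteDispatch` helpers take the hook metadata and the post-dispatch hook explicitly, while the old two-argument `_dispatch`/`_quoteDispatch` remain as thin backward-compatible wrappers. The following is a minimal sketch of a client router built on the new helpers; the `PingRouter` contract, its gas constant, and the import paths (which assume the repo's `contracts/` forge remapping) are illustrative assumptions, not part of this diff.

```solidity
// SPDX-License-Identifier: MIT OR Apache-2.0
pragma solidity >=0.8.0;

// Illustrative sketch only; import paths assume the repo's forge remappings.
import {Router} from "contracts/client/Router.sol";
import {StandardHookMetadata} from "contracts/hooks/libs/StandardHookMetadata.sol";

contract PingRouter is Router {
    // assumed handle-side gas limit, for illustration only
    uint256 internal constant PING_GAS_LIMIT = 100_000;

    constructor(address _mailbox) Router(_mailbox) {}

    /// @notice Dispatches "ping" with explicit hook metadata and the configured default hook.
    function ping(uint32 _destination) external payable returns (bytes32) {
        return
            _Router_dispatch(
                _destination,
                msg.value,
                bytes("ping"),
                StandardHookMetadata.overrideGasLimit(PING_GAS_LIMIT),
                address(hook) // keep the MailboxClient's configured hook
            );
    }

    /// @notice Quotes the same dispatch without sending it.
    function quotePing(uint32 _destination) external view returns (uint256) {
        return
            _Router_quoteDispatch(
                _destination,
                bytes("ping"),
                StandardHookMetadata.overrideGasLimit(PING_GAS_LIMIT),
                address(hook)
            );
    }

    function _handle(uint32, bytes32, bytes calldata) internal override {
        // no-op receiver for the sketch
    }
}
```

Clients that still call the two-argument `_dispatch(destination, body)` keep compiling, since the deprecated wrapper forwards `msg.value`, empty metadata, and `address(hook)`.
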
contract XERC20Test is ERC20Test, IXERC20 { _burn(account, amount); } - function setLimits( - address /* _bridge */, - uint256 /* _mintingLimit */, - uint256 /* _burningLimit */ - ) external pure { - require(false, "setLimits(): not implemented"); + function setLimits(address, uint256, uint256) external pure { + assert(false); } function owner() external pure returns (address) { return address(0x0); } + + function burningCurrentLimitOf( + address _bridge + ) external view returns (uint256) { + return type(uint256).max; + } + + function mintingCurrentLimitOf( + address _bridge + ) external view returns (uint256) { + return type(uint256).max; + } +} + +contract XERC20LockboxTest is IXERC20Lockbox { + IXERC20 public immutable XERC20; + IERC20 public immutable ERC20; + + constructor( + string memory name, + string memory symbol, + uint256 totalSupply, + uint8 __decimals + ) { + ERC20Test erc20 = new ERC20Test(name, symbol, totalSupply, __decimals); + erc20.transfer(msg.sender, totalSupply); + ERC20 = erc20; + XERC20 = new XERC20Test(name, symbol, 0, __decimals); + } + + function depositTo(address _user, uint256 _amount) public { + ERC20.transferFrom(msg.sender, address(this), _amount); + XERC20.mint(_user, _amount); + } + + function deposit(uint256 _amount) external { + depositTo(msg.sender, _amount); + } + + function depositNativeTo(address) external payable { + assert(false); + } + + function withdrawTo(address _user, uint256 _amount) public { + XERC20.burn(msg.sender, _amount); + ERC20Test(address(ERC20)).mintTo(_user, _amount); + } + + function withdraw(uint256 _amount) external { + withdrawTo(msg.sender, _amount); + } } diff --git a/solidity/contracts/test/TestGasRouter.sol b/solidity/contracts/test/TestGasRouter.sol index 3c8f6ecbf..b74bd6ac6 100644 --- a/solidity/contracts/test/TestGasRouter.sol +++ b/solidity/contracts/test/TestGasRouter.sol @@ -7,7 +7,7 @@ contract TestGasRouter is GasRouter { constructor(address _mailbox) GasRouter(_mailbox) {} function dispatch(uint32 _destination, bytes memory _msg) external payable { - _dispatch(_destination, _msg); + _GasRouter_dispatch(_destination, msg.value, _msg, address(hook)); } function _handle(uint32, bytes32, bytes calldata) internal pure override {} diff --git a/solidity/contracts/token/HypNative.sol b/solidity/contracts/token/HypNative.sol index 5afafdd96..5b620d00a 100644 --- a/solidity/contracts/token/HypNative.sol +++ b/solidity/contracts/token/HypNative.sol @@ -36,24 +36,16 @@ contract HypNative is TokenRouter { /** * @inheritdoc TokenRouter - * @dev uses (`msg.value` - `_amount`) as interchain gas payment and `msg.sender` as refund address. + * @dev uses (`msg.value` - `_amount`) as hook payment and `msg.sender` as refund address. 
*/ function transferRemote( uint32 _destination, bytes32 _recipient, uint256 _amount - ) public payable virtual override returns (bytes32 messageId) { + ) external payable virtual override returns (bytes32 messageId) { require(msg.value >= _amount, "Native: amount exceeds msg.value"); - uint256 gasPayment = msg.value - _amount; - return - _transferRemote( - _destination, - _recipient, - _amount, - gasPayment, - bytes(""), - address(0) - ); + uint256 _hookPayment = msg.value - _amount; + return _transferRemote(_destination, _recipient, _amount, _hookPayment); } function balanceOf( diff --git a/solidity/contracts/token/README.md b/solidity/contracts/token/README.md index 3d8c90082..99edbd11d 100644 --- a/solidity/contracts/token/README.md +++ b/solidity/contracts/token/README.md @@ -6,7 +6,7 @@ For instructions on deploying Warp Routes, see [the deployment documentation](ht ## Warp Route Architecture -A Warp Route is a collection of [`TokenRouter`](./contracts/libs/TokenRouter.sol) contracts deployed across a set of Hyperlane chains. These contracts leverage the `Router` pattern to implement access control and routing logic for remote token transfers. These contracts send and receive [`Messages`](./contracts/libs/Message.sol) which encode payloads containing a transfer `amount` and `recipient` address. +A Warp Route is a collection of [`TokenRouter`](./libs/TokenRouter.sol) contracts deployed across a set of Hyperlane chains. These contracts leverage the `Router` pattern to implement access control and routing logic for remote token transfers. These contracts send and receive [`Messages`](./libs/TokenMessage.sol) which encode payloads containing a transfer `amount` and `recipient` address. ```mermaid %%{ init: { @@ -39,7 +39,7 @@ graph LR Mailbox_G[(Mailbox)] end - HYP_E -. "router" .- HYP_P -. "router" .- HYP_G + HYP_E -. "TokenMessage" .- HYP_P -. 
"TokenMessage" .- HYP_G ``` diff --git a/solidity/contracts/token/extensions/HypNativeScaled.sol b/solidity/contracts/token/extensions/HypNativeScaled.sol index 625fa7e0e..88ccaa468 100644 --- a/solidity/contracts/token/extensions/HypNativeScaled.sol +++ b/solidity/contracts/token/extensions/HypNativeScaled.sol @@ -25,18 +25,16 @@ contract HypNativeScaled is HypNative { uint32 _destination, bytes32 _recipient, uint256 _amount - ) public payable override returns (bytes32 messageId) { + ) external payable override returns (bytes32 messageId) { require(msg.value >= _amount, "Native: amount exceeds msg.value"); - uint256 gasPayment = msg.value - _amount; - uint256 scaledAmount = _amount / scale; + uint256 _hookPayment = msg.value - _amount; + uint256 _scaledAmount = _amount / scale; return _transferRemote( _destination, _recipient, - scaledAmount, - gasPayment, - bytes(""), - address(0) + _scaledAmount, + _hookPayment ); } diff --git a/solidity/contracts/token/extensions/HypXERC20.sol b/solidity/contracts/token/extensions/HypXERC20.sol index 9f50b4537..84ab7a879 100644 --- a/solidity/contracts/token/extensions/HypXERC20.sol +++ b/solidity/contracts/token/extensions/HypXERC20.sol @@ -8,7 +8,9 @@ contract HypXERC20 is HypERC20Collateral { constructor( address _xerc20, address _mailbox - ) HypERC20Collateral(_xerc20, _mailbox) {} + ) HypERC20Collateral(_xerc20, _mailbox) { + _disableInitializers(); + } function _transferFromSender( uint256 _amountOrId diff --git a/solidity/contracts/token/extensions/HypXERC20Lockbox.sol b/solidity/contracts/token/extensions/HypXERC20Lockbox.sol index f4a860917..f2c26e784 100644 --- a/solidity/contracts/token/extensions/HypXERC20Lockbox.sol +++ b/solidity/contracts/token/extensions/HypXERC20Lockbox.sol @@ -17,18 +17,40 @@ contract HypXERC20Lockbox is HypERC20Collateral { ) HypERC20Collateral(address(IXERC20Lockbox(_lockbox).ERC20()), _mailbox) { lockbox = IXERC20Lockbox(_lockbox); xERC20 = lockbox.XERC20(); + approveLockbox(); + _disableInitializers(); + } - // grant infinite approvals to lockbox + /** + * @notice Approve the lockbox to spend the wrapped token and xERC20 + * @dev This function is idempotent and need not be access controlled + */ + function approveLockbox() public { require( - IERC20(wrappedToken).approve(_lockbox, MAX_INT), + IERC20(wrappedToken).approve(address(lockbox), MAX_INT), "erc20 lockbox approve failed" ); require( - xERC20.approve(_lockbox, MAX_INT), + xERC20.approve(address(lockbox), MAX_INT), "xerc20 lockbox approve failed" ); } + /** + * @notice Initialize the contract + * @param _hook The address of the hook contract + * @param _ism The address of the interchain security module + * @param _owner The address of the owner + */ + function initialize( + address _hook, + address _ism, + address _owner + ) public override initializer { + approveLockbox(); + _MailboxClient_initialize(_hook, _ism, _owner); + } + function _transferFromSender( uint256 _amount ) internal override returns (bytes memory) { diff --git a/solidity/contracts/token/interfaces/IXERC20.sol b/solidity/contracts/token/interfaces/IXERC20.sol index 2c9bad49a..61b03e46b 100644 --- a/solidity/contracts/token/interfaces/IXERC20.sol +++ b/solidity/contracts/token/interfaces/IXERC20.sol @@ -36,4 +36,22 @@ interface IXERC20 is IERC20 { ) external; function owner() external returns (address); + + /** + * @notice Returns the current limit of a bridge + * @param _bridge the bridge we are viewing the limits of + * @return _limit The limit the bridge has + */ + function 
burningCurrentLimitOf( + address _bridge + ) external view returns (uint256 _limit); + + /** + * @notice Returns the current limit of a bridge + * @param _bridge the bridge we are viewing the limits of + * @return _limit The limit the bridge has + */ + function mintingCurrentLimitOf( + address _bridge + ) external view returns (uint256 _limit); } diff --git a/solidity/contracts/token/libs/FastTokenRouter.sol b/solidity/contracts/token/libs/FastTokenRouter.sol index b5fdaaee3..8e4418a89 100644 --- a/solidity/contracts/token/libs/FastTokenRouter.sol +++ b/solidity/contracts/token/libs/FastTokenRouter.sol @@ -109,9 +109,11 @@ abstract contract FastTokenRouter is TokenRouter { _fastTransferId ); - messageId = _dispatch( + messageId = _GasRouter_dispatch( _destination, - TokenMessage.format(_recipient, _amountOrId, metadata) + msg.value, + TokenMessage.format(_recipient, _amountOrId, metadata), + address(hook) ); emit SentTransferRemote(_destination, _recipient, _amountOrId); } diff --git a/solidity/contracts/token/libs/TokenRouter.sol b/solidity/contracts/token/libs/TokenRouter.sol index c5d0bf6b1..96ffcf838 100644 --- a/solidity/contracts/token/libs/TokenRouter.sol +++ b/solidity/contracts/token/libs/TokenRouter.sol @@ -57,14 +57,7 @@ abstract contract TokenRouter is GasRouter { uint256 _amountOrId ) external payable virtual returns (bytes32 messageId) { return - _transferRemote( - _destination, - _recipient, - _amountOrId, - msg.value, - bytes(""), - address(0) - ); + _transferRemote(_destination, _recipient, _amountOrId, msg.value); } /** @@ -97,45 +90,45 @@ abstract contract TokenRouter is GasRouter { ); } - /** - * @notice Transfers `_amountOrId` token to `_recipient` on `_destination` domain. - * @dev Delegates transfer logic to `_transferFromSender` implementation. - * @dev The metadata is the token metadata, and is DIFFERENT than the hook metadata. - * @dev Emits `SentTransferRemote` event on the origin chain. - * @param _destination The identifier of the destination chain. - * @param _recipient The address of the recipient on the destination chain. - * @param _amountOrId The amount or identifier of tokens to be sent to the remote recipient. - * @param _gasPayment The amount of native token to pay for interchain gas. - * @param _hookMetadata The metadata passed into the hook - * @param _hook The post dispatch hook to be called by the Mailbox - * @return messageId The identifier of the dispatched message. 
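
Stepping back from the `TokenRouter` hunk above: the point of this change set is that `transferRemote` can carry hook overrides end to end, with the internal `_transferRemote` threading `_hookMetadata` and `_hook` into `_Router_dispatch`. Below is a rough caller-side sketch; the wrapper contract, the 200k gas figure, and the import paths are assumptions, and the five-argument external overload used here is the one exercised by the updated `HypERC20` tests later in this diff.

```solidity
// SPDX-License-Identifier: MIT OR Apache-2.0
pragma solidity >=0.8.0;

// Illustrative sketch only; import paths assume the repo's forge remappings.
import {TokenRouter} from "contracts/token/libs/TokenRouter.sol";
import {StandardHookMetadata} from "contracts/hooks/libs/StandardHookMetadata.sol";

contract WarpSendWithHookOverride {
    /// @notice Sends a warp transfer, optionally overriding the post-dispatch hook.
    /// @dev For collateral routes the wrapper would also need token approvals; omitted here.
    function send(
        TokenRouter warpRoute,
        uint32 destination,
        bytes32 recipient,
        uint256 amount,
        address customHook
    ) external payable returns (bytes32) {
        // Default path: the router derives metadata from destinationGas and uses
        // its configured hook; quoteGasPayment() prices exactly that path.
        if (customHook == address(0)) {
            uint256 fee = warpRoute.quoteGasPayment(destination);
            require(msg.value >= fee, "insufficient hook payment");
            return
                warpRoute.transferRemote{value: msg.value}(
                    destination,
                    recipient,
                    amount
                );
        }

        // Override path: supply explicit hook metadata and a custom hook.
        // The default quote above may not match a custom hook's fee.
        return
            warpRoute.transferRemote{value: msg.value}(
                destination,
                recipient,
                amount,
                StandardHookMetadata.overrideGasLimit(200_000), // assumed gas limit
                customHook
            );
    }
}
```
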
- */ function _transferRemote( uint32 _destination, bytes32 _recipient, uint256 _amountOrId, - uint256 _gasPayment, - bytes memory _hookMetadata, - address _hook + uint256 _value ) internal returns (bytes32 messageId) { - bytes memory metadata = _transferFromSender(_amountOrId); - - if (address(_hook) == address(0)) { - messageId = _dispatch( - _destination, - _gasPayment, - TokenMessage.format(_recipient, _amountOrId, metadata) - ); - } else { - messageId = _dispatch( + return + _transferRemote( _destination, _recipient, - _gasPayment, - TokenMessage.format(_recipient, _amountOrId, metadata), - _hookMetadata, - IPostDispatchHook(_hook) + _amountOrId, + _value, + _GasRouter_hookMetadata(_destination), + address(hook) ); - } + } + + function _transferRemote( + uint32 _destination, + bytes32 _recipient, + uint256 _amountOrId, + uint256 _value, + bytes memory _hookMetadata, + address _hook + ) internal virtual returns (bytes32 messageId) { + bytes memory _tokenMetadata = _transferFromSender(_amountOrId); + bytes memory _tokenMessage = TokenMessage.format( + _recipient, + _amountOrId, + _tokenMetadata + ); + + messageId = _Router_dispatch( + _destination, + _value, + _tokenMessage, + _hookMetadata, + _hook + ); emit SentTransferRemote(_destination, _recipient, _amountOrId); } diff --git a/solidity/coverage.sh b/solidity/coverage.sh index bd9a3e232..a3f7d463f 100755 --- a/solidity/coverage.sh +++ b/solidity/coverage.sh @@ -14,7 +14,7 @@ fi lcov --version # exclude FastTokenRouter until https://github.com/hyperlane-xyz/hyperlane-monorepo/issues/2806 -EXCLUDE="*test* *mock* *node_modules* *FastHyp*" +EXCLUDE="*test* *mock* *node_modules* *script* *FastHyp*" lcov \ --rc lcov_branch_coverage=1 \ --remove lcov.info $EXCLUDE \ diff --git a/solidity/foundry.toml b/solidity/foundry.toml index 8180d9b58..51a9912cb 100644 --- a/solidity/foundry.toml +++ b/solidity/foundry.toml @@ -14,7 +14,11 @@ fs_permissions = [ { access = "read", path = "./script/avs/"}, { access = "write", path = "./fixtures" } ] -ignored_warnings_from = ['fx-portal'] +ignored_warnings_from = [ + 'lib', + 'test', + 'contracts/test' +] [profile.ci] verbosity = 4 diff --git a/solidity/package.json b/solidity/package.json index 48e2e1926..0d2921a58 100644 --- a/solidity/package.json +++ b/solidity/package.json @@ -1,10 +1,10 @@ { "name": "@hyperlane-xyz/core", "description": "Core solidity contracts for Hyperlane", - "version": "4.0.0-alpha.2", + "version": "3.15.0", "dependencies": { "@eth-optimism/contracts": "^0.6.0", - "@hyperlane-xyz/utils": "4.0.0-alpha.2", + "@hyperlane-xyz/utils": "3.15.0", "@layerzerolabs/lz-evm-oapp-v2": "2.0.2", "@openzeppelin/contracts": "^4.9.3", "@openzeppelin/contracts-upgradeable": "^v4.9.3", @@ -15,7 +15,9 @@ "@nomiclabs/hardhat-ethers": "^2.2.3", "@nomiclabs/hardhat-waffle": "^2.0.6", "@typechain/ethers-v5": "^11.1.2", + "@typechain/ethers-v6": "^0.5.1", "@typechain/hardhat": "^9.1.0", + "@types/node": "^18.14.5", "chai": "^4.3.6", "ethereum-waffle": "^4.0.10", "ethers": "^5.7.2", @@ -26,6 +28,7 @@ "prettier-plugin-solidity": "^1.1.3", "solhint": "^4.5.4", "solhint-plugin-prettier": "^0.0.5", + "solidity-bytes-utils": "^0.8.0", "solidity-coverage": "^0.8.3", "ts-generator": "^0.1.1", "ts-node": "^10.8.0", diff --git a/solidity/script/avs/eigenlayer_addresses.json b/solidity/script/avs/eigenlayer_addresses.json index 60e2fceea..1c20dae29 100644 --- a/solidity/script/avs/eigenlayer_addresses.json +++ b/solidity/script/avs/eigenlayer_addresses.json @@ -5,14 +5,54 @@ "avsDirectory": 
"0x135DDa560e946695d6f155dACaFC6f1F25C1F5AF", "paymentCoordinator": "", "strategies": [ + { + "name": "swETH", + "strategy": "0x0Fe4F44beE93503346A3Ac9EE5A26b130a5796d6" + }, + { + "name": "oETH", + "strategy": "0x13760F50a9d7377e4F20CB8CF9e4c26586c658ff" + }, + { + "name": "rETH", + "strategy": "0x1BeE69b7dFFfA4E2d53C2a2Df135C388AD25dCD2" + }, + { + "name": "mETH", + "strategy": "0x298aFB19A105D59E74658C4C334Ff360BadE6dd2" + }, { "name": "cbETH", "strategy": "0x54945180dB7943c0ed0FEE7EdaB2Bd24620256bc" }, + { + "name": "osETH", + "strategy": "0x57ba429517c3473B6d34CA9aCd56c0e735b94c02" + }, + { + "name": "wBETH", + "strategy": "0x7CA911E83dabf90C90dD3De5411a10F1A6112184" + }, + { + "name": "sfrxETH", + "strategy": "0x8CA7A5d6f3acd3A7A8bC468a8CD0FB14B6BD28b6" + }, { "name": "stETH", "strategy": "0x93c4b944D05dfe6df7645A86cd2206016c51564D" }, + { + "name": "ETHx", + "strategy": "0x9d7eD45EE2E8FC5482fa2428f15C971e6369011d" + }, + { + "name": "ankrETH", + "strategy": "0xa4C637e0F704745D182e4D38cAb7E7485321d059" + }, + { + "name": "lsETH", + "strategy": "0xAe60d8180437b5C34bB956822ac2710972584473" + }, { "name": "Beacon Chain ETH", "strategy": "0xbeaC0eeEeeeeEEeEeEEEEeeEEeEeeeEeeEEBEaC0" diff --git a/solidity/script/xerc20/.env.blast b/solidity/script/xerc20/.env.blast new file mode 100644 index 000000000..b0db0d828 --- /dev/null +++ b/solidity/script/xerc20/.env.blast @@ -0,0 +1,4 @@ +export ROUTER_ADDRESS=0xA34ceDf9068C5deE726C67A4e1DCfCc2D6E2A7fD +export ERC20_ADDRESS=0x2416092f143378750bb29b79eD961ab195CcEea5 +export XERC20_ADDRESS=0x2416092f143378750bb29b79eD961ab195CcEea5 +export RPC_URL="https://rpc.blast.io" diff --git a/solidity/script/xerc20/.env.ethereum b/solidity/script/xerc20/.env.ethereum new file mode 100644 index 000000000..4d6366a8c --- /dev/null +++ b/solidity/script/xerc20/.env.ethereum @@ -0,0 +1,5 @@ +export ROUTER_ADDRESS=0x8dfbEA2582F41c8C4Eb25252BbA392fd3c09449A +export ADMIN_ADDRESS=0xa5B0D537CeBE97f087Dc5FE5732d70719caaEc1D +export ERC20_ADDRESS=0xbf5495Efe5DB9ce00f80364C8B423567e58d2110 +export XERC20_ADDRESS=0x2416092f143378750bb29b79eD961ab195CcEea5 +export RPC_URL="https://eth.merkle.io" diff --git a/solidity/script/xerc20/ApproveLockbox.s.sol b/solidity/script/xerc20/ApproveLockbox.s.sol new file mode 100644 index 000000000..182306eab --- /dev/null +++ b/solidity/script/xerc20/ApproveLockbox.s.sol @@ -0,0 +1,50 @@ +// SPDX-License-Identifier: MIT OR Apache-2.0 +pragma solidity >=0.8.0; + +import "forge-std/Script.sol"; + +import {AnvilRPC} from "test/AnvilRPC.sol"; +import {TypeCasts} from "contracts/libs/TypeCasts.sol"; + +import {ITransparentUpgradeableProxy} from "@openzeppelin/contracts/proxy/transparent/TransparentUpgradeableProxy.sol"; + +import {ProxyAdmin} from "contracts/upgrade/ProxyAdmin.sol"; + +import {HypXERC20Lockbox} from "contracts/token/extensions/HypXERC20Lockbox.sol"; +import {IXERC20Lockbox} from "contracts/token/interfaces/IXERC20Lockbox.sol"; +import {IXERC20} from "contracts/token/interfaces/IXERC20.sol"; +import {IERC20} from "contracts/token/interfaces/IXERC20.sol"; + +// source .env. 
+// forge script ApproveLockbox.s.sol --broadcast --rpc-url localhost:XXXX +contract ApproveLockbox is Script { + address router = vm.envAddress("ROUTER_ADDRESS"); + address admin = vm.envAddress("ADMIN_ADDRESS"); + uint256 deployerPrivateKey = vm.envUint("PRIVATE_KEY"); + + ITransparentUpgradeableProxy proxy = ITransparentUpgradeableProxy(router); + HypXERC20Lockbox old = HypXERC20Lockbox(router); + address lockbox = address(old.lockbox()); + address mailbox = address(old.mailbox()); + ProxyAdmin proxyAdmin = ProxyAdmin(admin); + + function run() external { + assert(proxyAdmin.getProxyAdmin(proxy) == admin); + + vm.startBroadcast(deployerPrivateKey); + HypXERC20Lockbox logic = new HypXERC20Lockbox(lockbox, mailbox); + proxyAdmin.upgradeAndCall( + proxy, + address(logic), + abi.encodeCall(HypXERC20Lockbox.approveLockbox, ()) + ); + vm.stopBroadcast(); + + vm.expectRevert("Initializable: contract is already initialized"); + HypXERC20Lockbox(address(proxy)).initialize( + address(0), + address(0), + mailbox + ); + } +} diff --git a/solidity/script/xerc20/GrantLimits.s.sol b/solidity/script/xerc20/GrantLimits.s.sol new file mode 100644 index 000000000..e2c79bae6 --- /dev/null +++ b/solidity/script/xerc20/GrantLimits.s.sol @@ -0,0 +1,37 @@ +// SPDX-License-Identifier: MIT OR Apache-2.0 +pragma solidity >=0.8.0; + +import "forge-std/Script.sol"; + +import {AnvilRPC} from "test/AnvilRPC.sol"; + +import {IXERC20Lockbox} from "contracts/token/interfaces/IXERC20Lockbox.sol"; +import {IXERC20} from "contracts/token/interfaces/IXERC20.sol"; +import {IERC20} from "contracts/token/interfaces/IXERC20.sol"; + +// source .env. +// anvil --fork-url $RPC_URL --port XXXX +// forge script GrantLimits.s.sol --broadcast --unlocked --rpc-url localhost:XXXX +contract GrantLimits is Script { + address tester = 0xa7ECcdb9Be08178f896c26b7BbD8C3D4E844d9Ba; + uint256 amount = 1 gwei; + + address router = vm.envAddress("ROUTER_ADDRESS"); + IERC20 erc20 = IERC20(vm.envAddress("ERC20_ADDRESS")); + IXERC20 xerc20 = IXERC20(vm.envAddress("XERC20_ADDRESS")); + + function runFrom(address account) internal { + AnvilRPC.setBalance(account, 1 ether); + AnvilRPC.impersonateAccount(account); + vm.broadcast(account); + } + + function run() external { + address owner = xerc20.owner(); + runFrom(owner); + xerc20.setLimits(router, amount, amount); + + runFrom(address(erc20)); + erc20.transfer(tester, amount); + } +} diff --git a/solidity/script/xerc20/ezETH.s.sol b/solidity/script/xerc20/ezETH.s.sol new file mode 100644 index 000000000..f6171eb63 --- /dev/null +++ b/solidity/script/xerc20/ezETH.s.sol @@ -0,0 +1,127 @@ +// SPDX-License-Identifier: MIT OR Apache-2.0 +pragma solidity >=0.8.0; + +import "forge-std/Script.sol"; + +import {IXERC20Lockbox} from "../../contracts/token/interfaces/IXERC20Lockbox.sol"; +import {IXERC20} from "../../contracts/token/interfaces/IXERC20.sol"; +import {IERC20} from "../../contracts/token/interfaces/IXERC20.sol"; +import {HypXERC20Lockbox} from "../../contracts/token/extensions/HypXERC20Lockbox.sol"; +import {HypERC20Collateral} from "../../contracts/token/HypERC20Collateral.sol"; +import {HypXERC20} from "../../contracts/token/extensions/HypXERC20.sol"; +import {TransparentUpgradeableProxy} from "../../contracts/upgrade/TransparentUpgradeableProxy.sol"; +import {ProxyAdmin} from "../../contracts/upgrade/ProxyAdmin.sol"; + +import {TypeCasts} from "../../contracts/libs/TypeCasts.sol"; +import {TokenMessage} from "../../contracts/token/libs/TokenMessage.sol"; + +contract ezETH is Script { + using 
TypeCasts for address; + + string ETHEREUM_RPC_URL = vm.envString("ETHEREUM_RPC_URL"); + string BLAST_RPC_URL = vm.envString("BLAST_RPC_URL"); + + uint256 ethereumFork; + uint32 ethereumDomainId = 1; + address ethereumMailbox = 0xc005dc82818d67AF737725bD4bf75435d065D239; + address ethereumLockbox = 0xC8140dA31E6bCa19b287cC35531c2212763C2059; + + uint256 blastFork; + uint32 blastDomainId = 81457; + address blastXERC20 = 0x2416092f143378750bb29b79eD961ab195CcEea5; + address blastMailbox = 0x3a867fCfFeC2B790970eeBDC9023E75B0a172aa7; + + uint256 amount = 100; + + function setUp() public { + ethereumFork = vm.createFork(ETHEREUM_RPC_URL); + blastFork = vm.createFork(BLAST_RPC_URL); + } + + function run() external { + address deployer = address(this); + bytes32 recipient = deployer.addressToBytes32(); + bytes memory tokenMessage = TokenMessage.format(recipient, amount, ""); + vm.selectFork(ethereumFork); + HypXERC20Lockbox hypXERC20Lockbox = new HypXERC20Lockbox( + ethereumLockbox, + ethereumMailbox + ); + ProxyAdmin ethAdmin = new ProxyAdmin(); + TransparentUpgradeableProxy ethProxy = new TransparentUpgradeableProxy( + address(hypXERC20Lockbox), + address(ethAdmin), + abi.encodeCall( + HypXERC20Lockbox.initialize, + (address(0), address(0), deployer) + ) + ); + hypXERC20Lockbox = HypXERC20Lockbox(address(ethProxy)); + + vm.selectFork(blastFork); + HypXERC20 hypXERC20 = new HypXERC20(blastXERC20, blastMailbox); + ProxyAdmin blastAdmin = new ProxyAdmin(); + TransparentUpgradeableProxy blastProxy = new TransparentUpgradeableProxy( + address(hypXERC20), + address(blastAdmin), + abi.encodeCall( + HypERC20Collateral.initialize, + (address(0), address(0), deployer) + ) + ); + hypXERC20 = HypXERC20(address(blastProxy)); + hypXERC20.enrollRemoteRouter( + ethereumDomainId, + address(hypXERC20Lockbox).addressToBytes32() + ); + + // grant `amount` mint and burn limit to warp route + vm.prank(IXERC20(blastXERC20).owner()); + IXERC20(blastXERC20).setLimits(address(hypXERC20), amount, amount); + + // test sending `amount` on warp route + vm.prank(0x7BE481D464CAD7ad99500CE8A637599eB8d0FCDB); // ezETH whale + IXERC20(blastXERC20).transfer(address(this), amount); + IXERC20(blastXERC20).approve(address(hypXERC20), amount); + uint256 value = hypXERC20.quoteGasPayment(ethereumDomainId); + hypXERC20.transferRemote{value: value}( + ethereumDomainId, + recipient, + amount + ); + + // test receiving `amount` on warp route + vm.prank(blastMailbox); + hypXERC20.handle( + ethereumDomainId, + address(hypXERC20Lockbox).addressToBytes32(), + tokenMessage + ); + + vm.selectFork(ethereumFork); + hypXERC20Lockbox.enrollRemoteRouter( + blastDomainId, + address(hypXERC20).addressToBytes32() + ); + + // grant `amount` mint and burn limit to warp route + IXERC20 ethereumXERC20 = hypXERC20Lockbox.xERC20(); + vm.prank(ethereumXERC20.owner()); + ethereumXERC20.setLimits(address(hypXERC20Lockbox), amount, amount); + + // test sending `amount` on warp route + IERC20 erc20 = IXERC20Lockbox(ethereumLockbox).ERC20(); + vm.prank(ethereumLockbox); + erc20.transfer(address(this), amount); + erc20.approve(address(hypXERC20Lockbox), amount); + hypXERC20Lockbox.transferRemote(blastDomainId, recipient, amount); + + // test receiving `amount` on warp route + vm.prank(ethereumMailbox); + hypXERC20Lockbox.handle( + blastDomainId, + address(hypXERC20).addressToBytes32(), + tokenMessage + ); + } +} diff --git a/solidity/test/AnvilRPC.sol b/solidity/test/AnvilRPC.sol new file mode 100644 index 000000000..eb1be413a --- /dev/null +++ 
b/solidity/test/AnvilRPC.sol @@ -0,0 +1,107 @@ +// SPDX-License-Identifier: MIT OR Apache-2.0 +pragma solidity >=0.8.0; + +import "forge-std/Vm.sol"; +import {Strings} from "@openzeppelin/contracts/utils/Strings.sol"; + +// see https://book.getfoundry.sh/reference/anvil/#supported-rpc-methods +library AnvilRPC { + using Strings for address; + using Strings for uint256; + + using AnvilRPC for string; + using AnvilRPC for string[1]; + using AnvilRPC for string[2]; + using AnvilRPC for string[3]; + + Vm private constant vm = + Vm(address(uint160(uint256(keccak256("hevm cheat code"))))); + + string private constant OPEN_ARRAY = "["; + string private constant CLOSE_ARRAY = "]"; + string private constant COMMA = ","; + string private constant EMPTY_ARRAY = "[]"; + + function escaped( + string memory value + ) internal pure returns (string memory) { + return string.concat(ESCAPED_QUOTE, value, ESCAPED_QUOTE); + } + + function toString( + string[1] memory values + ) internal pure returns (string memory) { + return string.concat(OPEN_ARRAY, values[0], CLOSE_ARRAY); + } + + function toString( + string[2] memory values + ) internal pure returns (string memory) { + return + string.concat(OPEN_ARRAY, values[0], COMMA, values[1], CLOSE_ARRAY); + } + + function toString( + string[3] memory values + ) internal pure returns (string memory) { + return + string.concat( + OPEN_ARRAY, + values[0], + COMMA, + values[1], + COMMA, + values[2], + CLOSE_ARRAY + ); + } + + function impersonateAccount(address account) internal { + vm.rpc( + "anvil_impersonateAccount", + [account.toHexString().escaped()].toString() + ); + } + + function setBalance(address account, uint256 balance) internal { + vm.rpc( + "anvil_setBalance", + [account.toHexString().escaped(), balance.toString()].toString() + ); + } + + function setCode(address account, bytes memory code) internal { + vm.rpc( + "anvil_setCode", + [account.toHexString().escaped(), string(code).escaped()].toString() + ); + } + + function setStorageAt( + address account, + uint256 slot, + uint256 value + ) internal { + vm.rpc( + "anvil_setStorageAt", + [ + account.toHexString().escaped(), + slot.toHexString(), + value.toHexString() + ].toString() + ); + } + + function resetFork(string memory rpcUrl) internal { + string memory obj = string.concat( + // solhint-disable-next-line quotes + '{"forking":{"jsonRpcUrl":', + string(rpcUrl).escaped(), + "}}" + ); + vm.rpc("anvil_reset", [obj].toString()); + } +} + +// here to prevent syntax highlighting from breaking +string constant ESCAPED_QUOTE = '"'; diff --git a/solidity/test/InterchainAccountRouter.t.sol b/solidity/test/InterchainAccountRouter.t.sol index d96685706..b610532c1 100644 --- a/solidity/test/InterchainAccountRouter.t.sol +++ b/solidity/test/InterchainAccountRouter.t.sol @@ -479,6 +479,7 @@ contract InterchainAccountRouterTest is Test { uint64 payment, bytes32 data ) public { + CallLib.Call[] memory calls = getCalls(data); vm.assume(payment < gasLimit * igp.gasPrice()); // arrange bytes memory metadata = StandardHookMetadata.formatMetadata( @@ -495,11 +496,7 @@ contract InterchainAccountRouterTest is Test { // act vm.expectRevert("IGP: insufficient interchain gas payment"); - originRouter.callRemote{value: payment}( - destination, - getCalls(data), - metadata - ); + originRouter.callRemote{value: payment}(destination, calls, metadata); } function testFuzz_callRemoteWithOverrides_default(bytes32 data) public { diff --git a/solidity/test/token/HypERC20.t.sol b/solidity/test/token/HypERC20.t.sol index 
82c5359b7..9191d3225 100644 --- a/solidity/test/token/HypERC20.t.sol +++ b/solidity/test/token/HypERC20.t.sol @@ -19,13 +19,15 @@ import {TransparentUpgradeableProxy} from "@openzeppelin/contracts/proxy/transpa import {Mailbox} from "../../contracts/Mailbox.sol"; import {TypeCasts} from "../../contracts/libs/TypeCasts.sol"; import {TestMailbox} from "../../contracts/test/TestMailbox.sol"; -import {XERC20Test, FiatTokenTest, ERC20Test} from "../../contracts/test/ERC20Test.sol"; +import {XERC20LockboxTest, XERC20Test, FiatTokenTest, ERC20Test} from "../../contracts/test/ERC20Test.sol"; import {TestPostDispatchHook} from "../../contracts/test/TestPostDispatchHook.sol"; import {TestInterchainGasPaymaster} from "../../contracts/test/TestInterchainGasPaymaster.sol"; import {GasRouter} from "../../contracts/client/GasRouter.sol"; +import {IPostDispatchHook} from "../../contracts/interfaces/hooks/IPostDispatchHook.sol"; import {HypERC20} from "../../contracts/token/HypERC20.sol"; import {HypERC20Collateral} from "../../contracts/token/HypERC20Collateral.sol"; +import {HypXERC20Lockbox} from "../../contracts/token/extensions/HypXERC20Lockbox.sol"; import {IXERC20} from "../../contracts/token/interfaces/IXERC20.sol"; import {IFiatToken} from "../../contracts/token/interfaces/IFiatToken.sol"; import {HypXERC20} from "../../contracts/token/extensions/HypXERC20.sol"; @@ -197,38 +199,38 @@ abstract contract HypTokenTest is Test { function _performRemoteTransferWithHook( uint256 _msgValue, - uint256 _amount + uint256 _amount, + address _hook, + bytes memory _hookMetadata ) internal returns (bytes32 messageId) { vm.prank(ALICE); messageId = localToken.transferRemote{value: _msgValue}( DESTINATION, BOB.addressToBytes32(), _amount, - bytes(""), - address(noopHook) + _hookMetadata, + address(_hook) ); _processTransfers(BOB, _amount); assertEq(remoteToken.balanceOf(BOB), _amount); } - function testTransfer_withHookSpecified() public { + function testTransfer_withHookSpecified( + uint256 fee, + bytes calldata metadata + ) public { + TestPostDispatchHook hook = new TestPostDispatchHook(); + hook.setFee(fee); + vm.prank(ALICE); primaryToken.approve(address(localToken), TRANSFER_AMT); bytes32 messageId = _performRemoteTransferWithHook( REQUIRED_VALUE, - TRANSFER_AMT + TRANSFER_AMT, + address(hook), + metadata ); - assertTrue(noopHook.messageDispatched(messageId)); - /// @dev Using this test would be ideal, but vm.expectCall with nested functions more than 1 level deep is broken - /// In other words, the call graph of Route.transferRemote() -> Mailbox.dispatch() -> Hook.postDispatch() does not work with expectCall - // vm.expectCall( - // address(noopHook), - // abi.encodeCall( - // IPostDispatchHook.postDispatch, - // (bytes(""), outboundMessage) - // ) - // ); - /// @dev Also, using expectedCall with Mailbox.dispatch() won't work either because overloaded function selection is broken, see https://github.com/ethereum/solidity/issues/13815 + assertTrue(hook.messageDispatched(messageId)); } function testBenchmark_overheadGasUsage() public virtual { @@ -442,6 +444,80 @@ contract HypXERC20Test is HypTokenTest { } } +contract HypXERC20LockboxTest is HypTokenTest { + using TypeCasts for address; + HypXERC20Lockbox internal xerc20Lockbox; + + function setUp() public override { + super.setUp(); + + XERC20LockboxTest lockbox = new XERC20LockboxTest( + NAME, + SYMBOL, + TOTAL_SUPPLY, + DECIMALS + ); + primaryToken = ERC20Test(address(lockbox.ERC20())); + + localToken = new HypXERC20Lockbox( + address(lockbox), + 
address(localMailbox) + ); + xerc20Lockbox = HypXERC20Lockbox(address(localToken)); + + xerc20Lockbox.enrollRemoteRouter( + DESTINATION, + address(remoteToken).addressToBytes32() + ); + + primaryToken.transfer(ALICE, 1000e18); + + _enrollRemoteTokenRouter(); + } + + uint256 constant MAX_INT = 2 ** 256 - 1; + + function testApproval() public { + assertEq( + xerc20Lockbox.xERC20().allowance( + address(localToken), + address(xerc20Lockbox.lockbox()) + ), + MAX_INT + ); + assertEq( + xerc20Lockbox.wrappedToken().allowance( + address(localToken), + address(xerc20Lockbox.lockbox()) + ), + MAX_INT + ); + } + + function testRemoteTransfer() public { + uint256 balanceBefore = localToken.balanceOf(ALICE); + + vm.prank(ALICE); + primaryToken.approve(address(localToken), TRANSFER_AMT); + vm.expectCall( + address(xerc20Lockbox.xERC20()), + abi.encodeCall(IXERC20.burn, (address(localToken), TRANSFER_AMT)) + ); + _performRemoteTransferWithEmit(REQUIRED_VALUE, TRANSFER_AMT, 0); + assertEq(localToken.balanceOf(ALICE), balanceBefore - TRANSFER_AMT); + } + + function testHandle() public { + uint256 balanceBefore = localToken.balanceOf(ALICE); + vm.expectCall( + address(xerc20Lockbox.xERC20()), + abi.encodeCall(IXERC20.mint, (address(localToken), TRANSFER_AMT)) + ); + _handleLocalTransfer(TRANSFER_AMT); + assertEq(localToken.balanceOf(ALICE), balanceBefore + TRANSFER_AMT); + } +} + contract HypFiatTokenTest is HypTokenTest { using TypeCasts for address; HypFiatToken internal fiatToken; diff --git a/tools/grafana/easy-relayer-dashboard-external.json b/tools/grafana/easy-relayer-dashboard-external.json new file mode 100644 index 000000000..49efaaf2f --- /dev/null +++ b/tools/grafana/easy-relayer-dashboard-external.json @@ -0,0 +1,436 @@ +{ + "annotations": { + "list": [ + { + "builtIn": 1, + "datasource": { + "type": "grafana", + "uid": "-- Grafana --" + }, + "enable": true, + "hide": true, + "iconColor": "rgba(0, 211, 255, 1)", + "name": "Annotations & Alerts", + "type": "dashboard" + } + ] + }, + "editable": true, + "fiscalYearStartMonth": 0, + "graphTooltip": 0, + "id": 66, + "links": [], + "panels": [ + { + "datasource": { + "type": "prometheus", + "uid": "grafanacloud-prom" + }, + "description": "There shouldn't be abrupt changes, especially for a specific pair", + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "drawStyle": "bars", + "fillOpacity": 78, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "auto", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "normal" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + } + }, + "overrides": [] + }, + "gridPos": { + "h": 9, + "w": 12, + "x": 0, + "y": 0 + }, + "id": 2, + "options": { + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "maxHeight": 600, + "mode": "single", + "sort": "none" + } + }, + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "grafanacloud-prom" + }, + "editorMode": "code", + 
"expr": "sum by (origin,remote)(round(increase(hyperlane_messages_processed_count[5m])))", + "hide": false, + "interval": "", + "legendFormat": "{{hyperlane_deployment}}: {{origin}}->{{remote}}", + "range": true, + "refId": "A" + } + ], + "title": "Messages Processed", + "type": "timeseries" + }, + { + "datasource": { + "type": "prometheus", + "uid": "grafanacloud-prom" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "drawStyle": "line", + "fillOpacity": 0, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "auto", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + } + }, + "overrides": [] + }, + "gridPos": { + "h": 9, + "w": 12, + "x": 0, + "y": 9 + }, + "id": 1, + "options": { + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "maxHeight": 600, + "mode": "single", + "sort": "none" + } + }, + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "grafanacloud-prom" + }, + "editorMode": "code", + "expr": "sum by (remote, queue_name)(\n hyperlane_submitter_queue_length{queue_name=\"prepare_queue\"}\n)", + "interval": "", + "legendFormat": "{{hyperlane_deployment }} - {{remote}}", + "range": true, + "refId": "A" + } + ], + "title": "Prepare queues (all)", + "type": "timeseries" + }, + { + "datasource": { + "type": "prometheus", + "uid": "grafanacloud-prom" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "drawStyle": "line", + "fillOpacity": 0, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "auto", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + } + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 12, + "x": 0, + "y": 18 + }, + "id": 10, + "options": { + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "maxHeight": 600, + "mode": "single", + "sort": "none" + } + }, + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "grafanacloud-prom" + }, + "disableTextWrap": false, + "editorMode": "code", + "expr": "sum by(remote, queue_name) (hyperlane_submitter_queue_length{queue_name=\"submit_queue\"})", + "fullMetaSearch": false, + "includeNullMetadata": true, + "interval": "", + "legendFormat": "{{remote}}", + "range": true, + "refId": "A", + 
"useBackend": false + } + ], + "title": "Submit Queues", + "type": "timeseries" + }, + { + "datasource": { + "type": "prometheus", + "uid": "grafanacloud-prom" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "drawStyle": "line", + "fillOpacity": 0, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "auto", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + } + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 12, + "x": 0, + "y": 26 + }, + "id": 8, + "options": { + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "maxHeight": 600, + "mode": "single", + "sort": "none" + } + }, + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "grafanacloud-prom" + }, + "disableTextWrap": false, + "editorMode": "code", + "expr": "sum by(remote, queue_name) (avg_over_time(hyperlane_submitter_queue_length{queue_name=\"confirm_queue\"}[20m]))", + "fullMetaSearch": false, + "includeNullMetadata": true, + "interval": "", + "legendFormat": "{{remote}}", + "range": true, + "refId": "A", + "useBackend": false + } + ], + "title": "Confirm Queues", + "type": "timeseries" + } + ], + "refresh": "1m", + "schemaVersion": 39, + "tags": [], + "templating": { + "list": [] + }, + "time": { + "from": "now-7d", + "to": "now" + }, + "timeRangeUpdatedDuringEditOrView": false, + "timepicker": {}, + "timezone": "browser", + "title": "Easy Dashboard (External Sharing Template)", + "uid": "afdf6ada6uzvgga", + "version": 5, + "weekStart": "" +} \ No newline at end of file diff --git a/typescript/ccip-server/CHANGELOG.md b/typescript/ccip-server/CHANGELOG.md index dfe9ec8e7..ee27ea446 100644 --- a/typescript/ccip-server/CHANGELOG.md +++ b/typescript/ccip-server/CHANGELOG.md @@ -1,10 +1,8 @@ # @hyperlane-xyz/ccip-server -## 4.0.0-alpha.2 +## 3.15.0 -## 4.0.0-alpha.0 - -## 4.0.0-alpha +## 3.14.0 ## 3.13.0 diff --git a/typescript/ccip-server/package.json b/typescript/ccip-server/package.json index eb64fae8f..c5e859ac2 100644 --- a/typescript/ccip-server/package.json +++ b/typescript/ccip-server/package.json @@ -1,6 +1,6 @@ { "name": "@hyperlane-xyz/ccip-server", - "version": "4.0.0-alpha.2", + "version": "3.15.0", "description": "CCIP server", "typings": "dist/index.d.ts", "typedocMain": "src/index.ts", diff --git a/typescript/cli/CHANGELOG.md b/typescript/cli/CHANGELOG.md index 19cdd7ead..be0e07baa 100644 --- a/typescript/cli/CHANGELOG.md +++ b/typescript/cli/CHANGELOG.md @@ -1,63 +1,28 @@ # @hyperlane-xyz/cli -## 4.0.0-alpha.2 +## 3.15.0 ### Minor Changes -- 4040db723: Fix createDefaultWarpIsmConfig to default to trusted relayer and fallback routing without prompts +- 51bfff683: Mint/burn limit checking for xERC20 bridging + Corrects CLI output for HypXERC20 and HypXERC20Lockbox deployments ### Patch Changes -- 3283eefd6: Removes default pattern for chain name when creating a new chain. 
-- 6b63c5d82: Adds deployment support for IsmConfig within a WarpRouteConfig -- Updated dependencies [6b63c5d82] - - @hyperlane-xyz/sdk@4.0.0-alpha.2 - - @hyperlane-xyz/utils@4.0.0-alpha.2 +- Updated dependencies [51bfff683] + - @hyperlane-xyz/sdk@3.15.0 + - @hyperlane-xyz/utils@3.15.0 -## 4.0.0-alpha.0 +## 3.14.0 ### Minor Changes -- 6db9fa9ad: Implement hyperlane warp deploy +- f4bbfcf08: AVS deployment on mainnet ### Patch Changes -- Updated dependencies [bdcbe1d16] -- Updated dependencies [6db9fa9ad] - - @hyperlane-xyz/sdk@4.0.0-alpha.0 - - @hyperlane-xyz/utils@4.0.0-alpha.0 - -## 4.0.0-alpha - -### Major Changes - -- 74c879fa1: Merge branch 'cli-2.0' into main. - -### Minor Changes - -- 1ec61debd: Support hook config objects in warp config -- 4663018fc: Implement hyperlane core config to return CoreConfig -- 84bc0bd7f: Adds 'hyperlane warp config'. -- b560bfc26: Adds 'hyperlane core config'. -- 642bc686d: Rename chain and config commands. Update hl core configure to prompt user for owner. -- ba4c9a7da: Add warp send in favor of send transfer. -- 7089c910f: Adds 'hyperlane warp read'. -- 1d0d1bb36: Implements `hyperlane core deploy` -- 44a2ffa1b: Adds 'hyperlane core read'. - -### Patch Changes - -- Updated dependencies [eb23e7729] -- Updated dependencies [1ec61debd] -- Updated dependencies [4663018fc] -- Updated dependencies [5e5886f2c] -- Updated dependencies [341b8affd] -- Updated dependencies [3dabcbdca] -- Updated dependencies [1d0d1bb36] -- Updated dependencies [74c879fa1] -- Updated dependencies [4bf7301ea] - - @hyperlane-xyz/sdk@4.0.0 - - @hyperlane-xyz/utils@4.0.0-alpha +- @hyperlane-xyz/sdk@3.14.0 +- @hyperlane-xyz/utils@3.14.0 ## 3.13.0 diff --git a/typescript/cli/package.json b/typescript/cli/package.json index 68dbb24a8..2c3cb62c4 100644 --- a/typescript/cli/package.json +++ b/typescript/cli/package.json @@ -1,13 +1,13 @@ { "name": "@hyperlane-xyz/cli", - "version": "4.0.0-alpha.2", + "version": "3.15.0", "description": "A command-line utility for common Hyperlane operations", "dependencies": { "@aws-sdk/client-kms": "^3.577.0", "@aws-sdk/client-s3": "^3.577.0", "@hyperlane-xyz/registry": "1.3.0", - "@hyperlane-xyz/sdk": "4.0.0-alpha.2", - "@hyperlane-xyz/utils": "4.0.0-alpha.2", + "@hyperlane-xyz/sdk": "3.15.0", + "@hyperlane-xyz/utils": "3.15.0", "@inquirer/prompts": "^3.0.0", "asn1.js": "^5.4.1", "bignumber.js": "^9.1.1", @@ -22,6 +22,8 @@ "zod-validation-error": "^3.3.0" }, "devDependencies": { + "@ethersproject/abi": "*", + "@ethersproject/providers": "*", "@types/mocha": "^10.0.1", "@types/node": "^18.14.5", "@types/yargs": "^17.0.24", diff --git a/typescript/cli/src/avs/config.ts b/typescript/cli/src/avs/config.ts index 681ed9dee..79715a676 100644 --- a/typescript/cli/src/avs/config.ts +++ b/typescript/cli/src/avs/config.ts @@ -16,4 +16,10 @@ export const avsAddresses: ChainMap = { ecdsaStakeRegistry: '0xFfa913705484C9BAea32Ffe9945BeA099A1DFF72', hyperlaneServiceManager: '0xc76E477437065093D353b7d56c81ff54D167B0Ab', }, + ethereum: { + avsDirectory: '0x135dda560e946695d6f155dacafc6f1f25c1f5af', + proxyAdmin: '0x75EE15Ee1B4A75Fa3e2fDF5DF3253c25599cc659', + ecdsaStakeRegistry: '0x272CF0BB70D3B4f79414E0823B426d2EaFd48910', + hyperlaneServiceManager: '0xe8E59c6C8B56F2c178f63BCFC4ce5e5e2359c8fc', + }, }; diff --git a/typescript/cli/src/avs/stakeRegistry.ts b/typescript/cli/src/avs/stakeRegistry.ts index a159e8e2f..d1bdd0716 100644 --- a/typescript/cli/src/avs/stakeRegistry.ts +++ b/typescript/cli/src/avs/stakeRegistry.ts @@ -24,12 +24,12 @@ export async function 
registerOperatorWithSignature({ context, chain, operatorKeyPath, - avsSigningKey, + avsSigningKeyAddress, }: { context: WriteCommandContext; chain: ChainName; operatorKeyPath: string; - avsSigningKey: Address; + avsSigningKeyAddress: Address; }) { const { multiProvider } = context; @@ -67,13 +67,13 @@ export async function registerOperatorWithSignature({ } log( - `Registering operator ${operatorAsSigner.address} attesting ${avsSigningKey} with signature on ${chain}...`, + `Registering operator ${operatorAsSigner.address} attesting ${avsSigningKeyAddress} with signature on ${chain}...`, ); await multiProvider.handleTx( chain, ecdsaStakeRegistry.registerOperatorWithSignature( operatorSignature, - avsSigningKey, + avsSigningKeyAddress, ), ); logBlue(`Operator ${operatorAsSigner.address} registered to Hyperlane AVS`); @@ -119,7 +119,7 @@ async function readOperatorFromEncryptedJson( message: 'Enter the password for the operator key file: ', }); - return Wallet.fromEncryptedJson(encryptedJson, keyFilePassword); + return await Wallet.fromEncryptedJson(encryptedJson, keyFilePassword); } async function getOperatorSignature( diff --git a/typescript/cli/src/commands/avs.ts b/typescript/cli/src/commands/avs.ts index 04a51b6b6..ce238df62 100644 --- a/typescript/cli/src/commands/avs.ts +++ b/typescript/cli/src/commands/avs.ts @@ -40,7 +40,7 @@ export const registrationOptions: { [k: string]: Options } = { description: 'Path to the operator key file', demandOption: true, }, - avsSigningKey: { + avsSigningKeyAddress: { type: 'string', description: 'Address of the AVS signing key', demandOption: true, @@ -50,17 +50,22 @@ export const registrationOptions: { [k: string]: Options } = { const registerCommand: CommandModuleWithWriteContext<{ chain: ChainName; operatorKeyPath: string; - avsSigningKey: Address; + avsSigningKeyAddress: Address; }> = { command: 'register', describe: 'Register operator with the AVS', builder: registrationOptions, - handler: async ({ context, chain, operatorKeyPath, avsSigningKey }) => { + handler: async ({ + context, + chain, + operatorKeyPath, + avsSigningKeyAddress, + }) => { await registerOperatorWithSignature({ context, chain, operatorKeyPath, - avsSigningKey, + avsSigningKeyAddress, }); process.exit(0); }, diff --git a/typescript/cli/src/deploy/warp.ts b/typescript/cli/src/deploy/warp.ts index 6af9f6758..42331b08a 100644 --- a/typescript/cli/src/deploy/warp.ts +++ b/typescript/cli/src/deploy/warp.ts @@ -144,7 +144,7 @@ async function executeDeploy(params: DeployParams) { const deployedContracts = await deployer.deploy(modifiedConfig); - logGreen('✅ Hyp token deployments complete'); + logGreen('✅ Warp contract deployments complete'); const warpCoreConfig = await getWarpCoreConfig(params, deployedContracts); if (!isDryRun) { diff --git a/typescript/cli/src/validator/address.ts b/typescript/cli/src/validator/address.ts index a0a2cc4ea..d816fcb1f 100644 --- a/typescript/cli/src/validator/address.ts +++ b/typescript/cli/src/validator/address.ts @@ -141,7 +141,7 @@ function getEthereumAddress(publicKey: Buffer): string { async function getAccessKeyId(skipConfirmation: boolean) { if (skipConfirmation) throw new Error('No AWS access key ID set.'); else - return input({ + return await input({ message: 'Please enter AWS access key ID or use the AWS_ACCESS_KEY_ID environment variable.', }); @@ -150,7 +150,7 @@ async function getAccessKeyId(skipConfirmation: boolean) { async function getSecretAccessKey(skipConfirmation: boolean) { if (skipConfirmation) throw new Error('No AWS secret 
access key set.'); else - return input({ + return await input({ message: 'Please enter AWS secret access key or use the AWS_SECRET_ACCESS_KEY environment variable.', }); @@ -159,7 +159,7 @@ async function getSecretAccessKey(skipConfirmation: boolean) { async function getRegion(skipConfirmation: boolean) { if (skipConfirmation) throw new Error('No AWS region set.'); else - return input({ + return await input({ message: 'Please enter AWS region or use the AWS_REGION environment variable.', }); diff --git a/typescript/cli/src/version.ts b/typescript/cli/src/version.ts index eef5a6f42..eb5b8185d 100644 --- a/typescript/cli/src/version.ts +++ b/typescript/cli/src/version.ts @@ -1 +1 @@ -export const VERSION = '4.0.0-alpha.2'; +export const VERSION = '3.15.0'; diff --git a/typescript/helloworld/CHANGELOG.md b/typescript/helloworld/CHANGELOG.md index e69de29bb..1ea1e9cc3 100644 --- a/typescript/helloworld/CHANGELOG.md +++ b/typescript/helloworld/CHANGELOG.md @@ -0,0 +1,236 @@ +# @hyperlane-xyz/helloworld + +## 3.15.0 + +### Patch Changes + +- Updated dependencies [51bfff683] + - @hyperlane-xyz/sdk@3.15.0 + - @hyperlane-xyz/core@3.15.0 + +## 3.14.0 + +### Patch Changes + +- Updated dependencies [a8a68f6f6] + - @hyperlane-xyz/core@3.14.0 + - @hyperlane-xyz/sdk@3.14.0 + +## 3.13.0 + +### Patch Changes + +- b6b26e2bb: fix: minor change was breaking in registry export +- Updated dependencies [39ea7cdef] +- Updated dependencies [babe816f8] +- Updated dependencies [b440d98be] +- Updated dependencies [0cf692e73] + - @hyperlane-xyz/sdk@3.13.0 + - @hyperlane-xyz/core@3.13.0 + +## 3.12.0 + +### Patch Changes + +- Updated dependencies [eba393680] +- Updated dependencies [69de68a66] + - @hyperlane-xyz/sdk@3.12.0 + - @hyperlane-xyz/core@3.12.0 + +## 3.11.1 + +### Patch Changes + +- Updated dependencies [c900da187] + - @hyperlane-xyz/sdk@3.11.1 + - @hyperlane-xyz/core@3.11.1 + +## 3.11.0 + +### Minor Changes + +- b63714ede: Convert all public hyperlane npm packages from CJS to pure ESM + +### Patch Changes + +- Updated dependencies [811ecfbba] +- Updated dependencies [f8b6ea467] +- Updated dependencies [d37cbab72] +- Updated dependencies [b6fdf2f7f] +- Updated dependencies [a86a8296b] +- Updated dependencies [2db77f177] +- Updated dependencies [3a08e31b6] +- Updated dependencies [917266dce] +- Updated dependencies [aab63d466] +- Updated dependencies [2e439423e] +- Updated dependencies [b63714ede] +- Updated dependencies [3528b281e] +- Updated dependencies [450e8e0d5] +- Updated dependencies [af2634207] + - @hyperlane-xyz/sdk@3.11.0 + - @hyperlane-xyz/core@3.11.0 + +## 3.10.0 + +### Minor Changes + +- 96485144a: SDK support for ICA deployment and operation. +- 4e7a43be6: Replace Debug logger with Pino + +### Patch Changes + +- Updated dependencies [96485144a] +- Updated dependencies [38358ecec] +- Updated dependencies [ed0d4188c] +- Updated dependencies [4e7a43be6] + - @hyperlane-xyz/sdk@3.10.0 + - @hyperlane-xyz/core@3.10.0 + +## 3.9.0 + +### Patch Changes + +- Updated dependencies [11f257ebc] + - @hyperlane-xyz/sdk@3.9.0 + - @hyperlane-xyz/core@3.9.0 + +## 3.8.2 + +### Patch Changes + +- @hyperlane-xyz/core@3.8.2 +- @hyperlane-xyz/sdk@3.8.2 + +## 3.8.1 + +### Patch Changes + +- Updated dependencies [5daaae274] + - @hyperlane-xyz/sdk@3.8.1 + - @hyperlane-xyz/core@3.8.1 + +## 3.8.0 + +### Minor Changes + +- 9681df08d: Enabled verification of contracts as part of the deployment flow. + + - Solidity build artifact is now included as part of the `@hyperlane-xyz/core` package. 
+ - Updated the `HyperlaneDeployer` to perform contract verification immediately after deploying a contract. A default verifier is instantiated using the core build artifact. + - Updated the `HyperlaneIsmFactory` to re-use the `HyperlaneDeployer` for deployment where possible. + - Minor logging improvements throughout deployers. + +### Patch Changes + +- Updated dependencies [9681df08d] +- Updated dependencies [9681df08d] +- Updated dependencies [9681df08d] +- Updated dependencies [9681df08d] +- Updated dependencies [9681df08d] +- Updated dependencies [9681df08d] +- Updated dependencies [9681df08d] +- Updated dependencies [9681df08d] +- Updated dependencies [9681df08d] +- Updated dependencies [9681df08d] +- Updated dependencies [9681df08d] +- Updated dependencies [9681df08d] + - @hyperlane-xyz/sdk@3.8.0 + - @hyperlane-xyz/core@3.8.0 + +## 3.7.0 + +### Patch Changes + +- Updated dependencies [6f464eaed] +- Updated dependencies [87151c62b] +- Updated dependencies [ab17af5f7] +- Updated dependencies [7b40232af] +- Updated dependencies [54aeb6420] + - @hyperlane-xyz/sdk@3.7.0 + - @hyperlane-xyz/core@3.7.0 + +## 3.6.2 + +### Patch Changes + +- @hyperlane-xyz/core@3.6.2 +- @hyperlane-xyz/sdk@3.6.2 + +## 3.6.1 + +### Patch Changes + +- Updated dependencies [ae4476ad0] +- Updated dependencies [f3b7ddb69] +- Updated dependencies [e4e4f93fc] + - @hyperlane-xyz/sdk@3.6.1 + - @hyperlane-xyz/core@3.6.1 + +## 3.6.0 + +### Patch Changes + +- Updated dependencies [67a6d971e] +- Updated dependencies [612d4163a] +- Updated dependencies [0488ef31d] +- Updated dependencies [8d8ba3f7a] + - @hyperlane-xyz/sdk@3.6.0 + - @hyperlane-xyz/core@3.6.0 + +## 3.5.1 + +### Patch Changes + +- Updated dependencies [a04454d6d] + - @hyperlane-xyz/sdk@3.5.1 + - @hyperlane-xyz/core@3.5.1 + +## 3.5.0 + +### Patch Changes + +- Updated dependencies [655b6a0cd] +- Updated dependencies [08ba0d32b] +- Updated dependencies [f7d285e3a] + - @hyperlane-xyz/sdk@3.5.0 + - @hyperlane-xyz/core@3.5.0 + +## 3.4.0 + +### Patch Changes + +- Updated dependencies [7919417ec] +- Updated dependencies [fd4fc1898] +- Updated dependencies [e06fe0b32] +- Updated dependencies [b832e57ae] +- Updated dependencies [79c96d718] + - @hyperlane-xyz/sdk@3.4.0 + - @hyperlane-xyz/core@3.4.0 + +## 3.3.0 + +### Patch Changes + +- Updated dependencies [7e620c9df] +- Updated dependencies [350175581] +- Updated dependencies [9f2c7ce7c] + - @hyperlane-xyz/sdk@3.3.0 + - @hyperlane-xyz/core@3.3.0 + +## 3.2.0 + +### Patch Changes + +- Updated dependencies [df34198d4] +- Updated dependencies [df693708b] + - @hyperlane-xyz/core@3.2.0 + - @hyperlane-xyz/sdk@3.2.0 + +## 3.1.10 + +### Patch Changes + +- c9e0aedae: Improve client side StandardHookMetadata library interface +- Updated dependencies [c9e0aedae] + - @hyperlane-xyz/core@3.1.10 + - @hyperlane-xyz/sdk@3.1.10 diff --git a/typescript/helloworld/contracts/HelloWorld.sol b/typescript/helloworld/contracts/HelloWorld.sol index b3bdb16fd..fc81207c5 100644 --- a/typescript/helloworld/contracts/HelloWorld.sol +++ b/typescript/helloworld/contracts/HelloWorld.sol @@ -83,12 +83,6 @@ contract HelloWorld is Router { } // ============ Internal functions ============ - function _metadata( - uint32 /*_destinationDomain*/ - ) internal view override returns (bytes memory) { - return StandardHookMetadata.overrideGasLimit(HANDLE_GAS_AMOUNT); - } - /** * @notice Handles a message from a remote router. * @dev Only called for messages sent from a remote router, as enforced by Router.sol. 
diff --git a/typescript/helloworld/package.json b/typescript/helloworld/package.json index e35e45bd7..22138b9bc 100644 --- a/typescript/helloworld/package.json +++ b/typescript/helloworld/package.json @@ -1,11 +1,11 @@ { "name": "@hyperlane-xyz/helloworld", "description": "A basic skeleton of an Hyperlane app", - "version": "4.0.0-alpha.2", + "version": "3.15.0", "dependencies": { - "@hyperlane-xyz/core": "4.0.0-alpha.2", + "@hyperlane-xyz/core": "3.15.0", "@hyperlane-xyz/registry": "1.3.0", - "@hyperlane-xyz/sdk": "4.0.0-alpha.2", + "@hyperlane-xyz/sdk": "3.15.0", "@openzeppelin/contracts-upgradeable": "^4.9.3", "ethers": "^5.7.2" }, diff --git a/typescript/infra/CHANGELOG.md b/typescript/infra/CHANGELOG.md index e1da9e3b3..ed68044f8 100644 --- a/typescript/infra/CHANGELOG.md +++ b/typescript/infra/CHANGELOG.md @@ -1,5 +1,14 @@ # @hyperlane-xyz/infra +## 3.15.0 + +### Patch Changes + +- Updated dependencies [51bfff683] + - @hyperlane-xyz/sdk@3.15.0 + - @hyperlane-xyz/helloworld@3.15.0 + - @hyperlane-xyz/utils@3.15.0 + ## 3.14.0 ### Patch Changes diff --git a/typescript/infra/config/registry.ts b/typescript/infra/config/registry.ts index 3b05cf47e..8c8be46bb 100644 --- a/typescript/infra/config/registry.ts +++ b/typescript/infra/config/registry.ts @@ -39,10 +39,18 @@ export function setRegistry(reg: FileSystemRegistry) { registry = reg; } +/** + * Gets a FileSystemRegistry whose contents are found at the environment + * variable `REGISTRY_URI`, or `DEFAULT_REGISTRY_URI` if no env var is specified. + * This registry will not have any environment-specific overrides applied, + * and is useful for synchronous registry operations that do not require + * any overrides. + * @returns A FileSystemRegistry. + */ export function getRegistry(): FileSystemRegistry { if (!registry) { const registryUri = process.env.REGISTRY_URI || DEFAULT_REGISTRY_URI; - rootLogger.info('Using registry URI:', registryUri); + rootLogger.info({ registryUri }, 'Using registry URI'); registry = new FileSystemRegistry({ uri: registryUri, logger: rootLogger.child({ module: 'infra-registry' }), diff --git a/typescript/infra/package.json b/typescript/infra/package.json index 66ecf3131..daede08e4 100644 --- a/typescript/infra/package.json +++ b/typescript/infra/package.json @@ -1,7 +1,7 @@ { "name": "@hyperlane-xyz/infra", "description": "Infrastructure utilities for the Hyperlane Network", - "version": "4.0.0-alpha.2", + "version": "3.15.0", "dependencies": { "@arbitrum/sdk": "^3.0.0", "@aws-sdk/client-iam": "^3.74.0", @@ -13,10 +13,10 @@ "@ethersproject/hardware-wallets": "^5.7.0", "@ethersproject/providers": "^5.7.2", "@google-cloud/secret-manager": "^5.5.0", - "@hyperlane-xyz/helloworld": "4.0.0-alpha.2", + "@hyperlane-xyz/helloworld": "3.15.0", "@hyperlane-xyz/registry": "1.3.0", - "@hyperlane-xyz/sdk": "4.0.0-alpha.2", - "@hyperlane-xyz/utils": "4.0.0-alpha.2", + "@hyperlane-xyz/sdk": "3.15.0", + "@hyperlane-xyz/utils": "3.15.0", "@nomiclabs/hardhat-etherscan": "^3.0.3", "@solana/web3.js": "^1.78.0", "asn1.js": "5.4.1", diff --git a/typescript/infra/scripts/secret-rpc-urls/get-rpc-urls.ts b/typescript/infra/scripts/secret-rpc-urls/get-rpc-urls.ts new file mode 100644 index 000000000..0f2c9fd5f --- /dev/null +++ b/typescript/infra/scripts/secret-rpc-urls/get-rpc-urls.ts @@ -0,0 +1,26 @@ +import { + getSecretRpcEndpoints, + secretRpcEndpointsExist, +} from '../../src/agents/index.js'; +import { getArgs, withChainRequired } from '../agent-utils.js'; + +async function main() { + const { environment, chain } = await 
withChainRequired(getArgs()).argv; + const secretExists = await secretRpcEndpointsExist(environment, chain); + if (!secretExists) { + console.log( + `No secret rpc urls found for ${chain} in ${environment} environment`, + ); + process.exit(0); + } + + const secrets = await getSecretRpcEndpoints(environment, chain); + console.log(secrets); +} + +main() + .then() + .catch((e) => { + console.error(e); + process.exit(1); + }); diff --git a/typescript/infra/scripts/secret-rpc-urls/set-rpc-urls.ts b/typescript/infra/scripts/secret-rpc-urls/set-rpc-urls.ts new file mode 100644 index 000000000..78881a01e --- /dev/null +++ b/typescript/infra/scripts/secret-rpc-urls/set-rpc-urls.ts @@ -0,0 +1,120 @@ +import { confirm } from '@inquirer/prompts'; +import { ethers } from 'ethers'; + +import { + getSecretRpcEndpoints, + getSecretRpcEndpointsLatestVersionName, + secretRpcEndpointsExist, + setSecretRpcEndpoints, +} from '../../src/agents/index.js'; +import { disableGCPSecretVersion } from '../../src/utils/gcloud.js'; +import { isEthereumProtocolChain } from '../../src/utils/utils.js'; +import { getArgs, withChainRequired, withRpcUrls } from '../agent-utils.js'; + +async function testProviders(rpcUrlsArray: string[]): Promise { + let providersSucceeded = true; + for (const url of rpcUrlsArray) { + const provider = new ethers.providers.StaticJsonRpcProvider(url); + try { + const blockNumber = await provider.getBlockNumber(); + console.log(`Valid provider for ${url} with block number ${blockNumber}`); + } catch (e) { + console.error(`Provider failed: ${url}`); + providersSucceeded = false; + } + } + + return providersSucceeded; +} + +async function main() { + const { environment, chain, rpcUrls } = await withRpcUrls( + withChainRequired(getArgs()), + ).argv; + + const rpcUrlsArray = rpcUrls + .split(/,\s*/) + .filter(Boolean) // filter out empty strings + .map((url) => url.trim()); + + if (!rpcUrlsArray.length) { + console.error('No rpc urls provided, Exiting.'); + process.exit(1); + } + + const secretPayload = JSON.stringify(rpcUrlsArray); + + const secretExists = await secretRpcEndpointsExist(environment, chain); + if (!secretExists) { + console.log( + `No secret rpc urls found for ${chain} in ${environment} environment\n`, + ); + } else { + const currentSecrets = await getSecretRpcEndpoints(environment, chain); + console.log( + `Current secrets found for ${chain} in ${environment} environment:\n${JSON.stringify( + currentSecrets, + null, + 2, + )}\n`, + ); + } + + const confirmedSet = await confirm({ + message: `Are you sure you want to set the following RPC URLs for ${chain} in ${environment}?\n${secretPayload}\n`, + }); + + if (!confirmedSet) { + console.log('Exiting without setting secret.'); + process.exit(0); + } + + if (isEthereumProtocolChain(chain)) { + console.log('\nTesting providers...'); + const testPassed = await testProviders(rpcUrlsArray); + if (!testPassed) { + console.error('At least one provider failed. Exiting.'); + process.exit(1); + } + + const confirmedProviders = await confirm({ + message: `All providers passed. 
Do you want to continue setting the secret?\n`, + }); + + if (!confirmedProviders) { + console.log('Exiting without setting secret.'); + process.exit(0); + } + } else { + console.log( + 'Skipping provider testing as chain is not an Ethereum protocol chain.', + ); + } + + let latestVersionName; + if (secretExists) { + latestVersionName = await getSecretRpcEndpointsLatestVersionName( + environment, + chain, + ); + } + console.log(`Setting secret...`); + await setSecretRpcEndpoints(environment, chain, secretPayload); + console.log(`Added secret version!`); + + if (latestVersionName) { + try { + await disableGCPSecretVersion(latestVersionName); + console.log(`Disabled previous version of the secret!`); + } catch (e) { + console.log(`Could not disable previous version of the secret`); + } + } +} + +main() + .then() + .catch((e) => { + console.error(e); + process.exit(1); + }); diff --git a/typescript/sdk/CHANGELOG.md b/typescript/sdk/CHANGELOG.md index 245e84426..3810b575d 100644 --- a/typescript/sdk/CHANGELOG.md +++ b/typescript/sdk/CHANGELOG.md @@ -1,5 +1,18 @@ # @hyperlane-xyz/sdk +## 3.15.0 + +### Minor Changes + +- 51bfff683: Mint/burn limit checking for xERC20 bridging + Corrects CLI output for HypXERC20 and HypXERC20Lockbox deployments + +### Patch Changes + +- Updated dependencies [51bfff683] + - @hyperlane-xyz/core@3.15.0 + - @hyperlane-xyz/utils@3.15.0 + ## 3.14.0 ### Patch Changes diff --git a/typescript/sdk/package.json b/typescript/sdk/package.json index 3944b8f92..0b74f6a0c 100644 --- a/typescript/sdk/package.json +++ b/typescript/sdk/package.json @@ -1,13 +1,13 @@ { "name": "@hyperlane-xyz/sdk", "description": "The official SDK for the Hyperlane Network", - "version": "4.0.0-alpha.2", + "version": "3.15.0", "dependencies": { "@aws-sdk/client-s3": "^3.74.0", "@cosmjs/cosmwasm-stargate": "^0.31.3", "@cosmjs/stargate": "^0.31.3", - "@hyperlane-xyz/core": "4.0.0-alpha.2", - "@hyperlane-xyz/utils": "4.0.0-alpha.2", + "@hyperlane-xyz/core": "3.15.0", + "@hyperlane-xyz/utils": "3.15.0", "@safe-global/api-kit": "1.3.0", "@safe-global/protocol-kit": "1.3.0", "@solana/spl-token": "^0.3.8", diff --git a/typescript/sdk/src/providers/SmartProvider/HyperlaneEtherscanProvider.ts b/typescript/sdk/src/providers/SmartProvider/HyperlaneEtherscanProvider.ts index 908c7939a..56fc50f57 100644 --- a/typescript/sdk/src/providers/SmartProvider/HyperlaneEtherscanProvider.ts +++ b/typescript/sdk/src/providers/SmartProvider/HyperlaneEtherscanProvider.ts @@ -24,6 +24,7 @@ export class HyperlaneEtherscanProvider ProviderMethod.Call, ProviderMethod.EstimateGas, ProviderMethod.SendTransaction, + ProviderMethod.MaxPriorityFeePerGas, ]); constructor( diff --git a/typescript/sdk/src/providers/SmartProvider/HyperlaneJsonRpcProvider.ts b/typescript/sdk/src/providers/SmartProvider/HyperlaneJsonRpcProvider.ts index 8b434a29c..b6695bcb2 100644 --- a/typescript/sdk/src/providers/SmartProvider/HyperlaneJsonRpcProvider.ts +++ b/typescript/sdk/src/providers/SmartProvider/HyperlaneJsonRpcProvider.ts @@ -32,6 +32,13 @@ export class HyperlaneJsonRpcProvider super(rpcConfig.connection ?? 
rpcConfig.http, network); } + prepareRequest(method: string, params: any): [string, any[]] { + if (method === ProviderMethod.MaxPriorityFeePerGas) { + return ['eth_maxPriorityFeePerGas', []]; + } + return super.prepareRequest(method, params); + } + async perform(method: string, params: any, reqId?: number): Promise { if (this.options?.debug) this.logger.debug( diff --git a/typescript/sdk/src/providers/SmartProvider/ProviderMethods.ts b/typescript/sdk/src/providers/SmartProvider/ProviderMethods.ts index 2a36c6599..6b7417786 100644 --- a/typescript/sdk/src/providers/SmartProvider/ProviderMethods.ts +++ b/typescript/sdk/src/providers/SmartProvider/ProviderMethods.ts @@ -16,6 +16,7 @@ export enum ProviderMethod { GetTransactionReceipt = 'getTransactionReceipt', GetLogs = 'getLogs', SendTransaction = 'sendTransaction', + MaxPriorityFeePerGas = 'maxPriorityFeePerGas', } export const AllProviderMethods = Object.values(ProviderMethod); diff --git a/typescript/sdk/src/providers/SmartProvider/SmartProvider.ts b/typescript/sdk/src/providers/SmartProvider/SmartProvider.ts index a3bb1f05b..a5daf9ea0 100644 --- a/typescript/sdk/src/providers/SmartProvider/SmartProvider.ts +++ b/typescript/sdk/src/providers/SmartProvider/SmartProvider.ts @@ -1,4 +1,4 @@ -import { providers } from 'ethers'; +import { BigNumber, providers, utils } from 'ethers'; import { Logger } from 'pino'; import { @@ -97,6 +97,40 @@ export class HyperlaneSmartProvider this.supportedMethods = [...supportedMethods.values()]; } + async getPriorityFee() { + try { + return BigNumber.from(await this.perform('maxPriorityFeePerGas', {})); + } catch (error) { + return BigNumber.from('1500000000'); + } + } + + async getFeeData(): Promise { + // override hardcoded getFeedata + // Copied from https://github.com/ethers-io/ethers.js/blob/v5/packages/abstract-provider/src.ts/index.ts#L235 which SmartProvider inherits this logic from + const { block, gasPrice } = await utils.resolveProperties({ + block: this.getBlock('latest'), + gasPrice: this.getGasPrice().catch(() => { + return null; + }), + }); + + let lastBaseFeePerGas: BigNumber | null = null, + maxFeePerGas: BigNumber | null = null, + maxPriorityFeePerGas: BigNumber | null = null; + + if (block?.baseFeePerGas) { + // We may want to compute this more accurately in the future, + // using the formula "check if the base fee is correct". 
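+ // In the override below, maxPriorityFeePerGas comes from the new eth_maxPriorityFeePerGas request (with a 1.5 gwei fallback in getPriorityFee), + // and maxFeePerGas is estimated with the common heuristic 2 * baseFeePerGas + maxPriorityFeePerGas.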
+ // See: https://eips.ethereum.org/EIPS/eip-1559 + lastBaseFeePerGas = block.baseFeePerGas; + maxPriorityFeePerGas = await this.getPriorityFee(); + maxFeePerGas = block.baseFeePerGas.mul(2).add(maxPriorityFeePerGas); + } + + return { lastBaseFeePerGas, maxFeePerGas, maxPriorityFeePerGas, gasPrice }; + } + static fromChainMetadata( chainMetadata: ChainMetadataWithRpcConnectionInfo, options?: SmartProviderOptions, diff --git a/typescript/sdk/src/token/Token.test.ts b/typescript/sdk/src/token/Token.test.ts index 9cd97c919..754538903 100644 --- a/typescript/sdk/src/token/Token.test.ts +++ b/typescript/sdk/src/token/Token.test.ts @@ -55,6 +55,22 @@ const STANDARD_TO_TOKEN: Record = { symbol: 'USDC', name: 'USDC', }, + [TokenStandard.EvmHypXERC20]: { + chainName: TestChainName.test2, + standard: TokenStandard.EvmHypXERC20, + addressOrDenom: '0x8358D8291e3bEDb04804975eEa0fe9fe0fAfB147', + decimals: 6, + symbol: 'USDC', + name: 'USDC', + }, + [TokenStandard.EvmHypXERC20Lockbox]: { + chainName: TestChainName.test2, + standard: TokenStandard.EvmHypXERC20Lockbox, + addressOrDenom: '0x8358D8291e3bEDb04804975eEa0fe9fe0fAfB147', + decimals: 6, + symbol: 'USDC', + name: 'USDC', + }, // Sealevel [TokenStandard.SealevelSpl]: { diff --git a/typescript/sdk/src/token/Token.ts b/typescript/sdk/src/token/Token.ts index 7ddc870e5..1a4747b7b 100644 --- a/typescript/sdk/src/token/Token.ts +++ b/typescript/sdk/src/token/Token.ts @@ -41,6 +41,8 @@ import { EvmHypCollateralAdapter, EvmHypNativeAdapter, EvmHypSyntheticAdapter, + EvmHypXERC20Adapter, + EvmHypXERC20LockboxAdapter, EvmNativeTokenAdapter, EvmTokenAdapter, } from './adapters/EvmTokenAdapter.js'; @@ -213,6 +215,14 @@ export class Token implements IToken { return new EvmHypSyntheticAdapter(chainName, multiProvider, { token: addressOrDenom, }); + } else if (standard === TokenStandard.EvmHypXERC20) { + return new EvmHypXERC20Adapter(chainName, multiProvider, { + token: addressOrDenom, + }); + } else if (standard === TokenStandard.EvmHypXERC20Lockbox) { + return new EvmHypXERC20LockboxAdapter(chainName, multiProvider, { + token: addressOrDenom, + }); } else if (standard === TokenStandard.SealevelHypNative) { return new SealevelHypNativeAdapter( chainName, diff --git a/typescript/sdk/src/token/TokenStandard.ts b/typescript/sdk/src/token/TokenStandard.ts index 8bdd0defc..d8346be16 100644 --- a/typescript/sdk/src/token/TokenStandard.ts +++ b/typescript/sdk/src/token/TokenStandard.ts @@ -15,6 +15,8 @@ export enum TokenStandard { EvmHypNative = 'EvmHypNative', EvmHypCollateral = 'EvmHypCollateral', EvmHypSynthetic = 'EvmHypSynthetic', + EvmHypXERC20 = 'EvmHypXERC20', + EvmHypXERC20Lockbox = 'EvmHypXERC20Lockbox', // Sealevel (Solana) SealevelSpl = 'SealevelSpl', @@ -48,6 +50,8 @@ export const TOKEN_STANDARD_TO_PROTOCOL: Record = { EvmHypNative: ProtocolType.Ethereum, EvmHypCollateral: ProtocolType.Ethereum, EvmHypSynthetic: ProtocolType.Ethereum, + EvmHypXERC20: ProtocolType.Ethereum, + EvmHypXERC20Lockbox: ProtocolType.Ethereum, // Sealevel (Solana) SealevelSpl: ProtocolType.Sealevel, @@ -96,10 +100,17 @@ export const TOKEN_COLLATERALIZED_STANDARDS = [ TokenStandard.CwHypNative, ]; +export const MINT_LIMITED_STANDARDS = [ + TokenStandard.EvmHypXERC20, + TokenStandard.EvmHypXERC20Lockbox, +]; + export const TOKEN_HYP_STANDARDS = [ TokenStandard.EvmHypNative, TokenStandard.EvmHypCollateral, TokenStandard.EvmHypSynthetic, + TokenStandard.EvmHypXERC20, + TokenStandard.EvmHypXERC20Lockbox, TokenStandard.SealevelHypNative, TokenStandard.SealevelHypCollateral, 
TokenStandard.SealevelHypSynthetic, @@ -128,8 +139,8 @@ export const TOKEN_TYPE_TO_STANDARD: Record = { [TokenType.native]: TokenStandard.EvmHypNative, [TokenType.collateral]: TokenStandard.EvmHypCollateral, [TokenType.collateralFiat]: TokenStandard.EvmHypCollateral, - [TokenType.XERC20]: TokenStandard.EvmHypCollateral, - [TokenType.XERC20Lockbox]: TokenStandard.EvmHypCollateral, + [TokenType.XERC20]: TokenStandard.EvmHypXERC20, + [TokenType.XERC20Lockbox]: TokenStandard.EvmHypXERC20Lockbox, [TokenType.collateralVault]: TokenStandard.EvmHypCollateral, [TokenType.collateralUri]: TokenStandard.EvmHypCollateral, [TokenType.fastCollateral]: TokenStandard.EvmHypCollateral, diff --git a/typescript/sdk/src/token/adapters/EvmTokenAdapter.ts b/typescript/sdk/src/token/adapters/EvmTokenAdapter.ts index 3fc25ade3..4d549b865 100644 --- a/typescript/sdk/src/token/adapters/EvmTokenAdapter.ts +++ b/typescript/sdk/src/token/adapters/EvmTokenAdapter.ts @@ -7,6 +7,11 @@ import { HypERC20Collateral, HypERC20Collateral__factory, HypERC20__factory, + HypXERC20, + HypXERC20Lockbox, + HypXERC20Lockbox__factory, + HypXERC20__factory, + IXERC20__factory, } from '@hyperlane-xyz/core'; import { Address, @@ -25,6 +30,7 @@ import { TokenMetadata } from '../types.js'; import { IHypTokenAdapter, + IHypXERC20Adapter, ITokenAdapter, InterchainGasQuote, TransferParams, @@ -279,6 +285,92 @@ export class EvmHypCollateralAdapter } } +// Interacts with HypXERC20Lockbox contracts +export class EvmHypXERC20LockboxAdapter + extends EvmHypCollateralAdapter + implements IHypXERC20Adapter +{ + hypXERC20Lockbox: HypXERC20Lockbox; + + constructor( + public readonly chainName: ChainName, + public readonly multiProvider: MultiProtocolProvider, + public readonly addresses: { token: Address }, + ) { + super(chainName, multiProvider, addresses); + + this.hypXERC20Lockbox = HypXERC20Lockbox__factory.connect( + addresses.token, + this.getProvider(), + ); + } + + async getMintLimit() { + const xERC20 = await this.hypXERC20Lockbox.xERC20(); + + const limit = await IXERC20__factory.connect( + xERC20, + this.getProvider(), + ).mintingCurrentLimitOf(this.contract.address); + + return BigInt(limit.toString()); + } + + async getBurnLimit() { + const xERC20 = await this.hypXERC20Lockbox.xERC20(); + + const limit = await IXERC20__factory.connect( + xERC20, + this.getProvider(), + ).burningCurrentLimitOf(this.contract.address); + + return BigInt(limit.toString()); + } +} + +// Interacts with HypXERC20 contracts +export class EvmHypXERC20Adapter + extends EvmHypCollateralAdapter + implements IHypXERC20Adapter +{ + hypXERC20: HypXERC20; + + constructor( + public readonly chainName: ChainName, + public readonly multiProvider: MultiProtocolProvider, + public readonly addresses: { token: Address }, + ) { + super(chainName, multiProvider, addresses); + + this.hypXERC20 = HypXERC20__factory.connect( + addresses.token, + this.getProvider(), + ); + } + + async getMintLimit() { + const xERC20 = await this.hypXERC20.wrappedToken(); + + const limit = await IXERC20__factory.connect( + xERC20, + this.getProvider(), + ).mintingCurrentLimitOf(this.contract.address); + + return BigInt(limit.toString()); + } + + async getBurnLimit() { + const xERC20 = await this.hypXERC20.wrappedToken(); + + const limit = await IXERC20__factory.connect( + xERC20, + this.getProvider(), + ).burningCurrentLimitOf(this.contract.address); + + return BigInt(limit.toString()); + } +} + // Interacts HypNative contracts export class EvmHypNativeAdapter extends EvmHypCollateralAdapter diff --git 
a/typescript/sdk/src/token/adapters/ITokenAdapter.ts b/typescript/sdk/src/token/adapters/ITokenAdapter.ts index 67bd1a0f4..f0a8032d5 100644 --- a/typescript/sdk/src/token/adapters/ITokenAdapter.ts +++ b/typescript/sdk/src/token/adapters/ITokenAdapter.ts @@ -40,3 +40,8 @@ export interface IHypTokenAdapter extends ITokenAdapter { quoteTransferRemoteGas(destination: Domain): Promise; populateTransferRemoteTx(p: TransferRemoteParams): Promise; } + +export interface IHypXERC20Adapter extends IHypTokenAdapter { + getMintLimit(): Promise; + getBurnLimit(): Promise; +} diff --git a/typescript/sdk/src/token/deploy.ts b/typescript/sdk/src/token/deploy.ts index 5dfa37250..9fa24a7f4 100644 --- a/typescript/sdk/src/token/deploy.ts +++ b/typescript/sdk/src/token/deploy.ts @@ -131,10 +131,17 @@ abstract class TokenDeployer< } async deploy(configMap: WarpRouteDeployConfig) { - const tokenMetadata = await TokenDeployer.deriveTokenMetadata( - this.multiProvider, - configMap, - ); + let tokenMetadata: TokenMetadata | undefined; + try { + tokenMetadata = await TokenDeployer.deriveTokenMetadata( + this.multiProvider, + configMap, + ); + } catch (err) { + this.logger.error('Failed to derive token metadata', err, configMap); + throw err; + } + const resolvedConfigMap = objMap(configMap, (_, config) => ({ ...tokenMetadata, gas: gasOverhead(config.type), diff --git a/typescript/sdk/src/warp/WarpCore.test.ts b/typescript/sdk/src/warp/WarpCore.test.ts index e24ebd5b7..b10eb3fdc 100644 --- a/typescript/sdk/src/warp/WarpCore.test.ts +++ b/typescript/sdk/src/warp/WarpCore.test.ts @@ -222,7 +222,7 @@ describe('WarpCore', () => { const invalidAmount = await warpCore.validateTransfer({ originTokenAmount: evmHypNative.amount(-10), - destination: test1.name, + destination: test2.name, recipient: MOCK_ADDRESS, sender: MOCK_ADDRESS, }); @@ -230,7 +230,7 @@ describe('WarpCore', () => { const insufficientBalance = await warpCore.validateTransfer({ originTokenAmount: evmHypNative.amount(BIG_TRANSFER_AMOUNT), - destination: test1.name, + destination: test2.name, recipient: MOCK_ADDRESS, sender: MOCK_ADDRESS, }); diff --git a/typescript/sdk/src/warp/WarpCore.ts b/typescript/sdk/src/warp/WarpCore.ts index 9c9da1ca6..f68201d73 100644 --- a/typescript/sdk/src/warp/WarpCore.ts +++ b/typescript/sdk/src/warp/WarpCore.ts @@ -22,10 +22,13 @@ import { Token } from '../token/Token.js'; import { TokenAmount } from '../token/TokenAmount.js'; import { parseTokenConnectionId } from '../token/TokenConnection.js'; import { + MINT_LIMITED_STANDARDS, TOKEN_COLLATERALIZED_STANDARDS, TOKEN_STANDARD_TO_PROVIDER_TYPE, + TokenStandard, } from '../token/TokenStandard.js'; import { EVM_TRANSFER_REMOTE_GAS_ESTIMATE } from '../token/adapters/EvmTokenAdapter.js'; +import { IHypXERC20Adapter } from '../token/adapters/ITokenAdapter.js'; import { ChainName, ChainNameOrId } from '../types.js'; import { @@ -425,17 +428,32 @@ export class WarpCore { originToken.getConnectionForChain(destinationName)?.token; assert(destinationToken, `No connection found for ${destinationName}`); - if (!TOKEN_COLLATERALIZED_STANDARDS.includes(destinationToken.standard)) { + if ( + !TOKEN_COLLATERALIZED_STANDARDS.includes(destinationToken.standard) && + !MINT_LIMITED_STANDARDS.includes(destinationToken.standard) + ) { this.logger.debug( `${destinationToken.symbol} is not collateralized, skipping`, ); return true; } + let destinationBalance: bigint; + const adapter = destinationToken.getAdapter(this.multiProvider); - const destinationBalance = await adapter.getBalance( - 
destinationToken.addressOrDenom, - ); + if ( + destinationToken.standard === TokenStandard.EvmHypXERC20 || + destinationToken.standard === TokenStandard.EvmHypXERC20Lockbox + ) { + destinationBalance = await ( + adapter as IHypXERC20Adapter + ).getMintLimit(); + } else { + destinationBalance = await adapter.getBalance( + destinationToken.addressOrDenom, + ); + } + const destinationBalanceInOriginDecimals = convertDecimals( destinationToken.decimals, originToken.decimals, @@ -504,6 +522,17 @@ export class WarpCore { const amountError = this.validateAmount(originTokenAmount); if (amountError) return amountError; + const destinationCollateralError = await this.validateDestinationCollateral( + originTokenAmount, + destination, + ); + if (destinationCollateralError) return destinationCollateralError; + + const originCollateralError = await this.validateOriginCollateral( + originTokenAmount, + ); + if (originCollateralError) return originCollateralError; + const balancesError = await this.validateTokenBalances( originTokenAmount, destination, @@ -592,6 +621,7 @@ export class WarpCore { senderPubKey?: HexString, ): Promise | null> { const { token, amount } = originTokenAmount; + const { amount: senderBalance } = await token.getBalance( this.multiProvider, sender, @@ -637,6 +667,45 @@ export class WarpCore { return null; } + /** + * Ensure the destination chain has sufficient collateral (or xERC20 mint limit) to receive the transfer + */ + protected async validateDestinationCollateral( + originTokenAmount: TokenAmount, + destination: ChainNameOrId, + ): Promise | null> { + const valid = await this.isDestinationCollateralSufficient({ + originTokenAmount, + destination, + }); + if (!valid) return { amount: 'Insufficient collateral on destination' }; + + return null; + } + + /** + * Ensure the origin token's xERC20 burn limit is sufficient to cover the transfer + */ + protected async validateOriginCollateral( + originTokenAmount: TokenAmount, + ): Promise | null> { + const adapter = originTokenAmount.token.getAdapter(this.multiProvider); + + if ( + originTokenAmount.token.standard === TokenStandard.EvmHypXERC20 || + originTokenAmount.token.standard === TokenStandard.EvmHypXERC20Lockbox + ) { + const burnLimit = await ( + adapter as IHypXERC20Adapter + ).getBurnLimit(); + if (burnLimit < BigInt(originTokenAmount.amount)) { + return { amount: 'Insufficient burn limit on origin' }; + } + } + + return null; + } + /** * Search through token list to find token with matching chain and address */ diff --git a/typescript/utils/CHANGELOG.md b/typescript/utils/CHANGELOG.md index e0ab748ce..a796ccac6 100644 --- a/typescript/utils/CHANGELOG.md +++ b/typescript/utils/CHANGELOG.md @@ -1,5 +1,7 @@ # @hyperlane-xyz/utils +## 3.15.0 + ## 3.14.0 ## 3.13.0 diff --git a/typescript/utils/package.json b/typescript/utils/package.json index d4e46ddd3..c7f4b7ec0 100644 --- a/typescript/utils/package.json +++ b/typescript/utils/package.json @@ -1,7 +1,7 @@ { "name": "@hyperlane-xyz/utils", "description": "General utilities and types for the Hyperlane network", - "version": "4.0.0-alpha.2", + "version": "3.15.0", "dependencies": { "@cosmjs/encoding": "^0.31.3", "@solana/web3.js": "^1.78.0", diff --git a/yarn.lock b/yarn.lock index a5ec94b7a..70970a80f 100644 --- a/yarn.lock +++ b/yarn.lock @@ -4503,17 +4503,6 @@ __metadata: languageName: node linkType: hard -"@eslint/config-array@npm:^0.15.1": - version: 0.15.1 - resolution: "@eslint/config-array@npm:0.15.1" - dependencies: - "@eslint/object-schema": "npm:^2.1.3" - debug: "npm:^4.3.1" - minimatch: "npm:^3.0.5"
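A minimal usage sketch for the new xERC20 adapters introduced above (a hedged illustration, not part of this PR: it assumes an existing `multiProvider` of type MultiProtocolProvider, a deployed HypXERC20 router at `routerAddress`, and that EvmHypXERC20Adapter is importable from the SDK's token adapters module):

// Query the xERC20 rate limits that WarpCore now validates against.
const adapter = new EvmHypXERC20Adapter('ethereum', multiProvider, {
  token: routerAddress, // address of the deployed HypXERC20 router
});
const mintLimit = await adapter.getMintLimit(); // wrappedToken.mintingCurrentLimitOf(router)
const burnLimit = await adapter.getBurnLimit(); // wrappedToken.burningCurrentLimitOf(router)
// A transfer into this chain should not exceed mintLimit; a transfer out should not exceed burnLimit.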
- checksum: cf8f68a24498531180fad6846cb52dac4e852b0296d2664930bc15d6a2944ad427827bbaebfddf3f87b9c5db0e36c13974d6dc89fff8ba0d3d2b4357b8d52b4e - languageName: node - linkType: hard - "@eslint/eslintrc@npm:^2.1.4": version: 2.1.4 resolution: "@eslint/eslintrc@npm:2.1.4" @@ -4531,23 +4520,6 @@ __metadata: languageName: node linkType: hard -"@eslint/eslintrc@npm:^3.1.0": - version: 3.1.0 - resolution: "@eslint/eslintrc@npm:3.1.0" - dependencies: - ajv: "npm:^6.12.4" - debug: "npm:^4.3.2" - espree: "npm:^10.0.1" - globals: "npm:^14.0.0" - ignore: "npm:^5.2.0" - import-fresh: "npm:^3.2.1" - js-yaml: "npm:^4.1.0" - minimatch: "npm:^3.1.2" - strip-json-comments: "npm:^3.1.1" - checksum: 02bf892d1397e1029209dea685e9f4f87baf643315df2a632b5f121ec7e8548a3b34f428a007234fa82772218fa8a3ac2d10328637b9ce63b7f8344035b74db3 - languageName: node - linkType: hard - "@eslint/js@npm:8.57.0": version: 8.57.0 resolution: "@eslint/js@npm:8.57.0" @@ -4555,20 +4527,6 @@ __metadata: languageName: node linkType: hard -"@eslint/js@npm:9.4.0": - version: 9.4.0 - resolution: "@eslint/js@npm:9.4.0" - checksum: f1fa9acda8bab02dad21e9b7f46c6ba8cb3949979846caf7667f0c682ed0b56d9e8db143b00aab587ef2d02603df202eb5f7017d8f3a98be94be6efa763865ab - languageName: node - linkType: hard - -"@eslint/object-schema@npm:^2.1.3": - version: 2.1.4 - resolution: "@eslint/object-schema@npm:2.1.4" - checksum: 221e8d9f281c605948cd6e030874aacce83fe097f8f9c1964787037bccf08e82b7aa9eff1850a30fffac43f1d76555727ec22a2af479d91e268e89d1e035131e - languageName: node - linkType: hard - "@eth-optimism/contracts-bedrock@npm:0.16.2": version: 0.16.2 resolution: "@eth-optimism/contracts-bedrock@npm:0.16.2" @@ -4880,7 +4838,7 @@ __metadata: languageName: node linkType: hard -"@ethersproject/abi@npm:5.7.0, @ethersproject/abi@npm:^5.0.9, @ethersproject/abi@npm:^5.4.0, @ethersproject/abi@npm:^5.7.0": +"@ethersproject/abi@npm:*, @ethersproject/abi@npm:5.7.0, @ethersproject/abi@npm:^5.0.9, @ethersproject/abi@npm:^5.4.0, @ethersproject/abi@npm:^5.7.0": version: 5.7.0 resolution: "@ethersproject/abi@npm:5.7.0" dependencies: @@ -5290,7 +5248,7 @@ __metadata: languageName: node linkType: hard -"@ethersproject/providers@npm:5.7.2, @ethersproject/providers@npm:^5.4.4, @ethersproject/providers@npm:^5.7.0, @ethersproject/providers@npm:^5.7.1, @ethersproject/providers@npm:^5.7.2": +"@ethersproject/providers@npm:*, @ethersproject/providers@npm:5.7.2, @ethersproject/providers@npm:^5.4.4, @ethersproject/providers@npm:^5.7.0, @ethersproject/providers@npm:^5.7.1, @ethersproject/providers@npm:^5.7.2": version: 5.7.2 resolution: "@ethersproject/providers@npm:5.7.2" dependencies: @@ -5697,13 +5655,6 @@ __metadata: languageName: node linkType: hard -"@humanwhocodes/retry@npm:^0.3.0": - version: 0.3.0 - resolution: "@humanwhocodes/retry@npm:0.3.0" - checksum: e574bab58680867414e225c9002e9a97eb396f85871c180fbb1a9bcdf9ded4b4de0b327f7d0c43b775873362b7c92956d4b322e8bc4b90be56077524341f04b2 - languageName: node - linkType: hard - "@hyperlane-xyz/ccip-server@workspace:typescript/ccip-server": version: 0.0.0-use.local resolution: "@hyperlane-xyz/ccip-server@workspace:typescript/ccip-server" @@ -5730,9 +5681,11 @@ __metadata: dependencies: "@aws-sdk/client-kms": "npm:^3.577.0" "@aws-sdk/client-s3": "npm:^3.577.0" + "@ethersproject/abi": "npm:*" + "@ethersproject/providers": "npm:*" "@hyperlane-xyz/registry": "npm:1.3.0" - "@hyperlane-xyz/sdk": "npm:4.0.0-alpha.2" - "@hyperlane-xyz/utils": "npm:4.0.0-alpha.2" + "@hyperlane-xyz/sdk": "npm:3.15.0" + "@hyperlane-xyz/utils": "npm:3.15.0" 
"@inquirer/prompts": "npm:^3.0.0" "@types/mocha": "npm:^10.0.1" "@types/node": "npm:^18.14.5" @@ -5761,28 +5714,12 @@ __metadata: languageName: unknown linkType: soft -"@hyperlane-xyz/core@npm:3.7.0": - version: 3.7.0 - resolution: "@hyperlane-xyz/core@npm:3.7.0" - dependencies: - "@eth-optimism/contracts": "npm:^0.6.0" - "@hyperlane-xyz/utils": "npm:3.7.0" - "@openzeppelin/contracts": "npm:^4.9.3" - "@openzeppelin/contracts-upgradeable": "npm:^v4.9.3" - peerDependencies: - "@ethersproject/abi": "*" - "@ethersproject/providers": "*" - "@types/sinon-chai": "*" - checksum: efa01d943dd5b67830bb7244291c8ba9849472e804dff589463de76d3c03e56bc8d62454b575a6621aa1b8b53cc0d1d3b752a83d34f4b328ecd85e1ff23230d5 - languageName: node - linkType: hard - -"@hyperlane-xyz/core@npm:4.0.0-alpha.2, @hyperlane-xyz/core@workspace:solidity": +"@hyperlane-xyz/core@npm:3.15.0, @hyperlane-xyz/core@workspace:solidity": version: 0.0.0-use.local resolution: "@hyperlane-xyz/core@workspace:solidity" dependencies: "@eth-optimism/contracts": "npm:^0.6.0" - "@hyperlane-xyz/utils": "npm:4.0.0-alpha.2" + "@hyperlane-xyz/utils": "npm:3.15.0" "@layerzerolabs/lz-evm-oapp-v2": "npm:2.0.2" "@layerzerolabs/solidity-examples": "npm:^1.1.0" "@nomiclabs/hardhat-ethers": "npm:^2.2.3" @@ -5790,7 +5727,9 @@ __metadata: "@openzeppelin/contracts": "npm:^4.9.3" "@openzeppelin/contracts-upgradeable": "npm:^v4.9.3" "@typechain/ethers-v5": "npm:^11.1.2" + "@typechain/ethers-v6": "npm:^0.5.1" "@typechain/hardhat": "npm:^9.1.0" + "@types/node": "npm:^18.14.5" chai: "npm:^4.3.6" ethereum-waffle: "npm:^4.0.10" ethers: "npm:^5.7.2" @@ -5802,6 +5741,7 @@ __metadata: prettier-plugin-solidity: "npm:^1.1.3" solhint: "npm:^4.5.4" solhint-plugin-prettier: "npm:^0.0.5" + solidity-bytes-utils: "npm:^0.8.0" solidity-coverage: "npm:^0.8.3" ts-generator: "npm:^0.1.1" ts-node: "npm:^10.8.0" @@ -5814,13 +5754,29 @@ __metadata: languageName: unknown linkType: soft -"@hyperlane-xyz/helloworld@npm:4.0.0-alpha.2, @hyperlane-xyz/helloworld@workspace:typescript/helloworld": +"@hyperlane-xyz/core@npm:3.7.0": + version: 3.7.0 + resolution: "@hyperlane-xyz/core@npm:3.7.0" + dependencies: + "@eth-optimism/contracts": "npm:^0.6.0" + "@hyperlane-xyz/utils": "npm:3.7.0" + "@openzeppelin/contracts": "npm:^4.9.3" + "@openzeppelin/contracts-upgradeable": "npm:^v4.9.3" + peerDependencies: + "@ethersproject/abi": "*" + "@ethersproject/providers": "*" + "@types/sinon-chai": "*" + checksum: efa01d943dd5b67830bb7244291c8ba9849472e804dff589463de76d3c03e56bc8d62454b575a6621aa1b8b53cc0d1d3b752a83d34f4b328ecd85e1ff23230d5 + languageName: node + linkType: hard + +"@hyperlane-xyz/helloworld@npm:3.15.0, @hyperlane-xyz/helloworld@workspace:typescript/helloworld": version: 0.0.0-use.local resolution: "@hyperlane-xyz/helloworld@workspace:typescript/helloworld" dependencies: - "@hyperlane-xyz/core": "npm:4.0.0-alpha.2" + "@hyperlane-xyz/core": "npm:3.15.0" "@hyperlane-xyz/registry": "npm:1.3.0" - "@hyperlane-xyz/sdk": "npm:4.0.0-alpha.2" + "@hyperlane-xyz/sdk": "npm:3.15.0" "@nomiclabs/hardhat-ethers": "npm:^2.2.3" "@nomiclabs/hardhat-waffle": "npm:^2.0.6" "@openzeppelin/contracts-upgradeable": "npm:^4.9.3" @@ -5867,10 +5823,10 @@ __metadata: "@ethersproject/hardware-wallets": "npm:^5.7.0" "@ethersproject/providers": "npm:^5.7.2" "@google-cloud/secret-manager": "npm:^5.5.0" - "@hyperlane-xyz/helloworld": "npm:4.0.0-alpha.2" + "@hyperlane-xyz/helloworld": "npm:3.15.0" "@hyperlane-xyz/registry": "npm:1.3.0" - "@hyperlane-xyz/sdk": "npm:4.0.0-alpha.2" - "@hyperlane-xyz/utils": "npm:4.0.0-alpha.2" 
+ "@hyperlane-xyz/sdk": "npm:3.15.0" + "@hyperlane-xyz/utils": "npm:3.15.0" "@nomiclabs/hardhat-ethers": "npm:^2.2.3" "@nomiclabs/hardhat-etherscan": "npm:^3.0.3" "@nomiclabs/hardhat-waffle": "npm:^2.0.6" @@ -5910,7 +5866,7 @@ __metadata: "@trivago/prettier-plugin-sort-imports": "npm:^4.2.1" "@typescript-eslint/eslint-plugin": "npm:^7.4.0" "@typescript-eslint/parser": "npm:^7.4.0" - eslint: "npm:^9.0.0" + eslint: "npm:^8.57.0" eslint-config-prettier: "npm:^9.1.0" eslint-plugin-jest: "npm:^28.2.0" husky: "npm:^8.0.0" @@ -5930,43 +5886,15 @@ __metadata: languageName: node linkType: hard -"@hyperlane-xyz/sdk@npm:3.7.0": - version: 3.7.0 - resolution: "@hyperlane-xyz/sdk@npm:3.7.0" - dependencies: - "@cosmjs/cosmwasm-stargate": "npm:^0.31.3" - "@cosmjs/stargate": "npm:^0.31.3" - "@hyperlane-xyz/core": "npm:3.7.0" - "@hyperlane-xyz/utils": "npm:3.7.0" - "@solana/spl-token": "npm:^0.3.8" - "@solana/web3.js": "npm:^1.78.0" - "@types/coingecko-api": "npm:^1.0.10" - "@types/debug": "npm:^4.1.7" - "@wagmi/chains": "npm:^1.8.0" - bignumber.js: "npm:^9.1.1" - coingecko-api: "npm:^1.0.10" - cosmjs-types: "npm:^0.9.0" - cross-fetch: "npm:^3.1.5" - debug: "npm:^4.3.4" - ethers: "npm:^5.7.2" - viem: "npm:^1.20.0" - zod: "npm:^3.21.2" - peerDependencies: - "@ethersproject/abi": "*" - "@ethersproject/providers": "*" - checksum: b124a42f34502c4dad4127723d345158f592056d7e60e17d87c84bf81664ead20232ffaff66e6c21968dfd5693ba5122910fbcaa6b7db5b05fdd5d2051592835 - languageName: node - linkType: hard - -"@hyperlane-xyz/sdk@npm:4.0.0-alpha.2, @hyperlane-xyz/sdk@workspace:typescript/sdk": +"@hyperlane-xyz/sdk@npm:3.15.0, @hyperlane-xyz/sdk@workspace:typescript/sdk": version: 0.0.0-use.local resolution: "@hyperlane-xyz/sdk@workspace:typescript/sdk" dependencies: "@aws-sdk/client-s3": "npm:^3.74.0" "@cosmjs/cosmwasm-stargate": "npm:^0.31.3" "@cosmjs/stargate": "npm:^0.31.3" - "@hyperlane-xyz/core": "npm:4.0.0-alpha.2" - "@hyperlane-xyz/utils": "npm:4.0.0-alpha.2" + "@hyperlane-xyz/core": "npm:3.15.0" + "@hyperlane-xyz/utils": "npm:3.15.0" "@nomiclabs/hardhat-ethers": "npm:^2.2.3" "@nomiclabs/hardhat-waffle": "npm:^2.0.6" "@safe-global/api-kit": "npm:1.3.0" @@ -6006,19 +5934,35 @@ __metadata: languageName: unknown linkType: soft -"@hyperlane-xyz/utils@npm:3.7.0": +"@hyperlane-xyz/sdk@npm:3.7.0": version: 3.7.0 - resolution: "@hyperlane-xyz/utils@npm:3.7.0" + resolution: "@hyperlane-xyz/sdk@npm:3.7.0" dependencies: - "@cosmjs/encoding": "npm:^0.31.3" + "@cosmjs/cosmwasm-stargate": "npm:^0.31.3" + "@cosmjs/stargate": "npm:^0.31.3" + "@hyperlane-xyz/core": "npm:3.7.0" + "@hyperlane-xyz/utils": "npm:3.7.0" + "@solana/spl-token": "npm:^0.3.8" "@solana/web3.js": "npm:^1.78.0" + "@types/coingecko-api": "npm:^1.0.10" + "@types/debug": "npm:^4.1.7" + "@wagmi/chains": "npm:^1.8.0" bignumber.js: "npm:^9.1.1" + coingecko-api: "npm:^1.0.10" + cosmjs-types: "npm:^0.9.0" + cross-fetch: "npm:^3.1.5" + debug: "npm:^4.3.4" ethers: "npm:^5.7.2" - checksum: c76f36913c572702b9dfe22fd868db6fed01c0da9485319e33e8d00a6b8a1bfdcecb5f61c8a3fd8ccbef0b36809e8055db62d75d0c6759d5e079ee330586bcd1 + viem: "npm:^1.20.0" + zod: "npm:^3.21.2" + peerDependencies: + "@ethersproject/abi": "*" + "@ethersproject/providers": "*" + checksum: b124a42f34502c4dad4127723d345158f592056d7e60e17d87c84bf81664ead20232ffaff66e6c21968dfd5693ba5122910fbcaa6b7db5b05fdd5d2051592835 languageName: node linkType: hard -"@hyperlane-xyz/utils@npm:4.0.0-alpha.2, @hyperlane-xyz/utils@workspace:typescript/utils": +"@hyperlane-xyz/utils@npm:3.15.0, 
@hyperlane-xyz/utils@workspace:typescript/utils": version: 0.0.0-use.local resolution: "@hyperlane-xyz/utils@workspace:typescript/utils" dependencies: @@ -6036,6 +5980,18 @@ __metadata: languageName: unknown linkType: soft +"@hyperlane-xyz/utils@npm:3.7.0": + version: 3.7.0 + resolution: "@hyperlane-xyz/utils@npm:3.7.0" + dependencies: + "@cosmjs/encoding": "npm:^0.31.3" + "@solana/web3.js": "npm:^1.78.0" + bignumber.js: "npm:^9.1.1" + ethers: "npm:^5.7.2" + checksum: c76f36913c572702b9dfe22fd868db6fed01c0da9485319e33e8d00a6b8a1bfdcecb5f61c8a3fd8ccbef0b36809e8055db62d75d0c6759d5e079ee330586bcd1 + languageName: node + linkType: hard + "@hyperlane-xyz/widgets@npm:3.7.0": version: 3.7.0 resolution: "@hyperlane-xyz/widgets@npm:3.7.0" @@ -13750,6 +13706,13 @@ __metadata: languageName: node linkType: hard +"ds-test@github:dapphub/ds-test": + version: 1.0.0 + resolution: "ds-test@https://github.com/dapphub/ds-test.git#commit=e282159d5170298eb2455a6c05280ab5a73a4ef0" + checksum: a63cada107d8f2775934bc580f04cb6f6509f843cb41cbc3a617e77b2e628a86d7fd858f964e7e2d6f41c3797c0e16ec2d87a6cb4c6187c5b6c2bc969ccae4b3 + languageName: node + linkType: hard + "duplexer3@npm:^0.1.4": version: 0.1.4 resolution: "duplexer3@npm:0.1.4" @@ -14319,16 +14282,6 @@ __metadata: languageName: node linkType: hard -"eslint-scope@npm:^8.0.1": - version: 8.0.1 - resolution: "eslint-scope@npm:8.0.1" - dependencies: - esrecurse: "npm:^4.3.0" - estraverse: "npm:^5.2.0" - checksum: 458513863d3c79005b599f40250437bddba923f18549058ea45820a8d3d4bbc67fe292751d522a0cab69dd01fe211ffde5c1a5fc867e86f2d28727b1d61610da - languageName: node - linkType: hard - "eslint-visitor-keys@npm:^3.3.0": version: 3.3.0 resolution: "eslint-visitor-keys@npm:3.3.0" @@ -14350,13 +14303,6 @@ __metadata: languageName: node linkType: hard -"eslint-visitor-keys@npm:^4.0.0": - version: 4.0.0 - resolution: "eslint-visitor-keys@npm:4.0.0" - checksum: c7617166e6291a15ce2982b5c4b9cdfb6409f5c14562712d12e2584480cdf18609694b21d7dad35b02df0fa2cd037505048ded54d2f405c64f600949564eb334 - languageName: node - linkType: hard - "eslint@npm:^8.57.0": version: 8.57.0 resolution: "eslint@npm:8.57.0" @@ -14405,61 +14351,6 @@ __metadata: languageName: node linkType: hard -"eslint@npm:^9.0.0": - version: 9.4.0 - resolution: "eslint@npm:9.4.0" - dependencies: - "@eslint-community/eslint-utils": "npm:^4.2.0" - "@eslint-community/regexpp": "npm:^4.6.1" - "@eslint/config-array": "npm:^0.15.1" - "@eslint/eslintrc": "npm:^3.1.0" - "@eslint/js": "npm:9.4.0" - "@humanwhocodes/module-importer": "npm:^1.0.1" - "@humanwhocodes/retry": "npm:^0.3.0" - "@nodelib/fs.walk": "npm:^1.2.8" - ajv: "npm:^6.12.4" - chalk: "npm:^4.0.0" - cross-spawn: "npm:^7.0.2" - debug: "npm:^4.3.2" - escape-string-regexp: "npm:^4.0.0" - eslint-scope: "npm:^8.0.1" - eslint-visitor-keys: "npm:^4.0.0" - espree: "npm:^10.0.1" - esquery: "npm:^1.4.2" - esutils: "npm:^2.0.2" - fast-deep-equal: "npm:^3.1.3" - file-entry-cache: "npm:^8.0.0" - find-up: "npm:^5.0.0" - glob-parent: "npm:^6.0.2" - ignore: "npm:^5.2.0" - imurmurhash: "npm:^0.1.4" - is-glob: "npm:^4.0.0" - is-path-inside: "npm:^3.0.3" - json-stable-stringify-without-jsonify: "npm:^1.0.1" - levn: "npm:^0.4.1" - lodash.merge: "npm:^4.6.2" - minimatch: "npm:^3.1.2" - natural-compare: "npm:^1.4.0" - optionator: "npm:^0.9.3" - strip-ansi: "npm:^6.0.1" - text-table: "npm:^0.2.0" - bin: - eslint: bin/eslint.js - checksum: e2eaae18eb79d543a1ca5420495ea9bf1278f9e25bfa6309ec4e4dae981cba4d731a9b857f5e2f8b5e467adaaf871a635a7eb143a749e7cdcdff4716821628d2 - languageName: node - 
linkType: hard - -"espree@npm:^10.0.1": - version: 10.0.1 - resolution: "espree@npm:10.0.1" - dependencies: - acorn: "npm:^8.11.3" - acorn-jsx: "npm:^5.3.2" - eslint-visitor-keys: "npm:^4.0.0" - checksum: 557d6cfb4894b1489effcaed8702682086033f8a2449568933bc59493734733d750f2a87907ba575844d3933340aea2d84288f5e67020c6152f6fd18a86497b2 - languageName: node - linkType: hard - "espree@npm:^9.6.0, espree@npm:^9.6.1": version: 9.6.1 resolution: "espree@npm:9.6.1" @@ -15224,15 +15115,6 @@ __metadata: languageName: node linkType: hard -"file-entry-cache@npm:^8.0.0": - version: 8.0.0 - resolution: "file-entry-cache@npm:8.0.0" - dependencies: - flat-cache: "npm:^4.0.0" - checksum: afe55c4de4e0d226a23c1eae62a7219aafb390859122608a89fa4df6addf55c7fd3f1a2da6f5b41e7cdff496e4cf28bbd215d53eab5c817afa96d2b40c81bfb0 - languageName: node - linkType: hard - "file-uri-to-path@npm:1.0.0": version: 1.0.0 resolution: "file-uri-to-path@npm:1.0.0" @@ -15338,16 +15220,6 @@ __metadata: languageName: node linkType: hard -"flat-cache@npm:^4.0.0": - version: 4.0.1 - resolution: "flat-cache@npm:4.0.1" - dependencies: - flatted: "npm:^3.2.9" - keyv: "npm:^4.5.4" - checksum: 58ce851d9045fffc7871ce2bd718bc485ad7e777bf748c054904b87c351ff1080c2c11da00788d78738bfb51b71e4d5ea12d13b98eb36e3358851ffe495b62dc - languageName: node - linkType: hard - "flat@npm:^5.0.2": version: 5.0.2 resolution: "flat@npm:5.0.2" @@ -15364,13 +15236,6 @@ __metadata: languageName: node linkType: hard -"flatted@npm:^3.2.9": - version: 3.3.1 - resolution: "flatted@npm:3.3.1" - checksum: 7b8376061d5be6e0d3658bbab8bde587647f68797cf6bfeae9dea0e5137d9f27547ab92aaff3512dd9d1299086a6d61be98e9d48a56d17531b634f77faadbc49 - languageName: node - linkType: hard - "fmix@npm:^0.1.0": version: 0.1.0 resolution: "fmix@npm:0.1.0" @@ -15416,6 +15281,13 @@ __metadata: languageName: node linkType: hard +"forge-std@npm:^1.1.2": + version: 1.1.2 + resolution: "forge-std@npm:1.1.2" + checksum: 78fa45e7df8076d4e8a3d8494736931082e1faa02495593b0330c09464a053d2ff1d48c2d1db004c15d763ba4547ecfb46b701f79655a46ca638033913e729a1 + languageName: node + linkType: hard + "form-data-encoder@npm:1.7.1": version: 1.7.1 resolution: "form-data-encoder@npm:1.7.1" @@ -16138,13 +16010,6 @@ __metadata: languageName: node linkType: hard -"globals@npm:^14.0.0": - version: 14.0.0 - resolution: "globals@npm:14.0.0" - checksum: 03939c8af95c6df5014b137cac83aa909090c3a3985caef06ee9a5a669790877af8698ab38007e4c0186873adc14c0b13764acc754b16a754c216cc56aa5f021 - languageName: node - linkType: hard - "globalthis@npm:^1.0.1, globalthis@npm:^1.0.3": version: 1.0.3 resolution: "globalthis@npm:1.0.3" @@ -18605,7 +18470,7 @@ __metadata: languageName: node linkType: hard -"keyv@npm:^4.5.3, keyv@npm:^4.5.4": +"keyv@npm:^4.5.3": version: 4.5.4 resolution: "keyv@npm:4.5.4" dependencies: @@ -23332,6 +23197,16 @@ __metadata: languageName: node linkType: hard +"solidity-bytes-utils@npm:^0.8.0": + version: 0.8.2 + resolution: "solidity-bytes-utils@npm:0.8.2" + dependencies: + ds-test: "github:dapphub/ds-test" + forge-std: "npm:^1.1.2" + checksum: 72238183c3cea06867244e359d47d6355d9d8c72d50ed7a3b2e87c6ba3bf760cc7c7bfef089c04ce60f8c6c4f6f213e49a4c009f27902465e660c7b30fa5ab57 + languageName: node + linkType: hard + "solidity-comments-darwin-arm64@npm:0.0.2": version: 0.0.2 resolution: "solidity-comments-darwin-arm64@npm:0.0.2"