Compare commits

...

9 Commits

28 changed files with 37 additions and 1007 deletions

.env (10 changes)

@@ -1,8 +1,8 @@
 # These API keys are intentionally public. Please do not report them - thank you for your concern.
 ESLINT_NO_DEV_ERRORS=true
-REACT_APP_AMPLITUDE_PROXY_URL="https://api.uniswap.org/v1/amplitude-proxy"
+REACT_APP_AMPLITUDE_PROXY_URL="https://null.null"
 REACT_APP_AWS_API_REGION="us-east-2"
-REACT_APP_AWS_API_ENDPOINT="https://beta.api.uniswap.org/v1/graphql"
+REACT_APP_AWS_API_ENDPOINT="https://null.null"
 REACT_APP_BNB_RPC_URL="https://rough-sleek-hill.bsc.quiknode.pro/413cc98cbc776cda8fdf1d0f47003583ff73d9bf"
 REACT_APP_INFURA_KEY="4bf032f2d38a4ed6bb975b80d6340847"
 REACT_APP_QUICKNODE_MAINNET_RPC_URL="https://magical-alien-tab.quiknode.pro/669e87e569a8277d3fbd9e202f9df93189f19f4c"
@@ -10,7 +10,7 @@ REACT_APP_MOONPAY_API="https://api.moonpay.com"
 REACT_APP_MOONPAY_LINK="https://us-central1-uniswap-mobile.cloudfunctions.net/signMoonpayLinkV2?platform=web&env=staging"
 REACT_APP_MOONPAY_PUBLISHABLE_KEY="pk_test_DycfESRid31UaSxhI5yWKe1r5E5kKSz"
 REACT_APP_SENTRY_DSN="https://a3c62e400b8748b5a8d007150e2f38b7@o1037921.ingest.sentry.io/4504255148851200"
-REACT_APP_STATSIG_PROXY_URL="https://api.uniswap.org/v1/statsig-proxy"
-REACT_APP_TEMP_API_URL="https://temp.api.uniswap.org/v1"
-REACT_APP_UNISWAP_API_URL="https://api.uniswap.org/v2"
+REACT_APP_STATSIG_PROXY_URL="https://null.null"
+REACT_APP_TEMP_API_URL="https://null.null"
+REACT_APP_UNISWAP_API_URL="https://null.null"
 REACT_APP_WALLET_CONNECT_PROJECT_ID="c6c9bacd35afa3eb9e6cccf6d8464395"

@@ -1,16 +1,16 @@
 # These API keys are intentionally public. Please do not report them - thank you for your concern.
-REACT_APP_AMPLITUDE_PROXY_URL="https://api.uniswap.org/v1/amplitude-proxy"
-REACT_APP_AWS_API_ENDPOINT="https://api.uniswap.org/v1/graphql"
+REACT_APP_AMPLITUDE_PROXY_URL="https://null.null"
+REACT_APP_AWS_API_ENDPOINT="https://null.null"
 REACT_APP_BNB_RPC_URL="https://old-wispy-arrow.bsc.quiknode.pro/f5c060177236065c1058531a0615ab4f7a34a2fd"
 REACT_APP_FIREBASE_KEY="AIzaSyBcZWwTcTJHj_R6ipZcrJkXdq05PuX0Rs0"
 REACT_APP_FORTMATIC_KEY="pk_live_F937DF033A1666BF"
 REACT_APP_GOOGLE_ANALYTICS_ID="G-KDP9B6W4H8"
 REACT_APP_INFURA_KEY="099fc58e0de9451d80b18d7c74caa7c1"
 REACT_APP_MOONPAY_API="https://api.moonpay.com"
-REACT_APP_MOONPAY_LINK="https://us-central1-uniswap-mobile.cloudfunctions.net/signMoonpayLinkV2?platform=web&env=production"
+REACT_APP_MOONPAY_LINK="https://null.null"
 REACT_APP_MOONPAY_PUBLISHABLE_KEY="pk_live_uQG4BJC4w3cxnqpcSqAfohdBFDTsY6E"
 REACT_APP_SENTRY_ENABLED=true
 REACT_APP_SENTRY_TRACES_SAMPLE_RATE=0.00003
-REACT_APP_STATSIG_PROXY_URL="https://api.uniswap.org/v1/statsig-proxy"
+REACT_APP_STATSIG_PROXY_URL="https://null.null"
 REACT_APP_QUICKNODE_MAINNET_RPC_URL="https://ultra-blue-flower.quiknode.pro/770b22d5f362c537bc8fe19b034c45b22958f880"
 THE_GRAPH_SCHEMA_ENDPOINT="https://api.thegraph.com/subgraphs/name/uniswap/uniswap-v3?source=uniswap"
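Note: Create React App inlines REACT_APP_* variables into the bundle at build time, so the overrides above are baked into the compiled site. A minimal illustrative sketch of how such a value is consumed (only the variable names come from the hunks above; the consuming code is hypothetical):

```ts
// Hypothetical consumer; CRA replaces process.env.REACT_APP_* with string
// literals during `yarn build`, so these values cannot be changed at runtime.
const amplitudeProxyUrl = process.env.REACT_APP_AMPLITUDE_PROXY_URL // "https://null.null" after this change
const graphqlEndpoint = process.env.REACT_APP_AWS_API_ENDPOINT // "https://null.null" after this change

// Requests built from these values never reach the original api.uniswap.org
// backends, because null.null is not a resolvable host.
```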

.github/CODEOWNERS vendored (1 change)

@@ -1 +0,0 @@
* @uniswap/web-reviewers

@@ -1,22 +0,0 @@
---
name: Bug Report
about: Describe an issue in the Uniswap Interface
title: ''
labels: bug
assignees: ''
---
**Bug Description**
A clear and concise description of the bug.
**Steps to Reproduce**
1. Go to ...
2. Click on ...
...
**Expected Behavior**
A clear and concise description of what you expected to happen.
**Additional Context**
Add any other context about the problem here (screenshots, whether the bug occurs only in certain mobile/desktop/browser environments, etc.)

@@ -1,8 +0,0 @@
blank_issues_enabled: true
contact_links:
- name: Support
url: https://discord.gg/FCfyBSbCU5
about: Please ask and answer questions here
- name: List a token
url: https://github.com/Uniswap/default-token-list#adding-a-token
about: Any requests to add a token to Uniswap should go here

@@ -1,19 +0,0 @@
---
name: Feature Request
about: Suggest an idea for improving the UX of the Uniswap Interface
title: ''
labels: 'improvement'
assignees: ''
---
**Is your feature request related to a problem? Please describe.**
A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]
**Describe the solution you'd like**
A clear and concise description of what you want to happen.
**Describe alternatives you've considered**
A clear and concise description of any alternative solutions or features you've considered.
**Additional context**
Add any other context or screenshots about the feature request here.

@@ -1,48 +0,0 @@
name: Report
description: Report test failures via Slack
inputs:
name:
description: The name of the failing test
required: true
SLACK_WEBHOOK_URL:
description: The webhook URL to send the report to
required: true
runs:
using: composite
steps:
- uses: slackapi/slack-github-action@007b2c3c751a190b6f0f040e47ed024deaa72844
with:
payload: |
{
"text": "${{ inputs.name }} failing on `${{ github.ref_name }}`",
"blocks": [
{
"type": "section",
"text": {
"type": "mrkdwn",
"text": "*${{ inputs.name }} failing on `${{ github.ref_name }}`:* <https://github.com/${{ github.repository}}/actions/runs/${{ github.run_id }}|view failing action>"
}
},
{
"type": "section",
"text": {
"type": "mrkdwn",
"text": "_This is blocking pull requests and branch promotions._\n_Please prioritize fixing the build._"
}
}
]
}
env:
SLACK_WEBHOOK_URL: ${{ inputs.SLACK_WEBHOOK_URL }}
SLACK_WEBHOOK_TYPE: INCOMING_WEBHOOK
# The !oncall bot requires its own message:
- uses: slackapi/slack-github-action@007b2c3c751a190b6f0f040e47ed024deaa72844
with:
payload: |
{
"text": "!oncall web"
}
env:
SLACK_WEBHOOK_URL: ${{ inputs.SLACK_WEBHOOK_URL }}
SLACK_WEBHOOK_TYPE: INCOMING_WEBHOOK

@@ -1,49 +0,0 @@
name: Setup
description: checkout repo, setup node, and install node_modules
runs:
using: composite
steps:
- uses: actions/checkout@v3
- uses: actions/setup-node@v3
with:
node-version: 18
registry-url: https://registry.npmjs.org
# cache is intentionally omitted, as it is faster with yarn v1 to cache node_modules.
- uses: actions/cache@v3
id: install-cache
with:
# node_modules/.cache is intentionally omitted, as this is used for build tool caches.
path: |
node_modules
!node_modules/.cache
key: ${{ runner.os }}-install-${{ hashFiles('yarn.lock') }}
- if: steps.install-cache.outputs.cache-hit != 'true'
run: yarn install --frozen-lockfile --ignore-scripts
shell: bash
# Run patch-package to apply patches to dependencies.
- run: yarn patch-package
shell: bash
# Contracts are compiled from source. If source hasn't changed, the contracts do not need to be re-compiled.
- uses: actions/cache@v3
id: contracts-cache
with:
path: |
src/abis/types
src/types/v3
key: ${{ runner.os }}-contracts-${{ hashFiles('src/abis/**/*.json', 'node_modules/@uniswap/**/artifacts/contracts/**/*.json') }}
- if: steps.contracts-cache.outputs.cache-hit != 'true'
run: yarn contracts
shell: bash
# These operations cannot be cached, so they are run concurrently
# - ajv: Validators compile quickly, so caching can be omitted.
# - graphql: GraphQL is generated from schema and client-side graphql queries. The schema is always fetched and
# changes to client-side queries are hard to detect, so it is always re-generated.
# - i18n: Messages are extracted from source and compiled. No caching extractor is available (out-of-the-box).
- run: yarn concurrently --max-processes=100% npm:ajv npm:graphql npm:i18n
shell: bash

@@ -1,12 +0,0 @@
version: 2
updates:
- package-ecosystem: npm
# Files stored in repository root
directory: '/'
schedule:
interval: 'daily'
allow:
- dependency-name: '@uniswap/default-token-list'
- dependency-name: '@uniswap/token-lists'
reviewers:
- 'Uniswap/dependabot-reviewers'

@@ -1,52 +0,0 @@
<!-- Your PR title must follow conventional commits: https://github.com/Uniswap/interface#pr-title -->
## Description
<!-- Summary of change, including motivation and context. -->
<!-- Use verb-driven language: "Fixes XYZ" instead of "This change fixes XYZ" -->
<!-- Delete inapplicable lines: -->
_Linear ticket:_
_Slack thread:_
_Relevant docs:_
<!-- Delete this section if your change does not affect UI. -->
## Screen capture
### Before
| Mobile | Desktop |
| ------------ | ------------ |
| paste_before | paste_before |
### After
| Mobile | Desktop |
| ------------ | ----------- |
| paste_after | paste_after |
## Test plan
<!-- Delete this section if your change is not a bug fix. -->
### Reproducing the error
<!-- Include steps to reproduce the bug. -->
1.
### QA (ie manual testing)
<!-- Include steps to test the change, ensuring no regression. -->
- [ ] N/A
#### Devices
<!-- If applicable, include different devices and screen sizes that may be affected, and how you've tested them. -->
### Automated testing
<!-- If N/A, check and note so it is obvious to your reviewers and does not show up as an incomplete task. -->
<!-- eg - [x] Unit test N/A -->
- [ ] Unit test
- [ ] Integration/E2E test

@@ -1,73 +0,0 @@
name: 1 | Push main -> staging
# This CI job is responsible for pushing the current contents of the `main` branch to the
# `releases/staging` branch, which will in turn kick off a deploy to the staging environment.
on:
workflow_dispatch:
# https://stackoverflow.com/questions/57921401/push-to-origin-from-github-action
jobs:
push-staging:
name: 'Push to staging branch'
runs-on: ubuntu-latest
environment:
name: push/staging
steps:
- name: Check test status
uses: actions/github-script@v6.4.1
with:
script: |
const statuses = await github.rest.repos.listCommitStatusesForRef({
owner: context.repo.owner,
repo: context.repo.repo,
ref: context.sha
})
const status = statuses.data.find(status => status.context === 'Test / promotion')?.state || 'missing'
core.info('Status: ' + status)
if (status !== 'success') {
core.setFailed('"Test / promotion" must be successful before pushing')
}
- uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab
with:
token: ${{ secrets.RELEASE_SERVICE_ACCESS_TOKEN }}
ref: main
# The source file must exist for the corresponding translation messages to be downloaded.
- run: touch src/locales/en-US.po
- name: Download translations
uses: crowdin/github-action@3133cc916c35590475cf6705f482fb653d8e36e9
with:
upload_sources: false
download_translations: true
project_id: 458284
token: ${{ secrets.CROWDIN_PERSONAL_TOKEN_SECRET }}
source: 'src/locales/en-US.po'
translation: 'src/locales/%locale%.po'
localization_branch_name: main
create_pull_request: false
push_translations: false
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: Git config
run: |
git config user.name 'UL Service Account'
git config user.email 'hello-happy-puppy@users.noreply.github.com'
- name: Add translations
run: |
rm src/locales/en-US.po
git add -f src/locales/*.po
git commit -m 'ci(t9n): download translations from crowdin'
- name: Add CODEOWNERS
run: |
echo '* @uniswap/web-admins' > CODEOWNERS
git add CODEOWNERS
git commit -m 'ci: add global CODEOWNERS'
- name: Git push
run: |
git push origin main:releases/staging --force

@@ -1,64 +0,0 @@
name: 2 | Deploy staging
on:
push:
branches:
- 'releases/staging'
jobs:
deploy-to-staging:
runs-on: ubuntu-latest
environment:
name: deploy/staging
steps:
- uses: slackapi/slack-github-action@007b2c3c751a190b6f0f040e47ed024deaa72844
continue-on-error: true
with:
payload: |
{
"text": "Deploy _started_ for ${{ github.ref_name }}"
}
env:
SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
SLACK_WEBHOOK_TYPE: INCOMING_WEBHOOK
- uses: actions/checkout@v3
- uses: ./.github/actions/setup
- run: yarn build
env:
REACT_APP_STAGING: 1
- name: Update Cloudflare Pages deployment
id: pages-deployment
uses: cloudflare/pages-action@364c7ca09a4b57837c5967871d64a2c31adb8c0d
with:
apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }}
accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
projectName: interface-staging
directory: build
githubToken: ${{ secrets.GITHUB_TOKEN }}
# Cloudflare uses `main` as the default production branch, so we push using the `main` branch so that it can be aliased by a custom domain.
branch: main
- uses: slackapi/slack-github-action@007b2c3c751a190b6f0f040e47ed024deaa72844
continue-on-error: true
if: always()
with:
payload: |
{
"text": "Deploy *${{ steps.pages-deployment.outcome }}* for ${{ github.ref_name }}"
}
env:
SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
SLACK_WEBHOOK_TYPE: INCOMING_WEBHOOK
- name: Upload source maps to Sentry
uses: getsentry/action-release@bd5f874fcda966ba48139b0140fb3ec0cb3aabdd
continue-on-error: true
env:
SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }}
SENTRY_ORG: ${{ secrets.SENTRY_ORG }}
SENTRY_PROJECT: ${{ secrets.SENTRY_PROJECT }}
with:
environment: staging
sourcemaps: './build/static/js'
url_prefix: '~/static/js'

@@ -1,42 +0,0 @@
name: 3 | Push staging -> prod
# This CI job is responsible for force pushing the content of releases/staging to releases/prod. It
# is restricted to web-reviewers through virtue of the GitHub environment protection rules for the
# prod environment.
on:
workflow_dispatch:
jobs:
push-prod:
name: 'Push to prod branch'
runs-on: ubuntu-latest
environment:
name: push/prod
steps:
- name: Check test status
uses: actions/github-script@v6.4.1
with:
script: |
const statuses = await github.rest.repos.listCommitStatusesForRef({
owner: context.repo.owner,
repo: context.repo.repo,
ref: context.sha
})
const status = statuses.data.find(status => status.context === 'Test / promotion')?.state || 'missing'
core.info('Status: ' + status)
if (status !== 'success') {
core.setFailed('"Test / promotion" must be successful before pushing')
}
- uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab
with:
token: ${{ secrets.RELEASE_SERVICE_ACCESS_TOKEN }}
ref: releases/staging
- name: Git config
run: |
git config user.name "UL Service Account"
git config user.email "hello-happy-puppy@users.noreply.github.com"
- name: Git push
run: |
git push origin releases/staging:releases/prod --force

@@ -1,111 +0,0 @@
name: 4 | Deploy prod
on:
push:
branches:
- 'releases/prod'
jobs:
deploy-to-prod:
runs-on: ubuntu-latest
environment:
name: deploy/prod
steps:
- uses: slackapi/slack-github-action@007b2c3c751a190b6f0f040e47ed024deaa72844
continue-on-error: true
with:
payload: |
{
"text": "Deploy _started_ for ${{ github.ref_name }}"
}
env:
SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
SLACK_WEBHOOK_TYPE: INCOMING_WEBHOOK
- uses: actions/checkout@v3
- uses: ./.github/actions/setup
- run: yarn build
- name: Bump and tag
id: github-tag-action
uses: mathieudutour/github-tag-action@d745f2e74aaf1ee82e747b181f7a0967978abee0
with:
github_token: ${{ secrets.GITHUB_TOKEN }}
release_branches: releases/prod
default_bump: patch
- name: Pin to IPFS
id: pinata
uses: anantaramdas/ipfs-pinata-deploy-action@39bbda1ce1fe24c69c6f57861b8038278d53688d
with:
pin-name: Uniswap ${{ steps.github-tag-action.outputs.new_tag }}
path: './build'
pinata-api-key: ${{ secrets.PINATA_API_KEY }}
pinata-secret-api-key: ${{ secrets.PINATA_API_SECRET_KEY }}
- name: Convert CIDv0 to CIDv1
id: convert-cidv0
uses: uniswap/convert-cidv0-cidv1@v1.0.0
with:
cidv0: ${{ steps.pinata.outputs.hash }}
- name: Publish release
uses: actions/create-release@v1.1.0
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
tag_name: ${{ steps.github-tag-action.outputs.new_tag }}
release_name: Release ${{ steps.github-tag-action.outputs.new_tag }}
body: |
IPFS hash of the deployment:
- CIDv0: `${{ steps.pinata.outputs.hash }}`
- CIDv1: `${{ steps.convert-cidv0.outputs.cidv1 }}`
The latest release is always mirrored at [app.uniswap.org](https://app.uniswap.org).
You can also access the Uniswap Interface from an IPFS gateway.
**BEWARE**: The Uniswap interface uses [`localStorage`](https://developer.mozilla.org/en-US/docs/Web/API/Window/localStorage) to remember your settings, such as which tokens you have imported.
**You should always use an IPFS gateway that enforces origin separation**, or our hosted deployment of the latest release at [app.uniswap.org](https://app.uniswap.org).
Your Uniswap settings are never remembered across different URLs.
IPFS gateways:
- https://${{ steps.convert-cidv0.outputs.cidv1 }}.ipfs.dweb.link/
- https://${{ steps.convert-cidv0.outputs.cidv1 }}.ipfs.cf-ipfs.com/
- [ipfs://${{ steps.pinata.outputs.hash }}/](ipfs://${{ steps.pinata.outputs.hash }}/)
${{ steps.github-tag-action.outputs.changelog }}
- name: Update Cloudflare Pages deployment
uses: cloudflare/pages-action@364c7ca09a4b57837c5967871d64a2c31adb8c0d
id: pages-deployment
with:
apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }}
accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
projectName: ${{ secrets.CLOUDFLARE_PROJECT_NAME }}
directory: build
githubToken: ${{ secrets.GITHUB_TOKEN }}
# Cloudflare uses `main` as the default production branch, so we push using the `main` branch so that it can be aliased by a custom domain.
branch: main
- uses: slackapi/slack-github-action@007b2c3c751a190b6f0f040e47ed024deaa72844
continue-on-error: true
if: always()
with:
payload: |
{
"text": "Deploy *${{ steps.pages-deployment.outcome }}* for ${{ github.ref_name }}"
}
env:
SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
SLACK_WEBHOOK_TYPE: INCOMING_WEBHOOK
- name: Upload source maps to Sentry
uses: getsentry/action-release@4744f6a65149f441c5f396d5b0877307c0db52c7
continue-on-error: true
env:
SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }}
SENTRY_ORG: ${{ secrets.SENTRY_ORG }}
SENTRY_PROJECT: ${{ secrets.SENTRY_PROJECT }}
with:
environment: production
sourcemaps: './build/static/js'
url_prefix: '~/static/js'

@@ -1,17 +0,0 @@
name: Check PR Title
on:
pull_request_target:
types:
- opened
- edited
- synchronize
jobs:
# Ensures that the PR title adheres to [conventional commits](https://www.conventionalcommits.org/en/v1.0.0/).
conventional-commit:
runs-on: ubuntu-latest
steps:
- uses: amannn/action-semantic-pull-request@v3.4.0
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

@@ -1,26 +0,0 @@
name: Crowdin Upload
on:
push:
branches:
- main
jobs:
upload-sources:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: ./.github/actions/setup
- run: yarn i18n:extract
- name: Upload Crowdin sources
uses: crowdin/github-action@3133cc916c35590475cf6705f482fb653d8e36e9
with:
upload_sources: true
download_translations: false
project_id: 458284
token: ${{ secrets.CROWDIN_PERSONAL_TOKEN_SECRET }}
source: 'src/locales/en-US.po'
translation: 'src/locales/%locale%.po'
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

@@ -1,91 +0,0 @@
name: Slack notification on pushes to releases/*
# This CI job will push notifications to Slack whenever code is merged into any releases/* branch
#
# The steps of the command line kung-fu shown below are as follows:
# First we take the JSON-formatted Github context
# echo $GITHUB_CONTEXT \
# Then we parse out the specific fields we want for our messages using jq and format it into tab-separated values
# | jq '.event.commits[] | [.url, .id[0:7], .author.username, .timestamp, .message] | @tsv' \
# We need to do some cleaning on this output - specifically removing quotes and replacing newlines with something easier to split
# | sed 's/"//g' | sed 's/\\t/;/g' | sed 's/\\n/;/g' | sed 's/\\//g' \
# We then use awk to format the TSV into a Slack message
# | awk -F';' '{print "• <"$1"|"$2"> (<https://github.com/"$3"|"$3">, "$4") - "$5}' \
# We need to deal with some escaping issues with newlines so that we don't break the Slack message format
# | sed 's/$/\\n/g' | tr -d '\n' \
# Finally we have to truncate the message to 3,000 characters max, otherwise Slack will reject it
# | awk '{print substr($0,0,3000);}' \
# Then shove the bytes into a file to store them in their exact format
# > /tmp/parsed_github_context
on:
push:
branches:
- 'releases/*'
jobs:
notify-slack:
runs-on: ubuntu-latest
environment:
name: notify/releases
steps:
- name: Parse event to slug
id: parse-slug
env:
GITHUB_CONTEXT: ${{ toJson(github) }}
# Formats the contents of the GitHub event into slugs: one line per commit, formatted for Slack.
# Explanation for each line is in the comments above.
run: |
echo $GITHUB_CONTEXT \
| jq '.event.commits[] | [.url, .id[0:7], .author.username, .timestamp, .message] | @tsv' \
| sed 's/"//g' | sed 's/\\t/;/g' | sed 's/\\n/;/g' | sed 's/\\//g' \
| awk -F';' '{print "• <"$1"|"$2"> (<https://github.com/"$3"|"$3">, "$4") - "$5}' \
| sed 's/$/\\n/g' | tr -d '\n' \
| awk '{print substr($0,0,3000);}' \
> /tmp/parsed_github_context
echo "SLACK_COMMITS=$(cat /tmp/parsed_github_context)" >> "$GITHUB_OUTPUT"
- uses: slackapi/slack-github-action@007b2c3c751a190b6f0f040e47ed024deaa72844
with:
payload: |
{
"text": "GitHub Action build result: ${{ job.status }}\n${{ github.event.pull_request.html_url || github.event.head_commit.url }}",
"blocks": [
{
"type": "divider"
},
{
"type": "section",
"text": {
"type": "mrkdwn",
"text": "*Code merged to <https://github.com/Uniswap/interface/tree/${{ github.ref }}|${{ github.ref_name }}> branch:*\n"
}
},
{
"type": "divider"
},
{
"type": "section",
"text": {
"type": "mrkdwn",
"text": "*Actor*: <https://github.com/${{ github.triggering_actor }}/|${{ github.triggering_actor }}>\n*Force pushed*: ${{ github.event.forced || false }}\n"
}
},
{
"type": "section",
"text": {
"type": "mrkdwn",
"text": "${{ steps.parse-slug.outputs.SLACK_COMMITS || 'New branch created' }}"
}
},
{
"type": "section",
"text": {
"type": "mrkdwn",
"text": "<${{ github.event.compare}}|View Diff>"
}
}
]
}
env:
SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
SLACK_WEBHOOK_TYPE: INCOMING_WEBHOOK

@@ -1,24 +0,0 @@
name: Semgrep
on:
workflow_dispatch: {}
pull_request: {}
push:
branches:
- main
paths:
- .github/workflows/semgrep.yml
schedule:
# random HH:MM to avoid a load spike on GitHub Actions at 00:00
- cron: '2 11 * * *'
jobs:
semgrep:
name: semgrep/ci
runs-on: ubuntu-20.04
env:
SEMGREP_APP_TOKEN: ${{ secrets.SEMGREP_APP_TOKEN }}
container:
image: returntocorp/semgrep
if: (github.actor != 'dependabot[bot]')
steps:
- uses: actions/checkout@v3
- run: semgrep ci

@@ -1,278 +0,0 @@
name: Test
# Many build steps have their own caches, so each job has its own cache to improve subsequent build times.
# Build tools are configured to cache to node_modules/.cache, so they are cached independently of node_modules.
# Caches are saved every run (by keying on github.run_id), and the most recent available cache is loaded.
# See https://jongleberry.medium.com/speed-up-your-ci-and-dx-with-node-modules-cache-ac8df82b7bb0.
on:
push:
branches:
- main
- releases/staging
pull_request:
workflow_dispatch:
jobs:
lint:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: ./.github/actions/setup
- uses: actions/cache@v3
with:
path: node_modules/.cache
key: ${{ runner.os }}-eslint-${{ github.run_id }}
restore-keys: ${{ runner.os }}-eslint-
- run: yarn lint
- if: failure() && github.ref_name == 'main'
uses: ./.github/actions/report
with:
name: Lint
SLACK_WEBHOOK_URL: ${{ secrets.SLACK_TEST_REPORTER_WEBHOOK }}
typecheck:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: ./.github/actions/setup
- uses: actions/cache@v3
with:
path: node_modules/.cache
key: ${{ runner.os }}-tsc-${{ github.run_id }}
restore-keys: ${{ runner.os }}-tsc-
- run: yarn typecheck
- if: failure() && github.ref_name == 'main'
uses: ./.github/actions/report
with:
name: Typecheck
SLACK_WEBHOOK_URL: ${{ secrets.SLACK_TEST_REPORTER_WEBHOOK }}
deps-tests:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: ./.github/actions/setup
- run: yarn yarn-deduplicate --strategy=highest --list --fail
- if: failure() && github.ref_name == 'main'
uses: ./.github/actions/report
with:
name: Dependency checks
SLACK_WEBHOOK_URL: ${{ secrets.SLACK_TEST_REPORTER_WEBHOOK }}
unit-tests:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: ./.github/actions/setup
- uses: actions/cache@v3
with:
path: node_modules/.cache
key: ${{ runner.os }}-jest-${{ github.run_id }}
restore-keys: ${{ runner.os }}-jest-
- run: yarn test --coverage --maxWorkers=100%
- uses: codecov/codecov-action@v3
with:
token: ${{ secrets.CODECOV_TOKEN }}
fail_ci_if_error: false
flags: unit-tests
- if: failure() && github.ref_name == 'main'
uses: ./.github/actions/report
with:
name: Unit tests
SLACK_WEBHOOK_URL: ${{ secrets.SLACK_TEST_REPORTER_WEBHOOK }}
build:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: ./.github/actions/setup
- uses: actions/cache@v3
with:
path: node_modules/.swc
key: ${{ runner.os }}-swc-${{ github.run_id }}
restore-keys: ${{ runner.os }}-swc-
- run: yarn build
- uses: actions/upload-artifact@v3
with:
name: build
path: build
if-no-files-found: error
cypress-typecheck:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: ./.github/actions/setup
- uses: actions/cache@v3
with:
path: node_modules/.cache
key: ${{ runner.os }}-cypress-tsc-${{ github.run_id }}
restore-keys: ${{ runner.os }}-cypress-tsc-
- run: yarn typecheck:cypress
- if: failure() && github.ref_name == 'main'
uses: ./.github/actions/report
with:
name: Cypress typecheck
SLACK_WEBHOOK_URL: ${{ secrets.SLACK_TEST_REPORTER_WEBHOOK }}
# Allows for parallel re-runs of cypress tests without re-building.
cypress-rerun:
runs-on: ubuntu-latest
steps:
- run: exit 0
cypress-test-matrix:
needs: [build, cypress-rerun]
runs-on: ubuntu-latest
strategy:
fail-fast: false
matrix:
containers: [1, 2, 3, 4]
steps:
- uses: actions/checkout@v3
- uses: ./.github/actions/setup
- uses: actions/cache@v3
with:
path: /root/.cache/Cypress
key: ${{ runner.os }}-cypress-${{ hashFiles('**/node_modules/cypress/package.json') }}
- run: |
yarn cypress install
yarn cypress info
- uses: actions/download-artifact@v3
with:
name: build
path: build
- uses: actions/cache/restore@v3
with:
path: cache
key: ${{ runner.os }}-hardhat-${{ hashFiles('hardhat.config.js') }}-${{ github.run_id }}
restore-keys: ${{ runner.os }}-hardhat-${{ hashFiles('hardhat.config.js') }}-
- uses: cypress-io/github-action@v4
with:
install: false
record: true
parallel: true
start: yarn serve
wait-on: 'http://localhost:3000'
browser: electron
group: e2e
spec: ${{ github.ref_name == 'releases/staging' && 'cypress/{e2e,staging}/**/*.test.ts' || 'cypress/e2e/**/*.test.ts' }}
env:
CYPRESS_RECORD_KEY: ${{ secrets.CYPRESS_RECORD_KEY }}
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
COMMIT_INFO_BRANCH: ${{ github.event.pull_request.head.ref || github.ref_name }}
COMMIT_INFO_MESSAGE: ${{ github.event.pull_request.title || github.event.head_commit.message }}
COMMIT_INFO_AUTHOR: ${{ github.event.sender.login || github.event.head_commit.author.login }}
# Cypress requires an email for filtering by author, but GitHub does not expose one.
# GitHub's public profile email can be deterministically produced from user id/login.
COMMIT_INFO_EMAIL: ${{ github.event.sender.id || github.event.head_commit.author.id }}+${{ github.event.sender.login || github.event.head_commit.author.login }}@users.noreply.github.com
COMMIT_INFO_SHA: ${{ github.event.pull_request.head.sha || github.event.head_commit.sha }}
COMMIT_INFO_TIMESTAMP: ${{ github.event.pull_request.updated_at || github.event.head_commit.timestamp }}
CYPRESS_PULL_REQUEST_ID: ${{ github.event.pull_request.number }}
CYPRESS_PULL_REQUEST_URL: ${{ github.event.pull_request.html_url }}
- if: failure() && github.ref_name == 'main'
uses: ./.github/actions/report
with:
name: Cypress tests
SLACK_WEBHOOK_URL: ${{ secrets.SLACK_TEST_REPORTER_WEBHOOK }}
- uses: actions/upload-artifact@v3
with:
name: hardhat-cache
path: cache
hardhat-cache:
needs: [cypress-test-matrix]
runs-on: ubuntu-latest
steps:
- uses: actions/download-artifact@v3
with:
name: hardhat-cache
path: cache
- uses: actions/cache/save@v3
with:
path: cache
key: ${{ runner.os }}-hardhat-${{ hashFiles('hardhat.config.js') }}-${{ github.run_id }}
cloud-typecheck:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: ./.github/actions/setup
- uses: actions/cache@v3
with:
path: node_modules/.cache
key: ${{ runner.os }}-cloud-tsc-${{ github.run_id }}
restore-keys: ${{ runner.os }}-cloud-tsc-
- run: yarn typecheck:cloud
- if: failure() && github.ref_name == 'main'
uses: ./.github/actions/report
with:
name: Cloud typecheck
SLACK_WEBHOOK_URL: ${{ secrets.SLACK_TEST_REPORTER_WEBHOOK }}
cloud-tests:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: ./.github/actions/setup
- uses: actions/cache@v3
with:
path: node_modules/.cache
key: ${{ runner.os }}-cloud-jest-${{ github.run_id }}
restore-keys: ${{ runner.os }}-cloud-jest-
# Ignore start:cloud output so it doesn't flood the test output.
# Only use 1 worker for testing, as the other is used to run start:cloud (the proxy server under test).
- run: yarn start-server-and-test 'yarn start:cloud >/dev/null' 3000 'yarn test:cloud --coverage --maxWorkers=1'
- uses: codecov/codecov-action@v3
with:
token: ${{ secrets.CODECOV_TOKEN }}
fail_ci_if_error: false
flags: cloud-tests
pre:
if: ${{ github.ref_name == 'main' || github.ref_name == 'releases/staging' }}
runs-on: ubuntu-latest
steps:
- uses: actions/github-script@v6.4.1
with:
script: |
github.rest.repos.createCommitStatus({
owner: context.repo.owner,
repo: context.repo.repo,
sha: context.sha,
state: 'pending',
context: 'Test / promotion',
description: 'Running tests...',
target_url: 'https://github.com/Uniswap/interface/actions/runs/' + context.runId
})
post:
if: ${{ github.ref_name == 'main' || github.ref_name == 'releases/staging' }}
needs: [pre, lint, typecheck, deps-tests, unit-tests, cypress-test-matrix, cloud-tests]
runs-on: ubuntu-latest
steps:
- uses: actions/github-script@v6.4.1
with:
script: |
github.rest.repos.createCommitStatus({
owner: context.repo.owner,
repo: context.repo.repo,
sha: context.sha,
state: ${{ env.STATUS }} ? 'success' : 'failure',
context: 'Test / promotion',
description: ${{ env.STATUS }} ? 'All tests passed' : 'One or more tests failed and are blocking promotion',
target_url: 'https://github.com/Uniswap/interface/actions/runs/' + context.runId
})
env:
STATUS: |
${{ needs.lint.result == 'success' }} &&
${{ needs.typecheck.result == 'success' }} &&
${{ needs.deps-tests.result == 'success' }} &&
${{ needs.unit-tests.result == 'success' }} &&
${{ needs.cypress-test-matrix.result == 'success' }} &&
${{ needs.cloud-tests.result == 'success' }}

@@ -9,19 +9,13 @@ const thegraphConfig = require('../graphql.thegraph.config')
 const exec = promisify(child_process.exec)
-function fetchSchema(url, outputFile) {
-  exec(`yarn --silent get-graphql-schema --h Origin=https://app.uniswap.org ${url}`)
-    .then(({ stderr, stdout }) => {
-      if (stderr) {
-        throw new Error(stderr)
-      } else {
-        fs.writeFile(outputFile, stdout)
-      }
-    })
-    .catch((err) => {
-      console.error(err)
-      console.error(`Failed to fetch schema from ${url}`)
-    })
+async function fetchSchema(url, outputFile) {
+  try {
+    const { stdout } = await exec(`yarn --silent get-graphql-schema --h Origin=https://app.uniswap.org ${url}`);
+    await fs.writeFile(outputFile, stdout);
+  } catch(err){
+    console.error(`Failed to fetch schema from ${url}`)
+  }
 }
 fetchSchema(process.env.THE_GRAPH_SCHEMA_ENDPOINT, thegraphConfig.schema)
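For readability, a self-contained sketch of the rewritten fetcher shown in the hunk above. The imports are assumptions (the hunk does not show them); it presumes util.promisify for exec and fs/promises for the awaited write, and TypeScript is used only for illustration:

```ts
import { exec as execCallback } from 'node:child_process'
import { writeFile } from 'node:fs/promises'
import { promisify } from 'node:util'

const exec = promisify(execCallback)

// Mirrors the async version above: run get-graphql-schema, write stdout to the
// output file, and log (rather than throw) on failure.
async function fetchSchema(url: string, outputFile: string): Promise<void> {
  try {
    const { stdout } = await exec(`yarn --silent get-graphql-schema --h Origin=https://app.uniswap.org ${url}`)
    await writeFile(outputFile, stdout)
  } catch {
    console.error(`Failed to fetch schema from ${url}`)
  }
}
```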

@@ -1,8 +1,6 @@
 export const UNI_LIST = 'https://cloudflare-ipfs.com/ipns/tokens.uniswap.org'
 export const UNI_EXTENDED_LIST = 'https://cloudflare-ipfs.com/ipns/extendedtokens.uniswap.org'
-const UNI_UNSUPPORTED_LIST = 'https://cloudflare-ipfs.com/ipns/unsupportedtokens.uniswap.org'
 const AAVE_LIST = 'tokenlist.aave.eth'
-const BA_LIST = 'https://raw.githubusercontent.com/The-Blockchain-Association/sec-notice-list/master/ba-sec-list.json'
 // TODO(WEB-2282): Re-enable CMC list once we have a better solution for handling large lists.
 // const CMC_ALL_LIST = 'https://s3.coinmarketcap.com/generated/dex/tokens/eth-tokens-all.json'
 const COINGECKO_LIST = 'https://tokens.coingecko.com/uniswap/all.json'
@@ -27,8 +25,6 @@ export const AVALANCHE_LIST =
 export const BASE_LIST =
   'https://raw.githubusercontent.com/ethereum-optimism/ethereum-optimism.github.io/master/optimism.tokenlist.json'
-export const UNSUPPORTED_LIST_URLS: string[] = [BA_LIST, UNI_UNSUPPORTED_LIST]
 // default lists to be 'active' aka searched across
 export const DEFAULT_ACTIVE_LIST_URLS: string[] = [UNI_LIST]
 export const DEFAULT_INACTIVE_LIST_URLS: string[] = [
@@ -52,8 +48,7 @@ export const DEFAULT_INACTIVE_LIST_URLS: string[] = [
   CELO_LIST,
   PLASMA_BNB_LIST,
   AVALANCHE_LIST,
-  BASE_LIST,
-  ...UNSUPPORTED_LIST_URLS,
+  BASE_LIST
 ]
 export const DEFAULT_LIST_OF_LISTS: string[] = [...DEFAULT_ACTIVE_LIST_URLS, ...DEFAULT_INACTIVE_LIST_URLS]

@@ -126,34 +126,27 @@ export const FALLBACK_URLS = {
  * These are the URLs used by the interface when there is not another available source of chain data.
  */
 export const RPC_URLS = {
-  [ChainId.MAINNET]: [
-    `https://mainnet.infura.io/v3/${INFURA_KEY}`,
-    QUICKNODE_MAINNET_RPC_URL,
-    ...FALLBACK_URLS[ChainId.MAINNET],
-  ],
-  [ChainId.GOERLI]: [`https://goerli.infura.io/v3/${INFURA_KEY}`, ...FALLBACK_URLS[ChainId.GOERLI]],
+  [ChainId.MAINNET]: [`https://rpc.mevblocker.io/fast`, QUICKNODE_MAINNET_RPC_URL, ...FALLBACK_URLS[ChainId.MAINNET]],
+  [ChainId.GOERLI]: [`https://ethereum-goerli.publicnode.com`, ...FALLBACK_URLS[ChainId.GOERLI]],
   [ChainId.SEPOLIA]: [`https://sepolia.infura.io/v3/${INFURA_KEY}`, ...FALLBACK_URLS[ChainId.SEPOLIA]],
-  [ChainId.OPTIMISM]: [`https://optimism-mainnet.infura.io/v3/${INFURA_KEY}`, ...FALLBACK_URLS[ChainId.OPTIMISM]],
+  [ChainId.OPTIMISM]: [`https://optimism.llamarpc.com`, ...FALLBACK_URLS[ChainId.OPTIMISM]],
   [ChainId.OPTIMISM_GOERLI]: [
     `https://optimism-goerli.infura.io/v3/${INFURA_KEY}`,
     ...FALLBACK_URLS[ChainId.OPTIMISM_GOERLI],
   ],
-  [ChainId.ARBITRUM_ONE]: [
-    `https://arbitrum-mainnet.infura.io/v3/${INFURA_KEY}`,
-    ...FALLBACK_URLS[ChainId.ARBITRUM_ONE],
-  ],
+  [ChainId.ARBITRUM_ONE]: [`https://arbitrum.llamarpc.com`, ...FALLBACK_URLS[ChainId.ARBITRUM_ONE]],
   [ChainId.ARBITRUM_GOERLI]: [
     `https://arbitrum-goerli.infura.io/v3/${INFURA_KEY}`,
     ...FALLBACK_URLS[ChainId.ARBITRUM_GOERLI],
   ],
-  [ChainId.POLYGON]: [`https://polygon-mainnet.infura.io/v3/${INFURA_KEY}`, ...FALLBACK_URLS[ChainId.POLYGON]],
+  [ChainId.POLYGON]: [`https://polygon.llamarpc.com`, ...FALLBACK_URLS[ChainId.POLYGON]],
   [ChainId.POLYGON_MUMBAI]: [
     `https://polygon-mumbai.infura.io/v3/${INFURA_KEY}`,
     ...FALLBACK_URLS[ChainId.POLYGON_MUMBAI],
   ],
   [ChainId.CELO]: FALLBACK_URLS[ChainId.CELO],
   [ChainId.CELO_ALFAJORES]: FALLBACK_URLS[ChainId.CELO_ALFAJORES],
-  [ChainId.BNB]: [QUICKNODE_BNB_RPC_URL, ...FALLBACK_URLS[ChainId.BNB]],
+  [ChainId.BNB]: ['https://bsc.publicnode.com', 'https://binance.llamarpc.com', ...FALLBACK_URLS[ChainId.BNB]],
   [ChainId.AVALANCHE]: [`https://avalanche-mainnet.infura.io/v3/${INFURA_KEY}`, ...FALLBACK_URLS[ChainId.AVALANCHE]],
   [ChainId.BASE]: [`https://base-mainnet.infura.io/v3/${INFURA_KEY}`, ...FALLBACK_URLS[ChainId.BASE]],
 }
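The hunk above swaps keyed Infura endpoints for public, keyless RPC URLs on several chains. A minimal illustrative sketch (not the interface's actual provider wiring) of querying one of the new endpoints with ethers v5:

```ts
import { StaticJsonRpcProvider } from '@ethersproject/providers'

// First mainnet entry from the new RPC_URLS above; no API key is required.
const provider = new StaticJsonRpcProvider('https://rpc.mevblocker.io/fast', 1)

// Simple connectivity check: ask the endpoint for the current block number.
async function latestBlock(): Promise<number> {
  return provider.getBlockNumber()
}
```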

@@ -20,6 +20,7 @@ import {
   OP,
   PORTAL_ETH_CELO,
   PORTAL_USDC_CELO,
+  TORN_MAINNET,
   USDC_ARBITRUM,
   USDC_ARBITRUM_GOERLI,
   USDC_AVALANCHE,
@@ -67,10 +68,10 @@ const WRAPPED_NATIVE_CURRENCIES_ONLY: ChainTokenList = Object.fromEntries(
export const COMMON_BASES: ChainCurrencyList = {
[ChainId.MAINNET]: [
nativeOnChain(ChainId.MAINNET),
TORN_MAINNET,
DAI,
USDC_MAINNET,
USDT,
WBTC,
WRAPPED_NATIVE_CURRENCY[ChainId.MAINNET] as Token,
],
[ChainId.GOERLI]: [nativeOnChain(ChainId.GOERLI), WRAPPED_NATIVE_CURRENCY[ChainId.GOERLI] as Token],

@@ -93,10 +93,6 @@ export function checkWarning(tokenAddress: string, chainId?: number | null) {
       return null
     case TOKEN_LIST_TYPES.UNI_EXTENDED:
       return MediumWarning
-    case TOKEN_LIST_TYPES.UNKNOWN:
-      return StrongWarning
-    case TOKEN_LIST_TYPES.BLOCKED:
-      return BlockedWarning
     case TOKEN_LIST_TYPES.BROKEN:
       return BlockedWarning
   }

@@ -2,7 +2,7 @@ import { TokenInfo } from '@uniswap/token-lists'
 import { ListsState } from 'state/lists/reducer'
 import store from '../state'
-import { UNI_EXTENDED_LIST, UNI_LIST, UNSUPPORTED_LIST_URLS } from './lists'
+import { UNI_EXTENDED_LIST, UNI_LIST} from './lists'
 import { COMMON_BASES } from './routing'
 import brokenTokenList from './tokenLists/broken.tokenlist.json'
 import { NATIVE_CHAIN_ID } from './tokens'
@@ -37,14 +37,6 @@
     brokenTokenList.tokens.forEach((token) => {
       this.dict[token.address.toLowerCase()] = TOKEN_LIST_TYPES.BROKEN
     })
-    // Initialize blocked tokens from all urls included
-    UNSUPPORTED_LIST_URLS.map((url) => lists.byUrl[url]?.current?.tokens)
-      .filter((x): x is TokenInfo[] => !!x)
-      .flat(1)
-      .forEach((token) => {
-        this.dict[token.address.toLowerCase()] = TOKEN_LIST_TYPES.BLOCKED
-      })
   }
   checkToken(address: string, chainId?: number | null) {

@@ -15,6 +15,13 @@ export const USDC_MAINNET = new Token(
   'USDC',
   'USD//C'
 )
+export const TORN_MAINNET = new Token(
+  ChainId.MAINNET,
+  '0x77777FeDdddFfC19Ff86DB637967013e6C6A116C',
+  18,
+  'TORN',
+  'Tornado Cash'
+)
 const USDC_GOERLI = new Token(ChainId.GOERLI, '0x07865c6e87b9f70255377e024ace6630c1eaa37f', 6, 'USDC', 'USD//C')
 const USDC_SEPOLIA = new Token(ChainId.SEPOLIA, '0x6f14C02Fc1F78322cFd7d707aB90f18baD3B54f5', 6, 'USDC', 'USD//C')
 export const USDC_OPTIMISM = new Token(

@@ -5,7 +5,7 @@ import { AppState } from 'state/reducer'
 import sortByListPriority from 'utils/listSort'
 import BROKEN_LIST from '../../constants/tokenLists/broken.tokenlist.json'
-import { DEFAULT_ACTIVE_LIST_URLS, UNSUPPORTED_LIST_URLS } from './../../constants/lists'
+import { DEFAULT_ACTIVE_LIST_URLS } from './../../constants/lists'
 type Mutable<T> = {
   -readonly [P in keyof T]: Mutable<T[P]>
@@ -75,9 +75,6 @@ export function useUnsupportedTokenList(): TokenAddressMap {
   // get hard-coded broken tokens
   const brokenListMap = useMemo(() => tokensToChainTokenMap(BROKEN_LIST), [])
-  // get dynamic list of unsupported tokens
-  const loadedUnsupportedListMap = useCombinedTokenMapFromUrls(UNSUPPORTED_LIST_URLS)
-  // format into one token address map
-  return useMemo(() => combineMaps(brokenListMap, loadedUnsupportedListMap), [brokenListMap, loadedUnsupportedListMap])
+  return brokenListMap;
 }

@@ -1,6 +1,6 @@
 import { getVersionUpgrade, VersionUpgrade } from '@uniswap/token-lists'
 import { useWeb3React } from '@web3-react/core'
-import { DEFAULT_LIST_OF_LISTS, UNSUPPORTED_LIST_URLS } from 'constants/lists'
+import { DEFAULT_LIST_OF_LISTS } from 'constants/lists'
 import TokenSafetyLookupTable from 'constants/tokenSafetyLookup'
 import { useStateRehydrated } from 'hooks/useStateRehydrated'
 import useInterval from 'lib/hooks/useInterval'
@@ -32,7 +32,7 @@ export default function Updater(): null {
     if (!isWindowVisible) return
     DEFAULT_LIST_OF_LISTS.forEach((url) => {
       // Skip validation on unsupported lists
-      const isUnsupportedList = UNSUPPORTED_LIST_URLS.includes(url)
+      const isUnsupportedList = false;
       fetchList(url, isUnsupportedList).catch((error) => console.debug('interval list fetching error', error))
     })
   }, [fetchList, isWindowVisible])
@@ -50,14 +50,6 @@
         fetchList(listUrl).catch((error) => console.debug('list added fetching error', error))
       }
     })
-    UNSUPPORTED_LIST_URLS.forEach((listUrl) => {
-      const list = lists[listUrl]
-      if (!list || (!list.current && !list.loadingRequestId && !list.error)) {
-        fetchList(listUrl, /* isUnsupportedList= */ true).catch((error) =>
-          console.debug('list added fetching error', error)
-        )
-      }
-    })
   }, [dispatch, fetchList, lists, rehydrated])
   // automatically update lists for every version update