build: cache generated files across builds (#6495)

* build: cache generated files across builds

* docs: lingui pkg-up comment

* docs: explain clean extraction
Zach Pomerantz 2023-05-09 15:49:53 -07:00 committed by GitHub
parent 2f80646ddd
commit 2150347ba2
11 changed files with 124 additions and 27 deletions

@@ -10,11 +10,12 @@ runs:
with:
node-version: 14
registry-url: https://registry.npmjs.org
cache: 'yarn'
# node_modules/.cache is intentionally omitted, as this is used for build tool caches.
- uses: actions/cache@v3
id: install-cache
with:
# node_modules/.cache is intentionally omitted, as this is used for build tool caches.
path: |
node_modules
!node_modules/.cache
@@ -22,3 +23,55 @@ runs:
- if: steps.install-cache.outputs.cache-hit != 'true'
run: yarn install --frozen-lockfile --ignore-scripts
shell: bash
# Validators compile quickly, so caching can be omitted.
- run: yarn ajv
shell: bash
# Contracts are compiled from source. If source hasn't changed, the contracts do not need to be re-compiled.
- uses: actions/cache@v3
id: contracts-cache
with:
path: |
src/abis/types
src/types/v3
key: ${{ runner.os }}-contracts-${{ hashFiles('src/abis/**/*.json', 'node_modules/@uniswap/**/artifacts/contracts/**/*.json') }}
- if: steps.contracts-cache.outputs.cache-hit != 'true'
run: yarn contracts
shell: bash
# GraphQL is generated from schema. The schema is always fetched, but if unchanged, graphql does not need to be re-generated.
- run: yarn graphql:fetch
shell: bash
- uses: actions/cache@v3
id: graphql-cache
with:
path: src/graphql/**/__generated__
key: ${{ runner.os }}-graphql-${{ hashFiles('src/graphql/**/schema.graphql') }}
- if: steps.graphql-cache.outputs.cache-hit != 'true'
run: yarn graphql:generate
shell: bash
# Messages are extracted from source.
# A record of source file content hashes is maintained in node_modules/.cache/lingui by a custom extractor.
# Messages are always extracted, but the custom extractor uses this cache to skip files whose content has not changed.
- uses: actions/cache@v3
id: i18n-extract-cache
with:
path: |
src/locales/en-US.po
node_modules/.cache
key: ${{ runner.os }}-i18n-extract-${{ github.run_id }}
restore-keys: ${{ runner.os }}-i18n-extract-
- run: yarn i18n:extract
shell: bash
# Translations are compiled from messages. If messages haven't changed, the translations do not need to be re-compiled.
- uses: actions/cache@v3
id: i18n-compile-cache
with:
path: src/locales/*.js
key: ${{ runner.os }}-i18n-compile-${{ hashFiles('src/locales/*.po') }}
- if: steps.i18n-compile-cache.outputs.cache-hit != 'true'
run: yarn i18n:compile
shell: bash

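The contracts and graphql steps above key their caches on a hash of the generation inputs, so a cache hit means the generated output is already current and the corresponding yarn step is skipped. A minimal TypeScript sketch of that keying idea outside of Actions (cacheKeyFor and shouldRegenerate are illustrative helpers, not part of the workflow):

import { createHash } from 'crypto'
import { readFileSync } from 'fs'

// Combine the contents of every input file into one digest, analogous to hashFiles() in the keys above.
function cacheKeyFor(prefix: string, inputFiles: string[]): string {
  const hash = createHash('sha256')
  for (const file of [...inputFiles].sort()) {
    hash.update(readFileSync(file))
  }
  return `${prefix}-${hash.digest('hex')}`
}

// Regeneration is only needed when the key derived from the current inputs differs from the cached one.
function shouldRegenerate(cachedKey: string | undefined, currentKey: string): boolean {
  return cachedKey !== currentKey
}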
@@ -47,7 +47,6 @@ jobs:
steps:
- uses: actions/checkout@v3
- uses: ./.github/actions/setup
- run: yarn prepare
- run: yarn build
- name: Pin to IPFS

@@ -2,6 +2,7 @@ name: Test
# Many build steps have their own caches, so each job has its own cache to improve subsequent build times.
# Build tools are configured to cache to node_modules/.cache, so this is cached independently of node_modules.
# Caches are saved every run (by keying on github.run_id), and the most recent available cache is loaded.
# See https://jongleberry.medium.com/speed-up-your-ci-and-dx-with-node-modules-cache-ac8df82b7bb0.
on:
@@ -37,7 +38,6 @@ jobs:
path: node_modules/.cache
key: ${{ runner.os }}-tsc-${{ hashFiles('**/yarn.lock') }}-${{ github.run_id }}
restore-keys: ${{ runner.os }}-tsc-${{ hashFiles('**/yarn.lock') }}-
- run: yarn prepare
- run: yarn typecheck
deps-tests:
@@ -58,7 +58,6 @@ jobs:
path: node_modules/.cache
key: ${{ runner.os }}-jest-${{ hashFiles('**/yarn.lock') }}-${{ github.run_id }}
restore-keys: ${{ runner.os }}-jest-${{ hashFiles('**/yarn.lock') }}-
- run: yarn prepare
- run: yarn test --coverage --maxWorkers=100%
- uses: codecov/codecov-action@v3
with:
@@ -78,7 +77,6 @@ jobs:
path: node_modules/.cache
key: ${{ runner.os }}-build-e2e-${{ hashFiles('**/yarn.lock') }}-${{ github.run_id }}
restore-keys: ${{ runner.os }}-build-e2e-${{ hashFiles('**/yarn.lock') }}-
- run: yarn prepare
- run: yarn build:e2e
env:
NODE_OPTIONS: "--max_old_space_size=4096"

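Because the keys above end in github.run_id, a new run never finds an exact match and therefore always saves a fresh cache, while restore-keys lets the restore step fall back to the most recent cache sharing the prefix. A rough TypeScript model of that resolution order (SavedCache and pickCache are illustrative, not Actions APIs):

interface SavedCache {
  key: string
  savedAt: number // epoch millis of when the cache was saved
}

// Prefer an exact key match; otherwise fall back to the newest cache whose key starts with the restore prefix.
function pickCache(saved: SavedCache[], exactKey: string, restorePrefix: string): SavedCache | undefined {
  const exact = saved.find((c) => c.key === exactKey)
  if (exact) return exact
  return saved
    .filter((c) => c.key.startsWith(restorePrefix))
    .sort((a, b) => b.savedAt - a.savedAt)[0]
}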
@@ -1,6 +1,6 @@
/* eslint-env node */
const defaultConfig = require('./graphql.config')
const defaultConfig = require('./graphql.data.config')
module.exports = {
src: defaultConfig.src,

@@ -1,8 +1,63 @@
import { default as babelExtractor } from '@lingui/cli/api/extractors/babel'
import { createHash } from 'crypto'
import { mkdirSync, readFileSync, writeFileSync } from 'fs'
import * as path from 'path'
import * as pkgUp from 'pkg-up' // pkg-up is used by lingui, and is used here to match lingui's own extractors
/**
* A custom caching extractor for CI.
* Falls back to the babelExtractor in a non-CI (i.e. local) environment.
* Caches the hash of each file's last extracted content, and skips re-extraction when that hash is already in the cache.
* In CI, re-extracting all files takes over a minute, so this is a significant savings.
*/
const cachingExtractor: typeof babelExtractor = {
match(filename: string) {
return babelExtractor.match(filename)
},
extract(filename: string, code: string, ...options: unknown[]) {
if (!process.env.CI) return babelExtractor.extract(filename, code, ...options)
// This runs from node_modules/@lingui/conf, so we need to back out to the root.
const pkg = pkgUp.sync()
if (!pkg) throw new Error('No root found')
const root = path.dirname(pkg)
const filePath = path.join(root, filename)
const file = readFileSync(filePath)
const hash = createHash('sha256').update(file).digest('hex')
const cacheRoot = path.join(root, 'node_modules/.cache/lingui')
mkdirSync(cacheRoot, { recursive: true })
const cachePath = path.join(cacheRoot, filename.replace(/\//g, '-'))
// Only read from the cache if we're not performing a "clean" run, as a clean run must re-extract from all
// files to ensure that obsolete messages are removed.
if (!process.argv.includes('--clean')) {
try {
const cache = readFileSync(cachePath, 'utf8')
if (cache === hash) return
} catch (e) {
// It should not be considered an error if there is no cache file.
}
}
writeFileSync(cachePath, hash)
return babelExtractor.extract(filename, code, ...options)
},
}
const linguiConfig = {
catalogs: [
{
path: '<rootDir>/src/locales/{locale}',
include: ['<rootDir>/src'],
include: ['<rootDir>/src/**/*.ts', '<rootDir>/src/**/*.tsx'],
exclude: [
'<rootDir>/src/**/*.d.ts',
'<rootDir>/src/**/*.test.*',
'<rootDir>/src/types/v3/**',
'<rootDir>/src/abis/types/**',
'<rootDir>/src/graphql/**/__generated__/**',
],
},
],
compileNamespace: 'cjs',
@@ -53,6 +108,7 @@ const linguiConfig = {
runtimeConfigModule: ['@lingui/core', 'i18n'],
sourceLocale: 'en-US',
pseudoLocale: 'pseudo',
extractors: [cachingExtractor],
}
export default linguiConfig

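The extractor above skips a file when its current sha256 matches the hash recorded under node_modules/.cache/lingui. A standalone TypeScript sketch of that check, assuming the same cache layout (hasChangedSinceLastExtract is a hypothetical helper, not part of the lingui config):

import { createHash } from 'crypto'
import { readFileSync } from 'fs'
import * as path from 'path'

function hasChangedSinceLastExtract(root: string, filename: string): boolean {
  const hash = createHash('sha256').update(readFileSync(path.join(root, filename))).digest('hex')
  // Cache entries are flat files named after the source path, each holding the last extracted content hash.
  const cachePath = path.join(root, 'node_modules/.cache/lingui', filename.replace(/\//g, '-'))
  try {
    return readFileSync(cachePath, 'utf8') !== hash
  } catch {
    // No cache entry yet, so the file must be extracted.
    return true
  }
}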
@@ -5,19 +5,20 @@
"homepage": ".",
"license": "GPL-3.0-or-later",
"scripts": {
"ajv": "node scripts/compile-ajv-validators.js",
"contracts:compile:abi": "typechain --target ethers-v5 --out-dir src/abis/types \"./src/abis/**/*.json\"",
"contracts:compile:v3": "typechain --target ethers-v5 --out-dir src/types/v3 \"./node_modules/@uniswap/**/artifacts/contracts/**/*[!dbg].json\"",
"contracts:compile": "yarn contracts:compile:abi && yarn contracts:compile:v3",
"contracts": "yarn contracts:compile:abi && yarn contracts:compile:v3",
"graphql:fetch": "node scripts/fetch-schema.js",
"graphql:generate:data": "graphql-codegen --config apollo-codegen.ts",
"graphql:generate:thegraph": "graphql-codegen --config apollo-codegen_thegraph.ts",
"graphql:generate:data": "graphql-codegen --config graphql.data.codegen.config.ts",
"graphql:generate:thegraph": "graphql-codegen --config graphql.thegraph.codegen.config.ts",
"graphql:generate": "yarn graphql:generate:data && yarn graphql:generate:thegraph",
"prei18n:extract": "node scripts/prei18n-extract.js",
"graphql": "yarn graphql:fetch && yarn graphql:generate",
"i18n:extract": "lingui extract --locale en-US",
"i18n:compile": "yarn i18n:extract && lingui compile",
"i18n:pseudo": "lingui extract --locale pseudo && lingui compile",
"ajv:compile": "node scripts/compile-ajv-validators.js",
"prepare": "yarn contracts:compile && yarn graphql:fetch && yarn graphql:generate && yarn i18n:compile && yarn ajv:compile",
"i18n:pseudo": "lingui extract --locale pseudo",
"i18n:compile": "lingui compile",
"i18n": "yarn i18n:extract --clean && yarn i18n:compile",
"prepare": "yarn ajv && yarn contracts && yarn graphql && yarn i18n",
"start": "craco start",
"build": "craco build",
"build:e2e": "REACT_APP_CSP_ALLOW_UNSAFE_EVAL=true REACT_APP_ADD_COVERAGE_INSTRUMENTATION=true craco build",

@@ -4,8 +4,8 @@ require('dotenv').config({ path: '.env.production' })
const child_process = require('child_process')
const fs = require('fs/promises')
const { promisify } = require('util')
const dataConfig = require('../graphql.config')
const thegraphConfig = require('../graphql_thegraph.config')
const dataConfig = require('../graphql.data.config')
const thegraphConfig = require('../graphql.thegraph.config')
const exec = promisify(child_process.exec)

@@ -1,10 +0,0 @@
/* eslint-env node */
const { exec } = require('child_process')
const isWindows = process.platform === 'win32' || /^(msys|cygwin)$/.test(process.env.OSTYPE)
if (isWindows) {
exec(`type nul > src/locales/en-US.po`)
} else {
exec(`touch src/locales/en-US.po`)
}
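The helper deleted above pre-created an empty src/locales/en-US.po before extraction by shelling out to touch (type nul on Windows). For reference, a platform-neutral Node/TypeScript equivalent of that touch, as an illustrative sketch only:

import { closeSync, openSync, utimesSync } from 'fs'

function touch(file: string): void {
  const now = new Date()
  try {
    utimesSync(file, now, now) // bump timestamps if the file already exists
  } catch {
    closeSync(openSync(file, 'w')) // otherwise create it empty
  }
}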