Compare commits
56 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
d78b7a3998 | ||
|
|
5abc2f36e2 | ||
|
|
38a068bcea | ||
|
|
65772a8e1a | ||
|
|
a022093ce0 | ||
|
|
50f84b3c57 | ||
|
|
df21d5326a | ||
|
|
e65fa8fc00 | ||
|
|
8e682cc848 | ||
|
|
ef8e4330d3 | ||
|
|
a0278615a4 | ||
|
|
d8821d7317 | ||
|
|
78a82d9ff2 | ||
|
|
1ac7decd89 | ||
|
|
4fa7276cc1 | ||
|
|
6ae222d55c | ||
|
|
efeeda5e5c | ||
|
|
c75c937b13 | ||
|
|
d1ee596292 | ||
|
|
b61e2cc96a | ||
|
|
ca94445f0f | ||
|
|
4d95fe925d | ||
|
|
f3e5b0ded1 | ||
|
|
8ce9761bad | ||
|
|
3d051e454d | ||
|
|
c48ad4421f | ||
|
|
40264ff900 | ||
|
|
e86f83b9c0 | ||
|
|
e986b09e58 | ||
|
|
22dee96f51 | ||
|
|
0c614b7097 | ||
|
|
f0a5869c53 | ||
|
|
5cd1668e0d | ||
|
|
0b061d68cc | ||
|
|
ffa4a2d054 | ||
|
|
f8072a8004 | ||
|
|
4306b3563a | ||
|
|
c4de88af6f | ||
|
|
9486185ceb | ||
|
|
261eb04e97 | ||
|
|
832bd6c908 | ||
|
|
05844caf85 | ||
|
|
f7e82fcb46 | ||
|
|
27b3af0a7b | ||
|
|
f733fac875 | ||
|
|
1097a1b020 | ||
|
|
ea422d63b3 | ||
|
|
a3821f6c4b | ||
|
|
de8a717b4c | ||
|
|
86e0269a86 | ||
|
|
d15c097dba | ||
|
|
49f71574f4 | ||
|
|
9ee685df46 | ||
|
|
263bfe5ce6 | ||
|
|
e8a9eea493 | ||
|
|
1c499c9f42 |
66
.github/workflows/nodejs.yml
vendored
66
.github/workflows/nodejs.yml
vendored
@@ -14,7 +14,7 @@ jobs:
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
node-version: [8.x, 10.x, 12.x, 13.x ]
|
||||
node-version: [ 8.x, 10.x, 12.x, 13.x ]
|
||||
|
||||
steps:
|
||||
- name: Use Node.js ${{ matrix.node-version }}
|
||||
@@ -22,11 +22,20 @@ jobs:
|
||||
with:
|
||||
node-version: ${{ matrix.node-version }}
|
||||
|
||||
- uses: actions/checkout@v2
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v2
|
||||
|
||||
- run: npm ci
|
||||
- run: npm run build-all
|
||||
- run: npm run test-node
|
||||
- name: Install node-hid requirements
|
||||
run: sudo apt-get install libusb-1.0-0 libusb-1.0-0-dev libudev-dev
|
||||
|
||||
- name: Install dependencies (and link per package)
|
||||
run: npm ci
|
||||
|
||||
- name: Build CommonJS and ESM (from TypeScript)
|
||||
run: npm run build-all
|
||||
|
||||
- name: Run tests
|
||||
run: npm run test-node
|
||||
|
||||
|
||||
test-browser:
|
||||
@@ -43,12 +52,20 @@ jobs:
|
||||
with:
|
||||
node-version: 12.x
|
||||
|
||||
- uses: actions/checkout@v2
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v2
|
||||
|
||||
- run: npm ci
|
||||
- run: npm run build-all
|
||||
- run: npm run test-browser-${{ matrix.module }}
|
||||
- name: Install node-hid requirements
|
||||
run: sudo apt-get install libusb-1.0-0 libusb-1.0-0-dev libudev-dev
|
||||
|
||||
- name: Install dependencies (and link per package)
|
||||
run: npm ci
|
||||
|
||||
- name: Build CommonJS and ESM (from TypeScript)
|
||||
run: npm run build-all
|
||||
|
||||
- name: Run tests
|
||||
run: npm run test-browser-${{ matrix.module }}
|
||||
|
||||
test-react-native:
|
||||
|
||||
@@ -66,11 +83,17 @@ jobs:
|
||||
with:
|
||||
node-version: 12.x
|
||||
|
||||
- uses: actions/checkout@v2
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v2
|
||||
|
||||
- run: npm ci
|
||||
- run: npm run build-all
|
||||
- run: npm run test-react
|
||||
- name: Install dependencies (and link per package)
|
||||
run: npm ci
|
||||
|
||||
- name: Build CommonJS and ESM (from TypeScript)
|
||||
run: npm run build-all
|
||||
|
||||
- name: Run tests
|
||||
run: npm run test-react
|
||||
|
||||
|
||||
coverage:
|
||||
@@ -86,11 +109,20 @@ jobs:
|
||||
with:
|
||||
node-version: 12.x
|
||||
|
||||
- uses: actions/checkout@v2
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v2
|
||||
|
||||
- run: npm ci
|
||||
- run: npm run build-all
|
||||
- run: npm run test-coverage
|
||||
- name: Install node-hid requirements
|
||||
run: sudo apt-get install libusb-1.0-0 libusb-1.0-0-dev libudev-dev
|
||||
|
||||
- name: Install dependencies (and link per package)
|
||||
run: npm ci
|
||||
|
||||
- name: Build CommonJS and ESM (from TypeScript)
|
||||
run: npm run build-all
|
||||
|
||||
- name: Run tests
|
||||
run: npm run test-coverage
|
||||
|
||||
- name: Upload coverage summary
|
||||
uses: actions/upload-artifact@v2
|
||||
|
||||
2
.gitignore
vendored
2
.gitignore
vendored
@@ -1,4 +1,6 @@
|
||||
node_modules/
|
||||
packages/*/node_modules
|
||||
.package_node_modules/
|
||||
obsolete/
|
||||
.DS_Store
|
||||
.tmp/
|
||||
|
||||
31
CHANGELOG.md
31
CHANGELOG.md
@@ -3,6 +3,36 @@ Changelog
|
||||
|
||||
This change log is managed by `admin/cmds/update-versions` but may be manually updated.
|
||||
|
||||
ethers/v5.0.17 (2020-10-07 20:08)
|
||||
---------------------------------
|
||||
|
||||
- Better error message for parseUnits of non-strings. ([#981](https://github.com/ethers-io/ethers.js/issues/981); [5abc2f3](https://github.com/ethers-io/ethers.js/commit/5abc2f36e20eef79a935961f3dd8133b5528d9e5))
|
||||
- Add gzip support to AlchemyProivder and InfuraProvider fetching. ([#1085](https://github.com/ethers-io/ethers.js/issues/1085); [38a068b](https://github.com/ethers-io/ethers.js/commit/38a068bcea3f251c8f3a349a90fcb077a39d23ad))
|
||||
- Add gzip support to getUrl in node. ([#1085](https://github.com/ethers-io/ethers.js/issues/1085); [65772a8](https://github.com/ethers-io/ethers.js/commit/65772a8e1a55d663bdb67e3a2b160fecc9f986ef))
|
||||
- Added CommunityResourcable to mark Providers as highly throttled. ([a022093](https://github.com/ethers-io/ethers.js/commit/a022093ce03f55db7ba2cac36e365d1af39ac45b))
|
||||
- Added debug event info to WebSocketProvider. ([#1018](https://github.com/ethers-io/ethers.js/issues/1018); [8e682cc](https://github.com/ethers-io/ethers.js/commit/8e682cc8481c6051a6f8115b29d78f4996120ccd))
|
||||
|
||||
ethers/v5.0.16 (2020-10-05 15:44)
|
||||
---------------------------------
|
||||
|
||||
- ABI encoding performance additions. ([#1012](https://github.com/ethers-io/ethers.js/issues/1012); [f3e5b0d](https://github.com/ethers-io/ethers.js/commit/f3e5b0ded1b227a377fd4799507653c95c76e353))
|
||||
- Export hexConcat in utils. ([#1079](https://github.com/ethers-io/ethers.js/issues/1079); [3d051e4](https://github.com/ethers-io/ethers.js/commit/3d051e454db978f58c7b38ff4484096c3eb85b94))
|
||||
- Cache chain ID for WebSocketProvider. ([#1054](https://github.com/ethers-io/ethers.js/issues/1054); [40264ff](https://github.com/ethers-io/ethers.js/commit/40264ff9006156ba8441e6101e5a7149a5cf03f6))
|
||||
|
||||
ethers/v5.0.15 (2020-09-26 03:22)
|
||||
---------------------------------
|
||||
|
||||
- Add more accurate intrinsic gas cost to ABI calls with specified gas property. ([#1058](https://github.com/ethers-io/ethers.js/issues/1058); [f0a5869](https://github.com/ethers-io/ethers.js/commit/f0a5869c53475e55a5f47d8651f609fff45dc9a7))
|
||||
- Better errors for unconfigured ENS names. ([#1066](https://github.com/ethers-io/ethers.js/issues/1066); [5cd1668](https://github.com/ethers-io/ethers.js/commit/5cd1668e0d29099c5b7ce1fdc1d0e8a41af1a249))
|
||||
- Updated CLI solc to versin 0.7.1. ([4306b35](https://github.com/ethers-io/ethers.js/commit/4306b3563a171baa9d7bf4872475a13c3434f834))
|
||||
|
||||
ethers/v5.0.14 (2020-09-16 02:39)
|
||||
---------------------------------
|
||||
|
||||
- More robust blockchain error detection ([#1047](https://github.com/ethers-io/ethers.js/issues/1047); [49f7157](https://github.com/ethers-io/ethers.js/commit/49f71574f4799d685a5ae8fd24fe1134f752d70a))
|
||||
- Forward blockchain errors from Signer during gas estimation. ([#1047](https://github.com/ethers-io/ethers.js/issues/1047); [9ee685d](https://github.com/ethers-io/ethers.js/commit/9ee685df46753c46cbbde12d05d6ea04f2b5ea3f))
|
||||
- Improve fetch errors with looser mime-type detection. ([#1047](https://github.com/ethers-io/ethers.js/issues/1047); [263bfe5](https://github.com/ethers-io/ethers.js/commit/263bfe5ce632790e0399d06a0ab660a501997998))
|
||||
|
||||
ethers/v5.0.13 (2020-09-11 02:10)
|
||||
---------------------------------
|
||||
|
||||
@@ -115,4 +145,3 @@ ethers/v5.0.0 (2020-06-12 19:58)
|
||||
|
||||
- Preserve config canary string. ([7157816](https://github.com/ethers-io/ethers.js/commit/7157816fa53f660d750811b293e3b1d5a2f70bd4))
|
||||
- Updated docs. ([9e4c7e6](https://github.com/ethers-io/ethers.js/commit/9e4c7e609d9eeb5f2a11d6a90bfa9d32ee696431))
|
||||
|
||||
|
||||
@@ -1,5 +0,0 @@
|
||||
"use strict";
|
||||
|
||||
const { setupBuild } = require("../build");
|
||||
|
||||
setupBuild(false);
|
||||
@@ -1,37 +0,0 @@
|
||||
const { setupBuild } = require("../build");
|
||||
const { loadPackage, savePackage } = require("../local");
|
||||
|
||||
const arg = process.argv[2];
|
||||
|
||||
(async function() {
|
||||
process.argv.slice(2).forEach((arg) => {
|
||||
console.log("Setting Option:", arg);
|
||||
switch(arg) {
|
||||
case "esm":
|
||||
setupBuild(true);
|
||||
break;
|
||||
|
||||
case "cjs":
|
||||
setupBuild(false);
|
||||
break;
|
||||
|
||||
// This will remove the browser field entirely, so make sure
|
||||
// to set esm of cjs first as they will restore the browser
|
||||
// field
|
||||
case "browser-lang-all": {
|
||||
const info = loadPackage("wordlists");
|
||||
delete info.browser;
|
||||
savePackage("wordlists", info);
|
||||
break;
|
||||
}
|
||||
|
||||
default:
|
||||
console.log("Unknown option:", arg);
|
||||
return 1;
|
||||
}
|
||||
});
|
||||
return 0;
|
||||
|
||||
})().then((result) => {
|
||||
process.exit(result);
|
||||
});
|
||||
@@ -1,16 +0,0 @@
|
||||
"use stricT";
|
||||
|
||||
const depgraph = require("../depgraph");
|
||||
const { log } = require("../log");
|
||||
const { loadJson, resolve, saveJson } = require("../utils");
|
||||
|
||||
(async function() {
|
||||
log(`<bold:Updating dependency-graph build order (tsconfig.project.json)...>`);
|
||||
let ordered = depgraph.getOrdered(true);
|
||||
|
||||
let path = resolve("tsconfig.project.json")
|
||||
|
||||
let projectConfig = loadJson(path);
|
||||
projectConfig.references = ordered.map((name) => ({ path: ("./packages/" + name) }));
|
||||
saveJson(path, projectConfig);
|
||||
})();
|
||||
@@ -1,30 +0,0 @@
|
||||
"use strict";
|
||||
|
||||
const fs = require("fs");
|
||||
const { resolve } = require("path");
|
||||
|
||||
const sourceEthers = fs.readFileSync(resolve(__dirname, "../../packages/ethers/src.ts/ethers.ts")).toString();
|
||||
const targets = sourceEthers.match(/export\s*{\s*((.|\s)*)}/)[1].trim();
|
||||
|
||||
const output = `"use strict";
|
||||
|
||||
// To modify this file, you must update ./admin/cmds/update-exports.js
|
||||
|
||||
import * as ethers from "./ethers";
|
||||
|
||||
try {
|
||||
const anyGlobal = (window as any);
|
||||
|
||||
if (anyGlobal._ethers == null) {
|
||||
anyGlobal._ethers = ethers;
|
||||
}
|
||||
} catch (error) { }
|
||||
|
||||
export { ethers };
|
||||
|
||||
export {
|
||||
${ targets }
|
||||
} from "./ethers";
|
||||
`;
|
||||
|
||||
fs.writeFileSync(resolve(__dirname, "../../packages/ethers/src.ts/index.ts"), output);
|
||||
@@ -14,7 +14,7 @@ These API keys are a provided as a community resource by the backend services
|
||||
for low-traffic projects and for early prototyping.
|
||||
|
||||
Since these API keys are shared by all users (that have not acquired their
|
||||
own API key), they are aggressively throttled which means reties occur more
|
||||
own API key), they are aggressively throttled which means retries occur more
|
||||
frequently and the responses are slower.
|
||||
|
||||
It is **highly recommended** that you sign up for a free API key from each service for their
|
||||
|
||||
@@ -24,7 +24,7 @@ _subsection: Methods @<ContractFactory--methods>
|
||||
|
||||
_property: contractFactory.attach(address) => [[Contract]] @<ContractFactory-attach>
|
||||
|
||||
Return an instance of a [[Contract]] attched to //address//. This is the
|
||||
Return an instance of a [[Contract]] attached to //address//. This is the
|
||||
same as using the [Contract constructor](Contract--creating) with
|
||||
//address// and this the the //interface// and //signerOrProvider// passed
|
||||
in when creating the ContractFactory.
|
||||
@@ -37,7 +37,7 @@ to the Contract's constructor.
|
||||
_property: contractFactory.deploy(...args) => Promise<[[Contract]]> @<ContractFactory-deploy>
|
||||
|
||||
Uses the signer to deploy the Contract with //args// passed into the constructor and
|
||||
retruns a Contract which is attached to the address where this contract **will** be
|
||||
returns a Contract which is attached to the address where this contract **will** be
|
||||
deployed once the transaction is mined.
|
||||
|
||||
The transaction can be found at ``contract.deployTransaction``, and no interactions
|
||||
|
||||
@@ -17,7 +17,7 @@ Generates a brain wallet, with a slightly improved experience, in which
|
||||
the generated wallet has a mnemonic.
|
||||
|
||||
_property: BrainWallet.generateLegacy(username, password [ , progressCallback ]) => [[experimental-brainwallet]]
|
||||
Generate a brain wallet which is compatibile with the ethers v3 and earlier.
|
||||
Generate a brain wallet which is compatible with the ethers v3 and earlier.
|
||||
|
||||
|
||||
_subsection: EIP1193Bridge @<experimental-eip1193bridge> @INHERIT<[[link-npm-events]]>
|
||||
|
||||
@@ -26,7 +26,7 @@ Create a formatted output of an array of [[asm-operation]].
|
||||
|
||||
_heading: Bytecode @<asm-bytecode> @INHERIT<Array\<[[asm-operation]]\>>
|
||||
|
||||
Each arary index represents an operation, collapsing multi-byte operations
|
||||
Each array index represents an operation, collapsing multi-byte operations
|
||||
(i.e. ``PUSH``) into a single operation.
|
||||
|
||||
_property: bytecode.getOperation(offset) => [[asm-operation]]
|
||||
@@ -52,7 +52,7 @@ If the opcode is a ``PUSH``, this is the value of that push
|
||||
_subsection: Opcode @<asm-opcode> @SRC<asm/opcodes:class.Opcode>
|
||||
|
||||
_property: asm.Opcode.from(valueOrMnemonic) => [[asm-opcode]]
|
||||
Create a new instnace of an Opcode for a given numeric value
|
||||
Create a new instance of an Opcode for a given numeric value
|
||||
(e.g. 0x60 is PUSH1) or mnemonic string (e.g. "PUSH1").
|
||||
|
||||
_heading: Properties
|
||||
|
||||
@@ -49,7 +49,7 @@ string of a decimal number.
|
||||
|
||||
_property: literalNode.verbatim => boolean
|
||||
This is true in a [[asm-datanode]] context, since in that case the
|
||||
value should be taken verbatim and no ``PUSH`` operation shoud be
|
||||
value should be taken verbatim and no ``PUSH`` operation should be
|
||||
added, otherwise false.
|
||||
|
||||
|
||||
@@ -96,7 +96,7 @@ any output assembly, using the ``{{! code here }}`` syntax.
|
||||
|
||||
_property: literalNode.verbatim => boolean
|
||||
This is true in a [[asm-datanode]] context, since in that case the
|
||||
value should be taken verbatim and no ``PUSH`` operation shoud be
|
||||
value should be taken verbatim and no ``PUSH`` operation should be
|
||||
added, otherwise false.
|
||||
|
||||
_property: evaluationNode.script => string
|
||||
|
||||
@@ -1,9 +1,9 @@
|
||||
_section: Ethers ASM Dialect @<asm-dialect>
|
||||
|
||||
This provides a quick, high-level overcview of the **Ethers ASM Dialect**
|
||||
This provides a quick, high-level overview of the **Ethers ASM Dialect**
|
||||
for EVM, which is defined by the [Ethers ASM Dialect Grammar](link-ethers-asm-grammar)
|
||||
|
||||
Once a program is compiled by a higher level langauge into ASM (assembly),
|
||||
Once a program is compiled by a higher level language into ASM (assembly),
|
||||
or hand-coded directly in ASM, it needs to be assembled into bytecode.
|
||||
|
||||
The assembly process performs a very small set of operations and is
|
||||
@@ -34,7 +34,7 @@ A **Label** is a position in the program which can be jumped to. A
|
||||
``JUMPDEST`` is automatically added to this point in the assembled
|
||||
output.
|
||||
|
||||
@TODO: Exmaples
|
||||
@TODO: Examples
|
||||
|
||||
|
||||
_subsection: Literals @<asm-dialect-literal>
|
||||
@@ -45,7 +45,7 @@ operation.
|
||||
A **Literal** can be provided using a [[DataHexString]] or a decimal
|
||||
byte value.
|
||||
|
||||
@TODO: exmples
|
||||
@TODO: examples
|
||||
|
||||
|
||||
_subsection: Comments @<asm-dialect-comment>
|
||||
@@ -64,7 +64,7 @@ within a **deployment bytecode**, which can be used as **init code**.
|
||||
When deploying a program to Ethereum, an **init transaction** is used. An
|
||||
//init transaction// has a null ``to`` address and contains bytecode in
|
||||
the ``data``. This ``data`` bytecode is a program, that when executed
|
||||
returns some other bytecode as a result, this restul is the bytecode
|
||||
returns some other bytecode as a result, this result is the bytecode
|
||||
to be installed.
|
||||
|
||||
Therefore it is important that embedded code uses jumps relative to itself,
|
||||
@@ -84,7 +84,7 @@ _subsection: Data Segment @<asm-dialect-datasegment>
|
||||
A **Data Segment** allows arbitrary data to be embedded into a program,
|
||||
which can be useful for lookup tables or deploy-time constants.
|
||||
|
||||
An emtpty **Data Segment** can also be used when a labelled location is
|
||||
An empty **Data Segment** can also be used when a labelled location is
|
||||
required, but without the ``JUMPDEST`` which a [[asm-dialect-label]] adds.
|
||||
|
||||
@TODO: Example
|
||||
@@ -111,5 +111,5 @@ _subsection: Stack Placeholders @<asm-dialect-placeholder>
|
||||
@TODO: exampl
|
||||
|
||||
|
||||
_subsection: Evaluation and Excution @<asm-dialect-scripting>
|
||||
_subsection: Evaluation and Execution @<asm-dialect-scripting>
|
||||
|
||||
|
||||
@@ -11,7 +11,7 @@ It uses a quorum and connects to multiple [Providers](Provider) as backends,
|
||||
each configured with a //priority// and a //weight// .
|
||||
|
||||
When a request is made, the request is dispatched to multiple backends, randomly
|
||||
choosen (higher prioirty backends are always selected first) and the results from
|
||||
chosen (higher priority backends are always selected first) and the results from
|
||||
each are compared against the others. Only once the quorum has been reached will that
|
||||
result be accepted and returned to the caller.
|
||||
|
||||
@@ -41,7 +41,7 @@ The provider for this configuration.
|
||||
|
||||
_property: fallbackProviderConfig.priority => number
|
||||
The priority used for the provider. Higher priorities are favoured over lower
|
||||
priorities. If multiple providers share the same prioirty, they are choosen
|
||||
priorities. If multiple providers share the same prioirty, they are chosen
|
||||
at random.
|
||||
|
||||
_property: fallbackProviderConfig.stallTimeout => number
|
||||
@@ -96,7 +96,7 @@ The URL to use for the JsonRpcProvider instance.
|
||||
_subsection: Web3Provider @<Web3Provider> @INHERIT<[[JsonRpcProvider]]> @SRC<providers:class.Web3Provider>
|
||||
|
||||
The Web3Provider is meant to ease moving from a [web3.js based](link-web3)
|
||||
application to ethers by wraping an existing Web3-compatible (such as a
|
||||
application to ethers by wrapping an existing Web3-compatible (such as a
|
||||
[Web3HttpProvider](link-web3-http), [Web3IpcProvider](link-web3-ipc) or
|
||||
[Web3WsProvider](link-web3-ws)) and exposing it as an ethers.js [[Provider]]
|
||||
which can then be used with the rest of the library.
|
||||
|
||||
@@ -26,10 +26,10 @@ A **Networkish** may be any of the following:
|
||||
- the name of a common network as a string (e.g. ``"homestead"``)
|
||||
- the chain ID a network as a number; if the chain ID is that of a
|
||||
common network, the ``name`` and ``ensAddress`` will be populated, otherwise,
|
||||
the deafults name ``"unknown"`` and no ``ensAddress`` is used
|
||||
the default name ``"unknown"`` and no ``ensAddress`` is used
|
||||
|
||||
_subsection: Network @<providers-Network>
|
||||
A **Network** represents an Etherem network.
|
||||
A **Network** represents an Ethereum network.
|
||||
|
||||
_property: network.name => string
|
||||
The human-readable name of the network, such as ``homestead``. If the network
|
||||
@@ -273,7 +273,7 @@ The amount of gas actually used by this transaction.
|
||||
|
||||
_property: receipt.logsBloom => string<[[DataHexString]]>
|
||||
A [bloom-filter](link-wiki-bloomfilter), which
|
||||
incldues all the addresses and topics included in any log in this
|
||||
includes all the addresses and topics included in any log in this
|
||||
transaction.
|
||||
|
||||
_property: receipt.blockHash => string<[[DataHexString]]<32>>
|
||||
|
||||
@@ -12,7 +12,7 @@ _subsection: Creating Instance @<AbiCoder--creating>
|
||||
|
||||
For the most part, there should never be a need to manually create
|
||||
an instance of an [[AbiCoder]], since one is created with the
|
||||
default coersion function when the library is loaded which can
|
||||
default coercion function when the library is loaded which can
|
||||
be used universally.
|
||||
|
||||
This is likely only needed by those with specific needs to override
|
||||
|
||||
@@ -212,7 +212,7 @@ to parameters which are part of an [[EventFragment]].
|
||||
_property: paramType.arrayChildren => [[ParamType]] @<ParamType-arrayChildren>
|
||||
|
||||
The type of children of the array. This is null for for any parameter
|
||||
wjhich is not an array.
|
||||
which is not an array.
|
||||
|
||||
_property: paramType.arrayLength => number @<ParamType-arrayLength>
|
||||
|
||||
|
||||
@@ -4,7 +4,7 @@ The **Interface** Class abstracts the encoding and decoding required
|
||||
to interact with contracts on the Ethereum network.
|
||||
|
||||
Many of the standards organically evolved along side the [[link-solidity]]
|
||||
language, which other languages have adopted to remain compatibile with
|
||||
language, which other languages have adopted to remain compatible with
|
||||
existing deployed contracts.
|
||||
|
||||
The EVM itself does not understand what the ABI is. It is simply an agreed
|
||||
|
||||
@@ -17,7 +17,7 @@ _heading: BigNumberish @<BigNumberish>
|
||||
|
||||
Many functions and methods in this library take in values which
|
||||
can be non-ambiguously and safely converted to a BigNumber. These
|
||||
values can be sepcified as:
|
||||
values can be specified as:
|
||||
|
||||
_definition: **//string//**
|
||||
A [[HexString]] or a decimal string, either of which may
|
||||
@@ -122,18 +122,18 @@ Returns a BigNumber with the value of //BigNumber// with bits beyond
|
||||
the //bitcount// least significant bits set to zero.
|
||||
|
||||
|
||||
_heading: Two's Compliment
|
||||
_heading: Two's Complement
|
||||
|
||||
[Two's Complicment](link-wiki-twoscomplement)
|
||||
[Two's Complement](link-wiki-twoscomplement)
|
||||
is an elegant method used to encode and decode fixed-width signed values
|
||||
while efficiently preserving mathematic operations.
|
||||
Most users will not need to interact with these.
|
||||
|
||||
_property: BigNumber.fromTwos(bitwidth) => [[BigNumber]] @SRC<bignumber>
|
||||
Returns a BigNumber with the value of //BigNumber// converted from twos-compliment with //bitwidth//.
|
||||
Returns a BigNumber with the value of //BigNumber// converted from twos-complement with //bitwidth//.
|
||||
|
||||
_property: BigNumber.toTwos(bitwidth) => [[BigNumber]] @SRC<bignumber>
|
||||
Returns a BigNumber with the value of //BigNumber// converted to twos-compliment with //bitwidth//.
|
||||
Returns a BigNumber with the value of //BigNumber// converted to twos-complement with //bitwidth//.
|
||||
|
||||
|
||||
_heading: Comparison and Equivalence
|
||||
@@ -232,7 +232,7 @@ mathematical operations handled safely.
|
||||
_heading: Why not BigNumber.js, BN.js, BigDecimal, etc?
|
||||
|
||||
Everyone has their own favourite Big Number library, and once someone
|
||||
has choosen one, it becomes part of their identity, like their editor,
|
||||
has chosen one, it becomes part of their identity, like their editor,
|
||||
vi vs emacs. There are over 100 Big Number libraries on [npm](link-npm-query-bignumber).
|
||||
|
||||
One of the biggest differences between the Ethers [[BigNumber]] object and
|
||||
@@ -246,7 +246,7 @@ low-level library's objects which supports myriad in-place operations.
|
||||
Second, the Ethers [[BigNumber]] provides all the functionality required
|
||||
internally and should generally be sufficient for most developers while
|
||||
not exposing some of the more advanced and rare functionality. So it will
|
||||
be eaiser to swap out the underlying library without impacting consumers.
|
||||
be easier to swap out the underlying library without impacting consumers.
|
||||
|
||||
For example, if [[link-npm-bnjs]] was exposed, someone may use the
|
||||
greatest-common-denominator functions, which would then be functionality
|
||||
|
||||
@@ -25,7 +25,7 @@ binary data as a string.
|
||||
_heading: HexString @<HexString>
|
||||
|
||||
A **Hexstring** is a string which has a ``0x`` prefix followed by any
|
||||
number of nibbles (i.e. case-insensitive hexidecumal characters, ``0-9`` and ``a-f``).
|
||||
number of nibbles (i.e. case-insensitive hexadecimal characters, ``0-9`` and ``a-f``).
|
||||
|
||||
_heading: Signature @<Signature>
|
||||
|
||||
@@ -37,7 +37,7 @@ _heading: Signature @<Signature>
|
||||
_heading: Raw Signature @<signature-raw> @inherit<string\<[[DataHexString]]\<65\>\>>
|
||||
|
||||
A **Raw Signature** is a common Signature format where the r, s and v are
|
||||
concanenated into a 65 byte (130 nibble) [[DataHexString]].
|
||||
concatenated into a 65 byte (130 nibble) [[DataHexString]].
|
||||
|
||||
|
||||
_heading: SignatureLike @<SignatureLike>
|
||||
@@ -112,7 +112,7 @@ _property: ethers.utils.stripZeros(aBytesLike) => Uint8Array @<utils-stripZeros
|
||||
Returns a Uint8Array with all leading ``0`` bytes of //aBtyesLike// removed.
|
||||
|
||||
_property: ethers.utils.zeroPad(aBytesLike, length) => Uint8Array @<utils-zeroPad> @SRC<bytes>
|
||||
Retutns a Uint8Array of the data in //aBytesLike// with ``0`` bytes prepended to
|
||||
Returns a Uint8Array of the data in //aBytesLike// with ``0`` bytes prepended to
|
||||
//length// bytes long.
|
||||
|
||||
If //aBytesLike// is already longer than //length// bytes long, an InvalidArgument
|
||||
|
||||
@@ -93,7 +93,7 @@ A signed format string begins with ``fixed``, which an unsigned format
|
||||
string begins with ``ufixed``, followed by the width (in bits) and the
|
||||
number of decimals.
|
||||
|
||||
The width must be conguent to 0 mod 8 (i.e. ``(width % 8) == 0``) and no
|
||||
The width must be congruent to 0 mod 8 (i.e. ``(width % 8) == 0``) and no
|
||||
larger than 256 bits and the number of decimals must be no larger than 80.
|
||||
|
||||
For example:
|
||||
|
||||
@@ -9,7 +9,7 @@ The [Cryptographic Hash Functions](link-wiki-cryptographichash)
|
||||
are a specific family of hash functions.
|
||||
|
||||
_property: ethers.utils.id(text) => string<[[DataHexString]]<32>> @<utils-id> @SRC<hash>
|
||||
The Ethereum Identity function computs the [KECCAK256](link-wiki-sha3) hash of the //text// bytes.
|
||||
The Ethereum Identity function computes the [KECCAK256](link-wiki-sha3) hash of the //text// bytes.
|
||||
|
||||
_property: ethers.utils.keccak256(aBytesLike) => string<[[DataHexString]]<32>> @<utils-keccak256> @SRC<keccak256>
|
||||
Returns the [KECCAK256](link-wiki-sha3) digest //aBytesLike//.
|
||||
@@ -163,7 +163,7 @@ the tightly packing algorithm.
|
||||
|
||||
_property: ethers.utils.solidityPack(types, values) => string<[[DataHexString]]> @<utils-solidityPack> @SRC<solidity:pack>
|
||||
Returns the non-standard encoded //values// packed according to
|
||||
their respecive type in //types//.
|
||||
their respective type in //types//.
|
||||
|
||||
_property: ethers.utils.solidityKeccak256(types, values) => string<[[DataHexString]]<32>> @<utils-solidityKeccak256> @SRC<solidity:keccak256>
|
||||
Returns the [KECCAK256](link-wiki-sha3) of the non-standard encoded //values// packed
|
||||
|
||||
@@ -101,7 +101,7 @@ _heading: Methods @<HDNode--methods>
|
||||
|
||||
_property: hdNode.neuter() => [[HDNode]] @<HDNode-neuter> @SRC<hdnode>
|
||||
Return a new instance of //hdNode// with its private key removed
|
||||
but all otehr properties preserved. This ensures that the key
|
||||
but all other properties preserved. This ensures that the key
|
||||
can not leak the private key of itself or any derived children,
|
||||
but may still be used to compute the addresses of itself and
|
||||
any non-hardened children.
|
||||
|
||||
@@ -56,11 +56,11 @@ _heading: Usage Validation
|
||||
There can be used to ensure various properties and actions are safe.
|
||||
|
||||
_property: logger.checkAbstract(target, kind) => void @SRC<logger>
|
||||
Checks that //target// is not //kind// and performs the same operatons
|
||||
Checks that //target// is not //kind// and performs the same operations
|
||||
as ``checkNew``. This is useful for ensuring abstract classes are not
|
||||
being instantiated.
|
||||
|
||||
_property: logger.checkArgumentCount(count, expectedCound [ , message) => void @SRC<logger>
|
||||
_property: logger.checkArgumentCount(count, expectedCount [ , message) => void @SRC<logger>
|
||||
If //count// is not equal to //expectedCount//, throws a [MISSING_ARGUMENT](errors-MissingArgument)
|
||||
or [UNEXPECTED_ARGUMENT](errors-UnexpectedArgument) error.
|
||||
|
||||
|
||||
@@ -8,11 +8,11 @@ The private key for this Signing Key.
|
||||
|
||||
_property: signingKey.publicKey => string<[[DataHexString]]<65>>
|
||||
The uncompressed public key for this Signing Key. It will always be
|
||||
65 bytes (130 nibbles) and begine with ``0x04``.
|
||||
65 bytes (130 nibbles) and begins with ``0x04``.
|
||||
|
||||
_property: signingKey.compressedPublicKey => string<[[DataHexString]]<33>>
|
||||
The compressed public key for this Signing Key. It will always be
|
||||
33 bytes (66 nibbles) and begine with either ``0x02`` or ``0x03``.
|
||||
33 bytes (66 nibbles) and begins with either ``0x02`` or ``0x03``.
|
||||
|
||||
_property: signingKey.signDigest(digest) => [[Signature]]
|
||||
Sign the //digest// and return the signature.
|
||||
|
||||
@@ -40,7 +40,7 @@ Returns the Array of codepoints of //text//, optionally normalized using the
|
||||
_note: Note
|
||||
This function correctly splits each **user-perceived character** into
|
||||
its codepoint, accounting for surrogate pairs. This should not be confused with
|
||||
``string.split("")``, which destroys surrogate pairs, spliting between each UTF-16
|
||||
``string.split("")``, which destroys surrogate pairs, splitting between each UTF-16
|
||||
codeunit instead.
|
||||
|
||||
_property: ethers.utils.toUtf8String(aBytesLike [ , onError = error ] ) => string @<utils-toUtf8String> @SRC<strings>
|
||||
@@ -88,7 +88,7 @@ See NFKC for more an example.
|
||||
|
||||
_note: Note
|
||||
Only certain specified characters are folded in Canonical Equivalence, and thus
|
||||
it should **not** be considered a method to acheive //any// level of security from
|
||||
it should **not** be considered a method to achieve //any// level of security from
|
||||
[homoglyph attacks](link-wiki-homoglyph).
|
||||
|
||||
|
||||
|
||||
@@ -54,7 +54,7 @@ _property: transaction.gasLimit => [[BigNumber]]
|
||||
The gas limit for //transaction//. An account must have enough ether to
|
||||
cover the gas (at the specified **gasPrice**). Any unused gas is
|
||||
refunded at the end of the transaction, and if there is insufficient gas
|
||||
to complete execution, the effects of the trasaction are reverted, but
|
||||
to complete execution, the effects of the transaction are reverted, but
|
||||
the gas is **fully consumed** and an out-of-gas error occurs.
|
||||
|
||||
_property: transaction.gasPrice => [[BigNumber]]
|
||||
|
||||
@@ -37,7 +37,7 @@ Additional headers to include in the connection.
|
||||
_heading: PollOptions @<PollOptions>
|
||||
|
||||
_property: options.timeout => number
|
||||
The amount of time allowed to ellapse before triggering a timeout
|
||||
The amount of time allowed to elapse before triggering a timeout
|
||||
error.
|
||||
|
||||
_property: options.floor => number
|
||||
|
||||
@@ -30,8 +30,8 @@ the registered //name//.
|
||||
|
||||
_subsection: Languages @<wordlists--languages>
|
||||
|
||||
The [official wordlists](link-bip39-wordlists) availalbe in at
|
||||
`ethers.wordlists`. In the browser, only the english langauge is
|
||||
The [official wordlists](link-bip39-wordlists) available at
|
||||
`ethers.wordlists`. In the browser, only the english language is
|
||||
available by default; to include the others (which increases the
|
||||
size of the library), see the dist files in the `ethers` package.
|
||||
|
||||
|
||||
@@ -2,7 +2,7 @@ _section: Assembler @<cli-asm>
|
||||
|
||||
The assembler Command-Line utility allows you to assemble the
|
||||
[Ethers ASM Dialect](asm-dialect) into deployable EVM bytecode
|
||||
and disassemle EVM bytecode into human-readable mnemonics.
|
||||
and disassemble EVM bytecode into human-readable mnemonics.
|
||||
|
||||
|
||||
_subsection: Help
|
||||
@@ -31,7 +31,7 @@ _code: SimpleStore.asm @lang<asm>
|
||||
|
||||
; SimpleStore (uint)
|
||||
|
||||
; Set the inital value of 42
|
||||
; Set the initial value of 42
|
||||
sstore(0, 42)
|
||||
|
||||
; Init code to deploy myContract
|
||||
@@ -144,7 +144,7 @@ By specifying the **Position Independent Code** flag, code
|
||||
will be generated in a way such that all offsets are relative, allowing
|
||||
the program to be moved without any impact to its logic.
|
||||
|
||||
This does incur an additional gsas cost of 8 gas per offset access though.
|
||||
This does incur an additional gas cost of 8 gas per offset access though.
|
||||
|
||||
_definition: **-\-target LABEL**
|
||||
All programs have a root scope named ``_`` which is by default
|
||||
|
||||
@@ -62,7 +62,7 @@ TRANSACTION OPTIONS (default: query network)
|
||||
--gasPrice GWEI Default gas price for transactions(in wei)
|
||||
--gasLimit GAS Default gas limit for transactions
|
||||
--nonce NONCE Initial nonce for the first transaction
|
||||
--yes Always accept Siging and Sending
|
||||
--yes Always accept Signing and Sending
|
||||
|
||||
OTHER OPTIONS
|
||||
--wait Wait until transactions are mined
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
_section: Sandbox Utility
|
||||
|
||||
The sandbox utility provides a simple way to use the most common
|
||||
ethers utilities required during learning, debuging and managing
|
||||
ethers utilities required during learning, debugging and managing
|
||||
interactions with the Ethereum network.
|
||||
|
||||
If no command is given, it will enter a REPL interface with many
|
||||
@@ -64,7 +64,7 @@ TRANSACTION OPTIONS (default: query network)
|
||||
--gasPrice GWEI Default gas price for transactions(in wei)
|
||||
--gasLimit GAS Default gas limit for transactions
|
||||
--nonce NONCE Initial nonce for the first transaction
|
||||
--yes Always accept Siging and Sending
|
||||
--yes Always accept Signing and Sending
|
||||
|
||||
OTHER OPTIONS
|
||||
--wait Wait until transactions are mined
|
||||
|
||||
@@ -21,7 +21,7 @@ associated plugin class will be instantiated and run.
|
||||
|
||||
_property: setPlugin(pluginClass) => void @<cli-setplugin> @SRC<cli/cli>
|
||||
Set a dedicated [[cli-plugin]] class which will handle all input. This
|
||||
may not be used in conjuction with addPlugin and will not automatically
|
||||
may not be used in conjunction with addPlugin and will not automatically
|
||||
accept a command from the arguments.
|
||||
|
||||
_property: showUsage([ message = "" [ , status = 0 ] ]) => never @<cli-showusage> @SRC<cli/cli>
|
||||
@@ -36,7 +36,7 @@ _subsection: Plugin @<cli-plugin> @SRC<cli:class.Plugin>
|
||||
Each **Plugin** manages each command of a CLI and is executed in phases.
|
||||
|
||||
If the usage (i.e. help) of a CLI is requested, the static methods ``getHelp``
|
||||
and ``getOptionHelp`` are used to geneate the help screen.
|
||||
and ``getOptionHelp`` are used to generate the help screen.
|
||||
|
||||
Otherwise, a plugin is instantiated and the ``prepareOptions`` is called. Each
|
||||
plugin **must** call ``super.prepareOptions``, otherwise the basic options are
|
||||
@@ -83,7 +83,7 @@ _property: plugin.prepareArgs(args) => Promise<void> @<plugin-prepareargs> @SR
|
||||
_property: plugin.run() => Promise<void> @<plugin-run> @SRC<cli/cli:Plugin.run>
|
||||
|
||||
_property: plugin.getAddress(addressOrName [ , message = "", [ allowZero = false ] ]) => Promise<string> @<plugin-getaddress> @SRC<cli/cli:Plugin.getAddress>
|
||||
A plugin should use this method to resolve an address. If the resovled address is
|
||||
A plugin should use this method to resolve an address. If the resolved address is
|
||||
the zero address and //allowZero// is not true, an error is raised.
|
||||
|
||||
_property: plugin.dump(header, info) => void @<plugin-dump> @SRC<cli/cli:Plugin.dump>
|
||||
@@ -92,7 +92,7 @@ formatted style. In the future, plugins may support a JSON output format
|
||||
which will automatically work with this method.
|
||||
|
||||
_property: plugin.throwUsageError([ message = "" ]) => never @<plugin-throwusageerror> @SRC<cli/cli>
|
||||
Stops exectuion of the plugin and shows the help screen of the plugin with
|
||||
Stops execution of the plugin and shows the help screen of the plugin with
|
||||
the optional //message//.
|
||||
|
||||
_property: plugin.throwError(message) => never @<plugin-throwerror> @SRC<cli/cli>
|
||||
@@ -133,7 +133,7 @@ Flags are simple binary options (such as the ``--yes``), which are true if prese
|
||||
otherwise false.
|
||||
|
||||
Options require a single parameter follow them on the command line
|
||||
(such as ``--account wallet.json``, which nhas the name ``account`` and the value
|
||||
(such as ``--account wallet.json``, which has the name ``account`` and the value
|
||||
``wallet.json``)
|
||||
|
||||
Arguments are all other values on the command line, and are not accessed through
|
||||
|
||||
@@ -191,7 +191,7 @@ module.exports = {
|
||||
"link-solidity": { name: "Solidity" , url: "https:/\/solidity.readthedocs.io/en/v0.6.2/" },
|
||||
"link-sphinx": { name: "Sphinx", url: "https:/\/www.sphinx-doc.org/" },
|
||||
|
||||
"link-alchemy-signup": "https:/\/alchemyapi.io/signup",
|
||||
"link-alchemy-signup": "https:/\/dashboard.alchemyapi.io/signup?referral=55a35117-028e-4b7c-9e47-e275ad0acc6d",
|
||||
"link-etherscan-signup": "https:/\/etherscan.io/apis",
|
||||
"link-etherscan-ratelimit": "https:/\/info.etherscan.com/api-return-errors/",
|
||||
"link-infura-signup": "https:/\/infura.io/register",
|
||||
|
||||
@@ -94,7 +94,7 @@ Style Guide (this section will have much more coming):
|
||||
- Avoid inline links in the source; use the ``externalLinks`` field in the config.js
|
||||
- Prefix external links with ``link-``
|
||||
- Changing an anchor name must be well justified, as it will break all existing links
|
||||
to that section; flatworm will support symblinks in the future
|
||||
to that section; flatworm will support symlinks in the future
|
||||
- In general, I aim for consistency; look to similar situations throughout the documentation
|
||||
|
||||
|
||||
|
||||
@@ -18,7 +18,7 @@ To use ethers in React Native, you must either provide shims for the needed
|
||||
missing functionality, or use the ethers.js shim.
|
||||
|
||||
It is **HIGHLY RECOMMENDED** you check out the [security section](cookbook-reactnative-security)
|
||||
below for instructions on installing pacakges which can affect the security
|
||||
below for instructions on installing packages which can affect the security
|
||||
of your application.
|
||||
|
||||
After installing packages, you may need to restart your packager and company.
|
||||
|
||||
@@ -280,7 +280,7 @@ The language can be specified using the [@lang extension](flatworm--ext-lang).
|
||||
_table:
|
||||
|
||||
| **Language** | **Notes** |
|
||||
| javascript | Syntax highlights and [evaluates](flatworm--code-eval) the JavaScipt |
|
||||
| javascript | Syntax highlights and [evaluates](flatworm--code-eval) the JavaScript |
|
||||
| script | Same as ``javascript``, but does not evaluate the results |
|
||||
| shell | Shell scripts or command-line |
|
||||
| text | Plain text with no syntax highlighting |
|
||||
@@ -379,7 +379,7 @@ _heading: Variables @<flatworm--table-variable>
|
||||
|
||||
Often the layout of a table is easier to express and maintain without
|
||||
uneven or changing content within it. So the content can be defined
|
||||
separately within a table directive using **variables**. A varaible
|
||||
separately within a table directive using **variables**. A variable
|
||||
name must begin with a letter and must only contain letters and numbers.
|
||||
|
||||
Variables are also useful when content is repeated throughout a table.
|
||||
|
||||
@@ -47,7 +47,7 @@ _code: ES6 in the Browser @lang<html>
|
||||
_code: ES3 (UMD) in the Browser @lang<html>
|
||||
|
||||
<script src="https://cdn.ethers.io/lib/ethers-5.0.umd.min.js"
|
||||
type="application/javascipt"></script>
|
||||
type="application/javascript"></script>
|
||||
|
||||
|
||||
_subsection: Common Terminology @<getting-started--glossary>
|
||||
@@ -65,7 +65,7 @@ $Signer: A Signer is a class which (usually) in some way directly or
|
||||
ether to perform operations.
|
||||
$Contract: A Contract is an abstraction which represents a connection to a
|
||||
specific contract on the Ethereum Network, so that applications
|
||||
can use it like a normal JavaScipt object.
|
||||
can use it like a normal JavaScript object.
|
||||
|
||||
|
||||
| **Provider** | $Provider |
|
||||
@@ -225,7 +225,7 @@ const daiContract = new ethers.Contract("dai.tokens.ethers.eth", daiAbi, provide
|
||||
daiContract.name()
|
||||
//!
|
||||
|
||||
// Get the ERC-20 token synbol (for tickers and UIs)
|
||||
// Get the ERC-20 token symbol (for tickers and UIs)
|
||||
daiContract.symbol()
|
||||
//!
|
||||
|
||||
|
||||
@@ -3,7 +3,7 @@ _section: Migration: From Ethers v4 @<migration-v4>
|
||||
This document only covers the features present in v4 which have changed
|
||||
in some important way in v5.
|
||||
|
||||
It does not cover all the new additional featuers that have been added and
|
||||
It does not cover all the new additional features that have been added and
|
||||
mainly aims to help those updating their older scripts and applications to
|
||||
retain functional parity.
|
||||
|
||||
@@ -52,7 +52,7 @@ _subsection: Contracts
|
||||
_heading: ENS Name Resolution
|
||||
|
||||
The name of the resolved address has changed. If the address passed into the
|
||||
constructor was an ENS name, the address will be resovled before any calls
|
||||
constructor was an ENS name, the address will be resolved before any calls
|
||||
are made to the contract.
|
||||
|
||||
The name of the property where the resolved address has changed from ``addressPromise``
|
||||
@@ -151,7 +151,7 @@ All errors now belong to the [[Logger]] class and the related functions
|
||||
have been moved to [[Logger]] instances, which can include a per-package
|
||||
version string.
|
||||
|
||||
Global error fucntions have been moved [[Logger]] class methods.
|
||||
Global error functions have been moved [[Logger]] class methods.
|
||||
|
||||
_code: @lang<script>
|
||||
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
_section: Testing
|
||||
|
||||
Testing is a critcial part of any library which wishes to remain secure, safe
|
||||
Testing is a critical part of any library which wishes to remain secure, safe
|
||||
and reliable.
|
||||
|
||||
Ethers currently has **over 23k tests** among its test suites, which are all
|
||||
@@ -15,7 +15,7 @@ fix and included to prevent future changes from causing a regression.
|
||||
|
||||
A large number of the test cases were created procedurally by using
|
||||
known correct implementations from various sources (such as Geth) and
|
||||
written in different languages and verifyied with multiple libraries.
|
||||
written in different languages and verified with multiple libraries.
|
||||
|
||||
For example, the ABI test suites were generated by procedurally generating
|
||||
a list of types, for each type choosing a random (valid) value, which then
|
||||
@@ -52,14 +52,14 @@ and will require ES2015 for [Proxy](link-js-proxy).
|
||||
|
||||
Certain features in JavaScript are also avoided, such as look-behind tokens in regular
|
||||
expressions, since these have caused conflicts (at import time) with certain JavaScript
|
||||
environmants such as [Otto](link-otto).
|
||||
environments such as [Otto](link-otto).
|
||||
|
||||
Basically, the moral of the story is "be inclusive and don't drop people needlessly".
|
||||
|
||||
|
||||
_subsection: Test Suites @<testing-suites>
|
||||
|
||||
The test suites are avaialble a gzipped JSON files in the
|
||||
The test suites are available as gzipped JSON files in the
|
||||
``@ethersproject/testcases``, which makes it easy to install and import
|
||||
(both GZIP and JSON are quite easy to consume from most languages). Each
|
||||
test suite also has its schema available in this package.
|
||||
@@ -71,10 +71,10 @@ $ContractEvents: Compiled Solidity, ABI interfaces, input types/values with
|
||||
output types/values for emitted events; all tests were
|
||||
executed against real Ethereum nodes
|
||||
$ContractAbi: Compiled Solidity, ABI interfaces, input types/values with the
|
||||
output types/values, encoded and decoded binrary data and normalized
|
||||
output types/values, encoded and decoded binary data and normalized
|
||||
values for function calls executed against real Ethereum nodes.
|
||||
$ContractAbi2: Identical to ``contract-interface``, except with emphasis on
|
||||
the ABIv2 coder which supports nested dynami types and strutured
|
||||
the ABIv2 coder which supports nested dynamic types and structured
|
||||
data
|
||||
$ContractSignatures: Contract signatures and matching selectors
|
||||
$Hashes: Data and respective hashes against a variety of hash functions
|
||||
@@ -88,7 +88,7 @@ $Transactions: Signed and unsigned transactions with their serialized form
|
||||
including both with and without EIP-155 replay protection
|
||||
$Units: Values converted between various units
|
||||
$Wallet: Keystore JSON format wallets, passwords and decrypted values
|
||||
$Wordlist: Fully decompressed BIP-39 offcial wordlists
|
||||
$Wordlist: Fully decompressed BIP-39 official wordlists
|
||||
|
||||
| **Filename** | **Test Cases** <|
|
||||
| accounts.json.gz | $Account <|
|
||||
@@ -116,7 +116,7 @@ _property: testcases.loadTests(tag) => Array<TestCase>
|
||||
Load all the given testcases for the //tag//.
|
||||
|
||||
A tag is the string in the above list of test case names not including
|
||||
any extenstion (e.g. ``"solidity-hashes"``)
|
||||
any extension (e.g. ``"solidity-hashes"``)
|
||||
|
||||
_property: testcases.TestCase.TEST_NAME
|
||||
Most testcases have their schema available as a TypeScript type to make testing
|
||||
@@ -129,8 +129,8 @@ we do not care what values are used, however we want the values to be consistent
|
||||
across runs. Otherwise it becomes difficult to reproduce an issue.
|
||||
|
||||
In each of the following the seed is used to control the random value returned. Be
|
||||
sure to tweak the seed properly, for eaxmple on each iteration change the value and
|
||||
in recursive functions, concatentate to the seed.
|
||||
sure to tweak the seed properly, for example on each iteration change the value and
|
||||
in recursive functions, concatenate to the seed.
|
||||
|
||||
_property: testcases.randomBytes(seed, lower [, upper ]) => Uint8Array
|
||||
Return at least //lower// random bytes, up to //upper// (exclusive) if specified,
|
||||
@@ -159,9 +159,9 @@ accounts and transactions suites can be merged into one large collection.
|
||||
|
||||
_heading: Accounts
|
||||
|
||||
Basic account information using a private key and computing various addrss forms.
|
||||
Basic account information using a private key and computing various address forms.
|
||||
|
||||
Tests were verfified against [EthereumJS](https:/\/github.com/ethereumjs) and custom
|
||||
Tests were verified against [EthereumJS](https:/\/github.com/ethereumjs) and custom
|
||||
scripts created to directly interact with Geth and cpp implementations.
|
||||
|
||||
//See: ``accounts.json.gz``//
|
||||
|
||||
@@ -7,7 +7,7 @@ module.exports = function(config) {
|
||||
{ pattern: "./packages/ethers/dist/ethers-all.esm.min.js", type: "module" },
|
||||
{ pattern: "./packages/tests/dist/tests.esm.js", type: "module" }
|
||||
],
|
||||
reporters: ['karma'],
|
||||
reporters: [ 'karma' ],
|
||||
plugins: [
|
||||
'karma-mocha',
|
||||
'karma-chrome-launcher',
|
||||
@@ -18,7 +18,7 @@ module.exports = function(config) {
|
||||
browsers: [ 'ChromeHeadless', "HeadlessLittleLiar" ],
|
||||
autoWatch: false,
|
||||
singleRun: true,
|
||||
browserNoActivityTimeout: 60000,
|
||||
browserNoActivityTimeout: 3600000,
|
||||
customLaunchers: {
|
||||
HeadlessLittleLiar: {
|
||||
base: 'ChromeHeadless',
|
||||
|
||||
@@ -18,24 +18,24 @@ module.exports = function(config) {
|
||||
browsers: [ 'ChromeHeadless', "HeadlessLittleLiar" ],
|
||||
autoWatch: false,
|
||||
singleRun: true,
|
||||
browserNoActivityTimeout: 600000,
|
||||
browserNoActivityTimeout: 3600000,
|
||||
|
||||
customLaunchers: {
|
||||
HeadlessLittleLiar: {
|
||||
base: 'ChromeHeadless',
|
||||
// https://peter.sh/experiments/chromium-command-line-switches/
|
||||
flags: [
|
||||
'--disable-extensions',
|
||||
'--disable-extensions',
|
||||
|
||||
// Enable this to help debug CORS issues (otherwise fetch throws a useless TypeError)
|
||||
//'--disable-web-security',
|
||||
// Enable this to help debug CORS issues (otherwise fetch throws a useless TypeError)
|
||||
//'--disable-web-security',
|
||||
|
||||
'--enable-automation',
|
||||
'--enable-automation',
|
||||
|
||||
// Cloudflare will block (on the testnet endpoints) any traffic
|
||||
// from a headless chome (based on the user agent), so we lie
|
||||
// This was take from Safari, because that is what I had on-hand
|
||||
'--user-agent', 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_5) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/13.1.1 Safari/605.1.15']
|
||||
// Cloudflare will block (on the testnet endpoints) any traffic
|
||||
// from a headless chrome (based on the user agent), so we lie
|
||||
// This was taken from Safari, because that is what I had on-hand
|
||||
'--user-agent', 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_5) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/13.1.1 Safari/605.1.15']
|
||||
}
|
||||
},
|
||||
/*
|
||||
|
||||
1
misc/ReactNativeTestApp/libs/.gitignore
vendored
Normal file
1
misc/ReactNativeTestApp/libs/.gitignore
vendored
Normal file
@@ -0,0 +1 @@
|
||||
*.js
|
||||
3
misc/admin/auto-build.sh
Executable file
3
misc/admin/auto-build.sh
Executable file
@@ -0,0 +1,3 @@
|
||||
#!/bin/bash

# Continuous build: run the TypeScript compiler in watch mode, recompiling
# on every source change.
npx tsc -w
|
||||
1
misc/admin/lib/build.d.ts
vendored
Normal file
1
misc/admin/lib/build.d.ts
vendored
Normal file
@@ -0,0 +1 @@
|
||||
/** Configures the repo's tsconfig/package.json files for an ESM (true) or CommonJS (false) build. */
export declare function setupBuild(buildModule: boolean): void;
|
||||
48
misc/admin/lib/build.js
Normal file
48
misc/admin/lib/build.js
Normal file
@@ -0,0 +1,48 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const path_1 = require("./path");
|
||||
const utils_1 = require("./utils");
|
||||
function setupConfig(outDir, moduleType, targetType) {
|
||||
// Configure the tsconfit.package.json...
|
||||
const path = path_1.resolve("tsconfig.package.json");
|
||||
const content = utils_1.loadJson(path);
|
||||
content.compilerOptions.module = moduleType;
|
||||
content.compilerOptions.target = targetType;
|
||||
utils_1.saveJson(path, content, true);
|
||||
// Configure the browser field for every pacakge, copying the
|
||||
// browser.umd filed for UMD and browser.esm for ESM
|
||||
path_1.dirnames.forEach((dirname) => {
|
||||
const filename = path_1.getPackageJsonPath(dirname);
|
||||
const info = utils_1.loadJson(filename);
|
||||
if (info._ethers_nobuild) {
|
||||
return;
|
||||
}
|
||||
if (targetType === "es2015") {
|
||||
if (info["browser.esm"]) {
|
||||
info.browser = info["browser.esm"];
|
||||
}
|
||||
}
|
||||
else if (targetType === "es5") {
|
||||
if (info["browser.umd"]) {
|
||||
info.browser = info["browser.umd"];
|
||||
}
|
||||
}
|
||||
else {
|
||||
throw new Error("unsupported target");
|
||||
}
|
||||
utils_1.saveJson(filename, info, true);
|
||||
let path = path_1.resolve("packages", dirname, "tsconfig.json");
|
||||
let content = utils_1.loadJson(path);
|
||||
content.compilerOptions.outDir = outDir;
|
||||
utils_1.saveJson(path, content, true);
|
||||
});
|
||||
}
|
||||
function setupBuild(buildModule) {
|
||||
if (buildModule) {
|
||||
setupConfig("./lib.esm/", "es2015", "es2015");
|
||||
}
|
||||
else {
|
||||
setupConfig("./lib/", "commonjs", "es5");
|
||||
}
|
||||
}
|
||||
exports.setupBuild = setupBuild;
|
||||
8
misc/admin/lib/changelog.d.ts
vendored
Normal file
8
misc/admin/lib/changelog.d.ts
vendored
Normal file
@@ -0,0 +1,8 @@
|
||||
/** One parsed CHANGELOG.md entry. */
export declare type Change = {
    /** The full heading line, e.g. "ethers/v5.0.0 (2020-06-12 00:00)". */
    title: string;
    /** The version parsed from the heading. */
    version: string;
    /** The date parsed from the heading. */
    date: string;
    /** The body text of the entry. */
    content: string;
};
/** Builds an updated CHANGELOG.md body including commits since the last publish. */
export declare function generate(): Promise<string>;
/** Returns the most recent entry parsed from CHANGELOG.md. */
export declare function getLatestChange(): Change;
||||
133
misc/admin/lib/changelog.js
Normal file
133
misc/admin/lib/changelog.js
Normal file
@@ -0,0 +1,133 @@
|
||||
"use strict";
|
||||
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
||||
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
||||
return new (P || (P = Promise))(function (resolve, reject) {
|
||||
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
|
||||
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
|
||||
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
|
||||
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
||||
});
|
||||
};
|
||||
var __importDefault = (this && this.__importDefault) || function (mod) {
|
||||
return (mod && mod.__esModule) ? mod : { "default": mod };
|
||||
};
|
||||
var __importStar = (this && this.__importStar) || function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
|
||||
result["default"] = mod;
|
||||
return result;
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const fs_1 = __importDefault(require("fs"));
|
||||
const local = __importStar(require("./local"));
|
||||
const log_1 = require("./log");
|
||||
const npm = __importStar(require("./npm"));
|
||||
const path_1 = require("./path");
|
||||
const run_1 = require("./run");
|
||||
const utils_1 = require("./utils");
|
||||
const changelogPath = path_1.resolve("CHANGELOG.md");
|
||||
function generate() {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
const lines = fs_1.default.readFileSync(changelogPath).toString().trim().split("\n");
|
||||
let firstLine = null;
|
||||
const versions = Object.keys(lines.reduce((accum, line, index) => {
|
||||
const match = line.match(/^ethers\/v([^ ]*)/);
|
||||
if (match) {
|
||||
if (firstLine == null) {
|
||||
firstLine = index;
|
||||
}
|
||||
accum[match[1]] = true;
|
||||
}
|
||||
return accum;
|
||||
}, {}));
|
||||
const version = local.getPackage("ethers").version;
|
||||
;
|
||||
const published = yield npm.getPackage("ethers");
|
||||
if (versions.indexOf(version) >= 0) {
|
||||
const line = `Version ${version} already in CHANGELOG. Please edit before committing.`;
|
||||
console.log(log_1.colorify.red(utils_1.repeat("=", line.length)));
|
||||
console.log(log_1.colorify.red(line));
|
||||
console.log(log_1.colorify.red(utils_1.repeat("=", line.length)));
|
||||
}
|
||||
const gitResult = yield run_1.run("git", ["log", (published.gitHead + "..")]);
|
||||
if (!gitResult.ok) {
|
||||
console.log(gitResult);
|
||||
throw new Error("Error running git log");
|
||||
}
|
||||
let changes = [];
|
||||
gitResult.stdout.split("\n").forEach((line) => {
|
||||
if (line.toLowerCase().substring(0, 6) === "commit") {
|
||||
changes.push({
|
||||
commit: line.substring(6).trim(),
|
||||
date: null,
|
||||
body: ""
|
||||
});
|
||||
}
|
||||
else if (line.toLowerCase().substring(0, 5) === "date:") {
|
||||
changes[changes.length - 1].date = utils_1.getDateTime(new Date(line.substring(5).trim()));
|
||||
}
|
||||
else if (line.substring(0, 1) === " ") {
|
||||
line = line.trim();
|
||||
if (line === "") {
|
||||
return;
|
||||
}
|
||||
changes[changes.length - 1].body += line + " ";
|
||||
}
|
||||
});
|
||||
const output = [];
|
||||
for (let i = 0; i < firstLine; i++) {
|
||||
output.push(lines[i]);
|
||||
}
|
||||
const newTitle = `ethers/v${version} (${utils_1.getDateTime(new Date())})`;
|
||||
output.push(newTitle);
|
||||
output.push(utils_1.repeat("-", newTitle.length));
|
||||
output.push("");
|
||||
changes.forEach((change) => {
|
||||
let body = change.body.trim();
|
||||
let linkMatch = body.match(/(\((.*#.*)\))/);
|
||||
let commit = `[${change.commit.substring(0, 7)}](https://github.com/ethers-io/ethers.js/commit/${change.commit})`;
|
||||
let link = commit;
|
||||
if (linkMatch) {
|
||||
body = body.replace(/ *(\(.*#.*)\) */, "");
|
||||
link = linkMatch[2].replace(/#([0-9]+)/g, (all, issue) => {
|
||||
return `[#${issue}](https://github.com/ethers-io/ethers.js/issues/${issue})`;
|
||||
}) + "; " + commit;
|
||||
}
|
||||
output.push(` - ${body} (${link})`);
|
||||
});
|
||||
output.push("");
|
||||
for (let i = firstLine; i < lines.length; i++) {
|
||||
output.push(lines[i]);
|
||||
}
|
||||
return output.join("\n");
|
||||
});
|
||||
}
|
||||
exports.generate = generate;
|
||||
function getLatestChange() {
|
||||
let result = null;
|
||||
const lines = fs_1.default.readFileSync(changelogPath).toString().split("\n");
|
||||
for (let i = 0; i < lines.length; i++) {
|
||||
const line = lines[i];
|
||||
const match = line.match(/ethers\/([^\(]*)\(([^\)]*)\)/);
|
||||
if (match) {
|
||||
if (result) {
|
||||
break;
|
||||
}
|
||||
result = {
|
||||
title: line.trim(),
|
||||
version: match[1].trim(),
|
||||
date: match[2].trim(),
|
||||
content: ""
|
||||
};
|
||||
}
|
||||
else if (result) {
|
||||
if (!line.trim().match(/^-+$/)) {
|
||||
result.content += line.trim() + "\n";
|
||||
}
|
||||
}
|
||||
}
|
||||
result.content = result.content.trim();
|
||||
return result;
|
||||
}
|
||||
exports.getLatestChange = getLatestChange;
|
||||
1
misc/admin/lib/cmds/bump-versions.d.ts
vendored
Normal file
1
misc/admin/lib/cmds/bump-versions.d.ts
vendored
Normal file
@@ -0,0 +1 @@
|
||||
// Declaration stub: this command has no exports (export {} marks it a module).
export {};
|
||||
81
misc/admin/lib/cmds/bump-versions.js
Normal file
81
misc/admin/lib/cmds/bump-versions.js
Normal file
@@ -0,0 +1,81 @@
|
||||
"use strict";
|
||||
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
||||
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
||||
return new (P || (P = Promise))(function (resolve, reject) {
|
||||
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
|
||||
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
|
||||
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
|
||||
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
||||
});
|
||||
};
|
||||
var __importDefault = (this && this.__importDefault) || function (mod) {
|
||||
return (mod && mod.__esModule) ? mod : { "default": mod };
|
||||
};
|
||||
var __importStar = (this && this.__importStar) || function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
|
||||
result["default"] = mod;
|
||||
return result;
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const fs_1 = __importDefault(require("fs"));
|
||||
const semver_1 = __importDefault(require("semver"));
|
||||
const path_1 = require("../path");
|
||||
const local = __importStar(require("../local"));
|
||||
const log_1 = require("../log");
|
||||
const npm = __importStar(require("../npm"));
|
||||
const utils_1 = require("../utils");
|
||||
(function () {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
const progress = log_1.getProgressBar(log_1.colorify.bold("Bumping package.json versions"));
|
||||
const latestVersions = {};
|
||||
let updated = false;
|
||||
const output = [];
|
||||
// For each package, detect diff between tarball and remote
|
||||
for (let i = 0; i < path_1.dirnames.length; i++) {
|
||||
progress(i / path_1.dirnames.length);
|
||||
const dirname = path_1.dirnames[i];
|
||||
const pLocal = local.getPackage(dirname);
|
||||
const pNpm = yield npm.getPackage(dirname);
|
||||
const tarballHash = local.computeTarballHash(dirname);
|
||||
let version = pNpm.version;
|
||||
if (tarballHash !== pNpm.tarballHash) {
|
||||
version = semver_1.default.inc(version, "patch");
|
||||
output.push([
|
||||
" ",
|
||||
log_1.colorify.blue(pLocal.name),
|
||||
utils_1.repeat(" ", 47 - pLocal.name.length - pNpm.version.length),
|
||||
pNpm.version,
|
||||
log_1.colorify.bold(" => "),
|
||||
log_1.colorify.green(version)
|
||||
].join(""));
|
||||
local.updateJson(path_1.getPackageJsonPath(dirname), { gitHead: undefined, tarballHash, version }, true);
|
||||
updated = true;
|
||||
}
|
||||
latestVersions[pLocal.name] = version;
|
||||
// Write out the _version.ts
|
||||
if (!pLocal._ethers_nobuild) {
|
||||
const code = "export const version = " + JSON.stringify(dirname + "/" + version) + ";\n";
|
||||
fs_1.default.writeFileSync(path_1.resolve(path_1.getPackagePath(dirname), "src.ts/_version.ts"), code);
|
||||
}
|
||||
}
|
||||
progress(1);
|
||||
if (updated) {
|
||||
const filename = path_1.resolve("packages/ethers/package.json");
|
||||
const info = utils_1.loadJson(filename);
|
||||
Object.keys(info.dependencies).forEach((name) => {
|
||||
const version = latestVersions[name];
|
||||
if (name == null) {
|
||||
return;
|
||||
}
|
||||
info.dependencies[name] = version;
|
||||
});
|
||||
utils_1.saveJson(filename, info);
|
||||
}
|
||||
output.forEach((line) => { console.log(line); });
|
||||
});
|
||||
})().catch((error) => {
|
||||
console.log(`Error running ${process.argv[0]}: ${error.message}`);
|
||||
process.exit(1);
|
||||
});
|
||||
1
misc/admin/lib/cmds/echo.d.ts
vendored
Normal file
1
misc/admin/lib/cmds/echo.d.ts
vendored
Normal file
@@ -0,0 +1 @@
|
||||
// Declaration stub: this command has no exports (export {} marks it a module).
export {};
|
||||
4
misc/admin/lib/cmds/echo.js
Normal file
4
misc/admin/lib/cmds/echo.js
Normal file
@@ -0,0 +1,4 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const log_1 = require("../log");
|
||||
console.log(log_1.colorify.bold(process.argv[2] || "no message"));
|
||||
1
misc/admin/lib/cmds/hoist.d.ts
vendored
Normal file
1
misc/admin/lib/cmds/hoist.d.ts
vendored
Normal file
@@ -0,0 +1 @@
|
||||
// Declaration stub: this command has no exports (export {} marks it a module).
export {};
|
||||
26
misc/admin/lib/cmds/hoist.js
Normal file
26
misc/admin/lib/cmds/hoist.js
Normal file
@@ -0,0 +1,26 @@
|
||||
"use strict";
|
||||
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
||||
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
||||
return new (P || (P = Promise))(function (resolve, reject) {
|
||||
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
|
||||
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
|
||||
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
|
||||
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
||||
});
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const path_1 = require("../path");
|
||||
const local_1 = require("../local");
|
||||
const log_1 = require("../log");
|
||||
(function () {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
const dependencies = local_1.getDependencies(null, (name) => {
|
||||
return !path_1.isEthers(name);
|
||||
});
|
||||
console.log(log_1.colorify.bold(`Hoisting ${Object.keys(dependencies).length} dependencies into root package...`));
|
||||
local_1.updateJson(path_1.dirs.rootPackageJsonPath, { dependencies });
|
||||
});
|
||||
})().catch((error) => {
|
||||
console.log(`Error running ${process.argv[0]}: ${error.message}`);
|
||||
process.exit(1);
|
||||
});
|
||||
1
misc/admin/lib/cmds/link.d.ts
vendored
Normal file
1
misc/admin/lib/cmds/link.d.ts
vendored
Normal file
@@ -0,0 +1 @@
|
||||
export {};
|
||||
65
misc/admin/lib/cmds/link.js
Normal file
65
misc/admin/lib/cmds/link.js
Normal file
@@ -0,0 +1,65 @@
|
||||
"use strict";
|
||||
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
||||
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
||||
return new (P || (P = Promise))(function (resolve, reject) {
|
||||
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
|
||||
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
|
||||
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
|
||||
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
||||
});
|
||||
};
|
||||
var __importDefault = (this && this.__importDefault) || function (mod) {
|
||||
return (mod && mod.__esModule) ? mod : { "default": mod };
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const fs_1 = __importDefault(require("fs"));
|
||||
const path_1 = require("path");
|
||||
const local_1 = require("../local");
|
||||
const log_1 = require("../log");
|
||||
const path_2 = require("../path");
|
||||
const utils_1 = require("../utils");
|
||||
function link(existing, path) {
|
||||
try {
|
||||
const current = fs_1.default.readlinkSync(path);
|
||||
// Alerady linked
|
||||
if (current === existing) {
|
||||
return;
|
||||
}
|
||||
fs_1.default.unlinkSync(path);
|
||||
}
|
||||
catch (error) {
|
||||
if (error.code !== "ENOENT") {
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
// Link
|
||||
const dir = path_1.dirname(path);
|
||||
utils_1.mkdir(dir);
|
||||
fs_1.default.symlinkSync(existing, path);
|
||||
}
|
||||
(function () {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
console.log(log_1.colorify.bold(`Linking ${path_2.packages.length} package node_modules rat nests...`));
|
||||
const nodeModulesBase = path_1.resolve(path_2.dirs.root, ".package_node_modules");
|
||||
// Make a symlink in the ROOT/node_mpdules to each package in this repo
|
||||
path_2.packages.forEach((name) => {
|
||||
// e.g. /node_modules/@ethersproject/abi => /packages/abi
|
||||
link(path_2.getPackagePath(name), path_1.resolve(path_2.dirs.root, "node_modules", name));
|
||||
// e.g. /packages/abi/node_modules => /.package_node_modules/abi/
|
||||
const nodeModules = path_1.resolve(nodeModulesBase, path_2.getDirname(name));
|
||||
utils_1.mkdir(nodeModules);
|
||||
link(nodeModules, path_1.resolve(path_2.getPackagePath(name), "node_modules"));
|
||||
});
|
||||
path_2.packages.forEach((name) => {
|
||||
const nodeModules = path_1.resolve(nodeModulesBase, path_2.getDirname(name));
|
||||
const deps = local_1.getDependencies(name);
|
||||
Object.keys(deps).forEach((name) => {
|
||||
link(path_1.resolve(path_2.dirs.root, "node_modules", name), path_1.resolve(nodeModules, name));
|
||||
});
|
||||
});
|
||||
});
|
||||
})().catch((error) => {
|
||||
console.log(`Error running ${process.argv[0]}: ${error.message}`);
|
||||
process.exit(1);
|
||||
});
|
||||
;
|
||||
1
misc/admin/lib/cmds/npm-skip-node8.d.ts
vendored
Normal file
1
misc/admin/lib/cmds/npm-skip-node8.d.ts
vendored
Normal file
@@ -0,0 +1 @@
|
||||
declare const major: any;
|
||||
@@ -1,15 +1,13 @@
|
||||
"use strict";
|
||||
|
||||
const { major } = require("semver");
|
||||
|
||||
// This should be used like `node npm-skip-node8 || COMMAND`.
|
||||
// - If node 8, this script returns true, skipping COMMAND
|
||||
// - Otherwise, return false, running COMMAND
|
||||
|
||||
if (major(process.version) > 8) {
|
||||
// Node >8; return "false" (wrt to shell scripting)
|
||||
process.exit(1);
|
||||
} else {
|
||||
}
|
||||
else {
|
||||
// Node 8; return "true" (wrt to shell scripting)
|
||||
process.exit(0);
|
||||
}
|
||||
15
misc/admin/lib/cmds/publish.d.ts
vendored
Normal file
15
misc/admin/lib/cmds/publish.d.ts
vendored
Normal file
@@ -0,0 +1,15 @@
|
||||
/// <reference types="node" />
|
||||
import AWS from 'aws-sdk';
|
||||
declare type PutInfo = {
|
||||
ACL: "public-read";
|
||||
Body: string | Buffer;
|
||||
Bucket: string;
|
||||
ContentType: string;
|
||||
Key: string;
|
||||
};
|
||||
export declare function putObject(s3: AWS.S3, info: PutInfo): Promise<{
|
||||
name: string;
|
||||
hash: string;
|
||||
}>;
|
||||
export declare function invalidate(cloudfront: AWS.CloudFront, distributionId: string): Promise<string>;
|
||||
export {};
|
||||
202
misc/admin/lib/cmds/publish.js
Normal file
202
misc/admin/lib/cmds/publish.js
Normal file
@@ -0,0 +1,202 @@
|
||||
"use strict";
|
||||
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
||||
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
||||
return new (P || (P = Promise))(function (resolve, reject) {
|
||||
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
|
||||
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
|
||||
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
|
||||
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
||||
});
|
||||
};
|
||||
var __importDefault = (this && this.__importDefault) || function (mod) {
|
||||
return (mod && mod.__esModule) ? mod : { "default": mod };
|
||||
};
|
||||
var __importStar = (this && this.__importStar) || function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
|
||||
result["default"] = mod;
|
||||
return result;
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const aws_sdk_1 = __importDefault(require("aws-sdk"));
|
||||
const fs_1 = __importDefault(require("fs"));
|
||||
const changelog_1 = require("../changelog");
|
||||
const config_1 = require("../config");
|
||||
const depgraph_1 = require("../depgraph");
|
||||
const git_1 = require("../git");
|
||||
const github_1 = require("../github");
|
||||
const local = __importStar(require("../local"));
|
||||
const log_1 = require("../log");
|
||||
const npm = __importStar(require("../npm"));
|
||||
const path_1 = require("../path");
|
||||
const utils_1 = require("../utils");
|
||||
const USER_AGENT = "ethers-dist@0.0.1";
|
||||
const TAG = "latest";
|
||||
function putObject(s3, info) {
|
||||
return new Promise((resolve, reject) => {
|
||||
s3.putObject(info, function (error, data) {
|
||||
if (error) {
|
||||
reject(error);
|
||||
}
|
||||
else {
|
||||
resolve({
|
||||
name: info.Key,
|
||||
hash: data.ETag.replace(/"/g, '')
|
||||
});
|
||||
}
|
||||
});
|
||||
});
|
||||
}
|
||||
exports.putObject = putObject;
|
||||
function invalidate(cloudfront, distributionId) {
|
||||
return new Promise((resolve, reject) => {
|
||||
cloudfront.createInvalidation({
|
||||
DistributionId: distributionId,
|
||||
InvalidationBatch: {
|
||||
CallerReference: `${USER_AGENT}-${parseInt(String((new Date()).getTime() / 1000))}`,
|
||||
Paths: {
|
||||
Quantity: 1,
|
||||
Items: [
|
||||
"/\*"
|
||||
]
|
||||
}
|
||||
}
|
||||
}, function (error, data) {
|
||||
if (error) {
|
||||
console.log(error);
|
||||
return;
|
||||
}
|
||||
resolve(data.Invalidation.Id);
|
||||
});
|
||||
});
|
||||
}
|
||||
exports.invalidate = invalidate;
|
||||
(function () {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
const dirnames = depgraph_1.getOrdered();
|
||||
// @TODO: Fail if there are any untracked files or unchecked in files
|
||||
const publish = {};
|
||||
const progressUpdate = log_1.getProgressBar(log_1.colorify.bold("Finding updated packages..."));
|
||||
for (let i = 0; i < dirnames.length; i++) {
|
||||
progressUpdate(i / dirnames.length);
|
||||
let dirname = dirnames[i];
|
||||
let info = local.getPackage(dirname);
|
||||
let npmInfo = yield npm.getPackage(dirname);
|
||||
// No change in version, no need to publish
|
||||
if (info.version === npmInfo.version) {
|
||||
continue;
|
||||
}
|
||||
// Get the latest commit this package was modified at
|
||||
const path = path_1.resolve("packages", dirname);
|
||||
const gitHead = yield git_1.getGitTag(path);
|
||||
if (gitHead == null) {
|
||||
throw new Error("hmmm...");
|
||||
}
|
||||
publish[dirname] = {
|
||||
name: info.name,
|
||||
gitHead: gitHead,
|
||||
oldVersion: (npmInfo ? npmInfo.version : "NEW"),
|
||||
newVersion: info.version
|
||||
};
|
||||
}
|
||||
progressUpdate(1);
|
||||
console.log(log_1.colorify.bold(`Found ${Object.keys(publish).length} updated pacakges...`));
|
||||
Object.keys(publish).forEach((dirname) => {
|
||||
const info = publish[dirname];
|
||||
console.log(` ${log_1.colorify.blue(info.name)} ${utils_1.repeat(" ", 50 - info.name.length - info.oldVersion.length)} ${info.oldVersion} ${log_1.colorify.bold("=>")} ${log_1.colorify.green(info.newVersion)}`);
|
||||
});
|
||||
const publishNames = Object.keys(publish);
|
||||
publishNames.sort((a, b) => (dirnames.indexOf(a) - dirnames.indexOf(b)));
|
||||
// Load the token from the encrypted store
|
||||
const options = {
|
||||
access: "public",
|
||||
npmVersion: USER_AGENT,
|
||||
tag: TAG
|
||||
};
|
||||
try {
|
||||
const token = (yield config_1.config.get("npm-token")).trim().split("=");
|
||||
options[token[0]] = token[1];
|
||||
}
|
||||
catch (error) {
|
||||
switch (error.message) {
|
||||
case "wrong password":
|
||||
console.log(log_1.colorify.bold("Wrong password"));
|
||||
break;
|
||||
case "cancelled":
|
||||
break;
|
||||
default:
|
||||
console.log(error);
|
||||
}
|
||||
console.log(log_1.colorify.red("Aborting."));
|
||||
return;
|
||||
}
|
||||
console.log(log_1.colorify.bold("Publishing:"));
|
||||
for (let i = 0; i < publishNames.length; i++) {
|
||||
const dirname = publishNames[i];
|
||||
const path = path_1.resolve("packages", dirname);
|
||||
const pathJson = path_1.resolve("packages", dirname, "package.json");
|
||||
const { gitHead, name, newVersion } = publish[dirname];
|
||||
console.log(` ${log_1.colorify.blue(name)} @ ${log_1.colorify.green(newVersion)}`);
|
||||
local.updateJson(pathJson, { gitHead: gitHead }, true);
|
||||
const info = utils_1.loadJson(pathJson);
|
||||
yield npm.publish(path, info, options);
|
||||
local.updateJson(pathJson, { gitHead: undefined }, true);
|
||||
}
|
||||
if (publishNames.indexOf("ethers") >= 0) {
|
||||
const change = changelog_1.getLatestChange();
|
||||
const awsAccessId = yield config_1.config.get("aws-upload-scripts-accesskey");
|
||||
const awsSecretKey = yield config_1.config.get("aws-upload-scripts-secretkey");
|
||||
// Publish tagged release on GitHub
|
||||
{
|
||||
// The password above already succeeded
|
||||
const username = yield config_1.config.get("github-user");
|
||||
const password = yield config_1.config.get("github-release");
|
||||
const gitCommit = yield git_1.getGitTag(path_1.resolve("CHANGELOG.md"));
|
||||
// Publish the release
|
||||
const beta = false;
|
||||
const link = yield github_1.createRelease(username, password, change.version, change.title, change.content, beta, gitCommit);
|
||||
console.log(`${log_1.colorify.bold("Published release:")} ${link}`);
|
||||
}
|
||||
// Upload libs to the CDN (as ethers-v5.0 and ethers-5.0.x)
|
||||
{
|
||||
const bucketName = yield config_1.config.get("aws-upload-scripts-bucket");
|
||||
const originRoot = yield config_1.config.get("aws-upload-scripts-root");
|
||||
const s3 = new aws_sdk_1.default.S3({
|
||||
apiVersion: '2006-03-01',
|
||||
accessKeyId: awsAccessId,
|
||||
secretAccessKey: awsSecretKey
|
||||
});
|
||||
// Upload the libs to ethers-v5.0 and ethers-5.0.x
|
||||
const fileInfos = [
|
||||
{ filename: "packages/ethers/dist/ethers.esm.min.js", key: `ethers-${change.version.substring(1)}.esm.min.js` },
|
||||
{ filename: "packages/ethers/dist/ethers.umd.min.js", key: `ethers-${change.version.substring(1)}.umd.min.js` },
|
||||
{ filename: "packages/ethers/dist/ethers.esm.min.js", key: "ethers-5.0.esm.min.js" },
|
||||
{ filename: "packages/ethers/dist/ethers.umd.min.js", key: "ethers-5.0.umd.min.js" },
|
||||
];
|
||||
for (let i = 0; i < fileInfos.length; i++) {
|
||||
const { filename, key } = fileInfos[i];
|
||||
yield putObject(s3, {
|
||||
ACL: "public-read",
|
||||
Body: fs_1.default.readFileSync(path_1.resolve(filename)),
|
||||
Bucket: bucketName,
|
||||
ContentType: "application/javascript; charset=utf-8",
|
||||
Key: (originRoot + key)
|
||||
});
|
||||
console.log(`${log_1.colorify.bold("Uploaded:")} https://cdn.ethers.io/lib/${key}`);
|
||||
}
|
||||
}
|
||||
// Flush the edge caches
|
||||
{
|
||||
const distributionId = yield config_1.config.get("aws-upload-scripts-distribution-id");
|
||||
const cloudfront = new aws_sdk_1.default.CloudFront({
|
||||
//apiVersion: '2006-03-01',
|
||||
accessKeyId: awsAccessId,
|
||||
secretAccessKey: awsSecretKey
|
||||
});
|
||||
const invalidationId = yield invalidate(cloudfront, distributionId);
|
||||
console.log(`${log_1.colorify.bold("Invalidating Edge Cache:")} ${invalidationId}`);
|
||||
}
|
||||
}
|
||||
});
|
||||
})();
|
||||
8
misc/admin/lib/cmds/serve-docs.d.ts
vendored
Normal file
8
misc/admin/lib/cmds/serve-docs.d.ts
vendored
Normal file
@@ -0,0 +1,8 @@
|
||||
/// <reference types="node" />
|
||||
import { Server } from "http";
|
||||
export declare function getMime(filename: string): string;
|
||||
export declare type Options = {
|
||||
port?: number;
|
||||
redirects?: Record<string, string>;
|
||||
};
|
||||
export declare function start(root: string, options: Options): Server;
|
||||
96
misc/admin/lib/cmds/serve-docs.js
Normal file
96
misc/admin/lib/cmds/serve-docs.js
Normal file
@@ -0,0 +1,96 @@
|
||||
"use strict";
|
||||
var __importDefault = (this && this.__importDefault) || function (mod) {
|
||||
return (mod && mod.__esModule) ? mod : { "default": mod };
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const fs_1 = __importDefault(require("fs"));
|
||||
const http_1 = require("http");
|
||||
const path_1 = require("path");
|
||||
function getMime(filename) {
|
||||
switch (filename.split('.').pop().toLowerCase()) {
|
||||
case 'css': return 'text/css';
|
||||
case 'doctree': return 'application/x-doctree';
|
||||
case 'eot': return 'application/vnd.ms-fontobject';
|
||||
case 'gif': return 'image/gif';
|
||||
case 'html': return 'text/html';
|
||||
case 'js': return 'application/javascript';
|
||||
case 'jpg': return 'image/jpeg';
|
||||
case 'jpeg': return 'image/jpeg';
|
||||
case 'md': return 'text/markdown';
|
||||
case 'pickle': return 'application/x-pickle';
|
||||
case 'png': return 'image/png';
|
||||
case 'svg': return 'image/svg+xml';
|
||||
case 'ttf': return 'application/x-font-ttf';
|
||||
case 'txt': return 'text/plain';
|
||||
case 'woff': return 'application/font-woff';
|
||||
}
|
||||
console.log('NO MIME', filename);
|
||||
return "application/octet-stream";
|
||||
}
|
||||
exports.getMime = getMime;
|
||||
function start(root, options) {
|
||||
if (root == null) {
|
||||
throw new Error("root required");
|
||||
}
|
||||
if (options == null) {
|
||||
options = {};
|
||||
}
|
||||
if (options.port == null) {
|
||||
options.port = 8000;
|
||||
}
|
||||
root = path_1.resolve(root);
|
||||
const server = http_1.createServer((req, resp) => {
|
||||
// Follow redirects in options
|
||||
if (options.redirects && options.redirects[req.url]) {
|
||||
resp.writeHead(301, { Location: options.redirects[req.url] });
|
||||
resp.end();
|
||||
return;
|
||||
}
|
||||
let filename = path_1.resolve(root, "." + req.url);
|
||||
// Make sure we aren't crawling out of our sandbox
|
||||
if (req.url[0] !== "/" || filename.substring(0, filename.length) !== filename) {
|
||||
resp.writeHead(403);
|
||||
resp.end();
|
||||
return;
|
||||
}
|
||||
try {
|
||||
const stat = fs_1.default.statSync(filename);
|
||||
if (stat.isDirectory()) {
|
||||
// Redirect bare directory to its path (i.e. "/foo" => "/foo/")
|
||||
if (req.url[req.url.length - 1] !== "/") {
|
||||
resp.writeHead(301, { Location: req.url + "/" });
|
||||
resp.end();
|
||||
return;
|
||||
}
|
||||
filename += "/index.html";
|
||||
}
|
||||
const content = fs_1.default.readFileSync(filename);
|
||||
resp.writeHead(200, {
|
||||
"Content-Length": content.length,
|
||||
"Content-Type": getMime(filename)
|
||||
});
|
||||
resp.end(content);
|
||||
return;
|
||||
}
|
||||
catch (error) {
|
||||
if (error.code === "ENOENT") {
|
||||
resp.writeHead(404, {});
|
||||
resp.end();
|
||||
return;
|
||||
}
|
||||
resp.writeHead(500, {});
|
||||
resp.end();
|
||||
return;
|
||||
}
|
||||
});
|
||||
server.listen(options.port, () => {
|
||||
console.log(`Server running on: http://localhost:${options.port}`);
|
||||
});
|
||||
return server;
|
||||
}
|
||||
exports.start = start;
|
||||
start(path_1.resolve(__dirname, "../../docs"), {
|
||||
redirects: {
|
||||
"/": "/v5/"
|
||||
}
|
||||
});
|
||||
1
misc/admin/lib/cmds/set-build-option.d.ts
vendored
Normal file
1
misc/admin/lib/cmds/set-build-option.d.ts
vendored
Normal file
@@ -0,0 +1 @@
|
||||
export {};
|
||||
45
misc/admin/lib/cmds/set-build-option.js
Normal file
45
misc/admin/lib/cmds/set-build-option.js
Normal file
@@ -0,0 +1,45 @@
|
||||
"use strict";
|
||||
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
||||
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
||||
return new (P || (P = Promise))(function (resolve, reject) {
|
||||
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
|
||||
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
|
||||
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
|
||||
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
||||
});
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const build_1 = require("../build");
|
||||
const log_1 = require("../log");
|
||||
const path_1 = require("../path");
|
||||
const utils_1 = require("../utils");
|
||||
(function () {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
process.argv.slice(2).forEach((arg) => {
|
||||
console.log(log_1.colorify.bold("Setting Option:"), arg);
|
||||
switch (arg) {
|
||||
case "esm":
|
||||
build_1.setupBuild(true);
|
||||
break;
|
||||
case "cjs":
|
||||
build_1.setupBuild(false);
|
||||
break;
|
||||
// This will remove the browser field entirely, so make sure
|
||||
// to set esm of cjs first as they will restore the browser
|
||||
// field
|
||||
case "browser-lang-all": {
|
||||
const filename = path_1.getPackageJsonPath("wordlists");
|
||||
const info = utils_1.loadJson(filename);
|
||||
delete info.browser;
|
||||
utils_1.saveJson(filename, info, true);
|
||||
break;
|
||||
}
|
||||
default:
|
||||
throw new Error(`Unknown option: ${JSON.stringify(arg)}`);
|
||||
}
|
||||
});
|
||||
});
|
||||
})().catch((error) => {
|
||||
console.log(`Error running ${process.argv[0]}: ${error.message}`);
|
||||
process.exit(1);
|
||||
});
|
||||
1
misc/admin/lib/cmds/set-config.d.ts
vendored
Normal file
1
misc/admin/lib/cmds/set-config.d.ts
vendored
Normal file
@@ -0,0 +1 @@
|
||||
export {};
|
||||
27
misc/admin/lib/cmds/set-config.js
Normal file
27
misc/admin/lib/cmds/set-config.js
Normal file
@@ -0,0 +1,27 @@
|
||||
"use strict";
|
||||
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
||||
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
||||
return new (P || (P = Promise))(function (resolve, reject) {
|
||||
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
|
||||
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
|
||||
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
|
||||
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
||||
});
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const config_1 = require("../config");
|
||||
const log_1 = require("../log");
|
||||
if (process.argv.length !== 3) {
|
||||
console.log("Usage: set-config KEY");
|
||||
process.exit(1);
|
||||
}
|
||||
const key = process.argv[2];
|
||||
(function () {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
const value = yield log_1.getPassword("Value: ");
|
||||
yield config_1.config.set(key, value);
|
||||
});
|
||||
})().catch((error) => {
|
||||
console.log(`Error running ${process.argv[0]}: ${error.message}`);
|
||||
process.exit(1);
|
||||
});
|
||||
1
misc/admin/lib/cmds/spell-check.d.ts
vendored
Normal file
1
misc/admin/lib/cmds/spell-check.d.ts
vendored
Normal file
@@ -0,0 +1 @@
|
||||
export {};
|
||||
227
misc/admin/lib/cmds/spell-check.js
Normal file
227
misc/admin/lib/cmds/spell-check.js
Normal file
@@ -0,0 +1,227 @@
|
||||
"use strict";
|
||||
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
||||
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
||||
return new (P || (P = Promise))(function (resolve, reject) {
|
||||
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
|
||||
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
|
||||
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
|
||||
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
||||
});
|
||||
};
|
||||
var __importDefault = (this && this.__importDefault) || function (mod) {
|
||||
return (mod && mod.__esModule) ? mod : { "default": mod };
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const fs_1 = __importDefault(require("fs"));
|
||||
const path_1 = require("path");
|
||||
const typescript_1 = __importDefault(require("typescript"));
|
||||
const log_1 = require("../log");
|
||||
const Words = fs_1.default.readFileSync("/usr/share/dict/words").toString().split("\n").reduce((accum, word) => {
|
||||
accum[word.toLowerCase()] = true;
|
||||
return accum;
|
||||
}, {});
|
||||
`
|
||||
// Words missing from the dictionary
|
||||
accessing addresses aligned autofill called cancelled changed censored
|
||||
clamping compiled computed configured consumed creating decoded decoding
|
||||
decreased decrypt decrypted decrypting deployed deploying deprecated detected
|
||||
discontinued earliest email enabled encoded encoding encrypt
|
||||
encrypted encrypting entries euro exceeded existing expected
|
||||
expired failed fetches formatted formatting funding generated
|
||||
hardened has highly ignoring implemented implementer imported including instantiate
|
||||
joined keyword labelled larger lookup matches mined modified modifies multi
|
||||
named needed nested neutered numeric offline optimizer overriding owned packed
|
||||
padded parsed parsing passed placeholder processing properties prototyping reached
|
||||
recommended recovered redacted remaining replaced required
|
||||
serializes shared signed signing skipped stored supported tagging targetted
|
||||
throttled transactions uninstall unstake unsubscribe using verifies website
|
||||
|
||||
// Overly Specific Words
|
||||
bech BIP BIP39 BIP44 btc bzz crypto eip etc hashes hmac icap
|
||||
keccak ltc namehash ripemd RLP scrypt secp sha xdai
|
||||
|
||||
blockhash
|
||||
|
||||
bitcoin ethereum finney gwei kwei mwei satoshi szabo wei weth
|
||||
|
||||
crowdsale hexlify hd hdnode underpriced
|
||||
|
||||
boolean int struct tuple uint
|
||||
nonpayable
|
||||
jumpdest mstore shr shl xor
|
||||
|
||||
// Classes
|
||||
ABIEncoder testcase numberish Wordlist
|
||||
|
||||
// Common Code Strings
|
||||
abi addr api app arg arrayify asm basex bigint bignumber bn byte
|
||||
bytecode callback calldata checksum ciphertext cli codepoint commify config
|
||||
contenthash ctr ctrl debug dd dklen eexist encseed eof ethaddr
|
||||
ethseed ethers eval exec filename func gz hid http https hw iv
|
||||
info init ipc json kdf kdfparams labelhash lang lib mm multihash nfc
|
||||
nfkc nfd nfkd nodehash notok nowait nullish oob opcode pbkdf pc plugin
|
||||
pragma pre prf repl rpc sighash topichash solc stdin stdout subclasses
|
||||
subnode timeout todo txt ufixed utc utf util url uuid vm vs websocket
|
||||
wikipedia wx xe xpriv xpub xx yyyy zlib
|
||||
|
||||
// AbiV2
|
||||
abiv
|
||||
|
||||
// Query parameters
|
||||
apikey asc endblock startblock
|
||||
|
||||
alchemyapi Cloudflare Etherscan INFURA IPFS MetaMask Nodesmith
|
||||
Trezor ledgerhq axic bitcoinjs browserify easyseed ethereumjs
|
||||
goerli homestead kotti kovan mainnet morden mordor rinkeby
|
||||
ropsten testnet
|
||||
|
||||
// Demo words
|
||||
args foo eth foo foobar ll localhost passwd ricmoo tx xxx yna
|
||||
|
||||
// nameprep tags
|
||||
ALCat BiDi LCat nameprep
|
||||
|
||||
// Lanauge Codes (and short binary data)
|
||||
cn cz en es fr it ja tw zh zh_cn zh_tw
|
||||
OYAa IJBEJqXZJ
|
||||
|
||||
`.split("\n").filter((l) => (l.substring(0, 2) != "/\/")).join("\n").split(/\s+/g).forEach((word) => {
|
||||
word = word.trim();
|
||||
if (word === "") {
|
||||
return;
|
||||
}
|
||||
Words[word.toLowerCase()] = true;
|
||||
});
|
||||
function getStrings(source) {
|
||||
const sourceFile = typescript_1.default.createSourceFile("filename.ts", source, typescript_1.default.ScriptTarget.Latest);
|
||||
const result = [];
|
||||
function add(value, pos) {
|
||||
const lineNo = sourceFile.getLineAndCharacterOfPosition(pos).line + 1;
|
||||
result.push({ value, lineNo });
|
||||
}
|
||||
//let lastClass = null, lastEnum = null;
|
||||
function visit(node, depth) {
|
||||
switch (node.kind) {
|
||||
//case ts.SyntaxKind.TemplateExpression:
|
||||
// if (node.head) { visit(node.head); }
|
||||
// console.dir(node, { depth: null });
|
||||
// break;
|
||||
case typescript_1.default.SyntaxKind.TemplateHead:
|
||||
case typescript_1.default.SyntaxKind.TemplateMiddle:
|
||||
case typescript_1.default.SyntaxKind.TemplateTail:
|
||||
case typescript_1.default.SyntaxKind.StringLiteral:
|
||||
case typescript_1.default.SyntaxKind.NoSubstitutionTemplateLiteral:
|
||||
add(node.text, node.pos);
|
||||
break;
|
||||
}
|
||||
typescript_1.default.forEachChild(node, (node) => { return visit(node, depth + 1); });
|
||||
}
|
||||
visit(sourceFile, 0);
|
||||
return result;
|
||||
}
|
||||
const Include = new RegExp("packages/.*/src.ts/.*\.ts$");
|
||||
const Exclude = new RegExp("/node_modules/|src.ts/.*browser.*");
|
||||
function getAllStrings(path) {
|
||||
const Root = path_1.resolve(__dirname, path);
|
||||
const readdir = function (path) {
|
||||
if (path.match(Exclude)) {
|
||||
return [];
|
||||
}
|
||||
const stat = fs_1.default.statSync(path);
|
||||
if (stat.isDirectory()) {
|
||||
return fs_1.default.readdirSync(path).reduce((result, filename) => {
|
||||
readdir(path_1.resolve(path, filename)).forEach((file) => {
|
||||
result.push(file);
|
||||
});
|
||||
return result;
|
||||
}, []);
|
||||
}
|
||||
if (path.match(Include)) {
|
||||
const source = fs_1.default.readFileSync(path).toString();
|
||||
return [{ filename: path.substring(Root.length), values: getStrings(source) }];
|
||||
}
|
||||
return [];
|
||||
};
|
||||
return readdir(Root);
|
||||
}
|
||||
function checkWord(word) {
|
||||
word = word.toLowerCase();
|
||||
// A word
|
||||
if (Words[word]) {
|
||||
return true;
|
||||
}
|
||||
// Simple Plural
|
||||
if (word.match(/.*s$/) && Words[word.substring(0, word.length - 1)]) {
|
||||
return true;
|
||||
}
|
||||
// Hex string
|
||||
if (word.match(/^(0x)?[0-9a-f]*$/i)) {
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
}
|
||||
function starts(text, prefix) {
|
||||
return (text.substring(0, prefix.length) === prefix);
|
||||
}
|
||||
(function () {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
console.log(log_1.colorify.bold("Spell checking source code strings..."));
|
||||
let count = 0;
|
||||
getAllStrings(path_1.resolve(__dirname, "../../../../packages")).forEach((file) => {
|
||||
if (starts(file.filename, "/testcases/src.ts/generation-scripts")) {
|
||||
return;
|
||||
}
|
||||
if (starts(file.filename, "/asm/src.ts/opcodes.ts")) {
|
||||
return;
|
||||
}
|
||||
file.values.forEach((entry) => {
|
||||
function problem(word) {
|
||||
count++;
|
||||
console.log({
|
||||
filename: file.filename,
|
||||
word: JSON.stringify(word),
|
||||
sentence: JSON.stringify(entry.value.substring(0, 80)),
|
||||
line: entry.lineNo
|
||||
});
|
||||
}
|
||||
const value = entry.value.trim();
|
||||
// Emptry space
|
||||
if (value === "") {
|
||||
return;
|
||||
}
|
||||
// Prolly a require
|
||||
if (value.match(/^@ethersproject\/[a-z0-9-]+$/)) {
|
||||
return;
|
||||
}
|
||||
if (value.substring(0, 2) === "./") {
|
||||
return;
|
||||
}
|
||||
// Prolly encoded binary data
|
||||
if (value.indexOf(" ") === -1 && value.length > 20) {
|
||||
return;
|
||||
}
|
||||
if (checkWord(value)) {
|
||||
return;
|
||||
}
|
||||
value.replace(/([a-z+])([A-Z])/g, (all, first, secondLetter) => {
|
||||
return first + " " + secondLetter;
|
||||
}).replace(/((?:0x)?[A-Za-z]+)/gi, (all, word) => {
|
||||
if (checkWord(word)) {
|
||||
return "";
|
||||
}
|
||||
problem(word);
|
||||
return "";
|
||||
});
|
||||
;
|
||||
});
|
||||
});
|
||||
if (count) {
|
||||
console.log(`Found ${count} typos.`);
|
||||
process.exit(1);
|
||||
}
|
||||
process.exit(0);
|
||||
});
|
||||
})().catch((error) => {
|
||||
console.log(error);
|
||||
process.exit(1);
|
||||
});
|
||||
1
misc/admin/lib/cmds/update-changelog.d.ts
vendored
Normal file
1
misc/admin/lib/cmds/update-changelog.d.ts
vendored
Normal file
@@ -0,0 +1 @@
|
||||
export {};
|
||||
24
misc/admin/lib/cmds/update-changelog.js
Normal file
24
misc/admin/lib/cmds/update-changelog.js
Normal file
@@ -0,0 +1,24 @@
|
||||
"use strict";
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const fs_1 = __importDefault(require("fs"));
const changelog_1 = require("../changelog");
const log_1 = require("../log");
const path_1 = require("../path");
// Regenerate CHANGELOG.md from the generated changelog content.
(function () {
    return __awaiter(this, void 0, void 0, function* () {
        console.log(log_1.colorify.bold("Updating CHANGELOG.md..."));
        fs_1.default.writeFileSync(path_1.resolve("CHANGELOG.md"), yield changelog_1.generate());
    });
    // BUG FIX: every sibling cmd (update-depgraph, update-hashes, ...) reports
    // failures and exits non-zero; this script silently swallowed rejections
    // and exited 0 even when generate() or the write failed.
})().catch((error) => {
    console.log(error);
    process.exit(1);
});
|
||||
1
misc/admin/lib/cmds/update-depgraph.d.ts
vendored
Normal file
1
misc/admin/lib/cmds/update-depgraph.d.ts
vendored
Normal file
@@ -0,0 +1 @@
|
||||
export {};
|
||||
25
misc/admin/lib/cmds/update-depgraph.js
Normal file
25
misc/admin/lib/cmds/update-depgraph.js
Normal file
@@ -0,0 +1,25 @@
|
||||
"use strict";
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
Object.defineProperty(exports, "__esModule", { value: true });
const depgraph_1 = require("../depgraph");
const path_1 = require("../path");
const local_1 = require("../local");
// Recompute the package build order and write it into the project references
// of tsconfig.project.json.
(function () {
    return __awaiter(this, void 0, void 0, function* () {
        // getOrdered(true) skips no-build packages and returns dirnames in
        // dependency order.
        const buildOrder = depgraph_1.getOrdered(true);
        const references = buildOrder.map((name) => ({ path: ("./packages/" + name) }));
        local_1.updateJson(path_1.resolve("tsconfig.project.json"), { references: references });
    });
})().catch((error) => {
    console.log(error);
    process.exit(1);
});
|
||||
1
misc/admin/lib/cmds/update-exports.d.ts
vendored
Normal file
1
misc/admin/lib/cmds/update-exports.d.ts
vendored
Normal file
@@ -0,0 +1 @@
|
||||
export {};
|
||||
38
misc/admin/lib/cmds/update-exports.js
Normal file
38
misc/admin/lib/cmds/update-exports.js
Normal file
@@ -0,0 +1,38 @@
|
||||
"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const fs_1 = __importDefault(require("fs"));
const log_1 = require("../log");
const path_1 = require("../path");
// Pull the export list out of ethers.ts so index.ts can re-export it flat.
const sourceEthers = fs_1.default.readFileSync(path_1.resolve("packages/ethers/src.ts/ethers.ts")).toString();
// BUG FIX: match() returns null if the export block is ever missing, which
// previously surfaced as an opaque TypeError on [1]; fail with a clear error.
const exportMatch = sourceEthers.match(/export\s*{\s*((.|\s)*)}/);
if (!exportMatch) {
    throw new Error("update-exports: could not locate the export block in ethers.ts");
}
const targets = exportMatch[1].trim();
////////////////////
// Begin template
////////////////////
const output = `"use strict";

// To modify this file, you must update ./misc/admin/lib/cmds/update-exports.js

import * as ethers from "./ethers";

try {
    const anyGlobal = (window as any);

    if (anyGlobal._ethers == null) {
        anyGlobal._ethers = ethers;
    }
} catch (error) { }

export { ethers };

export {
    ${targets}
} from "./ethers";
`;
////////////////////
// End template
////////////////////
console.log(log_1.colorify.bold(`Flattening exports...`));
fs_1.default.writeFileSync(path_1.resolve("packages/ethers/src.ts/index.ts"), output);
|
||||
1
misc/admin/lib/cmds/update-hashes.d.ts
vendored
Normal file
1
misc/admin/lib/cmds/update-hashes.d.ts
vendored
Normal file
@@ -0,0 +1 @@
|
||||
export {};
|
||||
33
misc/admin/lib/cmds/update-hashes.js
Normal file
33
misc/admin/lib/cmds/update-hashes.js
Normal file
@@ -0,0 +1,33 @@
|
||||
"use strict";
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
Object.defineProperty(exports, "__esModule", { value: true });
//import { getGitTag } from "../git";
const local_1 = require("../local");
const log_1 = require("../log");
const path_1 = require("../path");
// Recompute and store the tarballHash in every package's package.json.
(function () {
    return __awaiter(this, void 0, void 0, function* () {
        const progress = log_1.getProgressBar(log_1.colorify.bold("Updating package.json hashes"));
        // Updating all tarball hashes now that versions have been updated
        let index = 0;
        for (const dirname of path_1.dirnames) {
            progress(index / path_1.dirnames.length);
            index++;
            //const gitHead = await getGitTag(resolve("packages", dirname));
            const tarballHash = local_1.computeTarballHash(dirname);
            local_1.updateJson(path_1.getPackageJsonPath(dirname), { tarballHash }, true);
        }
        progress(1);
    });
})().catch((error) => {
    console.log(error);
    process.exit(1);
});
|
||||
6
misc/admin/lib/config.d.ts
vendored
Normal file
6
misc/admin/lib/config.d.ts
vendored
Normal file
@@ -0,0 +1,6 @@
|
||||
// Facade over the encrypted key/value store in ~/.ethers-dist
// (implementation in ./config.js).
export declare const config: {
    // Resolve a stored value; prompts for the password on first use.
    get: (key: string) => Promise<string>;
    // Store a value and persist the re-encrypted file.
    // NOTE(review): the underlying set() is async but not awaited here.
    set: (key: string, value: string) => void;
    // List all stored keys (unlocks the store if needed).
    keys: () => Promise<string[]>;
    // Forget the derived key so subsequent access re-prompts for the password.
    lock: () => void;
};
|
||||
129
misc/admin/lib/config.js
Normal file
129
misc/admin/lib/config.js
Normal file
@@ -0,0 +1,129 @@
|
||||
"use strict";
|
||||
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
||||
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
||||
return new (P || (P = Promise))(function (resolve, reject) {
|
||||
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
|
||||
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
|
||||
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
|
||||
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
||||
});
|
||||
};
|
||||
var __importDefault = (this && this.__importDefault) || function (mod) {
|
||||
return (mod && mod.__esModule) ? mod : { "default": mod };
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const crypto_1 = require("crypto");
|
||||
const fs_1 = __importDefault(require("fs"));
|
||||
const os_1 = __importDefault(require("os"));
|
||||
const path_1 = require("path");
|
||||
const aes_js_1 = __importDefault(require("aes-js"));
|
||||
const scrypt_js_1 = __importDefault(require("scrypt-js"));
|
||||
const log_1 = require("./log");
|
||||
// Return `length` cryptographically secure random bytes as a Uint8Array.
function getRandomBytes(length) {
    const bytes = crypto_1.randomBytes(length);
    const result = new Uint8Array(length);
    result.set(bytes);
    return result;
}
|
||||
// HMAC-SHA-512 of `data` under `key`, rendered as a 0x-prefixed hex string.
function computeHmac(key, data) {
    const hmac = crypto_1.createHmac("sha512", key);
    hmac.update(data);
    return "0x" + hmac.digest("hex");
}
|
||||
// Derive a 64-byte key from password+salt via scrypt (N = 2^17, r = 8, p = 1),
// rendering a progress bar labelled `message` while it runs.
function getScrypt(message, password, salt) {
    return __awaiter(this, void 0, void 0, function* () {
        const progress = log_1.getProgressBar(message);
        return yield scrypt_js_1.default.scrypt(Buffer.from(password), Buffer.from(salt), (1 << 17), 8, 1, 64, progress);
    });
}
|
||||
// Encrypted JSON key/value store. On-disk layout:
//   { salt, iv, ciphertext, hmac, canary }
// The scrypt-derived 64-byte key is split: bytes 0-32 drive AES-CTR, and
// bytes 32-64 key an HMAC over the plaintext (doubling as a password check).
class Config {
    constructor(filename) {
        this.salt = null;        // scrypt salt (hex); minted on first load of a new store
        this.dkey = null;        // derived key; null until load() unlocks
        this.values = {};        // decrypted key/value payload
        this.canary = "";        // passthrough field preserved across saves
        this.filename = filename;
    }
    // Prompt for the password (once) and decrypt the store; no-op when already
    // unlocked. Throws "wrong password" when the HMAC does not match.
    load() {
        return __awaiter(this, void 0, void 0, function* () {
            if (this.dkey) {
                return;
            }
            let data = null;
            if (fs_1.default.existsSync(this.filename)) {
                data = JSON.parse(fs_1.default.readFileSync(this.filename).toString());
            }
            else {
                // First run: no file yet, so mint a fresh salt
                data = {
                    salt: Buffer.from(getRandomBytes(32)).toString("hex")
                };
            }
            this.canary = data.canary || "";
            this.salt = data.salt;
            const password = yield log_1.getPassword(log_1.colorify.bold("Password (config-store): "));
            this.dkey = yield getScrypt(log_1.colorify.bold("Unlocking config"), password, this.salt);
            if (data.ciphertext) {
                const ciphertext = Buffer.from(data.ciphertext, "base64");
                const iv = Buffer.from(data.iv, "base64");
                const aes = new aes_js_1.default.ModeOfOperation.ctr(this.dkey.slice(0, 32), new aes_js_1.default.Counter(iv));
                const plaintext = aes.decrypt(ciphertext);
                // HMAC over the plaintext verifies the password/integrity
                const hmac = computeHmac(this.dkey.slice(32, 64), plaintext);
                if (hmac !== data.hmac) {
                    console.log(log_1.colorify.red("Incorrect password."));
                    throw new Error("wrong password");
                }
                this.values = JSON.parse(Buffer.from(plaintext).toString());
            }
        });
    }
    // All stored keys (includes the _junk padding key once saved).
    keys() {
        return __awaiter(this, void 0, void 0, function* () {
            yield this.load();
            return Object.keys(this.values);
        });
    }
    // Re-encrypt and write the store; requires load() to have set this.dkey.
    save() {
        // Random-length junk masks the payload size from the ciphertext length
        this.values._junk = Buffer.from(getRandomBytes(16 + Math.floor(Math.random() * 48))).toString("base64");
        const plaintext = Buffer.from(JSON.stringify(this.values));
        const iv = Buffer.from(getRandomBytes(16));
        const hmac = computeHmac(this.dkey.slice(32, 64), plaintext);
        const aes = new aes_js_1.default.ModeOfOperation.ctr(this.dkey.slice(0, 32), new aes_js_1.default.Counter(iv));
        const ciphertext = Buffer.from(aes.encrypt(plaintext));
        const data = {
            ciphertext: ciphertext.toString("base64"),
            iv: iv.toString("base64"),
            salt: this.salt,
            hmac: hmac,
            canary: this.canary
        };
        fs_1.default.writeFileSync(this.filename, JSON.stringify(data, null, 2));
    }
    // Fetch one value (unlocking the store if necessary).
    get(key) {
        return __awaiter(this, void 0, void 0, function* () {
            yield this.load();
            return this.values[key];
        });
    }
    // Store one value and persist immediately.
    set(key, value) {
        return __awaiter(this, void 0, void 0, function* () {
            yield this.load();
            this.values[key] = value;
            this.save();
        });
    }
    // Drop the derived key (and salt) so the next access re-prompts.
    lock() {
        this.salt = this.dkey = null;
    }
}
|
||||
// Singleton store kept in the user's home directory.
const _config = new Config(path_1.resolve(os_1.default.homedir(), ".ethers-dist"));
// Thin facade over the singleton (shape matches config.d.ts).
exports.config = {
    get: function (key) {
        return _config.get(key);
    },
    // NOTE(review): fire-and-forget -- the async set() is not awaited, so
    // persistence errors surface as unhandled rejections.
    set: function (key, value) {
        _config.set(key, value);
    },
    keys: function () {
        return _config.keys();
    },
    lock: function () {
        _config.lock();
    }
};
|
||||
2
misc/admin/lib/depgraph.d.ts
vendored
Normal file
2
misc/admin/lib/depgraph.d.ts
vendored
Normal file
@@ -0,0 +1,2 @@
|
||||
export declare function getOrdered(skipNobuild?: boolean): Array<string>;
|
||||
export declare function sort(dirnames: Array<string>): void;
|
||||
101
misc/admin/lib/depgraph.js
Normal file
101
misc/admin/lib/depgraph.js
Normal file
@@ -0,0 +1,101 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const path_1 = require("./path");
|
||||
const local_1 = require("./local");
|
||||
// A set of string keys that can be read back in sorted order. The sorted
// key list is built lazily and invalidated whenever a key is added.
class OrderedSet {
    constructor() {
        this._keys = [];
        this._values = {};
    }
    // Insert a key (idempotent) and force a re-sort on the next read.
    add(key) {
        this._values[key] = true;
        this._keys = null;
    }
    contains(key) {
        return !!this._values[key];
    }
    // Rebuild the sorted key list if it has been invalidated.
    _sort() {
        if (this._keys != null) {
            return;
        }
        const keys = Object.keys(this._values);
        keys.sort();
        this._keys = keys;
    }
    get length() {
        this._sort();
        return this._keys.length;
    }
    // The index-th key in sorted order.
    get(index) {
        this._sort();
        return this._keys[index];
    }
}
|
||||
// Compute a build order over all managed packages so that every package
// appears after everything it depends on; throws on a dependency cycle.
// When skipNobuild is true, packages flagged _ethers_nobuild are omitted.
// Returns package *directory names* (not npm package names).
function getOrdered(skipNobuild) {
    const packages = {};   // package name => package.json info
    const filenames = {};  // package name => directory name
    // Maps packages to names to list of dependencies; { [ name:string]: Array<name: string> }
    const deps = {};
    let addDeps = (name, depends) => {
        Object.keys(depends).forEach((dep) => {
            // Not a package we manage
            if (packages[dep] == null) {
                return;
            }
            deps[name].add(dep);
        });
    };
    for (let i = 0; i < path_1.dirnames.length; i++) {
        let dirname = path_1.dirnames[i];
        let info = local_1.getPackage(dirname);
        if (skipNobuild && info._ethers_nobuild) {
            continue;
        }
        packages[info.name] = info;
        filenames[info.name] = dirname;
    }
    // Merge dependencies and devDependencies into one sorted set per package
    Object.keys(packages).forEach((name) => {
        let info = packages[name];
        deps[info.name] = new OrderedSet();
        addDeps(info.name, info.dependencies || {});
        addDeps(info.name, info.devDependencies || {});
    });
    let ordered = [];
    let remaining = Object.keys(deps);
    // A package is ready once all of its deps are already in `ordered`
    let isSatisfied = (name) => {
        for (let i = 0; i < deps[name].length; i++) {
            if (ordered.indexOf(deps[name].get(i)) === -1) {
                return false;
            }
        }
        return true;
    };
    // Repeatedly pull out the first satisfiable package; if a full pass
    // makes no progress, the graph has a cycle.
    while (remaining.length) {
        let bail = true;
        for (let i = 0; i < remaining.length; i++) {
            if (!isSatisfied(remaining[i])) {
                continue;
            }
            bail = false;
            ordered.push(remaining[i]);
            remaining.splice(i, 1);
            break;
        }
        if (bail) {
            throw new Error("Nothing processed; circular dependencies...");
        }
    }
    // Translate package names back to directory names
    return ordered.map((name) => filenames[name]);
}
exports.getOrdered = getOrdered;
|
||||
// Sort `dirnames` in place into dependency (build) order.
// BUG FIX: getOrdered() returns package *dirnames*, but this previously
// searched it for package *names* (local_1.getPackage(x).name), so any
// package whose npm name differs from its directory name (e.g.
// "@ethersproject/abi" vs "abi") was reported as unknown. Compare
// dirnames against dirnames instead.
function sort(dirnames) {
    const ordered = getOrdered();
    dirnames.sort((a, b) => {
        const ai = ordered.indexOf(a);
        const bi = ordered.indexOf(b);
        if (ai === -1 || bi === -1) {
            throw new Error("unknown dirname - " + [a, b].join(", "));
        }
        return ai - bi;
    });
}
exports.sort = sort;
|
||||
18
misc/admin/lib/geturl.d.ts
vendored
Normal file
18
misc/admin/lib/geturl.d.ts
vendored
Normal file
@@ -0,0 +1,18 @@
|
||||
// Shape of a completed HTTP(S) response as returned by getUrl().
export declare type GetUrlResponse = {
    statusCode: number;
    statusMessage: string;
    // Response headers; multi-valued headers are joined with ", ".
    headers: {
        [key: string]: string;
    };
    // Raw response body (all data chunks concatenated).
    body: Uint8Array;
};
// Optional request parameters for getUrl().
export declare type Options = {
    // HTTP method; defaults to "GET".
    method?: string;
    // Request body bytes, written before the request is ended.
    body?: Uint8Array;
    headers?: {
        [key: string]: string;
    };
    // When both user and password are set, sent as basic auth.
    user?: string;
    password?: string;
};
// Fetch href (http or https) with retries and a timeout; see ./geturl.js.
export declare function getUrl(href: string, options?: Options): Promise<GetUrlResponse>;
|
||||
128
misc/admin/lib/geturl.js
Normal file
128
misc/admin/lib/geturl.js
Normal file
@@ -0,0 +1,128 @@
|
||||
"use strict";
|
||||
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
||||
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
||||
return new (P || (P = Promise))(function (resolve, reject) {
|
||||
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
|
||||
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
|
||||
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
|
||||
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
||||
});
|
||||
};
|
||||
var __importDefault = (this && this.__importDefault) || function (mod) {
|
||||
return (mod && mod.__esModule) ? mod : { "default": mod };
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const http_1 = __importDefault(require("http"));
|
||||
const https_1 = __importDefault(require("https"));
|
||||
const url_1 = require("url");
|
||||
// Wrap a node http/https ClientRequest into a Promise of a response object,
// accumulating the body by growing a Uint8Array one chunk at a time.
function getResponse(request) {
    return new Promise((resolve, reject) => {
        request.once("response", (resp) => {
            const response = {
                statusCode: resp.statusCode,
                statusMessage: resp.statusMessage,
                // Flatten multi-valued headers into comma-joined strings
                headers: Object.keys(resp.headers).reduce((accum, name) => {
                    let value = resp.headers[name];
                    if (Array.isArray(value)) {
                        value = value.join(", ");
                    }
                    accum[name] = value;
                    return accum;
                }, {}),
                body: null // stays null when the response carries no data
            };
            //resp.setEncoding("utf8");
            resp.on("data", (chunk) => {
                if (response.body == null) {
                    response.body = new Uint8Array(0);
                }
                // Append the chunk by reallocating the accumulated buffer
                const body = new Uint8Array(response.body.length + chunk.length);
                body.set(response.body, 0);
                body.set(chunk, response.body.length);
                response.body = body;
            });
            resp.on("end", () => {
                resolve(response);
            });
            resp.on("error", (error) => {
                /* istanbul ignore next */
                // Attach what was collected so far for the caller's benefit
                error.response = response;
                reject(error);
            });
        });
        request.on("error", (error) => { reject(error); });
    });
}
|
||||
// URL.parse() represents absent components as null; normalize those to "".
function nonnull(value) {
    return (value == null) ? "" : value;
}
|
||||
// Resolve after `duration` ms; the timer is unref'd so a pending stall
// does not keep the process alive.
function staller(duration) {
    return new Promise((resolve) => {
        setTimeout(resolve, duration).unref();
    });
}
|
||||
// Perform a single HTTP(S) request and collect the full response.
function _getUrl(href, options) {
    return __awaiter(this, void 0, void 0, function* () {
        if (options == null) {
            options = {};
        }
        // @TODO: Once we drop support for node 8, we can pass the href
        //        directly into request and skip adding the components
        //        to this request object
        const url = url_1.parse(href);
        const request = {
            protocol: nonnull(url.protocol),
            hostname: nonnull(url.hostname),
            port: nonnull(url.port),
            path: (nonnull(url.pathname) + nonnull(url.search)),
            method: (options.method || "GET"),
            headers: (options.headers || {}),
        };
        // Basic auth only when both halves are provided
        if (options.user && options.password) {
            request.auth = `${options.user}:${options.password}`;
        }
        let req = null;
        switch (nonnull(url.protocol)) {
            case "http:":
                req = http_1.default.request(request);
                break;
            case "https:":
                req = https_1.default.request(request);
                break;
            default:
                /* istanbul ignore next */
                throw new Error(`unsupported protocol ${url.protocol}`);
        }
        // Write the body (if any) before ending the request
        if (options.body) {
            req.write(Buffer.from(options.body));
        }
        req.end();
        const response = yield getResponse(req);
        return response;
    });
}
|
||||
// Fetch `href`, making up to 3 attempts; each attempt races a 30 s timeout
// and failures pause 1 s before retrying. The last error is rethrown after
// all attempts fail.
function getUrl(href, options) {
    return __awaiter(this, void 0, void 0, function* () {
        let error = null;
        for (let i = 0; i < 3; i++) {
            try {
                const result = yield Promise.race([
                    _getUrl(href, options),
                    staller(30000).then((result) => { throw new Error("timeout"); })
                ]);
                return result;
            }
            catch (e) {
                error = e;
            }
            // FIX: only stall *between* attempts; previously the final failure
            // also waited a pointless extra second before throwing.
            if (i < 2) {
                yield staller(1000);
            }
        }
        throw error;
    });
}
exports.getUrl = getUrl;
|
||||
1
misc/admin/lib/git.d.ts
vendored
Normal file
1
misc/admin/lib/git.d.ts
vendored
Normal file
@@ -0,0 +1 @@
|
||||
export declare function getGitTag(filename: string): Promise<string>;
|
||||
32
misc/admin/lib/git.js
Normal file
32
misc/admin/lib/git.js
Normal file
@@ -0,0 +1,32 @@
|
||||
"use strict";
|
||||
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
||||
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
||||
return new (P || (P = Promise))(function (resolve, reject) {
|
||||
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
|
||||
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
|
||||
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
|
||||
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
||||
});
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const run_1 = require("./run");
|
||||
// Returns the most recent git commit hash for a given filename, or null
// when the file has no history or the output is not in the expected form.
// Throws when "git log" itself fails.
function getGitTag(filename) {
    return __awaiter(this, void 0, void 0, function* () {
        const result = yield run_1.run("git", ["log", "-n", "1", "--", filename]);
        if (!result.ok) {
            throw new Error(`git log error`);
        }
        let log = result.stdout.trim();
        if (!log) {
            return null;
        }
        // First line of "git log": "commit <40 hex digits>"
        const hashMatch = log.match(/^commit\s+([0-9a-f]{40})\n/i);
        if (!hashMatch) {
            return null;
        }
        return hashMatch[1];
    });
}
exports.getGitTag = getGitTag;
//getGitTag("/Users/ricmoo/Development/ethers/ethers.js/packages/abi").then(console.log);
|
||||
1
misc/admin/lib/github.d.ts
vendored
Normal file
1
misc/admin/lib/github.d.ts
vendored
Normal file
@@ -0,0 +1 @@
|
||||
export declare function createRelease(user: string, password: string, tagName: string, title: string, body: string, prerelease?: boolean, commit?: string): Promise<string>;
|
||||
35
misc/admin/lib/github.js
Normal file
35
misc/admin/lib/github.js
Normal file
@@ -0,0 +1,35 @@
|
||||
"use strict";
|
||||
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
||||
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
||||
return new (P || (P = Promise))(function (resolve, reject) {
|
||||
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
|
||||
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
|
||||
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
|
||||
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
||||
});
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const geturl_1 = require("./geturl");
|
||||
// Create a GitHub release on ethers-io/ethers.js via the REST API and return
// the new release's html_url. Credentials are sent as basic auth.
function createRelease(user, password, tagName, title, body, prerelease, commit) {
    return __awaiter(this, void 0, void 0, function* () {
        const result = yield geturl_1.getUrl("https:/\/api.github.com/repos/ethers-io/ethers.js/releases", {
            body: Buffer.from(JSON.stringify({
                tag_name: tagName,
                target_commitish: (commit || "master"),
                name: title,
                body: body,
                //draft: true,
                draft: false,
                prerelease: !!prerelease
            })),
            method: "POST",
            headers: {
                // GitHub's API requires a User-Agent header
                "User-Agent": "ethers-io"
            },
            user: user,
            password: password
        });
        return JSON.parse(Buffer.from(result.body).toString("utf8")).html_url;
    });
}
exports.createRelease = createRelease;
|
||||
19
misc/admin/lib/local.d.ts
vendored
Normal file
19
misc/admin/lib/local.d.ts
vendored
Normal file
@@ -0,0 +1,19 @@
|
||||
// Normalized view of a package.json, as produced by getPackage().
export declare type Package = {
    dependencies: {
        [name: string]: string;
    };
    devDependencies: {
        [name: string]: string;
    };
    // Recorded git commit hash for the package (or null when absent).
    gitHead: string;
    name: string;
    version: string;
    // Deterministic hash over the npm-pack file contents (or null).
    tarballHash: string;
    // Where the metadata came from; getPackage() in local.js returns "local".
    location: "remote" | "local";
    // True for packages flagged to be skipped by the build.
    _ethers_nobuild: boolean;
};
export declare function getPackage(name: string): Package;
// Merge `replace` into the JSON file at `path`; undefined values delete keys.
export declare function updateJson(path: string, replace: Record<string, any>, sort?: boolean): void;
// Dependencies for one package, or (with no name) aggregated across all
// packages with single-version validation.
export declare function getDependencies(name?: string, filter?: (name: string) => boolean): Record<string, string>;
// Relative paths "npm pack" would include for the package.
export declare function getPackList(name: string): Array<string>;
// Deterministic hash of the package's publishable content.
export declare function computeTarballHash(name: string): string;
137
misc/admin/lib/local.js
Normal file
137
misc/admin/lib/local.js
Normal file
@@ -0,0 +1,137 @@
|
||||
"use strict";
|
||||
var __importDefault = (this && this.__importDefault) || function (mod) {
|
||||
return (mod && mod.__esModule) ? mod : { "default": mod };
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const fs_1 = __importDefault(require("fs"));
|
||||
const path_1 = require("./path");
|
||||
const run_1 = require("./run");
|
||||
const utils_1 = require("./utils");
|
||||
// Load the package.json for `name` (a package dirname) into a normalized
// Package record.
function getPackage(name) {
    const value = utils_1.loadJson(path_1.getPackageJsonPath(name));
    return {
        name: value.name,
        version: value.version,
        dependencies: (value.dependencies || {}),
        // BUG FIX: this previously read value.dependencies a second time, so
        // the real devDependencies were never surfaced to callers.
        devDependencies: (value.devDependencies || {}),
        location: "local",
        tarballHash: (value.tarballHash || null),
        gitHead: (value.gitHead || null),
        _ethers_nobuild: !!value._ethers_nobuild,
    };
}
exports.getPackage = getPackage;
|
||||
// Merge `replace` into the JSON file at `path` and write it back; a value
// of undefined deletes its key. Keys are sorted on save when `sort` is set.
function updateJson(path, replace, sort) {
    const values = utils_1.loadJson(path);
    for (const [key, value] of Object.entries(replace)) {
        if (value === undefined) {
            delete values[key];
        }
        else {
            values[key] = value;
        }
    }
    utils_1.saveJson(path, values, !!sort);
}
exports.updateJson = updateJson;
|
||||
// For a single package, return its dependencies (sorted). With no name,
// aggregate dependencies across every managed package, requiring each
// dependency to appear at exactly one version; `filter` can restrict which
// dependency names are considered.
function getDependencies(name, filter) {
    if (name) {
        return utils_1.sortRecords(getPackage(name).dependencies);
    }
    // Find all versions for each package dependency
    const deps = path_1.dirnames.reduce((accum, dirname) => {
        const deps = getPackage(dirname).dependencies;
        Object.keys(deps).forEach((name) => {
            if (filter && !filter(name)) {
                return;
            }
            if (!accum[name]) {
                accum[name] = {};
            }
            // Record each distinct version string seen for this dependency
            accum[name][deps[name]] = true;
        });
        return accum;
    }, {});
    // Make sure each package dependency only has 1 version
    return utils_1.sortRecords(Object.keys(deps).reduce((accum, name) => {
        const versions = Object.keys(deps[name]);
        if (versions.length > 1) {
            throw new Error(`cannot depend on multiple versions for ${JSON.stringify(name)}: ${versions.map(v => JSON.stringify(v)).join(", ")}`);
        }
        accum[name] = versions[0];
        return accum;
    }, {}));
}
exports.getDependencies = getDependencies;
|
||||
// Ask npm (dry-run pack) which files would be included in the package's
// tarball; returns their relative paths.
function getPackList(name) {
    const result = run_1.run("npm", ["pack", "--json", path_1.getPackagePath(name), "--dry-run"]);
    if (!result.ok) {
        const error = new Error(`failed to run npm pack: ${name}`);
        // Attach the raw run result for diagnostics
        error.result = result;
        throw error;
    }
    return JSON.parse(result.stdout)[0].files.map((info) => info.path);
}
exports.getPackList = getPackList;
|
||||
/*
|
||||
export function getTarball(name: string): Buffer {
|
||||
const files = getPackList(name).map((name) => `./${ name }`);
|
||||
files.sort((a, b) => {
|
||||
|
||||
const compsA = a.split("/"), compsB = b.split("/");
|
||||
while (true) {
|
||||
const a = compsA.shift(), b = compsB.shift();
|
||||
if (a === b) { continue; }
|
||||
|
||||
if (compsA.length === 0 && compsB.length === 0) {
|
||||
if (a < b) { return -1; }
|
||||
if (a > b) { return 1; }
|
||||
break;
|
||||
}
|
||||
|
||||
if (compsA.length === 0) { return -1; }
|
||||
if (compsB.length === 0) { return 1; }
|
||||
|
||||
if (a < b) { return -1; }
|
||||
if (a > b) { return 1; }
|
||||
}
|
||||
|
||||
return 0;
|
||||
});
|
||||
|
||||
return tar.create({
|
||||
sync: true,
|
||||
cwd: getPackagePath(name),
|
||||
prefix: "package/",
|
||||
gzip: true,
|
||||
portable: true,
|
||||
// Provide a specific date in the 1980s for the benefit of zip,
|
||||
// which is confounded by files dated at the Unix epoch 0.
|
||||
mtime: new Date('1985-10-26T08:15:00.000Z'),
|
||||
}, files).read();
|
||||
}
|
||||
*/
|
||||
// Deterministic hash of a package's publishable content: sha256 over a
// JSON-like map of { filename: sha256(contents) } built from the npm pack
// file list.
function computeTarballHash(name) {
    // Sort the files to get a consistent hash
    const files = getPackList(name);
    files.sort();
    // Compute the hash for each file
    const packageRoot = path_1.getPackagePath(name);
    const hashes = files.reduce((accum, filename) => {
        let content = fs_1.default.readFileSync(path_1.resolve(packageRoot, filename));
        // The package.json includes the hash, so we need to nix it to get a consistent hash
        if (filename === "package.json") {
            const info = JSON.parse(content.toString());
            delete info.gitHead;
            delete info.tarballHash;
            content = Buffer.from(JSON.stringify(info, null, 2));
        }
        accum[filename] = utils_1.sha256(content);
        return accum;
    }, {});
    // Hash the map in sorted-filename order for determinism
    return utils_1.sha256(Buffer.from("{" + files.map((filename) => {
        return `${JSON.stringify(filename)}:"${hashes[filename]}"`;
    }).join(",") + "}"));
}
exports.computeTarballHash = computeTarballHash;
|
||||
12
misc/admin/lib/log.d.ts
vendored
Normal file
12
misc/admin/lib/log.d.ts
vendored
Normal file
@@ -0,0 +1,12 @@
|
||||
export declare function getProgressBar(action: string): (percent: number) => void;
|
||||
export declare type ColorifyFunc = (text: string) => string;
|
||||
export declare const colorify: {
|
||||
[format: string]: ColorifyFunc;
|
||||
};
|
||||
export declare type PromptOptions = {
|
||||
choice?: Array<string>;
|
||||
defaultChoice?: string;
|
||||
mask?: string;
|
||||
};
|
||||
export declare function getPrompt(prompt: string, options?: PromptOptions): Promise<string>;
|
||||
export declare function getPassword(prompt: string): Promise<string>;
|
||||
149
misc/admin/lib/log.js
Normal file
149
misc/admin/lib/log.js
Normal file
@@ -0,0 +1,149 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const utils_1 = require("./utils");
|
||||
// See: https://stackoverflow.com/questions/9781218/how-to-change-node-jss-console-font-color
|
||||
let disableColor = !(process.stdout.isTTY);
|
||||
// Build a progress callback: pass a fraction in [0, 1] and it repaints
// "<action>... NN%" in place on stdout. Without a TTY it prints the action
// once and otherwise stays quiet.
function getProgressBar(action) {
    let lastProgress = -1;
    return function (percent) {
        const progress = Math.trunc(percent * 100);
        if (disableColor) {
            // Non-TTY: announce once, then swallow updates
            if (lastProgress === -1) {
                console.log(action + "...");
            }
            lastProgress = progress;
            return;
        }
        // NOTE(review): assumes stdin is a TTY (setRawMode exists); with a
        // piped stdin but TTY stdout this would throw -- confirm intended.
        process.stdin.setRawMode(false);
        process.stdin.pause();
        // Skip repaints when the integer percentage has not changed
        if (progress === lastProgress || lastProgress === 100) {
            return;
        }
        lastProgress = progress;
        (process.stdout).clearLine();
        (process.stdout).cursorTo(0);
        process.stdout.write(action + "... " + progress + "%");
        if (percent === 1) {
            process.stdout.write('\n');
        }
        return;
    };
}
exports.getProgressBar = getProgressBar;
|
||||
const colorSequences = {
|
||||
blue: "\x1b[34m",
|
||||
cyan: "\x1b[36m",
|
||||
green: "\x1b[32m",
|
||||
magenta: "\x1b[35m",
|
||||
red: "\x1b[31m",
|
||||
yellow: "\x1b[33m",
|
||||
bold: ""
|
||||
};
|
||||
function getColor(color) {
|
||||
if (!color || color === "normal") {
|
||||
return "\x1b[0m";
|
||||
}
|
||||
return "\x1b[1m" + colorSequences[color];
|
||||
}
|
||||
function _colorify(format) {
|
||||
return function (text) {
|
||||
if (disableColor) {
|
||||
return text;
|
||||
}
|
||||
return getColor(format) + text.replace(/[^ -~]+/g, "") + getColor();
|
||||
};
|
||||
}
|
||||
exports.colorify = Object.freeze({
|
||||
bold: _colorify("bold"),
|
||||
blue: _colorify("blue"),
|
||||
green: _colorify("green"),
|
||||
red: _colorify("red"),
|
||||
});
|
||||
// Low-level prompt driver: writes `prompt`, switches stdin into raw mode and
// consumes keystrokes until enter/Ctrl-C (or, with options.choice set, until
// one of the choice characters is pressed). Invokes callback(ctrlC, message)
// exactly once and restores stdin before doing so.
function _getPrompt(prompt, options, callback) {
    process.stdout.write(prompt);
    let stdin = process.stdin;
    stdin.resume();
    stdin.setRawMode(true);
    stdin.resume();
    stdin.setEncoding('utf8');
    // Accumulated user input (not used when options.choice is set)
    let message = '';
    // Tears down raw mode and hands the result to the caller
    let respond = (ctrlC, message) => {
        process.stdout.write('\n');
        stdin.setRawMode(false);
        stdin.pause();
        stdin.removeListener('data', handler);
        callback(ctrlC, message);
    };
    function handler(chr) {
        chr = String(chr);
        switch (chr) {
            // Enter (ish)
            case "\n":
            case "\r":
            case "\u0004":
                // NOTE(review): with options.choice set but no defaultChoice,
                // enter is silently ignored — presumably intentional (the user
                // must press one of the choice keys); confirm against callers.
                if (options.choice) {
                    if (options.defaultChoice) {
                        respond(null, options.defaultChoice);
                    }
                }
                else {
                    respond(null, message);
                }
                break;
            // Backspace
            case "\u007f":
                if (message.length > 0 && options.choice == null) {
                    message = message.substring(0, message.length - 1);
                    // Redraw the whole line (prompt + masked or plain input)
                    (process.stdout).clearLine();
                    (process.stdout).cursorTo(0);
                    if (options.mask) {
                        process.stdout.write(prompt + utils_1.repeat(options.mask, message.length));
                    }
                    else {
                        process.stdout.write(prompt + message);
                    }
                }
                break;
            // Ctrl-C
            case "\u0003":
                process.stdout.write('\n[ CTRL-C ]');
                respond(true, null);
                break;
            // Any other character
            default:
                if (options.choice) {
                    // Only accept characters from the choice set
                    if (options.choice.indexOf(chr) >= 0) {
                        process.stdout.write(chr);
                        respond(null, chr);
                    }
                }
                else {
                    // More password characters
                    if (options.mask) {
                        process.stdout.write('*');
                    }
                    else {
                        process.stdout.write(chr);
                    }
                    message += chr;
                }
                break;
        }
    }
    stdin.on('data', handler);
}
|
||||
// Promise wrapper around _getPrompt; rejects with "cancelled" on Ctrl-C.
function getPrompt(prompt, options) {
    return new Promise((resolve, reject) => {
        const opts = (options || {});
        _getPrompt(prompt, opts, (ctrlC, password) => {
            if (ctrlC) {
                reject(new Error("cancelled"));
            } else {
                resolve(password);
            }
        });
    });
}
exports.getPrompt = getPrompt;
// Prompts for a password, echoing "*" for each typed character.
function getPassword(prompt) {
    return getPrompt(prompt, { mask: "*" });
}
exports.getPassword = getPassword;
|
||||
4
misc/admin/lib/npm.d.ts
vendored
Normal file
4
misc/admin/lib/npm.d.ts
vendored
Normal file
@@ -0,0 +1,4 @@
|
||||
import { Options } from "libnpmpublish";
|
||||
import { Package } from "./local";
|
||||
export declare function getPackage(name: string, version?: string): Promise<Package>;
|
||||
export declare function publish(path: string, manifest: any, options: Options): Promise<void>;
|
||||
82
misc/admin/lib/npm.js
Normal file
82
misc/admin/lib/npm.js
Normal file
@@ -0,0 +1,82 @@
|
||||
"use strict";
|
||||
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
||||
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
||||
return new (P || (P = Promise))(function (resolve, reject) {
|
||||
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
|
||||
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
|
||||
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
|
||||
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
||||
});
|
||||
};
|
||||
var __importDefault = (this && this.__importDefault) || function (mod) {
|
||||
return (mod && mod.__esModule) ? mod : { "default": mod };
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const libnpmpublish_1 = require("libnpmpublish");
|
||||
const semver_1 = __importDefault(require("semver"));
|
||||
const geturl_1 = require("./geturl");
|
||||
const local_1 = require("./local");
|
||||
const log_1 = require("./log");
|
||||
const cache = {};
|
||||
// Fetches (and caches) the raw npm registry metadata for a package, or
// returns null when the registry responds 404.
async function getPackageInfo(name) {
    // Convert dirname to package if needed
    name = local_1.getPackage(name).name;
    if (!cache[name]) {
        try {
            const result = await geturl_1.getUrl("http:/" + "/registry.npmjs.org/" + name);
            cache[name] = JSON.parse(Buffer.from(result.body).toString("utf8"));
        } catch (error) {
            if (error.status === 404) {
                return null;
            }
            throw error;
        }
    }
    return cache[name] || null;
}
|
||||
// Resolves a package from the npm registry (the highest published version,
// by semver order, when `version` is omitted) and normalizes it to the
// local Package shape. Returns null when the package — or the requested
// version — does not exist.
function getPackage(name, version) {
    return __awaiter(this, void 0, void 0, function* () {
        const infos = yield getPackageInfo(name);
        if (infos == null) {
            return null;
        }
        if (version == null) {
            // Pick the highest version by semver ordering
            const versions = Object.keys(infos.versions);
            versions.sort(semver_1.default.compare);
            version = versions.pop();
        }
        const info = infos.versions[version];
        // Fix: an unknown version previously crashed below with a TypeError
        // (reading properties of undefined); treat it like an unknown package.
        if (info == null) {
            return null;
        }
        return {
            dependencies: (info.dependencies || {}),
            devDependencies: (info.devDependencies || {}),
            gitHead: info.gitHead,
            location: "remote",
            name: info.name,
            tarballHash: info.tarballHash,
            version: info.version,
            _ethers_nobuild: !!info._ethers_nobuild,
        };
    });
}
exports.getPackage = getPackage;
|
||||
// Publishes the tarball at `path` with the given manifest. If npm demands
// a one-time password (EOTP), prompts the user and retries with it.
async function publish(path, manifest, options) {
    try {
        await libnpmpublish_1.publish(path, manifest, options);
    }
    catch (error) {
        if (error.code !== "EOTP") {
            throw error;
        }
        // We need an OTP; ask for one and retry with it
        // (note: replace removes only the first space from the entered OTP)
        const otp = await log_1.getPrompt(log_1.colorify.bold("Enter OTP: "));
        options.otp = otp.replace(" ", "");
        return await publish(path, manifest, options);
    }
}
exports.publish = publish;
|
||||
13
misc/admin/lib/path.d.ts
vendored
Normal file
13
misc/admin/lib/path.d.ts
vendored
Normal file
@@ -0,0 +1,13 @@
|
||||
/** Absolute path of the repository root. */
export declare const root: string;
/** Resolves a path relative to the repository root. */
export declare function resolve(...args: Array<string>): string;
/** Well-known repository locations. */
export declare const dirs: Readonly<{
    rootPackageJsonPath: string;
    packages: string;
    root: string;
}>;
/** Every package directory name under packages/ (dot-directories excluded). */
export declare const dirnames: ReadonlyArray<string>;
/** The npm package name of each directory in `dirnames`, in the same order. */
export declare const packages: ReadonlyArray<string>;
/** Directory path for a package (by npm name or dirname); throws if unknown. */
export declare function getPackagePath(name: string): string;
/** Directory name for a package (by npm name or dirname); throws if unknown. */
export declare function getDirname(name: string): string;
/** package.json path for a package (by npm name or dirname); throws if unknown. */
export declare function getPackageJsonPath(name: string): string;
/** True when `name` refers to a package in this monorepo. */
export declare function isEthers(name: string): boolean;
||||
58
misc/admin/lib/path.js
Normal file
58
misc/admin/lib/path.js
Normal file
@@ -0,0 +1,58 @@
|
||||
"use strict";
|
||||
var __importDefault = (this && this.__importDefault) || function (mod) {
|
||||
return (mod && mod.__esModule) ? mod : { "default": mod };
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const fs_1 = __importDefault(require("fs"));
|
||||
const path_1 = require("path");
|
||||
exports.root = path_1.resolve(__dirname, "../../../");
|
||||
function resolve(...args) {
|
||||
args.unshift(exports.root);
|
||||
return path_1.resolve.apply(null, args);
|
||||
}
|
||||
exports.resolve = resolve;
|
||||
const pathRootPackageJsonPath = resolve("package.json");
|
||||
const pathPackages = resolve("packages");
|
||||
exports.dirs = Object.freeze({
|
||||
rootPackageJsonPath: pathRootPackageJsonPath,
|
||||
packages: pathPackages,
|
||||
root: exports.root,
|
||||
});
|
||||
exports.dirnames = Object.freeze(fs_1.default.readdirSync(exports.dirs.packages).filter((dirname) => {
|
||||
return (dirname[0] !== ".");
|
||||
}));
|
||||
// Index of package records keyed by BOTH npm package name and directory
// name (both keys point at the same record); built once at module load.
const packageLookup = {};
exports.dirnames.forEach((dirname) => {
    const packagePath = path_1.resolve(exports.dirs.packages, dirname);
    const packageJsonPath = path_1.resolve(packagePath, "package.json");
    const info = JSON.parse(fs_1.default.readFileSync(packageJsonPath).toString());
    const packageName = info.name;
    const version = info.version;
    const record = {
        dirname, packageName, packagePath, packageJsonPath, version
    };
    packageLookup[packageName] = record;
    packageLookup[dirname] = record;
});
// npm package name of each directory, in dirnames order.
exports.packages = Object.freeze(exports.dirnames.map((dirname) => packageLookup[dirname].packageName));
||||
// Looks up a package record by npm name or directory name; throws if unknown.
function getPackageInfo(name) {
    const info = packageLookup[name];
    if (info == null) {
        throw new Error(`unknown package: ${name}`);
    }
    return info;
}
// Directory path for a package.
function getPackagePath(name) {
    return getPackageInfo(name).packagePath;
}
exports.getPackagePath = getPackagePath;
// Directory name for a package.
function getDirname(name) {
    return getPackageInfo(name).dirname;
}
exports.getDirname = getDirname;
// package.json path for a package.
function getPackageJsonPath(name) {
    return getPackageInfo(name).packageJsonPath;
}
exports.getPackageJsonPath = getPackageJsonPath;
// True when `name` refers to a package in this monorepo.
function isEthers(name) {
    return (packageLookup[name] != null);
}
exports.isEthers = isEthers;
|
||||
10
misc/admin/lib/run.d.ts
vendored
Normal file
10
misc/admin/lib/run.d.ts
vendored
Normal file
@@ -0,0 +1,10 @@
|
||||
/// <reference types="node" />
|
||||
export declare type RunResult = {
|
||||
stderr: string | null;
|
||||
_stderr: string | Buffer;
|
||||
stdout: string;
|
||||
_stdout: string | Buffer;
|
||||
status: number;
|
||||
ok: boolean;
|
||||
};
|
||||
export declare function run(progname: string, args?: Array<string>, currentWorkingDirectory?: string): RunResult;
|
||||
64
misc/admin/lib/run.js
Normal file
64
misc/admin/lib/run.js
Normal file
@@ -0,0 +1,64 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const child_process_1 = require("child_process");
|
||||
function run(progname, args, currentWorkingDirectory) {
|
||||
if (args == null) {
|
||||
args = [];
|
||||
}
|
||||
const options = {};
|
||||
if (currentWorkingDirectory) {
|
||||
options.cwd = currentWorkingDirectory;
|
||||
}
|
||||
const child = child_process_1.spawnSync(progname, args, options);
|
||||
const result = {
|
||||
_stderr: child.stderr,
|
||||
stderr: (child.stderr.toString() || null),
|
||||
_stdout: child.stdout,
|
||||
stdout: child.stdout.toString(),
|
||||
status: child.status,
|
||||
ok: (child.stderr.length === 0 && child.status === 0)
|
||||
};
|
||||
if (child.error) {
|
||||
(child.error).result = result;
|
||||
throw child.error;
|
||||
}
|
||||
return result;
|
||||
/*
|
||||
const result: RunResult = {
|
||||
stderr: null,
|
||||
_stderr: Buffer.from([]),
|
||||
stdout: null,
|
||||
_stdout: Buffer.from([]),
|
||||
status: null,
|
||||
ok: false,
|
||||
};
|
||||
|
||||
proc.stderr.on("data", (data) => {
|
||||
result._stderr = Buffer.concat([ result._stderr, data ]);
|
||||
});
|
||||
|
||||
proc.stdout.on("data", (data) => {
|
||||
result._stdout = Buffer.concat([ result._stdout, data ]);
|
||||
});
|
||||
|
||||
proc.on("error", (error) => {
|
||||
result.stderr = result._stderr.toString("utf8");
|
||||
result.stdout = result._stdout.toString("utf8");
|
||||
(<any>error).result = result;
|
||||
|
||||
console.log("Error:", error);
|
||||
|
||||
reject(error);
|
||||
});
|
||||
|
||||
proc.on("close", (code) => {
|
||||
result.stderr = result._stderr.toString("utf8");
|
||||
result.stdout = result._stdout.toString("utf8");
|
||||
result.status = code;
|
||||
result.ok = (result._stderr.length === 0 && code === 0);
|
||||
resolve(result);
|
||||
});
|
||||
});
|
||||
*/
|
||||
}
|
||||
exports.run = run;
|
||||
10
misc/admin/lib/utils.d.ts
vendored
Normal file
10
misc/admin/lib/utils.d.ts
vendored
Normal file
@@ -0,0 +1,10 @@
|
||||
/// <reference types="node" />
|
||||
export declare function repeat(char: string, length: number): string;
|
||||
export declare function sha256(content: Buffer): string;
|
||||
export declare function sortRecords(record: Record<string, any>): Record<string, any>;
|
||||
export declare function atomicWrite(path: string, value: string | Uint8Array): void;
|
||||
export declare function loadJson(path: string): any;
|
||||
export declare function saveJson(filename: string, data: any, sort?: boolean): any;
|
||||
export declare function resolveProperties(props: Record<string, Promise<any>>): Promise<Record<string, any>>;
|
||||
export declare function mkdir(path: string): void;
|
||||
export declare function getDateTime(date: Date): string;
|
||||
125
misc/admin/lib/utils.js
Normal file
125
misc/admin/lib/utils.js
Normal file
@@ -0,0 +1,125 @@
|
||||
"use strict";
|
||||
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
||||
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
||||
return new (P || (P = Promise))(function (resolve, reject) {
|
||||
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
|
||||
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
|
||||
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
|
||||
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
||||
});
|
||||
};
|
||||
var __importDefault = (this && this.__importDefault) || function (mod) {
|
||||
return (mod && mod.__esModule) ? mod : { "default": mod };
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const fs_1 = __importDefault(require("fs"));
|
||||
const path_1 = require("path");
|
||||
const crypto_1 = require("crypto");
|
||||
// Repeats `char` until the result is exactly `length` characters long.
// An empty `char` would never terminate the doubling loop, so bail early.
function repeat(char, length) {
    if (char.length === 0) {
        return "";
    }
    // Double the string until it is long enough, then trim to size
    let result = char;
    while (result.length < length) {
        result += result;
    }
    return result.substring(0, length);
}
exports.repeat = repeat;
|
||||
// Computes the SHA-256 digest of `content` as a 0x-prefixed hex string.
function sha256(content) {
    return "0x" + crypto_1.createHash("sha256").update(content).digest("hex");
}
exports.sha256 = sha256;
|
||||
// Returns a shallow copy of `record` whose keys were inserted in sorted
// order (so serialization is deterministic).
function sortRecords(record) {
    const result = {};
    Object.keys(record).sort().forEach((key) => {
        result[key] = record[key];
    });
    return result;
}
exports.sortRecords = sortRecords;
|
||||
// Writes `value` via a temporary file + rename, so readers never observe
// a partially-written file.
function atomicWrite(path, value) {
    const tempPath = path_1.resolve(__dirname, "../../../.atomic-tmp");
    fs_1.default.writeFileSync(tempPath, value);
    fs_1.default.renameSync(tempPath, path);
}
exports.atomicWrite = atomicWrite;
// Reads and parses a JSON file.
function loadJson(path) {
    const content = fs_1.default.readFileSync(path).toString();
    return JSON.parse(content);
}
exports.loadJson = loadJson;
// Serializes `data` (2-space indent, trailing newline) and writes it
// atomically; when `sort` is set, plain-object keys are emitted sorted
// (arrays are left in their original order).
function saveJson(filename, data, sort) {
    let replacer = undefined;
    if (sort) {
        replacer = (key, value) => {
            if (!Array.isArray(value) && value && typeof (value) === "object") {
                const sorted = {};
                Object.keys(value).sort().forEach((k) => {
                    sorted[k] = value[k];
                });
                return sorted;
            }
            return value;
        };
    }
    atomicWrite(filename, JSON.stringify(data, replacer, 2) + "\n");
}
exports.saveJson = saveJson;
|
||||
// Awaits every value of `props` in parallel and returns an object with the
// same keys mapped to the resolved values.
async function resolveProperties(props) {
    const keys = Object.keys(props);
    const values = await Promise.all(keys.map((key) => props[key]));
    const result = {};
    keys.forEach((key, index) => {
        result[key] = values[index];
    });
    return result;
}
exports.resolveProperties = resolveProperties;
|
||||
// Node 8 does not support recursive mkdir... Remove this in v6.
// Creates `path` and any missing ancestor directories.
function mkdir(path) {
    // Walk up from `path`, collecting the directories that do not exist yet
    // (deepest first); the guard protects against a runaway loop.
    const missing = [];
    let guard = 0;
    while (path !== "/") {
        if (guard++ > 50) {
            throw new Error("something bad happened...");
        }
        if (fs_1.default.existsSync(path)) {
            break;
        }
        missing.push(path);
        path = path_1.dirname(path);
    }
    // Create them shallowest-first
    while (missing.length) {
        fs_1.default.mkdirSync(missing.pop());
    }
}
exports.mkdir = mkdir;
|
||||
// Left-pads `value` with zeros to `length` characters (default 2).
function zpad(value, length) {
    if (length == null) {
        length = 2;
    }
    const str = String(value);
    return repeat("0", length - str.length) + str;
}
// Formats a Date as "YYYY-MM-DD" (getMonth() is zero-based, hence the +1).
function getDate(date) {
    return [
        date.getFullYear(),
        zpad(date.getMonth() + 1),
        zpad(date.getDate())
    ].join("-");
}
// Formats a Date as "YYYY-MM-DD HH:MM".
function getDateTime(date) {
    // Fix: the minutes were previously off by one (getMinutes() + 1);
    // only the zero-based month needs that adjustment.
    return getDate(date) + " " + [
        zpad(date.getHours()),
        zpad(date.getMinutes())
    ].join(":");
}
exports.getDateTime = getDateTime;
|
||||
47
misc/admin/src.ts/build.ts
Normal file
47
misc/admin/src.ts/build.ts
Normal file
@@ -0,0 +1,47 @@
|
||||
import { dirnames, getPackageJsonPath, resolve } from "./path";
|
||||
import { loadJson, saveJson } from "./utils";
|
||||
|
||||
// Rewrites the shared tsconfig and every package's build configuration for
// the given module/target combination (used to switch between the CommonJS
// and ESM builds).
function setupConfig(outDir: string, moduleType: string, targetType: string) {

    // Configure the tsconfig.package.json...
    const path = resolve("tsconfig.package.json");
    const content = loadJson(path);
    content.compilerOptions.module = moduleType;
    content.compilerOptions.target = targetType;
    saveJson(path, content, true);

    // Configure the browser field for every package, copying the
    // browser.umd field for UMD and browser.esm for ESM
    dirnames.forEach((dirname) => {
        const filename = getPackageJsonPath(dirname)
        const info = loadJson(filename);

        // Packages flagged as no-build are left untouched
        if (info._ethers_nobuild) { return; }

        if (targetType === "es2015") {
            if (info["browser.esm"]) {
                info.browser = info["browser.esm"];
            }
        } else if (targetType === "es5") {
            if (info["browser.umd"]) {
                info.browser = info["browser.umd"];
            }
        } else {
            throw new Error("unsupported target");
        }
        saveJson(filename, info, true);

        // Point the package's own tsconfig at the requested output directory
        let path = resolve("packages", dirname, "tsconfig.json");
        let content = loadJson(path);
        content.compilerOptions.outDir = outDir;
        saveJson(path, content, true);
    });
}
||||
|
||||
export function setupBuild(buildModule: boolean): void {
|
||||
if (buildModule) {
|
||||
setupConfig("./lib.esm/", "es2015", "es2015");
|
||||
} else {
|
||||
setupConfig("./lib/", "commonjs", "es5");
|
||||
}
|
||||
}
|
||||
122
misc/admin/src.ts/changelog.ts
Normal file
122
misc/admin/src.ts/changelog.ts
Normal file
@@ -0,0 +1,122 @@
|
||||
import fs from "fs";
|
||||
|
||||
import * as local from "./local";
|
||||
import { colorify } from "./log";
|
||||
import * as npm from "./npm";
|
||||
import { resolve } from "./path";
|
||||
import { run } from "./run";
|
||||
import { getDateTime, repeat } from "./utils";
|
||||
|
||||
const changelogPath = resolve("CHANGELOG.md");
|
||||
|
||||
export type Change = {
|
||||
title: string;
|
||||
version: string;
|
||||
date: string;
|
||||
content: string;
|
||||
};
|
||||
|
||||
// Regenerates CHANGELOG.md content: collects the git log since the last
// published gitHead, formats each commit as a bullet (linking issues and
// commits), and splices a new version section above the existing entries.
// Returns the complete new changelog text; does not write the file.
export async function generate(): Promise<string> {
    const lines = fs.readFileSync(changelogPath).toString().trim().split("\n");

    // Collect all versions already present in the changelog; as a side
    // effect, firstLine becomes the index of the first version heading
    // (everything above it is preamble).
    let firstLine: number = null;
    const versions: Array<string> = Object.keys(lines.reduce((accum, line, index) => {
        const match = line.match(/^ethers\/v([^ ]*)/);
        if (match) {
            if (firstLine == null) { firstLine = index; }
            accum[match[1]] = true;
        }
        return accum;
    }, <Record<string, boolean>>{ }));

    const version = local.getPackage("ethers").version;;
    // NOTE(review): npm.getPackage may return null for a never-published
    // package, which would make published.gitHead below throw — confirm
    // this tool is only run against published packages.
    const published = await npm.getPackage("ethers");

    // Warn (but continue) when the local version already has an entry
    if (versions.indexOf(version) >= 0) {
        const line = `Version ${ version } already in CHANGELOG. Please edit before committing.`;
        console.log(colorify.red(repeat("=", line.length)));
        console.log(colorify.red(line));
        console.log(colorify.red(repeat("=", line.length)));
    }

    const gitResult = await run("git", [ "log", (published.gitHead + "..") ]);
    if (!gitResult.ok) {
        console.log(gitResult);
        throw new Error("Error running git log");
    }

    // Parse the default `git log` format: "commit <hash>", "Date: ..." and
    // indented message lines are folded into one body per commit.
    let changes: Array<{ body: string, commit: string, date: string }> = [ ];
    gitResult.stdout.split("\n").forEach((line) => {
        if (line.toLowerCase().substring(0, 6) === "commit") {
            changes.push({
                commit: line.substring(6).trim(),
                date: null,
                body: ""
            });
        } else if (line.toLowerCase().substring(0, 5) === "date:") {
            changes[changes.length - 1].date = getDateTime(new Date(line.substring(5).trim()));
        } else if (line.substring(0, 1) === " ") {
            line = line.trim();
            if (line === "") { return; }
            changes[changes.length - 1].body += line + " ";
        }
    });

    // Keep the preamble above the first version heading
    const output: Array<string> = [ ];
    for (let i = 0; i < firstLine; i++) {
        output.push(lines[i]);
    }

    // New section heading with its underline
    const newTitle = `ethers/v${ version } (${ getDateTime(new Date()) })`;
    output.push(newTitle);
    output.push(repeat("-", newTitle.length));
    output.push("");

    changes.forEach((change) => {
        let body = change.body.trim();
        // An "(#123)"-style reference in the message becomes issue links;
        // the commit hash link is always appended.
        let linkMatch = body.match(/(\((.*#.*)\))/)
        let commit = `[${ change.commit.substring(0, 7) }](https://github.com/ethers-io/ethers.js/commit/${ change.commit })`;
        let link = commit;
        if (linkMatch) {
            body = body.replace(/ *(\(.*#.*)\) */, "");
            link = linkMatch[2].replace(/#([0-9]+)/g, (all, issue) => {
                return `[#${ issue }](https://github.com/ethers-io/ethers.js/issues/${ issue })`;
            }) + "; " + commit;
        }
        output.push(` - ${ body } (${ link })`);
    });

    output.push("");

    // Append all the existing entries unchanged
    for (let i = firstLine; i < lines.length; i++) {
        output.push(lines[i]);
    }

    return output.join("\n");
}
|
||||
|
||||
export function getLatestChange(): Change {
|
||||
let result: Change = null;
|
||||
|
||||
const lines = fs.readFileSync(changelogPath).toString().split("\n");
|
||||
for (let i = 0; i < lines.length; i++) {
|
||||
const line = lines[i];
|
||||
const match = line.match(/ethers\/([^\(]*)\(([^\)]*)\)/);
|
||||
if (match) {
|
||||
if (result) { break; }
|
||||
result = {
|
||||
title: line.trim(),
|
||||
version: match[1].trim(),
|
||||
date: match[2].trim(),
|
||||
content: ""
|
||||
};
|
||||
} else if (result) {
|
||||
if (!line.trim().match(/^-+$/)) {
|
||||
result.content += line.trim() + "\n";
|
||||
}
|
||||
}
|
||||
}
|
||||
result.content = result.content.trim();
|
||||
|
||||
return result;
|
||||
}
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user