From cfa6dec29314fe485df283974612d40550bc4179 Mon Sep 17 00:00:00 2001 From: Richard Moore Date: Sat, 10 Oct 2020 04:12:52 -0400 Subject: [PATCH] Initial EIP-712 utilities (#687). --- packages/abstract-signer/src.ts/index.ts | 26 +- packages/hash/package.json | 4 + packages/hash/src.ts/index.ts | 16 ++ packages/hash/src.ts/typed-data.ts | 304 +++++++++++++++++++++++ 4 files changed, 348 insertions(+), 2 deletions(-) create mode 100644 packages/hash/src.ts/typed-data.ts diff --git a/packages/abstract-signer/src.ts/index.ts b/packages/abstract-signer/src.ts/index.ts index 1c455e55d..374292970 100644 --- a/packages/abstract-signer/src.ts/index.ts +++ b/packages/abstract-signer/src.ts/index.ts @@ -1,8 +1,8 @@ "use strict"; import { BlockTag, Provider, TransactionRequest, TransactionResponse } from "@ethersproject/abstract-provider"; -import { BigNumber } from "@ethersproject/bignumber"; -import { Bytes } from "@ethersproject/bytes"; +import { BigNumber, BigNumberish } from "@ethersproject/bignumber"; +import { Bytes, BytesLike } from "@ethersproject/bytes"; import { Deferrable, defineReadOnly, resolveProperties, shallowCopy } from "@ethersproject/properties"; import { Logger } from "@ethersproject/logger"; @@ -19,6 +19,22 @@ const forwardErrors = [ Logger.errors.REPLACEMENT_UNDERPRICED, ]; +// EIP-712 Typed Data +// See: https://eips.ethereum.org/EIPS/eip-712 + +export interface TypedDataDomain { + name?: string; + version?: string; + chainId?: BigNumberish; + verifyingContract?: string; + salt?: BytesLike; +}; + +export interface TypedDataField { + name: string; + type: string; +}; + // Sub-classes of Signer may optionally extend this interface to indicate // they have a private key available synchronously export interface ExternallyOwnedAccount { @@ -54,6 +70,8 @@ export abstract class Signer { // it does, sentTransaction MUST be overridden. 
abstract signTransaction(transaction: Deferrable): Promise; +// abstract _signTypedData(domain: TypedDataDomain, types: Array, data: any): Promise; + // Returns a new instance of the Signer, connected to provider. // This MAY throw if changing providers is not supported. abstract connect(provider: Provider): Signer; @@ -246,6 +264,10 @@ export class VoidSigner extends Signer { return this._fail("VoidSigner cannot sign transactions", "signTransaction"); } + _signTypedData(domain: TypedDataDomain, types: Array, data: any): Promise { + return this._fail("VoidSigner cannot sign typed data", "signTypedData"); + } + connect(provider: Provider): VoidSigner { return new VoidSigner(this.address, provider); } diff --git a/packages/hash/package.json b/packages/hash/package.json index c4ab4908a..d36098de5 100644 --- a/packages/hash/package.json +++ b/packages/hash/package.json @@ -1,9 +1,13 @@ { "author": "Richard Moore ", "dependencies": { + "@ethersproject/abstract-signer": "^5.0.6", + "@ethersproject/address": "^5.0.5", + "@ethersproject/bignumber": "^5.0.8", "@ethersproject/bytes": "^5.0.4", "@ethersproject/keccak256": "^5.0.3", "@ethersproject/logger": "^5.0.5", + "@ethersproject/properties": "^5.0.4", "@ethersproject/strings": "^5.0.4" }, "description": "Hash utility functions for Ethereum.", diff --git a/packages/hash/src.ts/index.ts b/packages/hash/src.ts/index.ts index 407395c09..8f147dba2 100644 --- a/packages/hash/src.ts/index.ts +++ b/packages/hash/src.ts/index.ts @@ -8,6 +8,22 @@ import { Logger } from "@ethersproject/logger"; import { version } from "./_version"; const logger = new Logger(version); +import { + getPrimaryType as _getPrimaryType, + hashStruct as _hashStruct, + hashTypedData as _hashTypedData, + hashTypedDataDomain as _hashTypedDataDomain, + TypedDataEncoder as _TypedDataEncoder +} from "./typed-data"; + +export { + _getPrimaryType, + _hashStruct, + _hashTypedData, + _hashTypedDataDomain, + _TypedDataEncoder +} + /////////////////////////////// 
const Zeros = new Uint8Array([0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]); diff --git a/packages/hash/src.ts/typed-data.ts b/packages/hash/src.ts/typed-data.ts new file mode 100644 index 000000000..86c04abf2 --- /dev/null +++ b/packages/hash/src.ts/typed-data.ts @@ -0,0 +1,304 @@ +import { TypedDataDomain, TypedDataField } from "@ethersproject/abstract-signer"; +import { getAddress } from "@ethersproject/address"; +import { BigNumber, BigNumberish } from "@ethersproject/bignumber"; +import { arrayify, BytesLike, concat, hexConcat, hexZeroPad } from "@ethersproject/bytes"; +import { keccak256 } from "@ethersproject/keccak256"; +import { deepCopy, defineReadOnly } from "@ethersproject/properties"; + +import { Logger } from "@ethersproject/logger"; +import { version } from "./_version"; +const logger = new Logger(version); + +import { id } from "./index"; + +const padding = new Uint8Array(32); +padding.fill(0); + +const NegativeOne: BigNumber = BigNumber.from(-1); +const Zero: BigNumber = BigNumber.from(0); +const One: BigNumber = BigNumber.from(1); +const MaxUint256: BigNumber = BigNumber.from("0xffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff"); + +function hexPadRight(value: BytesLike) { + const bytes = arrayify(value); + return hexConcat([ bytes, padding.slice(bytes.length % 32) ]); +} + +const hexTrue = hexZeroPad(One.toHexString(), 32); +const hexFalse = hexZeroPad(Zero.toHexString(), 32); + +const domainFieldTypes: Record = { + name: "string", + version: "string", + chainId: "uint256", + verifyingContract: "address", + salt: "bytes32" +}; + +function getBaseEncoder(type: string): (value: any) => string { + // intXX and uintXX + { + const match = type.match(/^(u?)int(\d+)$/); + if (match) { + const width = parseInt(match[2]); + if (width % 8 !== 0 || width > 256 || match[2] !== String(width)) { + logger.throwArgumentError("invalid numeric width", "type", type); + } + const signed = (match[1] 
=== ""); + + return function(value: BigNumberish) { + let v = BigNumber.from(value); + + if (signed) { + let bounds = MaxUint256.mask(width - 1); + if (v.gt(bounds) || v.lt(bounds.add(One).mul(NegativeOne))) { + logger.throwArgumentError(`value out-of-bounds for ${ type }`, "value", value); + } + } else if (v.lt(Zero) || v.gt(MaxUint256.mask(width))) { + logger.throwArgumentError(`value out-of-bounds for ${ type }`, "value", value); + } + + v = v.toTwos(256); + + return hexZeroPad(v.toHexString(), 32); + }; + } + } + + // bytesXX + { + const match = type.match(/^bytes(\d+)$/); + if (match) { + const width = parseInt(match[1]); + if (width === 0 || width > 32 || match[1] !== String(width)) { + logger.throwArgumentError("invalid bytes width", "type", type); + } + return function(value: BytesLike) { + const bytes = arrayify(value); + if (bytes.length !== width) { + logger.throwArgumentError(`invalid length for ${ type }`, "value", value); + } + return hexPadRight(value); + }; + } + } + + switch (type) { + case "address": return function(value: string) { + return hexZeroPad(getAddress(value), 32); + }; + case "bool": return function(value: boolean) { + return ((!value) ? 
hexFalse: hexTrue); + }; + case "bytes": return function(value: BytesLike) { + return keccak256(value); + }; + case "string": return function(value: string) { + return id(value); + }; + } + + return null; +} + +function encodeType(name: string, fields: Array): string { + return `${ name }(${ fields.map((f) => (f.type + " " + f.name)).join(",") })`; +} + +export class TypedDataEncoder { + readonly primaryType: string; + readonly types: Record>; + + readonly _types: Record; + + constructor(types: Record>) { + defineReadOnly(this, "types", Object.freeze(deepCopy(types))); + + defineReadOnly(this, "_types", { }); + + // Link struct types to their direct child structs + const links: Record> = { }; + + // Link structs to structs which contain them as a child + const parents: Record> = { }; + + // Link all subtypes within a given struct + const subtypes: Record> = { }; + + Object.keys(types).forEach((type) => { + links[type] = { }; + parents[type] = [ ]; + subtypes[type] = { } + }); + + for (const name in types) { + + const uniqueNames: Record = { }; + + types[name].forEach((field) => { + + // Check each field has a unique name + if (uniqueNames[field.name]) { + logger.throwArgumentError(`duplicate variable name ${ JSON.stringify(field.name) } in ${ JSON.stringify(name) }`, "types", types); + } + uniqueNames[field.name] = true; + + // Get the base type (drop any array specifiers) + const baseType = field.type.match(/^([^\x5b]*)(\x5b|$)/)[1]; + if (baseType === name) { + logger.throwArgumentError(`circular type reference to ${ JSON.stringify(baseType) }`, "types", types); + } + + // Is this a base encoding type? 
+ const encoder = getBaseEncoder(baseType); + if (encoder) { return ;} + + if (!parents[baseType]) { + logger.throwArgumentError(`unknown type ${ JSON.stringify(baseType) }`, "types", types); + } + + // Add linkage + parents[baseType].push(name); + links[name][baseType] = true; + }); + } + + // Deduce the primary type + const primaryTypes = Object.keys(parents).filter((n) => (parents[n].length === 0)); + + if (primaryTypes.length === 0) { + logger.throwArgumentError("missing primary type", "types", types); + } else if (primaryTypes.length > 1) { + logger.throwArgumentError(`ambiguous primary types or unused types: ${ primaryTypes.map((t) => (JSON.stringify(t))).join(", ") }`, "types", types); + } + + defineReadOnly(this, "primaryType", primaryTypes[0]); + + // Check for circular type references + function checkCircular(type: string, found: Record<string, boolean>) { + if (found[type]) { + logger.throwArgumentError(`circular type reference to ${ JSON.stringify(type) }`, "types", types); + } + + found[type] = true; + + Object.keys(links[type]).forEach((child) => { + if (!parents[child]) { return; } + + // Recursively check children + checkCircular(child, found); + + // Mark all ancestors as having this descendant + Object.keys(found).forEach((subtype) => { + subtypes[subtype][child] = true; + }); + }); + + delete found[type]; + } + checkCircular(this.primaryType, { }); + + // Compute each fully described type + for (const name in subtypes) { + const st = Object.keys(subtypes[name]); + st.sort(); + this._types[name] = encodeType(name, types[name]) + st.map((t) => encodeType(t, types[t])).join(""); + } + } + + _getEncoder(type: string): (value: any) => string { + const match = type.match(/^([^\x5b]*)(\x5b(\d*)\x5d)?$/); + if (!match) { logger.throwArgumentError(`unknown type: ${ type }`, "type", type); } + + const baseType = match[1]; + + let baseEncoder = getBaseEncoder(baseType); + + // A struct type + if (baseEncoder == null) { + const fields = this.types[baseType]; + if (!fields) { 
logger.throwArgumentError(`unknown type: ${ type }`, "type", type); } + + const encodedType = id(this._types[baseType]); + baseEncoder = (value: Record<string, any>) => { + const values = fields.map((f) => { + const result = this._getEncoder(f.type)(value[f.name]); + if (this._types[f.type]) { return keccak256(result); } + return result; + }); + values.unshift(encodedType); + return hexConcat(values); + } + } + + // An array type + if (match[2]) { + const length = (match[3] ? parseInt(match[3]): -1); + return (value: Array<any>) => { + if (length >= 0 && value.length !== length) { + logger.throwArgumentError(`array length mismatch; expected length ${ length }`, "value", value); + } + + let result = value.map(baseEncoder); + if (this._types[baseType]) { + result = result.map(keccak256); + } + return keccak256(hexConcat(result)); + }; + } + + return baseEncoder; + } + + encodeType(name: string): string { + const result = this._types[name]; + if (!result) { + logger.throwArgumentError(`unknown type: ${ JSON.stringify(name) }`, "name", name); + } + return result; + } + + encodeData(type: string, value: any): string { + return this._getEncoder(type)(value); + } + + hashStruct(name: string, value: Record<string, any>): string { + return keccak256(this.encodeData(name, value)); + } + + encode(value: Record<string, any>): string { + return this.encodeData(this.primaryType, value); + } + + hash(value: Record<string, any>): string { + return this.hashStruct(this.primaryType, value); + } +} + +export function getPrimaryType(types: Record<string, Array<TypedDataField>>): string { + return (new TypedDataEncoder(types)).primaryType; +} + +export function hashStruct(name: string, types: Record<string, Array<TypedDataField>>, value: Record<string, any>): string { + return (new TypedDataEncoder(types)).hashStruct(name, value); +} + +export function hashTypedDataDomain(domain: TypedDataDomain): string { + const domainFields: Array<TypedDataField> = [ ]; + for (const name in domain) { + const type = domainFieldTypes[name]; + if (!type) { + logger.throwArgumentError(`invalid typed-data domain key: ${ JSON.stringify(name) 
}`, "domain", domain); + } + domainFields.push({ name, type }); + } + return hashStruct("EIP712Domain", { EIP712Domain: domainFields }, domain); +} + +export function hashTypedData(domain: TypedDataDomain, types: Record<string, Array<TypedDataField>>, value: Record<string, any>): string { + return keccak256(concat([ + "0x1901", + hashTypedDataDomain(domain), + (new TypedDataEncoder(types)).hash(value) + ])); +}