0) {
- if(p < this.DB && (d = this[i]>>p) > 0) { m = true; r = int2char(d); }
- while(i >= 0) {
- if(p < k) {
- d = (this[i]&((1<>(p+=this.DB-k);
- }
- else {
- d = (this[i]>>(p-=k))&km;
- if(p <= 0) { p += this.DB; --i; }
- }
- if(d > 0) m = true;
- if(m) r += int2char(d);
- }
- }
- return m?r:"0";
- }
-
- // (public) -this
- function bnNegate() { var r = nbi(); BigInteger.ZERO.subTo(this,r); return r; }
-
- // (public) |this|
- function bnAbs() { return (this.s<0)?this.negate():this; }
-
- // (public) return + if this > a, - if this < a, 0 if equal
- function bnCompareTo(a) {
- var r = this.s-a.s;
- if(r != 0) return r;
- var i = this.t;
- r = i-a.t;
- if(r != 0) return (this.s<0)?-r:r;
- while(--i >= 0) if((r=this[i]-a[i]) != 0) return r;
- return 0;
- }
-
- // returns bit length of the integer x
- function nbits(x) {
- var r = 1, t;
- if((t=x>>>16) != 0) { x = t; r += 16; }
- if((t=x>>8) != 0) { x = t; r += 8; }
- if((t=x>>4) != 0) { x = t; r += 4; }
- if((t=x>>2) != 0) { x = t; r += 2; }
- if((t=x>>1) != 0) { x = t; r += 1; }
- return r;
- }
-
- // (public) return the number of bits in "this"
- function bnBitLength() {
- if(this.t <= 0) return 0;
- return this.DB*(this.t-1)+nbits(this[this.t-1]^(this.s&this.DM));
- }
-
- // (protected) r = this << n*DB
- function bnpDLShiftTo(n,r) {
- var i;
- for(i = this.t-1; i >= 0; --i) r[i+n] = this[i];
- for(i = n-1; i >= 0; --i) r[i] = 0;
- r.t = this.t+n;
- r.s = this.s;
- }
-
- // (protected) r = this >> n*DB
- function bnpDRShiftTo(n,r) {
- for(var i = n; i < this.t; ++i) r[i-n] = this[i];
- r.t = Math.max(this.t-n,0);
- r.s = this.s;
- }
-
- // (protected) r = this << n
- function bnpLShiftTo(n,r) {
- var bs = n%this.DB;
- var cbs = this.DB-bs;
- var bm = (1<= 0; --i) {
- r[i+ds+1] = (this[i]>>cbs)|c;
- c = (this[i]&bm)<= 0; --i) r[i] = 0;
- r[ds] = c;
- r.t = this.t+ds+1;
- r.s = this.s;
- r.clamp();
- }
-
- // (protected) r = this >> n
- function bnpRShiftTo(n,r) {
- r.s = this.s;
- var ds = Math.floor(n/this.DB);
- if(ds >= this.t) { r.t = 0; return; }
- var bs = n%this.DB;
- var cbs = this.DB-bs;
- var bm = (1<>bs;
- for(var i = ds+1; i < this.t; ++i) {
- r[i-ds-1] |= (this[i]&bm)<>bs;
- }
- if(bs > 0) r[this.t-ds-1] |= (this.s&bm)<>= this.DB;
- }
- if(a.t < this.t) {
- c -= a.s;
- while(i < this.t) {
- c += this[i];
- r[i++] = c&this.DM;
- c >>= this.DB;
- }
- c += this.s;
- }
- else {
- c += this.s;
- while(i < a.t) {
- c -= a[i];
- r[i++] = c&this.DM;
- c >>= this.DB;
- }
- c -= a.s;
- }
- r.s = (c<0)?-1:0;
- if(c < -1) r[i++] = this.DV+c;
- else if(c > 0) r[i++] = c;
- r.t = i;
- r.clamp();
- }
-
- // (protected) r = this * a, r != this,a (HAC 14.12)
- // "this" should be the larger one if appropriate.
- function bnpMultiplyTo(a,r) {
- var x = this.abs(), y = a.abs();
- var i = x.t;
- r.t = i+y.t;
- while(--i >= 0) r[i] = 0;
- for(i = 0; i < y.t; ++i) r[i+x.t] = x.am(0,y[i],r,i,0,x.t);
- r.s = 0;
- r.clamp();
- if(this.s != a.s) BigInteger.ZERO.subTo(r,r);
- }
-
- // (protected) r = this^2, r != this (HAC 14.16)
- function bnpSquareTo(r) {
- var x = this.abs();
- var i = r.t = 2*x.t;
- while(--i >= 0) r[i] = 0;
- for(i = 0; i < x.t-1; ++i) {
- var c = x.am(i,x[i],r,2*i,0,1);
- if((r[i+x.t]+=x.am(i+1,2*x[i],r,2*i+1,c,x.t-i-1)) >= x.DV) {
- r[i+x.t] -= x.DV;
- r[i+x.t+1] = 1;
- }
- }
- if(r.t > 0) r[r.t-1] += x.am(i,x[i],r,2*i,0,1);
- r.s = 0;
- r.clamp();
- }
-
- // (protected) divide this by m, quotient and remainder to q, r (HAC 14.20)
- // r != q, this != m. q or r may be null.
- function bnpDivRemTo(m,q,r) {
- var pm = m.abs();
- if(pm.t <= 0) return;
- var pt = this.abs();
- if(pt.t < pm.t) {
- if(q != null) q.fromInt(0);
- if(r != null) this.copyTo(r);
- return;
- }
- if(r == null) r = nbi();
- var y = nbi(), ts = this.s, ms = m.s;
- var nsh = this.DB-nbits(pm[pm.t-1]); // normalize modulus
- if(nsh > 0) { pm.lShiftTo(nsh,y); pt.lShiftTo(nsh,r); }
- else { pm.copyTo(y); pt.copyTo(r); }
- var ys = y.t;
- var y0 = y[ys-1];
- if(y0 == 0) return;
- var yt = y0*(1<1)?y[ys-2]>>this.F2:0);
- var d1 = this.FV/yt, d2 = (1<= 0) {
- r[r.t++] = 1;
- r.subTo(t,r);
- }
- BigInteger.ONE.dlShiftTo(ys,t);
- t.subTo(y,y); // "negative" y so we can replace sub with am later
- while(y.t < ys) y[y.t++] = 0;
- while(--j >= 0) {
- // Estimate quotient digit
- var qd = (r[--i]==y0)?this.DM:Math.floor(r[i]*d1+(r[i-1]+e)*d2);
- if((r[i]+=y.am(0,qd,r,j,0,ys)) < qd) { // Try it out
- y.dlShiftTo(j,t);
- r.subTo(t,r);
- while(r[i] < --qd) r.subTo(t,r);
- }
- }
- if(q != null) {
- r.drShiftTo(ys,q);
- if(ts != ms) BigInteger.ZERO.subTo(q,q);
- }
- r.t = ys;
- r.clamp();
- if(nsh > 0) r.rShiftTo(nsh,r); // Denormalize remainder
- if(ts < 0) BigInteger.ZERO.subTo(r,r);
- }
-
- // (public) this mod a
- function bnMod(a) {
- var r = nbi();
- this.abs().divRemTo(a,null,r);
- if(this.s < 0 && r.compareTo(BigInteger.ZERO) > 0) a.subTo(r,r);
- return r;
- }
-
- // Modular reduction using "classic" algorithm
- function Classic(m) { this.m = m; }
- function cConvert(x) {
- if(x.s < 0 || x.compareTo(this.m) >= 0) return x.mod(this.m);
- else return x;
- }
- function cRevert(x) { return x; }
- function cReduce(x) { x.divRemTo(this.m,null,x); }
- function cMulTo(x,y,r) { x.multiplyTo(y,r); this.reduce(r); }
- function cSqrTo(x,r) { x.squareTo(r); this.reduce(r); }
-
- Classic.prototype.convert = cConvert;
- Classic.prototype.revert = cRevert;
- Classic.prototype.reduce = cReduce;
- Classic.prototype.mulTo = cMulTo;
- Classic.prototype.sqrTo = cSqrTo;
-
- // (protected) return "-1/this % 2^DB"; useful for Mont. reduction
- // justification:
- // xy == 1 (mod m)
- // xy = 1+km
- // xy(2-xy) = (1+km)(1-km)
- // x[y(2-xy)] = 1-k^2m^2
- // x[y(2-xy)] == 1 (mod m^2)
- // if y is 1/x mod m, then y(2-xy) is 1/x mod m^2
- // should reduce x and y(2-xy) by m^2 at each step to keep size bounded.
- // JS multiply "overflows" differently from C/C++, so care is needed here.
- function bnpInvDigit() {
- if(this.t < 1) return 0;
- var x = this[0];
- if((x&1) == 0) return 0;
- var y = x&3; // y == 1/x mod 2^2
- y = (y*(2-(x&0xf)*y))&0xf; // y == 1/x mod 2^4
- y = (y*(2-(x&0xff)*y))&0xff; // y == 1/x mod 2^8
- y = (y*(2-(((x&0xffff)*y)&0xffff)))&0xffff; // y == 1/x mod 2^16
- // last step - calculate inverse mod DV directly;
- // assumes 16 < DB <= 32 and assumes ability to handle 48-bit ints
- y = (y*(2-x*y%this.DV))%this.DV; // y == 1/x mod 2^dbits
- // we really want the negative inverse, and -DV < y < DV
- return (y>0)?this.DV-y:-y;
- }
-
- // Montgomery reduction
- function Montgomery(m) {
- this.m = m;
- this.mp = m.invDigit();
- this.mpl = this.mp&0x7fff;
- this.mph = this.mp>>15;
- this.um = (1<<(m.DB-15))-1;
- this.mt2 = 2*m.t;
- }
-
- // xR mod m
- function montConvert(x) {
- var r = nbi();
- x.abs().dlShiftTo(this.m.t,r);
- r.divRemTo(this.m,null,r);
- if(x.s < 0 && r.compareTo(BigInteger.ZERO) > 0) this.m.subTo(r,r);
- return r;
- }
-
- // x/R mod m
- function montRevert(x) {
- var r = nbi();
- x.copyTo(r);
- this.reduce(r);
- return r;
- }
-
- // x = x/R mod m (HAC 14.32)
- function montReduce(x) {
- while(x.t <= this.mt2) // pad x so am has enough room later
- x[x.t++] = 0;
- for(var i = 0; i < this.m.t; ++i) {
- // faster way of calculating u0 = x[i]*mp mod DV
- var j = x[i]&0x7fff;
- var u0 = (j*this.mpl+(((j*this.mph+(x[i]>>15)*this.mpl)&this.um)<<15))&x.DM;
- // use am to combine the multiply-shift-add into one call
- j = i+this.m.t;
- x[j] += this.m.am(0,u0,x,i,0,this.m.t);
- // propagate carry
- while(x[j] >= x.DV) { x[j] -= x.DV; x[++j]++; }
- }
- x.clamp();
- x.drShiftTo(this.m.t,x);
- if(x.compareTo(this.m) >= 0) x.subTo(this.m,x);
- }
-
- // r = "x^2/R mod m"; x != r
- function montSqrTo(x,r) { x.squareTo(r); this.reduce(r); }
-
- // r = "xy/R mod m"; x,y != r
- function montMulTo(x,y,r) { x.multiplyTo(y,r); this.reduce(r); }
-
- Montgomery.prototype.convert = montConvert;
- Montgomery.prototype.revert = montRevert;
- Montgomery.prototype.reduce = montReduce;
- Montgomery.prototype.mulTo = montMulTo;
- Montgomery.prototype.sqrTo = montSqrTo;
-
- // (protected) true iff this is even
- function bnpIsEven() { return ((this.t>0)?(this[0]&1):this.s) == 0; }
-
- // (protected) this^e, e < 2^32, doing sqr and mul with "r" (HAC 14.79)
- function bnpExp(e,z) {
- if(e > 0xffffffff || e < 1) return BigInteger.ONE;
- var r = nbi(), r2 = nbi(), g = z.convert(this), i = nbits(e)-1;
- g.copyTo(r);
- while(--i >= 0) {
- z.sqrTo(r,r2);
- if((e&(1< 0) z.mulTo(r2,g,r);
- else { var t = r; r = r2; r2 = t; }
- }
- return z.revert(r);
- }
-
- // (public) this^e % m, 0 <= e < 2^32
- function bnModPowInt(e,m) {
- var z;
- if(e < 256 || m.isEven()) z = new Classic(m); else z = new Montgomery(m);
- return this.exp(e,z);
- }
-
- // protected
- BigInteger.prototype.copyTo = bnpCopyTo;
- BigInteger.prototype.fromInt = bnpFromInt;
- BigInteger.prototype.fromString = bnpFromString;
- BigInteger.prototype.clamp = bnpClamp;
- BigInteger.prototype.dlShiftTo = bnpDLShiftTo;
- BigInteger.prototype.drShiftTo = bnpDRShiftTo;
- BigInteger.prototype.lShiftTo = bnpLShiftTo;
- BigInteger.prototype.rShiftTo = bnpRShiftTo;
- BigInteger.prototype.subTo = bnpSubTo;
- BigInteger.prototype.multiplyTo = bnpMultiplyTo;
- BigInteger.prototype.squareTo = bnpSquareTo;
- BigInteger.prototype.divRemTo = bnpDivRemTo;
- BigInteger.prototype.invDigit = bnpInvDigit;
- BigInteger.prototype.isEven = bnpIsEven;
- BigInteger.prototype.exp = bnpExp;
-
- // public
- BigInteger.prototype.toString = bnToString;
- BigInteger.prototype.negate = bnNegate;
- BigInteger.prototype.abs = bnAbs;
- BigInteger.prototype.compareTo = bnCompareTo;
- BigInteger.prototype.bitLength = bnBitLength;
- BigInteger.prototype.mod = bnMod;
- BigInteger.prototype.modPowInt = bnModPowInt;
-
- // "constants"
- BigInteger.ZERO = nbv(0);
- BigInteger.ONE = nbv(1);
-
- // Copyright (c) 2005-2009 Tom Wu
- // All Rights Reserved.
- // See "LICENSE" for details.
-
- // Extended JavaScript BN functions, required for RSA private ops.
-
- // Version 1.1: new BigInteger("0", 10) returns "proper" zero
- // Version 1.2: square() API, isProbablePrime fix
-
- // (public)
- function bnClone() { var r = nbi(); this.copyTo(r); return r; }
-
- // (public) return value as integer
- function bnIntValue() {
- if(this.s < 0) {
- if(this.t == 1) return this[0]-this.DV;
- else if(this.t == 0) return -1;
- }
- else if(this.t == 1) return this[0];
- else if(this.t == 0) return 0;
- // assumes 16 < DB < 32
- return ((this[1]&((1<<(32-this.DB))-1))<>24; }
-
- // (public) return value as short (assumes DB>=16)
- function bnShortValue() { return (this.t==0)?this.s:(this[0]<<16)>>16; }
-
- // (protected) return x s.t. r^x < DV
- function bnpChunkSize(r) { return Math.floor(Math.LN2*this.DB/Math.log(r)); }
-
- // (public) 0 if this == 0, 1 if this > 0
- function bnSigNum() {
- if(this.s < 0) return -1;
- else if(this.t <= 0 || (this.t == 1 && this[0] <= 0)) return 0;
- else return 1;
- }
-
- // (protected) convert to radix string
- function bnpToRadix(b) {
- if(b == null) b = 10;
- if(this.signum() == 0 || b < 2 || b > 36) return "0";
- var cs = this.chunkSize(b);
- var a = Math.pow(b,cs);
- var d = nbv(a), y = nbi(), z = nbi(), r = "";
- this.divRemTo(d,y,z);
- while(y.signum() > 0) {
- r = (a+z.intValue()).toString(b).substr(1) + r;
- y.divRemTo(d,y,z);
- }
- return z.intValue().toString(b) + r;
- }
-
- // (protected) convert from radix string
- function bnpFromRadix(s,b) {
- this.fromInt(0);
- if(b == null) b = 10;
- var cs = this.chunkSize(b);
- var d = Math.pow(b,cs), mi = false, j = 0, w = 0;
- for(var i = 0; i < s.length; ++i) {
- var x = intAt(s,i);
- if(x < 0) {
- if(s.charAt(i) == "-" && this.signum() == 0) mi = true;
- continue;
- }
- w = b*w+x;
- if(++j >= cs) {
- this.dMultiply(d);
- this.dAddOffset(w,0);
- j = 0;
- w = 0;
- }
- }
- if(j > 0) {
- this.dMultiply(Math.pow(b,j));
- this.dAddOffset(w,0);
- }
- if(mi) BigInteger.ZERO.subTo(this,this);
- }
-
- // (protected) alternate constructor
- function bnpFromNumber(a,b,c) {
- if("number" == typeof b) {
- // new BigInteger(int,int,RNG)
- if(a < 2) this.fromInt(1);
- else {
- this.fromNumber(a,c);
- if(!this.testBit(a-1)) // force MSB set
- this.bitwiseTo(BigInteger.ONE.shiftLeft(a-1),op_or,this);
- if(this.isEven()) this.dAddOffset(1,0); // force odd
- while(!this.isProbablePrime(b)) {
- this.dAddOffset(2,0);
- if(this.bitLength() > a) this.subTo(BigInteger.ONE.shiftLeft(a-1),this);
- }
- }
- }
- else {
- // new BigInteger(int,RNG)
- var x = new Array(), t = a&7;
- x.length = (a>>3)+1;
- b.nextBytes(x);
- if(t > 0) x[0] &= ((1< 0) {
- if(p < this.DB && (d = this[i]>>p) != (this.s&this.DM)>>p)
- r[k++] = d|(this.s<<(this.DB-p));
- while(i >= 0) {
- if(p < 8) {
- d = (this[i]&((1<>(p+=this.DB-8);
- }
- else {
- d = (this[i]>>(p-=8))&0xff;
- if(p <= 0) { p += this.DB; --i; }
- }
- if((d&0x80) != 0) d |= -256;
- if(k == 0 && (this.s&0x80) != (d&0x80)) ++k;
- if(k > 0 || d != this.s) r[k++] = d;
- }
- }
- return r;
- }
-
- function bnEquals(a) { return(this.compareTo(a)==0); }
- function bnMin(a) { return(this.compareTo(a)<0)?this:a; }
- function bnMax(a) { return(this.compareTo(a)>0)?this:a; }
-
- // (protected) r = this op a (bitwise)
- function bnpBitwiseTo(a,op,r) {
- var i, f, m = Math.min(a.t,this.t);
- for(i = 0; i < m; ++i) r[i] = op(this[i],a[i]);
- if(a.t < this.t) {
- f = a.s&this.DM;
- for(i = m; i < this.t; ++i) r[i] = op(this[i],f);
- r.t = this.t;
- }
- else {
- f = this.s&this.DM;
- for(i = m; i < a.t; ++i) r[i] = op(f,a[i]);
- r.t = a.t;
- }
- r.s = op(this.s,a.s);
- r.clamp();
- }
-
- // (public) this & a
- function op_and(x,y) { return x&y; }
- function bnAnd(a) { var r = nbi(); this.bitwiseTo(a,op_and,r); return r; }
-
- // (public) this | a
- function op_or(x,y) { return x|y; }
- function bnOr(a) { var r = nbi(); this.bitwiseTo(a,op_or,r); return r; }
-
- // (public) this ^ a
- function op_xor(x,y) { return x^y; }
- function bnXor(a) { var r = nbi(); this.bitwiseTo(a,op_xor,r); return r; }
-
- // (public) this & ~a
- function op_andnot(x,y) { return x&~y; }
- function bnAndNot(a) { var r = nbi(); this.bitwiseTo(a,op_andnot,r); return r; }
-
- // (public) ~this
- function bnNot() {
- var r = nbi();
- for(var i = 0; i < this.t; ++i) r[i] = this.DM&~this[i];
- r.t = this.t;
- r.s = ~this.s;
- return r;
- }
-
- // (public) this << n
- function bnShiftLeft(n) {
- var r = nbi();
- if(n < 0) this.rShiftTo(-n,r); else this.lShiftTo(n,r);
- return r;
- }
-
- // (public) this >> n
- function bnShiftRight(n) {
- var r = nbi();
- if(n < 0) this.lShiftTo(-n,r); else this.rShiftTo(n,r);
- return r;
- }
-
- // return index of lowest 1-bit in x, x < 2^31
- function lbit(x) {
- if(x == 0) return -1;
- var r = 0;
- if((x&0xffff) == 0) { x >>= 16; r += 16; }
- if((x&0xff) == 0) { x >>= 8; r += 8; }
- if((x&0xf) == 0) { x >>= 4; r += 4; }
- if((x&3) == 0) { x >>= 2; r += 2; }
- if((x&1) == 0) ++r;
- return r;
- }
-
- // (public) returns index of lowest 1-bit (or -1 if none)
- function bnGetLowestSetBit() {
- for(var i = 0; i < this.t; ++i)
- if(this[i] != 0) return i*this.DB+lbit(this[i]);
- if(this.s < 0) return this.t*this.DB;
- return -1;
- }
-
- // return number of 1 bits in x
- function cbit(x) {
- var r = 0;
- while(x != 0) { x &= x-1; ++r; }
- return r;
- }
-
- // (public) return number of set bits
- function bnBitCount() {
- var r = 0, x = this.s&this.DM;
- for(var i = 0; i < this.t; ++i) r += cbit(this[i]^x);
- return r;
- }
-
- // (public) true iff nth bit is set
- function bnTestBit(n) {
- var j = Math.floor(n/this.DB);
- if(j >= this.t) return(this.s!=0);
- return((this[j]&(1<<(n%this.DB)))!=0);
- }
-
- // (protected) this op (1<>= this.DB;
- }
- if(a.t < this.t) {
- c += a.s;
- while(i < this.t) {
- c += this[i];
- r[i++] = c&this.DM;
- c >>= this.DB;
- }
- c += this.s;
- }
- else {
- c += this.s;
- while(i < a.t) {
- c += a[i];
- r[i++] = c&this.DM;
- c >>= this.DB;
- }
- c += a.s;
- }
- r.s = (c<0)?-1:0;
- if(c > 0) r[i++] = c;
- else if(c < -1) r[i++] = this.DV+c;
- r.t = i;
- r.clamp();
- }
-
- // (public) this + a
- function bnAdd(a) { var r = nbi(); this.addTo(a,r); return r; }
-
- // (public) this - a
- function bnSubtract(a) { var r = nbi(); this.subTo(a,r); return r; }
-
- // (public) this * a
- function bnMultiply(a) { var r = nbi(); this.multiplyTo(a,r); return r; }
-
- // (public) this^2
- function bnSquare() { var r = nbi(); this.squareTo(r); return r; }
-
- // (public) this / a
- function bnDivide(a) { var r = nbi(); this.divRemTo(a,r,null); return r; }
-
- // (public) this % a
- function bnRemainder(a) { var r = nbi(); this.divRemTo(a,null,r); return r; }
-
- // (public) [this/a,this%a]
- function bnDivideAndRemainder(a) {
- var q = nbi(), r = nbi();
- this.divRemTo(a,q,r);
- return new Array(q,r);
- }
-
- // (protected) this *= n, this >= 0, 1 < n < DV
- function bnpDMultiply(n) {
- this[this.t] = this.am(0,n-1,this,0,0,this.t);
- ++this.t;
- this.clamp();
- }
-
- // (protected) this += n << w words, this >= 0
- function bnpDAddOffset(n,w) {
- if(n == 0) return;
- while(this.t <= w) this[this.t++] = 0;
- this[w] += n;
- while(this[w] >= this.DV) {
- this[w] -= this.DV;
- if(++w >= this.t) this[this.t++] = 0;
- ++this[w];
- }
- }
-
- // A "null" reducer
- function NullExp() {}
- function nNop(x) { return x; }
- function nMulTo(x,y,r) { x.multiplyTo(y,r); }
- function nSqrTo(x,r) { x.squareTo(r); }
-
- NullExp.prototype.convert = nNop;
- NullExp.prototype.revert = nNop;
- NullExp.prototype.mulTo = nMulTo;
- NullExp.prototype.sqrTo = nSqrTo;
-
- // (public) this^e
- function bnPow(e) { return this.exp(e,new NullExp()); }
-
- // (protected) r = lower n words of "this * a", a.t <= n
- // "this" should be the larger one if appropriate.
- function bnpMultiplyLowerTo(a,n,r) {
- var i = Math.min(this.t+a.t,n);
- r.s = 0; // assumes a,this >= 0
- r.t = i;
- while(i > 0) r[--i] = 0;
- var j;
- for(j = r.t-this.t; i < j; ++i) r[i+this.t] = this.am(0,a[i],r,i,0,this.t);
- for(j = Math.min(a.t,n); i < j; ++i) this.am(0,a[i],r,i,0,n-i);
- r.clamp();
- }
-
- // (protected) r = "this * a" without lower n words, n > 0
- // "this" should be the larger one if appropriate.
- function bnpMultiplyUpperTo(a,n,r) {
- --n;
- var i = r.t = this.t+a.t-n;
- r.s = 0; // assumes a,this >= 0
- while(--i >= 0) r[i] = 0;
- for(i = Math.max(n-this.t,0); i < a.t; ++i)
- r[this.t+i-n] = this.am(n-i,a[i],r,0,0,this.t+i-n);
- r.clamp();
- r.drShiftTo(1,r);
- }
-
- // Barrett modular reduction
- function Barrett(m) {
- // setup Barrett
- this.r2 = nbi();
- this.q3 = nbi();
- BigInteger.ONE.dlShiftTo(2*m.t,this.r2);
- this.mu = this.r2.divide(m);
- this.m = m;
- }
-
- function barrettConvert(x) {
- if(x.s < 0 || x.t > 2*this.m.t) return x.mod(this.m);
- else if(x.compareTo(this.m) < 0) return x;
- else { var r = nbi(); x.copyTo(r); this.reduce(r); return r; }
- }
-
- function barrettRevert(x) { return x; }
-
- // x = x mod m (HAC 14.42)
- function barrettReduce(x) {
- x.drShiftTo(this.m.t-1,this.r2);
- if(x.t > this.m.t+1) { x.t = this.m.t+1; x.clamp(); }
- this.mu.multiplyUpperTo(this.r2,this.m.t+1,this.q3);
- this.m.multiplyLowerTo(this.q3,this.m.t+1,this.r2);
- while(x.compareTo(this.r2) < 0) x.dAddOffset(1,this.m.t+1);
- x.subTo(this.r2,x);
- while(x.compareTo(this.m) >= 0) x.subTo(this.m,x);
- }
-
- // r = x^2 mod m; x != r
- function barrettSqrTo(x,r) { x.squareTo(r); this.reduce(r); }
-
- // r = x*y mod m; x,y != r
- function barrettMulTo(x,y,r) { x.multiplyTo(y,r); this.reduce(r); }
-
- Barrett.prototype.convert = barrettConvert;
- Barrett.prototype.revert = barrettRevert;
- Barrett.prototype.reduce = barrettReduce;
- Barrett.prototype.mulTo = barrettMulTo;
- Barrett.prototype.sqrTo = barrettSqrTo;
-
- // (public) this^e % m (HAC 14.85)
- function bnModPow(e,m) {
- var i = e.bitLength(), k, r = nbv(1), z;
- if(i <= 0) return r;
- else if(i < 18) k = 1;
- else if(i < 48) k = 3;
- else if(i < 144) k = 4;
- else if(i < 768) k = 5;
- else k = 6;
- if(i < 8)
- z = new Classic(m);
- else if(m.isEven())
- z = new Barrett(m);
- else
- z = new Montgomery(m);
-
- // precomputation
- var g = new Array(), n = 3, k1 = k-1, km = (1< 1) {
- var g2 = nbi();
- z.sqrTo(g[1],g2);
- while(n <= km) {
- g[n] = nbi();
- z.mulTo(g2,g[n-2],g[n]);
- n += 2;
- }
- }
-
- var j = e.t-1, w, is1 = true, r2 = nbi(), t;
- i = nbits(e[j])-1;
- while(j >= 0) {
- if(i >= k1) w = (e[j]>>(i-k1))&km;
- else {
- w = (e[j]&((1<<(i+1))-1))<<(k1-i);
- if(j > 0) w |= e[j-1]>>(this.DB+i-k1);
- }
-
- n = k;
- while((w&1) == 0) { w >>= 1; --n; }
- if((i -= n) < 0) { i += this.DB; --j; }
- if(is1) { // ret == 1, don't bother squaring or multiplying it
- g[w].copyTo(r);
- is1 = false;
- }
- else {
- while(n > 1) { z.sqrTo(r,r2); z.sqrTo(r2,r); n -= 2; }
- if(n > 0) z.sqrTo(r,r2); else { t = r; r = r2; r2 = t; }
- z.mulTo(r2,g[w],r);
- }
-
- while(j >= 0 && (e[j]&(1< 0) {
- x.rShiftTo(g,x);
- y.rShiftTo(g,y);
- }
- while(x.signum() > 0) {
- if((i = x.getLowestSetBit()) > 0) x.rShiftTo(i,x);
- if((i = y.getLowestSetBit()) > 0) y.rShiftTo(i,y);
- if(x.compareTo(y) >= 0) {
- x.subTo(y,x);
- x.rShiftTo(1,x);
- }
- else {
- y.subTo(x,y);
- y.rShiftTo(1,y);
- }
- }
- if(g > 0) y.lShiftTo(g,y);
- return y;
- }
-
- // (protected) this % n, n < 2^26
- function bnpModInt(n) {
- if(n <= 0) return 0;
- var d = this.DV%n, r = (this.s<0)?n-1:0;
- if(this.t > 0)
- if(d == 0) r = this[0]%n;
- else for(var i = this.t-1; i >= 0; --i) r = (d*r+this[i])%n;
- return r;
- }
-
- // (public) 1/this % m (HAC 14.61)
- function bnModInverse(m) {
- var ac = m.isEven();
- if((this.isEven() && ac) || m.signum() == 0) return BigInteger.ZERO;
- var u = m.clone(), v = this.clone();
- var a = nbv(1), b = nbv(0), c = nbv(0), d = nbv(1);
- while(u.signum() != 0) {
- while(u.isEven()) {
- u.rShiftTo(1,u);
- if(ac) {
- if(!a.isEven() || !b.isEven()) { a.addTo(this,a); b.subTo(m,b); }
- a.rShiftTo(1,a);
- }
- else if(!b.isEven()) b.subTo(m,b);
- b.rShiftTo(1,b);
- }
- while(v.isEven()) {
- v.rShiftTo(1,v);
- if(ac) {
- if(!c.isEven() || !d.isEven()) { c.addTo(this,c); d.subTo(m,d); }
- c.rShiftTo(1,c);
- }
- else if(!d.isEven()) d.subTo(m,d);
- d.rShiftTo(1,d);
- }
- if(u.compareTo(v) >= 0) {
- u.subTo(v,u);
- if(ac) a.subTo(c,a);
- b.subTo(d,b);
- }
- else {
- v.subTo(u,v);
- if(ac) c.subTo(a,c);
- d.subTo(b,d);
- }
- }
- if(v.compareTo(BigInteger.ONE) != 0) return BigInteger.ZERO;
- if(d.compareTo(m) >= 0) return d.subtract(m);
- if(d.signum() < 0) d.addTo(m,d); else return d;
- if(d.signum() < 0) return d.add(m); else return d;
- }
-
- var lowprimes = [2,3,5,7,11,13,17,19,23,29,31,37,41,43,47,53,59,61,67,71,73,79,83,89,97,101,103,107,109,113,127,131,137,139,149,151,157,163,167,173,179,181,191,193,197,199,211,223,227,229,233,239,241,251,257,263,269,271,277,281,283,293,307,311,313,317,331,337,347,349,353,359,367,373,379,383,389,397,401,409,419,421,431,433,439,443,449,457,461,463,467,479,487,491,499,503,509,521,523,541,547,557,563,569,571,577,587,593,599,601,607,613,617,619,631,641,643,647,653,659,661,673,677,683,691,701,709,719,727,733,739,743,751,757,761,769,773,787,797,809,811,821,823,827,829,839,853,857,859,863,877,881,883,887,907,911,919,929,937,941,947,953,967,971,977,983,991,997];
- var lplim = (1<<26)/lowprimes[lowprimes.length-1];
-
- // (public) test primality with certainty >= 1-.5^t
- function bnIsProbablePrime(t) {
- var i, x = this.abs();
- if(x.t == 1 && x[0] <= lowprimes[lowprimes.length-1]) {
- for(i = 0; i < lowprimes.length; ++i)
- if(x[0] == lowprimes[i]) return true;
- return false;
- }
- if(x.isEven()) return false;
- i = 1;
- while(i < lowprimes.length) {
- var m = lowprimes[i], j = i+1;
- while(j < lowprimes.length && m < lplim) m *= lowprimes[j++];
- m = x.modInt(m);
- while(i < j) if(m%lowprimes[i++] == 0) return false;
- }
- return x.millerRabin(t);
- }
-
- // (protected) true if probably prime (HAC 4.24, Miller-Rabin)
- function bnpMillerRabin(t) {
- var n1 = this.subtract(BigInteger.ONE);
- var k = n1.getLowestSetBit();
- if(k <= 0) return false;
- var r = n1.shiftRight(k);
- t = (t+1)>>1;
- if(t > lowprimes.length) t = lowprimes.length;
- var a = nbi();
- for(var i = 0; i < t; ++i) {
- //Pick bases at random, instead of starting at 2
- a.fromInt(lowprimes[Math.floor(Math.random()*lowprimes.length)]);
- var y = a.modPow(r,this);
- if(y.compareTo(BigInteger.ONE) != 0 && y.compareTo(n1) != 0) {
- var j = 1;
- while(j++ < k && y.compareTo(n1) != 0) {
- y = y.modPowInt(2,this);
- if(y.compareTo(BigInteger.ONE) == 0) return false;
- }
- if(y.compareTo(n1) != 0) return false;
- }
- }
- return true;
- }
-
- // protected
- BigInteger.prototype.chunkSize = bnpChunkSize;
- BigInteger.prototype.toRadix = bnpToRadix;
- BigInteger.prototype.fromRadix = bnpFromRadix;
- BigInteger.prototype.fromNumber = bnpFromNumber;
- BigInteger.prototype.bitwiseTo = bnpBitwiseTo;
- BigInteger.prototype.changeBit = bnpChangeBit;
- BigInteger.prototype.addTo = bnpAddTo;
- BigInteger.prototype.dMultiply = bnpDMultiply;
- BigInteger.prototype.dAddOffset = bnpDAddOffset;
- BigInteger.prototype.multiplyLowerTo = bnpMultiplyLowerTo;
- BigInteger.prototype.multiplyUpperTo = bnpMultiplyUpperTo;
- BigInteger.prototype.modInt = bnpModInt;
- BigInteger.prototype.millerRabin = bnpMillerRabin;
-
- // public
- BigInteger.prototype.clone = bnClone;
- BigInteger.prototype.intValue = bnIntValue;
- BigInteger.prototype.byteValue = bnByteValue;
- BigInteger.prototype.shortValue = bnShortValue;
- BigInteger.prototype.signum = bnSigNum;
- BigInteger.prototype.toByteArray = bnToByteArray;
- BigInteger.prototype.equals = bnEquals;
- BigInteger.prototype.min = bnMin;
- BigInteger.prototype.max = bnMax;
- BigInteger.prototype.and = bnAnd;
- BigInteger.prototype.or = bnOr;
- BigInteger.prototype.xor = bnXor;
- BigInteger.prototype.andNot = bnAndNot;
- BigInteger.prototype.not = bnNot;
- BigInteger.prototype.shiftLeft = bnShiftLeft;
- BigInteger.prototype.shiftRight = bnShiftRight;
- BigInteger.prototype.getLowestSetBit = bnGetLowestSetBit;
- BigInteger.prototype.bitCount = bnBitCount;
- BigInteger.prototype.testBit = bnTestBit;
- BigInteger.prototype.setBit = bnSetBit;
- BigInteger.prototype.clearBit = bnClearBit;
- BigInteger.prototype.flipBit = bnFlipBit;
- BigInteger.prototype.add = bnAdd;
- BigInteger.prototype.subtract = bnSubtract;
- BigInteger.prototype.multiply = bnMultiply;
- BigInteger.prototype.divide = bnDivide;
- BigInteger.prototype.remainder = bnRemainder;
- BigInteger.prototype.divideAndRemainder = bnDivideAndRemainder;
- BigInteger.prototype.modPow = bnModPow;
- BigInteger.prototype.modInverse = bnModInverse;
- BigInteger.prototype.pow = bnPow;
- BigInteger.prototype.gcd = bnGCD;
- BigInteger.prototype.isProbablePrime = bnIsProbablePrime;
-
- // JSBN-specific extension
- BigInteger.prototype.square = bnSquare;
-
- // Expose the Barrett function
- BigInteger.prototype.Barrett = Barrett
-
- // BigInteger interfaces not implemented in jsbn:
-
- // BigInteger(int signum, byte[] magnitude)
- // double doubleValue()
- // float floatValue()
- // int hashCode()
- // long longValue()
- // static BigInteger valueOf(long val)
-
- // Random number generator - requires a PRNG backend, e.g. prng4.js
-
- // For best results, put code like
- //
- // in your main HTML document.
-
- var rng_state;
- var rng_pool;
- var rng_pptr;
-
- // Mix in a 32-bit integer into the pool
- function rng_seed_int(x) {
- rng_pool[rng_pptr++] ^= x & 255;
- rng_pool[rng_pptr++] ^= (x >> 8) & 255;
- rng_pool[rng_pptr++] ^= (x >> 16) & 255;
- rng_pool[rng_pptr++] ^= (x >> 24) & 255;
- if(rng_pptr >= rng_psize) rng_pptr -= rng_psize;
- }
-
- // Mix in the current time (w/milliseconds) into the pool
- function rng_seed_time() {
- rng_seed_int(new Date().getTime());
- }
-
- // Initialize the pool with junk if needed.
- if(rng_pool == null) {
- rng_pool = new Array();
- rng_pptr = 0;
- var t;
- if(typeof window !== "undefined" && window.crypto) {
- if (window.crypto.getRandomValues) {
- // Use webcrypto if available
- var ua = new Uint8Array(32);
- window.crypto.getRandomValues(ua);
- for(t = 0; t < 32; ++t)
- rng_pool[rng_pptr++] = ua[t];
- }
- else if(navigator.appName == "Netscape" && navigator.appVersion < "5") {
- // Extract entropy (256 bits) from NS4 RNG if available
- var z = window.crypto.random(32);
- for(t = 0; t < z.length; ++t)
- rng_pool[rng_pptr++] = z.charCodeAt(t) & 255;
- }
- }
- while(rng_pptr < rng_psize) { // extract some randomness from Math.random()
- t = Math.floor(65536 * Math.random());
- rng_pool[rng_pptr++] = t >>> 8;
- rng_pool[rng_pptr++] = t & 255;
- }
- rng_pptr = 0;
- rng_seed_time();
- //rng_seed_int(window.screenX);
- //rng_seed_int(window.screenY);
- }
-
- function rng_get_byte() {
- if(rng_state == null) {
- rng_seed_time();
- rng_state = prng_newstate();
- rng_state.init(rng_pool);
- for(rng_pptr = 0; rng_pptr < rng_pool.length; ++rng_pptr)
- rng_pool[rng_pptr] = 0;
- rng_pptr = 0;
- //rng_pool = null;
- }
- // TODO: allow reseeding after first request
- return rng_state.next();
- }
-
- function rng_get_bytes(ba) {
- var i;
- for(i = 0; i < ba.length; ++i) ba[i] = rng_get_byte();
- }
-
- function SecureRandom() {}
-
- SecureRandom.prototype.nextBytes = rng_get_bytes;
-
- // prng4.js - uses Arcfour as a PRNG
-
- function Arcfour() {
- this.i = 0;
- this.j = 0;
- this.S = new Array();
- }
-
- // Initialize arcfour context from key, an array of ints, each from [0..255]
- function ARC4init(key) {
- var i, j, t;
- for(i = 0; i < 256; ++i)
- this.S[i] = i;
- j = 0;
- for(i = 0; i < 256; ++i) {
- j = (j + this.S[i] + key[i % key.length]) & 255;
- t = this.S[i];
- this.S[i] = this.S[j];
- this.S[j] = t;
- }
- this.i = 0;
- this.j = 0;
- }
-
- function ARC4next() {
- var t;
- this.i = (this.i + 1) & 255;
- this.j = (this.j + this.S[this.i]) & 255;
- t = this.S[this.i];
- this.S[this.i] = this.S[this.j];
- this.S[this.j] = t;
- return this.S[(t + this.S[this.i]) & 255];
- }
-
- Arcfour.prototype.init = ARC4init;
- Arcfour.prototype.next = ARC4next;
-
- // Plug in your RNG constructor here
- function prng_newstate() {
- return new Arcfour();
- }
-
- // Pool size must be a multiple of 4 and greater than 32.
- // An array of bytes the size of the pool will be passed to init()
- var rng_psize = 256;
-
- if (true) {
- exports = module.exports = {
- default: BigInteger,
- BigInteger: BigInteger,
- SecureRandom: SecureRandom,
- };
- } else {}
-
-}).call(this);
-
-
/***/ }),
/***/ 7106:
@@ -65240,2984 +61487,6 @@ const validRange = (range, options) => {
module.exports = validRange
-/***/ }),
-
-/***/ 37575:
-/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
-
-"use strict";
-
-Object.defineProperty(exports, "__esModule", ({ value: true }));
-const utils_1 = __webpack_require__(11725);
-// The default Buffer size if one is not provided.
-const DEFAULT_SMARTBUFFER_SIZE = 4096;
-// The default string encoding to use for reading/writing strings.
-const DEFAULT_SMARTBUFFER_ENCODING = 'utf8';
-class SmartBuffer {
- /**
- * Creates a new SmartBuffer instance.
- *
- * @param options { SmartBufferOptions } The SmartBufferOptions to apply to this instance.
- */
- constructor(options) {
- this.length = 0;
- this._encoding = DEFAULT_SMARTBUFFER_ENCODING;
- this._writeOffset = 0;
- this._readOffset = 0;
- if (SmartBuffer.isSmartBufferOptions(options)) {
- // Checks for encoding
- if (options.encoding) {
- utils_1.checkEncoding(options.encoding);
- this._encoding = options.encoding;
- }
- // Checks for initial size length
- if (options.size) {
- if (utils_1.isFiniteInteger(options.size) && options.size > 0) {
- this._buff = Buffer.allocUnsafe(options.size);
- }
- else {
- throw new Error(utils_1.ERRORS.INVALID_SMARTBUFFER_SIZE);
- }
- // Check for initial Buffer
- }
- else if (options.buff) {
- if (Buffer.isBuffer(options.buff)) {
- this._buff = options.buff;
- this.length = options.buff.length;
- }
- else {
- throw new Error(utils_1.ERRORS.INVALID_SMARTBUFFER_BUFFER);
- }
- }
- else {
- this._buff = Buffer.allocUnsafe(DEFAULT_SMARTBUFFER_SIZE);
- }
- }
- else {
- // If something was passed but it's not a SmartBufferOptions object
- if (typeof options !== 'undefined') {
- throw new Error(utils_1.ERRORS.INVALID_SMARTBUFFER_OBJECT);
- }
- // Otherwise default to sane options
- this._buff = Buffer.allocUnsafe(DEFAULT_SMARTBUFFER_SIZE);
- }
- }
- /**
- * Creates a new SmartBuffer instance with the provided internal Buffer size and optional encoding.
- *
- * @param size { Number } The size of the internal Buffer.
- * @param encoding { String } The BufferEncoding to use for strings.
- *
- * @return { SmartBuffer }
- */
- static fromSize(size, encoding) {
- return new this({
- size: size,
- encoding: encoding
- });
- }
- /**
- * Creates a new SmartBuffer instance with the provided Buffer and optional encoding.
- *
- * @param buffer { Buffer } The Buffer to use as the internal Buffer value.
- * @param encoding { String } The BufferEncoding to use for strings.
- *
- * @return { SmartBuffer }
- */
- static fromBuffer(buff, encoding) {
- return new this({
- buff: buff,
- encoding: encoding
- });
- }
- /**
- * Creates a new SmartBuffer instance with the provided SmartBufferOptions options.
- *
- * @param options { SmartBufferOptions } The options to use when creating the SmartBuffer instance.
- */
- static fromOptions(options) {
- return new this(options);
- }
- /**
- * Type checking function that determines if an object is a SmartBufferOptions object.
- */
- static isSmartBufferOptions(options) {
- const castOptions = options;
- return (castOptions &&
- (castOptions.encoding !== undefined || castOptions.size !== undefined || castOptions.buff !== undefined));
- }
- // Signed integers
- /**
- * Reads an Int8 value from the current read position or an optionally provided offset.
- *
- * @param offset { Number } The offset to read data from (optional)
- * @return { Number }
- */
- readInt8(offset) {
- return this._readNumberValue(Buffer.prototype.readInt8, 1, offset);
- }
- /**
- * Reads an Int16BE value from the current read position or an optionally provided offset.
- *
- * @param offset { Number } The offset to read data from (optional)
- * @return { Number }
- */
- readInt16BE(offset) {
- return this._readNumberValue(Buffer.prototype.readInt16BE, 2, offset);
- }
- /**
- * Reads an Int16LE value from the current read position or an optionally provided offset.
- *
- * @param offset { Number } The offset to read data from (optional)
- * @return { Number }
- */
- readInt16LE(offset) {
- return this._readNumberValue(Buffer.prototype.readInt16LE, 2, offset);
- }
- /**
- * Reads an Int32BE value from the current read position or an optionally provided offset.
- *
- * @param offset { Number } The offset to read data from (optional)
- * @return { Number }
- */
- readInt32BE(offset) {
- return this._readNumberValue(Buffer.prototype.readInt32BE, 4, offset);
- }
- /**
- * Reads an Int32LE value from the current read position or an optionally provided offset.
- *
- * @param offset { Number } The offset to read data from (optional)
- * @return { Number }
- */
- readInt32LE(offset) {
- return this._readNumberValue(Buffer.prototype.readInt32LE, 4, offset);
- }
- /**
- * Reads a BigInt64BE value from the current read position or an optionally provided offset.
- *
- * @param offset { Number } The offset to read data from (optional)
- * @return { BigInt }
- */
- readBigInt64BE(offset) {
- utils_1.bigIntAndBufferInt64Check('readBigInt64BE');
- return this._readNumberValue(Buffer.prototype.readBigInt64BE, 8, offset);
- }
- /**
- * Reads a BigInt64LE value from the current read position or an optionally provided offset.
- *
- * @param offset { Number } The offset to read data from (optional)
- * @return { BigInt }
- */
- readBigInt64LE(offset) {
- utils_1.bigIntAndBufferInt64Check('readBigInt64LE');
- return this._readNumberValue(Buffer.prototype.readBigInt64LE, 8, offset);
- }
- /**
- * Writes an Int8 value to the current write position (or at optional offset).
- *
- * @param value { Number } The value to write.
- * @param offset { Number } The offset to write the value at.
- *
- * @return this
- */
- writeInt8(value, offset) {
- this._writeNumberValue(Buffer.prototype.writeInt8, 1, value, offset);
- return this;
- }
- /**
- * Inserts an Int8 value at the given offset value.
- *
- * @param value { Number } The value to insert.
- * @param offset { Number } The offset to insert the value at.
- *
- * @return this
- */
- insertInt8(value, offset) {
- return this._insertNumberValue(Buffer.prototype.writeInt8, 1, value, offset);
- }
- /**
- * Writes an Int16BE value to the current write position (or at optional offset).
- *
- * @param value { Number } The value to write.
- * @param offset { Number } The offset to write the value at.
- *
- * @return this
- */
- writeInt16BE(value, offset) {
- return this._writeNumberValue(Buffer.prototype.writeInt16BE, 2, value, offset);
- }
- /**
- * Inserts an Int16BE value at the given offset value.
- *
- * @param value { Number } The value to insert.
- * @param offset { Number } The offset to insert the value at.
- *
- * @return this
- */
- insertInt16BE(value, offset) {
- return this._insertNumberValue(Buffer.prototype.writeInt16BE, 2, value, offset);
- }
- /**
- * Writes an Int16LE value to the current write position (or at optional offset).
- *
- * @param value { Number } The value to write.
- * @param offset { Number } The offset to write the value at.
- *
- * @return this
- */
- writeInt16LE(value, offset) {
- return this._writeNumberValue(Buffer.prototype.writeInt16LE, 2, value, offset);
- }
- /**
- * Inserts an Int16LE value at the given offset value.
- *
- * @param value { Number } The value to insert.
- * @param offset { Number } The offset to insert the value at.
- *
- * @return this
- */
- insertInt16LE(value, offset) {
- return this._insertNumberValue(Buffer.prototype.writeInt16LE, 2, value, offset);
- }
- /**
- * Writes an Int32BE value to the current write position (or at optional offset).
- *
- * @param value { Number } The value to write.
- * @param offset { Number } The offset to write the value at.
- *
- * @return this
- */
- writeInt32BE(value, offset) {
- return this._writeNumberValue(Buffer.prototype.writeInt32BE, 4, value, offset);
- }
- /**
- * Inserts an Int32BE value at the given offset value.
- *
- * @param value { Number } The value to insert.
- * @param offset { Number } The offset to insert the value at.
- *
- * @return this
- */
- insertInt32BE(value, offset) {
- return this._insertNumberValue(Buffer.prototype.writeInt32BE, 4, value, offset);
- }
- /**
- * Writes an Int32LE value to the current write position (or at optional offset).
- *
- * @param value { Number } The value to write.
- * @param offset { Number } The offset to write the value at.
- *
- * @return this
- */
- writeInt32LE(value, offset) {
- return this._writeNumberValue(Buffer.prototype.writeInt32LE, 4, value, offset);
- }
- /**
- * Inserts an Int32LE value at the given offset value.
- *
- * @param value { Number } The value to insert.
- * @param offset { Number } The offset to insert the value at.
- *
- * @return this
- */
- insertInt32LE(value, offset) {
- return this._insertNumberValue(Buffer.prototype.writeInt32LE, 4, value, offset);
- }
- /**
- * Writes a BigInt64BE value to the current write position (or at optional offset).
- *
- * @param value { BigInt } The value to write.
- * @param offset { Number } The offset to write the value at.
- *
- * @return this
- */
- writeBigInt64BE(value, offset) {
- utils_1.bigIntAndBufferInt64Check('writeBigInt64BE');
- return this._writeNumberValue(Buffer.prototype.writeBigInt64BE, 8, value, offset);
- }
- /**
- * Inserts a BigInt64BE value at the given offset value.
- *
- * @param value { BigInt } The value to insert.
- * @param offset { Number } The offset to insert the value at.
- *
- * @return this
- */
- insertBigInt64BE(value, offset) {
- utils_1.bigIntAndBufferInt64Check('writeBigInt64BE');
- return this._insertNumberValue(Buffer.prototype.writeBigInt64BE, 8, value, offset);
- }
- /**
- * Writes a BigInt64LE value to the current write position (or at optional offset).
- *
- * @param value { BigInt } The value to write.
- * @param offset { Number } The offset to write the value at.
- *
- * @return this
- */
- writeBigInt64LE(value, offset) {
- utils_1.bigIntAndBufferInt64Check('writeBigInt64LE');
- return this._writeNumberValue(Buffer.prototype.writeBigInt64LE, 8, value, offset);
- }
- /**
- * Inserts a Int64LE value at the given offset value.
- *
- * @param value { BigInt } The value to insert.
- * @param offset { Number } The offset to insert the value at.
- *
- * @return this
- */
- insertBigInt64LE(value, offset) {
- utils_1.bigIntAndBufferInt64Check('writeBigInt64LE');
- return this._insertNumberValue(Buffer.prototype.writeBigInt64LE, 8, value, offset);
- }
- // Unsigned Integers
- /**
- * Reads an UInt8 value from the current read position or an optionally provided offset.
- *
- * @param offset { Number } The offset to read data from (optional)
- * @return { Number }
- */
- readUInt8(offset) {
- return this._readNumberValue(Buffer.prototype.readUInt8, 1, offset);
- }
- /**
- * Reads an UInt16BE value from the current read position or an optionally provided offset.
- *
- * @param offset { Number } The offset to read data from (optional)
- * @return { Number }
- */
- readUInt16BE(offset) {
- return this._readNumberValue(Buffer.prototype.readUInt16BE, 2, offset);
- }
- /**
- * Reads an UInt16LE value from the current read position or an optionally provided offset.
- *
- * @param offset { Number } The offset to read data from (optional)
- * @return { Number }
- */
- readUInt16LE(offset) {
- return this._readNumberValue(Buffer.prototype.readUInt16LE, 2, offset);
- }
- /**
- * Reads an UInt32BE value from the current read position or an optionally provided offset.
- *
- * @param offset { Number } The offset to read data from (optional)
- * @return { Number }
- */
- readUInt32BE(offset) {
- return this._readNumberValue(Buffer.prototype.readUInt32BE, 4, offset);
- }
- /**
- * Reads an UInt32LE value from the current read position or an optionally provided offset.
- *
- * @param offset { Number } The offset to read data from (optional)
- * @return { Number }
- */
- readUInt32LE(offset) {
- return this._readNumberValue(Buffer.prototype.readUInt32LE, 4, offset);
- }
- /**
- * Reads a BigUInt64BE value from the current read position or an optionally provided offset.
- *
- * @param offset { Number } The offset to read data from (optional)
- * @return { BigInt }
- */
- readBigUInt64BE(offset) {
- utils_1.bigIntAndBufferInt64Check('readBigUInt64BE');
- return this._readNumberValue(Buffer.prototype.readBigUInt64BE, 8, offset);
- }
- /**
- * Reads a BigUInt64LE value from the current read position or an optionally provided offset.
- *
- * @param offset { Number } The offset to read data from (optional)
- * @return { BigInt }
- */
- readBigUInt64LE(offset) {
- utils_1.bigIntAndBufferInt64Check('readBigUInt64LE');
- return this._readNumberValue(Buffer.prototype.readBigUInt64LE, 8, offset);
- }
- /**
- * Writes an UInt8 value to the current write position (or at optional offset).
- *
- * @param value { Number } The value to write.
- * @param offset { Number } The offset to write the value at.
- *
- * @return this
- */
- writeUInt8(value, offset) {
- return this._writeNumberValue(Buffer.prototype.writeUInt8, 1, value, offset);
- }
- /**
- * Inserts an UInt8 value at the given offset value.
- *
- * @param value { Number } The value to insert.
- * @param offset { Number } The offset to insert the value at.
- *
- * @return this
- */
- insertUInt8(value, offset) {
- return this._insertNumberValue(Buffer.prototype.writeUInt8, 1, value, offset);
- }
- /**
- * Writes an UInt16BE value to the current write position (or at optional offset).
- *
- * @param value { Number } The value to write.
- * @param offset { Number } The offset to write the value at.
- *
- * @return this
- */
- writeUInt16BE(value, offset) {
- return this._writeNumberValue(Buffer.prototype.writeUInt16BE, 2, value, offset);
- }
- /**
- * Inserts an UInt16BE value at the given offset value.
- *
- * @param value { Number } The value to insert.
- * @param offset { Number } The offset to insert the value at.
- *
- * @return this
- */
- insertUInt16BE(value, offset) {
- return this._insertNumberValue(Buffer.prototype.writeUInt16BE, 2, value, offset);
- }
- /**
- * Writes an UInt16LE value to the current write position (or at optional offset).
- *
- * @param value { Number } The value to write.
- * @param offset { Number } The offset to write the value at.
- *
- * @return this
- */
- writeUInt16LE(value, offset) {
- return this._writeNumberValue(Buffer.prototype.writeUInt16LE, 2, value, offset);
- }
- /**
- * Inserts an UInt16LE value at the given offset value.
- *
- * @param value { Number } The value to insert.
- * @param offset { Number } The offset to insert the value at.
- *
- * @return this
- */
- insertUInt16LE(value, offset) {
- return this._insertNumberValue(Buffer.prototype.writeUInt16LE, 2, value, offset);
- }
- /**
- * Writes an UInt32BE value to the current write position (or at optional offset).
- *
- * @param value { Number } The value to write.
- * @param offset { Number } The offset to write the value at.
- *
- * @return this
- */
- writeUInt32BE(value, offset) {
- return this._writeNumberValue(Buffer.prototype.writeUInt32BE, 4, value, offset);
- }
- /**
- * Inserts an UInt32BE value at the given offset value.
- *
- * @param value { Number } The value to insert.
- * @param offset { Number } The offset to insert the value at.
- *
- * @return this
- */
- insertUInt32BE(value, offset) {
- return this._insertNumberValue(Buffer.prototype.writeUInt32BE, 4, value, offset);
- }
- /**
- * Writes an UInt32LE value to the current write position (or at optional offset).
- *
- * @param value { Number } The value to write.
- * @param offset { Number } The offset to write the value at.
- *
- * @return this
- */
- writeUInt32LE(value, offset) {
- return this._writeNumberValue(Buffer.prototype.writeUInt32LE, 4, value, offset);
- }
- /**
- * Inserts an UInt32LE value at the given offset value.
- *
- * @param value { Number } The value to insert.
- * @param offset { Number } The offset to insert the value at.
- *
- * @return this
- */
- insertUInt32LE(value, offset) {
- return this._insertNumberValue(Buffer.prototype.writeUInt32LE, 4, value, offset);
- }
- /**
- * Writes a BigUInt64BE value to the current write position (or at optional offset).
- *
- * @param value { Number } The value to write.
- * @param offset { Number } The offset to write the value at.
- *
- * @return this
- */
- writeBigUInt64BE(value, offset) {
- utils_1.bigIntAndBufferInt64Check('writeBigUInt64BE');
- return this._writeNumberValue(Buffer.prototype.writeBigUInt64BE, 8, value, offset);
- }
- /**
- * Inserts a BigUInt64BE value at the given offset value.
- *
- * @param value { Number } The value to insert.
- * @param offset { Number } The offset to insert the value at.
- *
- * @return this
- */
- insertBigUInt64BE(value, offset) {
- utils_1.bigIntAndBufferInt64Check('writeBigUInt64BE');
- return this._insertNumberValue(Buffer.prototype.writeBigUInt64BE, 8, value, offset);
- }
- /**
- * Writes a BigUInt64LE value to the current write position (or at optional offset).
- *
- * @param value { Number } The value to write.
- * @param offset { Number } The offset to write the value at.
- *
- * @return this
- */
- writeBigUInt64LE(value, offset) {
- utils_1.bigIntAndBufferInt64Check('writeBigUInt64LE');
- return this._writeNumberValue(Buffer.prototype.writeBigUInt64LE, 8, value, offset);
- }
- /**
- * Inserts a BigUInt64LE value at the given offset value.
- *
- * @param value { Number } The value to insert.
- * @param offset { Number } The offset to insert the value at.
- *
- * @return this
- */
- insertBigUInt64LE(value, offset) {
- utils_1.bigIntAndBufferInt64Check('writeBigUInt64LE');
- return this._insertNumberValue(Buffer.prototype.writeBigUInt64LE, 8, value, offset);
- }
- // Floating Point
- /**
- * Reads an FloatBE value from the current read position or an optionally provided offset.
- *
- * @param offset { Number } The offset to read data from (optional)
- * @return { Number }
- */
- readFloatBE(offset) {
- return this._readNumberValue(Buffer.prototype.readFloatBE, 4, offset);
- }
- /**
- * Reads an FloatLE value from the current read position or an optionally provided offset.
- *
- * @param offset { Number } The offset to read data from (optional)
- * @return { Number }
- */
- readFloatLE(offset) {
- return this._readNumberValue(Buffer.prototype.readFloatLE, 4, offset);
- }
- /**
- * Writes a FloatBE value to the current write position (or at optional offset).
- *
- * @param value { Number } The value to write.
- * @param offset { Number } The offset to write the value at.
- *
- * @return this
- */
- writeFloatBE(value, offset) {
- return this._writeNumberValue(Buffer.prototype.writeFloatBE, 4, value, offset);
- }
- /**
- * Inserts a FloatBE value at the given offset value.
- *
- * @param value { Number } The value to insert.
- * @param offset { Number } The offset to insert the value at.
- *
- * @return this
- */
- insertFloatBE(value, offset) {
- return this._insertNumberValue(Buffer.prototype.writeFloatBE, 4, value, offset);
- }
- /**
- * Writes a FloatLE value to the current write position (or at optional offset).
- *
- * @param value { Number } The value to write.
- * @param offset { Number } The offset to write the value at.
- *
- * @return this
- */
- writeFloatLE(value, offset) {
- return this._writeNumberValue(Buffer.prototype.writeFloatLE, 4, value, offset);
- }
- /**
- * Inserts a FloatLE value at the given offset value.
- *
- * @param value { Number } The value to insert.
- * @param offset { Number } The offset to insert the value at.
- *
- * @return this
- */
- insertFloatLE(value, offset) {
- return this._insertNumberValue(Buffer.prototype.writeFloatLE, 4, value, offset);
- }
- // Double Floating Point
- /**
- * Reads an DoublEBE value from the current read position or an optionally provided offset.
- *
- * @param offset { Number } The offset to read data from (optional)
- * @return { Number }
- */
- readDoubleBE(offset) {
- return this._readNumberValue(Buffer.prototype.readDoubleBE, 8, offset);
- }
- /**
- * Reads an DoubleLE value from the current read position or an optionally provided offset.
- *
- * @param offset { Number } The offset to read data from (optional)
- * @return { Number }
- */
- readDoubleLE(offset) {
- return this._readNumberValue(Buffer.prototype.readDoubleLE, 8, offset);
- }
- /**
- * Writes a DoubleBE value to the current write position (or at optional offset).
- *
- * @param value { Number } The value to write.
- * @param offset { Number } The offset to write the value at.
- *
- * @return this
- */
- writeDoubleBE(value, offset) {
- return this._writeNumberValue(Buffer.prototype.writeDoubleBE, 8, value, offset);
- }
- /**
- * Inserts a DoubleBE value at the given offset value.
- *
- * @param value { Number } The value to insert.
- * @param offset { Number } The offset to insert the value at.
- *
- * @return this
- */
- insertDoubleBE(value, offset) {
- return this._insertNumberValue(Buffer.prototype.writeDoubleBE, 8, value, offset);
- }
- /**
- * Writes a DoubleLE value to the current write position (or at optional offset).
- *
- * @param value { Number } The value to write.
- * @param offset { Number } The offset to write the value at.
- *
- * @return this
- */
- writeDoubleLE(value, offset) {
- return this._writeNumberValue(Buffer.prototype.writeDoubleLE, 8, value, offset);
- }
- /**
- * Inserts a DoubleLE value at the given offset value.
- *
- * @param value { Number } The value to insert.
- * @param offset { Number } The offset to insert the value at.
- *
- * @return this
- */
- insertDoubleLE(value, offset) {
- return this._insertNumberValue(Buffer.prototype.writeDoubleLE, 8, value, offset);
- }
- // Strings
- /**
- * Reads a String from the current read position.
- *
- * @param arg1 { Number | String } The number of bytes to read as a String, or the BufferEncoding to use for
- * the string (Defaults to instance level encoding).
- * @param encoding { String } The BufferEncoding to use for the string (Defaults to instance level encoding).
- *
- * @return { String }
- */
- readString(arg1, encoding) {
- let lengthVal;
- // Length provided
- if (typeof arg1 === 'number') {
- utils_1.checkLengthValue(arg1);
- lengthVal = Math.min(arg1, this.length - this._readOffset);
- }
- else {
- encoding = arg1;
- lengthVal = this.length - this._readOffset;
- }
- // Check encoding
- if (typeof encoding !== 'undefined') {
- utils_1.checkEncoding(encoding);
- }
- const value = this._buff.slice(this._readOffset, this._readOffset + lengthVal).toString(encoding || this._encoding);
- this._readOffset += lengthVal;
- return value;
- }
- /**
- * Inserts a String
- *
- * @param value { String } The String value to insert.
- * @param offset { Number } The offset to insert the string at.
- * @param encoding { String } The BufferEncoding to use for writing strings (defaults to instance encoding).
- *
- * @return this
- */
- insertString(value, offset, encoding) {
- utils_1.checkOffsetValue(offset);
- return this._handleString(value, true, offset, encoding);
- }
- /**
- * Writes a String
- *
- * @param value { String } The String value to write.
- * @param arg2 { Number | String } The offset to write the string at, or the BufferEncoding to use.
- * @param encoding { String } The BufferEncoding to use for writing strings (defaults to instance encoding).
- *
- * @return this
- */
- writeString(value, arg2, encoding) {
- return this._handleString(value, false, arg2, encoding);
- }
- /**
- * Reads a null-terminated String from the current read position.
- *
- * @param encoding { String } The BufferEncoding to use for the string (Defaults to instance level encoding).
- *
- * @return { String }
- */
- readStringNT(encoding) {
- if (typeof encoding !== 'undefined') {
- utils_1.checkEncoding(encoding);
- }
- // Set null character position to the end SmartBuffer instance.
- let nullPos = this.length;
- // Find next null character (if one is not found, default from above is used)
- for (let i = this._readOffset; i < this.length; i++) {
- if (this._buff[i] === 0x00) {
- nullPos = i;
- break;
- }
- }
- // Read string value
- const value = this._buff.slice(this._readOffset, nullPos);
- // Increment internal Buffer read offset
- this._readOffset = nullPos + 1;
- return value.toString(encoding || this._encoding);
- }
- /**
- * Inserts a null-terminated String.
- *
- * @param value { String } The String value to write.
- * @param arg2 { Number | String } The offset to write the string to, or the BufferEncoding to use.
- * @param encoding { String } The BufferEncoding to use for writing strings (defaults to instance encoding).
- *
- * @return this
- */
- insertStringNT(value, offset, encoding) {
- utils_1.checkOffsetValue(offset);
- // Write Values
- this.insertString(value, offset, encoding);
- this.insertUInt8(0x00, offset + value.length);
- return this;
- }
- /**
- * Writes a null-terminated String.
- *
- * @param value { String } The String value to write.
- * @param arg2 { Number | String } The offset to write the string to, or the BufferEncoding to use.
- * @param encoding { String } The BufferEncoding to use for writing strings (defaults to instance encoding).
- *
- * @return this
- */
- writeStringNT(value, arg2, encoding) {
- // Write Values
- this.writeString(value, arg2, encoding);
- this.writeUInt8(0x00, typeof arg2 === 'number' ? arg2 + value.length : this.writeOffset);
- return this;
- }
- // Buffers
- /**
- * Reads a Buffer from the internal read position.
- *
- * @param length { Number } The length of data to read as a Buffer.
- *
- * @return { Buffer }
- */
- readBuffer(length) {
- if (typeof length !== 'undefined') {
- utils_1.checkLengthValue(length);
- }
- const lengthVal = typeof length === 'number' ? length : this.length;
- const endPoint = Math.min(this.length, this._readOffset + lengthVal);
- // Read buffer value
- const value = this._buff.slice(this._readOffset, endPoint);
- // Increment internal Buffer read offset
- this._readOffset = endPoint;
- return value;
- }
- /**
- * Writes a Buffer to the current write position.
- *
- * @param value { Buffer } The Buffer to write.
- * @param offset { Number } The offset to write the Buffer to.
- *
- * @return this
- */
- insertBuffer(value, offset) {
- utils_1.checkOffsetValue(offset);
- return this._handleBuffer(value, true, offset);
- }
- /**
- * Writes a Buffer to the current write position.
- *
- * @param value { Buffer } The Buffer to write.
- * @param offset { Number } The offset to write the Buffer to.
- *
- * @return this
- */
- writeBuffer(value, offset) {
- return this._handleBuffer(value, false, offset);
- }
- /**
- * Reads a null-terminated Buffer from the current read poisiton.
- *
- * @return { Buffer }
- */
- readBufferNT() {
- // Set null character position to the end SmartBuffer instance.
- let nullPos = this.length;
- // Find next null character (if one is not found, default from above is used)
- for (let i = this._readOffset; i < this.length; i++) {
- if (this._buff[i] === 0x00) {
- nullPos = i;
- break;
- }
- }
- // Read value
- const value = this._buff.slice(this._readOffset, nullPos);
- // Increment internal Buffer read offset
- this._readOffset = nullPos + 1;
- return value;
- }
- /**
- * Inserts a null-terminated Buffer.
- *
- * @param value { Buffer } The Buffer to write.
- * @param offset { Number } The offset to write the Buffer to.
- *
- * @return this
- */
- insertBufferNT(value, offset) {
- utils_1.checkOffsetValue(offset);
- // Write Values
- this.insertBuffer(value, offset);
- this.insertUInt8(0x00, offset + value.length);
- return this;
- }
- /**
- * Writes a null-terminated Buffer.
- *
- * @param value { Buffer } The Buffer to write.
- * @param offset { Number } The offset to write the Buffer to.
- *
- * @return this
- */
- writeBufferNT(value, offset) {
- // Checks for valid numberic value;
- if (typeof offset !== 'undefined') {
- utils_1.checkOffsetValue(offset);
- }
- // Write Values
- this.writeBuffer(value, offset);
- this.writeUInt8(0x00, typeof offset === 'number' ? offset + value.length : this._writeOffset);
- return this;
- }
- /**
- * Clears the SmartBuffer instance to its original empty state.
- */
- clear() {
- this._writeOffset = 0;
- this._readOffset = 0;
- this.length = 0;
- return this;
- }
- /**
- * Gets the remaining data left to be read from the SmartBuffer instance.
- *
- * @return { Number }
- */
- remaining() {
- return this.length - this._readOffset;
- }
- /**
- * Gets the current read offset value of the SmartBuffer instance.
- *
- * @return { Number }
- */
- get readOffset() {
- return this._readOffset;
- }
- /**
- * Sets the read offset value of the SmartBuffer instance.
- *
- * @param offset { Number } - The offset value to set.
- */
- set readOffset(offset) {
- utils_1.checkOffsetValue(offset);
- // Check for bounds.
- utils_1.checkTargetOffset(offset, this);
- this._readOffset = offset;
- }
- /**
- * Gets the current write offset value of the SmartBuffer instance.
- *
- * @return { Number }
- */
- get writeOffset() {
- return this._writeOffset;
- }
- /**
- * Sets the write offset value of the SmartBuffer instance.
- *
- * @param offset { Number } - The offset value to set.
- */
- set writeOffset(offset) {
- utils_1.checkOffsetValue(offset);
- // Check for bounds.
- utils_1.checkTargetOffset(offset, this);
- this._writeOffset = offset;
- }
- /**
- * Gets the currently set string encoding of the SmartBuffer instance.
- *
- * @return { BufferEncoding } The string Buffer encoding currently set.
- */
- get encoding() {
- return this._encoding;
- }
- /**
- * Sets the string encoding of the SmartBuffer instance.
- *
- * @param encoding { BufferEncoding } The string Buffer encoding to set.
- */
- set encoding(encoding) {
- utils_1.checkEncoding(encoding);
- this._encoding = encoding;
- }
- /**
- * Gets the underlying internal Buffer. (This includes unmanaged data in the Buffer)
- *
- * @return { Buffer } The Buffer value.
- */
- get internalBuffer() {
- return this._buff;
- }
- /**
- * Gets the value of the internal managed Buffer (Includes managed data only)
- *
- * @param { Buffer }
- */
- toBuffer() {
- return this._buff.slice(0, this.length);
- }
- /**
- * Gets the String value of the internal managed Buffer
- *
- * @param encoding { String } The BufferEncoding to display the Buffer as (defaults to instance level encoding).
- */
- toString(encoding) {
- const encodingVal = typeof encoding === 'string' ? encoding : this._encoding;
- // Check for invalid encoding.
- utils_1.checkEncoding(encodingVal);
- return this._buff.toString(encodingVal, 0, this.length);
- }
- /**
- * Destroys the SmartBuffer instance.
- */
- destroy() {
- this.clear();
- return this;
- }
- /**
- * Handles inserting and writing strings.
- *
- * @param value { String } The String value to insert.
- * @param isInsert { Boolean } True if inserting a string, false if writing.
- * @param arg2 { Number | String } The offset to insert the string at, or the BufferEncoding to use.
- * @param encoding { String } The BufferEncoding to use for writing strings (defaults to instance encoding).
- */
- _handleString(value, isInsert, arg3, encoding) {
- let offsetVal = this._writeOffset;
- let encodingVal = this._encoding;
- // Check for offset
- if (typeof arg3 === 'number') {
- offsetVal = arg3;
- // Check for encoding
- }
- else if (typeof arg3 === 'string') {
- utils_1.checkEncoding(arg3);
- encodingVal = arg3;
- }
- // Check for encoding (third param)
- if (typeof encoding === 'string') {
- utils_1.checkEncoding(encoding);
- encodingVal = encoding;
- }
- // Calculate bytelength of string.
- const byteLength = Buffer.byteLength(value, encodingVal);
- // Ensure there is enough internal Buffer capacity.
- if (isInsert) {
- this.ensureInsertable(byteLength, offsetVal);
- }
- else {
- this._ensureWriteable(byteLength, offsetVal);
- }
- // Write value
- this._buff.write(value, offsetVal, byteLength, encodingVal);
- // Increment internal Buffer write offset;
- if (isInsert) {
- this._writeOffset += byteLength;
- }
- else {
- // If an offset was given, check to see if we wrote beyond the current writeOffset.
- if (typeof arg3 === 'number') {
- this._writeOffset = Math.max(this._writeOffset, offsetVal + byteLength);
- }
- else {
- // If no offset was given, we wrote to the end of the SmartBuffer so increment writeOffset.
- this._writeOffset += byteLength;
- }
- }
- return this;
- }
- /**
- * Handles writing or insert of a Buffer.
- *
- * @param value { Buffer } The Buffer to write.
- * @param offset { Number } The offset to write the Buffer to.
- */
- _handleBuffer(value, isInsert, offset) {
- const offsetVal = typeof offset === 'number' ? offset : this._writeOffset;
- // Ensure there is enough internal Buffer capacity.
- if (isInsert) {
- this.ensureInsertable(value.length, offsetVal);
- }
- else {
- this._ensureWriteable(value.length, offsetVal);
- }
- // Write buffer value
- value.copy(this._buff, offsetVal);
- // Increment internal Buffer write offset;
- if (isInsert) {
- this._writeOffset += value.length;
- }
- else {
- // If an offset was given, check to see if we wrote beyond the current writeOffset.
- if (typeof offset === 'number') {
- this._writeOffset = Math.max(this._writeOffset, offsetVal + value.length);
- }
- else {
- // If no offset was given, we wrote to the end of the SmartBuffer so increment writeOffset.
- this._writeOffset += value.length;
- }
- }
- return this;
- }
- /**
- * Ensures that the internal Buffer is large enough to read data.
- *
- * @param length { Number } The length of the data that needs to be read.
- * @param offset { Number } The offset of the data that needs to be read.
- */
- ensureReadable(length, offset) {
- // Offset value defaults to managed read offset.
- let offsetVal = this._readOffset;
- // If an offset was provided, use it.
- if (typeof offset !== 'undefined') {
- // Checks for valid numberic value;
- utils_1.checkOffsetValue(offset);
- // Overide with custom offset.
- offsetVal = offset;
- }
- // Checks if offset is below zero, or the offset+length offset is beyond the total length of the managed data.
- if (offsetVal < 0 || offsetVal + length > this.length) {
- throw new Error(utils_1.ERRORS.INVALID_READ_BEYOND_BOUNDS);
- }
- }
- /**
- * Ensures that the internal Buffer is large enough to insert data.
- *
- * @param dataLength { Number } The length of the data that needs to be written.
- * @param offset { Number } The offset of the data to be written.
- */
- ensureInsertable(dataLength, offset) {
- // Checks for valid numberic value;
- utils_1.checkOffsetValue(offset);
- // Ensure there is enough internal Buffer capacity.
- this._ensureCapacity(this.length + dataLength);
- // If an offset was provided and its not the very end of the buffer, copy data into appropriate location in regards to the offset.
- if (offset < this.length) {
- this._buff.copy(this._buff, offset + dataLength, offset, this._buff.length);
- }
- // Adjust tracked smart buffer length
- if (offset + dataLength > this.length) {
- this.length = offset + dataLength;
- }
- else {
- this.length += dataLength;
- }
- }
- /**
- * Ensures that the internal Buffer is large enough to write data.
- *
- * @param dataLength { Number } The length of the data that needs to be written.
- * @param offset { Number } The offset of the data to be written (defaults to writeOffset).
- */
- _ensureWriteable(dataLength, offset) {
- const offsetVal = typeof offset === 'number' ? offset : this._writeOffset;
- // Ensure enough capacity to write data.
- this._ensureCapacity(offsetVal + dataLength);
- // Adjust SmartBuffer length (if offset + length is larger than managed length, adjust length)
- if (offsetVal + dataLength > this.length) {
- this.length = offsetVal + dataLength;
- }
- }
- /**
- * Ensures that the internal Buffer is large enough to write at least the given amount of data.
- *
- * @param minLength { Number } The minimum length of the data needs to be written.
- */
- _ensureCapacity(minLength) {
- const oldLength = this._buff.length;
- if (minLength > oldLength) {
- let data = this._buff;
- let newLength = (oldLength * 3) / 2 + 1;
- if (newLength < minLength) {
- newLength = minLength;
- }
- this._buff = Buffer.allocUnsafe(newLength);
- data.copy(this._buff, 0, 0, oldLength);
- }
- }
- /**
- * Reads a numeric number value using the provided function.
- *
- * @typeparam T { number | bigint } The type of the value to be read
- *
- * @param func { Function(offset: number) => number } The function to read data on the internal Buffer with.
- * @param byteSize { Number } The number of bytes read.
- * @param offset { Number } The offset to read from (optional). When this is not provided, the managed readOffset is used instead.
- *
- * @returns { T } the number value
- */
- _readNumberValue(func, byteSize, offset) {
- this.ensureReadable(byteSize, offset);
- // Call Buffer.readXXXX();
- const value = func.call(this._buff, typeof offset === 'number' ? offset : this._readOffset);
- // Adjust internal read offset if an optional read offset was not provided.
- if (typeof offset === 'undefined') {
- this._readOffset += byteSize;
- }
- return value;
- }
- /**
- * Inserts a numeric number value based on the given offset and value.
- *
- * @typeparam T { number | bigint } The type of the value to be written
- *
- * @param func { Function(offset: T, offset?) => number} The function to write data on the internal Buffer with.
- * @param byteSize { Number } The number of bytes written.
- * @param value { T } The number value to write.
- * @param offset { Number } the offset to write the number at (REQUIRED).
- *
- * @returns SmartBuffer this buffer
- */
- _insertNumberValue(func, byteSize, value, offset) {
- // Check for invalid offset values.
- utils_1.checkOffsetValue(offset);
- // Ensure there is enough internal Buffer capacity. (raw offset is passed)
- this.ensureInsertable(byteSize, offset);
- // Call buffer.writeXXXX();
- func.call(this._buff, value, offset);
- // Adjusts internally managed write offset.
- this._writeOffset += byteSize;
- return this;
- }
- /**
- * Writes a numeric number value based on the given offset and value.
- *
- * @typeparam T { number | bigint } The type of the value to be written
- *
- * @param func { Function(offset: T, offset?) => number} The function to write data on the internal Buffer with.
- * @param byteSize { Number } The number of bytes written.
- * @param value { T } The number value to write.
- * @param offset { Number } the offset to write the number at (REQUIRED).
- *
- * @returns SmartBuffer this buffer
- */
- _writeNumberValue(func, byteSize, value, offset) {
- // If an offset was provided, validate it.
- if (typeof offset === 'number') {
- // Check if we're writing beyond the bounds of the managed data.
- if (offset < 0) {
- throw new Error(utils_1.ERRORS.INVALID_WRITE_BEYOND_BOUNDS);
- }
- utils_1.checkOffsetValue(offset);
- }
- // Default to writeOffset if no offset value was given.
- const offsetVal = typeof offset === 'number' ? offset : this._writeOffset;
- // Ensure there is enough internal Buffer capacity. (raw offset is passed)
- this._ensureWriteable(byteSize, offsetVal);
- func.call(this._buff, value, offsetVal);
- // If an offset was given, check to see if we wrote beyond the current writeOffset.
- if (typeof offset === 'number') {
- this._writeOffset = Math.max(this._writeOffset, offsetVal + byteSize);
- }
- else {
- // If no numeric offset was given, we wrote to the end of the SmartBuffer so increment writeOffset.
- this._writeOffset += byteSize;
- }
- return this;
- }
-}
-exports.SmartBuffer = SmartBuffer;
-//# sourceMappingURL=smartbuffer.js.map
-
-/***/ }),
-
-/***/ 11725:
-/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
-
-"use strict";
-
-Object.defineProperty(exports, "__esModule", ({ value: true }));
-const buffer_1 = __webpack_require__(20181);
-/**
- * Error strings
- */
-const ERRORS = {
- INVALID_ENCODING: 'Invalid encoding provided. Please specify a valid encoding the internal Node.js Buffer supports.',
- INVALID_SMARTBUFFER_SIZE: 'Invalid size provided. Size must be a valid integer greater than zero.',
- INVALID_SMARTBUFFER_BUFFER: 'Invalid Buffer provided in SmartBufferOptions.',
- INVALID_SMARTBUFFER_OBJECT: 'Invalid SmartBufferOptions object supplied to SmartBuffer constructor or factory methods.',
- INVALID_OFFSET: 'An invalid offset value was provided.',
- INVALID_OFFSET_NON_NUMBER: 'An invalid offset value was provided. A numeric value is required.',
- INVALID_LENGTH: 'An invalid length value was provided.',
- INVALID_LENGTH_NON_NUMBER: 'An invalid length value was provived. A numeric value is required.',
- INVALID_TARGET_OFFSET: 'Target offset is beyond the bounds of the internal SmartBuffer data.',
- INVALID_TARGET_LENGTH: 'Specified length value moves cursor beyong the bounds of the internal SmartBuffer data.',
- INVALID_READ_BEYOND_BOUNDS: 'Attempted to read beyond the bounds of the managed data.',
- INVALID_WRITE_BEYOND_BOUNDS: 'Attempted to write beyond the bounds of the managed data.'
-};
-exports.ERRORS = ERRORS;
-/**
- * Checks if a given encoding is a valid Buffer encoding. (Throws an exception if check fails)
- *
- * @param { String } encoding The encoding string to check.
- */
-function checkEncoding(encoding) {
- if (!buffer_1.Buffer.isEncoding(encoding)) {
- throw new Error(ERRORS.INVALID_ENCODING);
- }
-}
-exports.checkEncoding = checkEncoding;
-/**
- * Checks if a given number is a finite integer. (Throws an exception if check fails)
- *
- * @param { Number } value The number value to check.
- */
-function isFiniteInteger(value) {
- return typeof value === 'number' && isFinite(value) && isInteger(value);
-}
-exports.isFiniteInteger = isFiniteInteger;
-/**
- * Checks if an offset/length value is valid. (Throws an exception if check fails)
- *
- * @param value The value to check.
- * @param offset True if checking an offset, false if checking a length.
- */
-function checkOffsetOrLengthValue(value, offset) {
- if (typeof value === 'number') {
- // Check for non finite/non integers
- if (!isFiniteInteger(value) || value < 0) {
- throw new Error(offset ? ERRORS.INVALID_OFFSET : ERRORS.INVALID_LENGTH);
- }
- }
- else {
- throw new Error(offset ? ERRORS.INVALID_OFFSET_NON_NUMBER : ERRORS.INVALID_LENGTH_NON_NUMBER);
- }
-}
-/**
- * Checks if a length value is valid. (Throws an exception if check fails)
- *
- * @param { Number } length The value to check.
- */
-function checkLengthValue(length) {
- checkOffsetOrLengthValue(length, false);
-}
-exports.checkLengthValue = checkLengthValue;
-/**
- * Checks if a offset value is valid. (Throws an exception if check fails)
- *
- * @param { Number } offset The value to check.
- */
-function checkOffsetValue(offset) {
- checkOffsetOrLengthValue(offset, true);
-}
-exports.checkOffsetValue = checkOffsetValue;
-/**
- * Checks if a target offset value is out of bounds. (Throws an exception if check fails)
- *
- * @param { Number } offset The offset value to check.
- * @param { SmartBuffer } buff The SmartBuffer instance to check against.
- */
-function checkTargetOffset(offset, buff) {
- if (offset < 0 || offset > buff.length) {
- throw new Error(ERRORS.INVALID_TARGET_OFFSET);
- }
-}
-exports.checkTargetOffset = checkTargetOffset;
-/**
- * Determines whether a given number is a integer.
- * @param value The number to check.
- */
-function isInteger(value) {
- return typeof value === 'number' && isFinite(value) && Math.floor(value) === value;
-}
-/**
- * Throws if Node.js version is too low to support bigint
- */
-function bigIntAndBufferInt64Check(bufferMethod) {
- if (typeof BigInt === 'undefined') {
- throw new Error('Platform does not support JS BigInt type.');
- }
- if (typeof buffer_1.Buffer.prototype[bufferMethod] === 'undefined') {
- throw new Error(`Platform does not support Buffer.prototype.${bufferMethod}.`);
- }
-}
-exports.bigIntAndBufferInt64Check = bigIntAndBufferInt64Check;
-//# sourceMappingURL=utils.js.map
-
-/***/ }),
-
-/***/ 77128:
-/***/ (function(__unused_webpack_module, exports, __webpack_require__) {
-
-"use strict";
-
-var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
- if (k2 === undefined) k2 = k;
- var desc = Object.getOwnPropertyDescriptor(m, k);
- if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
- desc = { enumerable: true, get: function() { return m[k]; } };
- }
- Object.defineProperty(o, k2, desc);
-}) : (function(o, m, k, k2) {
- if (k2 === undefined) k2 = k;
- o[k2] = m[k];
-}));
-var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
- Object.defineProperty(o, "default", { enumerable: true, value: v });
-}) : function(o, v) {
- o["default"] = v;
-});
-var __importStar = (this && this.__importStar) || function (mod) {
- if (mod && mod.__esModule) return mod;
- var result = {};
- if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
- __setModuleDefault(result, mod);
- return result;
-};
-var __importDefault = (this && this.__importDefault) || function (mod) {
- return (mod && mod.__esModule) ? mod : { "default": mod };
-};
-Object.defineProperty(exports, "__esModule", ({ value: true }));
-exports.SocksProxyAgent = void 0;
-const socks_1 = __webpack_require__(65861);
-const agent_base_1 = __webpack_require__(20917);
-const debug_1 = __importDefault(__webpack_require__(45753));
-const dns = __importStar(__webpack_require__(72250));
-const net = __importStar(__webpack_require__(69278));
-const tls = __importStar(__webpack_require__(64756));
-const url_1 = __webpack_require__(87016);
-const debug = (0, debug_1.default)('socks-proxy-agent');
-function parseSocksURL(url) {
- let lookup = false;
- let type = 5;
- const host = url.hostname;
- // From RFC 1928, Section 3: https://tools.ietf.org/html/rfc1928#section-3
- // "The SOCKS service is conventionally located on TCP port 1080"
- const port = parseInt(url.port, 10) || 1080;
- // figure out if we want socks v4 or v5, based on the "protocol" used.
- // Defaults to 5.
- switch (url.protocol.replace(':', '')) {
- case 'socks4':
- lookup = true;
- type = 4;
- break;
- // pass through
- case 'socks4a':
- type = 4;
- break;
- case 'socks5':
- lookup = true;
- type = 5;
- break;
- // pass through
- case 'socks': // no version specified, default to 5h
- type = 5;
- break;
- case 'socks5h':
- type = 5;
- break;
- default:
- throw new TypeError(`A "socks" protocol must be specified! Got: ${String(url.protocol)}`);
- }
- const proxy = {
- host,
- port,
- type,
- };
- if (url.username) {
- Object.defineProperty(proxy, 'userId', {
- value: decodeURIComponent(url.username),
- enumerable: false,
- });
- }
- if (url.password != null) {
- Object.defineProperty(proxy, 'password', {
- value: decodeURIComponent(url.password),
- enumerable: false,
- });
- }
- return { lookup, proxy };
-}
-class SocksProxyAgent extends agent_base_1.Agent {
- constructor(uri, opts) {
- super(opts);
- const url = typeof uri === 'string' ? new url_1.URL(uri) : uri;
- const { proxy, lookup } = parseSocksURL(url);
- this.shouldLookup = lookup;
- this.proxy = proxy;
- this.timeout = opts?.timeout ?? null;
- this.socketOptions = opts?.socketOptions ?? null;
- }
- /**
- * Initiates a SOCKS connection to the specified SOCKS proxy server,
- * which in turn connects to the specified remote host and port.
- */
- async connect(req, opts) {
- const { shouldLookup, proxy, timeout } = this;
- if (!opts.host) {
- throw new Error('No `host` defined!');
- }
- let { host } = opts;
- const { port, lookup: lookupFn = dns.lookup } = opts;
- if (shouldLookup) {
- // Client-side DNS resolution for "4" and "5" socks proxy versions.
- host = await new Promise((resolve, reject) => {
- // Use the request's custom lookup, if one was configured:
- lookupFn(host, {}, (err, res) => {
- if (err) {
- reject(err);
- }
- else {
- resolve(res);
- }
- });
- });
- }
- const socksOpts = {
- proxy,
- destination: {
- host,
- port: typeof port === 'number' ? port : parseInt(port, 10),
- },
- command: 'connect',
- timeout: timeout ?? undefined,
- // @ts-expect-error the type supplied by socks for socket_options is wider
- // than necessary since socks will always override the host and port
- socket_options: this.socketOptions ?? undefined,
- };
- const cleanup = (tlsSocket) => {
- req.destroy();
- socket.destroy();
- if (tlsSocket)
- tlsSocket.destroy();
- };
- debug('Creating socks proxy connection: %o', socksOpts);
- const { socket } = await socks_1.SocksClient.createConnection(socksOpts);
- debug('Successfully created socks proxy connection');
- if (timeout !== null) {
- socket.setTimeout(timeout);
- socket.on('timeout', () => cleanup());
- }
- if (opts.secureEndpoint) {
- // The proxy is connecting to a TLS server, so upgrade
- // this socket connection to a TLS connection.
- debug('Upgrading socket connection to TLS');
- const servername = opts.servername || opts.host;
- const tlsSocket = tls.connect({
- ...omit(opts, 'host', 'path', 'port'),
- socket,
- servername: net.isIP(servername) ? undefined : servername,
- });
- tlsSocket.once('error', (error) => {
- debug('Socket TLS error', error.message);
- cleanup(tlsSocket);
- });
- return tlsSocket;
- }
- return socket;
- }
-}
-SocksProxyAgent.protocols = [
- 'socks',
- 'socks4',
- 'socks4a',
- 'socks5',
- 'socks5h',
-];
-exports.SocksProxyAgent = SocksProxyAgent;
-function omit(obj, ...keys) {
- const ret = {};
- let key;
- for (key in obj) {
- if (!keys.includes(key)) {
- ret[key] = obj[key];
- }
- }
- return ret;
-}
-//# sourceMappingURL=index.js.map
-
-/***/ }),
-
-/***/ 87631:
-/***/ (function(__unused_webpack_module, exports, __webpack_require__) {
-
-"use strict";
-
-var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
- function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
- return new (P || (P = Promise))(function (resolve, reject) {
- function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
- function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
- function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
- step((generator = generator.apply(thisArg, _arguments || [])).next());
- });
-};
-Object.defineProperty(exports, "__esModule", ({ value: true }));
-exports.SocksClientError = exports.SocksClient = void 0;
-const events_1 = __webpack_require__(24434);
-const net = __webpack_require__(69278);
-const smart_buffer_1 = __webpack_require__(37575);
-const constants_1 = __webpack_require__(5438);
-const helpers_1 = __webpack_require__(17130);
-const receivebuffer_1 = __webpack_require__(87736);
-const util_1 = __webpack_require__(13763);
-Object.defineProperty(exports, "SocksClientError", ({ enumerable: true, get: function () { return util_1.SocksClientError; } }));
-const ip_address_1 = __webpack_require__(49424);
-class SocksClient extends events_1.EventEmitter {
- constructor(options) {
- super();
- this.options = Object.assign({}, options);
- // Validate SocksClientOptions
- (0, helpers_1.validateSocksClientOptions)(options);
- // Default state
- this.setState(constants_1.SocksClientState.Created);
- }
- /**
- * Creates a new SOCKS connection.
- *
- * Note: Supports callbacks and promises. Only supports the connect command.
- * @param options { SocksClientOptions } Options.
- * @param callback { Function } An optional callback function.
- * @returns { Promise }
- */
- static createConnection(options, callback) {
- return new Promise((resolve, reject) => {
- // Validate SocksClientOptions
- try {
- (0, helpers_1.validateSocksClientOptions)(options, ['connect']);
- }
- catch (err) {
- if (typeof callback === 'function') {
- callback(err);
- // eslint-disable-next-line @typescript-eslint/no-explicit-any
- return resolve(err); // Resolves pending promise (prevents memory leaks).
- }
- else {
- return reject(err);
- }
- }
- const client = new SocksClient(options);
- client.connect(options.existing_socket);
- client.once('established', (info) => {
- client.removeAllListeners();
- if (typeof callback === 'function') {
- callback(null, info);
- resolve(info); // Resolves pending promise (prevents memory leaks).
- }
- else {
- resolve(info);
- }
- });
- // Error occurred, failed to establish connection.
- client.once('error', (err) => {
- client.removeAllListeners();
- if (typeof callback === 'function') {
- callback(err);
- // eslint-disable-next-line @typescript-eslint/no-explicit-any
- resolve(err); // Resolves pending promise (prevents memory leaks).
- }
- else {
- reject(err);
- }
- });
- });
- }
- /**
- * Creates a new SOCKS connection chain to a destination host through 2 or more SOCKS proxies.
- *
- * Note: Supports callbacks and promises. Only supports the connect method.
- * Note: Implemented via createConnection() factory function.
- * @param options { SocksClientChainOptions } Options
- * @param callback { Function } An optional callback function.
- * @returns { Promise }
- */
- static createConnectionChain(options, callback) {
- // eslint-disable-next-line no-async-promise-executor
- return new Promise((resolve, reject) => __awaiter(this, void 0, void 0, function* () {
- // Validate SocksClientChainOptions
- try {
- (0, helpers_1.validateSocksClientChainOptions)(options);
- }
- catch (err) {
- if (typeof callback === 'function') {
- callback(err);
- // eslint-disable-next-line @typescript-eslint/no-explicit-any
- return resolve(err); // Resolves pending promise (prevents memory leaks).
- }
- else {
- return reject(err);
- }
- }
- // Shuffle proxies
- if (options.randomizeChain) {
- (0, util_1.shuffleArray)(options.proxies);
- }
- try {
- let sock;
- for (let i = 0; i < options.proxies.length; i++) {
- const nextProxy = options.proxies[i];
- // If we've reached the last proxy in the chain, the destination is the actual destination, otherwise it's the next proxy.
- const nextDestination = i === options.proxies.length - 1
- ? options.destination
- : {
- host: options.proxies[i + 1].host ||
- options.proxies[i + 1].ipaddress,
- port: options.proxies[i + 1].port,
- };
- // Creates the next connection in the chain.
- const result = yield SocksClient.createConnection({
- command: 'connect',
- proxy: nextProxy,
- destination: nextDestination,
- existing_socket: sock,
- });
- // If sock is undefined, assign it here.
- sock = sock || result.socket;
- }
- if (typeof callback === 'function') {
- callback(null, { socket: sock });
- resolve({ socket: sock }); // Resolves pending promise (prevents memory leaks).
- }
- else {
- resolve({ socket: sock });
- }
- }
- catch (err) {
- if (typeof callback === 'function') {
- callback(err);
- // eslint-disable-next-line @typescript-eslint/no-explicit-any
- resolve(err); // Resolves pending promise (prevents memory leaks).
- }
- else {
- reject(err);
- }
- }
- }));
- }
- /**
- * Creates a SOCKS UDP Frame.
- * @param options
- */
- static createUDPFrame(options) {
- const buff = new smart_buffer_1.SmartBuffer();
- buff.writeUInt16BE(0);
- buff.writeUInt8(options.frameNumber || 0);
- // IPv4/IPv6/Hostname
- if (net.isIPv4(options.remoteHost.host)) {
- buff.writeUInt8(constants_1.Socks5HostType.IPv4);
- buff.writeUInt32BE((0, helpers_1.ipv4ToInt32)(options.remoteHost.host));
- }
- else if (net.isIPv6(options.remoteHost.host)) {
- buff.writeUInt8(constants_1.Socks5HostType.IPv6);
- buff.writeBuffer((0, helpers_1.ipToBuffer)(options.remoteHost.host));
- }
- else {
- buff.writeUInt8(constants_1.Socks5HostType.Hostname);
- buff.writeUInt8(Buffer.byteLength(options.remoteHost.host));
- buff.writeString(options.remoteHost.host);
- }
- // Port
- buff.writeUInt16BE(options.remoteHost.port);
- // Data
- buff.writeBuffer(options.data);
- return buff.toBuffer();
- }
- /**
- * Parses a SOCKS UDP frame.
- * @param data
- */
- static parseUDPFrame(data) {
- const buff = smart_buffer_1.SmartBuffer.fromBuffer(data);
- buff.readOffset = 2;
- const frameNumber = buff.readUInt8();
- const hostType = buff.readUInt8();
- let remoteHost;
- if (hostType === constants_1.Socks5HostType.IPv4) {
- remoteHost = (0, helpers_1.int32ToIpv4)(buff.readUInt32BE());
- }
- else if (hostType === constants_1.Socks5HostType.IPv6) {
- remoteHost = ip_address_1.Address6.fromByteArray(Array.from(buff.readBuffer(16))).canonicalForm();
- }
- else {
- remoteHost = buff.readString(buff.readUInt8());
- }
- const remotePort = buff.readUInt16BE();
- return {
- frameNumber,
- remoteHost: {
- host: remoteHost,
- port: remotePort,
- },
- data: buff.readBuffer(),
- };
- }
- /**
- * Internal state setter. If the SocksClient is in an error state, it cannot be changed to a non error state.
- */
- setState(newState) {
- if (this.state !== constants_1.SocksClientState.Error) {
- this.state = newState;
- }
- }
- /**
- * Starts the connection establishment to the proxy and destination.
- * @param existingSocket Connected socket to use instead of creating a new one (internal use).
- */
- connect(existingSocket) {
- this.onDataReceived = (data) => this.onDataReceivedHandler(data);
- this.onClose = () => this.onCloseHandler();
- this.onError = (err) => this.onErrorHandler(err);
- this.onConnect = () => this.onConnectHandler();
- // Start timeout timer (defaults to 30 seconds)
- const timer = setTimeout(() => this.onEstablishedTimeout(), this.options.timeout || constants_1.DEFAULT_TIMEOUT);
- // check whether unref is available as it differs from browser to NodeJS (#33)
- if (timer.unref && typeof timer.unref === 'function') {
- timer.unref();
- }
- // If an existing socket is provided, use it to negotiate SOCKS handshake. Otherwise create a new Socket.
- if (existingSocket) {
- this.socket = existingSocket;
- }
- else {
- this.socket = new net.Socket();
- }
- // Attach Socket error handlers.
- this.socket.once('close', this.onClose);
- this.socket.once('error', this.onError);
- this.socket.once('connect', this.onConnect);
- this.socket.on('data', this.onDataReceived);
- this.setState(constants_1.SocksClientState.Connecting);
- this.receiveBuffer = new receivebuffer_1.ReceiveBuffer();
- if (existingSocket) {
- this.socket.emit('connect');
- }
- else {
- this.socket.connect(this.getSocketOptions());
- if (this.options.set_tcp_nodelay !== undefined &&
- this.options.set_tcp_nodelay !== null) {
- this.socket.setNoDelay(!!this.options.set_tcp_nodelay);
- }
- }
- // Listen for established event so we can re-emit any excess data received during handshakes.
- this.prependOnceListener('established', (info) => {
- setImmediate(() => {
- if (this.receiveBuffer.length > 0) {
- const excessData = this.receiveBuffer.get(this.receiveBuffer.length);
- info.socket.emit('data', excessData);
- }
- info.socket.resume();
- });
- });
- }
- // Socket options (defaults host/port to options.proxy.host/options.proxy.port)
- getSocketOptions() {
- return Object.assign(Object.assign({}, this.options.socket_options), { host: this.options.proxy.host || this.options.proxy.ipaddress, port: this.options.proxy.port });
- }
- /**
- * Handles internal Socks timeout callback.
- * Note: If the Socks client is not BoundWaitingForConnection or Established, the connection will be closed.
- */
- onEstablishedTimeout() {
- if (this.state !== constants_1.SocksClientState.Established &&
- this.state !== constants_1.SocksClientState.BoundWaitingForConnection) {
- this.closeSocket(constants_1.ERRORS.ProxyConnectionTimedOut);
- }
- }
- /**
- * Handles Socket connect event.
- */
- onConnectHandler() {
- this.setState(constants_1.SocksClientState.Connected);
- // Send initial handshake.
- if (this.options.proxy.type === 4) {
- this.sendSocks4InitialHandshake();
- }
- else {
- this.sendSocks5InitialHandshake();
- }
- this.setState(constants_1.SocksClientState.SentInitialHandshake);
- }
- /**
- * Handles Socket data event.
- * @param data
- */
- onDataReceivedHandler(data) {
- /*
- All received data is appended to a ReceiveBuffer.
- This makes sure that all the data we need is received before we attempt to process it.
- */
- this.receiveBuffer.append(data);
- // Process data that we have.
- this.processData();
- }
- /**
- * Handles processing of the data we have received.
- */
- processData() {
- // If we have enough data to process the next step in the SOCKS handshake, proceed.
- while (this.state !== constants_1.SocksClientState.Established &&
- this.state !== constants_1.SocksClientState.Error &&
- this.receiveBuffer.length >= this.nextRequiredPacketBufferSize) {
- // Sent initial handshake, waiting for response.
- if (this.state === constants_1.SocksClientState.SentInitialHandshake) {
- if (this.options.proxy.type === 4) {
- // Socks v4 only has one handshake response.
- this.handleSocks4FinalHandshakeResponse();
- }
- else {
- // Socks v5 has two handshakes, handle initial one here.
- this.handleInitialSocks5HandshakeResponse();
- }
- // Sent auth request for Socks v5, waiting for response.
- }
- else if (this.state === constants_1.SocksClientState.SentAuthentication) {
- this.handleInitialSocks5AuthenticationHandshakeResponse();
- // Sent final Socks v5 handshake, waiting for final response.
- }
- else if (this.state === constants_1.SocksClientState.SentFinalHandshake) {
- this.handleSocks5FinalHandshakeResponse();
- // Socks BIND established. Waiting for remote connection via proxy.
- }
- else if (this.state === constants_1.SocksClientState.BoundWaitingForConnection) {
- if (this.options.proxy.type === 4) {
- this.handleSocks4IncomingConnectionResponse();
- }
- else {
- this.handleSocks5IncomingConnectionResponse();
- }
- }
- else {
- this.closeSocket(constants_1.ERRORS.InternalError);
- break;
- }
- }
- }
- /**
- * Handles Socket close event.
- * @param had_error
- */
- onCloseHandler() {
- this.closeSocket(constants_1.ERRORS.SocketClosed);
- }
- /**
- * Handles Socket error event.
- * @param err
- */
- onErrorHandler(err) {
- this.closeSocket(err.message);
- }
- /**
- * Removes internal event listeners on the underlying Socket.
- */
- removeInternalSocketHandlers() {
- // Pauses data flow of the socket (this is internally resumed after 'established' is emitted)
- this.socket.pause();
- this.socket.removeListener('data', this.onDataReceived);
- this.socket.removeListener('close', this.onClose);
- this.socket.removeListener('error', this.onError);
- this.socket.removeListener('connect', this.onConnect);
- }
- /**
- * Closes and destroys the underlying Socket. Emits an error event.
- * @param err { String } An error string to include in error event.
- */
- closeSocket(err) {
- // Make sure only one 'error' event is fired for the lifetime of this SocksClient instance.
- if (this.state !== constants_1.SocksClientState.Error) {
- // Set internal state to Error.
- this.setState(constants_1.SocksClientState.Error);
- // Destroy Socket
- this.socket.destroy();
- // Remove internal listeners
- this.removeInternalSocketHandlers();
- // Fire 'error' event.
- this.emit('error', new util_1.SocksClientError(err, this.options));
- }
- }
- /**
- * Sends initial Socks v4 handshake request.
- */
- sendSocks4InitialHandshake() {
- const userId = this.options.proxy.userId || '';
- const buff = new smart_buffer_1.SmartBuffer();
- buff.writeUInt8(0x04);
- buff.writeUInt8(constants_1.SocksCommand[this.options.command]);
- buff.writeUInt16BE(this.options.destination.port);
- // Socks 4 (IPv4)
- if (net.isIPv4(this.options.destination.host)) {
- buff.writeBuffer((0, helpers_1.ipToBuffer)(this.options.destination.host));
- buff.writeStringNT(userId);
- // Socks 4a (hostname)
- }
- else {
- buff.writeUInt8(0x00);
- buff.writeUInt8(0x00);
- buff.writeUInt8(0x00);
- buff.writeUInt8(0x01);
- buff.writeStringNT(userId);
- buff.writeStringNT(this.options.destination.host);
- }
- this.nextRequiredPacketBufferSize =
- constants_1.SOCKS_INCOMING_PACKET_SIZES.Socks4Response;
- this.socket.write(buff.toBuffer());
- }
- /**
- * Handles Socks v4 handshake response.
- * @param data
- */
- handleSocks4FinalHandshakeResponse() {
- const data = this.receiveBuffer.get(8);
- if (data[1] !== constants_1.Socks4Response.Granted) {
- this.closeSocket(`${constants_1.ERRORS.Socks4ProxyRejectedConnection} - (${constants_1.Socks4Response[data[1]]})`);
- }
- else {
- // Bind response
- if (constants_1.SocksCommand[this.options.command] === constants_1.SocksCommand.bind) {
- const buff = smart_buffer_1.SmartBuffer.fromBuffer(data);
- buff.readOffset = 2;
- const remoteHost = {
- port: buff.readUInt16BE(),
- host: (0, helpers_1.int32ToIpv4)(buff.readUInt32BE()),
- };
- // If host is 0.0.0.0, set to proxy host.
- if (remoteHost.host === '0.0.0.0') {
- remoteHost.host = this.options.proxy.ipaddress;
- }
- this.setState(constants_1.SocksClientState.BoundWaitingForConnection);
- this.emit('bound', { remoteHost, socket: this.socket });
- // Connect response
- }
- else {
- this.setState(constants_1.SocksClientState.Established);
- this.removeInternalSocketHandlers();
- this.emit('established', { socket: this.socket });
- }
- }
- }
- /**
- * Handles Socks v4 incoming connection request (BIND)
- * @param data
- */
- handleSocks4IncomingConnectionResponse() {
- const data = this.receiveBuffer.get(8);
- if (data[1] !== constants_1.Socks4Response.Granted) {
- this.closeSocket(`${constants_1.ERRORS.Socks4ProxyRejectedIncomingBoundConnection} - (${constants_1.Socks4Response[data[1]]})`);
- }
- else {
- const buff = smart_buffer_1.SmartBuffer.fromBuffer(data);
- buff.readOffset = 2;
- const remoteHost = {
- port: buff.readUInt16BE(),
- host: (0, helpers_1.int32ToIpv4)(buff.readUInt32BE()),
- };
- this.setState(constants_1.SocksClientState.Established);
- this.removeInternalSocketHandlers();
- this.emit('established', { remoteHost, socket: this.socket });
- }
- }
- /**
- * Sends initial Socks v5 handshake request.
- */
- sendSocks5InitialHandshake() {
- const buff = new smart_buffer_1.SmartBuffer();
- // By default we always support no auth.
- const supportedAuthMethods = [constants_1.Socks5Auth.NoAuth];
- // We should only tell the proxy we support user/pass auth if auth info is actually provided.
- // Note: As of Tor v0.3.5.7+, if user/pass auth is an option from the client, by default it will always take priority.
- if (this.options.proxy.userId || this.options.proxy.password) {
- supportedAuthMethods.push(constants_1.Socks5Auth.UserPass);
- }
- // Custom auth method?
- if (this.options.proxy.custom_auth_method !== undefined) {
- supportedAuthMethods.push(this.options.proxy.custom_auth_method);
- }
- // Build handshake packet
- buff.writeUInt8(0x05);
- buff.writeUInt8(supportedAuthMethods.length);
- for (const authMethod of supportedAuthMethods) {
- buff.writeUInt8(authMethod);
- }
- this.nextRequiredPacketBufferSize =
- constants_1.SOCKS_INCOMING_PACKET_SIZES.Socks5InitialHandshakeResponse;
- this.socket.write(buff.toBuffer());
- this.setState(constants_1.SocksClientState.SentInitialHandshake);
- }
- /**
- * Handles initial Socks v5 handshake response.
- * @param data
- */
- handleInitialSocks5HandshakeResponse() {
- const data = this.receiveBuffer.get(2);
- if (data[0] !== 0x05) {
- this.closeSocket(constants_1.ERRORS.InvalidSocks5IntiailHandshakeSocksVersion);
- }
- else if (data[1] === constants_1.SOCKS5_NO_ACCEPTABLE_AUTH) {
- this.closeSocket(constants_1.ERRORS.InvalidSocks5InitialHandshakeNoAcceptedAuthType);
- }
- else {
- // If selected Socks v5 auth method is no auth, send final handshake request.
- if (data[1] === constants_1.Socks5Auth.NoAuth) {
- this.socks5ChosenAuthType = constants_1.Socks5Auth.NoAuth;
- this.sendSocks5CommandRequest();
- // If selected Socks v5 auth method is user/password, send auth handshake.
- }
- else if (data[1] === constants_1.Socks5Auth.UserPass) {
- this.socks5ChosenAuthType = constants_1.Socks5Auth.UserPass;
- this.sendSocks5UserPassAuthentication();
- // If selected Socks v5 auth method is the custom_auth_method, send custom handshake.
- }
- else if (data[1] === this.options.proxy.custom_auth_method) {
- this.socks5ChosenAuthType = this.options.proxy.custom_auth_method;
- this.sendSocks5CustomAuthentication();
- }
- else {
- this.closeSocket(constants_1.ERRORS.InvalidSocks5InitialHandshakeUnknownAuthType);
- }
- }
- }
- /**
- * Sends Socks v5 user & password auth handshake.
- *
- * Note: No auth and user/pass are currently supported.
- */
- sendSocks5UserPassAuthentication() {
- const userId = this.options.proxy.userId || '';
- const password = this.options.proxy.password || '';
- const buff = new smart_buffer_1.SmartBuffer();
- buff.writeUInt8(0x01);
- buff.writeUInt8(Buffer.byteLength(userId));
- buff.writeString(userId);
- buff.writeUInt8(Buffer.byteLength(password));
- buff.writeString(password);
- this.nextRequiredPacketBufferSize =
- constants_1.SOCKS_INCOMING_PACKET_SIZES.Socks5UserPassAuthenticationResponse;
- this.socket.write(buff.toBuffer());
- this.setState(constants_1.SocksClientState.SentAuthentication);
- }
- sendSocks5CustomAuthentication() {
- return __awaiter(this, void 0, void 0, function* () {
- this.nextRequiredPacketBufferSize =
- this.options.proxy.custom_auth_response_size;
- this.socket.write(yield this.options.proxy.custom_auth_request_handler());
- this.setState(constants_1.SocksClientState.SentAuthentication);
- });
- }
- handleSocks5CustomAuthHandshakeResponse(data) {
- return __awaiter(this, void 0, void 0, function* () {
- return yield this.options.proxy.custom_auth_response_handler(data);
- });
- }
- handleSocks5AuthenticationNoAuthHandshakeResponse(data) {
- return __awaiter(this, void 0, void 0, function* () {
- return data[1] === 0x00;
- });
- }
- handleSocks5AuthenticationUserPassHandshakeResponse(data) {
- return __awaiter(this, void 0, void 0, function* () {
- return data[1] === 0x00;
- });
- }
- /**
- * Handles Socks v5 auth handshake response.
- * @param data
- */
- handleInitialSocks5AuthenticationHandshakeResponse() {
- return __awaiter(this, void 0, void 0, function* () {
- this.setState(constants_1.SocksClientState.ReceivedAuthenticationResponse);
- let authResult = false;
- if (this.socks5ChosenAuthType === constants_1.Socks5Auth.NoAuth) {
- authResult = yield this.handleSocks5AuthenticationNoAuthHandshakeResponse(this.receiveBuffer.get(2));
- }
- else if (this.socks5ChosenAuthType === constants_1.Socks5Auth.UserPass) {
- authResult =
- yield this.handleSocks5AuthenticationUserPassHandshakeResponse(this.receiveBuffer.get(2));
- }
- else if (this.socks5ChosenAuthType === this.options.proxy.custom_auth_method) {
- authResult = yield this.handleSocks5CustomAuthHandshakeResponse(this.receiveBuffer.get(this.options.proxy.custom_auth_response_size));
- }
- if (!authResult) {
- this.closeSocket(constants_1.ERRORS.Socks5AuthenticationFailed);
- }
- else {
- this.sendSocks5CommandRequest();
- }
- });
- }
- /**
- * Sends Socks v5 final handshake request.
- */
- sendSocks5CommandRequest() {
- const buff = new smart_buffer_1.SmartBuffer();
- buff.writeUInt8(0x05);
- buff.writeUInt8(constants_1.SocksCommand[this.options.command]);
- buff.writeUInt8(0x00);
- // ipv4, ipv6, domain?
- if (net.isIPv4(this.options.destination.host)) {
- buff.writeUInt8(constants_1.Socks5HostType.IPv4);
- buff.writeBuffer((0, helpers_1.ipToBuffer)(this.options.destination.host));
- }
- else if (net.isIPv6(this.options.destination.host)) {
- buff.writeUInt8(constants_1.Socks5HostType.IPv6);
- buff.writeBuffer((0, helpers_1.ipToBuffer)(this.options.destination.host));
- }
- else {
- buff.writeUInt8(constants_1.Socks5HostType.Hostname);
- buff.writeUInt8(this.options.destination.host.length);
- buff.writeString(this.options.destination.host);
- }
- buff.writeUInt16BE(this.options.destination.port);
- this.nextRequiredPacketBufferSize =
- constants_1.SOCKS_INCOMING_PACKET_SIZES.Socks5ResponseHeader;
- this.socket.write(buff.toBuffer());
- this.setState(constants_1.SocksClientState.SentFinalHandshake);
- }
- /**
- * Handles Socks v5 final handshake response.
- * @param data
- */
- handleSocks5FinalHandshakeResponse() {
- // Peek at available data (we need at least 5 bytes to get the hostname length)
- const header = this.receiveBuffer.peek(5);
- if (header[0] !== 0x05 || header[1] !== constants_1.Socks5Response.Granted) {
- this.closeSocket(`${constants_1.ERRORS.InvalidSocks5FinalHandshakeRejected} - ${constants_1.Socks5Response[header[1]]}`);
- }
- else {
- // Read address type
- const addressType = header[3];
- let remoteHost;
- let buff;
- // IPv4
- if (addressType === constants_1.Socks5HostType.IPv4) {
- // Check if data is available.
- const dataNeeded = constants_1.SOCKS_INCOMING_PACKET_SIZES.Socks5ResponseIPv4;
- if (this.receiveBuffer.length < dataNeeded) {
- this.nextRequiredPacketBufferSize = dataNeeded;
- return;
- }
- buff = smart_buffer_1.SmartBuffer.fromBuffer(this.receiveBuffer.get(dataNeeded).slice(4));
- remoteHost = {
- host: (0, helpers_1.int32ToIpv4)(buff.readUInt32BE()),
- port: buff.readUInt16BE(),
- };
- // If given host is 0.0.0.0, assume remote proxy ip instead.
- if (remoteHost.host === '0.0.0.0') {
- remoteHost.host = this.options.proxy.ipaddress;
- }
- // Hostname
- }
- else if (addressType === constants_1.Socks5HostType.Hostname) {
- const hostLength = header[4];
- const dataNeeded = constants_1.SOCKS_INCOMING_PACKET_SIZES.Socks5ResponseHostname(hostLength); // header + host length + host + port
- // Check if data is available.
- if (this.receiveBuffer.length < dataNeeded) {
- this.nextRequiredPacketBufferSize = dataNeeded;
- return;
- }
- buff = smart_buffer_1.SmartBuffer.fromBuffer(this.receiveBuffer.get(dataNeeded).slice(5));
- remoteHost = {
- host: buff.readString(hostLength),
- port: buff.readUInt16BE(),
- };
- // IPv6
- }
- else if (addressType === constants_1.Socks5HostType.IPv6) {
- // Check if data is available.
- const dataNeeded = constants_1.SOCKS_INCOMING_PACKET_SIZES.Socks5ResponseIPv6;
- if (this.receiveBuffer.length < dataNeeded) {
- this.nextRequiredPacketBufferSize = dataNeeded;
- return;
- }
- buff = smart_buffer_1.SmartBuffer.fromBuffer(this.receiveBuffer.get(dataNeeded).slice(4));
- remoteHost = {
- host: ip_address_1.Address6.fromByteArray(Array.from(buff.readBuffer(16))).canonicalForm(),
- port: buff.readUInt16BE(),
- };
- }
- // We have everything we need
- this.setState(constants_1.SocksClientState.ReceivedFinalResponse);
- // If using CONNECT, the client is now in the established state.
- if (constants_1.SocksCommand[this.options.command] === constants_1.SocksCommand.connect) {
- this.setState(constants_1.SocksClientState.Established);
- this.removeInternalSocketHandlers();
- this.emit('established', { remoteHost, socket: this.socket });
- }
- else if (constants_1.SocksCommand[this.options.command] === constants_1.SocksCommand.bind) {
- /* If using BIND, the Socks client is now in BoundWaitingForConnection state.
- This means that the remote proxy server is waiting for a remote connection to the bound port. */
- this.setState(constants_1.SocksClientState.BoundWaitingForConnection);
- this.nextRequiredPacketBufferSize =
- constants_1.SOCKS_INCOMING_PACKET_SIZES.Socks5ResponseHeader;
- this.emit('bound', { remoteHost, socket: this.socket });
- /*
- If using Associate, the Socks client is now Established. And the proxy server is now accepting UDP packets at the
- given bound port. This initial Socks TCP connection must remain open for the UDP relay to continue to work.
- */
- }
- else if (constants_1.SocksCommand[this.options.command] === constants_1.SocksCommand.associate) {
- this.setState(constants_1.SocksClientState.Established);
- this.removeInternalSocketHandlers();
- this.emit('established', {
- remoteHost,
- socket: this.socket,
- });
- }
- }
- }
- /**
- * Handles Socks v5 incoming connection request (BIND).
- */
- handleSocks5IncomingConnectionResponse() {
- // Peek at available data (we need at least 5 bytes to get the hostname length)
- const header = this.receiveBuffer.peek(5);
- if (header[0] !== 0x05 || header[1] !== constants_1.Socks5Response.Granted) {
- this.closeSocket(`${constants_1.ERRORS.Socks5ProxyRejectedIncomingBoundConnection} - ${constants_1.Socks5Response[header[1]]}`);
- }
- else {
- // Read address type
- const addressType = header[3];
- let remoteHost;
- let buff;
- // IPv4
- if (addressType === constants_1.Socks5HostType.IPv4) {
- // Check if data is available.
- const dataNeeded = constants_1.SOCKS_INCOMING_PACKET_SIZES.Socks5ResponseIPv4;
- if (this.receiveBuffer.length < dataNeeded) {
- this.nextRequiredPacketBufferSize = dataNeeded;
- return;
- }
- buff = smart_buffer_1.SmartBuffer.fromBuffer(this.receiveBuffer.get(dataNeeded).slice(4));
- remoteHost = {
- host: (0, helpers_1.int32ToIpv4)(buff.readUInt32BE()),
- port: buff.readUInt16BE(),
- };
- // If given host is 0.0.0.0, assume remote proxy ip instead.
- if (remoteHost.host === '0.0.0.0') {
- remoteHost.host = this.options.proxy.ipaddress;
- }
- // Hostname
- }
- else if (addressType === constants_1.Socks5HostType.Hostname) {
- const hostLength = header[4];
- const dataNeeded = constants_1.SOCKS_INCOMING_PACKET_SIZES.Socks5ResponseHostname(hostLength); // header + host length + port
- // Check if data is available.
- if (this.receiveBuffer.length < dataNeeded) {
- this.nextRequiredPacketBufferSize = dataNeeded;
- return;
- }
- buff = smart_buffer_1.SmartBuffer.fromBuffer(this.receiveBuffer.get(dataNeeded).slice(5));
- remoteHost = {
- host: buff.readString(hostLength),
- port: buff.readUInt16BE(),
- };
- // IPv6
- }
- else if (addressType === constants_1.Socks5HostType.IPv6) {
- // Check if data is available.
- const dataNeeded = constants_1.SOCKS_INCOMING_PACKET_SIZES.Socks5ResponseIPv6;
- if (this.receiveBuffer.length < dataNeeded) {
- this.nextRequiredPacketBufferSize = dataNeeded;
- return;
- }
- buff = smart_buffer_1.SmartBuffer.fromBuffer(this.receiveBuffer.get(dataNeeded).slice(4));
- remoteHost = {
- host: ip_address_1.Address6.fromByteArray(Array.from(buff.readBuffer(16))).canonicalForm(),
- port: buff.readUInt16BE(),
- };
- }
- this.setState(constants_1.SocksClientState.Established);
- this.removeInternalSocketHandlers();
- this.emit('established', { remoteHost, socket: this.socket });
- }
- }
- get socksClientOptions() {
- return Object.assign({}, this.options);
- }
-}
-exports.SocksClient = SocksClient;
-//# sourceMappingURL=socksclient.js.map
-
-/***/ }),
-
-/***/ 5438:
-/***/ ((__unused_webpack_module, exports) => {
-
-"use strict";
-
-Object.defineProperty(exports, "__esModule", ({ value: true }));
-exports.SOCKS5_NO_ACCEPTABLE_AUTH = exports.SOCKS5_CUSTOM_AUTH_END = exports.SOCKS5_CUSTOM_AUTH_START = exports.SOCKS_INCOMING_PACKET_SIZES = exports.SocksClientState = exports.Socks5Response = exports.Socks5HostType = exports.Socks5Auth = exports.Socks4Response = exports.SocksCommand = exports.ERRORS = exports.DEFAULT_TIMEOUT = void 0;
-const DEFAULT_TIMEOUT = 30000;
-exports.DEFAULT_TIMEOUT = DEFAULT_TIMEOUT;
-// prettier-ignore
-const ERRORS = {
- InvalidSocksCommand: 'An invalid SOCKS command was provided. Valid options are connect, bind, and associate.',
- InvalidSocksCommandForOperation: 'An invalid SOCKS command was provided. Only a subset of commands are supported for this operation.',
- InvalidSocksCommandChain: 'An invalid SOCKS command was provided. Chaining currently only supports the connect command.',
- InvalidSocksClientOptionsDestination: 'An invalid destination host was provided.',
- InvalidSocksClientOptionsExistingSocket: 'An invalid existing socket was provided. This should be an instance of stream.Duplex.',
- InvalidSocksClientOptionsProxy: 'Invalid SOCKS proxy details were provided.',
- InvalidSocksClientOptionsTimeout: 'An invalid timeout value was provided. Please enter a value above 0 (in ms).',
- InvalidSocksClientOptionsProxiesLength: 'At least two socks proxies must be provided for chaining.',
- InvalidSocksClientOptionsCustomAuthRange: 'Custom auth must be a value between 0x80 and 0xFE.',
- InvalidSocksClientOptionsCustomAuthOptions: 'When a custom_auth_method is provided, custom_auth_request_handler, custom_auth_response_size, and custom_auth_response_handler must also be provided and valid.',
- NegotiationError: 'Negotiation error',
- SocketClosed: 'Socket closed',
- ProxyConnectionTimedOut: 'Proxy connection timed out',
- InternalError: 'SocksClient internal error (this should not happen)',
- InvalidSocks4HandshakeResponse: 'Received invalid Socks4 handshake response',
- Socks4ProxyRejectedConnection: 'Socks4 Proxy rejected connection',
- InvalidSocks4IncomingConnectionResponse: 'Socks4 invalid incoming connection response',
- Socks4ProxyRejectedIncomingBoundConnection: 'Socks4 Proxy rejected incoming bound connection',
- InvalidSocks5InitialHandshakeResponse: 'Received invalid Socks5 initial handshake response',
- InvalidSocks5IntiailHandshakeSocksVersion: 'Received invalid Socks5 initial handshake (invalid socks version)',
- InvalidSocks5InitialHandshakeNoAcceptedAuthType: 'Received invalid Socks5 initial handshake (no accepted authentication type)',
- InvalidSocks5InitialHandshakeUnknownAuthType: 'Received invalid Socks5 initial handshake (unknown authentication type)',
- Socks5AuthenticationFailed: 'Socks5 Authentication failed',
- InvalidSocks5FinalHandshake: 'Received invalid Socks5 final handshake response',
- InvalidSocks5FinalHandshakeRejected: 'Socks5 proxy rejected connection',
- InvalidSocks5IncomingConnectionResponse: 'Received invalid Socks5 incoming connection response',
- Socks5ProxyRejectedIncomingBoundConnection: 'Socks5 Proxy rejected incoming bound connection',
-};
-exports.ERRORS = ERRORS;
-const SOCKS_INCOMING_PACKET_SIZES = {
- Socks5InitialHandshakeResponse: 2,
- Socks5UserPassAuthenticationResponse: 2,
- // Command response + incoming connection (bind)
- Socks5ResponseHeader: 5, // We need at least 5 to read the hostname length, then we wait for the address+port information.
- Socks5ResponseIPv4: 10, // 4 header + 4 ip + 2 port
- Socks5ResponseIPv6: 22, // 4 header + 16 ip + 2 port
- Socks5ResponseHostname: (hostNameLength) => hostNameLength + 7, // 4 header + 1 host length + host + 2 port
- // Command response + incoming connection (bind)
- Socks4Response: 8, // 2 header + 2 port + 4 ip
-};
-exports.SOCKS_INCOMING_PACKET_SIZES = SOCKS_INCOMING_PACKET_SIZES;
-var SocksCommand;
-(function (SocksCommand) {
- SocksCommand[SocksCommand["connect"] = 1] = "connect";
- SocksCommand[SocksCommand["bind"] = 2] = "bind";
- SocksCommand[SocksCommand["associate"] = 3] = "associate";
-})(SocksCommand || (exports.SocksCommand = SocksCommand = {}));
-var Socks4Response;
-(function (Socks4Response) {
- Socks4Response[Socks4Response["Granted"] = 90] = "Granted";
- Socks4Response[Socks4Response["Failed"] = 91] = "Failed";
- Socks4Response[Socks4Response["Rejected"] = 92] = "Rejected";
- Socks4Response[Socks4Response["RejectedIdent"] = 93] = "RejectedIdent";
-})(Socks4Response || (exports.Socks4Response = Socks4Response = {}));
-var Socks5Auth;
-(function (Socks5Auth) {
- Socks5Auth[Socks5Auth["NoAuth"] = 0] = "NoAuth";
- Socks5Auth[Socks5Auth["GSSApi"] = 1] = "GSSApi";
- Socks5Auth[Socks5Auth["UserPass"] = 2] = "UserPass";
-})(Socks5Auth || (exports.Socks5Auth = Socks5Auth = {}));
-const SOCKS5_CUSTOM_AUTH_START = 0x80;
-exports.SOCKS5_CUSTOM_AUTH_START = SOCKS5_CUSTOM_AUTH_START;
-const SOCKS5_CUSTOM_AUTH_END = 0xfe;
-exports.SOCKS5_CUSTOM_AUTH_END = SOCKS5_CUSTOM_AUTH_END;
-const SOCKS5_NO_ACCEPTABLE_AUTH = 0xff;
-exports.SOCKS5_NO_ACCEPTABLE_AUTH = SOCKS5_NO_ACCEPTABLE_AUTH;
-var Socks5Response;
-(function (Socks5Response) {
- Socks5Response[Socks5Response["Granted"] = 0] = "Granted";
- Socks5Response[Socks5Response["Failure"] = 1] = "Failure";
- Socks5Response[Socks5Response["NotAllowed"] = 2] = "NotAllowed";
- Socks5Response[Socks5Response["NetworkUnreachable"] = 3] = "NetworkUnreachable";
- Socks5Response[Socks5Response["HostUnreachable"] = 4] = "HostUnreachable";
- Socks5Response[Socks5Response["ConnectionRefused"] = 5] = "ConnectionRefused";
- Socks5Response[Socks5Response["TTLExpired"] = 6] = "TTLExpired";
- Socks5Response[Socks5Response["CommandNotSupported"] = 7] = "CommandNotSupported";
- Socks5Response[Socks5Response["AddressNotSupported"] = 8] = "AddressNotSupported";
-})(Socks5Response || (exports.Socks5Response = Socks5Response = {}));
-var Socks5HostType;
-(function (Socks5HostType) {
- Socks5HostType[Socks5HostType["IPv4"] = 1] = "IPv4";
- Socks5HostType[Socks5HostType["Hostname"] = 3] = "Hostname";
- Socks5HostType[Socks5HostType["IPv6"] = 4] = "IPv6";
-})(Socks5HostType || (exports.Socks5HostType = Socks5HostType = {}));
-var SocksClientState;
-(function (SocksClientState) {
- SocksClientState[SocksClientState["Created"] = 0] = "Created";
- SocksClientState[SocksClientState["Connecting"] = 1] = "Connecting";
- SocksClientState[SocksClientState["Connected"] = 2] = "Connected";
- SocksClientState[SocksClientState["SentInitialHandshake"] = 3] = "SentInitialHandshake";
- SocksClientState[SocksClientState["ReceivedInitialHandshakeResponse"] = 4] = "ReceivedInitialHandshakeResponse";
- SocksClientState[SocksClientState["SentAuthentication"] = 5] = "SentAuthentication";
- SocksClientState[SocksClientState["ReceivedAuthenticationResponse"] = 6] = "ReceivedAuthenticationResponse";
- SocksClientState[SocksClientState["SentFinalHandshake"] = 7] = "SentFinalHandshake";
- SocksClientState[SocksClientState["ReceivedFinalResponse"] = 8] = "ReceivedFinalResponse";
- SocksClientState[SocksClientState["BoundWaitingForConnection"] = 9] = "BoundWaitingForConnection";
- SocksClientState[SocksClientState["Established"] = 10] = "Established";
- SocksClientState[SocksClientState["Disconnected"] = 11] = "Disconnected";
- SocksClientState[SocksClientState["Error"] = 99] = "Error";
-})(SocksClientState || (exports.SocksClientState = SocksClientState = {}));
-//# sourceMappingURL=constants.js.map
-
-/***/ }),
-
-/***/ 17130:
-/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
-
-"use strict";
-
-Object.defineProperty(exports, "__esModule", ({ value: true }));
-exports.ipToBuffer = exports.int32ToIpv4 = exports.ipv4ToInt32 = exports.validateSocksClientChainOptions = exports.validateSocksClientOptions = void 0;
-const util_1 = __webpack_require__(13763);
-const constants_1 = __webpack_require__(5438);
-const stream = __webpack_require__(2203);
-const ip_address_1 = __webpack_require__(49424);
-const net = __webpack_require__(69278);
-/**
- * Validates the provided SocksClientOptions
- * @param options { SocksClientOptions }
- * @param acceptedCommands { string[] } A list of accepted SocksProxy commands.
- */
-function validateSocksClientOptions(options, acceptedCommands = ['connect', 'bind', 'associate']) {
- // Check SOCKs command option.
- if (!constants_1.SocksCommand[options.command]) {
- throw new util_1.SocksClientError(constants_1.ERRORS.InvalidSocksCommand, options);
- }
- // Check SocksCommand for acceptable command.
- if (acceptedCommands.indexOf(options.command) === -1) {
- throw new util_1.SocksClientError(constants_1.ERRORS.InvalidSocksCommandForOperation, options);
- }
- // Check destination
- if (!isValidSocksRemoteHost(options.destination)) {
- throw new util_1.SocksClientError(constants_1.ERRORS.InvalidSocksClientOptionsDestination, options);
- }
- // Check SOCKS proxy to use
- if (!isValidSocksProxy(options.proxy)) {
- throw new util_1.SocksClientError(constants_1.ERRORS.InvalidSocksClientOptionsProxy, options);
- }
- // Validate custom auth (if set)
- validateCustomProxyAuth(options.proxy, options);
- // Check timeout
- if (options.timeout && !isValidTimeoutValue(options.timeout)) {
- throw new util_1.SocksClientError(constants_1.ERRORS.InvalidSocksClientOptionsTimeout, options);
- }
- // Check existing_socket (if provided)
- if (options.existing_socket &&
- !(options.existing_socket instanceof stream.Duplex)) {
- throw new util_1.SocksClientError(constants_1.ERRORS.InvalidSocksClientOptionsExistingSocket, options);
- }
-}
-exports.validateSocksClientOptions = validateSocksClientOptions;
-/**
- * Validates the SocksClientChainOptions
- * @param options { SocksClientChainOptions }
- */
-function validateSocksClientChainOptions(options) {
- // Only connect is supported when chaining.
- if (options.command !== 'connect') {
- throw new util_1.SocksClientError(constants_1.ERRORS.InvalidSocksCommandChain, options);
- }
- // Check destination
- if (!isValidSocksRemoteHost(options.destination)) {
- throw new util_1.SocksClientError(constants_1.ERRORS.InvalidSocksClientOptionsDestination, options);
- }
- // Validate proxies (length)
- if (!(options.proxies &&
- Array.isArray(options.proxies) &&
- options.proxies.length >= 2)) {
- throw new util_1.SocksClientError(constants_1.ERRORS.InvalidSocksClientOptionsProxiesLength, options);
- }
- // Validate proxies
- options.proxies.forEach((proxy) => {
- if (!isValidSocksProxy(proxy)) {
- throw new util_1.SocksClientError(constants_1.ERRORS.InvalidSocksClientOptionsProxy, options);
- }
- // Validate custom auth (if set)
- validateCustomProxyAuth(proxy, options);
- });
- // Check timeout
- if (options.timeout && !isValidTimeoutValue(options.timeout)) {
- throw new util_1.SocksClientError(constants_1.ERRORS.InvalidSocksClientOptionsTimeout, options);
- }
-}
-exports.validateSocksClientChainOptions = validateSocksClientChainOptions;
-function validateCustomProxyAuth(proxy, options) {
- if (proxy.custom_auth_method !== undefined) {
- // Invalid auth method range
- if (proxy.custom_auth_method < constants_1.SOCKS5_CUSTOM_AUTH_START ||
- proxy.custom_auth_method > constants_1.SOCKS5_CUSTOM_AUTH_END) {
- throw new util_1.SocksClientError(constants_1.ERRORS.InvalidSocksClientOptionsCustomAuthRange, options);
- }
- // Missing custom_auth_request_handler
- if (proxy.custom_auth_request_handler === undefined ||
- typeof proxy.custom_auth_request_handler !== 'function') {
- throw new util_1.SocksClientError(constants_1.ERRORS.InvalidSocksClientOptionsCustomAuthOptions, options);
- }
- // Missing custom_auth_response_size
- if (proxy.custom_auth_response_size === undefined) {
- throw new util_1.SocksClientError(constants_1.ERRORS.InvalidSocksClientOptionsCustomAuthOptions, options);
- }
- // Missing/invalid custom_auth_response_handler
- if (proxy.custom_auth_response_handler === undefined ||
- typeof proxy.custom_auth_response_handler !== 'function') {
- throw new util_1.SocksClientError(constants_1.ERRORS.InvalidSocksClientOptionsCustomAuthOptions, options);
- }
- }
-}
-/**
- * Validates a SocksRemoteHost
- * @param remoteHost { SocksRemoteHost }
- */
-function isValidSocksRemoteHost(remoteHost) {
- return (remoteHost &&
- typeof remoteHost.host === 'string' &&
- typeof remoteHost.port === 'number' &&
- remoteHost.port >= 0 &&
- remoteHost.port <= 65535);
-}
-/**
- * Validates a SocksProxy
- * @param proxy { SocksProxy }
- */
-function isValidSocksProxy(proxy) {
- return (proxy &&
- (typeof proxy.host === 'string' || typeof proxy.ipaddress === 'string') &&
- typeof proxy.port === 'number' &&
- proxy.port >= 0 &&
- proxy.port <= 65535 &&
- (proxy.type === 4 || proxy.type === 5));
-}
-/**
- * Validates a timeout value.
- * @param value { Number }
- */
-function isValidTimeoutValue(value) {
- return typeof value === 'number' && value > 0;
-}
-function ipv4ToInt32(ip) {
- const address = new ip_address_1.Address4(ip);
- // Convert the IPv4 address parts to an integer
- return address.toArray().reduce((acc, part) => (acc << 8) + part, 0);
-}
-exports.ipv4ToInt32 = ipv4ToInt32;
-function int32ToIpv4(int32) {
- // Extract each byte (octet) from the 32-bit integer
- const octet1 = (int32 >>> 24) & 0xff;
- const octet2 = (int32 >>> 16) & 0xff;
- const octet3 = (int32 >>> 8) & 0xff;
- const octet4 = int32 & 0xff;
- // Combine the octets into a string in IPv4 format
- return [octet1, octet2, octet3, octet4].join('.');
-}
-exports.int32ToIpv4 = int32ToIpv4;
-function ipToBuffer(ip) {
- if (net.isIPv4(ip)) {
- // Handle IPv4 addresses
- const address = new ip_address_1.Address4(ip);
- return Buffer.from(address.toArray());
- }
- else if (net.isIPv6(ip)) {
- // Handle IPv6 addresses
- const address = new ip_address_1.Address6(ip);
- return Buffer.from(address.toByteArray());
- }
- else {
- throw new Error('Invalid IP address format');
- }
-}
-exports.ipToBuffer = ipToBuffer;
-//# sourceMappingURL=helpers.js.map
-
-/***/ }),
-
-/***/ 87736:
-/***/ ((__unused_webpack_module, exports) => {
-
-"use strict";
-
-Object.defineProperty(exports, "__esModule", ({ value: true }));
-exports.ReceiveBuffer = void 0;
-class ReceiveBuffer {
- constructor(size = 4096) {
- this.buffer = Buffer.allocUnsafe(size);
- this.offset = 0;
- this.originalSize = size;
- }
- get length() {
- return this.offset;
- }
- append(data) {
- if (!Buffer.isBuffer(data)) {
- throw new Error('Attempted to append a non-buffer instance to ReceiveBuffer.');
- }
- if (this.offset + data.length >= this.buffer.length) {
- const tmp = this.buffer;
- this.buffer = Buffer.allocUnsafe(Math.max(this.buffer.length + this.originalSize, this.buffer.length + data.length));
- tmp.copy(this.buffer);
- }
- data.copy(this.buffer, this.offset);
- return (this.offset += data.length);
- }
- peek(length) {
- if (length > this.offset) {
- throw new Error('Attempted to read beyond the bounds of the managed internal data.');
- }
- return this.buffer.slice(0, length);
- }
- get(length) {
- if (length > this.offset) {
- throw new Error('Attempted to read beyond the bounds of the managed internal data.');
- }
- const value = Buffer.allocUnsafe(length);
- this.buffer.slice(0, length).copy(value);
- this.buffer.copyWithin(0, length, length + this.offset - length);
- this.offset -= length;
- return value;
- }
-}
-exports.ReceiveBuffer = ReceiveBuffer;
-//# sourceMappingURL=receivebuffer.js.map
-
-/***/ }),
-
-/***/ 13763:
-/***/ ((__unused_webpack_module, exports) => {
-
-"use strict";
-
-Object.defineProperty(exports, "__esModule", ({ value: true }));
-exports.shuffleArray = exports.SocksClientError = void 0;
-/**
- * Error wrapper for SocksClient
- */
-class SocksClientError extends Error {
- constructor(message, options) {
- super(message);
- this.options = options;
- }
-}
-exports.SocksClientError = SocksClientError;
-/**
- * Shuffles a given array.
- * @param array The array to shuffle.
- */
-function shuffleArray(array) {
- for (let i = array.length - 1; i > 0; i--) {
- const j = Math.floor(Math.random() * (i + 1));
- [array[i], array[j]] = [array[j], array[i]];
- }
-}
-exports.shuffleArray = shuffleArray;
-//# sourceMappingURL=util.js.map
-
-/***/ }),
-
-/***/ 65861:
-/***/ (function(__unused_webpack_module, exports, __webpack_require__) {
-
-"use strict";
-
-var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
- if (k2 === undefined) k2 = k;
- var desc = Object.getOwnPropertyDescriptor(m, k);
- if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
- desc = { enumerable: true, get: function() { return m[k]; } };
- }
- Object.defineProperty(o, k2, desc);
-}) : (function(o, m, k, k2) {
- if (k2 === undefined) k2 = k;
- o[k2] = m[k];
-}));
-var __exportStar = (this && this.__exportStar) || function(m, exports) {
- for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);
-};
-Object.defineProperty(exports, "__esModule", ({ value: true }));
-__exportStar(__webpack_require__(87631), exports);
-//# sourceMappingURL=index.js.map
-
-/***/ }),
-
-/***/ 17604:
-/***/ ((module, exports, __webpack_require__) => {
-
-var __WEBPACK_AMD_DEFINE_RESULT__;/* global window, exports, define */
-
-!function() {
- 'use strict'
-
- var re = {
- not_string: /[^s]/,
- not_bool: /[^t]/,
- not_type: /[^T]/,
- not_primitive: /[^v]/,
- number: /[diefg]/,
- numeric_arg: /[bcdiefguxX]/,
- json: /[j]/,
- not_json: /[^j]/,
- text: /^[^\x25]+/,
- modulo: /^\x25{2}/,
- placeholder: /^\x25(?:([1-9]\d*)\$|\(([^)]+)\))?(\+)?(0|'[^$])?(-)?(\d+)?(?:\.(\d+))?([b-gijostTuvxX])/,
- key: /^([a-z_][a-z_\d]*)/i,
- key_access: /^\.([a-z_][a-z_\d]*)/i,
- index_access: /^\[(\d+)\]/,
- sign: /^[+-]/
- }
-
- function sprintf(key) {
- // `arguments` is not an array, but should be fine for this call
- return sprintf_format(sprintf_parse(key), arguments)
- }
-
- function vsprintf(fmt, argv) {
- return sprintf.apply(null, [fmt].concat(argv || []))
- }
-
- function sprintf_format(parse_tree, argv) {
- var cursor = 1, tree_length = parse_tree.length, arg, output = '', i, k, ph, pad, pad_character, pad_length, is_positive, sign
- for (i = 0; i < tree_length; i++) {
- if (typeof parse_tree[i] === 'string') {
- output += parse_tree[i]
- }
- else if (typeof parse_tree[i] === 'object') {
- ph = parse_tree[i] // convenience purposes only
- if (ph.keys) { // keyword argument
- arg = argv[cursor]
- for (k = 0; k < ph.keys.length; k++) {
- if (arg == undefined) {
- throw new Error(sprintf('[sprintf] Cannot access property "%s" of undefined value "%s"', ph.keys[k], ph.keys[k-1]))
- }
- arg = arg[ph.keys[k]]
- }
- }
- else if (ph.param_no) { // positional argument (explicit)
- arg = argv[ph.param_no]
- }
- else { // positional argument (implicit)
- arg = argv[cursor++]
- }
-
- if (re.not_type.test(ph.type) && re.not_primitive.test(ph.type) && arg instanceof Function) {
- arg = arg()
- }
-
- if (re.numeric_arg.test(ph.type) && (typeof arg !== 'number' && isNaN(arg))) {
- throw new TypeError(sprintf('[sprintf] expecting number but found %T', arg))
- }
-
- if (re.number.test(ph.type)) {
- is_positive = arg >= 0
- }
-
- switch (ph.type) {
- case 'b':
- arg = parseInt(arg, 10).toString(2)
- break
- case 'c':
- arg = String.fromCharCode(parseInt(arg, 10))
- break
- case 'd':
- case 'i':
- arg = parseInt(arg, 10)
- break
- case 'j':
- arg = JSON.stringify(arg, null, ph.width ? parseInt(ph.width) : 0)
- break
- case 'e':
- arg = ph.precision ? parseFloat(arg).toExponential(ph.precision) : parseFloat(arg).toExponential()
- break
- case 'f':
- arg = ph.precision ? parseFloat(arg).toFixed(ph.precision) : parseFloat(arg)
- break
- case 'g':
- arg = ph.precision ? String(Number(arg.toPrecision(ph.precision))) : parseFloat(arg)
- break
- case 'o':
- arg = (parseInt(arg, 10) >>> 0).toString(8)
- break
- case 's':
- arg = String(arg)
- arg = (ph.precision ? arg.substring(0, ph.precision) : arg)
- break
- case 't':
- arg = String(!!arg)
- arg = (ph.precision ? arg.substring(0, ph.precision) : arg)
- break
- case 'T':
- arg = Object.prototype.toString.call(arg).slice(8, -1).toLowerCase()
- arg = (ph.precision ? arg.substring(0, ph.precision) : arg)
- break
- case 'u':
- arg = parseInt(arg, 10) >>> 0
- break
- case 'v':
- arg = arg.valueOf()
- arg = (ph.precision ? arg.substring(0, ph.precision) : arg)
- break
- case 'x':
- arg = (parseInt(arg, 10) >>> 0).toString(16)
- break
- case 'X':
- arg = (parseInt(arg, 10) >>> 0).toString(16).toUpperCase()
- break
- }
- if (re.json.test(ph.type)) {
- output += arg
- }
- else {
- if (re.number.test(ph.type) && (!is_positive || ph.sign)) {
- sign = is_positive ? '+' : '-'
- arg = arg.toString().replace(re.sign, '')
- }
- else {
- sign = ''
- }
- pad_character = ph.pad_char ? ph.pad_char === '0' ? '0' : ph.pad_char.charAt(1) : ' '
- pad_length = ph.width - (sign + arg).length
- pad = ph.width ? (pad_length > 0 ? pad_character.repeat(pad_length) : '') : ''
- output += ph.align ? sign + arg + pad : (pad_character === '0' ? sign + pad + arg : pad + sign + arg)
- }
- }
- }
- return output
- }
-
- var sprintf_cache = Object.create(null)
-
- function sprintf_parse(fmt) {
- if (sprintf_cache[fmt]) {
- return sprintf_cache[fmt]
- }
-
- var _fmt = fmt, match, parse_tree = [], arg_names = 0
- while (_fmt) {
- if ((match = re.text.exec(_fmt)) !== null) {
- parse_tree.push(match[0])
- }
- else if ((match = re.modulo.exec(_fmt)) !== null) {
- parse_tree.push('%')
- }
- else if ((match = re.placeholder.exec(_fmt)) !== null) {
- if (match[2]) {
- arg_names |= 1
- var field_list = [], replacement_field = match[2], field_match = []
- if ((field_match = re.key.exec(replacement_field)) !== null) {
- field_list.push(field_match[1])
- while ((replacement_field = replacement_field.substring(field_match[0].length)) !== '') {
- if ((field_match = re.key_access.exec(replacement_field)) !== null) {
- field_list.push(field_match[1])
- }
- else if ((field_match = re.index_access.exec(replacement_field)) !== null) {
- field_list.push(field_match[1])
- }
- else {
- throw new SyntaxError('[sprintf] failed to parse named argument key')
- }
- }
- }
- else {
- throw new SyntaxError('[sprintf] failed to parse named argument key')
- }
- match[2] = field_list
- }
- else {
- arg_names |= 2
- }
- if (arg_names === 3) {
- throw new Error('[sprintf] mixing positional and named placeholders is not (yet) supported')
- }
-
- parse_tree.push(
- {
- placeholder: match[0],
- param_no: match[1],
- keys: match[2],
- sign: match[3],
- pad_char: match[4],
- align: match[5],
- width: match[6],
- precision: match[7],
- type: match[8]
- }
- )
- }
- else {
- throw new SyntaxError('[sprintf] unexpected placeholder')
- }
- _fmt = _fmt.substring(match[0].length)
- }
- return sprintf_cache[fmt] = parse_tree
- }
-
- /**
- * export to either browser or node.js
- */
- /* eslint-disable quote-props */
- if (true) {
- exports.sprintf = sprintf
- exports.vsprintf = vsprintf
- }
- if (typeof window !== 'undefined') {
- window['sprintf'] = sprintf
- window['vsprintf'] = vsprintf
-
- if (true) {
- !(__WEBPACK_AMD_DEFINE_RESULT__ = (function() {
- return {
- 'sprintf': sprintf,
- 'vsprintf': vsprintf
- }
- }).call(exports, __webpack_require__, exports, module),
- __WEBPACK_AMD_DEFINE_RESULT__ !== undefined && (module.exports = __WEBPACK_AMD_DEFINE_RESULT__))
- }
- }
- /* eslint-enable quote-props */
-}(); // eslint-disable-line
-
-
/***/ }),
/***/ 83141:
@@ -88569,14 +81838,6 @@ module.exports = require("crypto");
/***/ }),
-/***/ 72250:
-/***/ ((module) => {
-
-"use strict";
-module.exports = require("dns");
-
-/***/ }),
-
/***/ 24434:
/***/ ((module) => {
@@ -88609,14 +81870,6 @@ module.exports = require("https");
/***/ }),
-/***/ 69278:
-/***/ ((module) => {
-
-"use strict";
-module.exports = require("net");
-
-/***/ }),
-
/***/ 77598:
/***/ ((module) => {
@@ -88665,14 +81918,6 @@ module.exports = require("stream");
/***/ }),
-/***/ 64756:
-/***/ ((module) => {
-
-"use strict";
-module.exports = require("tls");
-
-/***/ }),
-
/***/ 52018:
/***/ ((module) => {
@@ -95197,7 +88442,7 @@ __webpack_require__.d(scalar_native_namespaceObject, {
add: () => (scalar_native_add),
band: () => (band),
bitLength: () => (bitLength),
- bits: () => (scalar_native_bits),
+ bits: () => (bits),
bor: () => (bor),
bxor: () => (bxor),
div: () => (div),
@@ -147290,2697 +140535,8 @@ class BrowserProvider extends JsonRpcApiPollingProvider {
//# sourceMappingURL=provider-browser.js.map
// EXTERNAL MODULE: ./node_modules/cross-fetch/dist/node-ponyfill.js
var node_ponyfill = __webpack_require__(15221);
-// EXTERNAL MODULE: ./node_modules/http-proxy-agent/dist/index.js
-var dist = __webpack_require__(6645);
-// EXTERNAL MODULE: ./node_modules/https-proxy-agent/dist/index.js
-var https_proxy_agent_dist = __webpack_require__(62288);
-// EXTERNAL MODULE: ./node_modules/socks-proxy-agent/dist/index.js
-var socks_proxy_agent_dist = __webpack_require__(77128);
// EXTERNAL MODULE: ./node_modules/bn.js/lib/bn.js
var bn = __webpack_require__(39404);
-;// CONCATENATED MODULE: external "module"
-const external_module_namespaceObject = require("module");
-;// CONCATENATED MODULE: ./node_modules/fflate/esm/index.mjs
-
-var esm_require = (0,external_module_namespaceObject.createRequire)('/');
-// DEFLATE is a complex format; to read this code, you should probably check the RFC first:
-// https://tools.ietf.org/html/rfc1951
-// You may also wish to take a look at the guide I made about this program:
-// https://gist.github.com/101arrowz/253f31eb5abc3d9275ab943003ffecad
-// Some of the following code is similar to that of UZIP.js:
-// https://github.com/photopea/UZIP.js
-// However, the vast majority of the codebase has diverged from UZIP.js to increase performance and reduce bundle size.
-// Sometimes 0 will appear where -1 would be more appropriate. This is because using a uint
-// is better for memory in most engines (I *think*).
-// Mediocre shim
-var esm_Worker;
-var workerAdd = ";var __w=require('worker_threads');__w.parentPort.on('message',function(m){onmessage({data:m})}),postMessage=function(m,t){__w.parentPort.postMessage(m,t)},close=process.exit;self=global";
-try {
- esm_Worker = esm_require('worker_threads').Worker;
-}
-catch (e) {
-}
-var wk = esm_Worker ? function (c, _, msg, transfer, cb) {
- var done = false;
- var w = new esm_Worker(c + workerAdd, { eval: true })
- .on('error', function (e) { return cb(e, null); })
- .on('message', function (m) { return cb(null, m); })
- .on('exit', function (c) {
- if (c && !done)
- cb(new Error('exited with code ' + c), null);
- });
- w.postMessage(msg, transfer);
- w.terminate = function () {
- done = true;
- return esm_Worker.prototype.terminate.call(w);
- };
- return w;
-} : function (_, __, ___, ____, cb) {
- setImmediate(function () { return cb(new Error('async operations unsupported - update to Node 12+ (or Node 10-11 with the --experimental-worker CLI flag)'), null); });
- var NOP = function () { };
- return {
- terminate: NOP,
- postMessage: NOP
- };
-};
-
-// aliases for shorter compressed code (most minifers don't do this)
-var esm_u8 = Uint8Array, u16 = Uint16Array, i32 = Int32Array;
-// fixed length extra bits
-var fleb = new esm_u8([0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 2, 2, 2, 2, 3, 3, 3, 3, 4, 4, 4, 4, 5, 5, 5, 5, 0, /* unused */ 0, 0, /* impossible */ 0]);
-// fixed distance extra bits
-var fdeb = new esm_u8([0, 0, 0, 0, 1, 1, 2, 2, 3, 3, 4, 4, 5, 5, 6, 6, 7, 7, 8, 8, 9, 9, 10, 10, 11, 11, 12, 12, 13, 13, /* unused */ 0, 0]);
-// code length index map
-var clim = new esm_u8([16, 17, 18, 0, 8, 7, 9, 6, 10, 5, 11, 4, 12, 3, 13, 2, 14, 1, 15]);
-// get base, reverse index map from extra bits
-var freb = function (eb, start) {
- var b = new u16(31);
- for (var i = 0; i < 31; ++i) {
- b[i] = start += 1 << eb[i - 1];
- }
- // numbers here are at max 18 bits
- var r = new i32(b[30]);
- for (var i = 1; i < 30; ++i) {
- for (var j = b[i]; j < b[i + 1]; ++j) {
- r[j] = ((j - b[i]) << 5) | i;
- }
- }
- return { b: b, r: r };
-};
-var _a = freb(fleb, 2), fl = _a.b, revfl = _a.r;
-// we can ignore the fact that the other numbers are wrong; they never happen anyway
-fl[28] = 258, revfl[258] = 28;
-var _b = freb(fdeb, 0), fd = _b.b, revfd = _b.r;
-// map of value to reverse (assuming 16 bits)
-var rev = new u16(32768);
-for (var i = 0; i < 32768; ++i) {
- // reverse table algorithm from SO
- var x = ((i & 0xAAAA) >> 1) | ((i & 0x5555) << 1);
- x = ((x & 0xCCCC) >> 2) | ((x & 0x3333) << 2);
- x = ((x & 0xF0F0) >> 4) | ((x & 0x0F0F) << 4);
- rev[i] = (((x & 0xFF00) >> 8) | ((x & 0x00FF) << 8)) >> 1;
-}
-// create huffman tree from u8 "map": index -> code length for code index
-// mb (max bits) must be at most 15
-// TODO: optimize/split up?
-var hMap = (function (cd, mb, r) {
- var s = cd.length;
- // index
- var i = 0;
- // u16 "map": index -> # of codes with bit length = index
- var l = new u16(mb);
- // length of cd must be 288 (total # of codes)
- for (; i < s; ++i) {
- if (cd[i])
- ++l[cd[i] - 1];
- }
- // u16 "map": index -> minimum code for bit length = index
- var le = new u16(mb);
- for (i = 1; i < mb; ++i) {
- le[i] = (le[i - 1] + l[i - 1]) << 1;
- }
- var co;
- if (r) {
- // u16 "map": index -> number of actual bits, symbol for code
- co = new u16(1 << mb);
- // bits to remove for reverser
- var rvb = 15 - mb;
- for (i = 0; i < s; ++i) {
- // ignore 0 lengths
- if (cd[i]) {
- // num encoding both symbol and bits read
- var sv = (i << 4) | cd[i];
- // free bits
- var r_1 = mb - cd[i];
- // start value
- var v = le[cd[i] - 1]++ << r_1;
- // m is end value
- for (var m = v | ((1 << r_1) - 1); v <= m; ++v) {
- // every 16 bit value starting with the code yields the same result
- co[rev[v] >> rvb] = sv;
- }
- }
- }
- }
- else {
- co = new u16(s);
- for (i = 0; i < s; ++i) {
- if (cd[i]) {
- co[i] = rev[le[cd[i] - 1]++] >> (15 - cd[i]);
- }
- }
- }
- return co;
-});
-// fixed length tree
-var flt = new esm_u8(288);
-for (var i = 0; i < 144; ++i)
- flt[i] = 8;
-for (var i = 144; i < 256; ++i)
- flt[i] = 9;
-for (var i = 256; i < 280; ++i)
- flt[i] = 7;
-for (var i = 280; i < 288; ++i)
- flt[i] = 8;
-// fixed distance tree
-var fdt = new esm_u8(32);
-for (var i = 0; i < 32; ++i)
- fdt[i] = 5;
-// fixed length map
-var flm = /*#__PURE__*/ hMap(flt, 9, 0), flrm = /*#__PURE__*/ hMap(flt, 9, 1);
-// fixed distance map
-var fdm = /*#__PURE__*/ hMap(fdt, 5, 0), fdrm = /*#__PURE__*/ hMap(fdt, 5, 1);
-// find max of array
-var max = function (a) {
- var m = a[0];
- for (var i = 1; i < a.length; ++i) {
- if (a[i] > m)
- m = a[i];
- }
- return m;
-};
-// read d, starting at bit p and mask with m
-var bits = function (d, p, m) {
- var o = (p / 8) | 0;
- return ((d[o] | (d[o + 1] << 8)) >> (p & 7)) & m;
-};
-// read d, starting at bit p continuing for at least 16 bits
-var bits16 = function (d, p) {
- var o = (p / 8) | 0;
- return ((d[o] | (d[o + 1] << 8) | (d[o + 2] << 16)) >> (p & 7));
-};
-// get end of byte
-var shft = function (p) { return ((p + 7) / 8) | 0; };
-// typed array slice - allows garbage collector to free original reference,
-// while being more compatible than .slice
-var slc = function (v, s, e) {
- if (s == null || s < 0)
- s = 0;
- if (e == null || e > v.length)
- e = v.length;
- // can't use .constructor in case user-supplied
- return new esm_u8(v.subarray(s, e));
-};
-/**
- * Codes for errors generated within this library
- */
-var FlateErrorCode = {
- UnexpectedEOF: 0,
- InvalidBlockType: 1,
- InvalidLengthLiteral: 2,
- InvalidDistance: 3,
- StreamFinished: 4,
- NoStreamHandler: 5,
- InvalidHeader: 6,
- NoCallback: 7,
- InvalidUTF8: 8,
- ExtraFieldTooLong: 9,
- InvalidDate: 10,
- FilenameTooLong: 11,
- StreamFinishing: 12,
- InvalidZipData: 13,
- UnknownCompressionMethod: 14
-};
-// error codes
-var ec = [
- 'unexpected EOF',
- 'invalid block type',
- 'invalid length/literal',
- 'invalid distance',
- 'stream finished',
- 'no stream handler',
- ,
- 'no callback',
- 'invalid UTF-8 data',
- 'extra field too long',
- 'date not in range 1980-2099',
- 'filename too long',
- 'stream finishing',
- 'invalid zip data'
- // determined by unknown compression method
-];
-;
-var err = function (ind, msg, nt) {
- var e = new Error(msg || ec[ind]);
- e.code = ind;
- if (Error.captureStackTrace)
- Error.captureStackTrace(e, err);
- if (!nt)
- throw e;
- return e;
-};
-// expands raw DEFLATE data
-var inflt = function (dat, st, buf, dict) {
- // source length dict length
- var sl = dat.length, dl = dict ? dict.length : 0;
- if (!sl || st.f && !st.l)
- return buf || new esm_u8(0);
- var noBuf = !buf;
- // have to estimate size
- var resize = noBuf || st.i != 2;
- // no state
- var noSt = st.i;
- // Assumes roughly 33% compression ratio average
- if (noBuf)
- buf = new esm_u8(sl * 3);
- // ensure buffer can fit at least l elements
- var cbuf = function (l) {
- var bl = buf.length;
- // need to increase size to fit
- if (l > bl) {
- // Double or set to necessary, whichever is greater
- var nbuf = new esm_u8(Math.max(bl * 2, l));
- nbuf.set(buf);
- buf = nbuf;
- }
- };
- // last chunk bitpos bytes
- var final = st.f || 0, pos = st.p || 0, bt = st.b || 0, lm = st.l, dm = st.d, lbt = st.m, dbt = st.n;
- // total bits
- var tbts = sl * 8;
- do {
- if (!lm) {
- // BFINAL - this is only 1 when last chunk is next
- final = bits(dat, pos, 1);
- // type: 0 = no compression, 1 = fixed huffman, 2 = dynamic huffman
- var type = bits(dat, pos + 1, 3);
- pos += 3;
- if (!type) {
- // go to end of byte boundary
- var s = shft(pos) + 4, l = dat[s - 4] | (dat[s - 3] << 8), t = s + l;
- if (t > sl) {
- if (noSt)
- err(0);
- break;
- }
- // ensure size
- if (resize)
- cbuf(bt + l);
- // Copy over uncompressed data
- buf.set(dat.subarray(s, t), bt);
- // Get new bitpos, update byte count
- st.b = bt += l, st.p = pos = t * 8, st.f = final;
- continue;
- }
- else if (type == 1)
- lm = flrm, dm = fdrm, lbt = 9, dbt = 5;
- else if (type == 2) {
- // literal lengths
- var hLit = bits(dat, pos, 31) + 257, hcLen = bits(dat, pos + 10, 15) + 4;
- var tl = hLit + bits(dat, pos + 5, 31) + 1;
- pos += 14;
- // length+distance tree
- var ldt = new esm_u8(tl);
- // code length tree
- var clt = new esm_u8(19);
- for (var i = 0; i < hcLen; ++i) {
- // use index map to get real code
- clt[clim[i]] = bits(dat, pos + i * 3, 7);
- }
- pos += hcLen * 3;
- // code lengths bits
- var clb = max(clt), clbmsk = (1 << clb) - 1;
- // code lengths map
- var clm = hMap(clt, clb, 1);
- for (var i = 0; i < tl;) {
- var r = clm[bits(dat, pos, clbmsk)];
- // bits read
- pos += r & 15;
- // symbol
- var s = r >> 4;
- // code length to copy
- if (s < 16) {
- ldt[i++] = s;
- }
- else {
- // copy count
- var c = 0, n = 0;
- if (s == 16)
- n = 3 + bits(dat, pos, 3), pos += 2, c = ldt[i - 1];
- else if (s == 17)
- n = 3 + bits(dat, pos, 7), pos += 3;
- else if (s == 18)
- n = 11 + bits(dat, pos, 127), pos += 7;
- while (n--)
- ldt[i++] = c;
- }
- }
- // length tree distance tree
- var lt = ldt.subarray(0, hLit), dt = ldt.subarray(hLit);
- // max length bits
- lbt = max(lt);
- // max dist bits
- dbt = max(dt);
- lm = hMap(lt, lbt, 1);
- dm = hMap(dt, dbt, 1);
- }
- else
- err(1);
- if (pos > tbts) {
- if (noSt)
- err(0);
- break;
- }
- }
- // Make sure the buffer can hold this + the largest possible addition
- // Maximum chunk size (practically, theoretically infinite) is 2^17
- if (resize)
- cbuf(bt + 131072);
- var lms = (1 << lbt) - 1, dms = (1 << dbt) - 1;
- var lpos = pos;
- for (;; lpos = pos) {
- // bits read, code
- var c = lm[bits16(dat, pos) & lms], sym = c >> 4;
- pos += c & 15;
- if (pos > tbts) {
- if (noSt)
- err(0);
- break;
- }
- if (!c)
- err(2);
- if (sym < 256)
- buf[bt++] = sym;
- else if (sym == 256) {
- lpos = pos, lm = null;
- break;
- }
- else {
- var add = sym - 254;
- // no extra bits needed if less
- if (sym > 264) {
- // index
- var i = sym - 257, b = fleb[i];
- add = bits(dat, pos, (1 << b) - 1) + fl[i];
- pos += b;
- }
- // dist
- var d = dm[bits16(dat, pos) & dms], dsym = d >> 4;
- if (!d)
- err(3);
- pos += d & 15;
- var dt = fd[dsym];
- if (dsym > 3) {
- var b = fdeb[dsym];
- dt += bits16(dat, pos) & (1 << b) - 1, pos += b;
- }
- if (pos > tbts) {
- if (noSt)
- err(0);
- break;
- }
- if (resize)
- cbuf(bt + 131072);
- var end = bt + add;
- if (bt < dt) {
- var shift = dl - dt, dend = Math.min(dt, end);
- if (shift + bt < 0)
- err(3);
- for (; bt < dend; ++bt)
- buf[bt] = dict[shift + bt];
- }
- for (; bt < end; ++bt)
- buf[bt] = buf[bt - dt];
- }
- }
- st.l = lm, st.p = lpos, st.b = bt, st.f = final;
- if (lm)
- final = 1, st.m = lbt, st.d = dm, st.n = dbt;
- } while (!final);
- // don't reallocate for streams or user buffers
- return bt != buf.length && noBuf ? slc(buf, 0, bt) : buf.subarray(0, bt);
-};
-// starting at p, write the minimum number of bits that can hold v to d
-var wbits = function (d, p, v) {
- v <<= p & 7;
- var o = (p / 8) | 0;
- d[o] |= v;
- d[o + 1] |= v >> 8;
-};
-// starting at p, write the minimum number of bits (>8) that can hold v to d
-var wbits16 = function (d, p, v) {
- v <<= p & 7;
- var o = (p / 8) | 0;
- d[o] |= v;
- d[o + 1] |= v >> 8;
- d[o + 2] |= v >> 16;
-};
-// creates code lengths from a frequency table
-var hTree = function (d, mb) {
- // Need extra info to make a tree
- var t = [];
- for (var i = 0; i < d.length; ++i) {
- if (d[i])
- t.push({ s: i, f: d[i] });
- }
- var s = t.length;
- var t2 = t.slice();
- if (!s)
- return { t: et, l: 0 };
- if (s == 1) {
- var v = new esm_u8(t[0].s + 1);
- v[t[0].s] = 1;
- return { t: v, l: 1 };
- }
- t.sort(function (a, b) { return a.f - b.f; });
- // after i2 reaches last ind, will be stopped
- // freq must be greater than largest possible number of symbols
- t.push({ s: -1, f: 25001 });
- var l = t[0], r = t[1], i0 = 0, i1 = 1, i2 = 2;
- t[0] = { s: -1, f: l.f + r.f, l: l, r: r };
- // efficient algorithm from UZIP.js
- // i0 is lookbehind, i2 is lookahead - after processing two low-freq
- // symbols that combined have high freq, will start processing i2 (high-freq,
- // non-composite) symbols instead
- // see https://reddit.com/r/photopea/comments/ikekht/uzipjs_questions/
- while (i1 != s - 1) {
- l = t[t[i0].f < t[i2].f ? i0++ : i2++];
- r = t[i0 != i1 && t[i0].f < t[i2].f ? i0++ : i2++];
- t[i1++] = { s: -1, f: l.f + r.f, l: l, r: r };
- }
- var maxSym = t2[0].s;
- for (var i = 1; i < s; ++i) {
- if (t2[i].s > maxSym)
- maxSym = t2[i].s;
- }
- // code lengths
- var tr = new u16(maxSym + 1);
- // max bits in tree
- var mbt = ln(t[i1 - 1], tr, 0);
- if (mbt > mb) {
- // more algorithms from UZIP.js
- // TODO: find out how this code works (debt)
- // ind debt
- var i = 0, dt = 0;
- // left cost
- var lft = mbt - mb, cst = 1 << lft;
- t2.sort(function (a, b) { return tr[b.s] - tr[a.s] || a.f - b.f; });
- for (; i < s; ++i) {
- var i2_1 = t2[i].s;
- if (tr[i2_1] > mb) {
- dt += cst - (1 << (mbt - tr[i2_1]));
- tr[i2_1] = mb;
- }
- else
- break;
- }
- dt >>= lft;
- while (dt > 0) {
- var i2_2 = t2[i].s;
- if (tr[i2_2] < mb)
- dt -= 1 << (mb - tr[i2_2]++ - 1);
- else
- ++i;
- }
- for (; i >= 0 && dt; --i) {
- var i2_3 = t2[i].s;
- if (tr[i2_3] == mb) {
- --tr[i2_3];
- ++dt;
- }
- }
- mbt = mb;
- }
- return { t: new esm_u8(tr), l: mbt };
-};
-// get the max length and assign length codes
-var ln = function (n, l, d) {
- return n.s == -1
- ? Math.max(ln(n.l, l, d + 1), ln(n.r, l, d + 1))
- : (l[n.s] = d);
-};
-// length codes generation
-var lc = function (c) {
- var s = c.length;
- // Note that the semicolon was intentional
- while (s && !c[--s])
- ;
- var cl = new u16(++s);
- // ind num streak
- var cli = 0, cln = c[0], cls = 1;
- var w = function (v) { cl[cli++] = v; };
- for (var i = 1; i <= s; ++i) {
- if (c[i] == cln && i != s)
- ++cls;
- else {
- if (!cln && cls > 2) {
- for (; cls > 138; cls -= 138)
- w(32754);
- if (cls > 2) {
- w(cls > 10 ? ((cls - 11) << 5) | 28690 : ((cls - 3) << 5) | 12305);
- cls = 0;
- }
- }
- else if (cls > 3) {
- w(cln), --cls;
- for (; cls > 6; cls -= 6)
- w(8304);
- if (cls > 2)
- w(((cls - 3) << 5) | 8208), cls = 0;
- }
- while (cls--)
- w(cln);
- cls = 1;
- cln = c[i];
- }
- }
- return { c: cl.subarray(0, cli), n: s };
-};
-// calculate the length of output from tree, code lengths
-var clen = function (cf, cl) {
- var l = 0;
- for (var i = 0; i < cl.length; ++i)
- l += cf[i] * cl[i];
- return l;
-};
-// writes a fixed block
-// returns the new bit pos
-var wfblk = function (out, pos, dat) {
- // no need to write 00 as type: TypedArray defaults to 0
- var s = dat.length;
- var o = shft(pos + 2);
- out[o] = s & 255;
- out[o + 1] = s >> 8;
- out[o + 2] = out[o] ^ 255;
- out[o + 3] = out[o + 1] ^ 255;
- for (var i = 0; i < s; ++i)
- out[o + i + 4] = dat[i];
- return (o + 4 + s) * 8;
-};
-// writes a block
-var wblk = function (dat, out, final, syms, lf, df, eb, li, bs, bl, p) {
- wbits(out, p++, final);
- ++lf[256];
- var _a = hTree(lf, 15), dlt = _a.t, mlb = _a.l;
- var _b = hTree(df, 15), ddt = _b.t, mdb = _b.l;
- var _c = lc(dlt), lclt = _c.c, nlc = _c.n;
- var _d = lc(ddt), lcdt = _d.c, ndc = _d.n;
- var lcfreq = new u16(19);
- for (var i = 0; i < lclt.length; ++i)
- ++lcfreq[lclt[i] & 31];
- for (var i = 0; i < lcdt.length; ++i)
- ++lcfreq[lcdt[i] & 31];
- var _e = hTree(lcfreq, 7), lct = _e.t, mlcb = _e.l;
- var nlcc = 19;
- for (; nlcc > 4 && !lct[clim[nlcc - 1]]; --nlcc)
- ;
- var flen = (bl + 5) << 3;
- var ftlen = clen(lf, flt) + clen(df, fdt) + eb;
- var dtlen = clen(lf, dlt) + clen(df, ddt) + eb + 14 + 3 * nlcc + clen(lcfreq, lct) + 2 * lcfreq[16] + 3 * lcfreq[17] + 7 * lcfreq[18];
- if (bs >= 0 && flen <= ftlen && flen <= dtlen)
- return wfblk(out, p, dat.subarray(bs, bs + bl));
- var lm, ll, dm, dl;
- wbits(out, p, 1 + (dtlen < ftlen)), p += 2;
- if (dtlen < ftlen) {
- lm = hMap(dlt, mlb, 0), ll = dlt, dm = hMap(ddt, mdb, 0), dl = ddt;
- var llm = hMap(lct, mlcb, 0);
- wbits(out, p, nlc - 257);
- wbits(out, p + 5, ndc - 1);
- wbits(out, p + 10, nlcc - 4);
- p += 14;
- for (var i = 0; i < nlcc; ++i)
- wbits(out, p + 3 * i, lct[clim[i]]);
- p += 3 * nlcc;
- var lcts = [lclt, lcdt];
- for (var it = 0; it < 2; ++it) {
- var clct = lcts[it];
- for (var i = 0; i < clct.length; ++i) {
- var len = clct[i] & 31;
- wbits(out, p, llm[len]), p += lct[len];
- if (len > 15)
- wbits(out, p, (clct[i] >> 5) & 127), p += clct[i] >> 12;
- }
- }
- }
- else {
- lm = flm, ll = flt, dm = fdm, dl = fdt;
- }
- for (var i = 0; i < li; ++i) {
- var sym = syms[i];
- if (sym > 255) {
- var len = (sym >> 18) & 31;
- wbits16(out, p, lm[len + 257]), p += ll[len + 257];
- if (len > 7)
- wbits(out, p, (sym >> 23) & 31), p += fleb[len];
- var dst = sym & 31;
- wbits16(out, p, dm[dst]), p += dl[dst];
- if (dst > 3)
- wbits16(out, p, (sym >> 5) & 8191), p += fdeb[dst];
- }
- else {
- wbits16(out, p, lm[sym]), p += ll[sym];
- }
- }
- wbits16(out, p, lm[256]);
- return p + ll[256];
-};
-// deflate options (nice << 13) | chain
-var deo = /*#__PURE__*/ new i32([65540, 131080, 131088, 131104, 262176, 1048704, 1048832, 2114560, 2117632]);
-// empty
-var et = /*#__PURE__*/ new esm_u8(0);
-// compresses data into a raw DEFLATE buffer
-var dflt = function (dat, lvl, plvl, pre, post, st) {
- var s = st.z || dat.length;
- var o = new esm_u8(pre + s + 5 * (1 + Math.ceil(s / 7000)) + post);
- // writing to this writes to the output buffer
- var w = o.subarray(pre, o.length - post);
- var lst = st.l;
- var pos = (st.r || 0) & 7;
- if (lvl) {
- if (pos)
- w[0] = st.r >> 3;
- var opt = deo[lvl - 1];
- var n = opt >> 13, c = opt & 8191;
- var msk_1 = (1 << plvl) - 1;
- // prev 2-byte val map curr 2-byte val map
- var prev = st.p || new u16(32768), head = st.h || new u16(msk_1 + 1);
- var bs1_1 = Math.ceil(plvl / 3), bs2_1 = 2 * bs1_1;
- var hsh = function (i) { return (dat[i] ^ (dat[i + 1] << bs1_1) ^ (dat[i + 2] << bs2_1)) & msk_1; };
- // 24576 is an arbitrary number of maximum symbols per block
- // 424 buffer for last block
- var syms = new i32(25000);
- // length/literal freq distance freq
- var lf = new u16(288), df = new u16(32);
- // l/lcnt exbits index l/lind waitdx blkpos
- var lc_1 = 0, eb = 0, i = st.i || 0, li = 0, wi = st.w || 0, bs = 0;
- for (; i + 2 < s; ++i) {
- // hash value
- var hv = hsh(i);
- // index mod 32768 previous index mod
- var imod = i & 32767, pimod = head[hv];
- prev[imod] = pimod;
- head[hv] = imod;
- // We always should modify head and prev, but only add symbols if
- // this data is not yet processed ("wait" for wait index)
- if (wi <= i) {
- // bytes remaining
- var rem = s - i;
- if ((lc_1 > 7000 || li > 24576) && (rem > 423 || !lst)) {
- pos = wblk(dat, w, 0, syms, lf, df, eb, li, bs, i - bs, pos);
- li = lc_1 = eb = 0, bs = i;
- for (var j = 0; j < 286; ++j)
- lf[j] = 0;
- for (var j = 0; j < 30; ++j)
- df[j] = 0;
- }
- // len dist chain
- var l = 2, d = 0, ch_1 = c, dif = imod - pimod & 32767;
- if (rem > 2 && hv == hsh(i - dif)) {
- var maxn = Math.min(n, rem) - 1;
- var maxd = Math.min(32767, i);
- // max possible length
- // not capped at dif because decompressors implement "rolling" index population
- var ml = Math.min(258, rem);
- while (dif <= maxd && --ch_1 && imod != pimod) {
- if (dat[i + l] == dat[i + l - dif]) {
- var nl = 0;
- for (; nl < ml && dat[i + nl] == dat[i + nl - dif]; ++nl)
- ;
- if (nl > l) {
- l = nl, d = dif;
- // break out early when we reach "nice" (we are satisfied enough)
- if (nl > maxn)
- break;
- // now, find the rarest 2-byte sequence within this
- // length of literals and search for that instead.
- // Much faster than just using the start
- var mmd = Math.min(dif, nl - 2);
- var md = 0;
- for (var j = 0; j < mmd; ++j) {
- var ti = i - dif + j & 32767;
- var pti = prev[ti];
- var cd = ti - pti & 32767;
- if (cd > md)
- md = cd, pimod = ti;
- }
- }
- }
- // check the previous match
- imod = pimod, pimod = prev[imod];
- dif += imod - pimod & 32767;
- }
- }
- // d will be nonzero only when a match was found
- if (d) {
- // store both dist and len data in one int32
- // Make sure this is recognized as a len/dist with 28th bit (2^28)
- syms[li++] = 268435456 | (revfl[l] << 18) | revfd[d];
- var lin = revfl[l] & 31, din = revfd[d] & 31;
- eb += fleb[lin] + fdeb[din];
- ++lf[257 + lin];
- ++df[din];
- wi = i + l;
- ++lc_1;
- }
- else {
- syms[li++] = dat[i];
- ++lf[dat[i]];
- }
- }
- }
- for (i = Math.max(i, wi); i < s; ++i) {
- syms[li++] = dat[i];
- ++lf[dat[i]];
- }
- pos = wblk(dat, w, lst, syms, lf, df, eb, li, bs, i - bs, pos);
- if (!lst) {
- st.r = (pos & 7) | w[(pos / 8) | 0] << 3;
- // shft(pos) now 1 less if pos & 7 != 0
- pos -= 7;
- st.h = head, st.p = prev, st.i = i, st.w = wi;
- }
- }
- else {
- for (var i = st.w || 0; i < s + lst; i += 65535) {
- // end
- var e = i + 65535;
- if (e >= s) {
- // write final block
- w[(pos / 8) | 0] = lst;
- e = s;
- }
- pos = wfblk(w, pos + 1, dat.subarray(i, e));
- }
- st.i = s;
- }
- return slc(o, 0, pre + shft(pos) + post);
-};
-// CRC32 table
-var crct = /*#__PURE__*/ (function () {
- var t = new Int32Array(256);
- for (var i = 0; i < 256; ++i) {
- var c = i, k = 9;
- while (--k)
- c = ((c & 1) && -306674912) ^ (c >>> 1);
- t[i] = c;
- }
- return t;
-})();
-// CRC32
-var crc = function () {
- var c = -1;
- return {
- p: function (d) {
- // closures have awful performance
- var cr = c;
- for (var i = 0; i < d.length; ++i)
- cr = crct[(cr & 255) ^ d[i]] ^ (cr >>> 8);
- c = cr;
- },
- d: function () { return ~c; }
- };
-};
-// Adler32
-var adler = function () {
- var a = 1, b = 0;
- return {
- p: function (d) {
- // closures have awful performance
- var n = a, m = b;
- var l = d.length | 0;
- for (var i = 0; i != l;) {
- var e = Math.min(i + 2655, l);
- for (; i < e; ++i)
- m += n += d[i];
- n = (n & 65535) + 15 * (n >> 16), m = (m & 65535) + 15 * (m >> 16);
- }
- a = n, b = m;
- },
- d: function () {
- a %= 65521, b %= 65521;
- return (a & 255) << 24 | (a & 0xFF00) << 8 | (b & 255) << 8 | (b >> 8);
- }
- };
-};
-;
-// deflate with opts
-var dopt = function (dat, opt, pre, post, st) {
- if (!st) {
- st = { l: 1 };
- if (opt.dictionary) {
- var dict = opt.dictionary.subarray(-32768);
- var newDat = new esm_u8(dict.length + dat.length);
- newDat.set(dict);
- newDat.set(dat, dict.length);
- dat = newDat;
- st.w = dict.length;
- }
- }
- return dflt(dat, opt.level == null ? 6 : opt.level, opt.mem == null ? (st.l ? Math.ceil(Math.max(8, Math.min(13, Math.log(dat.length))) * 1.5) : 20) : (12 + opt.mem), pre, post, st);
-};
-// Walmart object spread
-var mrg = function (a, b) {
- var o = {};
- for (var k in a)
- o[k] = a[k];
- for (var k in b)
- o[k] = b[k];
- return o;
-};
// worker clone
// This is possibly the craziest part of the entire codebase, despite how simple it may seem.
// The only parameter to this function is a closure that returns an array of variables outside of the function scope.
// We're going to try to figure out the variable names used in the closure as strings because that is crucial for workerization.
// We will return an object mapping of true variable name to value (basically, the current scope as a JS object).
// The reason we can't just use the original variable names is minifiers mangling the toplevel scope.
// This took me three weeks to figure out how to do.
var wcln = function (fn, fnStr, td) {
    var dt = fn();          // the actual dependency values
    var st = fn.toString(); // source of the closure, e.g. "() => [a, b, c]"
    // recover the (possibly mangled) names from the array literal's source
    var ks = st.slice(st.indexOf('[') + 1, st.lastIndexOf(']')).replace(/\s+/g, '').split(',');
    for (var i = 0; i < dt.length; ++i) {
        var v = dt[i], k = ks[i];
        if (typeof v == 'function') {
            // functions get serialized into the code string under their name
            fnStr += ';' + k + '=';
            var st_1 = v.toString();
            if (v.prototype) {
                // for global objects
                if (st_1.indexOf('[native code]') != -1) {
                    // native constructor: reference it by the global name
                    // parsed out of "function Name() { [native code] }"
                    var spInd = st_1.indexOf(' ', 8) + 1;
                    fnStr += st_1.slice(spInd, st_1.indexOf('(', spInd));
                }
                else {
                    // user-defined class: emit the constructor, then every
                    // prototype method as an explicit assignment
                    fnStr += st_1;
                    for (var t in v.prototype)
                        fnStr += ';' + k + '.prototype.' + t + '=' + v.prototype[t].toString();
                }
            }
            else
                fnStr += st_1;
        }
        else
            // plain data travels via the worker environment object instead
            td[k] = v;
    }
    return fnStr;
};
// cache of workerized code + environment, keyed by algorithm id
var ch = [];
// clone bufs
// Replaces every typed-array value in v with a fresh copy and collects the
// copies' ArrayBuffers so they can be transferred to a worker without
// detaching the caller's originals.
var cbfs = function (v) {
    var transfer = [];
    for (var key in v) {
        var val = v[key];
        if (!val.buffer)
            continue;
        var copy = new val.constructor(val);
        v[key] = copy;
        transfer.push(copy.buffer);
    }
    return transfer;
};
// use a worker to execute code
// Builds (and caches per id) the stringified dependency code plus its
// non-function environment, then spawns a worker whose bootstrap copies the
// environment into self and installs `init` as the message handler.
var wrkr = function (fns, init, id, cb) {
    if (!ch[id]) {
        var fnStr = '', td_1 = {}, m = fns.length - 1;
        // stringify every dependency list into one code blob; all lists
        // share the same environment object
        for (var i = 0; i < m; ++i)
            fnStr = wcln(fns[i], fnStr, td_1);
        ch[id] = { c: wcln(fns[m], fnStr, td_1), e: td_1 };
    }
    // fresh copy of the environment per worker; cbfs clones its typed arrays
    // so their buffers can be transferred safely
    var td = mrg({}, ch[id].e);
    return wk(ch[id].c + ';onmessage=function(e){for(var k in e.data)self[k]=e.data[k];onmessage=' + init.toString() + '}', id, td, cbfs(td), cb);
};
// Dependency lists for workerization: each closure returns, positionally,
// every toplevel helper the corresponding worker must carry; wcln stringifies
// the values and recovers their names from the array literal's source.
// base async inflate fn
var bInflt = function () { return [esm_u8, u16, i32, fleb, fdeb, clim, fl, fd, flrm, fdrm, rev, ec, hMap, max, bits, bits16, shft, slc, err, inflt, inflateSync, pbf, gopt]; };
var bDflt = function () { return [esm_u8, u16, i32, fleb, fdeb, clim, revfl, revfd, flm, flt, fdm, fdt, rev, deo, et, hMap, wbits, wbits16, hTree, ln, lc, clen, wfblk, wblk, shft, slc, dflt, dopt, deflateSync, pbf]; };
// gzip extra
var gze = function () { return [gzh, gzhl, wbytes, crc, crct]; };
// gunzip extra
var guze = function () { return [gzs, gzl]; };
// zlib extra
var zle = function () { return [zlh, wbytes, adler]; };
// unzlib extra
var zule = function () { return [zls]; };
// post buf
// Sends a chunk across the worker boundary, transferring (not copying) its
// backing ArrayBuffer.
var pbf = function (msg) { return postMessage(msg, [msg.buffer]); };
// get opts
// Normalizes worker-side decompression options: a falsy o passes through
// unchanged; otherwise returns an object with a preallocated output buffer
// (when o.size is set) and the optional dictionary.
var gopt = function (o) {
    if (!o)
        return o;
    return {
        out: o.size && new esm_u8(o.size),
        dictionary: o.dictionary
    };
};
// async helper
// One-shot async wrapper: spawns a worker (source cached per id), posts the
// input (transferring its buffer when opts.consume is set), and tears the
// worker down after the single result callback. Returns a cancel function.
var cbify = function (dat, opts, fns, init, id, cb) {
    var w = wrkr(fns, init, id, function (err, dat) {
        // exactly one response is expected: terminate before reporting
        w.terminate();
        cb(err, dat);
    });
    w.postMessage([dat, opts], opts.consume ? [dat.buffer] : []);
    return function () { w.terminate(); };
};
// auto stream
// Worker-side glue for a synchronous stream object: forwards produced chunks
// to the main thread (transferring buffers) and returns the worker's
// onmessage handler.
var astrm = function (strm) {
    strm.ondata = function (dat, final) { return postMessage([dat, final], [dat.buffer]); };
    return function (ev) {
        if (ev.data.length) {
            // [chunk, final]: push it, then echo the consumed byte count so
            // the main thread can do backpressure (ondrain) accounting
            strm.push(ev.data[0], ev.data[1]);
            postMessage([ev.data[0].length]);
        }
        else
            // an empty message is the flush signal
            strm.flush();
    };
};
// async stream attach
// Main-thread side of a workerized stream: spawns the worker, wires its
// messages back into strm, and installs push/terminate(/flush) on strm.
// Worker messages are dispatched by shape:
//   error        -> terminate and surface through ondata
//   non-array    -> side-channel value for ext (e.g. gzip member offsets)
//   [n]          -> n input bytes consumed (queuedSize/ondrain accounting)
//   [chunk, fin] -> output chunk; terminate after the final one
var astrmify = function (fns, strm, opts, init, id, flush, ext) {
    var t; // becomes truthy once the final chunk has been pushed
    var w = wrkr(fns, init, id, function (err, dat) {
        if (err)
            w.terminate(), strm.ondata.call(strm, err);
        else if (!Array.isArray(dat))
            ext(dat);
        else if (dat.length == 1) {
            strm.queuedSize -= dat[0];
            if (strm.ondrain)
                strm.ondrain(dat[0]);
        }
        else {
            if (dat[1])
                w.terminate();
            strm.ondata.call(strm, err, dat[0], dat[1]);
        }
    });
    w.postMessage(opts);
    strm.queuedSize = 0;
    strm.push = function (d, f) {
        if (!strm.ondata)
            err(5);
        if (t)
            // pushing after the final chunk: deliver a stream-reuse error
            strm.ondata(err(4, 0, 1), null, !!f);
        strm.queuedSize += d.length;
        w.postMessage([d, t = f], [d.buffer]);
    };
    strm.terminate = function () { w.terminate(); };
    if (flush) {
        strm.flush = function () { w.postMessage([]); };
    }
};
// read 2 bytes (little-endian)
var b2 = function (d, b) {
    return d[b] | (d[b + 1] << 8);
};
// read 4 bytes (little-endian, unsigned)
var b4 = function (d, b) {
    var lo = d[b] | (d[b + 1] << 8);
    var hi = (d[b + 2] << 16) | (d[b + 3] << 24);
    return (lo | hi) >>> 0;
};
// read 8 bytes (little-endian; returned as a JS number, exact up to 2^53)
var b8 = function (d, b) {
    var low = b4(d, b);
    var high = b4(d, b + 4);
    return low + high * 4294967296;
};
// write bytes (little-endian)
// Stops as soon as the remaining value is zero, so nothing is written for
// v == 0 and high-order zero bytes are left untouched. Assumes d is a
// Uint8Array, whose element store keeps only the low 8 bits -- callers here
// all pass output buffers; confirm if reused elsewhere.
var wbytes = function (d, b, v) {
    while (v) {
        d[b] = v;
        v >>>= 8;
        ++b;
    }
};
// gzip header
// Writes a gzip member header into c. Assumes c is zero-filled and at least
// gzhl(o) bytes long -- TODO confirm all callers allocate it fresh.
var gzh = function (c, o) {
    var fn = o.filename;
    // magic 1F 8B, CM = 8 (deflate), XFL hint (4 = fastest, 2 = max), OS = 3
    c[0] = 31, c[1] = 139, c[2] = 8, c[8] = o.level < 2 ? 4 : o.level == 9 ? 2 : 0, c[9] = 3; // assume Unix
    // MTIME in seconds; note an unset o.mtime (undefined != 0) falls through
    // to Date.now(), so pass mtime: 0 to suppress the timestamp
    if (o.mtime != 0)
        wbytes(c, 4, Math.floor(new Date(o.mtime || Date.now()) / 1000));
    if (fn) {
        // FLG.FNAME; the loop deliberately runs to i == fn.length, where
        // charCodeAt yields NaN and the Uint8Array store coerces it to the
        // required NUL terminator
        c[3] = 8;
        for (var i = 0; i <= fn.length; ++i)
            c[i + 10] = fn.charCodeAt(i);
    }
};
// gzip footer: -8 to -4 = CRC, -4 to -0 is length
// gzip start
// Validates the fixed gzip header and returns the offset of the first byte
// of compressed data, skipping any optional fields.
var gzs = function (d) {
    if (d[0] != 31 || d[1] != 139 || d[2] != 8)
        err(6, 'invalid gzip data');
    var flg = d[3];
    var st = 10;
    // FEXTRA: 2-byte little-endian length plus the field itself
    if (flg & 4)
        st += (d[10] | d[11] << 8) + 2;
    // skip FNAME and/or FCOMMENT: each is NUL-terminated, so the pending
    // count drops by one every time a zero byte is consumed
    for (var zs = (flg >> 3 & 1) + (flg >> 4 & 1); zs > 0; zs -= !d[st++])
        ;
    // FHCRC (flag bit 1) adds two more bytes
    return st + (flg & 2);
};
// gzip length
// Reads ISIZE (uncompressed size mod 2^32) from the last four bytes of the
// gzip stream, little-endian.
var gzl = function (d) {
    var e = d.length - 4;
    return (d[e] | (d[e + 1] << 8) | (d[e + 2] << 16) | (d[e + 3] << 24)) >>> 0;
};
// gzip header length
// 10 fixed bytes, plus the filename and its NUL terminator when present.
var gzhl = function (o) {
    var fn = o.filename;
    return fn ? 11 + fn.length : 10;
};
// zlib header
// Writes the 2-byte CMF/FLG zlib header into c, plus the 4-byte Adler-32
// DICTID when a preset dictionary is used.
var zlh = function (c, o) {
    // map the compression level onto the 2-bit FLEVEL hint
    var lv = o.level, fl = lv == 0 ? 0 : lv < 6 ? 1 : lv == 9 ? 3 : 2;
    // 0x78 = CM 8 (deflate) + CINFO 7 (32K window); FLG bit 5 = FDICT
    c[0] = 120, c[1] = (fl << 6) | (o.dictionary && 32);
    // FCHECK: pad so the 16-bit CMF/FLG value is a multiple of 31
    c[1] |= 31 - ((c[0] << 8) | c[1]) % 31;
    if (o.dictionary) {
        var h = adler();
        h.p(o.dictionary);
        wbytes(c, 2, h.d());
    }
};
// zlib start
// Validates the two-byte zlib header and returns the offset at which the raw
// DEFLATE stream begins: 2, or 6 when a preset-dictionary id is present.
var zls = function (d, dict) {
    // CM must be 8 (deflate), CINFO <= 7 (window <= 32K), and the CMF/FLG
    // pair must be a multiple of 31
    var cmf = d[0];
    if ((cmf & 15) != 8 || (cmf >> 4) > 7 || ((cmf << 8 | d[1]) % 31))
        err(6, 'invalid zlib data');
    // the FDICT bit must agree with whether the caller supplied a dictionary
    var fdict = d[1] >> 5 & 1;
    if (fdict == +!dict)
        err(6, 'invalid zlib data: ' + (d[1] & 32 ? 'need' : 'unexpected') + ' dictionary');
    // skip the 4-byte DICTID when FDICT is set
    return (d[1] >> 3 & 4) + 2;
};
// Shared stream-constructor argument handling: supports both (opts, cb) and
// just (cb). Assigns the callback onto the stream (`this`, via .call) and
// returns the options object (an empty default when only cb was given).
function StrmOpt(opts, cb) {
    if (typeof opts == 'function') {
        cb = opts;
        opts = {};
    }
    this.ondata = cb;
    return opts;
}
-/**
- * Streaming DEFLATE compression
- */
-var Deflate = /*#__PURE__*/ ((/* unused pure expression or super */ null && (function () {
- function Deflate(opts, cb) {
- if (typeof opts == 'function')
- cb = opts, opts = {};
- this.ondata = cb;
- this.o = opts || {};
- this.s = { l: 0, i: 32768, w: 32768, z: 32768 };
- // Buffer length must always be 0 mod 32768 for index calculations to be correct when modifying head and prev
- // 98304 = 32768 (lookback) + 65536 (common chunk size)
- this.b = new esm_u8(98304);
- if (this.o.dictionary) {
- var dict = this.o.dictionary.subarray(-32768);
- this.b.set(dict, 32768 - dict.length);
- this.s.i = 32768 - dict.length;
- }
- }
- Deflate.prototype.p = function (c, f) {
- this.ondata(dopt(c, this.o, 0, 0, this.s), f);
- };
- /**
- * Pushes a chunk to be deflated
- * @param chunk The chunk to push
- * @param final Whether this is the last chunk
- */
- Deflate.prototype.push = function (chunk, final) {
- if (!this.ondata)
- err(5);
- if (this.s.l)
- err(4);
- var endLen = chunk.length + this.s.z;
- if (endLen > this.b.length) {
- if (endLen > 2 * this.b.length - 32768) {
- var newBuf = new esm_u8(endLen & -32768);
- newBuf.set(this.b.subarray(0, this.s.z));
- this.b = newBuf;
- }
- var split = this.b.length - this.s.z;
- this.b.set(chunk.subarray(0, split), this.s.z);
- this.s.z = this.b.length;
- this.p(this.b, false);
- this.b.set(this.b.subarray(-32768));
- this.b.set(chunk.subarray(split), 32768);
- this.s.z = chunk.length - split + 32768;
- this.s.i = 32766, this.s.w = 32768;
- }
- else {
- this.b.set(chunk, this.s.z);
- this.s.z += chunk.length;
- }
- this.s.l = final & 1;
- if (this.s.z > this.s.w + 8191 || final) {
- this.p(this.b, final || false);
- this.s.w = this.s.i, this.s.i -= 2;
- }
- };
- /**
- * Flushes buffered uncompressed data. Useful to immediately retrieve the
- * deflated output for small inputs.
- */
- Deflate.prototype.flush = function () {
- if (!this.ondata)
- err(5);
- if (this.s.l)
- err(4);
- this.p(this.b, false);
- this.s.w = this.s.i, this.s.i -= 2;
- };
- return Deflate;
-}())));
-
-/**
- * Asynchronous streaming DEFLATE compression
- */
-var AsyncDeflate = /*#__PURE__*/ ((/* unused pure expression or super */ null && (function () {
- function AsyncDeflate(opts, cb) {
- astrmify([
- bDflt,
- function () { return [astrm, Deflate]; }
- ], this, StrmOpt.call(this, opts, cb), function (ev) {
- var strm = new Deflate(ev.data);
- onmessage = astrm(strm);
- }, 6, 1);
- }
- return AsyncDeflate;
-}())));
-
/**
 * Asynchronously compresses data with DEFLATE without any wrapper, in a
 * worker built from the bDflt dependency list (cache id 0).
 * @param data The data to compress
 * @param opts The compression options (may be omitted: (data, cb))
 * @param cb Called with (error, result) when done
 * @returns A function that immediately terminates the compression
 */
function deflate(data, opts, cb) {
    if (!cb)
        cb = opts, opts = {};
    if (typeof cb != 'function')
        err(7);
    return cbify(data, opts, [
        bDflt,
    ], function (ev) { return pbf(deflateSync(ev.data[0], ev.data[1])); }, 0, cb);
}
-/**
- * Compresses data with DEFLATE without any wrapper
- * @param data The data to compress
- * @param opts The compression options
- * @returns The deflated version of the data
- */
-function deflateSync(data, opts) {
- return dopt(data, opts || {}, 0, 0);
-}
-/**
- * Streaming DEFLATE decompression
- */
-var Inflate = /*#__PURE__*/ ((/* unused pure expression or super */ null && (function () {
- function Inflate(opts, cb) {
- // no StrmOpt here to avoid adding to workerizer
- if (typeof opts == 'function')
- cb = opts, opts = {};
- this.ondata = cb;
- var dict = opts && opts.dictionary && opts.dictionary.subarray(-32768);
- this.s = { i: 0, b: dict ? dict.length : 0 };
- this.o = new esm_u8(32768);
- this.p = new esm_u8(0);
- if (dict)
- this.o.set(dict);
- }
- Inflate.prototype.e = function (c) {
- if (!this.ondata)
- err(5);
- if (this.d)
- err(4);
- if (!this.p.length)
- this.p = c;
- else if (c.length) {
- var n = new esm_u8(this.p.length + c.length);
- n.set(this.p), n.set(c, this.p.length), this.p = n;
- }
- };
- Inflate.prototype.c = function (final) {
- this.s.i = +(this.d = final || false);
- var bts = this.s.b;
- var dt = inflt(this.p, this.s, this.o);
- this.ondata(slc(dt, bts, this.s.b), this.d);
- this.o = slc(dt, this.s.b - 32768), this.s.b = this.o.length;
- this.p = slc(this.p, (this.s.p / 8) | 0), this.s.p &= 7;
- };
- /**
- * Pushes a chunk to be inflated
- * @param chunk The chunk to push
- * @param final Whether this is the final chunk
- */
- Inflate.prototype.push = function (chunk, final) {
- this.e(chunk), this.c(final);
- };
- return Inflate;
-}())));
-
-/**
- * Asynchronous streaming DEFLATE decompression
- */
-var AsyncInflate = /*#__PURE__*/ ((/* unused pure expression or super */ null && (function () {
- function AsyncInflate(opts, cb) {
- astrmify([
- bInflt,
- function () { return [astrm, Inflate]; }
- ], this, StrmOpt.call(this, opts, cb), function (ev) {
- var strm = new Inflate(ev.data);
- onmessage = astrm(strm);
- }, 7, 0);
- }
- return AsyncInflate;
-}())));
-
/**
 * Asynchronously expands raw DEFLATE data in a worker built from the bInflt
 * dependency list (cache id 1). gopt normalizes size/dictionary options
 * inside the worker.
 * @param data The data to decompress
 * @param opts The decompression options (may be omitted: (data, cb))
 * @param cb Called with (error, result) when done
 * @returns A function that immediately terminates the decompression
 */
function inflate(data, opts, cb) {
    if (!cb)
        cb = opts, opts = {};
    if (typeof cb != 'function')
        err(7);
    return cbify(data, opts, [
        bInflt
    ], function (ev) { return pbf(inflateSync(ev.data[0], gopt(ev.data[1]))); }, 1, cb);
}
-/**
- * Expands DEFLATE data with no wrapper
- * @param data The data to decompress
- * @param opts The decompression options
- * @returns The decompressed version of the data
- */
-function inflateSync(data, opts) {
- return inflt(data, { i: 2 }, opts && opts.out, opts && opts.dictionary);
-}
-// before you yell at me for not just using extends, my reason is that TS inheritance is hard to workerize.
-/**
- * Streaming GZIP compression
- */
-var Gzip = /*#__PURE__*/ ((/* unused pure expression or super */ null && (function () {
- function Gzip(opts, cb) {
- this.c = crc();
- this.l = 0;
- this.v = 1;
- Deflate.call(this, opts, cb);
- }
- /**
- * Pushes a chunk to be GZIPped
- * @param chunk The chunk to push
- * @param final Whether this is the last chunk
- */
- Gzip.prototype.push = function (chunk, final) {
- this.c.p(chunk);
- this.l += chunk.length;
- Deflate.prototype.push.call(this, chunk, final);
- };
- Gzip.prototype.p = function (c, f) {
- var raw = dopt(c, this.o, this.v && gzhl(this.o), f && 8, this.s);
- if (this.v)
- gzh(raw, this.o), this.v = 0;
- if (f)
- wbytes(raw, raw.length - 8, this.c.d()), wbytes(raw, raw.length - 4, this.l);
- this.ondata(raw, f);
- };
- /**
- * Flushes buffered uncompressed data. Useful to immediately retrieve the
- * GZIPped output for small inputs.
- */
- Gzip.prototype.flush = function () {
- Deflate.prototype.flush.call(this);
- };
- return Gzip;
-}())));
-
-/**
- * Asynchronous streaming GZIP compression
- */
-var AsyncGzip = /*#__PURE__*/ ((/* unused pure expression or super */ null && (function () {
- function AsyncGzip(opts, cb) {
- astrmify([
- bDflt,
- gze,
- function () { return [astrm, Deflate, Gzip]; }
- ], this, StrmOpt.call(this, opts, cb), function (ev) {
- var strm = new Gzip(ev.data);
- onmessage = astrm(strm);
- }, 8, 1);
- }
- return AsyncGzip;
-}())));
-
/**
 * Asynchronously compresses data with GZIP in a worker (cache id 2); the
 * worker carries the DEFLATE core plus the gzip header/CRC helpers.
 * @param data The data to compress
 * @param opts The compression options (may be omitted: (data, cb))
 * @param cb Called with (error, result) when done
 * @returns A function that immediately terminates the compression
 */
function gzip(data, opts, cb) {
    if (!cb)
        cb = opts, opts = {};
    if (typeof cb != 'function')
        err(7);
    return cbify(data, opts, [
        bDflt,
        gze,
        function () { return [gzipSync]; }
    ], function (ev) { return pbf(gzipSync(ev.data[0], ev.data[1])); }, 2, cb);
}
-/**
- * Compresses data with GZIP
- * @param data The data to compress
- * @param opts The compression options
- * @returns The gzipped version of the data
- */
-function gzipSync(data, opts) {
- if (!opts)
- opts = {};
- var c = crc(), l = data.length;
- c.p(data);
- var d = dopt(data, opts, gzhl(opts), 8), s = d.length;
- return gzh(d, opts), wbytes(d, s - 8, c.d()), wbytes(d, s - 4, l), d;
-}
-/**
- * Streaming single or multi-member GZIP decompression
- */
-var Gunzip = /*#__PURE__*/ ((/* unused pure expression or super */ null && (function () {
- function Gunzip(opts, cb) {
- this.v = 1;
- this.r = 0;
- Inflate.call(this, opts, cb);
- }
- /**
- * Pushes a chunk to be GUNZIPped
- * @param chunk The chunk to push
- * @param final Whether this is the last chunk
- */
- Gunzip.prototype.push = function (chunk, final) {
- Inflate.prototype.e.call(this, chunk);
- this.r += chunk.length;
- if (this.v) {
- var p = this.p.subarray(this.v - 1);
- var s = p.length > 3 ? gzs(p) : 4;
- if (s > p.length) {
- if (!final)
- return;
- }
- else if (this.v > 1 && this.onmember) {
- this.onmember(this.r - p.length);
- }
- this.p = p.subarray(s), this.v = 0;
- }
- // necessary to prevent TS from using the closure value
- // This allows for workerization to function correctly
- Inflate.prototype.c.call(this, final);
- // process concatenated GZIP
- if (this.s.f && !this.s.l && !final) {
- this.v = shft(this.s.p) + 9;
- this.s = { i: 0 };
- this.o = new esm_u8(0);
- this.push(new esm_u8(0), final);
- }
- };
- return Gunzip;
-}())));
-
-/**
- * Asynchronous streaming single or multi-member GZIP decompression
- */
-var AsyncGunzip = /*#__PURE__*/ ((/* unused pure expression or super */ null && (function () {
- function AsyncGunzip(opts, cb) {
- var _this = this;
- astrmify([
- bInflt,
- guze,
- function () { return [astrm, Inflate, Gunzip]; }
- ], this, StrmOpt.call(this, opts, cb), function (ev) {
- var strm = new Gunzip(ev.data);
- strm.onmember = function (offset) { return postMessage(offset); };
- onmessage = astrm(strm);
- }, 9, 0, function (offset) { return _this.onmember && _this.onmember(offset); });
- }
- return AsyncGunzip;
-}())));
-
/**
 * Asynchronously expands GZIP data in a worker (cache id 3); the worker
 * carries the inflate core plus the gzip header/length helpers.
 * @param data The data to decompress
 * @param opts The decompression options (may be omitted: (data, cb))
 * @param cb Called with (error, result) when done
 * @returns A function that immediately terminates the decompression
 */
function gunzip(data, opts, cb) {
    if (!cb)
        cb = opts, opts = {};
    if (typeof cb != 'function')
        err(7);
    return cbify(data, opts, [
        bInflt,
        guze,
        function () { return [gunzipSync]; }
    ], function (ev) { return pbf(gunzipSync(ev.data[0], ev.data[1])); }, 3, cb);
}
-/**
- * Expands GZIP data
- * @param data The data to decompress
- * @param opts The decompression options
- * @returns The decompressed version of the data
- */
-function gunzipSync(data, opts) {
- var st = gzs(data);
- if (st + 8 > data.length)
- err(6, 'invalid gzip data');
- return inflt(data.subarray(st, -8), { i: 2 }, opts && opts.out || new esm_u8(gzl(data)), opts && opts.dictionary);
-}
-/**
- * Streaming Zlib compression
- */
-var Zlib = /*#__PURE__*/ ((/* unused pure expression or super */ null && (function () {
- function Zlib(opts, cb) {
- this.c = adler();
- this.v = 1;
- Deflate.call(this, opts, cb);
- }
- /**
- * Pushes a chunk to be zlibbed
- * @param chunk The chunk to push
- * @param final Whether this is the last chunk
- */
- Zlib.prototype.push = function (chunk, final) {
- this.c.p(chunk);
- Deflate.prototype.push.call(this, chunk, final);
- };
- Zlib.prototype.p = function (c, f) {
- var raw = dopt(c, this.o, this.v && (this.o.dictionary ? 6 : 2), f && 4, this.s);
- if (this.v)
- zlh(raw, this.o), this.v = 0;
- if (f)
- wbytes(raw, raw.length - 4, this.c.d());
- this.ondata(raw, f);
- };
- /**
- * Flushes buffered uncompressed data. Useful to immediately retrieve the
- * zlibbed output for small inputs.
- */
- Zlib.prototype.flush = function () {
- Deflate.prototype.flush.call(this);
- };
- return Zlib;
-}())));
-
-/**
- * Asynchronous streaming Zlib compression
- */
-var AsyncZlib = /*#__PURE__*/ ((/* unused pure expression or super */ null && (function () {
- function AsyncZlib(opts, cb) {
- astrmify([
- bDflt,
- zle,
- function () { return [astrm, Deflate, Zlib]; }
- ], this, StrmOpt.call(this, opts, cb), function (ev) {
- var strm = new Zlib(ev.data);
- onmessage = astrm(strm);
- }, 10, 1);
- }
- return AsyncZlib;
-}())));
-
/**
 * Asynchronously compresses data with Zlib in a worker (cache id 4); the
 * worker carries the DEFLATE core plus the zlib header/Adler-32 helpers.
 * @param data The data to compress
 * @param opts The compression options (may be omitted: (data, cb))
 * @param cb Called with (error, result) when done
 * @returns A function that immediately terminates the compression
 */
function zlib(data, opts, cb) {
    if (!cb)
        cb = opts, opts = {};
    if (typeof cb != 'function')
        err(7);
    return cbify(data, opts, [
        bDflt,
        zle,
        function () { return [zlibSync]; }
    ], function (ev) { return pbf(zlibSync(ev.data[0], ev.data[1])); }, 4, cb);
}
-/**
- * Compress data with Zlib
- * @param data The data to compress
- * @param opts The compression options
- * @returns The zlib-compressed version of the data
- */
-function zlibSync(data, opts) {
- if (!opts)
- opts = {};
- var a = adler();
- a.p(data);
- var d = dopt(data, opts, opts.dictionary ? 6 : 2, 4);
- return zlh(d, opts), wbytes(d, d.length - 4, a.d()), d;
-}
-/**
- * Streaming Zlib decompression
- */
-var Unzlib = /*#__PURE__*/ ((/* unused pure expression or super */ null && (function () {
- function Unzlib(opts, cb) {
- Inflate.call(this, opts, cb);
- this.v = opts && opts.dictionary ? 2 : 1;
- }
- /**
- * Pushes a chunk to be unzlibbed
- * @param chunk The chunk to push
- * @param final Whether this is the last chunk
- */
- Unzlib.prototype.push = function (chunk, final) {
- Inflate.prototype.e.call(this, chunk);
- if (this.v) {
- if (this.p.length < 6 && !final)
- return;
- this.p = this.p.subarray(zls(this.p, this.v - 1)), this.v = 0;
- }
- if (final) {
- if (this.p.length < 4)
- err(6, 'invalid zlib data');
- this.p = this.p.subarray(0, -4);
- }
- // necessary to prevent TS from using the closure value
- // This allows for workerization to function correctly
- Inflate.prototype.c.call(this, final);
- };
- return Unzlib;
-}())));
-
-/**
- * Asynchronous streaming Zlib decompression
- */
-var AsyncUnzlib = /*#__PURE__*/ ((/* unused pure expression or super */ null && (function () {
- function AsyncUnzlib(opts, cb) {
- astrmify([
- bInflt,
- zule,
- function () { return [astrm, Inflate, Unzlib]; }
- ], this, StrmOpt.call(this, opts, cb), function (ev) {
- var strm = new Unzlib(ev.data);
- onmessage = astrm(strm);
- }, 11, 0);
- }
- return AsyncUnzlib;
-}())));
-
/**
 * Asynchronously expands Zlib data in a worker (cache id 5); the worker
 * carries the inflate core plus the zlib header parser.
 * @param data The data to decompress
 * @param opts The decompression options (may be omitted: (data, cb))
 * @param cb Called with (error, result) when done
 * @returns A function that immediately terminates the decompression
 */
function unzlib(data, opts, cb) {
    if (!cb)
        cb = opts, opts = {};
    if (typeof cb != 'function')
        err(7);
    return cbify(data, opts, [
        bInflt,
        zule,
        function () { return [unzlibSync]; }
    ], function (ev) { return pbf(unzlibSync(ev.data[0], gopt(ev.data[1]))); }, 5, cb);
}
-/**
- * Expands Zlib data
- * @param data The data to decompress
- * @param opts The decompression options
- * @returns The decompressed version of the data
- */
-function unzlibSync(data, opts) {
- return inflt(data.subarray(zls(data, opts && opts.dictionary), -4), { i: 2 }, opts && opts.out, opts && opts.dictionary);
-}
-// Default algorithm for compression (used because having a known output size allows faster decompression)
-
-
-/**
- * Streaming GZIP, Zlib, or raw DEFLATE decompression
- */
-var Decompress = /*#__PURE__*/ ((/* unused pure expression or super */ null && (function () {
- function Decompress(opts, cb) {
- this.o = StrmOpt.call(this, opts, cb) || {};
- this.G = Gunzip;
- this.I = Inflate;
- this.Z = Unzlib;
- }
- // init substream
- // overriden by AsyncDecompress
- Decompress.prototype.i = function () {
- var _this = this;
- this.s.ondata = function (dat, final) {
- _this.ondata(dat, final);
- };
- };
- /**
- * Pushes a chunk to be decompressed
- * @param chunk The chunk to push
- * @param final Whether this is the last chunk
- */
- Decompress.prototype.push = function (chunk, final) {
- if (!this.ondata)
- err(5);
- if (!this.s) {
- if (this.p && this.p.length) {
- var n = new esm_u8(this.p.length + chunk.length);
- n.set(this.p), n.set(chunk, this.p.length);
- }
- else
- this.p = chunk;
- if (this.p.length > 2) {
- this.s = (this.p[0] == 31 && this.p[1] == 139 && this.p[2] == 8)
- ? new this.G(this.o)
- : ((this.p[0] & 15) != 8 || (this.p[0] >> 4) > 7 || ((this.p[0] << 8 | this.p[1]) % 31))
- ? new this.I(this.o)
- : new this.Z(this.o);
- this.i();
- this.s.push(this.p, final);
- this.p = null;
- }
- }
- else
- this.s.push(chunk, final);
- };
- return Decompress;
-}())));
-
-/**
- * Asynchronous streaming GZIP, Zlib, or raw DEFLATE decompression
- */
-var AsyncDecompress = /*#__PURE__*/ ((/* unused pure expression or super */ null && (function () {
- function AsyncDecompress(opts, cb) {
- Decompress.call(this, opts, cb);
- this.queuedSize = 0;
- this.G = AsyncGunzip;
- this.I = AsyncInflate;
- this.Z = AsyncUnzlib;
- }
- AsyncDecompress.prototype.i = function () {
- var _this = this;
- this.s.ondata = function (err, dat, final) {
- _this.ondata(err, dat, final);
- };
- this.s.ondrain = function (size) {
- _this.queuedSize -= size;
- if (_this.ondrain)
- _this.ondrain(size);
- };
- };
- /**
- * Pushes a chunk to be decompressed
- * @param chunk The chunk to push
- * @param final Whether this is the last chunk
- */
- AsyncDecompress.prototype.push = function (chunk, final) {
- this.queuedSize += chunk.length;
- Decompress.prototype.push.call(this, chunk, final);
- };
- return AsyncDecompress;
-}())));
-
/**
 * Asynchronously expands GZIP, Zlib, or raw DEFLATE data, automatically
 * detecting the format: the gzip magic (1F 8B 08) selects gunzip, a valid
 * zlib CMF/FLG pair selects unzlib, and anything else is treated as raw.
 * @param data The data to decompress
 * @param opts The decompression options (may be omitted: (data, cb))
 * @param cb Called with (error, result) when done
 * @returns A function that immediately terminates the decompression
 */
function decompress(data, opts, cb) {
    if (!cb)
        cb = opts, opts = {};
    if (typeof cb != 'function')
        err(7);
    return (data[0] == 31 && data[1] == 139 && data[2] == 8)
        ? gunzip(data, opts, cb)
        : ((data[0] & 15) != 8 || (data[0] >> 4) > 7 || ((data[0] << 8 | data[1]) % 31))
            ? inflate(data, opts, cb)
            : unzlib(data, opts, cb);
}
-/**
- * Expands compressed GZIP, Zlib, or raw DEFLATE data, automatically detecting the format
- * @param data The data to decompress
- * @param opts The decompression options
- * @returns The decompressed version of the data
- */
-function decompressSync(data, opts) {
- return (data[0] == 31 && data[1] == 139 && data[2] == 8)
- ? gunzipSync(data, opts)
- : ((data[0] & 15) != 8 || (data[0] >> 4) > 7 || ((data[0] << 8 | data[1]) % 31))
- ? inflateSync(data, opts)
- : unzlibSync(data, opts);
-}
// flatten a directory structure
// Recursively walks the nested zippable object d, writing every file into t
// keyed by its full path (prefix p). A value may be [data, options] to carry
// per-entry options merged over the inherited o. A non-Uint8Array value is a
// subdirectory: it gets an explicit zero-length entry with a trailing slash,
// and its children inherit the original o (per-directory options are NOT
// propagated down -- the recursion passes o, not op).
var fltn = function (d, p, t, o) {
    for (var k in d) {
        var val = d[k], n = p + k, op = o;
        if (Array.isArray(val))
            op = mrg(o, val[1]), val = val[0];
        if (val instanceof esm_u8)
            t[n] = [val, op];
        else {
            t[n += '/'] = [new esm_u8(0), op];
            fltn(val, n, t, o);
        }
    }
};
// text encoder (false when the platform lacks TextEncoder)
var te = typeof TextEncoder != 'undefined' && /*#__PURE__*/ new TextEncoder();
// text decoder (false when the platform lacks TextDecoder)
var td = typeof TextDecoder != 'undefined' && /*#__PURE__*/ new TextDecoder();
// text decoder stream
// Probe whether TextDecoder supports streaming decodes by decoding et
// (presumably an empty chunk declared earlier in the bundle -- TODO confirm)
// with { stream: true }; tds stays 0 if the call throws or td is falsy.
var tds = 0;
try {
    td.decode(et, { stream: true });
    tds = 1;
}
catch (e) { }
// decode UTF8
// Incremental UTF-8 decoder: decodes as much of d as possible and returns
// { s: the decoded string, r: leftover bytes of a trailing incomplete
// multi-byte sequence } so a streaming caller can prepend r to the next chunk.
var dutf8 = function (d) {
    for (var r = '', i = 0;;) {
        var c = d[i++];
        // eb = number of continuation bytes implied by the lead byte
        var eb = (c > 127) + (c > 223) + (c > 239);
        if (i + eb > d.length)
            // incomplete sequence -- also the normal exit: reading past the
            // end yields undefined, eb 0, and i > d.length, with r empty
            return { s: r, r: slc(d, i - 1) };
        if (!eb)
            r += String.fromCharCode(c);
        else if (eb == 3) {
            // 4-byte sequence: emit as a UTF-16 surrogate pair
            c = ((c & 15) << 18 | (d[i++] & 63) << 12 | (d[i++] & 63) << 6 | (d[i++] & 63)) - 65536,
                r += String.fromCharCode(55296 | (c >> 10), 56320 | (c & 1023));
        }
        else if (eb & 1)
            // 2-byte sequence
            r += String.fromCharCode((c & 31) << 6 | (d[i++] & 63));
        else
            // 3-byte sequence
            r += String.fromCharCode((c & 15) << 12 | (d[i++] & 63) << 6 | (d[i++] & 63));
    }
};
-/**
- * Streaming UTF-8 decoding
- */
-var DecodeUTF8 = /*#__PURE__*/ ((/* unused pure expression or super */ null && (function () {
- /**
- * Creates a UTF-8 decoding stream
- * @param cb The callback to call whenever data is decoded
- */
- function DecodeUTF8(cb) {
- this.ondata = cb;
- if (tds)
- this.t = new TextDecoder();
- else
- this.p = et;
- }
- /**
- * Pushes a chunk to be decoded from UTF-8 binary
- * @param chunk The chunk to push
- * @param final Whether this is the last chunk
- */
- DecodeUTF8.prototype.push = function (chunk, final) {
- if (!this.ondata)
- err(5);
- final = !!final;
- if (this.t) {
- this.ondata(this.t.decode(chunk, { stream: true }), final);
- if (final) {
- if (this.t.decode().length)
- err(8);
- this.t = null;
- }
- return;
- }
- if (!this.p)
- err(4);
- var dat = new esm_u8(this.p.length + chunk.length);
- dat.set(this.p);
- dat.set(chunk, this.p.length);
- var _a = dutf8(dat), s = _a.s, r = _a.r;
- if (final) {
- if (r.length)
- err(8);
- this.p = null;
- }
- else
- this.p = r;
- this.ondata(s, final);
- };
- return DecodeUTF8;
-}())));
-
-/**
- * Streaming UTF-8 encoding
- */
-var EncodeUTF8 = /*#__PURE__*/ ((/* unused pure expression or super */ null && (function () {
- /**
- * Creates a UTF-8 decoding stream
- * @param cb The callback to call whenever data is encoded
- */
- function EncodeUTF8(cb) {
- this.ondata = cb;
- }
- /**
- * Pushes a chunk to be encoded to UTF-8
- * @param chunk The string data to push
- * @param final Whether this is the last chunk
- */
- EncodeUTF8.prototype.push = function (chunk, final) {
- if (!this.ondata)
- err(5);
- if (this.d)
- err(4);
- this.ondata(strToU8(chunk), this.d = final || false);
- };
- return EncodeUTF8;
-}())));
-
-/**
- * Converts a string into a Uint8Array for use with compression/decompression methods
- * @param str The string to encode
- * @param latin1 Whether or not to interpret the data as Latin-1. This should
- * not need to be true unless decoding a binary string.
- * @returns The string encoded in UTF-8/Latin-1 binary
- */
-function strToU8(str, latin1) {
- if (latin1) {
- var ar_1 = new esm_u8(str.length);
- for (var i = 0; i < str.length; ++i)
- ar_1[i] = str.charCodeAt(i);
- return ar_1;
- }
- if (te)
- return te.encode(str);
- var l = str.length;
- var ar = new esm_u8(str.length + (str.length >> 1));
- var ai = 0;
- var w = function (v) { ar[ai++] = v; };
- for (var i = 0; i < l; ++i) {
- if (ai + 5 > ar.length) {
- var n = new esm_u8(ai + 8 + ((l - i) << 1));
- n.set(ar);
- ar = n;
- }
- var c = str.charCodeAt(i);
- if (c < 128 || latin1)
- w(c);
- else if (c < 2048)
- w(192 | (c >> 6)), w(128 | (c & 63));
- else if (c > 55295 && c < 57344)
- c = 65536 + (c & 1023 << 10) | (str.charCodeAt(++i) & 1023),
- w(240 | (c >> 18)), w(128 | ((c >> 12) & 63)), w(128 | ((c >> 6) & 63)), w(128 | (c & 63));
- else
- w(224 | (c >> 12)), w(128 | ((c >> 6) & 63)), w(128 | (c & 63));
- }
- return slc(ar, 0, ai);
-}
-/**
- * Converts a Uint8Array to a string
- * @param dat The data to decode to string
- * @param latin1 Whether or not to interpret the data as Latin-1. This should
- * not need to be true unless encoding to binary string.
- * @returns The original UTF-8/Latin-1 string
- */
-function strFromU8(dat, latin1) {
- if (latin1) {
- var r = '';
- for (var i = 0; i < dat.length; i += 16384)
- r += String.fromCharCode.apply(null, dat.subarray(i, i + 16384));
- return r;
- }
- else if (td) {
- return td.decode(dat);
- }
- else {
- var _a = dutf8(dat), s = _a.s, r = _a.r;
- if (r.length)
- err(8);
- return s;
- }
-}
-;
// deflate bit flag
// Maps a compression level to the 2-bit compression-option flag stored in
// zip entries: 1 -> 3 (super fast), 2-5 -> 2 (fast), 9 -> 1 (max), else 0.
var dbf = function (l) {
    if (l == 1)
        return 3;
    if (l < 6)
        return 2;
    return l == 9 ? 1 : 0;
};
// skip local zip header
// Returns the offset just past the local file header starting at b: 30 fixed
// bytes plus the filename length (at offset 26) and extra-field length (at 28).
var slzh = function (d, b) {
    var fnl = b2(d, b + 26);
    var efl = b2(d, b + 28);
    return b + 30 + fnl + efl;
};
// read zip header
// Parses a central-directory entry at offset b. Returns
// [compression method, compressed size, uncompressed size, filename,
//  offset of the next record, local-header offset]; when z is set and the
// compressed size is the 0xFFFFFFFF sentinel, the real 64-bit values are
// pulled from the zip64 extra field instead. The name is decoded as cp437/
// Latin-1 unless general-purpose flag bit 11 (UTF-8) is set.
var zh = function (d, b, z) {
    var fnl = b2(d, b + 28), fn = strFromU8(d.subarray(b + 46, b + 46 + fnl), !(b2(d, b + 8) & 2048)), es = b + 46 + fnl, bs = b4(d, b + 20);
    var _a = z && bs == 4294967295 ? z64e(d, es) : [bs, b4(d, b + 24), b4(d, b + 42)], sc = _a[0], su = _a[1], off = _a[2];
    // next record = end of name + extra-field length + comment length
    return [b2(d, b + 10), sc, su, fn, es + b2(d, b + 30) + b2(d, b + 32), off];
};
// read zip64 extra field
// Scans the extra-field list at b for the zip64 tag (1), stepping over other
// fields via their 2-byte size, then returns
// [compressed size, uncompressed size, local-header offset] as JS numbers.
var z64e = function (d, b) {
    for (; b2(d, b) != 1; b += 4 + b2(d, b + 2))
        ;
    return [b8(d, b + 12), b8(d, b + 4), b8(d, b + 20)];
};
// extra field length
// Total encoded size of a zip extra-field map: each entry costs its data
// length plus a 4-byte tag/size header. Errors (code 9) if any single field
// exceeds the 16-bit length limit.
var exfl = function (ex) {
    var total = 0;
    if (!ex)
        return total;
    for (var tag in ex) {
        var len = ex[tag].length;
        if (len > 65535)
            err(9);
        total += len + 4;
    }
    return total;
};
// write zip header
// Writes a local file header (ce == null) or central-directory entry
// (ce = local-header offset) for file metadata f at offset b, returning the
// offset just past what was written. u marks a UTF-8 name/comment; a negative
// c appears to encode a not-yet-known compressed size (streamed entry, flag
// bit 3) as -size - 2 -- NOTE(review): confirm against the Zip stream callers.
var wzh = function (d, b, f, fn, u, c, ce, co) {
    var fl = fn.length, ex = f.extra, col = co && co.length;
    var exl = exfl(ex);
    // signature: central directory (0x02014B50) vs local header (0x04034B50)
    wbytes(d, b, ce != null ? 0x2014B50 : 0x4034B50), b += 4;
    // central entries carry "version made by" (2.0) and the OS byte
    if (ce != null)
        d[b++] = 20, d[b++] = f.os;
    d[b] = 20, b += 2; // spec compliance? what's that?
    // general-purpose flags: compression hint bits 1-2, bit 3 (data
    // descriptor) when size unknown, bit 11 (UTF-8) when u
    d[b++] = (f.flag << 1) | (c < 0 && 8), d[b++] = u && 8;
    d[b++] = f.compression & 255, d[b++] = f.compression >> 8;
    // MS-DOS date/time; representable years are 1980-2099 only
    var dt = new Date(f.mtime == null ? Date.now() : f.mtime), y = dt.getFullYear() - 1980;
    if (y < 0 || y > 119)
        err(10);
    wbytes(d, b, (y << 25) | ((dt.getMonth() + 1) << 21) | (dt.getDate() << 16) | (dt.getHours() << 11) | (dt.getMinutes() << 5) | (dt.getSeconds() >> 1)), b += 4;
    // CRC and sizes are left zero when c == -1 (deferred to a data descriptor)
    if (c != -1) {
        wbytes(d, b, f.crc);
        wbytes(d, b + 4, c < 0 ? -c - 2 : c);
        wbytes(d, b + 8, f.size);
    }
    wbytes(d, b + 12, fl);
    wbytes(d, b + 14, exl), b += 16;
    // central-only fields: comment length, attributes, local-header offset
    if (ce != null) {
        wbytes(d, b, col);
        wbytes(d, b + 6, f.attrs);
        wbytes(d, b + 10, ce), b += 14;
    }
    d.set(fn, b);
    b += fl;
    // extra fields: 2-byte numeric tag + 2-byte length + payload each
    if (exl) {
        for (var k in ex) {
            var exf = ex[k], l = exf.length;
            wbytes(d, b, +k);
            wbytes(d, b + 2, l);
            d.set(exf, b + 4), b += 4 + l;
        }
    }
    if (col)
        d.set(co, b), b += col;
    return b;
};
// write zip footer (end of central directory)
// o: output buffer, b: offset for the EOCD record, c: total entry count,
// d: central directory size in bytes, e: central directory start offset.
// The disk-number fields (bytes 4-7) are left zero.
var wzf = function (o, b, c, d, e) {
    wbytes(o, b, 0x6054B50); // skip disk
    wbytes(o, b + 8, c);     // entries on this disk
    wbytes(o, b + 10, c);    // entries total
    wbytes(o, b + 12, d);    // central directory size
    wbytes(o, b + 16, e);    // central directory offset
};
-/**
- * A pass-through stream to keep data uncompressed in a ZIP archive.
- */
-var ZipPassThrough = /*#__PURE__*/ ((/* unused pure expression or super */ null && (function () {
- /**
- * Creates a pass-through stream that can be added to ZIP archives
- * @param filename The filename to associate with this data stream
- */
- function ZipPassThrough(filename) {
- this.filename = filename;
- this.c = crc();
- this.size = 0;
- this.compression = 0;
- }
- /**
- * Processes a chunk and pushes to the output stream. You can override this
- * method in a subclass for custom behavior, but by default this passes
- * the data through. You must call this.ondata(err, chunk, final) at some
- * point in this method.
- * @param chunk The chunk to process
- * @param final Whether this is the last chunk
- */
- ZipPassThrough.prototype.process = function (chunk, final) {
- this.ondata(null, chunk, final);
- };
- /**
- * Pushes a chunk to be added. If you are subclassing this with a custom
- * compression algorithm, note that you must push data from the source
- * file only, pre-compression.
- * @param chunk The chunk to push
- * @param final Whether this is the last chunk
- */
- ZipPassThrough.prototype.push = function (chunk, final) {
- if (!this.ondata)
- err(5);
- this.c.p(chunk);
- this.size += chunk.length;
- if (final)
- this.crc = this.c.d();
- this.process(chunk, final || false);
- };
- return ZipPassThrough;
-}())));
-
-// I don't extend because TypeScript extension adds 1kB of runtime bloat
-/**
- * Streaming DEFLATE compression for ZIP archives. Prefer using AsyncZipDeflate
- * for better performance
- */
-var ZipDeflate = /*#__PURE__*/ ((/* unused pure expression or super */ null && (function () {
- /**
- * Creates a DEFLATE stream that can be added to ZIP archives
- * @param filename The filename to associate with this data stream
- * @param opts The compression options
- */
- function ZipDeflate(filename, opts) {
- var _this = this;
- if (!opts)
- opts = {};
- ZipPassThrough.call(this, filename);
- this.d = new Deflate(opts, function (dat, final) {
- _this.ondata(null, dat, final);
- });
- this.compression = 8;
- this.flag = dbf(opts.level);
- }
- ZipDeflate.prototype.process = function (chunk, final) {
- try {
- this.d.push(chunk, final);
- }
- catch (e) {
- this.ondata(e, null, final);
- }
- };
- /**
- * Pushes a chunk to be deflated
- * @param chunk The chunk to push
- * @param final Whether this is the last chunk
- */
- ZipDeflate.prototype.push = function (chunk, final) {
- ZipPassThrough.prototype.push.call(this, chunk, final);
- };
- return ZipDeflate;
-}())));
-
-/**
- * Asynchronous streaming DEFLATE compression for ZIP archives
- */
-var AsyncZipDeflate = /*#__PURE__*/ ((/* unused pure expression or super */ null && (function () {
- /**
- * Creates an asynchronous DEFLATE stream that can be added to ZIP archives
- * @param filename The filename to associate with this data stream
- * @param opts The compression options
- */
- function AsyncZipDeflate(filename, opts) {
- var _this = this;
- if (!opts)
- opts = {};
- ZipPassThrough.call(this, filename);
- this.d = new AsyncDeflate(opts, function (err, dat, final) {
- _this.ondata(err, dat, final);
- });
- this.compression = 8;
- this.flag = dbf(opts.level);
- this.terminate = this.d.terminate;
- }
- AsyncZipDeflate.prototype.process = function (chunk, final) {
- this.d.push(chunk, final);
- };
- /**
- * Pushes a chunk to be deflated
- * @param chunk The chunk to push
- * @param final Whether this is the last chunk
- */
- AsyncZipDeflate.prototype.push = function (chunk, final) {
- ZipPassThrough.prototype.push.call(this, chunk, final);
- };
- return AsyncZipDeflate;
-}())));
-
-// TODO: Better tree shaking
-/**
- * A zippable archive to which files can incrementally be added
- */
-var Zip = /*#__PURE__*/ ((/* unused pure expression or super */ null && (function () {
- /**
- * Creates an empty ZIP archive to which files can be added
- * @param cb The callback to call whenever data for the generated ZIP archive
- * is available
- */
- function Zip(cb) {
- this.ondata = cb;
- this.u = [];
- this.d = 1;
- }
- /**
- * Adds a file to the ZIP archive
- * @param file The file stream to add
- */
- Zip.prototype.add = function (file) {
- var _this = this;
- if (!this.ondata)
- err(5);
- // finishing or finished
- if (this.d & 2)
- this.ondata(err(4 + (this.d & 1) * 8, 0, 1), null, false);
- else {
- var f = strToU8(file.filename), fl_1 = f.length;
- var com = file.comment, o = com && strToU8(com);
- var u = fl_1 != file.filename.length || (o && (com.length != o.length));
- var hl_1 = fl_1 + exfl(file.extra) + 30;
- if (fl_1 > 65535)
- this.ondata(err(11, 0, 1), null, false);
- var header = new esm_u8(hl_1);
- wzh(header, 0, file, f, u, -1);
- var chks_1 = [header];
- var pAll_1 = function () {
- for (var _i = 0, chks_2 = chks_1; _i < chks_2.length; _i++) {
- var chk = chks_2[_i];
- _this.ondata(null, chk, false);
- }
- chks_1 = [];
- };
- var tr_1 = this.d;
- this.d = 0;
- var ind_1 = this.u.length;
- var uf_1 = mrg(file, {
- f: f,
- u: u,
- o: o,
- t: function () {
- if (file.terminate)
- file.terminate();
- },
- r: function () {
- pAll_1();
- if (tr_1) {
- var nxt = _this.u[ind_1 + 1];
- if (nxt)
- nxt.r();
- else
- _this.d = 1;
- }
- tr_1 = 1;
- }
- });
- var cl_1 = 0;
- file.ondata = function (err, dat, final) {
- if (err) {
- _this.ondata(err, dat, final);
- _this.terminate();
- }
- else {
- cl_1 += dat.length;
- chks_1.push(dat);
- if (final) {
- var dd = new esm_u8(16);
- wbytes(dd, 0, 0x8074B50);
- wbytes(dd, 4, file.crc);
- wbytes(dd, 8, cl_1);
- wbytes(dd, 12, file.size);
- chks_1.push(dd);
- uf_1.c = cl_1, uf_1.b = hl_1 + cl_1 + 16, uf_1.crc = file.crc, uf_1.size = file.size;
- if (tr_1)
- uf_1.r();
- tr_1 = 1;
- }
- else if (tr_1)
- pAll_1();
- }
- };
- this.u.push(uf_1);
- }
- };
- /**
- * Ends the process of adding files and prepares to emit the final chunks.
- * This *must* be called after adding all desired files for the resulting
- * ZIP file to work properly.
- */
- Zip.prototype.end = function () {
- var _this = this;
- if (this.d & 2) {
- this.ondata(err(4 + (this.d & 1) * 8, 0, 1), null, true);
- return;
- }
- if (this.d)
- this.e();
- else
- this.u.push({
- r: function () {
- if (!(_this.d & 1))
- return;
- _this.u.splice(-1, 1);
- _this.e();
- },
- t: function () { }
- });
- this.d = 3;
- };
- Zip.prototype.e = function () {
- var bt = 0, l = 0, tl = 0;
- for (var _i = 0, _a = this.u; _i < _a.length; _i++) {
- var f = _a[_i];
- tl += 46 + f.f.length + exfl(f.extra) + (f.o ? f.o.length : 0);
- }
- var out = new esm_u8(tl + 22);
- for (var _b = 0, _c = this.u; _b < _c.length; _b++) {
- var f = _c[_b];
- wzh(out, bt, f, f.f, f.u, -f.c - 2, l, f.o);
- bt += 46 + f.f.length + exfl(f.extra) + (f.o ? f.o.length : 0), l += f.b;
- }
- wzf(out, bt, this.u.length, tl, l);
- this.ondata(null, out, true);
- this.d = 2;
- };
- /**
- * A method to terminate any internal workers used by the stream. Subsequent
- * calls to add() will fail.
- */
- Zip.prototype.terminate = function () {
- for (var _i = 0, _a = this.u; _i < _a.length; _i++) {
- var f = _a[_i];
- f.t();
- }
- this.d = 2;
- };
- return Zip;
-}())));
-
-function zip(data, opts, cb) {
- if (!cb)
- cb = opts, opts = {};
- if (typeof cb != 'function')
- err(7);
- var r = {};
- fltn(data, '', r, opts);
- var k = Object.keys(r);
- var lft = k.length, o = 0, tot = 0;
- var slft = lft, files = new Array(lft);
- var term = [];
- var tAll = function () {
- for (var i = 0; i < term.length; ++i)
- term[i]();
- };
- var cbd = function (a, b) {
- mt(function () { cb(a, b); });
- };
- mt(function () { cbd = cb; });
- var cbf = function () {
- var out = new esm_u8(tot + 22), oe = o, cdl = tot - o;
- tot = 0;
- for (var i = 0; i < slft; ++i) {
- var f = files[i];
- try {
- var l = f.c.length;
- wzh(out, tot, f, f.f, f.u, l);
- var badd = 30 + f.f.length + exfl(f.extra);
- var loc = tot + badd;
- out.set(f.c, loc);
- wzh(out, o, f, f.f, f.u, l, tot, f.m), o += 16 + badd + (f.m ? f.m.length : 0), tot = loc + l;
- }
- catch (e) {
- return cbd(e, null);
- }
- }
- wzf(out, o, files.length, cdl, oe);
- cbd(null, out);
- };
- if (!lft)
- cbf();
- var _loop_1 = function (i) {
- var fn = k[i];
- var _a = r[fn], file = _a[0], p = _a[1];
- var c = crc(), size = file.length;
- c.p(file);
- var f = strToU8(fn), s = f.length;
- var com = p.comment, m = com && strToU8(com), ms = m && m.length;
- var exl = exfl(p.extra);
- var compression = p.level == 0 ? 0 : 8;
- var cbl = function (e, d) {
- if (e) {
- tAll();
- cbd(e, null);
- }
- else {
- var l = d.length;
- files[i] = mrg(p, {
- size: size,
- crc: c.d(),
- c: d,
- f: f,
- m: m,
- u: s != fn.length || (m && (com.length != ms)),
- compression: compression
- });
- o += 30 + s + exl + l;
- tot += 76 + 2 * (s + exl) + (ms || 0) + l;
- if (!--lft)
- cbf();
- }
- };
- if (s > 65535)
- cbl(err(11, 0, 1), null);
- if (!compression)
- cbl(null, file);
- else if (size < 160000) {
- try {
- cbl(null, deflateSync(file, p));
- }
- catch (e) {
- cbl(e, null);
- }
- }
- else
- term.push(deflate(file, p, cbl));
- };
- // Cannot use lft because it can decrease
- for (var i = 0; i < slft; ++i) {
- _loop_1(i);
- }
- return tAll;
-}
-/**
- * Synchronously creates a ZIP file. Prefer using `zip` for better performance
- * with more than one file.
- * @param data The directory structure for the ZIP archive
- * @param opts The main options, merged with per-file options
- * @returns The generated ZIP archive
- */
-function zipSync(data, opts) {
- if (!opts)
- opts = {};
- var r = {};
- var files = [];
- fltn(data, '', r, opts);
- var o = 0;
- var tot = 0;
- for (var fn in r) {
- var _a = r[fn], file = _a[0], p = _a[1];
- var compression = p.level == 0 ? 0 : 8;
- var f = strToU8(fn), s = f.length;
- var com = p.comment, m = com && strToU8(com), ms = m && m.length;
- var exl = exfl(p.extra);
- if (s > 65535)
- err(11);
- var d = compression ? deflateSync(file, p) : file, l = d.length;
- var c = crc();
- c.p(file);
- files.push(mrg(p, {
- size: file.length,
- crc: c.d(),
- c: d,
- f: f,
- m: m,
- u: s != fn.length || (m && (com.length != ms)),
- o: o,
- compression: compression
- }));
- o += 30 + s + exl + l;
- tot += 76 + 2 * (s + exl) + (ms || 0) + l;
- }
- var out = new esm_u8(tot + 22), oe = o, cdl = tot - o;
- for (var i = 0; i < files.length; ++i) {
- var f = files[i];
- wzh(out, f.o, f, f.f, f.u, f.c.length);
- var badd = 30 + f.f.length + exfl(f.extra);
- out.set(f.c, f.o + badd);
- wzh(out, o, f, f.f, f.u, f.c.length, f.o, f.m), o += 16 + badd + (f.m ? f.m.length : 0);
- }
- wzf(out, o, files.length, cdl, oe);
- return out;
-}
-/**
- * Streaming pass-through decompression for ZIP archives
- */
-var UnzipPassThrough = /*#__PURE__*/ ((/* unused pure expression or super */ null && (function () {
- function UnzipPassThrough() {
- }
- UnzipPassThrough.prototype.push = function (data, final) {
- this.ondata(null, data, final);
- };
- UnzipPassThrough.compression = 0;
- return UnzipPassThrough;
-}())));
-
-/**
- * Streaming DEFLATE decompression for ZIP archives. Prefer AsyncZipInflate for
- * better performance.
- */
-var UnzipInflate = /*#__PURE__*/ ((/* unused pure expression or super */ null && (function () {
- /**
- * Creates a DEFLATE decompression that can be used in ZIP archives
- */
- function UnzipInflate() {
- var _this = this;
- this.i = new Inflate(function (dat, final) {
- _this.ondata(null, dat, final);
- });
- }
- UnzipInflate.prototype.push = function (data, final) {
- try {
- this.i.push(data, final);
- }
- catch (e) {
- this.ondata(e, null, final);
- }
- };
- UnzipInflate.compression = 8;
- return UnzipInflate;
-}())));
-
-/**
- * Asynchronous streaming DEFLATE decompression for ZIP archives
- */
-var AsyncUnzipInflate = /*#__PURE__*/ ((/* unused pure expression or super */ null && (function () {
- /**
- * Creates a DEFLATE decompression that can be used in ZIP archives
- */
- function AsyncUnzipInflate(_, sz) {
- var _this = this;
- if (sz < 320000) {
- this.i = new Inflate(function (dat, final) {
- _this.ondata(null, dat, final);
- });
- }
- else {
- this.i = new AsyncInflate(function (err, dat, final) {
- _this.ondata(err, dat, final);
- });
- this.terminate = this.i.terminate;
- }
- }
- AsyncUnzipInflate.prototype.push = function (data, final) {
- if (this.i.terminate)
- data = slc(data, 0);
- this.i.push(data, final);
- };
- AsyncUnzipInflate.compression = 8;
- return AsyncUnzipInflate;
-}())));
-
-/**
- * A ZIP archive decompression stream that emits files as they are discovered
- */
-var Unzip = /*#__PURE__*/ ((/* unused pure expression or super */ null && (function () {
- /**
- * Creates a ZIP decompression stream
- * @param cb The callback to call whenever a file in the ZIP archive is found
- */
- function Unzip(cb) {
- this.onfile = cb;
- this.k = [];
- this.o = {
- 0: UnzipPassThrough
- };
- this.p = et;
- }
- /**
- * Pushes a chunk to be unzipped
- * @param chunk The chunk to push
- * @param final Whether this is the last chunk
- */
- Unzip.prototype.push = function (chunk, final) {
- var _this = this;
- if (!this.onfile)
- err(5);
- if (!this.p)
- err(4);
- if (this.c > 0) {
- var len = Math.min(this.c, chunk.length);
- var toAdd = chunk.subarray(0, len);
- this.c -= len;
- if (this.d)
- this.d.push(toAdd, !this.c);
- else
- this.k[0].push(toAdd);
- chunk = chunk.subarray(len);
- if (chunk.length)
- return this.push(chunk, final);
- }
- else {
- var f = 0, i = 0, is = void 0, buf = void 0;
- if (!this.p.length)
- buf = chunk;
- else if (!chunk.length)
- buf = this.p;
- else {
- buf = new esm_u8(this.p.length + chunk.length);
- buf.set(this.p), buf.set(chunk, this.p.length);
- }
- var l = buf.length, oc = this.c, add = oc && this.d;
- var _loop_2 = function () {
- var _a;
- var sig = b4(buf, i);
- if (sig == 0x4034B50) {
- f = 1, is = i;
- this_1.d = null;
- this_1.c = 0;
- var bf = b2(buf, i + 6), cmp_1 = b2(buf, i + 8), u = bf & 2048, dd = bf & 8, fnl = b2(buf, i + 26), es = b2(buf, i + 28);
- if (l > i + 30 + fnl + es) {
- var chks_3 = [];
- this_1.k.unshift(chks_3);
- f = 2;
- var sc_1 = b4(buf, i + 18), su_1 = b4(buf, i + 22);
- var fn_1 = strFromU8(buf.subarray(i + 30, i += 30 + fnl), !u);
- if (sc_1 == 4294967295) {
- _a = dd ? [-2] : z64e(buf, i), sc_1 = _a[0], su_1 = _a[1];
- }
- else if (dd)
- sc_1 = -1;
- i += es;
- this_1.c = sc_1;
- var d_1;
- var file_1 = {
- name: fn_1,
- compression: cmp_1,
- start: function () {
- if (!file_1.ondata)
- err(5);
- if (!sc_1)
- file_1.ondata(null, et, true);
- else {
- var ctr = _this.o[cmp_1];
- if (!ctr)
- file_1.ondata(err(14, 'unknown compression type ' + cmp_1, 1), null, false);
- d_1 = sc_1 < 0 ? new ctr(fn_1) : new ctr(fn_1, sc_1, su_1);
- d_1.ondata = function (err, dat, final) { file_1.ondata(err, dat, final); };
- for (var _i = 0, chks_4 = chks_3; _i < chks_4.length; _i++) {
- var dat = chks_4[_i];
- d_1.push(dat, false);
- }
- if (_this.k[0] == chks_3 && _this.c)
- _this.d = d_1;
- else
- d_1.push(et, true);
- }
- },
- terminate: function () {
- if (d_1 && d_1.terminate)
- d_1.terminate();
- }
- };
- if (sc_1 >= 0)
- file_1.size = sc_1, file_1.originalSize = su_1;
- this_1.onfile(file_1);
- }
- return "break";
- }
- else if (oc) {
- if (sig == 0x8074B50) {
- is = i += 12 + (oc == -2 && 8), f = 3, this_1.c = 0;
- return "break";
- }
- else if (sig == 0x2014B50) {
- is = i -= 4, f = 3, this_1.c = 0;
- return "break";
- }
- }
- };
- var this_1 = this;
- for (; i < l - 4; ++i) {
- var state_1 = _loop_2();
- if (state_1 === "break")
- break;
- }
- this.p = et;
- if (oc < 0) {
- var dat = f ? buf.subarray(0, is - 12 - (oc == -2 && 8) - (b4(buf, is - 16) == 0x8074B50 && 4)) : buf.subarray(0, i);
- if (add)
- add.push(dat, !!f);
- else
- this.k[+(f == 2)].push(dat);
- }
- if (f & 2)
- return this.push(buf.subarray(i), final);
- this.p = buf.subarray(i);
- }
- if (final) {
- if (this.c)
- err(13);
- this.p = null;
- }
- };
- /**
- * Registers a decoder with the stream, allowing for files compressed with
- * the compression type provided to be expanded correctly
- * @param decoder The decoder constructor
- */
- Unzip.prototype.register = function (decoder) {
- this.o[decoder.compression] = decoder;
- };
- return Unzip;
-}())));
-
-var mt = typeof queueMicrotask == 'function' ? queueMicrotask : typeof setTimeout == 'function' ? setTimeout : function (fn) { fn(); };
-function unzip(data, opts, cb) {
- if (!cb)
- cb = opts, opts = {};
- if (typeof cb != 'function')
- err(7);
- var term = [];
- var tAll = function () {
- for (var i = 0; i < term.length; ++i)
- term[i]();
- };
- var files = {};
- var cbd = function (a, b) {
- mt(function () { cb(a, b); });
- };
- mt(function () { cbd = cb; });
- var e = data.length - 22;
- for (; b4(data, e) != 0x6054B50; --e) {
- if (!e || data.length - e > 65558) {
- cbd(err(13, 0, 1), null);
- return tAll;
- }
- }
- ;
- var lft = b2(data, e + 8);
- if (lft) {
- var c = lft;
- var o = b4(data, e + 16);
- var z = o == 4294967295 || c == 65535;
- if (z) {
- var ze = b4(data, e - 12);
- z = b4(data, ze) == 0x6064B50;
- if (z) {
- c = lft = b4(data, ze + 32);
- o = b4(data, ze + 48);
- }
- }
- var fltr = opts && opts.filter;
- var _loop_3 = function (i) {
- var _a = zh(data, o, z), c_1 = _a[0], sc = _a[1], su = _a[2], fn = _a[3], no = _a[4], off = _a[5], b = slzh(data, off);
- o = no;
- var cbl = function (e, d) {
- if (e) {
- tAll();
- cbd(e, null);
- }
- else {
- if (d)
- files[fn] = d;
- if (!--lft)
- cbd(null, files);
- }
- };
- if (!fltr || fltr({
- name: fn,
- size: sc,
- originalSize: su,
- compression: c_1
- })) {
- if (!c_1)
- cbl(null, slc(data, b, b + sc));
- else if (c_1 == 8) {
- var infl = data.subarray(b, b + sc);
- // Synchronously decompress under 512KB, or barely-compressed data
- if (su < 524288 || sc > 0.8 * su) {
- try {
- cbl(null, inflateSync(infl, { out: new esm_u8(su) }));
- }
- catch (e) {
- cbl(e, null);
- }
- }
- else
- term.push(inflate(infl, { size: su }, cbl));
- }
- else
- cbl(err(14, 'unknown compression type ' + c_1, 1), null);
- }
- else
- cbl(null, null);
- };
- for (var i = 0; i < c; ++i) {
- _loop_3(i);
- }
- }
- else
- cbd(null, {});
- return tAll;
-}
-/**
- * Synchronously decompresses a ZIP archive. Prefer using `unzip` for better
- * performance with more than one file.
- * @param data The raw compressed ZIP file
- * @param opts The ZIP extraction options
- * @returns The decompressed files
- */
-function unzipSync(data, opts) {
- var files = {};
- var e = data.length - 22;
- for (; b4(data, e) != 0x6054B50; --e) {
- if (!e || data.length - e > 65558)
- err(13);
- }
- ;
- var c = b2(data, e + 8);
- if (!c)
- return {};
- var o = b4(data, e + 16);
- var z = o == 4294967295 || c == 65535;
- if (z) {
- var ze = b4(data, e - 12);
- z = b4(data, ze) == 0x6064B50;
- if (z) {
- c = b4(data, ze + 32);
- o = b4(data, ze + 48);
- }
- }
- var fltr = opts && opts.filter;
- for (var i = 0; i < c; ++i) {
- var _a = zh(data, o, z), c_2 = _a[0], sc = _a[1], su = _a[2], fn = _a[3], no = _a[4], off = _a[5], b = slzh(data, off);
- o = no;
- if (!fltr || fltr({
- name: fn,
- size: sc,
- originalSize: su,
- compression: c_2
- })) {
- if (!c_2)
- files[fn] = slc(data, b, b + sc);
- else if (c_2 == 8)
- files[fn] = inflateSync(data.subarray(b, b + sc), { out: new esm_u8(su) });
- else
- err(14, 'unknown compression type ' + c_2);
- }
- }
- return files;
-}
-
// EXTERNAL MODULE: ./node_modules/ajv/dist/ajv.js
var ajv = __webpack_require__(63282);
;// CONCATENATED MODULE: ./node_modules/ffjavascript/src/scalar_native.js
@@ -150056,7 +140612,7 @@ function naf(n) {
}
-function scalar_native_bits(n) {
+function bits(n) {
let E = BigInt(n);
const res = [];
while (E) {
@@ -151080,7 +141636,7 @@ function _revSlow(idx, bits) {
return res;
}
-function polfield_rev(idx, bits) {
+function rev(idx, bits) {
return (
_revTable[idx >>> 24] |
(_revTable[(idx >>> 16) & 0xFF] << 8) |
@@ -151091,7 +141647,7 @@ function polfield_rev(idx, bits) {
function __bitReverse(p, bits) {
for (let k=0; kk) {
const tmp= p[k];
p[k] = p[r];
@@ -185271,11 +175827,9 @@ const poseidonContract=(/* unused pure expression or super */ null && (_poseidon
// EXTERNAL MODULE: ./node_modules/@metamask/eth-sig-util/dist/index.js
-var eth_sig_util_dist = __webpack_require__(51594);
+var dist = __webpack_require__(51594);
// EXTERNAL MODULE: ./node_modules/@tornado/fixed-merkle-tree/lib/index.js
var fixed_merkle_tree_lib = __webpack_require__(41217);
-// EXTERNAL MODULE: ./node_modules/bloomfilter.js/index.js
-var bloomfilter_js = __webpack_require__(65403);
// EXTERNAL MODULE: ./node_modules/@tornado/websnark/src/utils.js
var src_utils = __webpack_require__(84276);
// EXTERNAL MODULE: ./node_modules/@tornado/websnark/src/groth16.js
@@ -185291,16 +175845,6 @@ var groth16 = __webpack_require__(36336);
-
-
-
-
-
-
-
-
-
-
const dist_abi$6 = [
@@ -187929,7 +178473,7 @@ function substring(str, length = 10) {
return `${str.substring(0, length)}...${str.substring(str.length - length)}`;
}
-var __async$f = (__this, __arguments, generator) => {
+var __async$c = (__this, __arguments, generator) => {
return new Promise((resolve, reject) => {
var fulfilled = (value) => {
try {
@@ -187950,7 +178494,7 @@ var __async$f = (__this, __arguments, generator) => {
});
};
function multicall(Multicall2, calls) {
- return __async$f(this, null, function* () {
+ return __async$c(this, null, function* () {
const calldata = calls.map((call) => {
var _a, _b, _c;
const target = ((_a = call.contract) == null ? void 0 : _a.target) || call.address;
@@ -187973,29 +178517,29 @@ function multicall(Multicall2, calls) {
});
}
-var __defProp$5 = Object.defineProperty;
-var __defProps$4 = Object.defineProperties;
-var __getOwnPropDescs$4 = Object.getOwnPropertyDescriptors;
-var __getOwnPropSymbols$5 = Object.getOwnPropertySymbols;
+var __defProp$4 = Object.defineProperty;
+var __defProps$3 = Object.defineProperties;
+var __getOwnPropDescs$3 = Object.getOwnPropertyDescriptors;
+var __getOwnPropSymbols$4 = Object.getOwnPropertySymbols;
var __getProtoOf$1 = Object.getPrototypeOf;
-var __hasOwnProp$5 = Object.prototype.hasOwnProperty;
-var __propIsEnum$5 = Object.prototype.propertyIsEnumerable;
+var __hasOwnProp$4 = Object.prototype.hasOwnProperty;
+var __propIsEnum$4 = Object.prototype.propertyIsEnumerable;
var __reflectGet$1 = Reflect.get;
-var __defNormalProp$5 = (obj, key, value) => key in obj ? __defProp$5(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value;
-var __spreadValues$5 = (a, b) => {
+var __defNormalProp$4 = (obj, key, value) => key in obj ? __defProp$4(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value;
+var __spreadValues$4 = (a, b) => {
for (var prop in b || (b = {}))
- if (__hasOwnProp$5.call(b, prop))
- __defNormalProp$5(a, prop, b[prop]);
- if (__getOwnPropSymbols$5)
- for (var prop of __getOwnPropSymbols$5(b)) {
- if (__propIsEnum$5.call(b, prop))
- __defNormalProp$5(a, prop, b[prop]);
+ if (__hasOwnProp$4.call(b, prop))
+ __defNormalProp$4(a, prop, b[prop]);
+ if (__getOwnPropSymbols$4)
+ for (var prop of __getOwnPropSymbols$4(b)) {
+ if (__propIsEnum$4.call(b, prop))
+ __defNormalProp$4(a, prop, b[prop]);
}
return a;
};
-var __spreadProps$4 = (a, b) => __defProps$4(a, __getOwnPropDescs$4(b));
+var __spreadProps$3 = (a, b) => __defProps$3(a, __getOwnPropDescs$3(b));
var __superGet$1 = (cls, obj, key) => __reflectGet$1(__getProtoOf$1(cls), key, obj);
-var __async$e = (__this, __arguments, generator) => {
+var __async$b = (__this, __arguments, generator) => {
return new Promise((resolve, reject) => {
var fulfilled = (value) => {
try {
@@ -188023,25 +178567,28 @@ function getHttpAgent({
torPort,
retry
}) {
+ const { HttpProxyAgent } = require("http-proxy-agent");
+ const { HttpsProxyAgent } = require("https-proxy-agent");
+ const { SocksProxyAgent } = require("socks-proxy-agent");
if (torPort) {
- return new socks_proxy_agent_dist.SocksProxyAgent(`socks5h://tor${retry}@127.0.0.1:${torPort}`);
+ return new SocksProxyAgent(`socks5h://tor${retry}@127.0.0.1:${torPort}`);
}
if (!proxyUrl) {
return;
}
const isHttps = fetchUrl.includes("https://");
if (proxyUrl.includes("socks://") || proxyUrl.includes("socks4://") || proxyUrl.includes("socks5://")) {
- return new socks_proxy_agent_dist.SocksProxyAgent(proxyUrl);
+ return new SocksProxyAgent(proxyUrl);
}
if (proxyUrl.includes("http://") || proxyUrl.includes("https://")) {
if (isHttps) {
- return new https_proxy_agent_dist.HttpsProxyAgent(proxyUrl);
+ return new HttpsProxyAgent(proxyUrl);
}
- return new dist.HttpProxyAgent(proxyUrl);
+ return new HttpProxyAgent(proxyUrl);
}
}
function fetchData(_0) {
- return __async$e(this, arguments, function* (url, options = {}) {
+ return __async$b(this, arguments, function* (url, options = {}) {
var _a, _b, _c;
const MAX_RETRY = (_a = options.maxRetry) != null ? _a : 3;
const RETRY_ON = (_b = options.retryOn) != null ? _b : 500;
@@ -188133,7 +178680,7 @@ function fetchData(_0) {
throw errorObject;
});
}
-const fetchGetUrlFunc = (options = {}) => (req, _signal) => __async$e(void 0, null, function* () {
+const fetchGetUrlFunc = (options = {}) => (req, _signal) => __async$b(void 0, null, function* () {
let signal;
if (_signal) {
const controller = new AbortController();
@@ -188142,7 +178689,7 @@ const fetchGetUrlFunc = (options = {}) => (req, _signal) => __async$e(void 0, nu
controller.abort();
});
}
- const init = __spreadProps$4(__spreadValues$5({}, options), {
+ const init = __spreadProps$3(__spreadValues$4({}, options), {
method: req.method || "POST",
headers: req.headers,
body: req.body || void 0,
@@ -188167,7 +178714,7 @@ const oracleMapper = /* @__PURE__ */ new Map();
const multicallMapper = /* @__PURE__ */ new Map();
function getGasOraclePlugin(networkKey, fetchOptions) {
const gasStationApi = (fetchOptions == null ? void 0 : fetchOptions.gasStationApi) || "https://gasstation.polygon.technology/v2";
- return new FetchUrlFeeDataNetworkPlugin(gasStationApi, (fetchFeeData, provider, request) => __async$e(this, null, function* () {
+ return new FetchUrlFeeDataNetworkPlugin(gasStationApi, (fetchFeeData, provider, request) => __async$b(this, null, function* () {
if (!oracleMapper.has(networkKey)) {
oracleMapper.set(networkKey, GasPriceOracle__factory.connect(fetchOptions == null ? void 0 : fetchOptions.gasPriceOracle, provider));
}
@@ -188226,7 +178773,7 @@ function getGasOraclePlugin(networkKey, fetchOptions) {
}));
}
function dist_getProvider(rpcUrl, fetchOptions) {
- return __async$e(this, null, function* () {
+ return __async$b(this, null, function* () {
const fetchReq = new FetchRequest(rpcUrl);
fetchReq.getUrlFunc = fetchGetUrlFunc(fetchOptions);
const _staticNetwork = yield new JsonRpcProvider(fetchReq).getNetwork();
@@ -188276,7 +178823,7 @@ function getProviderWithNetId(netId, rpcUrl, config, fetchOptions) {
provider.pollingInterval = (fetchOptions == null ? void 0 : fetchOptions.pollingInterval) || pollInterval * 1e3;
return provider;
}
-const populateTransaction = (signer, tx) => __async$e(void 0, null, function* () {
+const populateTransaction = (signer, tx) => __async$b(void 0, null, function* () {
const provider = signer.provider;
if (!tx.from) {
tx.from = signer.address;
@@ -188285,7 +178832,7 @@ const populateTransaction = (signer, tx) => __async$e(void 0, null, function* ()
throw new Error(errMsg);
}
const [feeData, nonce] = yield Promise.all([
- (() => __async$e(void 0, null, function* () {
+ (() => __async$b(void 0, null, function* () {
if (tx.maxFeePerGas && tx.maxPriorityFeePerGas) {
return new FeeData(null, BigInt(tx.maxFeePerGas), BigInt(tx.maxPriorityFeePerGas));
}
@@ -188307,7 +178854,7 @@ const populateTransaction = (signer, tx) => __async$e(void 0, null, function* ()
);
}
}))(),
- (() => __async$e(void 0, null, function* () {
+ (() => __async$b(void 0, null, function* () {
if (tx.nonce) {
return tx.nonce;
}
@@ -188337,7 +178884,7 @@ const populateTransaction = (signer, tx) => __async$e(void 0, null, function* ()
delete tx.maxFeePerGas;
delete tx.maxPriorityFeePerGas;
}
- tx.gasLimit = tx.gasLimit || (yield (() => __async$e(void 0, null, function* () {
+ tx.gasLimit = tx.gasLimit || (yield (() => __async$b(void 0, null, function* () {
try {
const gasLimit = yield provider.estimateGas(tx);
return gasLimit === BigInt(21e3) ? gasLimit : gasLimit * (BigInt(1e4) + BigInt(signer.gasLimitBump)) / BigInt(1e4);
@@ -188354,7 +178901,7 @@ const populateTransaction = (signer, tx) => __async$e(void 0, null, function* ()
class TornadoWallet extends Wallet {
constructor(key, provider, { gasPriceBump, gasLimitBump, gasFailover, bumpNonce } = {}) {
super(key, provider);
- this.gasPriceBump = gasPriceBump != null ? gasPriceBump : 0;
+ this.gasPriceBump = gasPriceBump != null ? gasPriceBump : 1e3;
this.gasLimitBump = gasLimitBump != null ? gasLimitBump : 3e3;
this.gasFailover = gasFailover != null ? gasFailover : false;
this.bumpNonce = bumpNonce != null ? bumpNonce : false;
@@ -188365,7 +178912,7 @@ class TornadoWallet extends Wallet {
return new TornadoWallet(privateKey, provider, options);
}
populateTransaction(tx) {
- return __async$e(this, null, function* () {
+ return __async$b(this, null, function* () {
const txObject = yield populateTransaction(this, tx);
this.nonce = txObject.nonce;
return __superGet$1(TornadoWallet.prototype, this, "populateTransaction").call(this, txObject);
@@ -188375,13 +178922,13 @@ class TornadoWallet extends Wallet {
class TornadoVoidSigner extends VoidSigner {
constructor(address, provider, { gasPriceBump, gasLimitBump, gasFailover, bumpNonce } = {}) {
super(address, provider);
- this.gasPriceBump = gasPriceBump != null ? gasPriceBump : 0;
+ this.gasPriceBump = gasPriceBump != null ? gasPriceBump : 1e3;
this.gasLimitBump = gasLimitBump != null ? gasLimitBump : 3e3;
this.gasFailover = gasFailover != null ? gasFailover : false;
this.bumpNonce = bumpNonce != null ? bumpNonce : false;
}
populateTransaction(tx) {
- return __async$e(this, null, function* () {
+ return __async$b(this, null, function* () {
const txObject = yield populateTransaction(this, tx);
this.nonce = txObject.nonce;
return __superGet$1(TornadoVoidSigner.prototype, this, "populateTransaction").call(this, txObject);
@@ -188391,13 +178938,13 @@ class TornadoVoidSigner extends VoidSigner {
class TornadoRpcSigner extends JsonRpcSigner {
constructor(provider, address, { gasPriceBump, gasLimitBump, gasFailover, bumpNonce } = {}) {
super(provider, address);
- this.gasPriceBump = gasPriceBump != null ? gasPriceBump : 0;
+ this.gasPriceBump = gasPriceBump != null ? gasPriceBump : 1e3;
this.gasLimitBump = gasLimitBump != null ? gasLimitBump : 3e3;
this.gasFailover = gasFailover != null ? gasFailover : false;
this.bumpNonce = bumpNonce != null ? bumpNonce : false;
}
sendUncheckedTransaction(tx) {
- return __async$e(this, null, function* () {
+ return __async$b(this, null, function* () {
return __superGet$1(TornadoRpcSigner.prototype, this, "sendUncheckedTransaction").call(this, yield populateTransaction(this, tx));
});
}
@@ -188408,7 +178955,7 @@ class TornadoBrowserProvider extends BrowserProvider {
this.options = options;
}
getSigner(address) {
- return __async$e(this, null, function* () {
+ return __async$b(this, null, function* () {
var _a, _b, _c, _d, _e, _f, _g, _h, _i;
const signerAddress = (yield __superGet$1(TornadoBrowserProvider.prototype, this, "getSigner").call(this, address)).address;
if (((_a = this.options) == null ? void 0 : _a.webChainId) && ((_b = this.options) == null ? void 0 : _b.connectWallet) && Number(yield __superGet$1(TornadoBrowserProvider.prototype, this, "send").call(this, "eth_chainId", [])) !== Number((_c = this.options) == null ? void 0 : _c.webChainId)) {
@@ -188617,26 +179164,26 @@ const GET_GOVERNANCE_APY = `
}
`;
-var __defProp$4 = Object.defineProperty;
-var __defProps$3 = Object.defineProperties;
-var __getOwnPropDescs$3 = Object.getOwnPropertyDescriptors;
-var __getOwnPropSymbols$4 = Object.getOwnPropertySymbols;
-var __hasOwnProp$4 = Object.prototype.hasOwnProperty;
-var __propIsEnum$4 = Object.prototype.propertyIsEnumerable;
-var __defNormalProp$4 = (obj, key, value) => key in obj ? __defProp$4(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value;
-var __spreadValues$4 = (a, b) => {
+var __defProp$3 = Object.defineProperty;
+var __defProps$2 = Object.defineProperties;
+var __getOwnPropDescs$2 = Object.getOwnPropertyDescriptors;
+var __getOwnPropSymbols$3 = Object.getOwnPropertySymbols;
+var __hasOwnProp$3 = Object.prototype.hasOwnProperty;
+var __propIsEnum$3 = Object.prototype.propertyIsEnumerable;
+var __defNormalProp$3 = (obj, key, value) => key in obj ? __defProp$3(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value;
+var __spreadValues$3 = (a, b) => {
for (var prop in b || (b = {}))
- if (__hasOwnProp$4.call(b, prop))
- __defNormalProp$4(a, prop, b[prop]);
- if (__getOwnPropSymbols$4)
- for (var prop of __getOwnPropSymbols$4(b)) {
- if (__propIsEnum$4.call(b, prop))
- __defNormalProp$4(a, prop, b[prop]);
+ if (__hasOwnProp$3.call(b, prop))
+ __defNormalProp$3(a, prop, b[prop]);
+ if (__getOwnPropSymbols$3)
+ for (var prop of __getOwnPropSymbols$3(b)) {
+ if (__propIsEnum$3.call(b, prop))
+ __defNormalProp$3(a, prop, b[prop]);
}
return a;
};
-var __spreadProps$3 = (a, b) => __defProps$3(a, __getOwnPropDescs$3(b));
-var __async$d = (__this, __arguments, generator) => {
+var __spreadProps$2 = (a, b) => __defProps$2(a, __getOwnPropDescs$2(b));
+var __async$a = (__this, __arguments, generator) => {
return new Promise((resolve, reject) => {
var fulfilled = (value) => {
try {
@@ -188659,7 +179206,7 @@ var __async$d = (__this, __arguments, generator) => {
const isEmptyArray = (arr) => !Array.isArray(arr) || !arr.length;
const first = 1e3;
function queryGraph(_0) {
- return __async$d(this, arguments, function* ({
+ return __async$a(this, arguments, function* ({
graphApi,
subgraphName,
query,
@@ -188668,7 +179215,7 @@ function queryGraph(_0) {
}) {
var _a;
const graphUrl = `${graphApi}/subgraphs/name/${subgraphName}`;
- const { data, errors } = yield fetchData(graphUrl, __spreadProps$3(__spreadValues$4({}, fetchDataOptions2), {
+ const { data, errors } = yield fetchData(graphUrl, __spreadProps$2(__spreadValues$3({}, fetchDataOptions2), {
method: "POST",
headers: {
"Content-Type": "application/json"
@@ -188688,7 +179235,7 @@ function queryGraph(_0) {
});
}
function getStatistic(_0) {
- return __async$d(this, arguments, function* ({
+ return __async$a(this, arguments, function* ({
graphApi,
subgraphName,
currency,
@@ -188735,7 +179282,7 @@ function getStatistic(_0) {
});
}
function getMeta(_0) {
- return __async$d(this, arguments, function* ({ graphApi, subgraphName, fetchDataOptions: fetchDataOptions2 }) {
+ return __async$a(this, arguments, function* ({ graphApi, subgraphName, fetchDataOptions: fetchDataOptions2 }) {
try {
const {
_meta: {
@@ -188780,7 +179327,7 @@ function getRegisters({
});
}
function getAllRegisters(_0) {
- return __async$d(this, arguments, function* ({
+ return __async$a(this, arguments, function* ({
graphApi,
subgraphName,
fromBlock,
@@ -188869,7 +179416,7 @@ function getDeposits({
});
}
function getAllDeposits(_0) {
- return __async$d(this, arguments, function* ({
+ return __async$a(this, arguments, function* ({
graphApi,
subgraphName,
currency,
@@ -188966,7 +179513,7 @@ function getWithdrawals({
});
}
function getAllWithdrawals(_0) {
- return __async$d(this, arguments, function* ({
+ return __async$a(this, arguments, function* ({
graphApi,
subgraphName,
currency,
@@ -189042,7 +179589,7 @@ function getAllWithdrawals(_0) {
});
}
function getNoteAccounts(_0) {
- return __async$d(this, arguments, function* ({
+ return __async$a(this, arguments, function* ({
graphApi,
subgraphName,
address,
@@ -189095,7 +179642,7 @@ function getGraphEchoEvents({
});
}
function getAllGraphEchoEvents(_0) {
- return __async$d(this, arguments, function* ({
+ return __async$a(this, arguments, function* ({
graphApi,
subgraphName,
fromBlock,
@@ -189184,7 +179731,7 @@ function getEncryptedNotes({
});
}
function getAllEncryptedNotes(_0) {
- return __async$d(this, arguments, function* ({
+ return __async$a(this, arguments, function* ({
graphApi,
subgraphName,
fromBlock,
@@ -189269,7 +179816,7 @@ function getGovernanceEvents({
});
}
function getAllGovernanceEvents(_0) {
- return __async$d(this, arguments, function* ({
+ return __async$a(this, arguments, function* ({
graphApi,
subgraphName,
fromBlock,
@@ -189428,7 +179975,7 @@ var graph = /*#__PURE__*/Object.freeze({
queryGraph: queryGraph
});
-var __async$c = (__this, __arguments, generator) => {
+var __async$9 = (__this, __arguments, generator) => {
return new Promise((resolve, reject) => {
var fulfilled = (value) => {
try {
@@ -189467,7 +180014,7 @@ class BatchBlockService {
this.retryOn = retryOn;
}
getBlock(blockTag) {
- return __async$c(this, null, function* () {
+ return __async$9(this, null, function* () {
const blockObject = yield this.provider.getBlock(blockTag);
if (!blockObject) {
const errMsg = `No block for ${blockTag}`;
@@ -189477,9 +180024,9 @@ class BatchBlockService {
});
}
createBatchRequest(batchArray) {
- return batchArray.map((blocks, index) => __async$c(this, null, function* () {
+ return batchArray.map((blocks, index) => __async$9(this, null, function* () {
yield dist_sleep(20 * index);
- return (() => __async$c(this, null, function* () {
+ return (() => __async$9(this, null, function* () {
let retries = 0;
let err;
while (!this.shouldRetry && retries === 0 || this.shouldRetry && retries < this.retryMax) {
@@ -189496,7 +180043,7 @@ class BatchBlockService {
}));
}
getBatchBlocks(blocks) {
- return __async$c(this, null, function* () {
+ return __async$9(this, null, function* () {
let blockCount = 0;
const results = [];
for (const chunks of chunk(blocks, this.concurrencySize * this.batchSize)) {
@@ -189534,7 +180081,7 @@ class BatchTransactionService {
this.retryOn = retryOn;
}
getTransaction(txHash) {
- return __async$c(this, null, function* () {
+ return __async$9(this, null, function* () {
const txObject = yield this.provider.getTransaction(txHash);
if (!txObject) {
const errMsg = `No transaction for ${txHash}`;
@@ -189544,9 +180091,9 @@ class BatchTransactionService {
});
}
createBatchRequest(batchArray) {
- return batchArray.map((txs, index) => __async$c(this, null, function* () {
+ return batchArray.map((txs, index) => __async$9(this, null, function* () {
yield dist_sleep(20 * index);
- return (() => __async$c(this, null, function* () {
+ return (() => __async$9(this, null, function* () {
let retries = 0;
let err;
while (!this.shouldRetry && retries === 0 || this.shouldRetry && retries < this.retryMax) {
@@ -189563,7 +180110,7 @@ class BatchTransactionService {
}));
}
getBatchTransactions(txs) {
- return __async$c(this, null, function* () {
+ return __async$9(this, null, function* () {
let txCount = 0;
const results = [];
for (const chunks of chunk(txs, this.concurrencySize * this.batchSize)) {
@@ -189599,7 +180146,7 @@ class BatchEventsService {
this.retryOn = retryOn;
}
getPastEvents(_0) {
- return __async$c(this, arguments, function* ({ fromBlock, toBlock, type }) {
+ return __async$9(this, arguments, function* ({ fromBlock, toBlock, type }) {
let err;
let retries = 0;
while (!this.shouldRetry && retries === 0 || this.shouldRetry && retries < this.retryMax) {
@@ -189619,13 +180166,13 @@ class BatchEventsService {
});
}
createBatchRequest(batchArray) {
- return batchArray.map((event, index) => __async$c(this, null, function* () {
+ return batchArray.map((event, index) => __async$9(this, null, function* () {
yield dist_sleep(20 * index);
return this.getPastEvents(event);
}));
}
getBatchEvents(_0) {
- return __async$c(this, arguments, function* ({ fromBlock, toBlock, type = "*" }) {
+ return __async$9(this, arguments, function* ({ fromBlock, toBlock, type = "*" }) {
if (!toBlock) {
toBlock = yield this.provider.getBlockNumber();
}
@@ -189656,29 +180203,29 @@ class BatchEventsService {
}
}
-var __defProp$3 = Object.defineProperty;
-var __defProps$2 = Object.defineProperties;
-var __getOwnPropDescs$2 = Object.getOwnPropertyDescriptors;
-var __getOwnPropSymbols$3 = Object.getOwnPropertySymbols;
+var __defProp$2 = Object.defineProperty;
+var __defProps$1 = Object.defineProperties;
+var __getOwnPropDescs$1 = Object.getOwnPropertyDescriptors;
+var __getOwnPropSymbols$2 = Object.getOwnPropertySymbols;
var __getProtoOf = Object.getPrototypeOf;
-var __hasOwnProp$3 = Object.prototype.hasOwnProperty;
-var __propIsEnum$3 = Object.prototype.propertyIsEnumerable;
+var __hasOwnProp$2 = Object.prototype.hasOwnProperty;
+var __propIsEnum$2 = Object.prototype.propertyIsEnumerable;
var __reflectGet = Reflect.get;
-var __defNormalProp$3 = (obj, key, value) => key in obj ? __defProp$3(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value;
-var __spreadValues$3 = (a, b) => {
+var __defNormalProp$2 = (obj, key, value) => key in obj ? __defProp$2(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value;
+var __spreadValues$2 = (a, b) => {
for (var prop in b || (b = {}))
- if (__hasOwnProp$3.call(b, prop))
- __defNormalProp$3(a, prop, b[prop]);
- if (__getOwnPropSymbols$3)
- for (var prop of __getOwnPropSymbols$3(b)) {
- if (__propIsEnum$3.call(b, prop))
- __defNormalProp$3(a, prop, b[prop]);
+ if (__hasOwnProp$2.call(b, prop))
+ __defNormalProp$2(a, prop, b[prop]);
+ if (__getOwnPropSymbols$2)
+ for (var prop of __getOwnPropSymbols$2(b)) {
+ if (__propIsEnum$2.call(b, prop))
+ __defNormalProp$2(a, prop, b[prop]);
}
return a;
};
-var __spreadProps$2 = (a, b) => __defProps$2(a, __getOwnPropDescs$2(b));
+var __spreadProps$1 = (a, b) => __defProps$1(a, __getOwnPropDescs$1(b));
var __superGet = (cls, obj, key) => __reflectGet(__getProtoOf(cls), key, obj);
-var __async$b = (__this, __arguments, generator) => {
+var __async$8 = (__this, __arguments, generator) => {
return new Promise((resolve, reject) => {
var fulfilled = (value) => {
try {
@@ -189753,7 +180300,7 @@ class BaseEventsService {
}
/* eslint-enable @typescript-eslint/no-unused-vars */
formatEvents(events) {
- return __async$b(this, null, function* () {
+ return __async$8(this, null, function* () {
return yield new Promise((resolve) => resolve(events));
});
}
@@ -189761,7 +180308,7 @@ class BaseEventsService {
* Get saved or cached events
*/
getEventsFromDB() {
- return __async$b(this, null, function* () {
+ return __async$8(this, null, function* () {
return {
events: [],
lastBlock: null
@@ -189769,7 +180316,7 @@ class BaseEventsService {
});
}
getEventsFromCache() {
- return __async$b(this, null, function* () {
+ return __async$8(this, null, function* () {
return {
events: [],
lastBlock: null
@@ -189777,7 +180324,7 @@ class BaseEventsService {
});
}
getSavedEvents() {
- return __async$b(this, null, function* () {
+ return __async$8(this, null, function* () {
let cachedEvents = yield this.getEventsFromDB();
if (!cachedEvents || !cachedEvents.events.length) {
cachedEvents = yield this.getEventsFromCache();
@@ -189789,7 +180336,7 @@ class BaseEventsService {
* Get latest events
*/
getEventsFromGraph(_0) {
- return __async$b(this, arguments, function* ({
+ return __async$8(this, arguments, function* ({
fromBlock,
methodName = ""
}) {
@@ -189799,7 +180346,7 @@ class BaseEventsService {
lastBlock: fromBlock
};
}
- const { events, lastSyncBlock } = yield graph[methodName || this.getGraphMethod()](__spreadValues$3({
+ const { events, lastSyncBlock } = yield graph[methodName || this.getGraphMethod()](__spreadValues$2({
fromBlock
}, this.getGraphParams()));
return {
@@ -189809,7 +180356,7 @@ class BaseEventsService {
});
}
getEventsFromRpc(_0) {
- return __async$b(this, arguments, function* ({
+ return __async$8(this, arguments, function* ({
fromBlock,
toBlock
}) {
@@ -189847,7 +180394,7 @@ class BaseEventsService {
});
}
getLatestEvents(_0) {
- return __async$b(this, arguments, function* ({ fromBlock }) {
+ return __async$8(this, arguments, function* ({ fromBlock }) {
const allEvents = [];
const graphEvents = yield this.getEventsFromGraph({ fromBlock });
const lastSyncBlock = graphEvents.lastBlock && graphEvents.lastBlock >= fromBlock ? graphEvents.lastBlock : fromBlock;
@@ -189869,14 +180416,14 @@ class BaseEventsService {
*/
// eslint-disable-next-line @typescript-eslint/no-unused-vars
saveEvents(_0) {
- return __async$b(this, arguments, function* ({ events, lastBlock }) {
+ return __async$8(this, arguments, function* ({ events, lastBlock }) {
});
}
/**
* Trigger saving and receiving latest events
*/
updateEvents() {
- return __async$b(this, null, function* () {
+ return __async$8(this, null, function* () {
const savedEvents = yield this.getSavedEvents();
let fromBlock = this.deployedBlock;
if (savedEvents && savedEvents.lastBlock) {
@@ -189950,7 +180497,7 @@ class BaseTornadoService extends BaseEventsService {
};
}
formatEvents(events) {
- return __async$b(this, null, function* () {
+ return __async$8(this, null, function* () {
const type = this.getType().toLowerCase();
if (type === DEPOSIT) {
const formattedEvents = events.map(({ blockNumber, index: logIndex, transactionHash, args }) => {
@@ -189969,7 +180516,7 @@ class BaseTornadoService extends BaseEventsService {
]);
return formattedEvents.map((event) => {
const { from } = txs.find(({ hash }) => hash === event.transactionHash);
- return __spreadProps$2(__spreadValues$3({}, event), {
+ return __spreadProps$1(__spreadValues$2({}, event), {
from
});
});
@@ -189990,7 +180537,7 @@ class BaseTornadoService extends BaseEventsService {
]);
return formattedEvents.map((event) => {
const { timestamp } = blocks.find(({ number }) => number === event.blockNumber);
- return __spreadProps$2(__spreadValues$3({}, event), {
+ return __spreadProps$1(__spreadValues$2({}, event), {
timestamp
});
});
@@ -190029,7 +180576,7 @@ class BaseEchoService extends BaseEventsService {
return "getAllGraphEchoEvents";
}
formatEvents(events) {
- return __async$b(this, null, function* () {
+ return __async$8(this, null, function* () {
return events.map(({ blockNumber, index: logIndex, transactionHash, args }) => {
const { who, data } = args;
if (who && data) {
@@ -190038,7 +180585,7 @@ class BaseEchoService extends BaseEventsService {
logIndex,
transactionHash
};
- return __spreadProps$2(__spreadValues$3({}, eventObjects), {
+ return __spreadProps$1(__spreadValues$2({}, eventObjects), {
address: who,
encryptedAccount: data
});
@@ -190047,7 +180594,7 @@ class BaseEchoService extends BaseEventsService {
});
}
getEventsFromGraph(_0) {
- return __async$b(this, arguments, function* ({ fromBlock }) {
+ return __async$8(this, arguments, function* ({ fromBlock }) {
if (!this.graphApi || this.graphApi.includes("api.thegraph.com")) {
return {
events: [],
@@ -190080,7 +180627,7 @@ class BaseEncryptedNotesService extends BaseEventsService {
return "getAllEncryptedNotes";
}
formatEvents(events) {
- return __async$b(this, null, function* () {
+ return __async$8(this, null, function* () {
return events.map(({ blockNumber, index: logIndex, transactionHash, args }) => {
const { encryptedNote } = args;
if (encryptedNote) {
@@ -190089,7 +180636,7 @@ class BaseEncryptedNotesService extends BaseEventsService {
logIndex,
transactionHash
};
- return __spreadProps$2(__spreadValues$3({}, eventObjects), {
+ return __spreadProps$1(__spreadValues$2({}, eventObjects), {
encryptedNote
});
}
@@ -190123,7 +180670,7 @@ class BaseGovernanceService extends BaseEventsService {
return "getAllGovernanceEvents";
}
formatEvents(events) {
- return __async$b(this, null, function* () {
+ return __async$8(this, null, function* () {
const proposalEvents = [];
const votedEvents = [];
const delegatedEvents = [];
@@ -190137,7 +180684,7 @@ class BaseGovernanceService extends BaseEventsService {
};
if (event === "ProposalCreated") {
const { id, proposer, target, startTime, endTime, description } = args;
- proposalEvents.push(__spreadProps$2(__spreadValues$3({}, eventObjects), {
+ proposalEvents.push(__spreadProps$1(__spreadValues$2({}, eventObjects), {
id: Number(id),
proposer,
target,
@@ -190148,7 +180695,7 @@ class BaseGovernanceService extends BaseEventsService {
}
if (event === "Voted") {
const { proposalId, voter, support, votes } = args;
- votedEvents.push(__spreadProps$2(__spreadValues$3({}, eventObjects), {
+ votedEvents.push(__spreadProps$1(__spreadValues$2({}, eventObjects), {
proposalId: Number(proposalId),
voter,
support,
@@ -190159,14 +180706,14 @@ class BaseGovernanceService extends BaseEventsService {
}
if (event === "Delegated") {
const { account, to: delegateTo } = args;
- delegatedEvents.push(__spreadProps$2(__spreadValues$3({}, eventObjects), {
+ delegatedEvents.push(__spreadProps$1(__spreadValues$2({}, eventObjects), {
account,
delegateTo
}));
}
if (event === "Undelegated") {
const { account, from: delegateFrom } = args;
- undelegatedEvents.push(__spreadProps$2(__spreadValues$3({}, eventObjects), {
+ undelegatedEvents.push(__spreadProps$1(__spreadValues$2({}, eventObjects), {
account,
delegateFrom
}));
@@ -190190,7 +180737,7 @@ class BaseGovernanceService extends BaseEventsService {
});
}
getEventsFromGraph(_0) {
- return __async$b(this, arguments, function* ({ fromBlock }) {
+ return __async$8(this, arguments, function* ({ fromBlock }) {
if (!this.graphApi || !this.subgraphName || this.graphApi.includes("api.thegraph.com")) {
return {
events: [],
@@ -190225,14 +180772,14 @@ class BaseRegistryService extends BaseEventsService {
return "getAllRegisters";
}
formatEvents(events) {
- return __async$b(this, null, function* () {
+ return __async$8(this, null, function* () {
return events.map(({ blockNumber, index: logIndex, transactionHash, args }) => {
const eventObjects = {
blockNumber,
logIndex,
transactionHash
};
- return __spreadProps$2(__spreadValues$3({}, eventObjects), {
+ return __spreadProps$1(__spreadValues$2({}, eventObjects), {
ensName: args.ensName,
relayerAddress: args.relayerAddress
});
@@ -190240,843 +180787,25 @@ class BaseRegistryService extends BaseEventsService {
});
}
fetchRelayers() {
- return __async$b(this, null, function* () {
+ return __async$8(this, null, function* () {
return (yield this.updateEvents()).events;
});
}
}
-var __async$a = (__this, __arguments, generator) => {
- return new Promise((resolve, reject) => {
- var fulfilled = (value) => {
- try {
- step(generator.next(value));
- } catch (e) {
- reject(e);
- }
- };
- var rejected = (value) => {
- try {
- step(generator.throw(value));
- } catch (e) {
- reject(e);
- }
- };
- var step = (x) => x.done ? resolve(x.value) : Promise.resolve(x.value).then(fulfilled, rejected);
- step((generator = generator.apply(__this, __arguments)).next());
- });
-};
-function existsAsync(fileOrDir) {
- return __async$a(this, null, function* () {
- try {
- yield (0,promises_namespaceObject.stat)(fileOrDir);
- return true;
- } catch (e) {
- return false;
- }
- });
-}
-function zipAsync(file) {
- return new Promise((res, rej) => {
- zip(file, { mtime: /* @__PURE__ */ new Date("1/1/1980") }, (err, data) => {
- if (err) {
- rej(err);
- return;
- }
- res(data);
- });
- });
-}
-function unzipAsync(data) {
- return new Promise((res, rej) => {
- unzip(data, {}, (err, data2) => {
- if (err) {
- rej(err);
- return;
- }
- res(data2);
- });
- });
-}
-function saveUserFile(_0) {
- return __async$a(this, arguments, function* ({
- fileName,
- userDirectory,
- dataString
- }) {
- fileName = fileName.toLowerCase();
- const filePath = external_path_.join(userDirectory, fileName);
- const payload = yield zipAsync({
- [fileName]: new TextEncoder().encode(dataString)
- });
- if (!(yield existsAsync(userDirectory))) {
- yield (0,promises_namespaceObject.mkdir)(userDirectory, { recursive: true });
- }
- yield (0,promises_namespaceObject.writeFile)(filePath + ".zip", payload);
- yield (0,promises_namespaceObject.writeFile)(filePath, dataString);
- });
-}
-function loadSavedEvents(_0) {
- return __async$a(this, arguments, function* ({
- name,
- userDirectory,
- deployedBlock
- }) {
- const filePath = external_path_.join(userDirectory, `${name}.json`.toLowerCase());
- if (!(yield existsAsync(filePath))) {
- return {
- events: [],
- lastBlock: null
- };
- }
- try {
- const events = JSON.parse(yield (0,promises_namespaceObject.readFile)(filePath, { encoding: "utf8" }));
- return {
- events,
- lastBlock: events && events.length ? events[events.length - 1].blockNumber : deployedBlock
- };
- } catch (err) {
- console.log("Method loadSavedEvents has error");
- console.log(err);
- return {
- events: [],
- lastBlock: deployedBlock
- };
- }
- });
-}
-function download(_0) {
- return __async$a(this, arguments, function* ({ name, cacheDirectory }) {
- const fileName = `${name}.json`.toLowerCase();
- const zipName = `${fileName}.zip`;
- const zipPath = external_path_.join(cacheDirectory, zipName);
- const data = yield (0,promises_namespaceObject.readFile)(zipPath);
- const { [fileName]: content } = yield unzipAsync(data);
- return new TextDecoder().decode(content);
- });
-}
-function loadCachedEvents(_0) {
- return __async$a(this, arguments, function* ({
- name,
- cacheDirectory,
- deployedBlock
- }) {
- try {
- const module = yield download({ cacheDirectory, name });
- if (module) {
- const events = JSON.parse(module);
- const lastBlock = events && events.length ? events[events.length - 1].blockNumber : deployedBlock;
- return {
- events,
- lastBlock
- };
- }
- return {
- events: [],
- lastBlock: deployedBlock
- };
- } catch (err) {
- console.log("Method loadCachedEvents has error");
- console.log(err);
- return {
- events: [],
- lastBlock: deployedBlock
- };
- }
- });
-}
-
-var __async$9 = (__this, __arguments, generator) => {
- return new Promise((resolve, reject) => {
- var fulfilled = (value) => {
- try {
- step(generator.next(value));
- } catch (e) {
- reject(e);
- }
- };
- var rejected = (value) => {
- try {
- step(generator.throw(value));
- } catch (e) {
- reject(e);
- }
- };
- var step = (x) => x.done ? resolve(x.value) : Promise.resolve(x.value).then(fulfilled, rejected);
- step((generator = generator.apply(__this, __arguments)).next());
- });
-};
-class NodeTornadoService extends BaseTornadoService {
- constructor({
- netId,
- provider,
- graphApi,
- subgraphName,
- Tornado,
- type,
- amount,
- currency,
- deployedBlock,
- fetchDataOptions,
- cacheDirectory,
- userDirectory
- }) {
- super({
- netId,
- provider,
- graphApi,
- subgraphName,
- Tornado,
- type,
- amount,
- currency,
- deployedBlock,
- fetchDataOptions
- });
- this.cacheDirectory = cacheDirectory;
- this.userDirectory = userDirectory;
- }
- updateEventProgress({ type, fromBlock, toBlock, count }) {
- if (toBlock) {
- console.log(`fromBlock - ${fromBlock}`);
- console.log(`toBlock - ${toBlock}`);
- if (count) {
- console.log(`downloaded ${type} events count - ${count}`);
- console.log("____________________________________________");
- console.log(`Fetched ${type} events from ${fromBlock} to ${toBlock}
-`);
- }
- }
- }
- updateTransactionProgress({ currentIndex, totalIndex }) {
- if (totalIndex) {
- console.log(`Fetched ${currentIndex} deposit txs of ${totalIndex}`);
- }
- }
- updateBlockProgress({ currentIndex, totalIndex }) {
- if (totalIndex) {
- console.log(`Fetched ${currentIndex} withdrawal blocks of ${totalIndex}`);
- }
- }
- updateGraphProgress({ type, fromBlock, toBlock, count }) {
- if (toBlock) {
- console.log(`fromBlock - ${fromBlock}`);
- console.log(`toBlock - ${toBlock}`);
- if (count) {
- console.log(`downloaded ${type} events from graph node count - ${count}`);
- console.log("____________________________________________");
- console.log(`Fetched ${type} events from graph node ${fromBlock} to ${toBlock}
-`);
- }
- }
- }
- getEventsFromDB() {
- return __async$9(this, null, function* () {
- if (!this.userDirectory) {
- console.log(
- "Updating events for",
- this.amount,
- this.currency.toUpperCase(),
- `${this.getType().toLowerCase()}s
-`
- );
- console.log(`savedEvents count - ${0}`);
- console.log(`savedEvents lastBlock - ${this.deployedBlock}
-`);
- return {
- events: [],
- lastBlock: this.deployedBlock
- };
- }
- const savedEvents = yield loadSavedEvents({
- name: this.getInstanceName(),
- userDirectory: this.userDirectory,
- deployedBlock: this.deployedBlock
- });
- console.log("Updating events for", this.amount, this.currency.toUpperCase(), `${this.getType().toLowerCase()}s
-`);
- console.log(`savedEvents count - ${savedEvents.events.length}`);
- console.log(`savedEvents lastBlock - ${savedEvents.lastBlock}
-`);
- return savedEvents;
- });
- }
- getEventsFromCache() {
- return __async$9(this, null, function* () {
- if (!this.cacheDirectory) {
- console.log(`cachedEvents count - ${0}`);
- console.log(`cachedEvents lastBlock - ${this.deployedBlock}
-`);
- return {
- events: [],
- lastBlock: this.deployedBlock
- };
- }
- const cachedEvents = yield loadCachedEvents({
- name: this.getInstanceName(),
- cacheDirectory: this.cacheDirectory,
- deployedBlock: this.deployedBlock
- });
- console.log(`cachedEvents count - ${cachedEvents.events.length}`);
- console.log(`cachedEvents lastBlock - ${cachedEvents.lastBlock}
-`);
- return cachedEvents;
- });
- }
- saveEvents(_0) {
- return __async$9(this, arguments, function* ({ events, lastBlock }) {
- const instanceName = this.getInstanceName();
- console.log("\ntotalEvents count - ", events.length);
- console.log(
- `totalEvents lastBlock - ${events[events.length - 1] ? events[events.length - 1].blockNumber : lastBlock}
-`
- );
- const eventTable = new cli_table3();
- eventTable.push(
- [{ colSpan: 2, content: `${this.getType()}s`, hAlign: "center" }],
- ["Instance", `${this.netId} chain ${this.amount} ${this.currency.toUpperCase()}`],
- ["Anonymity set", `${events.length} equal user ${this.getType().toLowerCase()}s`],
- [{ colSpan: 2, content: `Latest ${this.getType().toLowerCase()}s` }],
- ...events.slice(events.length - 10).reverse().map(({ timestamp }, index) => {
- const eventIndex = events.length - index;
- const eventTime = moment.unix(timestamp).fromNow();
- return [eventIndex, eventTime];
- })
- );
- console.log(eventTable.toString() + "\n");
- if (this.userDirectory) {
- yield saveUserFile({
- fileName: instanceName + ".json",
- userDirectory: this.userDirectory,
- dataString: JSON.stringify(events, null, 2) + "\n"
- });
- }
- });
- }
-}
-class NodeEchoService extends BaseEchoService {
- constructor({
- netId,
- provider,
- graphApi,
- subgraphName,
- Echoer,
- deployedBlock,
- fetchDataOptions,
- cacheDirectory,
- userDirectory
- }) {
- super({
- netId,
- provider,
- graphApi,
- subgraphName,
- Echoer,
- deployedBlock,
- fetchDataOptions
- });
- this.cacheDirectory = cacheDirectory;
- this.userDirectory = userDirectory;
- }
- updateEventProgress({ type, fromBlock, toBlock, count }) {
- if (toBlock) {
- console.log(`fromBlock - ${fromBlock}`);
- console.log(`toBlock - ${toBlock}`);
- if (count) {
- console.log(`downloaded ${type} events count - ${count}`);
- console.log("____________________________________________");
- console.log(`Fetched ${type} events from ${fromBlock} to ${toBlock}
-`);
- }
- }
- }
- updateGraphProgress({ type, fromBlock, toBlock, count }) {
- if (toBlock) {
- console.log(`fromBlock - ${fromBlock}`);
- console.log(`toBlock - ${toBlock}`);
- if (count) {
- console.log(`downloaded ${type} events from graph node count - ${count}`);
- console.log("____________________________________________");
- console.log(`Fetched ${type} events from graph node ${fromBlock} to ${toBlock}
-`);
- }
- }
- }
- getEventsFromDB() {
- return __async$9(this, null, function* () {
- if (!this.userDirectory) {
- console.log(`Updating events for ${this.netId} chain echo events
-`);
- console.log(`savedEvents count - ${0}`);
- console.log(`savedEvents lastBlock - ${this.deployedBlock}
-`);
- return {
- events: [],
- lastBlock: this.deployedBlock
- };
- }
- const savedEvents = yield loadSavedEvents({
- name: this.getInstanceName(),
- userDirectory: this.userDirectory,
- deployedBlock: this.deployedBlock
- });
- console.log(`Updating events for ${this.netId} chain echo events
-`);
- console.log(`savedEvents count - ${savedEvents.events.length}`);
- console.log(`savedEvents lastBlock - ${savedEvents.lastBlock}
-`);
- return savedEvents;
- });
- }
- getEventsFromCache() {
- return __async$9(this, null, function* () {
- if (!this.cacheDirectory) {
- console.log(`cachedEvents count - ${0}`);
- console.log(`cachedEvents lastBlock - ${this.deployedBlock}
-`);
- return {
- events: [],
- lastBlock: this.deployedBlock
- };
- }
- const cachedEvents = yield loadCachedEvents({
- name: this.getInstanceName(),
- cacheDirectory: this.cacheDirectory,
- deployedBlock: this.deployedBlock
- });
- console.log(`cachedEvents count - ${cachedEvents.events.length}`);
- console.log(`cachedEvents lastBlock - ${cachedEvents.lastBlock}
-`);
- return cachedEvents;
- });
- }
- saveEvents(_0) {
- return __async$9(this, arguments, function* ({ events, lastBlock }) {
- const instanceName = this.getInstanceName();
- console.log("\ntotalEvents count - ", events.length);
- console.log(
- `totalEvents lastBlock - ${events[events.length - 1] ? events[events.length - 1].blockNumber : lastBlock}
-`
- );
- const eventTable = new cli_table3();
- eventTable.push(
- [{ colSpan: 2, content: "Echo Accounts", hAlign: "center" }],
- ["Network", `${this.netId} chain`],
- ["Events", `${events.length} events`],
- [{ colSpan: 2, content: "Latest events" }],
- ...events.slice(events.length - 10).reverse().map(({ blockNumber }, index) => {
- const eventIndex = events.length - index;
- return [eventIndex, blockNumber];
- })
- );
- console.log(eventTable.toString() + "\n");
- if (this.userDirectory) {
- yield saveUserFile({
- fileName: instanceName + ".json",
- userDirectory: this.userDirectory,
- dataString: JSON.stringify(events, null, 2) + "\n"
- });
- }
- });
- }
-}
-class NodeEncryptedNotesService extends BaseEncryptedNotesService {
- constructor({
- netId,
- provider,
- graphApi,
- subgraphName,
- Router,
- deployedBlock,
- fetchDataOptions,
- cacheDirectory,
- userDirectory
- }) {
- super({
- netId,
- provider,
- graphApi,
- subgraphName,
- Router,
- deployedBlock,
- fetchDataOptions
- });
- this.cacheDirectory = cacheDirectory;
- this.userDirectory = userDirectory;
- }
- updateEventProgress({ type, fromBlock, toBlock, count }) {
- if (toBlock) {
- console.log(`fromBlock - ${fromBlock}`);
- console.log(`toBlock - ${toBlock}`);
- if (count) {
- console.log(`downloaded ${type} events count - ${count}`);
- console.log("____________________________________________");
- console.log(`Fetched ${type} events from ${fromBlock} to ${toBlock}
-`);
- }
- }
- }
- updateGraphProgress({ type, fromBlock, toBlock, count }) {
- if (toBlock) {
- console.log(`fromBlock - ${fromBlock}`);
- console.log(`toBlock - ${toBlock}`);
- if (count) {
- console.log(`downloaded ${type} events from graph node count - ${count}`);
- console.log("____________________________________________");
- console.log(`Fetched ${type} events from graph node ${fromBlock} to ${toBlock}
-`);
- }
- }
- }
- getEventsFromDB() {
- return __async$9(this, null, function* () {
- if (!this.userDirectory) {
- console.log(`Updating events for ${this.netId} chain encrypted events
-`);
- console.log(`savedEvents count - ${0}`);
- console.log(`savedEvents lastBlock - ${this.deployedBlock}
-`);
- return {
- events: [],
- lastBlock: this.deployedBlock
- };
- }
- const savedEvents = yield loadSavedEvents({
- name: this.getInstanceName(),
- userDirectory: this.userDirectory,
- deployedBlock: this.deployedBlock
- });
- console.log(`Updating events for ${this.netId} chain encrypted events
-`);
- console.log(`savedEvents count - ${savedEvents.events.length}`);
- console.log(`savedEvents lastBlock - ${savedEvents.lastBlock}
-`);
- return savedEvents;
- });
- }
- getEventsFromCache() {
- return __async$9(this, null, function* () {
- if (!this.cacheDirectory) {
- console.log(`cachedEvents count - ${0}`);
- console.log(`cachedEvents lastBlock - ${this.deployedBlock}
-`);
- return {
- events: [],
- lastBlock: this.deployedBlock
- };
- }
- const cachedEvents = yield loadCachedEvents({
- name: this.getInstanceName(),
- cacheDirectory: this.cacheDirectory,
- deployedBlock: this.deployedBlock
- });
- console.log(`cachedEvents count - ${cachedEvents.events.length}`);
- console.log(`cachedEvents lastBlock - ${cachedEvents.lastBlock}
-`);
- return cachedEvents;
- });
- }
- saveEvents(_0) {
- return __async$9(this, arguments, function* ({ events, lastBlock }) {
- const instanceName = this.getInstanceName();
- console.log("\ntotalEvents count - ", events.length);
- console.log(
- `totalEvents lastBlock - ${events[events.length - 1] ? events[events.length - 1].blockNumber : lastBlock}
-`
- );
- const eventTable = new cli_table3();
- eventTable.push(
- [{ colSpan: 2, content: "Encrypted Notes", hAlign: "center" }],
- ["Network", `${this.netId} chain`],
- ["Events", `${events.length} events`],
- [{ colSpan: 2, content: "Latest events" }],
- ...events.slice(events.length - 10).reverse().map(({ blockNumber }, index) => {
- const eventIndex = events.length - index;
- return [eventIndex, blockNumber];
- })
- );
- console.log(eventTable.toString() + "\n");
- if (this.userDirectory) {
- yield saveUserFile({
- fileName: instanceName + ".json",
- userDirectory: this.userDirectory,
- dataString: JSON.stringify(events, null, 2) + "\n"
- });
- }
- });
- }
-}
-class NodeGovernanceService extends BaseGovernanceService {
- constructor({
- netId,
- provider,
- graphApi,
- subgraphName,
- Governance,
- deployedBlock,
- fetchDataOptions,
- cacheDirectory,
- userDirectory
- }) {
- super({
- netId,
- provider,
- graphApi,
- subgraphName,
- Governance,
- deployedBlock,
- fetchDataOptions
- });
- this.cacheDirectory = cacheDirectory;
- this.userDirectory = userDirectory;
- }
- updateEventProgress({ type, fromBlock, toBlock, count }) {
- if (toBlock) {
- console.log(`fromBlock - ${fromBlock}`);
- console.log(`toBlock - ${toBlock}`);
- if (count) {
- console.log(`downloaded ${type} events count - ${count}`);
- console.log("____________________________________________");
- console.log(`Fetched ${type} events from ${fromBlock} to ${toBlock}
-`);
- }
- }
- }
- updateGraphProgress({ type, fromBlock, toBlock, count }) {
- if (toBlock) {
- console.log(`fromBlock - ${fromBlock}`);
- console.log(`toBlock - ${toBlock}`);
- if (count) {
- console.log(`downloaded ${type} events from graph node count - ${count}`);
- console.log("____________________________________________");
- console.log(`Fetched ${type} events from graph node ${fromBlock} to ${toBlock}
-`);
- }
- }
- }
- updateTransactionProgress({ currentIndex, totalIndex }) {
- if (totalIndex) {
- console.log(`Fetched ${currentIndex} governance txs of ${totalIndex}`);
- }
- }
- getEventsFromDB() {
- return __async$9(this, null, function* () {
- if (!this.userDirectory) {
- console.log(`Updating events for ${this.netId} chain governance events
-`);
- console.log(`savedEvents count - ${0}`);
- console.log(`savedEvents lastBlock - ${this.deployedBlock}
-`);
- return {
- events: [],
- lastBlock: this.deployedBlock
- };
- }
- const savedEvents = yield loadSavedEvents({
- name: this.getInstanceName(),
- userDirectory: this.userDirectory,
- deployedBlock: this.deployedBlock
- });
- console.log(`Updating events for ${this.netId} chain governance events
-`);
- console.log(`savedEvents count - ${savedEvents.events.length}`);
- console.log(`savedEvents lastBlock - ${savedEvents.lastBlock}
-`);
- return savedEvents;
- });
- }
- getEventsFromCache() {
- return __async$9(this, null, function* () {
- if (!this.cacheDirectory) {
- console.log(`cachedEvents count - ${0}`);
- console.log(`cachedEvents lastBlock - ${this.deployedBlock}
-`);
- return {
- events: [],
- lastBlock: this.deployedBlock
- };
- }
- const cachedEvents = yield loadCachedEvents({
- name: this.getInstanceName(),
- cacheDirectory: this.cacheDirectory,
- deployedBlock: this.deployedBlock
- });
- console.log(`cachedEvents count - ${cachedEvents.events.length}`);
- console.log(`cachedEvents lastBlock - ${cachedEvents.lastBlock}
-`);
- return cachedEvents;
- });
- }
- saveEvents(_0) {
- return __async$9(this, arguments, function* ({ events, lastBlock }) {
- const instanceName = this.getInstanceName();
- console.log("\ntotalEvents count - ", events.length);
- console.log(
- `totalEvents lastBlock - ${events[events.length - 1] ? events[events.length - 1].blockNumber : lastBlock}
-`
- );
- const eventTable = new cli_table3();
- eventTable.push(
- [{ colSpan: 2, content: "Governance Events", hAlign: "center" }],
- ["Network", `${this.netId} chain`],
- ["Events", `${events.length} events`],
- [{ colSpan: 2, content: "Latest events" }],
- ...events.slice(events.length - 10).reverse().map(({ blockNumber }, index) => {
- const eventIndex = events.length - index;
- return [eventIndex, blockNumber];
- })
- );
- console.log(eventTable.toString() + "\n");
- if (this.userDirectory) {
- yield saveUserFile({
- fileName: instanceName + ".json",
- userDirectory: this.userDirectory,
- dataString: JSON.stringify(events, null, 2) + "\n"
- });
- }
- });
- }
-}
-class NodeRegistryService extends BaseRegistryService {
- constructor({
- netId,
- provider,
- graphApi,
- subgraphName,
- RelayerRegistry,
- deployedBlock,
- fetchDataOptions,
- cacheDirectory,
- userDirectory
- }) {
- super({
- netId,
- provider,
- graphApi,
- subgraphName,
- RelayerRegistry,
- deployedBlock,
- fetchDataOptions
- });
- this.cacheDirectory = cacheDirectory;
- this.userDirectory = userDirectory;
- }
- updateEventProgress({ type, fromBlock, toBlock, count }) {
- if (toBlock) {
- console.log(`fromBlock - ${fromBlock}`);
- console.log(`toBlock - ${toBlock}`);
- if (count) {
- console.log(`downloaded ${type} events count - ${count}`);
- console.log("____________________________________________");
- console.log(`Fetched ${type} events from ${fromBlock} to ${toBlock}
-`);
- }
- }
- }
- updateGraphProgress({ type, fromBlock, toBlock, count }) {
- if (toBlock) {
- console.log(`fromBlock - ${fromBlock}`);
- console.log(`toBlock - ${toBlock}`);
- if (count) {
- console.log(`downloaded ${type} events from graph node count - ${count}`);
- console.log("____________________________________________");
- console.log(`Fetched ${type} events from graph node ${fromBlock} to ${toBlock}
-`);
- }
- }
- }
- getEventsFromDB() {
- return __async$9(this, null, function* () {
- if (!this.userDirectory) {
- console.log(`Updating events for ${this.netId} chain registry events
-`);
- console.log(`savedEvents count - ${0}`);
- console.log(`savedEvents lastBlock - ${this.deployedBlock}
-`);
- return {
- events: [],
- lastBlock: this.deployedBlock
- };
- }
- const savedEvents = yield loadSavedEvents({
- name: this.getInstanceName(),
- userDirectory: this.userDirectory,
- deployedBlock: this.deployedBlock
- });
- console.log(`Updating events for ${this.netId} chain registry events
-`);
- console.log(`savedEvents count - ${savedEvents.events.length}`);
- console.log(`savedEvents lastBlock - ${savedEvents.lastBlock}
-`);
- return savedEvents;
- });
- }
- getEventsFromCache() {
- return __async$9(this, null, function* () {
- if (!this.cacheDirectory) {
- console.log(`cachedEvents count - ${0}`);
- console.log(`cachedEvents lastBlock - ${this.deployedBlock}
-`);
- return {
- events: [],
- lastBlock: this.deployedBlock
- };
- }
- const cachedEvents = yield loadCachedEvents({
- name: this.getInstanceName(),
- cacheDirectory: this.cacheDirectory,
- deployedBlock: this.deployedBlock
- });
- console.log(`cachedEvents count - ${cachedEvents.events.length}`);
- console.log(`cachedEvents lastBlock - ${cachedEvents.lastBlock}
-`);
- return cachedEvents;
- });
- }
- saveEvents(_0) {
- return __async$9(this, arguments, function* ({ events, lastBlock }) {
- const instanceName = this.getInstanceName();
- console.log("\ntotalEvents count - ", events.length);
- console.log(
- `totalEvents lastBlock - ${events[events.length - 1] ? events[events.length - 1].blockNumber : lastBlock}
-`
- );
- const eventTable = new cli_table3();
- eventTable.push(
- [{ colSpan: 2, content: "Registered Relayers", hAlign: "center" }],
- ["Network", `${this.netId} chain`],
- ["Events", `${events.length} events`],
- [{ colSpan: 2, content: "Latest events" }],
- ...events.slice(events.length - 10).reverse().map(({ blockNumber }, index) => {
- const eventIndex = events.length - index;
- return [eventIndex, blockNumber];
- })
- );
- console.log(eventTable.toString() + "\n");
- if (this.userDirectory) {
- yield saveUserFile({
- fileName: instanceName + ".json",
- userDirectory: this.userDirectory,
- dataString: JSON.stringify(events, null, 2) + "\n"
- });
- }
- });
- }
-}
-
-var __defProp$2 = Object.defineProperty;
-var __getOwnPropSymbols$2 = Object.getOwnPropertySymbols;
-var __hasOwnProp$2 = Object.prototype.hasOwnProperty;
-var __propIsEnum$2 = Object.prototype.propertyIsEnumerable;
-var __defNormalProp$2 = (obj, key, value) => key in obj ? __defProp$2(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value;
-var __spreadValues$2 = (a, b) => {
+var __defProp$1 = Object.defineProperty;
+var __getOwnPropSymbols$1 = Object.getOwnPropertySymbols;
+var __hasOwnProp$1 = Object.prototype.hasOwnProperty;
+var __propIsEnum$1 = Object.prototype.propertyIsEnumerable;
+var __defNormalProp$1 = (obj, key, value) => key in obj ? __defProp$1(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value;
+var __spreadValues$1 = (a, b) => {
for (var prop in b || (b = {}))
- if (__hasOwnProp$2.call(b, prop))
- __defNormalProp$2(a, prop, b[prop]);
- if (__getOwnPropSymbols$2)
- for (var prop of __getOwnPropSymbols$2(b)) {
- if (__propIsEnum$2.call(b, prop))
- __defNormalProp$2(a, prop, b[prop]);
+ if (__hasOwnProp$1.call(b, prop))
+ __defNormalProp$1(a, prop, b[prop]);
+ if (__getOwnPropSymbols$1)
+ for (var prop of __getOwnPropSymbols$1(b)) {
+ if (__propIsEnum$1.call(b, prop))
+ __defNormalProp$1(a, prop, b[prop]);
}
return a;
};
@@ -191705,10 +181434,10 @@ function addNetwork(newConfig) {
enabledChains.push(
...Object.keys(newConfig).map((netId) => Number(netId)).filter((netId) => !enabledChains.includes(netId))
);
- customConfig = __spreadValues$2(__spreadValues$2({}, customConfig), newConfig);
+ customConfig = __spreadValues$1(__spreadValues$1({}, customConfig), newConfig);
}
function getNetworkConfig() {
- const allConfig = __spreadValues$2(__spreadValues$2({}, defaultConfig), customConfig);
+ const allConfig = __spreadValues$1(__spreadValues$1({}, defaultConfig), customConfig);
return enabledChains.reduce((acc, curr) => {
acc[curr] = allConfig[curr];
return acc;
@@ -191865,7 +181594,7 @@ dist_ajv.addKeyword({
errors: true
});
-var __async$8 = (__this, __arguments, generator) => {
+var __async$7 = (__this, __arguments, generator) => {
return new Promise((resolve, reject) => {
var fulfilled = (value) => {
try {
@@ -191890,13 +181619,13 @@ class Pedersen {
this.pedersenPromise = this.initPedersen();
}
initPedersen() {
- return __async$8(this, null, function* () {
+ return __async$7(this, null, function* () {
this.pedersenHash = yield pedersen_hash_buildPedersenHash();
this.babyJub = this.pedersenHash.babyJub;
});
}
unpackPoint(buffer) {
- return __async$8(this, null, function* () {
+ return __async$7(this, null, function* () {
var _a, _b;
yield this.pedersenPromise;
return (_b = this.babyJub) == null ? void 0 : _b.unpackPoint((_a = this.pedersenHash) == null ? void 0 : _a.hash(buffer));
@@ -191909,13 +181638,13 @@ class Pedersen {
}
const pedersen = new Pedersen();
function buffPedersenHash(buffer) {
- return __async$8(this, null, function* () {
+ return __async$7(this, null, function* () {
const [hash] = yield pedersen.unpackPoint(buffer);
return pedersen.toStringBuffer(hash);
});
}
-var __async$7 = (__this, __arguments, generator) => {
+var __async$6 = (__this, __arguments, generator) => {
return new Promise((resolve, reject) => {
var fulfilled = (value) => {
try {
@@ -191936,7 +181665,7 @@ var __async$7 = (__this, __arguments, generator) => {
});
};
function createDeposit(_0) {
- return __async$7(this, arguments, function* ({ nullifier, secret }) {
+ return __async$6(this, arguments, function* ({ nullifier, secret }) {
const preimage = new Uint8Array([...dist_leInt2Buff(nullifier), ...dist_leInt2Buff(secret)]);
const noteHex = toFixedHex(bytesToBN(preimage), 62);
const commitment = BigInt(yield buffPedersenHash(preimage));
@@ -191996,7 +181725,7 @@ class Deposit {
);
}
static createNote(_0) {
- return __async$7(this, arguments, function* ({ currency, amount, netId, nullifier, secret }) {
+ return __async$6(this, arguments, function* ({ currency, amount, netId, nullifier, secret }) {
if (!nullifier) {
nullifier = rBigInt(31);
}
@@ -192023,7 +181752,7 @@ class Deposit {
});
}
static parseNote(noteString) {
- return __async$7(this, null, function* () {
+ return __async$6(this, null, function* () {
const noteRegex = new RegExp("tornado-(?\\w+)-(?[\\d.]+)-(?\\d+)-0x(?[0-9a-fA-F]{124})", "g");
const match = noteRegex.exec(noteString);
if (!match) {
@@ -192115,7 +181844,7 @@ class NoteAccount {
this.blockNumber = blockNumber;
this.recoveryKey = recoveryKey;
this.recoveryAddress = computeAddress("0x" + recoveryKey);
- this.recoveryPublicKey = (0,eth_sig_util_dist.getEncryptionPublicKey)(recoveryKey);
+ this.recoveryPublicKey = (0,dist.getEncryptionPublicKey)(recoveryKey);
this.Echoer = Echoer2;
}
/**
@@ -192127,13 +181856,13 @@ class NoteAccount {
if (privateKey.startsWith("0x")) {
privateKey = privateKey.replace("0x", "");
}
- return (0,eth_sig_util_dist.getEncryptionPublicKey)(privateKey);
+ return (0,dist.getEncryptionPublicKey)(privateKey);
}
// This function intends to provide an encrypted value of recoveryKey for an on-chain Echoer backup purpose
// Thus, the pubKey should be derived by a Wallet instance or from Web3 wallets
// pubKey: base64 encoded 32 bytes key from https://docs.metamask.io/wallet/reference/eth_getencryptionpublickey/
getEncryptedAccount(walletPublicKey) {
- const encryptedData = (0,eth_sig_util_dist.encrypt)({
+ const encryptedData = (0,dist.encrypt)({
publicKey: walletPublicKey,
data: this.recoveryKey,
version: "x25519-xsalsa20-poly1305"
@@ -192160,7 +181889,7 @@ class NoteAccount {
for (const event of events) {
try {
const unpackedMessage = unpackEncryptedMessage(event.encryptedAccount);
- const recoveryKey = (0,eth_sig_util_dist.decrypt)({
+ const recoveryKey = (0,dist.decrypt)({
encryptedData: unpackedMessage,
privateKey
});
@@ -192183,7 +181912,7 @@ class NoteAccount {
for (const event of events) {
try {
const unpackedMessage = unpackEncryptedMessage(event.encryptedNote);
- const [address, noteHex] = (0,eth_sig_util_dist.decrypt)({
+ const [address, noteHex] = (0,dist.decrypt)({
encryptedData: unpackedMessage,
privateKey: this.recoveryKey
}).split("-");
@@ -192199,7 +181928,7 @@ class NoteAccount {
return decryptedEvents;
}
encryptNote({ address, noteHex }) {
- const encryptedData = (0,eth_sig_util_dist.encrypt)({
+ const encryptedData = (0,dist.encrypt)({
publicKey: this.recoveryPublicKey,
data: `${address}-${noteHex}`,
version: "x25519-xsalsa20-poly1305"
@@ -192283,7 +182012,7 @@ class TornadoFeeOracle {
}
}
-var __async$6 = (__this, __arguments, generator) => {
+var __async$5 = (__this, __arguments, generator) => {
return new Promise((resolve, reject) => {
var fulfilled = (value) => {
try {
@@ -192308,7 +182037,7 @@ class Mimc {
this.mimcPromise = this.initMimc();
}
initMimc() {
- return __async$6(this, null, function* () {
+ return __async$5(this, null, function* () {
this.sponge = yield mimcsponge_buildMimcSponge();
this.hash = (left, right) => {
var _a, _b;
@@ -192317,7 +182046,7 @@ class Mimc {
});
}
getHash() {
- return __async$6(this, null, function* () {
+ return __async$5(this, null, function* () {
yield this.mimcPromise;
return {
sponge: this.sponge,
@@ -192328,7 +182057,7 @@ class Mimc {
}
const mimc = new Mimc();
-var __async$5 = (__this, __arguments, generator) => {
+var __async$4 = (__this, __arguments, generator) => {
return new Promise((resolve, reject) => {
var fulfilled = (value) => {
try {
@@ -192371,7 +182100,7 @@ class MerkleTreeService {
this.merkleWorkerPath = merkleWorkerPath;
}
createTree(events) {
- return __async$5(this, null, function* () {
+ return __async$4(this, null, function* () {
const { hash: hashFunction } = yield mimc.getHash();
if (this.merkleWorkerPath) {
console.log("Using merkleWorker\n");
@@ -192423,7 +182152,7 @@ class MerkleTreeService {
});
}
createPartialTree(_0) {
- return __async$5(this, arguments, function* ({ edge, elements }) {
+ return __async$4(this, arguments, function* ({ edge, elements }) {
const { hash: hashFunction } = yield mimc.getHash();
if (this.merkleWorkerPath) {
console.log("Using merkleWorker\n");
@@ -192477,7 +182206,7 @@ class MerkleTreeService {
});
}
verifyTree(events) {
- return __async$5(this, null, function* () {
+ return __async$4(this, null, function* () {
console.log(
`
Creating deposit tree for ${this.netId} ${this.amount} ${this.currency.toUpperCase()} would take a while
@@ -192497,72 +182226,7 @@ Creating deposit tree for ${this.netId} ${this.amount} ${this.currency.toUpperCa
}
}
-function parseNumber(value) {
- if (!value || isNaN(Number(value))) {
- throw new InvalidArgumentError("Invalid Number");
- }
- return Number(value);
-}
-function parseUrl(value) {
- if (!value || !validateUrl(value, ["http:", "https:"])) {
- throw new InvalidArgumentError("Invalid URL");
- }
- return value;
-}
-function parseRelayer(value) {
- if (!value || !(value.endsWith(".eth") || validateUrl(value, ["http:", "https:"]))) {
- throw new InvalidArgumentError("Invalid Relayer ETH address or URL");
- }
- return value;
-}
-function parseAddress(value) {
- if (!value) {
- throw new InvalidArgumentError("Invalid Address");
- }
- try {
- return address_getAddress(value);
- } catch (e) {
- throw new InvalidArgumentError("Invalid Address");
- }
-}
-function parseMnemonic(value) {
- if (!value) {
- throw new InvalidArgumentError("Invalid Mnemonic");
- }
- try {
- Mnemonic.fromPhrase(value);
- } catch (e) {
- throw new InvalidArgumentError("Invalid Mnemonic");
- }
- return value;
-}
-function parseKey(value) {
- if (!value) {
- throw new InvalidArgumentError("Invalid Private Key");
- }
- if (value.length === 64) {
- value = "0x" + value;
- }
- try {
- computeAddress(value);
- } catch (e) {
- throw new InvalidArgumentError("Invalid Private Key");
- }
- return value;
-}
-function parseRecoveryKey(value) {
- if (!value) {
- throw new InvalidArgumentError("Invalid Recovery Key");
- }
- try {
- computeAddress("0x" + value);
- } catch (e) {
- throw new InvalidArgumentError("Invalid Recovery Key");
- }
- return value;
-}
-
-var __async$4 = (__this, __arguments, generator) => {
+var __async$3 = (__this, __arguments, generator) => {
return new Promise((resolve, reject) => {
var fulfilled = (value) => {
try {
@@ -192589,7 +182253,7 @@ class TokenPriceOracle {
this.oracle = oracle;
}
fetchPrices(tokens) {
- return __async$4(this, null, function* () {
+ return __async$3(this, null, function* () {
if (!this.oracle) {
return new Promise((resolve) => resolve(tokens.map(() => parseEther("0.0001"))));
}
@@ -192608,26 +182272,26 @@ class TokenPriceOracle {
}
}
-var __defProp$1 = Object.defineProperty;
-var __defProps$1 = Object.defineProperties;
-var __getOwnPropDescs$1 = Object.getOwnPropertyDescriptors;
-var __getOwnPropSymbols$1 = Object.getOwnPropertySymbols;
-var __hasOwnProp$1 = Object.prototype.hasOwnProperty;
-var __propIsEnum$1 = Object.prototype.propertyIsEnumerable;
-var __defNormalProp$1 = (obj, key, value) => key in obj ? __defProp$1(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value;
-var __spreadValues$1 = (a, b) => {
+var __defProp = Object.defineProperty;
+var __defProps = Object.defineProperties;
+var __getOwnPropDescs = Object.getOwnPropertyDescriptors;
+var __getOwnPropSymbols = Object.getOwnPropertySymbols;
+var __hasOwnProp = Object.prototype.hasOwnProperty;
+var __propIsEnum = Object.prototype.propertyIsEnumerable;
+var __defNormalProp = (obj, key, value) => key in obj ? __defProp(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value;
+var __spreadValues = (a, b) => {
for (var prop in b || (b = {}))
- if (__hasOwnProp$1.call(b, prop))
- __defNormalProp$1(a, prop, b[prop]);
- if (__getOwnPropSymbols$1)
- for (var prop of __getOwnPropSymbols$1(b)) {
- if (__propIsEnum$1.call(b, prop))
- __defNormalProp$1(a, prop, b[prop]);
+ if (__hasOwnProp.call(b, prop))
+ __defNormalProp(a, prop, b[prop]);
+ if (__getOwnPropSymbols)
+ for (var prop of __getOwnPropSymbols(b)) {
+ if (__propIsEnum.call(b, prop))
+ __defNormalProp(a, prop, b[prop]);
}
return a;
};
-var __spreadProps$1 = (a, b) => __defProps$1(a, __getOwnPropDescs$1(b));
-var __async$3 = (__this, __arguments, generator) => {
+var __spreadProps = (a, b) => __defProps(a, __getOwnPropDescs(b));
+var __async$2 = (__this, __arguments, generator) => {
return new Promise((resolve, reject) => {
var fulfilled = (value) => {
try {
@@ -192709,13 +182373,13 @@ class RelayerClient {
this.fetchDataOptions = fetchDataOptions2;
}
askRelayerStatus(_0) {
- return __async$3(this, arguments, function* ({
+ return __async$2(this, arguments, function* ({
hostname,
relayerAddress
}) {
var _a, _b;
const url = `https://${!hostname.endsWith("/") ? hostname + "/" : hostname}`;
- const rawStatus = yield fetchData(`${url}status`, __spreadProps$1(__spreadValues$1({}, this.fetchDataOptions), {
+ const rawStatus = yield fetchData(`${url}status`, __spreadProps(__spreadValues({}, this.fetchDataOptions), {
headers: {
"Content-Type": "application/json, application/x-www-form-urlencoded"
},
@@ -192726,7 +182390,7 @@ class RelayerClient {
if (!statusValidator(rawStatus)) {
throw new Error("Invalid status schema");
}
- const status = __spreadProps$1(__spreadValues$1({}, rawStatus), {
+ const status = __spreadProps(__spreadValues({}, rawStatus), {
url
});
if (status.currentQueue > 5) {
@@ -192745,7 +182409,7 @@ class RelayerClient {
});
}
filterRelayer(curr, relayer, subdomains, debugRelayer = false) {
- return __async$3(this, null, function* () {
+ return __async$2(this, null, function* () {
var _a;
const { relayerEnsSubdomain } = this.config;
const subdomainIndex = subdomains.indexOf(relayerEnsSubdomain);
@@ -192798,7 +182462,7 @@ class RelayerClient {
});
}
getValidRelayers(relayers, subdomains, debugRelayer = false) {
- return __async$3(this, null, function* () {
+ return __async$2(this, null, function* () {
const relayersSet = /* @__PURE__ */ new Set();
const uniqueRelayers = relayers.reverse().filter(({ ensName }) => {
if (!relayersSet.has(ensName)) {
@@ -192829,9 +182493,9 @@ class RelayerClient {
return pickWeightedRandomRelayer(relayers, this.netId);
}
tornadoWithdraw(_0) {
- return __async$3(this, arguments, function* ({ contract, proof, args }) {
+ return __async$2(this, arguments, function* ({ contract, proof, args }) {
const { url } = this.selectedRelayer;
- const withdrawResponse = yield fetchData(`${url}v1/tornadoWithdraw`, __spreadProps$1(__spreadValues$1({}, this.fetchDataOptions), {
+ const withdrawResponse = yield fetchData(`${url}v1/tornadoWithdraw`, __spreadProps(__spreadValues({}, this.fetchDataOptions), {
method: "POST",
headers: {
"Content-Type": "application/json"
@@ -192851,7 +182515,7 @@ class RelayerClient {
console.log(`Job submitted: ${jobUrl}
`);
while (!relayerStatus || !["FAILED", "CONFIRMED"].includes(relayerStatus)) {
- const jobResponse = yield fetchData(jobUrl, __spreadProps$1(__spreadValues$1({}, this.fetchDataOptions), {
+ const jobResponse = yield fetchData(jobUrl, __spreadProps(__spreadValues({}, this.fetchDataOptions), {
method: "GET",
headers: {
"Content-Type": "application/json"
@@ -192891,7 +182555,7 @@ class RelayerClient {
}
}
-var __async$2 = (__this, __arguments, generator) => {
+var __async$1 = (__this, __arguments, generator) => {
return new Promise((resolve, reject) => {
var fulfilled = (value) => {
try {
@@ -192912,7 +182576,7 @@ var __async$2 = (__this, __arguments, generator) => {
});
};
function getTokenBalances(_0) {
- return __async$2(this, arguments, function* ({
+ return __async$1(this, arguments, function* ({
provider,
Multicall: Multicall2,
currencyName,
@@ -192975,120 +182639,6 @@ function getTokenBalances(_0) {
});
}
-var __defProp = Object.defineProperty;
-var __defProps = Object.defineProperties;
-var __getOwnPropDescs = Object.getOwnPropertyDescriptors;
-var __getOwnPropSymbols = Object.getOwnPropertySymbols;
-var __hasOwnProp = Object.prototype.hasOwnProperty;
-var __propIsEnum = Object.prototype.propertyIsEnumerable;
-var __defNormalProp = (obj, key, value) => key in obj ? __defProp(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value;
-var __spreadValues = (a, b) => {
- for (var prop in b || (b = {}))
- if (__hasOwnProp.call(b, prop))
- __defNormalProp(a, prop, b[prop]);
- if (__getOwnPropSymbols)
- for (var prop of __getOwnPropSymbols(b)) {
- if (__propIsEnum.call(b, prop))
- __defNormalProp(a, prop, b[prop]);
- }
- return a;
-};
-var __spreadProps = (a, b) => __defProps(a, __getOwnPropDescs(b));
-var __objRest = (source, exclude) => {
- var target = {};
- for (var prop in source)
- if (__hasOwnProp.call(source, prop) && exclude.indexOf(prop) < 0)
- target[prop] = source[prop];
- if (source != null && __getOwnPropSymbols)
- for (var prop of __getOwnPropSymbols(source)) {
- if (exclude.indexOf(prop) < 0 && __propIsEnum.call(source, prop))
- target[prop] = source[prop];
- }
- return target;
-};
-var __async$1 = (__this, __arguments, generator) => {
- return new Promise((resolve, reject) => {
- var fulfilled = (value) => {
- try {
- step(generator.next(value));
- } catch (e) {
- reject(e);
- }
- };
- var rejected = (value) => {
- try {
- step(generator.throw(value));
- } catch (e) {
- reject(e);
- }
- };
- var step = (x) => x.done ? resolve(x.value) : Promise.resolve(x.value).then(fulfilled, rejected);
- step((generator = generator.apply(__this, __arguments)).next());
- });
-};
-class TreeCache {
- constructor({ netId, amount, currency, userDirectory, PARTS_COUNT = 4 }) {
- this.netId = netId;
- this.amount = amount;
- this.currency = currency;
- this.userDirectory = userDirectory;
- this.PARTS_COUNT = PARTS_COUNT;
- }
- getInstanceName() {
- return `deposits_${this.netId}_${this.currency}_${this.amount}`;
- }
- createTree(events, tree) {
- return __async$1(this, null, function* () {
- const bloom = new bloomfilter_js(events.length);
- console.log(`Creating cached tree for ${this.getInstanceName()}
-`);
- const eventsData = events.reduce(
- (acc, _a, i) => {
- var _b = _a, { leafIndex, commitment } = _b, rest = __objRest(_b, ["leafIndex", "commitment"]);
- if (leafIndex !== i) {
- throw new Error(`leafIndex (${leafIndex}) !== i (${i})`);
- }
- acc[commitment] = __spreadProps(__spreadValues({}, rest), { leafIndex });
- return acc;
- },
- {}
- );
- const slices = tree.getTreeSlices(this.PARTS_COUNT);
- yield Promise.all(
- slices.map((slice, index) => __async$1(this, null, function* () {
- const metadata = slice.elements.reduce((acc, curr) => {
- if (index < this.PARTS_COUNT - 1) {
- bloom.add(curr);
- }
- acc.push(eventsData[curr]);
- return acc;
- }, []);
- const dataString2 = JSON.stringify(
- __spreadProps(__spreadValues({}, slice), {
- metadata
- }),
- null,
- 2
- ) + "\n";
- const fileName2 = `${this.getInstanceName()}_slice${index + 1}.json`;
- yield saveUserFile({
- fileName: fileName2,
- userDirectory: this.userDirectory,
- dataString: dataString2
- });
- }))
- );
- const dataString = bloom.serialize() + "\n";
- const fileName = `${this.getInstanceName()}_bloom.json`;
- yield saveUserFile({
- fileName,
- userDirectory: this.userDirectory,
- dataString
- });
- });
- }
-}
-
var __async = (__this, __arguments, generator) => {
return new Promise((resolve, reject) => {
var fulfilled = (value) => {
@@ -193154,8 +182704,3718 @@ function calculateSnarkProof(input, circuit, provingKey) {
;// CONCATENATED MODULE: ./package.json
-const package_namespaceObject = /*#__PURE__*/JSON.parse('{"name":"tornado-cli","version":"1.0.3-alpha","description":"Modern Toolsets for Privacy Pools on Ethereum"}');
+const package_namespaceObject = /*#__PURE__*/JSON.parse('{"version":"1.0.3-alpha","description":"Modern Toolsets for Privacy Pools on Ethereum"}');
var package_namespaceObject_0 = /*#__PURE__*/__webpack_require__.t(package_namespaceObject, 2);
+;// CONCATENATED MODULE: external "module"
+const external_module_namespaceObject = require("module");
+;// CONCATENATED MODULE: ./node_modules/fflate/esm/index.mjs
+
+var esm_require = (0,external_module_namespaceObject.createRequire)('/');
+// DEFLATE is a complex format; to read this code, you should probably check the RFC first:
+// https://tools.ietf.org/html/rfc1951
+// You may also wish to take a look at the guide I made about this program:
+// https://gist.github.com/101arrowz/253f31eb5abc3d9275ab943003ffecad
+// Some of the following code is similar to that of UZIP.js:
+// https://github.com/photopea/UZIP.js
+// However, the vast majority of the codebase has diverged from UZIP.js to increase performance and reduce bundle size.
+// Sometimes 0 will appear where -1 would be more appropriate. This is because using a uint
+// is better for memory in most engines (I *think*).
+// Mediocre shim
+var esm_Worker;
+var workerAdd = ";var __w=require('worker_threads');__w.parentPort.on('message',function(m){onmessage({data:m})}),postMessage=function(m,t){__w.parentPort.postMessage(m,t)},close=process.exit;self=global";
+try {
+ esm_Worker = esm_require('worker_threads').Worker;
+}
+catch (e) {
+}
+var wk = esm_Worker ? function (c, _, msg, transfer, cb) {
+ var done = false;
+ var w = new esm_Worker(c + workerAdd, { eval: true })
+ .on('error', function (e) { return cb(e, null); })
+ .on('message', function (m) { return cb(null, m); })
+ .on('exit', function (c) {
+ if (c && !done)
+ cb(new Error('exited with code ' + c), null);
+ });
+ w.postMessage(msg, transfer);
+ w.terminate = function () {
+ done = true;
+ return esm_Worker.prototype.terminate.call(w);
+ };
+ return w;
+} : function (_, __, ___, ____, cb) {
+ setImmediate(function () { return cb(new Error('async operations unsupported - update to Node 12+ (or Node 10-11 with the --experimental-worker CLI flag)'), null); });
+ var NOP = function () { };
+ return {
+ terminate: NOP,
+ postMessage: NOP
+ };
+};
+
+// aliases for shorter compressed code (most minifers don't do this)
+var esm_u8 = Uint8Array, u16 = Uint16Array, i32 = Int32Array;
+// fixed length extra bits
+var fleb = new esm_u8([0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 2, 2, 2, 2, 3, 3, 3, 3, 4, 4, 4, 4, 5, 5, 5, 5, 0, /* unused */ 0, 0, /* impossible */ 0]);
+// fixed distance extra bits
+var fdeb = new esm_u8([0, 0, 0, 0, 1, 1, 2, 2, 3, 3, 4, 4, 5, 5, 6, 6, 7, 7, 8, 8, 9, 9, 10, 10, 11, 11, 12, 12, 13, 13, /* unused */ 0, 0]);
+// code length index map
+var clim = new esm_u8([16, 17, 18, 0, 8, 7, 9, 6, 10, 5, 11, 4, 12, 3, 13, 2, 14, 1, 15]);
+// get base, reverse index map from extra bits
+var freb = function (eb, start) {
+ var b = new u16(31);
+ for (var i = 0; i < 31; ++i) {
+ b[i] = start += 1 << eb[i - 1];
+ }
+ // numbers here are at max 18 bits
+ var r = new i32(b[30]);
+ for (var i = 1; i < 30; ++i) {
+ for (var j = b[i]; j < b[i + 1]; ++j) {
+ r[j] = ((j - b[i]) << 5) | i;
+ }
+ }
+ return { b: b, r: r };
+};
+var _a = freb(fleb, 2), fl = _a.b, revfl = _a.r;
+// we can ignore the fact that the other numbers are wrong; they never happen anyway
+fl[28] = 258, revfl[258] = 28;
+var _b = freb(fdeb, 0), fd = _b.b, revfd = _b.r;
+// map of value to reverse (assuming 16 bits)
+var esm_rev = new u16(32768);
+for (var i = 0; i < 32768; ++i) {
+ // reverse table algorithm from SO
+ var x = ((i & 0xAAAA) >> 1) | ((i & 0x5555) << 1);
+ x = ((x & 0xCCCC) >> 2) | ((x & 0x3333) << 2);
+ x = ((x & 0xF0F0) >> 4) | ((x & 0x0F0F) << 4);
+ esm_rev[i] = (((x & 0xFF00) >> 8) | ((x & 0x00FF) << 8)) >> 1;
+}
+// create huffman tree from u8 "map": index -> code length for code index
+// mb (max bits) must be at most 15
+// TODO: optimize/split up?
+var hMap = (function (cd, mb, r) {
+ var s = cd.length;
+ // index
+ var i = 0;
+ // u16 "map": index -> # of codes with bit length = index
+ var l = new u16(mb);
+ // length of cd must be 288 (total # of codes)
+ for (; i < s; ++i) {
+ if (cd[i])
+ ++l[cd[i] - 1];
+ }
+ // u16 "map": index -> minimum code for bit length = index
+ var le = new u16(mb);
+ for (i = 1; i < mb; ++i) {
+ le[i] = (le[i - 1] + l[i - 1]) << 1;
+ }
+ var co;
+ if (r) {
+ // u16 "map": index -> number of actual bits, symbol for code
+ co = new u16(1 << mb);
+ // bits to remove for reverser
+ var rvb = 15 - mb;
+ for (i = 0; i < s; ++i) {
+ // ignore 0 lengths
+ if (cd[i]) {
+ // num encoding both symbol and bits read
+ var sv = (i << 4) | cd[i];
+ // free bits
+ var r_1 = mb - cd[i];
+ // start value
+ var v = le[cd[i] - 1]++ << r_1;
+ // m is end value
+ for (var m = v | ((1 << r_1) - 1); v <= m; ++v) {
+ // every 16 bit value starting with the code yields the same result
+ co[esm_rev[v] >> rvb] = sv;
+ }
+ }
+ }
+ }
+ else {
+ co = new u16(s);
+ for (i = 0; i < s; ++i) {
+ if (cd[i]) {
+ co[i] = esm_rev[le[cd[i] - 1]++] >> (15 - cd[i]);
+ }
+ }
+ }
+ return co;
+});
+// fixed length tree
+var flt = new esm_u8(288);
+for (var i = 0; i < 144; ++i)
+ flt[i] = 8;
+for (var i = 144; i < 256; ++i)
+ flt[i] = 9;
+for (var i = 256; i < 280; ++i)
+ flt[i] = 7;
+for (var i = 280; i < 288; ++i)
+ flt[i] = 8;
+// fixed distance tree
+var fdt = new esm_u8(32);
+for (var i = 0; i < 32; ++i)
+ fdt[i] = 5;
+// fixed length map
+var flm = /*#__PURE__*/ hMap(flt, 9, 0), flrm = /*#__PURE__*/ hMap(flt, 9, 1);
+// fixed distance map
+var fdm = /*#__PURE__*/ hMap(fdt, 5, 0), fdrm = /*#__PURE__*/ hMap(fdt, 5, 1);
+// find max of array
+var max = function (a) {
+ var m = a[0];
+ for (var i = 1; i < a.length; ++i) {
+ if (a[i] > m)
+ m = a[i];
+ }
+ return m;
+};
+// read d, starting at bit p and mask with m
+var esm_bits = function (d, p, m) {
+ var o = (p / 8) | 0;
+ return ((d[o] | (d[o + 1] << 8)) >> (p & 7)) & m;
+};
+// read d, starting at bit p continuing for at least 16 bits
+var bits16 = function (d, p) {
+ var o = (p / 8) | 0;
+ return ((d[o] | (d[o + 1] << 8) | (d[o + 2] << 16)) >> (p & 7));
+};
+// get end of byte
+var shft = function (p) { return ((p + 7) / 8) | 0; };
+// typed array slice - allows garbage collector to free original reference,
+// while being more compatible than .slice
+var slc = function (v, s, e) {
+ if (s == null || s < 0)
+ s = 0;
+ if (e == null || e > v.length)
+ e = v.length;
+ // can't use .constructor in case user-supplied
+ return new esm_u8(v.subarray(s, e));
+};
+/**
+ * Codes for errors generated within this library
+ */
+var FlateErrorCode = {
+ UnexpectedEOF: 0,
+ InvalidBlockType: 1,
+ InvalidLengthLiteral: 2,
+ InvalidDistance: 3,
+ StreamFinished: 4,
+ NoStreamHandler: 5,
+ InvalidHeader: 6,
+ NoCallback: 7,
+ InvalidUTF8: 8,
+ ExtraFieldTooLong: 9,
+ InvalidDate: 10,
+ FilenameTooLong: 11,
+ StreamFinishing: 12,
+ InvalidZipData: 13,
+ UnknownCompressionMethod: 14
+};
+// error codes
+var ec = [
+ 'unexpected EOF',
+ 'invalid block type',
+ 'invalid length/literal',
+ 'invalid distance',
+ 'stream finished',
+ 'no stream handler',
+ ,
+ 'no callback',
+ 'invalid UTF-8 data',
+ 'extra field too long',
+ 'date not in range 1980-2099',
+ 'filename too long',
+ 'stream finishing',
+ 'invalid zip data'
+ // determined by unknown compression method
+];
+;
+var err = function (ind, msg, nt) {
+ var e = new Error(msg || ec[ind]);
+ e.code = ind;
+ if (Error.captureStackTrace)
+ Error.captureStackTrace(e, err);
+ if (!nt)
+ throw e;
+ return e;
+};
+// expands raw DEFLATE data
+var inflt = function (dat, st, buf, dict) {
+ // source length dict length
+ var sl = dat.length, dl = dict ? dict.length : 0;
+ if (!sl || st.f && !st.l)
+ return buf || new esm_u8(0);
+ var noBuf = !buf;
+ // have to estimate size
+ var resize = noBuf || st.i != 2;
+ // no state
+ var noSt = st.i;
+ // Assumes roughly 33% compression ratio average
+ if (noBuf)
+ buf = new esm_u8(sl * 3);
+ // ensure buffer can fit at least l elements
+ var cbuf = function (l) {
+ var bl = buf.length;
+ // need to increase size to fit
+ if (l > bl) {
+ // Double or set to necessary, whichever is greater
+ var nbuf = new esm_u8(Math.max(bl * 2, l));
+ nbuf.set(buf);
+ buf = nbuf;
+ }
+ };
+ // last chunk bitpos bytes
+ var final = st.f || 0, pos = st.p || 0, bt = st.b || 0, lm = st.l, dm = st.d, lbt = st.m, dbt = st.n;
+ // total bits
+ var tbts = sl * 8;
+ do {
+ if (!lm) {
+ // BFINAL - this is only 1 when last chunk is next
+ final = esm_bits(dat, pos, 1);
+ // type: 0 = no compression, 1 = fixed huffman, 2 = dynamic huffman
+ var type = esm_bits(dat, pos + 1, 3);
+ pos += 3;
+ if (!type) {
+ // go to end of byte boundary
+ var s = shft(pos) + 4, l = dat[s - 4] | (dat[s - 3] << 8), t = s + l;
+ if (t > sl) {
+ if (noSt)
+ err(0);
+ break;
+ }
+ // ensure size
+ if (resize)
+ cbuf(bt + l);
+ // Copy over uncompressed data
+ buf.set(dat.subarray(s, t), bt);
+ // Get new bitpos, update byte count
+ st.b = bt += l, st.p = pos = t * 8, st.f = final;
+ continue;
+ }
+ else if (type == 1)
+ lm = flrm, dm = fdrm, lbt = 9, dbt = 5;
+ else if (type == 2) {
+ // literal lengths
+ var hLit = esm_bits(dat, pos, 31) + 257, hcLen = esm_bits(dat, pos + 10, 15) + 4;
+ var tl = hLit + esm_bits(dat, pos + 5, 31) + 1;
+ pos += 14;
+ // length+distance tree
+ var ldt = new esm_u8(tl);
+ // code length tree
+ var clt = new esm_u8(19);
+ for (var i = 0; i < hcLen; ++i) {
+ // use index map to get real code
+ clt[clim[i]] = esm_bits(dat, pos + i * 3, 7);
+ }
+ pos += hcLen * 3;
+ // code lengths bits
+ var clb = max(clt), clbmsk = (1 << clb) - 1;
+ // code lengths map
+ var clm = hMap(clt, clb, 1);
+ for (var i = 0; i < tl;) {
+ var r = clm[esm_bits(dat, pos, clbmsk)];
+ // bits read
+ pos += r & 15;
+ // symbol
+ var s = r >> 4;
+ // code length to copy
+ if (s < 16) {
+ ldt[i++] = s;
+ }
+ else {
+ // copy count
+ var c = 0, n = 0;
+ if (s == 16)
+ n = 3 + esm_bits(dat, pos, 3), pos += 2, c = ldt[i - 1];
+ else if (s == 17)
+ n = 3 + esm_bits(dat, pos, 7), pos += 3;
+ else if (s == 18)
+ n = 11 + esm_bits(dat, pos, 127), pos += 7;
+ while (n--)
+ ldt[i++] = c;
+ }
+ }
+ // length tree distance tree
+ var lt = ldt.subarray(0, hLit), dt = ldt.subarray(hLit);
+ // max length bits
+ lbt = max(lt);
+ // max dist bits
+ dbt = max(dt);
+ lm = hMap(lt, lbt, 1);
+ dm = hMap(dt, dbt, 1);
+ }
+ else
+ err(1);
+ if (pos > tbts) {
+ if (noSt)
+ err(0);
+ break;
+ }
+ }
+ // Make sure the buffer can hold this + the largest possible addition
+ // Maximum chunk size (practically, theoretically infinite) is 2^17
+ if (resize)
+ cbuf(bt + 131072);
+ var lms = (1 << lbt) - 1, dms = (1 << dbt) - 1;
+ var lpos = pos;
+ for (;; lpos = pos) {
+ // bits read, code
+ var c = lm[bits16(dat, pos) & lms], sym = c >> 4;
+ pos += c & 15;
+ if (pos > tbts) {
+ if (noSt)
+ err(0);
+ break;
+ }
+ if (!c)
+ err(2);
+ if (sym < 256)
+ buf[bt++] = sym;
+ else if (sym == 256) {
+ lpos = pos, lm = null;
+ break;
+ }
+ else {
+ var add = sym - 254;
+ // no extra bits needed if less
+ if (sym > 264) {
+ // index
+ var i = sym - 257, b = fleb[i];
+ add = esm_bits(dat, pos, (1 << b) - 1) + fl[i];
+ pos += b;
+ }
+ // dist
+ var d = dm[bits16(dat, pos) & dms], dsym = d >> 4;
+ if (!d)
+ err(3);
+ pos += d & 15;
+ var dt = fd[dsym];
+ if (dsym > 3) {
+ var b = fdeb[dsym];
+ dt += bits16(dat, pos) & (1 << b) - 1, pos += b;
+ }
+ if (pos > tbts) {
+ if (noSt)
+ err(0);
+ break;
+ }
+ if (resize)
+ cbuf(bt + 131072);
+ var end = bt + add;
+ if (bt < dt) {
+ var shift = dl - dt, dend = Math.min(dt, end);
+ if (shift + bt < 0)
+ err(3);
+ for (; bt < dend; ++bt)
+ buf[bt] = dict[shift + bt];
+ }
+ for (; bt < end; ++bt)
+ buf[bt] = buf[bt - dt];
+ }
+ }
+ st.l = lm, st.p = lpos, st.b = bt, st.f = final;
+ if (lm)
+ final = 1, st.m = lbt, st.d = dm, st.n = dbt;
+ } while (!final);
+ // don't reallocate for streams or user buffers
+ return bt != buf.length && noBuf ? slc(buf, 0, bt) : buf.subarray(0, bt);
+};
+// starting at p, write the minimum number of bits that can hold v to d
+var wbits = function (d, p, v) {
+ v <<= p & 7;
+ var o = (p / 8) | 0;
+ d[o] |= v;
+ d[o + 1] |= v >> 8;
+};
+// starting at p, write the minimum number of bits (>8) that can hold v to d
+var wbits16 = function (d, p, v) {
+ v <<= p & 7;
+ var o = (p / 8) | 0;
+ d[o] |= v;
+ d[o + 1] |= v >> 8;
+ d[o + 2] |= v >> 16;
+};
+// creates code lengths from a frequency table
+var hTree = function (d, mb) {
+ // Need extra info to make a tree
+ var t = [];
+ for (var i = 0; i < d.length; ++i) {
+ if (d[i])
+ t.push({ s: i, f: d[i] });
+ }
+ var s = t.length;
+ var t2 = t.slice();
+ if (!s)
+ return { t: et, l: 0 };
+ if (s == 1) {
+ var v = new esm_u8(t[0].s + 1);
+ v[t[0].s] = 1;
+ return { t: v, l: 1 };
+ }
+ t.sort(function (a, b) { return a.f - b.f; });
+ // after i2 reaches last ind, will be stopped
+ // freq must be greater than largest possible number of symbols
+ t.push({ s: -1, f: 25001 });
+ var l = t[0], r = t[1], i0 = 0, i1 = 1, i2 = 2;
+ t[0] = { s: -1, f: l.f + r.f, l: l, r: r };
+ // efficient algorithm from UZIP.js
+ // i0 is lookbehind, i2 is lookahead - after processing two low-freq
+ // symbols that combined have high freq, will start processing i2 (high-freq,
+ // non-composite) symbols instead
+ // see https://reddit.com/r/photopea/comments/ikekht/uzipjs_questions/
+ while (i1 != s - 1) {
+ l = t[t[i0].f < t[i2].f ? i0++ : i2++];
+ r = t[i0 != i1 && t[i0].f < t[i2].f ? i0++ : i2++];
+ t[i1++] = { s: -1, f: l.f + r.f, l: l, r: r };
+ }
+ var maxSym = t2[0].s;
+ for (var i = 1; i < s; ++i) {
+ if (t2[i].s > maxSym)
+ maxSym = t2[i].s;
+ }
+ // code lengths
+ var tr = new u16(maxSym + 1);
+ // max bits in tree
+ var mbt = ln(t[i1 - 1], tr, 0);
+ if (mbt > mb) {
+ // more algorithms from UZIP.js
+ // TODO: find out how this code works (debt)
+ // ind debt
+ var i = 0, dt = 0;
+ // left cost
+ var lft = mbt - mb, cst = 1 << lft;
+ t2.sort(function (a, b) { return tr[b.s] - tr[a.s] || a.f - b.f; });
+ for (; i < s; ++i) {
+ var i2_1 = t2[i].s;
+ if (tr[i2_1] > mb) {
+ dt += cst - (1 << (mbt - tr[i2_1]));
+ tr[i2_1] = mb;
+ }
+ else
+ break;
+ }
+ dt >>= lft;
+ while (dt > 0) {
+ var i2_2 = t2[i].s;
+ if (tr[i2_2] < mb)
+ dt -= 1 << (mb - tr[i2_2]++ - 1);
+ else
+ ++i;
+ }
+ for (; i >= 0 && dt; --i) {
+ var i2_3 = t2[i].s;
+ if (tr[i2_3] == mb) {
+ --tr[i2_3];
+ ++dt;
+ }
+ }
+ mbt = mb;
+ }
+ return { t: new esm_u8(tr), l: mbt };
+};
+// get the max length and assign length codes
+var ln = function (n, l, d) {
+ return n.s == -1
+ ? Math.max(ln(n.l, l, d + 1), ln(n.r, l, d + 1))
+ : (l[n.s] = d);
+};
+// length codes generation
+var lc = function (c) {
+ var s = c.length;
+ // Note that the semicolon was intentional
+ while (s && !c[--s])
+ ;
+ var cl = new u16(++s);
+ // ind num streak
+ var cli = 0, cln = c[0], cls = 1;
+ var w = function (v) { cl[cli++] = v; };
+ for (var i = 1; i <= s; ++i) {
+ if (c[i] == cln && i != s)
+ ++cls;
+ else {
+ if (!cln && cls > 2) {
+ for (; cls > 138; cls -= 138)
+ w(32754);
+ if (cls > 2) {
+ w(cls > 10 ? ((cls - 11) << 5) | 28690 : ((cls - 3) << 5) | 12305);
+ cls = 0;
+ }
+ }
+ else if (cls > 3) {
+ w(cln), --cls;
+ for (; cls > 6; cls -= 6)
+ w(8304);
+ if (cls > 2)
+ w(((cls - 3) << 5) | 8208), cls = 0;
+ }
+ while (cls--)
+ w(cln);
+ cls = 1;
+ cln = c[i];
+ }
+ }
+ return { c: cl.subarray(0, cli), n: s };
+};
+// calculate the length of output from tree, code lengths
+var clen = function (cf, cl) {
+ var l = 0;
+ for (var i = 0; i < cl.length; ++i)
+ l += cf[i] * cl[i];
+ return l;
+};
+// writes a fixed block
+// returns the new bit pos
+var wfblk = function (out, pos, dat) {
+ // no need to write 00 as type: TypedArray defaults to 0
+ var s = dat.length;
+ var o = shft(pos + 2);
+ out[o] = s & 255;
+ out[o + 1] = s >> 8;
+ out[o + 2] = out[o] ^ 255;
+ out[o + 3] = out[o + 1] ^ 255;
+ for (var i = 0; i < s; ++i)
+ out[o + i + 4] = dat[i];
+ return (o + 4 + s) * 8;
+};
+// writes a block
+var wblk = function (dat, out, final, syms, lf, df, eb, li, bs, bl, p) {
+ wbits(out, p++, final);
+ ++lf[256];
+ var _a = hTree(lf, 15), dlt = _a.t, mlb = _a.l;
+ var _b = hTree(df, 15), ddt = _b.t, mdb = _b.l;
+ var _c = lc(dlt), lclt = _c.c, nlc = _c.n;
+ var _d = lc(ddt), lcdt = _d.c, ndc = _d.n;
+ var lcfreq = new u16(19);
+ for (var i = 0; i < lclt.length; ++i)
+ ++lcfreq[lclt[i] & 31];
+ for (var i = 0; i < lcdt.length; ++i)
+ ++lcfreq[lcdt[i] & 31];
+ var _e = hTree(lcfreq, 7), lct = _e.t, mlcb = _e.l;
+ var nlcc = 19;
+ for (; nlcc > 4 && !lct[clim[nlcc - 1]]; --nlcc)
+ ;
+ var flen = (bl + 5) << 3;
+ var ftlen = clen(lf, flt) + clen(df, fdt) + eb;
+ var dtlen = clen(lf, dlt) + clen(df, ddt) + eb + 14 + 3 * nlcc + clen(lcfreq, lct) + 2 * lcfreq[16] + 3 * lcfreq[17] + 7 * lcfreq[18];
+ if (bs >= 0 && flen <= ftlen && flen <= dtlen)
+ return wfblk(out, p, dat.subarray(bs, bs + bl));
+ var lm, ll, dm, dl;
+ wbits(out, p, 1 + (dtlen < ftlen)), p += 2;
+ if (dtlen < ftlen) {
+ lm = hMap(dlt, mlb, 0), ll = dlt, dm = hMap(ddt, mdb, 0), dl = ddt;
+ var llm = hMap(lct, mlcb, 0);
+ wbits(out, p, nlc - 257);
+ wbits(out, p + 5, ndc - 1);
+ wbits(out, p + 10, nlcc - 4);
+ p += 14;
+ for (var i = 0; i < nlcc; ++i)
+ wbits(out, p + 3 * i, lct[clim[i]]);
+ p += 3 * nlcc;
+ var lcts = [lclt, lcdt];
+ for (var it = 0; it < 2; ++it) {
+ var clct = lcts[it];
+ for (var i = 0; i < clct.length; ++i) {
+ var len = clct[i] & 31;
+ wbits(out, p, llm[len]), p += lct[len];
+ if (len > 15)
+ wbits(out, p, (clct[i] >> 5) & 127), p += clct[i] >> 12;
+ }
+ }
+ }
+ else {
+ lm = flm, ll = flt, dm = fdm, dl = fdt;
+ }
+ for (var i = 0; i < li; ++i) {
+ var sym = syms[i];
+ if (sym > 255) {
+ var len = (sym >> 18) & 31;
+ wbits16(out, p, lm[len + 257]), p += ll[len + 257];
+ if (len > 7)
+ wbits(out, p, (sym >> 23) & 31), p += fleb[len];
+ var dst = sym & 31;
+ wbits16(out, p, dm[dst]), p += dl[dst];
+ if (dst > 3)
+ wbits16(out, p, (sym >> 5) & 8191), p += fdeb[dst];
+ }
+ else {
+ wbits16(out, p, lm[sym]), p += ll[sym];
+ }
+ }
+ wbits16(out, p, lm[256]);
+ return p + ll[256];
+};
+// deflate options (nice << 13) | chain
+var deo = /*#__PURE__*/ new i32([65540, 131080, 131088, 131104, 262176, 1048704, 1048832, 2114560, 2117632]);
+// empty
+var et = /*#__PURE__*/ new esm_u8(0);
+// compresses data into a raw DEFLATE buffer
+var dflt = function (dat, lvl, plvl, pre, post, st) {
+ var s = st.z || dat.length;
+ var o = new esm_u8(pre + s + 5 * (1 + Math.ceil(s / 7000)) + post);
+ // writing to this writes to the output buffer
+ var w = o.subarray(pre, o.length - post);
+ var lst = st.l;
+ var pos = (st.r || 0) & 7;
+ if (lvl) {
+ if (pos)
+ w[0] = st.r >> 3;
+ var opt = deo[lvl - 1];
+ var n = opt >> 13, c = opt & 8191;
+ var msk_1 = (1 << plvl) - 1;
+ // prev 2-byte val map curr 2-byte val map
+ var prev = st.p || new u16(32768), head = st.h || new u16(msk_1 + 1);
+ var bs1_1 = Math.ceil(plvl / 3), bs2_1 = 2 * bs1_1;
+ var hsh = function (i) { return (dat[i] ^ (dat[i + 1] << bs1_1) ^ (dat[i + 2] << bs2_1)) & msk_1; };
+ // 24576 is an arbitrary number of maximum symbols per block
+ // 424 buffer for last block
+ var syms = new i32(25000);
+ // length/literal freq distance freq
+ var lf = new u16(288), df = new u16(32);
+ // l/lcnt exbits index l/lind waitdx blkpos
+ var lc_1 = 0, eb = 0, i = st.i || 0, li = 0, wi = st.w || 0, bs = 0;
+ for (; i + 2 < s; ++i) {
+ // hash value
+ var hv = hsh(i);
+ // index mod 32768 previous index mod
+ var imod = i & 32767, pimod = head[hv];
+ prev[imod] = pimod;
+ head[hv] = imod;
+ // We always should modify head and prev, but only add symbols if
+ // this data is not yet processed ("wait" for wait index)
+ if (wi <= i) {
+ // bytes remaining
+ var rem = s - i;
+ if ((lc_1 > 7000 || li > 24576) && (rem > 423 || !lst)) {
+ pos = wblk(dat, w, 0, syms, lf, df, eb, li, bs, i - bs, pos);
+ li = lc_1 = eb = 0, bs = i;
+ for (var j = 0; j < 286; ++j)
+ lf[j] = 0;
+ for (var j = 0; j < 30; ++j)
+ df[j] = 0;
+ }
+ // len dist chain
+ var l = 2, d = 0, ch_1 = c, dif = imod - pimod & 32767;
+ if (rem > 2 && hv == hsh(i - dif)) {
+ var maxn = Math.min(n, rem) - 1;
+ var maxd = Math.min(32767, i);
+ // max possible length
+ // not capped at dif because decompressors implement "rolling" index population
+ var ml = Math.min(258, rem);
+ while (dif <= maxd && --ch_1 && imod != pimod) {
+ if (dat[i + l] == dat[i + l - dif]) {
+ var nl = 0;
+ for (; nl < ml && dat[i + nl] == dat[i + nl - dif]; ++nl)
+ ;
+ if (nl > l) {
+ l = nl, d = dif;
+ // break out early when we reach "nice" (we are satisfied enough)
+ if (nl > maxn)
+ break;
+ // now, find the rarest 2-byte sequence within this
+ // length of literals and search for that instead.
+ // Much faster than just using the start
+ var mmd = Math.min(dif, nl - 2);
+ var md = 0;
+ for (var j = 0; j < mmd; ++j) {
+ var ti = i - dif + j & 32767;
+ var pti = prev[ti];
+ var cd = ti - pti & 32767;
+ if (cd > md)
+ md = cd, pimod = ti;
+ }
+ }
+ }
+ // check the previous match
+ imod = pimod, pimod = prev[imod];
+ dif += imod - pimod & 32767;
+ }
+ }
+ // d will be nonzero only when a match was found
+ if (d) {
+ // store both dist and len data in one int32
+ // Make sure this is recognized as a len/dist with 28th bit (2^28)
+ syms[li++] = 268435456 | (revfl[l] << 18) | revfd[d];
+ var lin = revfl[l] & 31, din = revfd[d] & 31;
+ eb += fleb[lin] + fdeb[din];
+ ++lf[257 + lin];
+ ++df[din];
+ wi = i + l;
+ ++lc_1;
+ }
+ else {
+ syms[li++] = dat[i];
+ ++lf[dat[i]];
+ }
+ }
+ }
+ for (i = Math.max(i, wi); i < s; ++i) {
+ syms[li++] = dat[i];
+ ++lf[dat[i]];
+ }
+ pos = wblk(dat, w, lst, syms, lf, df, eb, li, bs, i - bs, pos);
+ if (!lst) {
+ st.r = (pos & 7) | w[(pos / 8) | 0] << 3;
+ // shft(pos) now 1 less if pos & 7 != 0
+ pos -= 7;
+ st.h = head, st.p = prev, st.i = i, st.w = wi;
+ }
+ }
+ else {
+ for (var i = st.w || 0; i < s + lst; i += 65535) {
+ // end
+ var e = i + 65535;
+ if (e >= s) {
+ // write final block
+ w[(pos / 8) | 0] = lst;
+ e = s;
+ }
+ pos = wfblk(w, pos + 1, dat.subarray(i, e));
+ }
+ st.i = s;
+ }
+ return slc(o, 0, pre + shft(pos) + post);
+};
+// CRC32 table
+var crct = /*#__PURE__*/ (function () {
+ var t = new Int32Array(256);
+ for (var i = 0; i < 256; ++i) {
+ var c = i, k = 9;
+ while (--k)
+ c = ((c & 1) && -306674912) ^ (c >>> 1);
+ t[i] = c;
+ }
+ return t;
+})();
+// CRC32
+var crc = function () {
+ var c = -1;
+ return {
+ p: function (d) {
+ // closures have awful performance
+ var cr = c;
+ for (var i = 0; i < d.length; ++i)
+ cr = crct[(cr & 255) ^ d[i]] ^ (cr >>> 8);
+ c = cr;
+ },
+ d: function () { return ~c; }
+ };
+};
+// Adler32
+var adler = function () {
+ var a = 1, b = 0;
+ return {
+ p: function (d) {
+ // closures have awful performance
+ var n = a, m = b;
+ var l = d.length | 0;
+ for (var i = 0; i != l;) {
+ var e = Math.min(i + 2655, l);
+ for (; i < e; ++i)
+ m += n += d[i];
+ n = (n & 65535) + 15 * (n >> 16), m = (m & 65535) + 15 * (m >> 16);
+ }
+ a = n, b = m;
+ },
+ d: function () {
+ a %= 65521, b %= 65521;
+ return (a & 255) << 24 | (a & 0xFF00) << 8 | (b & 255) << 8 | (b >> 8);
+ }
+ };
+};
+;
+// deflate with opts
+var dopt = function (dat, opt, pre, post, st) {
+ if (!st) {
+ st = { l: 1 };
+ if (opt.dictionary) {
+ var dict = opt.dictionary.subarray(-32768);
+ var newDat = new esm_u8(dict.length + dat.length);
+ newDat.set(dict);
+ newDat.set(dat, dict.length);
+ dat = newDat;
+ st.w = dict.length;
+ }
+ }
+ return dflt(dat, opt.level == null ? 6 : opt.level, opt.mem == null ? (st.l ? Math.ceil(Math.max(8, Math.min(13, Math.log(dat.length))) * 1.5) : 20) : (12 + opt.mem), pre, post, st);
+};
+// Walmart object spread
+var mrg = function (a, b) {
+ var o = {};
+ for (var k in a)
+ o[k] = a[k];
+ for (var k in b)
+ o[k] = b[k];
+ return o;
+};
+// worker clone
+// This is possibly the craziest part of the entire codebase, despite how simple it may seem.
+// The only parameter to this function is a closure that returns an array of variables outside of the function scope.
+// We're going to try to figure out the variable names used in the closure as strings because that is crucial for workerization.
+// We will return an object mapping of true variable name to value (basically, the current scope as a JS object).
+// The reason we can't just use the original variable names is minifiers mangling the toplevel scope.
+// This took me three weeks to figure out how to do.
+var wcln = function (fn, fnStr, td) {
+ var dt = fn();
+ var st = fn.toString();
+ var ks = st.slice(st.indexOf('[') + 1, st.lastIndexOf(']')).replace(/\s+/g, '').split(',');
+ for (var i = 0; i < dt.length; ++i) {
+ var v = dt[i], k = ks[i];
+ if (typeof v == 'function') {
+ fnStr += ';' + k + '=';
+ var st_1 = v.toString();
+ if (v.prototype) {
+ // for global objects
+ if (st_1.indexOf('[native code]') != -1) {
+ var spInd = st_1.indexOf(' ', 8) + 1;
+ fnStr += st_1.slice(spInd, st_1.indexOf('(', spInd));
+ }
+ else {
+ fnStr += st_1;
+ for (var t in v.prototype)
+ fnStr += ';' + k + '.prototype.' + t + '=' + v.prototype[t].toString();
+ }
+ }
+ else
+ fnStr += st_1;
+ }
+ else
+ td[k] = v;
+ }
+ return fnStr;
+};
+var ch = [];
+// clone bufs
+var cbfs = function (v) {
+ var tl = [];
+ for (var k in v) {
+ if (v[k].buffer) {
+ tl.push((v[k] = new v[k].constructor(v[k])).buffer);
+ }
+ }
+ return tl;
+};
+// use a worker to execute code
+var wrkr = function (fns, init, id, cb) {
+ if (!ch[id]) {
+ var fnStr = '', td_1 = {}, m = fns.length - 1;
+ for (var i = 0; i < m; ++i)
+ fnStr = wcln(fns[i], fnStr, td_1);
+ ch[id] = { c: wcln(fns[m], fnStr, td_1), e: td_1 };
+ }
+ var td = mrg({}, ch[id].e);
+ return wk(ch[id].c + ';onmessage=function(e){for(var k in e.data)self[k]=e.data[k];onmessage=' + init.toString() + '}', id, td, cbfs(td), cb);
+};
+// base async inflate fn
+var bInflt = function () { return [esm_u8, u16, i32, fleb, fdeb, clim, fl, fd, flrm, fdrm, esm_rev, ec, hMap, max, esm_bits, bits16, shft, slc, err, inflt, inflateSync, pbf, gopt]; };
+var bDflt = function () { return [esm_u8, u16, i32, fleb, fdeb, clim, revfl, revfd, flm, flt, fdm, fdt, esm_rev, deo, et, hMap, wbits, wbits16, hTree, ln, lc, clen, wfblk, wblk, shft, slc, dflt, dopt, deflateSync, pbf]; };
+// gzip extra
+var gze = function () { return [gzh, gzhl, wbytes, crc, crct]; };
+// gunzip extra
+var guze = function () { return [gzs, gzl]; };
+// zlib extra
+var zle = function () { return [zlh, wbytes, adler]; };
+// unzlib extra
+var zule = function () { return [zls]; };
+// post buf
+var pbf = function (msg) { return postMessage(msg, [msg.buffer]); };
+// get opts
+var gopt = function (o) { return o && {
+ out: o.size && new esm_u8(o.size),
+ dictionary: o.dictionary
+}; };
+// async helper
+var cbify = function (dat, opts, fns, init, id, cb) {
+ var w = wrkr(fns, init, id, function (err, dat) {
+ w.terminate();
+ cb(err, dat);
+ });
+ w.postMessage([dat, opts], opts.consume ? [dat.buffer] : []);
+ return function () { w.terminate(); };
+};
+// auto stream
+var astrm = function (strm) {
+ strm.ondata = function (dat, final) { return postMessage([dat, final], [dat.buffer]); };
+ return function (ev) {
+ if (ev.data.length) {
+ strm.push(ev.data[0], ev.data[1]);
+ postMessage([ev.data[0].length]);
+ }
+ else
+ strm.flush();
+ };
+};
+// async stream attach
+var astrmify = function (fns, strm, opts, init, id, flush, ext) {
+ var t;
+ var w = wrkr(fns, init, id, function (err, dat) {
+ if (err)
+ w.terminate(), strm.ondata.call(strm, err);
+ else if (!Array.isArray(dat))
+ ext(dat);
+ else if (dat.length == 1) {
+ strm.queuedSize -= dat[0];
+ if (strm.ondrain)
+ strm.ondrain(dat[0]);
+ }
+ else {
+ if (dat[1])
+ w.terminate();
+ strm.ondata.call(strm, err, dat[0], dat[1]);
+ }
+ });
+ w.postMessage(opts);
+ strm.queuedSize = 0;
+ strm.push = function (d, f) {
+ if (!strm.ondata)
+ err(5);
+ if (t)
+ strm.ondata(err(4, 0, 1), null, !!f);
+ strm.queuedSize += d.length;
+ w.postMessage([d, t = f], [d.buffer]);
+ };
+ strm.terminate = function () { w.terminate(); };
+ if (flush) {
+ strm.flush = function () { w.postMessage([]); };
+ }
+};
+// read 2 bytes
+var b2 = function (d, b) { return d[b] | (d[b + 1] << 8); };
+// read 4 bytes
+var b4 = function (d, b) { return (d[b] | (d[b + 1] << 8) | (d[b + 2] << 16) | (d[b + 3] << 24)) >>> 0; };
+var b8 = function (d, b) { return b4(d, b) + (b4(d, b + 4) * 4294967296); };
+// write bytes
+var wbytes = function (d, b, v) {
+ for (; v; ++b)
+ d[b] = v, v >>>= 8;
+};
+// gzip header
+var gzh = function (c, o) {
+ var fn = o.filename;
+ c[0] = 31, c[1] = 139, c[2] = 8, c[8] = o.level < 2 ? 4 : o.level == 9 ? 2 : 0, c[9] = 3; // assume Unix
+ if (o.mtime != 0)
+ wbytes(c, 4, Math.floor(new Date(o.mtime || Date.now()) / 1000));
+ if (fn) {
+ c[3] = 8;
+ for (var i = 0; i <= fn.length; ++i)
+ c[i + 10] = fn.charCodeAt(i);
+ }
+};
+// gzip footer: -8 to -4 = CRC, -4 to -0 is length
+// gzip start
+var gzs = function (d) {
+ if (d[0] != 31 || d[1] != 139 || d[2] != 8)
+ err(6, 'invalid gzip data');
+ var flg = d[3];
+ var st = 10;
+ if (flg & 4)
+ st += (d[10] | d[11] << 8) + 2;
+ for (var zs = (flg >> 3 & 1) + (flg >> 4 & 1); zs > 0; zs -= !d[st++])
+ ;
+ return st + (flg & 2);
+};
+// gzip length
+var gzl = function (d) {
+ var l = d.length;
+ return (d[l - 4] | d[l - 3] << 8 | d[l - 2] << 16 | d[l - 1] << 24) >>> 0;
+};
+// gzip header length
+var gzhl = function (o) { return 10 + (o.filename ? o.filename.length + 1 : 0); };
+// zlib header
+var zlh = function (c, o) {
+ var lv = o.level, fl = lv == 0 ? 0 : lv < 6 ? 1 : lv == 9 ? 3 : 2;
+ c[0] = 120, c[1] = (fl << 6) | (o.dictionary && 32);
+ c[1] |= 31 - ((c[0] << 8) | c[1]) % 31;
+ if (o.dictionary) {
+ var h = adler();
+ h.p(o.dictionary);
+ wbytes(c, 2, h.d());
+ }
+};
+// zlib start
+var zls = function (d, dict) {
+ if ((d[0] & 15) != 8 || (d[0] >> 4) > 7 || ((d[0] << 8 | d[1]) % 31))
+ err(6, 'invalid zlib data');
+ if ((d[1] >> 5 & 1) == +!dict)
+ err(6, 'invalid zlib data: ' + (d[1] & 32 ? 'need' : 'unexpected') + ' dictionary');
+ return (d[1] >> 3 & 4) + 2;
+};
+function StrmOpt(opts, cb) {
+ if (typeof opts == 'function')
+ cb = opts, opts = {};
+ this.ondata = cb;
+ return opts;
+}
+/**
+ * Streaming DEFLATE compression
+ */
+var Deflate = /*#__PURE__*/ ((/* unused pure expression or super */ null && (function () {
+ function Deflate(opts, cb) {
+ if (typeof opts == 'function')
+ cb = opts, opts = {};
+ this.ondata = cb;
+ this.o = opts || {};
+ this.s = { l: 0, i: 32768, w: 32768, z: 32768 };
+ // Buffer length must always be 0 mod 32768 for index calculations to be correct when modifying head and prev
+ // 98304 = 32768 (lookback) + 65536 (common chunk size)
+ this.b = new esm_u8(98304);
+ if (this.o.dictionary) {
+ var dict = this.o.dictionary.subarray(-32768);
+ this.b.set(dict, 32768 - dict.length);
+ this.s.i = 32768 - dict.length;
+ }
+ }
+ Deflate.prototype.p = function (c, f) {
+ this.ondata(dopt(c, this.o, 0, 0, this.s), f);
+ };
+ /**
+ * Pushes a chunk to be deflated
+ * @param chunk The chunk to push
+ * @param final Whether this is the last chunk
+ */
+ Deflate.prototype.push = function (chunk, final) {
+ if (!this.ondata)
+ err(5);
+ if (this.s.l)
+ err(4);
+ var endLen = chunk.length + this.s.z;
+ if (endLen > this.b.length) {
+ if (endLen > 2 * this.b.length - 32768) {
+ var newBuf = new esm_u8(endLen & -32768);
+ newBuf.set(this.b.subarray(0, this.s.z));
+ this.b = newBuf;
+ }
+ var split = this.b.length - this.s.z;
+ this.b.set(chunk.subarray(0, split), this.s.z);
+ this.s.z = this.b.length;
+ this.p(this.b, false);
+ this.b.set(this.b.subarray(-32768));
+ this.b.set(chunk.subarray(split), 32768);
+ this.s.z = chunk.length - split + 32768;
+ this.s.i = 32766, this.s.w = 32768;
+ }
+ else {
+ this.b.set(chunk, this.s.z);
+ this.s.z += chunk.length;
+ }
+ this.s.l = final & 1;
+ if (this.s.z > this.s.w + 8191 || final) {
+ this.p(this.b, final || false);
+ this.s.w = this.s.i, this.s.i -= 2;
+ }
+ };
+ /**
+ * Flushes buffered uncompressed data. Useful to immediately retrieve the
+ * deflated output for small inputs.
+ */
+ Deflate.prototype.flush = function () {
+ if (!this.ondata)
+ err(5);
+ if (this.s.l)
+ err(4);
+ this.p(this.b, false);
+ this.s.w = this.s.i, this.s.i -= 2;
+ };
+ return Deflate;
+}())));
+
+/**
+ * Asynchronous streaming DEFLATE compression
+ */
+var AsyncDeflate = /*#__PURE__*/ ((/* unused pure expression or super */ null && (function () {
+ function AsyncDeflate(opts, cb) {
+ astrmify([
+ bDflt,
+ function () { return [astrm, Deflate]; }
+ ], this, StrmOpt.call(this, opts, cb), function (ev) {
+ var strm = new Deflate(ev.data);
+ onmessage = astrm(strm);
+ }, 6, 1);
+ }
+ return AsyncDeflate;
+}())));
+
+function deflate(data, opts, cb) {
+ if (!cb)
+ cb = opts, opts = {};
+ if (typeof cb != 'function')
+ err(7);
+ return cbify(data, opts, [
+ bDflt,
+ ], function (ev) { return pbf(deflateSync(ev.data[0], ev.data[1])); }, 0, cb);
+}
+/**
+ * Compresses data with DEFLATE without any wrapper
+ * @param data The data to compress
+ * @param opts The compression options
+ * @returns The deflated version of the data
+ */
+function deflateSync(data, opts) {
+ return dopt(data, opts || {}, 0, 0);
+}
+/**
+ * Streaming DEFLATE decompression
+ */
+var Inflate = /*#__PURE__*/ ((/* unused pure expression or super */ null && (function () {
+ function Inflate(opts, cb) {
+ // no StrmOpt here to avoid adding to workerizer
+ if (typeof opts == 'function')
+ cb = opts, opts = {};
+ this.ondata = cb;
+ var dict = opts && opts.dictionary && opts.dictionary.subarray(-32768);
+ this.s = { i: 0, b: dict ? dict.length : 0 };
+ this.o = new esm_u8(32768);
+ this.p = new esm_u8(0);
+ if (dict)
+ this.o.set(dict);
+ }
+ Inflate.prototype.e = function (c) {
+ if (!this.ondata)
+ err(5);
+ if (this.d)
+ err(4);
+ if (!this.p.length)
+ this.p = c;
+ else if (c.length) {
+ var n = new esm_u8(this.p.length + c.length);
+ n.set(this.p), n.set(c, this.p.length), this.p = n;
+ }
+ };
+ Inflate.prototype.c = function (final) {
+ this.s.i = +(this.d = final || false);
+ var bts = this.s.b;
+ var dt = inflt(this.p, this.s, this.o);
+ this.ondata(slc(dt, bts, this.s.b), this.d);
+ this.o = slc(dt, this.s.b - 32768), this.s.b = this.o.length;
+ this.p = slc(this.p, (this.s.p / 8) | 0), this.s.p &= 7;
+ };
+ /**
+ * Pushes a chunk to be inflated
+ * @param chunk The chunk to push
+ * @param final Whether this is the final chunk
+ */
+ Inflate.prototype.push = function (chunk, final) {
+ this.e(chunk), this.c(final);
+ };
+ return Inflate;
+}())));
+
+/**
+ * Asynchronous streaming DEFLATE decompression
+ */
+var AsyncInflate = /*#__PURE__*/ ((/* unused pure expression or super */ null && (function () {
+ function AsyncInflate(opts, cb) {
+ astrmify([
+ bInflt,
+ function () { return [astrm, Inflate]; }
+ ], this, StrmOpt.call(this, opts, cb), function (ev) {
+ var strm = new Inflate(ev.data);
+ onmessage = astrm(strm);
+ }, 7, 0);
+ }
+ return AsyncInflate;
+}())));
+
+function inflate(data, opts, cb) {
+ if (!cb)
+ cb = opts, opts = {};
+ if (typeof cb != 'function')
+ err(7);
+ return cbify(data, opts, [
+ bInflt
+ ], function (ev) { return pbf(inflateSync(ev.data[0], gopt(ev.data[1]))); }, 1, cb);
+}
+/**
+ * Expands DEFLATE data with no wrapper
+ * @param data The data to decompress
+ * @param opts The decompression options
+ * @returns The decompressed version of the data
+ */
+function inflateSync(data, opts) {
+ return inflt(data, { i: 2 }, opts && opts.out, opts && opts.dictionary);
+}
+// before you yell at me for not just using extends, my reason is that TS inheritance is hard to workerize.
+/**
+ * Streaming GZIP compression
+ */
+var Gzip = /*#__PURE__*/ ((/* unused pure expression or super */ null && (function () {
+ function Gzip(opts, cb) {
+ this.c = crc();
+ this.l = 0;
+ this.v = 1;
+ Deflate.call(this, opts, cb);
+ }
+ /**
+ * Pushes a chunk to be GZIPped
+ * @param chunk The chunk to push
+ * @param final Whether this is the last chunk
+ */
+ Gzip.prototype.push = function (chunk, final) {
+ this.c.p(chunk);
+ this.l += chunk.length;
+ Deflate.prototype.push.call(this, chunk, final);
+ };
+ Gzip.prototype.p = function (c, f) {
+ var raw = dopt(c, this.o, this.v && gzhl(this.o), f && 8, this.s);
+ if (this.v)
+ gzh(raw, this.o), this.v = 0;
+ if (f)
+ wbytes(raw, raw.length - 8, this.c.d()), wbytes(raw, raw.length - 4, this.l);
+ this.ondata(raw, f);
+ };
+ /**
+ * Flushes buffered uncompressed data. Useful to immediately retrieve the
+ * GZIPped output for small inputs.
+ */
+ Gzip.prototype.flush = function () {
+ Deflate.prototype.flush.call(this);
+ };
+ return Gzip;
+}())));
+
+/**
+ * Asynchronous streaming GZIP compression
+ */
+var AsyncGzip = /*#__PURE__*/ ((/* unused pure expression or super */ null && (function () {
+ function AsyncGzip(opts, cb) {
+ astrmify([
+ bDflt,
+ gze,
+ function () { return [astrm, Deflate, Gzip]; }
+ ], this, StrmOpt.call(this, opts, cb), function (ev) {
+ var strm = new Gzip(ev.data);
+ onmessage = astrm(strm);
+ }, 8, 1);
+ }
+ return AsyncGzip;
+}())));
+
+function gzip(data, opts, cb) {
+ if (!cb)
+ cb = opts, opts = {};
+ if (typeof cb != 'function')
+ err(7);
+ return cbify(data, opts, [
+ bDflt,
+ gze,
+ function () { return [gzipSync]; }
+ ], function (ev) { return pbf(gzipSync(ev.data[0], ev.data[1])); }, 2, cb);
+}
+/**
+ * Compresses data with GZIP
+ * @param data The data to compress
+ * @param opts The compression options
+ * @returns The gzipped version of the data
+ */
+function gzipSync(data, opts) {
+ if (!opts)
+ opts = {};
+ var c = crc(), l = data.length;
+ c.p(data);
+ var d = dopt(data, opts, gzhl(opts), 8), s = d.length;
+ return gzh(d, opts), wbytes(d, s - 8, c.d()), wbytes(d, s - 4, l), d;
+}
+/**
+ * Streaming single or multi-member GZIP decompression
+ */
+var Gunzip = /*#__PURE__*/ ((/* unused pure expression or super */ null && (function () {
+ function Gunzip(opts, cb) {
+ this.v = 1;
+ this.r = 0;
+ Inflate.call(this, opts, cb);
+ }
+ /**
+ * Pushes a chunk to be GUNZIPped
+ * @param chunk The chunk to push
+ * @param final Whether this is the last chunk
+ */
+ Gunzip.prototype.push = function (chunk, final) {
+ Inflate.prototype.e.call(this, chunk);
+ this.r += chunk.length;
+ if (this.v) {
+ var p = this.p.subarray(this.v - 1);
+ var s = p.length > 3 ? gzs(p) : 4;
+ if (s > p.length) {
+ if (!final)
+ return;
+ }
+ else if (this.v > 1 && this.onmember) {
+ this.onmember(this.r - p.length);
+ }
+ this.p = p.subarray(s), this.v = 0;
+ }
+ // necessary to prevent TS from using the closure value
+ // This allows for workerization to function correctly
+ Inflate.prototype.c.call(this, final);
+ // process concatenated GZIP
+ if (this.s.f && !this.s.l && !final) {
+ this.v = shft(this.s.p) + 9;
+ this.s = { i: 0 };
+ this.o = new esm_u8(0);
+ this.push(new esm_u8(0), final);
+ }
+ };
+ return Gunzip;
+}())));
+
+/**
+ * Asynchronous streaming single or multi-member GZIP decompression
+ */
+var AsyncGunzip = /*#__PURE__*/ ((/* unused pure expression or super */ null && (function () {
+ function AsyncGunzip(opts, cb) {
+ var _this = this;
+ astrmify([
+ bInflt,
+ guze,
+ function () { return [astrm, Inflate, Gunzip]; }
+ ], this, StrmOpt.call(this, opts, cb), function (ev) {
+ var strm = new Gunzip(ev.data);
+ strm.onmember = function (offset) { return postMessage(offset); };
+ onmessage = astrm(strm);
+ }, 9, 0, function (offset) { return _this.onmember && _this.onmember(offset); });
+ }
+ return AsyncGunzip;
+}())));
+
+function gunzip(data, opts, cb) {
+ if (!cb)
+ cb = opts, opts = {};
+ if (typeof cb != 'function')
+ err(7);
+ return cbify(data, opts, [
+ bInflt,
+ guze,
+ function () { return [gunzipSync]; }
+ ], function (ev) { return pbf(gunzipSync(ev.data[0], ev.data[1])); }, 3, cb);
+}
+/**
+ * Expands GZIP data
+ * @param data The data to decompress
+ * @param opts The decompression options
+ * @returns The decompressed version of the data
+ */
+function gunzipSync(data, opts) {
+ var st = gzs(data);
+ if (st + 8 > data.length)
+ err(6, 'invalid gzip data');
+ return inflt(data.subarray(st, -8), { i: 2 }, opts && opts.out || new esm_u8(gzl(data)), opts && opts.dictionary);
+}
+/**
+ * Streaming Zlib compression
+ */
+var Zlib = /*#__PURE__*/ ((/* unused pure expression or super */ null && (function () {
+ function Zlib(opts, cb) {
+ this.c = adler();
+ this.v = 1;
+ Deflate.call(this, opts, cb);
+ }
+ /**
+ * Pushes a chunk to be zlibbed
+ * @param chunk The chunk to push
+ * @param final Whether this is the last chunk
+ */
+ Zlib.prototype.push = function (chunk, final) {
+ this.c.p(chunk);
+ Deflate.prototype.push.call(this, chunk, final);
+ };
+ Zlib.prototype.p = function (c, f) {
+ var raw = dopt(c, this.o, this.v && (this.o.dictionary ? 6 : 2), f && 4, this.s);
+ if (this.v)
+ zlh(raw, this.o), this.v = 0;
+ if (f)
+ wbytes(raw, raw.length - 4, this.c.d());
+ this.ondata(raw, f);
+ };
+ /**
+ * Flushes buffered uncompressed data. Useful to immediately retrieve the
+ * zlibbed output for small inputs.
+ */
+ Zlib.prototype.flush = function () {
+ Deflate.prototype.flush.call(this);
+ };
+ return Zlib;
+}())));
+
+/**
+ * Asynchronous streaming Zlib compression
+ */
+var AsyncZlib = /*#__PURE__*/ ((/* unused pure expression or super */ null && (function () {
+ function AsyncZlib(opts, cb) {
+ astrmify([
+ bDflt,
+ zle,
+ function () { return [astrm, Deflate, Zlib]; }
+ ], this, StrmOpt.call(this, opts, cb), function (ev) {
+ var strm = new Zlib(ev.data);
+ onmessage = astrm(strm);
+ }, 10, 1);
+ }
+ return AsyncZlib;
+}())));
+
+function zlib(data, opts, cb) {
+ if (!cb)
+ cb = opts, opts = {};
+ if (typeof cb != 'function')
+ err(7);
+ return cbify(data, opts, [
+ bDflt,
+ zle,
+ function () { return [zlibSync]; }
+ ], function (ev) { return pbf(zlibSync(ev.data[0], ev.data[1])); }, 4, cb);
+}
+/**
+ * Compress data with Zlib
+ * @param data The data to compress
+ * @param opts The compression options
+ * @returns The zlib-compressed version of the data
+ */
+function zlibSync(data, opts) {
+ if (!opts)
+ opts = {};
+ var a = adler();
+ a.p(data);
+ var d = dopt(data, opts, opts.dictionary ? 6 : 2, 4);
+ return zlh(d, opts), wbytes(d, d.length - 4, a.d()), d;
+}
+/**
+ * Streaming Zlib decompression
+ */
+var Unzlib = /*#__PURE__*/ ((/* unused pure expression or super */ null && (function () {
+ function Unzlib(opts, cb) {
+ Inflate.call(this, opts, cb);
+ this.v = opts && opts.dictionary ? 2 : 1;
+ }
+ /**
+ * Pushes a chunk to be unzlibbed
+ * @param chunk The chunk to push
+ * @param final Whether this is the last chunk
+ */
+ Unzlib.prototype.push = function (chunk, final) {
+ Inflate.prototype.e.call(this, chunk);
+ if (this.v) {
+ if (this.p.length < 6 && !final)
+ return;
+ this.p = this.p.subarray(zls(this.p, this.v - 1)), this.v = 0;
+ }
+ if (final) {
+ if (this.p.length < 4)
+ err(6, 'invalid zlib data');
+ this.p = this.p.subarray(0, -4);
+ }
+ // necessary to prevent TS from using the closure value
+ // This allows for workerization to function correctly
+ Inflate.prototype.c.call(this, final);
+ };
+ return Unzlib;
+}())));
+
+/**
+ * Asynchronous streaming Zlib decompression
+ */
+var AsyncUnzlib = /*#__PURE__*/ ((/* unused pure expression or super */ null && (function () {
+ function AsyncUnzlib(opts, cb) {
+ astrmify([
+ bInflt,
+ zule,
+ function () { return [astrm, Inflate, Unzlib]; }
+ ], this, StrmOpt.call(this, opts, cb), function (ev) {
+ var strm = new Unzlib(ev.data);
+ onmessage = astrm(strm);
+ }, 11, 0);
+ }
+ return AsyncUnzlib;
+}())));
+
+function unzlib(data, opts, cb) {
+ if (!cb)
+ cb = opts, opts = {};
+ if (typeof cb != 'function')
+ err(7);
+ return cbify(data, opts, [
+ bInflt,
+ zule,
+ function () { return [unzlibSync]; }
+ ], function (ev) { return pbf(unzlibSync(ev.data[0], gopt(ev.data[1]))); }, 5, cb);
+}
+/**
+ * Expands Zlib data
+ * @param data The data to decompress
+ * @param opts The decompression options
+ * @returns The decompressed version of the data
+ */
+function unzlibSync(data, opts) {
+ return inflt(data.subarray(zls(data, opts && opts.dictionary), -4), { i: 2 }, opts && opts.out, opts && opts.dictionary);
+}
+// Default algorithm for compression (used because having a known output size allows faster decompression)
+
+
+/**
+ * Streaming GZIP, Zlib, or raw DEFLATE decompression
+ */
+var Decompress = /*#__PURE__*/ ((/* unused pure expression or super */ null && (function () {
+ function Decompress(opts, cb) {
+ this.o = StrmOpt.call(this, opts, cb) || {};
+ this.G = Gunzip;
+ this.I = Inflate;
+ this.Z = Unzlib;
+ }
+ // init substream
+ // overriden by AsyncDecompress
+ Decompress.prototype.i = function () {
+ var _this = this;
+ this.s.ondata = function (dat, final) {
+ _this.ondata(dat, final);
+ };
+ };
+ /**
+ * Pushes a chunk to be decompressed
+ * @param chunk The chunk to push
+ * @param final Whether this is the last chunk
+ */
+ Decompress.prototype.push = function (chunk, final) {
+ if (!this.ondata)
+ err(5);
+ if (!this.s) {
+ if (this.p && this.p.length) {
+ var n = new esm_u8(this.p.length + chunk.length);
+ n.set(this.p), n.set(chunk, this.p.length);
+ }
+ else
+ this.p = chunk;
+ if (this.p.length > 2) {
+ this.s = (this.p[0] == 31 && this.p[1] == 139 && this.p[2] == 8)
+ ? new this.G(this.o)
+ : ((this.p[0] & 15) != 8 || (this.p[0] >> 4) > 7 || ((this.p[0] << 8 | this.p[1]) % 31))
+ ? new this.I(this.o)
+ : new this.Z(this.o);
+ this.i();
+ this.s.push(this.p, final);
+ this.p = null;
+ }
+ }
+ else
+ this.s.push(chunk, final);
+ };
+ return Decompress;
+}())));
+
+/**
+ * Asynchronous streaming GZIP, Zlib, or raw DEFLATE decompression
+ */
+var AsyncDecompress = /*#__PURE__*/ ((/* unused pure expression or super */ null && (function () {
+ function AsyncDecompress(opts, cb) {
+ Decompress.call(this, opts, cb);
+ this.queuedSize = 0;
+ this.G = AsyncGunzip;
+ this.I = AsyncInflate;
+ this.Z = AsyncUnzlib;
+ }
+ AsyncDecompress.prototype.i = function () {
+ var _this = this;
+ this.s.ondata = function (err, dat, final) {
+ _this.ondata(err, dat, final);
+ };
+ this.s.ondrain = function (size) {
+ _this.queuedSize -= size;
+ if (_this.ondrain)
+ _this.ondrain(size);
+ };
+ };
+ /**
+ * Pushes a chunk to be decompressed
+ * @param chunk The chunk to push
+ * @param final Whether this is the last chunk
+ */
+ AsyncDecompress.prototype.push = function (chunk, final) {
+ this.queuedSize += chunk.length;
+ Decompress.prototype.push.call(this, chunk, final);
+ };
+ return AsyncDecompress;
+}())));
+
+function decompress(data, opts, cb) {
+ if (!cb)
+ cb = opts, opts = {};
+ if (typeof cb != 'function')
+ err(7);
+ return (data[0] == 31 && data[1] == 139 && data[2] == 8)
+ ? gunzip(data, opts, cb)
+ : ((data[0] & 15) != 8 || (data[0] >> 4) > 7 || ((data[0] << 8 | data[1]) % 31))
+ ? inflate(data, opts, cb)
+ : unzlib(data, opts, cb);
+}
+/**
+ * Expands compressed GZIP, Zlib, or raw DEFLATE data, automatically detecting the format
+ * @param data The data to decompress
+ * @param opts The decompression options
+ * @returns The decompressed version of the data
+ */
+function decompressSync(data, opts) {
+ return (data[0] == 31 && data[1] == 139 && data[2] == 8)
+ ? gunzipSync(data, opts)
+ : ((data[0] & 15) != 8 || (data[0] >> 4) > 7 || ((data[0] << 8 | data[1]) % 31))
+ ? inflateSync(data, opts)
+ : unzlibSync(data, opts);
+}
+// flatten a directory structure
+var fltn = function (d, p, t, o) {
+ for (var k in d) {
+ var val = d[k], n = p + k, op = o;
+ if (Array.isArray(val))
+ op = mrg(o, val[1]), val = val[0];
+ if (val instanceof esm_u8)
+ t[n] = [val, op];
+ else {
+ t[n += '/'] = [new esm_u8(0), op];
+ fltn(val, n, t, o);
+ }
+ }
+};
+// text encoder
+var te = typeof TextEncoder != 'undefined' && /*#__PURE__*/ new TextEncoder();
+// text decoder
+var td = typeof TextDecoder != 'undefined' && /*#__PURE__*/ new TextDecoder();
+// text decoder stream
+var tds = 0;
+try {
+ td.decode(et, { stream: true });
+ tds = 1;
+}
+catch (e) { }
+// decode UTF8
+var dutf8 = function (d) {
+ for (var r = '', i = 0;;) {
+ var c = d[i++];
+ var eb = (c > 127) + (c > 223) + (c > 239);
+ if (i + eb > d.length)
+ return { s: r, r: slc(d, i - 1) };
+ if (!eb)
+ r += String.fromCharCode(c);
+ else if (eb == 3) {
+ c = ((c & 15) << 18 | (d[i++] & 63) << 12 | (d[i++] & 63) << 6 | (d[i++] & 63)) - 65536,
+ r += String.fromCharCode(55296 | (c >> 10), 56320 | (c & 1023));
+ }
+ else if (eb & 1)
+ r += String.fromCharCode((c & 31) << 6 | (d[i++] & 63));
+ else
+ r += String.fromCharCode((c & 15) << 12 | (d[i++] & 63) << 6 | (d[i++] & 63));
+ }
+};
+/**
+ * Streaming UTF-8 decoding
+ */
+var DecodeUTF8 = /*#__PURE__*/ ((/* unused pure expression or super */ null && (function () {
+ /**
+ * Creates a UTF-8 decoding stream
+ * @param cb The callback to call whenever data is decoded
+ */
+ function DecodeUTF8(cb) {
+ this.ondata = cb;
+ if (tds)
+ this.t = new TextDecoder();
+ else
+ this.p = et;
+ }
+ /**
+ * Pushes a chunk to be decoded from UTF-8 binary
+ * @param chunk The chunk to push
+ * @param final Whether this is the last chunk
+ */
+ DecodeUTF8.prototype.push = function (chunk, final) {
+ if (!this.ondata)
+ err(5);
+ final = !!final;
+ if (this.t) {
+ this.ondata(this.t.decode(chunk, { stream: true }), final);
+ if (final) {
+ if (this.t.decode().length)
+ err(8);
+ this.t = null;
+ }
+ return;
+ }
+ if (!this.p)
+ err(4);
+ var dat = new esm_u8(this.p.length + chunk.length);
+ dat.set(this.p);
+ dat.set(chunk, this.p.length);
+ var _a = dutf8(dat), s = _a.s, r = _a.r;
+ if (final) {
+ if (r.length)
+ err(8);
+ this.p = null;
+ }
+ else
+ this.p = r;
+ this.ondata(s, final);
+ };
+ return DecodeUTF8;
+}())));
+
+/**
+ * Streaming UTF-8 encoding
+ */
+var EncodeUTF8 = /*#__PURE__*/ ((/* unused pure expression or super */ null && (function () {
+ /**
+ * Creates a UTF-8 decoding stream
+ * @param cb The callback to call whenever data is encoded
+ */
+ function EncodeUTF8(cb) {
+ this.ondata = cb;
+ }
+ /**
+ * Pushes a chunk to be encoded to UTF-8
+ * @param chunk The string data to push
+ * @param final Whether this is the last chunk
+ */
+ EncodeUTF8.prototype.push = function (chunk, final) {
+ if (!this.ondata)
+ err(5);
+ if (this.d)
+ err(4);
+ this.ondata(strToU8(chunk), this.d = final || false);
+ };
+ return EncodeUTF8;
+}())));
+
+/**
+ * Converts a string into a Uint8Array for use with compression/decompression methods
+ * @param str The string to encode
+ * @param latin1 Whether or not to interpret the data as Latin-1. This should
+ * not need to be true unless decoding a binary string.
+ * @returns The string encoded in UTF-8/Latin-1 binary
+ */
+function strToU8(str, latin1) {
+ if (latin1) {
+ var ar_1 = new esm_u8(str.length);
+ for (var i = 0; i < str.length; ++i)
+ ar_1[i] = str.charCodeAt(i);
+ return ar_1;
+ }
+ if (te)
+ return te.encode(str);
+ var l = str.length;
+ var ar = new esm_u8(str.length + (str.length >> 1));
+ var ai = 0;
+ var w = function (v) { ar[ai++] = v; };
+ for (var i = 0; i < l; ++i) {
+ if (ai + 5 > ar.length) {
+ var n = new esm_u8(ai + 8 + ((l - i) << 1));
+ n.set(ar);
+ ar = n;
+ }
+ var c = str.charCodeAt(i);
+ if (c < 128 || latin1)
+ w(c);
+ else if (c < 2048)
+ w(192 | (c >> 6)), w(128 | (c & 63));
+ else if (c > 55295 && c < 57344)
+ c = 65536 + (c & 1023 << 10) | (str.charCodeAt(++i) & 1023),
+ w(240 | (c >> 18)), w(128 | ((c >> 12) & 63)), w(128 | ((c >> 6) & 63)), w(128 | (c & 63));
+ else
+ w(224 | (c >> 12)), w(128 | ((c >> 6) & 63)), w(128 | (c & 63));
+ }
+ return slc(ar, 0, ai);
+}
+/**
+ * Converts a Uint8Array to a string
+ * @param dat The data to decode to string
+ * @param latin1 Whether or not to interpret the data as Latin-1. This should
+ * not need to be true unless encoding to binary string.
+ * @returns The original UTF-8/Latin-1 string
+ */
+function strFromU8(dat, latin1) {
+ if (latin1) {
+ var r = '';
+ for (var i = 0; i < dat.length; i += 16384)
+ r += String.fromCharCode.apply(null, dat.subarray(i, i + 16384));
+ return r;
+ }
+ else if (td) {
+ return td.decode(dat);
+ }
+ else {
+ var _a = dutf8(dat), s = _a.s, r = _a.r;
+ if (r.length)
+ err(8);
+ return s;
+ }
+}
+;
+// deflate bit flag
+var dbf = function (l) { return l == 1 ? 3 : l < 6 ? 2 : l == 9 ? 1 : 0; };
+// skip local zip header
+var slzh = function (d, b) { return b + 30 + b2(d, b + 26) + b2(d, b + 28); };
+// read zip header
+var zh = function (d, b, z) {
+ var fnl = b2(d, b + 28), fn = strFromU8(d.subarray(b + 46, b + 46 + fnl), !(b2(d, b + 8) & 2048)), es = b + 46 + fnl, bs = b4(d, b + 20);
+ var _a = z && bs == 4294967295 ? z64e(d, es) : [bs, b4(d, b + 24), b4(d, b + 42)], sc = _a[0], su = _a[1], off = _a[2];
+ return [b2(d, b + 10), sc, su, fn, es + b2(d, b + 30) + b2(d, b + 32), off];
+};
+// read zip64 extra field
+var z64e = function (d, b) {
+ for (; b2(d, b) != 1; b += 4 + b2(d, b + 2))
+ ;
+ return [b8(d, b + 12), b8(d, b + 4), b8(d, b + 20)];
+};
+// extra field length
+var exfl = function (ex) {
+ var le = 0;
+ if (ex) {
+ for (var k in ex) {
+ var l = ex[k].length;
+ if (l > 65535)
+ err(9);
+ le += l + 4;
+ }
+ }
+ return le;
+};
+// write zip header
+var wzh = function (d, b, f, fn, u, c, ce, co) {
+ var fl = fn.length, ex = f.extra, col = co && co.length;
+ var exl = exfl(ex);
+ wbytes(d, b, ce != null ? 0x2014B50 : 0x4034B50), b += 4;
+ if (ce != null)
+ d[b++] = 20, d[b++] = f.os;
+ d[b] = 20, b += 2; // spec compliance? what's that?
+ d[b++] = (f.flag << 1) | (c < 0 && 8), d[b++] = u && 8;
+ d[b++] = f.compression & 255, d[b++] = f.compression >> 8;
+ var dt = new Date(f.mtime == null ? Date.now() : f.mtime), y = dt.getFullYear() - 1980;
+ if (y < 0 || y > 119)
+ err(10);
+ wbytes(d, b, (y << 25) | ((dt.getMonth() + 1) << 21) | (dt.getDate() << 16) | (dt.getHours() << 11) | (dt.getMinutes() << 5) | (dt.getSeconds() >> 1)), b += 4;
+ if (c != -1) {
+ wbytes(d, b, f.crc);
+ wbytes(d, b + 4, c < 0 ? -c - 2 : c);
+ wbytes(d, b + 8, f.size);
+ }
+ wbytes(d, b + 12, fl);
+ wbytes(d, b + 14, exl), b += 16;
+ if (ce != null) {
+ wbytes(d, b, col);
+ wbytes(d, b + 6, f.attrs);
+ wbytes(d, b + 10, ce), b += 14;
+ }
+ d.set(fn, b);
+ b += fl;
+ if (exl) {
+ for (var k in ex) {
+ var exf = ex[k], l = exf.length;
+ wbytes(d, b, +k);
+ wbytes(d, b + 2, l);
+ d.set(exf, b + 4), b += 4 + l;
+ }
+ }
+ if (col)
+ d.set(co, b), b += col;
+ return b;
+};
+// write zip footer (end of central directory)
+var wzf = function (o, b, c, d, e) {
+ wbytes(o, b, 0x6054B50); // skip disk
+ wbytes(o, b + 8, c);
+ wbytes(o, b + 10, c);
+ wbytes(o, b + 12, d);
+ wbytes(o, b + 16, e);
+};
+/**
+ * A pass-through stream to keep data uncompressed in a ZIP archive.
+ */
+var ZipPassThrough = /*#__PURE__*/ ((/* unused pure expression or super */ null && (function () {
+ /**
+ * Creates a pass-through stream that can be added to ZIP archives
+ * @param filename The filename to associate with this data stream
+ */
+ function ZipPassThrough(filename) {
+ this.filename = filename;
+ this.c = crc();
+ this.size = 0;
+ this.compression = 0;
+ }
+ /**
+ * Processes a chunk and pushes to the output stream. You can override this
+ * method in a subclass for custom behavior, but by default this passes
+ * the data through. You must call this.ondata(err, chunk, final) at some
+ * point in this method.
+ * @param chunk The chunk to process
+ * @param final Whether this is the last chunk
+ */
+ ZipPassThrough.prototype.process = function (chunk, final) {
+ this.ondata(null, chunk, final);
+ };
+ /**
+ * Pushes a chunk to be added. If you are subclassing this with a custom
+ * compression algorithm, note that you must push data from the source
+ * file only, pre-compression.
+ * @param chunk The chunk to push
+ * @param final Whether this is the last chunk
+ */
+ ZipPassThrough.prototype.push = function (chunk, final) {
+ if (!this.ondata)
+ err(5);
+ this.c.p(chunk);
+ this.size += chunk.length;
+ if (final)
+ this.crc = this.c.d();
+ this.process(chunk, final || false);
+ };
+ return ZipPassThrough;
+}())));
+
+// I don't extend because TypeScript extension adds 1kB of runtime bloat
+/**
+ * Streaming DEFLATE compression for ZIP archives. Prefer using AsyncZipDeflate
+ * for better performance
+ */
+var ZipDeflate = /*#__PURE__*/ ((/* unused pure expression or super */ null && (function () {
+ /**
+ * Creates a DEFLATE stream that can be added to ZIP archives
+ * @param filename The filename to associate with this data stream
+ * @param opts The compression options
+ */
+ function ZipDeflate(filename, opts) {
+ var _this = this;
+ if (!opts)
+ opts = {};
+ ZipPassThrough.call(this, filename);
+ this.d = new Deflate(opts, function (dat, final) {
+ _this.ondata(null, dat, final);
+ });
+ this.compression = 8;
+ this.flag = dbf(opts.level);
+ }
+ ZipDeflate.prototype.process = function (chunk, final) {
+ try {
+ this.d.push(chunk, final);
+ }
+ catch (e) {
+ this.ondata(e, null, final);
+ }
+ };
+ /**
+ * Pushes a chunk to be deflated
+ * @param chunk The chunk to push
+ * @param final Whether this is the last chunk
+ */
+ ZipDeflate.prototype.push = function (chunk, final) {
+ ZipPassThrough.prototype.push.call(this, chunk, final);
+ };
+ return ZipDeflate;
+}())));
+
+/**
+ * Asynchronous streaming DEFLATE compression for ZIP archives
+ */
+var AsyncZipDeflate = /*#__PURE__*/ ((/* unused pure expression or super */ null && (function () {
+ /**
+ * Creates an asynchronous DEFLATE stream that can be added to ZIP archives
+ * @param filename The filename to associate with this data stream
+ * @param opts The compression options
+ */
+ function AsyncZipDeflate(filename, opts) {
+ var _this = this;
+ if (!opts)
+ opts = {};
+ ZipPassThrough.call(this, filename);
+ this.d = new AsyncDeflate(opts, function (err, dat, final) {
+ _this.ondata(err, dat, final);
+ });
+ this.compression = 8;
+ this.flag = dbf(opts.level);
+ this.terminate = this.d.terminate;
+ }
+ AsyncZipDeflate.prototype.process = function (chunk, final) {
+ this.d.push(chunk, final);
+ };
+ /**
+ * Pushes a chunk to be deflated
+ * @param chunk The chunk to push
+ * @param final Whether this is the last chunk
+ */
+ AsyncZipDeflate.prototype.push = function (chunk, final) {
+ ZipPassThrough.prototype.push.call(this, chunk, final);
+ };
+ return AsyncZipDeflate;
+}())));
+
+// TODO: Better tree shaking
+/**
+ * A zippable archive to which files can incrementally be added
+ */
+var Zip = /*#__PURE__*/ ((/* unused pure expression or super */ null && (function () {
+ /**
+ * Creates an empty ZIP archive to which files can be added
+ * @param cb The callback to call whenever data for the generated ZIP archive
+ * is available
+ */
+ function Zip(cb) {
+ this.ondata = cb;
+ this.u = [];
+ this.d = 1;
+ }
+ /**
+ * Adds a file to the ZIP archive
+ * @param file The file stream to add
+ */
+ Zip.prototype.add = function (file) {
+ var _this = this;
+ if (!this.ondata)
+ err(5);
+ // finishing or finished
+ if (this.d & 2)
+ this.ondata(err(4 + (this.d & 1) * 8, 0, 1), null, false);
+ else {
+ var f = strToU8(file.filename), fl_1 = f.length;
+ var com = file.comment, o = com && strToU8(com);
+ var u = fl_1 != file.filename.length || (o && (com.length != o.length));
+ var hl_1 = fl_1 + exfl(file.extra) + 30;
+ if (fl_1 > 65535)
+ this.ondata(err(11, 0, 1), null, false);
+ var header = new esm_u8(hl_1);
+ wzh(header, 0, file, f, u, -1);
+ var chks_1 = [header];
+ var pAll_1 = function () {
+ for (var _i = 0, chks_2 = chks_1; _i < chks_2.length; _i++) {
+ var chk = chks_2[_i];
+ _this.ondata(null, chk, false);
+ }
+ chks_1 = [];
+ };
+ var tr_1 = this.d;
+ this.d = 0;
+ var ind_1 = this.u.length;
+ var uf_1 = mrg(file, {
+ f: f,
+ u: u,
+ o: o,
+ t: function () {
+ if (file.terminate)
+ file.terminate();
+ },
+ r: function () {
+ pAll_1();
+ if (tr_1) {
+ var nxt = _this.u[ind_1 + 1];
+ if (nxt)
+ nxt.r();
+ else
+ _this.d = 1;
+ }
+ tr_1 = 1;
+ }
+ });
+ var cl_1 = 0;
+ file.ondata = function (err, dat, final) {
+ if (err) {
+ _this.ondata(err, dat, final);
+ _this.terminate();
+ }
+ else {
+ cl_1 += dat.length;
+ chks_1.push(dat);
+ if (final) {
+ var dd = new esm_u8(16);
+ wbytes(dd, 0, 0x8074B50);
+ wbytes(dd, 4, file.crc);
+ wbytes(dd, 8, cl_1);
+ wbytes(dd, 12, file.size);
+ chks_1.push(dd);
+ uf_1.c = cl_1, uf_1.b = hl_1 + cl_1 + 16, uf_1.crc = file.crc, uf_1.size = file.size;
+ if (tr_1)
+ uf_1.r();
+ tr_1 = 1;
+ }
+ else if (tr_1)
+ pAll_1();
+ }
+ };
+ this.u.push(uf_1);
+ }
+ };
+ /**
+ * Ends the process of adding files and prepares to emit the final chunks.
+ * This *must* be called after adding all desired files for the resulting
+ * ZIP file to work properly.
+ */
+ Zip.prototype.end = function () {
+ var _this = this;
+ if (this.d & 2) {
+ this.ondata(err(4 + (this.d & 1) * 8, 0, 1), null, true);
+ return;
+ }
+ if (this.d)
+ this.e();
+ else
+ this.u.push({
+ r: function () {
+ if (!(_this.d & 1))
+ return;
+ _this.u.splice(-1, 1);
+ _this.e();
+ },
+ t: function () { }
+ });
+ this.d = 3;
+ };
+ Zip.prototype.e = function () {
+ var bt = 0, l = 0, tl = 0;
+ for (var _i = 0, _a = this.u; _i < _a.length; _i++) {
+ var f = _a[_i];
+ tl += 46 + f.f.length + exfl(f.extra) + (f.o ? f.o.length : 0);
+ }
+ var out = new esm_u8(tl + 22);
+ for (var _b = 0, _c = this.u; _b < _c.length; _b++) {
+ var f = _c[_b];
+ wzh(out, bt, f, f.f, f.u, -f.c - 2, l, f.o);
+ bt += 46 + f.f.length + exfl(f.extra) + (f.o ? f.o.length : 0), l += f.b;
+ }
+ wzf(out, bt, this.u.length, tl, l);
+ this.ondata(null, out, true);
+ this.d = 2;
+ };
+ /**
+ * A method to terminate any internal workers used by the stream. Subsequent
+ * calls to add() will fail.
+ */
+ Zip.prototype.terminate = function () {
+ for (var _i = 0, _a = this.u; _i < _a.length; _i++) {
+ var f = _a[_i];
+ f.t();
+ }
+ this.d = 2;
+ };
+ return Zip;
+}())));
+
+function zip(data, opts, cb) {
+ if (!cb)
+ cb = opts, opts = {};
+ if (typeof cb != 'function')
+ err(7);
+ var r = {};
+ fltn(data, '', r, opts);
+ var k = Object.keys(r);
+ var lft = k.length, o = 0, tot = 0;
+ var slft = lft, files = new Array(lft);
+ var term = [];
+ var tAll = function () {
+ for (var i = 0; i < term.length; ++i)
+ term[i]();
+ };
+ var cbd = function (a, b) {
+ mt(function () { cb(a, b); });
+ };
+ mt(function () { cbd = cb; });
+ var cbf = function () {
+ var out = new esm_u8(tot + 22), oe = o, cdl = tot - o;
+ tot = 0;
+ for (var i = 0; i < slft; ++i) {
+ var f = files[i];
+ try {
+ var l = f.c.length;
+ wzh(out, tot, f, f.f, f.u, l);
+ var badd = 30 + f.f.length + exfl(f.extra);
+ var loc = tot + badd;
+ out.set(f.c, loc);
+ wzh(out, o, f, f.f, f.u, l, tot, f.m), o += 16 + badd + (f.m ? f.m.length : 0), tot = loc + l;
+ }
+ catch (e) {
+ return cbd(e, null);
+ }
+ }
+ wzf(out, o, files.length, cdl, oe);
+ cbd(null, out);
+ };
+ if (!lft)
+ cbf();
+ var _loop_1 = function (i) {
+ var fn = k[i];
+ var _a = r[fn], file = _a[0], p = _a[1];
+ var c = crc(), size = file.length;
+ c.p(file);
+ var f = strToU8(fn), s = f.length;
+ var com = p.comment, m = com && strToU8(com), ms = m && m.length;
+ var exl = exfl(p.extra);
+ var compression = p.level == 0 ? 0 : 8;
+ var cbl = function (e, d) {
+ if (e) {
+ tAll();
+ cbd(e, null);
+ }
+ else {
+ var l = d.length;
+ files[i] = mrg(p, {
+ size: size,
+ crc: c.d(),
+ c: d,
+ f: f,
+ m: m,
+ u: s != fn.length || (m && (com.length != ms)),
+ compression: compression
+ });
+ o += 30 + s + exl + l;
+ tot += 76 + 2 * (s + exl) + (ms || 0) + l;
+ if (!--lft)
+ cbf();
+ }
+ };
+ if (s > 65535)
+ cbl(err(11, 0, 1), null);
+ if (!compression)
+ cbl(null, file);
+ else if (size < 160000) {
+ try {
+ cbl(null, deflateSync(file, p));
+ }
+ catch (e) {
+ cbl(e, null);
+ }
+ }
+ else
+ term.push(deflate(file, p, cbl));
+ };
+ // Cannot use lft because it can decrease
+ for (var i = 0; i < slft; ++i) {
+ _loop_1(i);
+ }
+ return tAll;
+}
+/**
+ * Synchronously creates a ZIP file. Prefer using `zip` for better performance
+ * with more than one file.
+ * @param data The directory structure for the ZIP archive
+ * @param opts The main options, merged with per-file options
+ * @returns The generated ZIP archive
+ */
+function zipSync(data, opts) {
+ if (!opts)
+ opts = {};
+ var r = {};
+ var files = [];
+ fltn(data, '', r, opts);
+ var o = 0;
+ var tot = 0;
+ for (var fn in r) {
+ var _a = r[fn], file = _a[0], p = _a[1];
+ var compression = p.level == 0 ? 0 : 8;
+ var f = strToU8(fn), s = f.length;
+ var com = p.comment, m = com && strToU8(com), ms = m && m.length;
+ var exl = exfl(p.extra);
+ if (s > 65535)
+ err(11);
+ var d = compression ? deflateSync(file, p) : file, l = d.length;
+ var c = crc();
+ c.p(file);
+ files.push(mrg(p, {
+ size: file.length,
+ crc: c.d(),
+ c: d,
+ f: f,
+ m: m,
+ u: s != fn.length || (m && (com.length != ms)),
+ o: o,
+ compression: compression
+ }));
+ o += 30 + s + exl + l;
+ tot += 76 + 2 * (s + exl) + (ms || 0) + l;
+ }
+ var out = new esm_u8(tot + 22), oe = o, cdl = tot - o;
+ for (var i = 0; i < files.length; ++i) {
+ var f = files[i];
+ wzh(out, f.o, f, f.f, f.u, f.c.length);
+ var badd = 30 + f.f.length + exfl(f.extra);
+ out.set(f.c, f.o + badd);
+ wzh(out, o, f, f.f, f.u, f.c.length, f.o, f.m), o += 16 + badd + (f.m ? f.m.length : 0);
+ }
+ wzf(out, o, files.length, cdl, oe);
+ return out;
+}
+/**
+ * Streaming pass-through decompression for ZIP archives
+ */
+var UnzipPassThrough = /*#__PURE__*/ ((/* unused pure expression or super */ null && (function () {
+ function UnzipPassThrough() {
+ }
+ UnzipPassThrough.prototype.push = function (data, final) {
+ this.ondata(null, data, final);
+ };
+ UnzipPassThrough.compression = 0;
+ return UnzipPassThrough;
+}())));
+
+/**
+ * Streaming DEFLATE decompression for ZIP archives. Prefer AsyncZipInflate for
+ * better performance.
+ */
+var UnzipInflate = /*#__PURE__*/ ((/* unused pure expression or super */ null && (function () {
+ /**
+ * Creates a DEFLATE decompression that can be used in ZIP archives
+ */
+ function UnzipInflate() {
+ var _this = this;
+ this.i = new Inflate(function (dat, final) {
+ _this.ondata(null, dat, final);
+ });
+ }
+ UnzipInflate.prototype.push = function (data, final) {
+ try {
+ this.i.push(data, final);
+ }
+ catch (e) {
+ this.ondata(e, null, final);
+ }
+ };
+ UnzipInflate.compression = 8;
+ return UnzipInflate;
+}())));
+
+/**
+ * Asynchronous streaming DEFLATE decompression for ZIP archives
+ */
+var AsyncUnzipInflate = /*#__PURE__*/ ((/* unused pure expression or super */ null && (function () {
+ /**
+ * Creates a DEFLATE decompression that can be used in ZIP archives
+ */
+ function AsyncUnzipInflate(_, sz) {
+ var _this = this;
+ if (sz < 320000) {
+ this.i = new Inflate(function (dat, final) {
+ _this.ondata(null, dat, final);
+ });
+ }
+ else {
+ this.i = new AsyncInflate(function (err, dat, final) {
+ _this.ondata(err, dat, final);
+ });
+ this.terminate = this.i.terminate;
+ }
+ }
+ AsyncUnzipInflate.prototype.push = function (data, final) {
+ if (this.i.terminate)
+ data = slc(data, 0);
+ this.i.push(data, final);
+ };
+ AsyncUnzipInflate.compression = 8;
+ return AsyncUnzipInflate;
+}())));
+
+/**
+ * A ZIP archive decompression stream that emits files as they are discovered
+ */
+var Unzip = /*#__PURE__*/ ((/* unused pure expression or super */ null && (function () {
+ /**
+ * Creates a ZIP decompression stream
+ * @param cb The callback to call whenever a file in the ZIP archive is found
+ */
+ function Unzip(cb) {
+ this.onfile = cb;
+ this.k = [];
+ this.o = {
+ 0: UnzipPassThrough
+ };
+ this.p = et;
+ }
+ /**
+ * Pushes a chunk to be unzipped
+ * @param chunk The chunk to push
+ * @param final Whether this is the last chunk
+ */
+ Unzip.prototype.push = function (chunk, final) {
+ var _this = this;
+ if (!this.onfile)
+ err(5);
+ if (!this.p)
+ err(4);
+ if (this.c > 0) {
+ var len = Math.min(this.c, chunk.length);
+ var toAdd = chunk.subarray(0, len);
+ this.c -= len;
+ if (this.d)
+ this.d.push(toAdd, !this.c);
+ else
+ this.k[0].push(toAdd);
+ chunk = chunk.subarray(len);
+ if (chunk.length)
+ return this.push(chunk, final);
+ }
+ else {
+ var f = 0, i = 0, is = void 0, buf = void 0;
+ if (!this.p.length)
+ buf = chunk;
+ else if (!chunk.length)
+ buf = this.p;
+ else {
+ buf = new esm_u8(this.p.length + chunk.length);
+ buf.set(this.p), buf.set(chunk, this.p.length);
+ }
+ var l = buf.length, oc = this.c, add = oc && this.d;
+ var _loop_2 = function () {
+ var _a;
+ var sig = b4(buf, i);
+ if (sig == 0x4034B50) {
+ f = 1, is = i;
+ this_1.d = null;
+ this_1.c = 0;
+ var bf = b2(buf, i + 6), cmp_1 = b2(buf, i + 8), u = bf & 2048, dd = bf & 8, fnl = b2(buf, i + 26), es = b2(buf, i + 28);
+ if (l > i + 30 + fnl + es) {
+ var chks_3 = [];
+ this_1.k.unshift(chks_3);
+ f = 2;
+ var sc_1 = b4(buf, i + 18), su_1 = b4(buf, i + 22);
+ var fn_1 = strFromU8(buf.subarray(i + 30, i += 30 + fnl), !u);
+ if (sc_1 == 4294967295) {
+ _a = dd ? [-2] : z64e(buf, i), sc_1 = _a[0], su_1 = _a[1];
+ }
+ else if (dd)
+ sc_1 = -1;
+ i += es;
+ this_1.c = sc_1;
+ var d_1;
+ var file_1 = {
+ name: fn_1,
+ compression: cmp_1,
+ start: function () {
+ if (!file_1.ondata)
+ err(5);
+ if (!sc_1)
+ file_1.ondata(null, et, true);
+ else {
+ var ctr = _this.o[cmp_1];
+ if (!ctr)
+ file_1.ondata(err(14, 'unknown compression type ' + cmp_1, 1), null, false);
+ d_1 = sc_1 < 0 ? new ctr(fn_1) : new ctr(fn_1, sc_1, su_1);
+ d_1.ondata = function (err, dat, final) { file_1.ondata(err, dat, final); };
+ for (var _i = 0, chks_4 = chks_3; _i < chks_4.length; _i++) {
+ var dat = chks_4[_i];
+ d_1.push(dat, false);
+ }
+ if (_this.k[0] == chks_3 && _this.c)
+ _this.d = d_1;
+ else
+ d_1.push(et, true);
+ }
+ },
+ terminate: function () {
+ if (d_1 && d_1.terminate)
+ d_1.terminate();
+ }
+ };
+ if (sc_1 >= 0)
+ file_1.size = sc_1, file_1.originalSize = su_1;
+ this_1.onfile(file_1);
+ }
+ return "break";
+ }
+ else if (oc) {
+ if (sig == 0x8074B50) {
+ is = i += 12 + (oc == -2 && 8), f = 3, this_1.c = 0;
+ return "break";
+ }
+ else if (sig == 0x2014B50) {
+ is = i -= 4, f = 3, this_1.c = 0;
+ return "break";
+ }
+ }
+ };
+ var this_1 = this;
+ for (; i < l - 4; ++i) {
+ var state_1 = _loop_2();
+ if (state_1 === "break")
+ break;
+ }
+ this.p = et;
+ if (oc < 0) {
+ var dat = f ? buf.subarray(0, is - 12 - (oc == -2 && 8) - (b4(buf, is - 16) == 0x8074B50 && 4)) : buf.subarray(0, i);
+ if (add)
+ add.push(dat, !!f);
+ else
+ this.k[+(f == 2)].push(dat);
+ }
+ if (f & 2)
+ return this.push(buf.subarray(i), final);
+ this.p = buf.subarray(i);
+ }
+ if (final) {
+ if (this.c)
+ err(13);
+ this.p = null;
+ }
+ };
+ /**
+ * Registers a decoder with the stream, allowing for files compressed with
+ * the compression type provided to be expanded correctly
+ * @param decoder The decoder constructor
+ */
+ Unzip.prototype.register = function (decoder) {
+ this.o[decoder.compression] = decoder;
+ };
+ return Unzip;
+}())));
+
+var mt = typeof queueMicrotask == 'function' ? queueMicrotask : typeof setTimeout == 'function' ? setTimeout : function (fn) { fn(); };
+function unzip(data, opts, cb) {
+ if (!cb)
+ cb = opts, opts = {};
+ if (typeof cb != 'function')
+ err(7);
+ var term = [];
+ var tAll = function () {
+ for (var i = 0; i < term.length; ++i)
+ term[i]();
+ };
+ var files = {};
+ var cbd = function (a, b) {
+ mt(function () { cb(a, b); });
+ };
+ mt(function () { cbd = cb; });
+ var e = data.length - 22;
+ for (; b4(data, e) != 0x6054B50; --e) {
+ if (!e || data.length - e > 65558) {
+ cbd(err(13, 0, 1), null);
+ return tAll;
+ }
+ }
+ ;
+ var lft = b2(data, e + 8);
+ if (lft) {
+ var c = lft;
+ var o = b4(data, e + 16);
+ var z = o == 4294967295 || c == 65535;
+ if (z) {
+ var ze = b4(data, e - 12);
+ z = b4(data, ze) == 0x6064B50;
+ if (z) {
+ c = lft = b4(data, ze + 32);
+ o = b4(data, ze + 48);
+ }
+ }
+ var fltr = opts && opts.filter;
+ var _loop_3 = function (i) {
+ var _a = zh(data, o, z), c_1 = _a[0], sc = _a[1], su = _a[2], fn = _a[3], no = _a[4], off = _a[5], b = slzh(data, off);
+ o = no;
+ var cbl = function (e, d) {
+ if (e) {
+ tAll();
+ cbd(e, null);
+ }
+ else {
+ if (d)
+ files[fn] = d;
+ if (!--lft)
+ cbd(null, files);
+ }
+ };
+ if (!fltr || fltr({
+ name: fn,
+ size: sc,
+ originalSize: su,
+ compression: c_1
+ })) {
+ if (!c_1)
+ cbl(null, slc(data, b, b + sc));
+ else if (c_1 == 8) {
+ var infl = data.subarray(b, b + sc);
+ // Synchronously decompress under 512KB, or barely-compressed data
+ if (su < 524288 || sc > 0.8 * su) {
+ try {
+ cbl(null, inflateSync(infl, { out: new esm_u8(su) }));
+ }
+ catch (e) {
+ cbl(e, null);
+ }
+ }
+ else
+ term.push(inflate(infl, { size: su }, cbl));
+ }
+ else
+ cbl(err(14, 'unknown compression type ' + c_1, 1), null);
+ }
+ else
+ cbl(null, null);
+ };
+ for (var i = 0; i < c; ++i) {
+ _loop_3(i);
+ }
+ }
+ else
+ cbd(null, {});
+ return tAll;
+}
+/**
+ * Synchronously decompresses a ZIP archive. Prefer using `unzip` for better
+ * performance with more than one file.
+ * @param data The raw compressed ZIP file
+ * @param opts The ZIP extraction options
+ * @returns The decompressed files
+ */
+function unzipSync(data, opts) {
+ var files = {};
+ var e = data.length - 22;
+ for (; b4(data, e) != 0x6054B50; --e) {
+ if (!e || data.length - e > 65558)
+ err(13);
+ }
+ ;
+ var c = b2(data, e + 8);
+ if (!c)
+ return {};
+ var o = b4(data, e + 16);
+ var z = o == 4294967295 || c == 65535;
+ if (z) {
+ var ze = b4(data, e - 12);
+ z = b4(data, ze) == 0x6064B50;
+ if (z) {
+ c = b4(data, ze + 32);
+ o = b4(data, ze + 48);
+ }
+ }
+ var fltr = opts && opts.filter;
+ for (var i = 0; i < c; ++i) {
+ var _a = zh(data, o, z), c_2 = _a[0], sc = _a[1], su = _a[2], fn = _a[3], no = _a[4], off = _a[5], b = slzh(data, off);
+ o = no;
+ if (!fltr || fltr({
+ name: fn,
+ size: sc,
+ originalSize: su,
+ compression: c_2
+ })) {
+ if (!c_2)
+ files[fn] = slc(data, b, b + sc);
+ else if (c_2 == 8)
+ files[fn] = inflateSync(data.subarray(b, b + sc), { out: new esm_u8(su) });
+ else
+ err(14, 'unknown compression type ' + c_2);
+ }
+ }
+ return files;
+}
+
+;// CONCATENATED MODULE: ./src/services/data.ts
+
+var data_async = (__this, __arguments, generator) => {
+ return new Promise((resolve, reject) => {
+ var fulfilled = (value) => {
+ try {
+ step(generator.next(value));
+ } catch (e) {
+ reject(e);
+ }
+ };
+ var rejected = (value) => {
+ try {
+ step(generator.throw(value));
+ } catch (e) {
+ reject(e);
+ }
+ };
+ var step = (x) => x.done ? resolve(x.value) : Promise.resolve(x.value).then(fulfilled, rejected);
+ step((generator = generator.apply(__this, __arguments)).next());
+ });
+};
+
+
+
+function existsAsync(fileOrDir) {
+ return data_async(this, null, function* () {
+ try {
+ yield (0,promises_namespaceObject.stat)(fileOrDir);
+ return true;
+ } catch (e) {
+ return false;
+ }
+ });
+}
+function zipAsync(file) {
+ return new Promise((res, rej) => {
+ zip(file, { mtime: /* @__PURE__ */ new Date("1/1/1980") }, (err, data) => {
+ if (err) {
+ rej(err);
+ return;
+ }
+ res(data);
+ });
+ });
+}
+function unzipAsync(data) {
+ return new Promise((res, rej) => {
+ unzip(data, {}, (err, data2) => {
+ if (err) {
+ rej(err);
+ return;
+ }
+ res(data2);
+ });
+ });
+}
+function saveUserFile(_0) {
+ return data_async(this, arguments, function* ({
+ fileName,
+ userDirectory,
+ dataString
+ }) {
+ fileName = fileName.toLowerCase();
+ const filePath = external_path_default().join(userDirectory, fileName);
+ const payload = yield zipAsync({
+ [fileName]: new TextEncoder().encode(dataString)
+ });
+ if (!(yield existsAsync(userDirectory))) {
+ yield (0,promises_namespaceObject.mkdir)(userDirectory, { recursive: true });
+ }
+ yield (0,promises_namespaceObject.writeFile)(filePath + ".zip", payload);
+ yield (0,promises_namespaceObject.writeFile)(filePath, dataString);
+ });
+}
+function loadSavedEvents(_0) {
+ return data_async(this, arguments, function* ({
+ name,
+ userDirectory,
+ deployedBlock
+ }) {
+ const filePath = external_path_default().join(userDirectory, `${name}.json`.toLowerCase());
+ if (!(yield existsAsync(filePath))) {
+ return {
+ events: [],
+ lastBlock: null
+ };
+ }
+ try {
+ const events = JSON.parse(yield (0,promises_namespaceObject.readFile)(filePath, { encoding: "utf8" }));
+ return {
+ events,
+ lastBlock: events && events.length ? events[events.length - 1].blockNumber : deployedBlock
+ };
+ } catch (err) {
+ console.log("Method loadSavedEvents has error");
+ console.log(err);
+ return {
+ events: [],
+ lastBlock: deployedBlock
+ };
+ }
+ });
+}
+function download(_0) {
+ return data_async(this, arguments, function* ({ name, cacheDirectory }) {
+ const fileName = `${name}.json`.toLowerCase();
+ const zipName = `${fileName}.zip`;
+ const zipPath = external_path_default().join(cacheDirectory, zipName);
+ const data = yield (0,promises_namespaceObject.readFile)(zipPath);
+ const { [fileName]: content } = yield unzipAsync(data);
+ return new TextDecoder().decode(content);
+ });
+}
+function loadCachedEvents(_0) {
+ return data_async(this, arguments, function* ({
+ name,
+ cacheDirectory,
+ deployedBlock
+ }) {
+ try {
+ const module = yield download({ cacheDirectory, name });
+ if (module) {
+ const events = JSON.parse(module);
+ const lastBlock = events && events.length ? events[events.length - 1].blockNumber : deployedBlock;
+ return {
+ events,
+ lastBlock
+ };
+ }
+ return {
+ events: [],
+ lastBlock: deployedBlock
+ };
+ } catch (err) {
+ console.log("Method loadCachedEvents has error");
+ console.log(err);
+ return {
+ events: [],
+ lastBlock: deployedBlock
+ };
+ }
+ });
+}
+
+;// CONCATENATED MODULE: ./src/services/nodeEvents.ts
+
+var nodeEvents_async = (__this, __arguments, generator) => {
+ return new Promise((resolve, reject) => {
+ var fulfilled = (value) => {
+ try {
+ step(generator.next(value));
+ } catch (e) {
+ reject(e);
+ }
+ };
+ var rejected = (value) => {
+ try {
+ step(generator.throw(value));
+ } catch (e) {
+ reject(e);
+ }
+ };
+ var step = (x) => x.done ? resolve(x.value) : Promise.resolve(x.value).then(fulfilled, rejected);
+ step((generator = generator.apply(__this, __arguments)).next());
+ });
+};
+
+
+
+
+class NodeTornadoService extends BaseTornadoService {
+ constructor({
+ netId,
+ provider,
+ graphApi,
+ subgraphName,
+ Tornado,
+ type,
+ amount,
+ currency,
+ deployedBlock,
+ fetchDataOptions,
+ cacheDirectory,
+ userDirectory
+ }) {
+ super({
+ netId,
+ provider,
+ graphApi,
+ subgraphName,
+ Tornado,
+ type,
+ amount,
+ currency,
+ deployedBlock,
+ fetchDataOptions
+ });
+ this.cacheDirectory = cacheDirectory;
+ this.userDirectory = userDirectory;
+ }
+ updateEventProgress({ type, fromBlock, toBlock, count }) {
+ if (toBlock) {
+ console.log(`fromBlock - ${fromBlock}`);
+ console.log(`toBlock - ${toBlock}`);
+ if (count) {
+ console.log(`downloaded ${type} events count - ${count}`);
+ console.log("____________________________________________");
+ console.log(`Fetched ${type} events from ${fromBlock} to ${toBlock}
+`);
+ }
+ }
+ }
+ updateTransactionProgress({ currentIndex, totalIndex }) {
+ if (totalIndex) {
+ console.log(`Fetched ${currentIndex} deposit txs of ${totalIndex}`);
+ }
+ }
+ updateBlockProgress({ currentIndex, totalIndex }) {
+ if (totalIndex) {
+ console.log(`Fetched ${currentIndex} withdrawal blocks of ${totalIndex}`);
+ }
+ }
+ updateGraphProgress({ type, fromBlock, toBlock, count }) {
+ if (toBlock) {
+ console.log(`fromBlock - ${fromBlock}`);
+ console.log(`toBlock - ${toBlock}`);
+ if (count) {
+ console.log(`downloaded ${type} events from graph node count - ${count}`);
+ console.log("____________________________________________");
+ console.log(`Fetched ${type} events from graph node ${fromBlock} to ${toBlock}
+`);
+ }
+ }
+ }
+ getEventsFromDB() {
+ return nodeEvents_async(this, null, function* () {
+ if (!this.userDirectory) {
+ console.log(
+ "Updating events for",
+ this.amount,
+ this.currency.toUpperCase(),
+ `${this.getType().toLowerCase()}s
+`
+ );
+ console.log(`savedEvents count - ${0}`);
+ console.log(`savedEvents lastBlock - ${this.deployedBlock}
+`);
+ return {
+ events: [],
+ lastBlock: this.deployedBlock
+ };
+ }
+ const savedEvents = yield loadSavedEvents({
+ name: this.getInstanceName(),
+ userDirectory: this.userDirectory,
+ deployedBlock: this.deployedBlock
+ });
+ console.log("Updating events for", this.amount, this.currency.toUpperCase(), `${this.getType().toLowerCase()}s
+`);
+ console.log(`savedEvents count - ${savedEvents.events.length}`);
+ console.log(`savedEvents lastBlock - ${savedEvents.lastBlock}
+`);
+ return savedEvents;
+ });
+ }
+ getEventsFromCache() {
+ return nodeEvents_async(this, null, function* () {
+ if (!this.cacheDirectory) {
+ console.log(`cachedEvents count - ${0}`);
+ console.log(`cachedEvents lastBlock - ${this.deployedBlock}
+`);
+ return {
+ events: [],
+ lastBlock: this.deployedBlock
+ };
+ }
+ const cachedEvents = yield loadCachedEvents({
+ name: this.getInstanceName(),
+ cacheDirectory: this.cacheDirectory,
+ deployedBlock: this.deployedBlock
+ });
+ console.log(`cachedEvents count - ${cachedEvents.events.length}`);
+ console.log(`cachedEvents lastBlock - ${cachedEvents.lastBlock}
+`);
+ return cachedEvents;
+ });
+ }
+ saveEvents(_0) {
+ return nodeEvents_async(this, arguments, function* ({ events, lastBlock }) {
+ const instanceName = this.getInstanceName();
+ console.log("\ntotalEvents count - ", events.length);
+ console.log(
+ `totalEvents lastBlock - ${events[events.length - 1] ? events[events.length - 1].blockNumber : lastBlock}
+`
+ );
+ const eventTable = new (cli_table3_default())();
+ eventTable.push(
+ [{ colSpan: 2, content: `${this.getType()}s`, hAlign: "center" }],
+ ["Instance", `${this.netId} chain ${this.amount} ${this.currency.toUpperCase()}`],
+ ["Anonymity set", `${events.length} equal user ${this.getType().toLowerCase()}s`],
+ [{ colSpan: 2, content: `Latest ${this.getType().toLowerCase()}s` }],
+ ...events.slice(events.length - 10).reverse().map(({ timestamp }, index) => {
+ const eventIndex = events.length - index;
+ const eventTime = moment_default().unix(timestamp).fromNow();
+ return [eventIndex, eventTime];
+ })
+ );
+ console.log(eventTable.toString() + "\n");
+ if (this.userDirectory) {
+ yield saveUserFile({
+ fileName: instanceName + ".json",
+ userDirectory: this.userDirectory,
+ dataString: JSON.stringify(events, null, 2) + "\n"
+ });
+ }
+ });
+ }
+}
+class NodeEchoService extends BaseEchoService {
+ constructor({
+ netId,
+ provider,
+ graphApi,
+ subgraphName,
+ Echoer,
+ deployedBlock,
+ fetchDataOptions,
+ cacheDirectory,
+ userDirectory
+ }) {
+ super({
+ netId,
+ provider,
+ graphApi,
+ subgraphName,
+ Echoer,
+ deployedBlock,
+ fetchDataOptions
+ });
+ this.cacheDirectory = cacheDirectory;
+ this.userDirectory = userDirectory;
+ }
+ updateEventProgress({ type, fromBlock, toBlock, count }) {
+ if (toBlock) {
+ console.log(`fromBlock - ${fromBlock}`);
+ console.log(`toBlock - ${toBlock}`);
+ if (count) {
+ console.log(`downloaded ${type} events count - ${count}`);
+ console.log("____________________________________________");
+ console.log(`Fetched ${type} events from ${fromBlock} to ${toBlock}
+`);
+ }
+ }
+ }
+ updateGraphProgress({ type, fromBlock, toBlock, count }) {
+ if (toBlock) {
+ console.log(`fromBlock - ${fromBlock}`);
+ console.log(`toBlock - ${toBlock}`);
+ if (count) {
+ console.log(`downloaded ${type} events from graph node count - ${count}`);
+ console.log("____________________________________________");
+ console.log(`Fetched ${type} events from graph node ${fromBlock} to ${toBlock}
+`);
+ }
+ }
+ }
+ getEventsFromDB() {
+ return nodeEvents_async(this, null, function* () {
+ if (!this.userDirectory) {
+ console.log(`Updating events for ${this.netId} chain echo events
+`);
+ console.log(`savedEvents count - ${0}`);
+ console.log(`savedEvents lastBlock - ${this.deployedBlock}
+`);
+ return {
+ events: [],
+ lastBlock: this.deployedBlock
+ };
+ }
+ const savedEvents = yield loadSavedEvents({
+ name: this.getInstanceName(),
+ userDirectory: this.userDirectory,
+ deployedBlock: this.deployedBlock
+ });
+ console.log(`Updating events for ${this.netId} chain echo events
+`);
+ console.log(`savedEvents count - ${savedEvents.events.length}`);
+ console.log(`savedEvents lastBlock - ${savedEvents.lastBlock}
+`);
+ return savedEvents;
+ });
+ }
+ getEventsFromCache() {
+ return nodeEvents_async(this, null, function* () {
+ if (!this.cacheDirectory) {
+ console.log(`cachedEvents count - ${0}`);
+ console.log(`cachedEvents lastBlock - ${this.deployedBlock}
+`);
+ return {
+ events: [],
+ lastBlock: this.deployedBlock
+ };
+ }
+ const cachedEvents = yield loadCachedEvents({
+ name: this.getInstanceName(),
+ cacheDirectory: this.cacheDirectory,
+ deployedBlock: this.deployedBlock
+ });
+ console.log(`cachedEvents count - ${cachedEvents.events.length}`);
+ console.log(`cachedEvents lastBlock - ${cachedEvents.lastBlock}
+`);
+ return cachedEvents;
+ });
+ }
+ saveEvents(_0) {
+ return nodeEvents_async(this, arguments, function* ({ events, lastBlock }) {
+ const instanceName = this.getInstanceName();
+ console.log("\ntotalEvents count - ", events.length);
+ console.log(
+ `totalEvents lastBlock - ${events[events.length - 1] ? events[events.length - 1].blockNumber : lastBlock}
+`
+ );
+ const eventTable = new (cli_table3_default())();
+ eventTable.push(
+ [{ colSpan: 2, content: "Echo Accounts", hAlign: "center" }],
+ ["Network", `${this.netId} chain`],
+ ["Events", `${events.length} events`],
+ [{ colSpan: 2, content: "Latest events" }],
+ ...events.slice(events.length - 10).reverse().map(({ blockNumber }, index) => {
+ const eventIndex = events.length - index;
+ return [eventIndex, blockNumber];
+ })
+ );
+ console.log(eventTable.toString() + "\n");
+ if (this.userDirectory) {
+ yield saveUserFile({
+ fileName: instanceName + ".json",
+ userDirectory: this.userDirectory,
+ dataString: JSON.stringify(events, null, 2) + "\n"
+ });
+ }
+ });
+ }
+}
+class NodeEncryptedNotesService extends BaseEncryptedNotesService {
+ constructor({
+ netId,
+ provider,
+ graphApi,
+ subgraphName,
+ Router,
+ deployedBlock,
+ fetchDataOptions,
+ cacheDirectory,
+ userDirectory
+ }) {
+ super({
+ netId,
+ provider,
+ graphApi,
+ subgraphName,
+ Router,
+ deployedBlock,
+ fetchDataOptions
+ });
+ this.cacheDirectory = cacheDirectory;
+ this.userDirectory = userDirectory;
+ }
+ updateEventProgress({ type, fromBlock, toBlock, count }) {
+ if (toBlock) {
+ console.log(`fromBlock - ${fromBlock}`);
+ console.log(`toBlock - ${toBlock}`);
+ if (count) {
+ console.log(`downloaded ${type} events count - ${count}`);
+ console.log("____________________________________________");
+ console.log(`Fetched ${type} events from ${fromBlock} to ${toBlock}
+`);
+ }
+ }
+ }
+ updateGraphProgress({ type, fromBlock, toBlock, count }) {
+ if (toBlock) {
+ console.log(`fromBlock - ${fromBlock}`);
+ console.log(`toBlock - ${toBlock}`);
+ if (count) {
+ console.log(`downloaded ${type} events from graph node count - ${count}`);
+ console.log("____________________________________________");
+ console.log(`Fetched ${type} events from graph node ${fromBlock} to ${toBlock}
+`);
+ }
+ }
+ }
+ getEventsFromDB() {
+ return nodeEvents_async(this, null, function* () {
+ if (!this.userDirectory) {
+ console.log(`Updating events for ${this.netId} chain encrypted events
+`);
+ console.log(`savedEvents count - ${0}`);
+ console.log(`savedEvents lastBlock - ${this.deployedBlock}
+`);
+ return {
+ events: [],
+ lastBlock: this.deployedBlock
+ };
+ }
+ const savedEvents = yield loadSavedEvents({
+ name: this.getInstanceName(),
+ userDirectory: this.userDirectory,
+ deployedBlock: this.deployedBlock
+ });
+ console.log(`Updating events for ${this.netId} chain encrypted events
+`);
+ console.log(`savedEvents count - ${savedEvents.events.length}`);
+ console.log(`savedEvents lastBlock - ${savedEvents.lastBlock}
+`);
+ return savedEvents;
+ });
+ }
+ getEventsFromCache() {
+ return nodeEvents_async(this, null, function* () {
+ if (!this.cacheDirectory) {
+ console.log(`cachedEvents count - ${0}`);
+ console.log(`cachedEvents lastBlock - ${this.deployedBlock}
+`);
+ return {
+ events: [],
+ lastBlock: this.deployedBlock
+ };
+ }
+ const cachedEvents = yield loadCachedEvents({
+ name: this.getInstanceName(),
+ cacheDirectory: this.cacheDirectory,
+ deployedBlock: this.deployedBlock
+ });
+ console.log(`cachedEvents count - ${cachedEvents.events.length}`);
+ console.log(`cachedEvents lastBlock - ${cachedEvents.lastBlock}
+`);
+ return cachedEvents;
+ });
+ }
+ saveEvents(_0) {
+ return nodeEvents_async(this, arguments, function* ({ events, lastBlock }) {
+ const instanceName = this.getInstanceName();
+ console.log("\ntotalEvents count - ", events.length);
+ console.log(
+ `totalEvents lastBlock - ${events[events.length - 1] ? events[events.length - 1].blockNumber : lastBlock}
+`
+ );
+ const eventTable = new (cli_table3_default())();
+ eventTable.push(
+ [{ colSpan: 2, content: "Encrypted Notes", hAlign: "center" }],
+ ["Network", `${this.netId} chain`],
+ ["Events", `${events.length} events`],
+ [{ colSpan: 2, content: "Latest events" }],
+ ...events.slice(events.length - 10).reverse().map(({ blockNumber }, index) => {
+ const eventIndex = events.length - index;
+ return [eventIndex, blockNumber];
+ })
+ );
+ console.log(eventTable.toString() + "\n");
+ if (this.userDirectory) {
+ yield saveUserFile({
+ fileName: instanceName + ".json",
+ userDirectory: this.userDirectory,
+ dataString: JSON.stringify(events, null, 2) + "\n"
+ });
+ }
+ });
+ }
+}
+class NodeGovernanceService extends BaseGovernanceService {
+ constructor({
+ netId,
+ provider,
+ graphApi,
+ subgraphName,
+ Governance,
+ deployedBlock,
+ fetchDataOptions,
+ cacheDirectory,
+ userDirectory
+ }) {
+ super({
+ netId,
+ provider,
+ graphApi,
+ subgraphName,
+ Governance,
+ deployedBlock,
+ fetchDataOptions
+ });
+ this.cacheDirectory = cacheDirectory;
+ this.userDirectory = userDirectory;
+ }
+ updateEventProgress({ type, fromBlock, toBlock, count }) {
+ if (toBlock) {
+ console.log(`fromBlock - ${fromBlock}`);
+ console.log(`toBlock - ${toBlock}`);
+ if (count) {
+ console.log(`downloaded ${type} events count - ${count}`);
+ console.log("____________________________________________");
+ console.log(`Fetched ${type} events from ${fromBlock} to ${toBlock}
+`);
+ }
+ }
+ }
+ updateGraphProgress({ type, fromBlock, toBlock, count }) {
+ if (toBlock) {
+ console.log(`fromBlock - ${fromBlock}`);
+ console.log(`toBlock - ${toBlock}`);
+ if (count) {
+ console.log(`downloaded ${type} events from graph node count - ${count}`);
+ console.log("____________________________________________");
+ console.log(`Fetched ${type} events from graph node ${fromBlock} to ${toBlock}
+`);
+ }
+ }
+ }
+ updateTransactionProgress({ currentIndex, totalIndex }) {
+ if (totalIndex) {
+ console.log(`Fetched ${currentIndex} governance txs of ${totalIndex}`);
+ }
+ }
+ getEventsFromDB() {
+ return nodeEvents_async(this, null, function* () {
+ if (!this.userDirectory) {
+ console.log(`Updating events for ${this.netId} chain governance events
+`);
+ console.log(`savedEvents count - ${0}`);
+ console.log(`savedEvents lastBlock - ${this.deployedBlock}
+`);
+ return {
+ events: [],
+ lastBlock: this.deployedBlock
+ };
+ }
+ const savedEvents = yield loadSavedEvents({
+ name: this.getInstanceName(),
+ userDirectory: this.userDirectory,
+ deployedBlock: this.deployedBlock
+ });
+ console.log(`Updating events for ${this.netId} chain governance events
+`);
+ console.log(`savedEvents count - ${savedEvents.events.length}`);
+ console.log(`savedEvents lastBlock - ${savedEvents.lastBlock}
+`);
+ return savedEvents;
+ });
+ }
+ getEventsFromCache() {
+ return nodeEvents_async(this, null, function* () {
+ if (!this.cacheDirectory) {
+ console.log(`cachedEvents count - ${0}`);
+ console.log(`cachedEvents lastBlock - ${this.deployedBlock}
+`);
+ return {
+ events: [],
+ lastBlock: this.deployedBlock
+ };
+ }
+ const cachedEvents = yield loadCachedEvents({
+ name: this.getInstanceName(),
+ cacheDirectory: this.cacheDirectory,
+ deployedBlock: this.deployedBlock
+ });
+ console.log(`cachedEvents count - ${cachedEvents.events.length}`);
+ console.log(`cachedEvents lastBlock - ${cachedEvents.lastBlock}
+`);
+ return cachedEvents;
+ });
+ }
+ saveEvents(_0) {
+ return nodeEvents_async(this, arguments, function* ({ events, lastBlock }) {
+ const instanceName = this.getInstanceName();
+ console.log("\ntotalEvents count - ", events.length);
+ console.log(
+ `totalEvents lastBlock - ${events[events.length - 1] ? events[events.length - 1].blockNumber : lastBlock}
+`
+ );
+ const eventTable = new (cli_table3_default())();
+ eventTable.push(
+ [{ colSpan: 2, content: "Governance Events", hAlign: "center" }],
+ ["Network", `${this.netId} chain`],
+ ["Events", `${events.length} events`],
+ [{ colSpan: 2, content: "Latest events" }],
+ ...events.slice(events.length - 10).reverse().map(({ blockNumber }, index) => {
+ const eventIndex = events.length - index;
+ return [eventIndex, blockNumber];
+ })
+ );
+ console.log(eventTable.toString() + "\n");
+ if (this.userDirectory) {
+ yield saveUserFile({
+ fileName: instanceName + ".json",
+ userDirectory: this.userDirectory,
+ dataString: JSON.stringify(events, null, 2) + "\n"
+ });
+ }
+ });
+ }
+}
+class NodeRegistryService extends BaseRegistryService {
+ constructor({
+ netId,
+ provider,
+ graphApi,
+ subgraphName,
+ RelayerRegistry,
+ deployedBlock,
+ fetchDataOptions,
+ cacheDirectory,
+ userDirectory
+ }) {
+ super({
+ netId,
+ provider,
+ graphApi,
+ subgraphName,
+ RelayerRegistry,
+ deployedBlock,
+ fetchDataOptions
+ });
+ this.cacheDirectory = cacheDirectory;
+ this.userDirectory = userDirectory;
+ }
+ updateEventProgress({ type, fromBlock, toBlock, count }) {
+ if (toBlock) {
+ console.log(`fromBlock - ${fromBlock}`);
+ console.log(`toBlock - ${toBlock}`);
+ if (count) {
+ console.log(`downloaded ${type} events count - ${count}`);
+ console.log("____________________________________________");
+ console.log(`Fetched ${type} events from ${fromBlock} to ${toBlock}
+`);
+ }
+ }
+ }
+ updateGraphProgress({ type, fromBlock, toBlock, count }) {
+ if (toBlock) {
+ console.log(`fromBlock - ${fromBlock}`);
+ console.log(`toBlock - ${toBlock}`);
+ if (count) {
+ console.log(`downloaded ${type} events from graph node count - ${count}`);
+ console.log("____________________________________________");
+ console.log(`Fetched ${type} events from graph node ${fromBlock} to ${toBlock}
+`);
+ }
+ }
+ }
+ getEventsFromDB() {
+ return nodeEvents_async(this, null, function* () {
+ if (!this.userDirectory) {
+ console.log(`Updating events for ${this.netId} chain registry events
+`);
+ console.log(`savedEvents count - ${0}`);
+ console.log(`savedEvents lastBlock - ${this.deployedBlock}
+`);
+ return {
+ events: [],
+ lastBlock: this.deployedBlock
+ };
+ }
+ const savedEvents = yield loadSavedEvents({
+ name: this.getInstanceName(),
+ userDirectory: this.userDirectory,
+ deployedBlock: this.deployedBlock
+ });
+ console.log(`Updating events for ${this.netId} chain registry events
+`);
+ console.log(`savedEvents count - ${savedEvents.events.length}`);
+ console.log(`savedEvents lastBlock - ${savedEvents.lastBlock}
+`);
+ return savedEvents;
+ });
+ }
+ getEventsFromCache() {
+ return nodeEvents_async(this, null, function* () {
+ if (!this.cacheDirectory) {
+ console.log(`cachedEvents count - ${0}`);
+ console.log(`cachedEvents lastBlock - ${this.deployedBlock}
+`);
+ return {
+ events: [],
+ lastBlock: this.deployedBlock
+ };
+ }
+ const cachedEvents = yield loadCachedEvents({
+ name: this.getInstanceName(),
+ cacheDirectory: this.cacheDirectory,
+ deployedBlock: this.deployedBlock
+ });
+ console.log(`cachedEvents count - ${cachedEvents.events.length}`);
+ console.log(`cachedEvents lastBlock - ${cachedEvents.lastBlock}
+`);
+ return cachedEvents;
+ });
+ }
+ saveEvents(_0) {
+ return nodeEvents_async(this, arguments, function* ({ events, lastBlock }) {
+ const instanceName = this.getInstanceName();
+ console.log("\ntotalEvents count - ", events.length);
+ console.log(
+ `totalEvents lastBlock - ${events[events.length - 1] ? events[events.length - 1].blockNumber : lastBlock}
+`
+ );
+ const eventTable = new (cli_table3_default())();
+ eventTable.push(
+ [{ colSpan: 2, content: "Registered Relayers", hAlign: "center" }],
+ ["Network", `${this.netId} chain`],
+ ["Events", `${events.length} events`],
+ [{ colSpan: 2, content: "Latest events" }],
+ ...events.slice(events.length - 10).reverse().map(({ blockNumber }, index) => {
+ const eventIndex = events.length - index;
+ return [eventIndex, blockNumber];
+ })
+ );
+ console.log(eventTable.toString() + "\n");
+ if (this.userDirectory) {
+ yield saveUserFile({
+ fileName: instanceName + ".json",
+ userDirectory: this.userDirectory,
+ dataString: JSON.stringify(events, null, 2) + "\n"
+ });
+ }
+ });
+ }
+}
+
+;// CONCATENATED MODULE: ./src/services/parser.ts
+
+
+
+
+function parseNumber(value) {
+ if (!value || isNaN(Number(value))) {
+ throw new InvalidArgumentError("Invalid Number");
+ }
+ return Number(value);
+}
+function parseUrl(value) {
+ if (!value || !validateUrl(value, ["http:", "https:"])) {
+ throw new InvalidArgumentError("Invalid URL");
+ }
+ return value;
+}
+function parseRelayer(value) {
+ if (!value || !(value.endsWith(".eth") || validateUrl(value, ["http:", "https:"]))) {
+ throw new InvalidArgumentError("Invalid Relayer ETH address or URL");
+ }
+ return value;
+}
+function parseAddress(value) {
+ if (!value) {
+ throw new InvalidArgumentError("Invalid Address");
+ }
+ try {
+ return address_getAddress(value);
+ } catch (e) {
+ throw new InvalidArgumentError("Invalid Address");
+ }
+}
+function parseMnemonic(value) {
+ if (!value) {
+ throw new InvalidArgumentError("Invalid Mnemonic");
+ }
+ try {
+ Mnemonic.fromPhrase(value);
+ } catch (e) {
+ throw new InvalidArgumentError("Invalid Mnemonic");
+ }
+ return value;
+}
+function parseKey(value) {
+ if (!value) {
+ throw new InvalidArgumentError("Invalid Private Key");
+ }
+ if (value.length === 64) {
+ value = "0x" + value;
+ }
+ try {
+ computeAddress(value);
+ } catch (e) {
+ throw new InvalidArgumentError("Invalid Private Key");
+ }
+ return value;
+}
+function parseRecoveryKey(value) {
+ if (!value) {
+ throw new InvalidArgumentError("Invalid Recovery Key");
+ }
+ try {
+ computeAddress("0x" + value);
+ } catch (e) {
+ throw new InvalidArgumentError("Invalid Recovery Key");
+ }
+ return value;
+}
+
+// EXTERNAL MODULE: ./node_modules/bloomfilter.js/index.js
+var bloomfilter_js = __webpack_require__(65403);
+var bloomfilter_js_default = /*#__PURE__*/__webpack_require__.n(bloomfilter_js);
+;// CONCATENATED MODULE: ./src/services/treeCache.ts
+
+var treeCache_defProp = Object.defineProperty;
+var treeCache_defProps = Object.defineProperties;
+var treeCache_getOwnPropDescs = Object.getOwnPropertyDescriptors;
+var treeCache_getOwnPropSymbols = Object.getOwnPropertySymbols;
+var treeCache_hasOwnProp = Object.prototype.hasOwnProperty;
+var treeCache_propIsEnum = Object.prototype.propertyIsEnumerable;
+var treeCache_defNormalProp = (obj, key, value) => key in obj ? treeCache_defProp(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value;
+var treeCache_spreadValues = (a, b) => {
+ for (var prop in b || (b = {}))
+ if (treeCache_hasOwnProp.call(b, prop))
+ treeCache_defNormalProp(a, prop, b[prop]);
+ if (treeCache_getOwnPropSymbols)
+ for (var prop of treeCache_getOwnPropSymbols(b)) {
+ if (treeCache_propIsEnum.call(b, prop))
+ treeCache_defNormalProp(a, prop, b[prop]);
+ }
+ return a;
+};
+var treeCache_spreadProps = (a, b) => treeCache_defProps(a, treeCache_getOwnPropDescs(b));
+var __objRest = (source, exclude) => {
+ var target = {};
+ for (var prop in source)
+ if (treeCache_hasOwnProp.call(source, prop) && exclude.indexOf(prop) < 0)
+ target[prop] = source[prop];
+ if (source != null && treeCache_getOwnPropSymbols)
+ for (var prop of treeCache_getOwnPropSymbols(source)) {
+ if (exclude.indexOf(prop) < 0 && treeCache_propIsEnum.call(source, prop))
+ target[prop] = source[prop];
+ }
+ return target;
+};
+var treeCache_async = (__this, __arguments, generator) => {
+ return new Promise((resolve, reject) => {
+ var fulfilled = (value) => {
+ try {
+ step(generator.next(value));
+ } catch (e) {
+ reject(e);
+ }
+ };
+ var rejected = (value) => {
+ try {
+ step(generator.throw(value));
+ } catch (e) {
+ reject(e);
+ }
+ };
+ var step = (x) => x.done ? resolve(x.value) : Promise.resolve(x.value).then(fulfilled, rejected);
+ step((generator = generator.apply(__this, __arguments)).next());
+ });
+};
+
+
+class TreeCache {
+ constructor({ netId, amount, currency, userDirectory, PARTS_COUNT = 4 }) {
+ this.netId = netId;
+ this.amount = amount;
+ this.currency = currency;
+ this.userDirectory = userDirectory;
+ this.PARTS_COUNT = PARTS_COUNT;
+ }
+ getInstanceName() {
+ return `deposits_${this.netId}_${this.currency}_${this.amount}`;
+ }
+ createTree(events, tree) {
+ return treeCache_async(this, null, function* () {
+ const bloom = new (bloomfilter_js_default())(events.length);
+ console.log(`Creating cached tree for ${this.getInstanceName()}
+`);
+ const eventsData = events.reduce(
+ (acc, _a, i) => {
+ var _b = _a, { leafIndex, commitment } = _b, rest = __objRest(_b, ["leafIndex", "commitment"]);
+ if (leafIndex !== i) {
+ throw new Error(`leafIndex (${leafIndex}) !== i (${i})`);
+ }
+ acc[commitment] = treeCache_spreadProps(treeCache_spreadValues({}, rest), { leafIndex });
+ return acc;
+ },
+ {}
+ );
+ const slices = tree.getTreeSlices(this.PARTS_COUNT);
+ yield Promise.all(
+ slices.map((slice, index) => treeCache_async(this, null, function* () {
+ const metadata = slice.elements.reduce((acc, curr) => {
+ if (index < this.PARTS_COUNT - 1) {
+ bloom.add(curr);
+ }
+ acc.push(eventsData[curr]);
+ return acc;
+ }, []);
+ const dataString2 = JSON.stringify(
+ treeCache_spreadProps(treeCache_spreadValues({}, slice), {
+ metadata
+ }),
+ null,
+ 2
+ ) + "\n";
+ const fileName2 = `${this.getInstanceName()}_slice${index + 1}.json`;
+ yield saveUserFile({
+ fileName: fileName2,
+ userDirectory: this.userDirectory,
+ dataString: dataString2
+ });
+ }))
+ );
+ const dataString = bloom.serialize() + "\n";
+ const fileName = `${this.getInstanceName()}_bloom.json`;
+ yield saveUserFile({
+ fileName,
+ userDirectory: this.userDirectory,
+ dataString
+ });
+ });
+ }
+}
+
+;// CONCATENATED MODULE: ./src/services/index.ts
+
+
+
+
+
+
;// CONCATENATED MODULE: ./src/program.ts
var program_defProp = Object.defineProperty;
@@ -193210,6 +186470,8 @@ var program_async = (__this, __arguments, generator) => {
+
+const EXEC_NAME = "tornado-cli";
const DEFAULT_GAS_LIMIT = Number((external_process_default()).env.DEFAULT_GAS_LIMIT) || 6e5;
const RELAYER_NETWORK = Number((external_process_default()).env.RELAYER_NETWORK) || NetId.MAINNET;
const STATIC_DIR = (external_process_default()).env.CACHE_DIR || external_path_default().join(__dirname, "../static");
@@ -193460,9 +186722,9 @@ function programSendTransaction(_0) {
});
}
function tornadoProgram() {
- const { name, version, description } = package_namespaceObject_0;
+ const { version, description } = package_namespaceObject_0;
const program = new Command();
- program.name(name).version(version).description(description);
+ program.name(EXEC_NAME).version(version).description(description);
program.command("create").description("Creates Tornado Cash deposit note and deposit invoice").argument("", "Network Chain ID to connect with (see https://chainlist.org for examples)", parseNumber).argument("", "Currency to deposit on Tornado Cash").argument("", "Amount to deposit on Tornado Cash").action((netId, currency, amount) => program_async(this, null, function* () {
currency = currency.toLowerCase();
const config = getConfig(netId);
@@ -194504,8 +187766,8 @@ function tornadoProgram() {
const balanceTable = new (cli_table3_default())({ head: ["Token", "Contract Address", "Balance"] });
balanceTable.push(
[{ colSpan: 3, content: `User: ${userAddress}`, hAlign: "center" }],
- ...tokenBalances2.map(({ address, name: name2, symbol, decimals, balance }) => {
- return [`${name2} (${symbol})`, address, `${formatUnits(balance, decimals)} ${symbol}`];
+ ...tokenBalances2.map(({ address, name, symbol, decimals, balance }) => {
+ return [`${name} (${symbol})`, address, `${formatUnits(balance, decimals)} ${symbol}`];
})
);
console.log(balanceTable.toString());
diff --git a/dist/services/data.d.ts b/dist/services/data.d.ts
new file mode 100644
index 0000000..b213ecd
--- /dev/null
+++ b/dist/services/data.d.ts
@@ -0,0 +1,24 @@
+import { AsyncZippable, Unzipped } from 'fflate';
+import { BaseEvents, MinimalEvents } from '@tornado/core';
+export declare function existsAsync(fileOrDir: string): Promise;
+export declare function zipAsync(file: AsyncZippable): Promise;
+export declare function unzipAsync(data: Uint8Array): Promise;
+export declare function saveUserFile({ fileName, userDirectory, dataString, }: {
+ fileName: string;
+ userDirectory: string;
+ dataString: string;
+}): Promise;
+export declare function loadSavedEvents({ name, userDirectory, deployedBlock, }: {
+ name: string;
+ userDirectory: string;
+ deployedBlock: number;
+}): Promise>;
+export declare function download({ name, cacheDirectory }: {
+ name: string;
+ cacheDirectory: string;
+}): Promise;
+export declare function loadCachedEvents({ name, cacheDirectory, deployedBlock, }: {
+ name: string;
+ cacheDirectory: string;
+ deployedBlock: number;
+}): Promise>;
diff --git a/dist/services/index.d.ts b/dist/services/index.d.ts
new file mode 100644
index 0000000..0deb98b
--- /dev/null
+++ b/dist/services/index.d.ts
@@ -0,0 +1,4 @@
+export * from './data';
+export * from './nodeEvents';
+export * from './parser';
+export * from './treeCache';
diff --git a/dist/services/nodeEvents.d.ts b/dist/services/nodeEvents.d.ts
new file mode 100644
index 0000000..ee67fb0
--- /dev/null
+++ b/dist/services/nodeEvents.d.ts
@@ -0,0 +1,75 @@
+import { BatchBlockOnProgress, BatchEventOnProgress, BaseTornadoService, BaseEncryptedNotesService, BaseGovernanceService, BaseRegistryService, BaseTornadoServiceConstructor, BaseEncryptedNotesServiceConstructor, BaseGovernanceServiceConstructor, BaseRegistryServiceConstructor, BaseEchoServiceConstructor, BaseEchoService } from '@tornado/core';
+import type { BaseEvents, DepositsEvents, WithdrawalsEvents, EncryptedNotesEvents, RegistersEvents, AllGovernanceEvents, EchoEvents } from '@tornado/core';
+export type NodeTornadoServiceConstructor = BaseTornadoServiceConstructor & {
+ cacheDirectory?: string;
+ userDirectory?: string;
+};
+export declare class NodeTornadoService extends BaseTornadoService {
+ cacheDirectory?: string;
+ userDirectory?: string;
+ constructor({ netId, provider, graphApi, subgraphName, Tornado, type, amount, currency, deployedBlock, fetchDataOptions, cacheDirectory, userDirectory, }: NodeTornadoServiceConstructor);
+ updateEventProgress({ type, fromBlock, toBlock, count }: Parameters[0]): void;
+ updateTransactionProgress({ currentIndex, totalIndex }: Parameters[0]): void;
+ updateBlockProgress({ currentIndex, totalIndex }: Parameters[0]): void;
+ updateGraphProgress({ type, fromBlock, toBlock, count }: Parameters[0]): void;
+ getEventsFromDB(): Promise>;
+ getEventsFromCache(): Promise>;
+ saveEvents({ events, lastBlock }: BaseEvents): Promise;
+}
+export type NodeEchoServiceConstructor = BaseEchoServiceConstructor & {
+ cacheDirectory?: string;
+ userDirectory?: string;
+};
+export declare class NodeEchoService extends BaseEchoService {
+ cacheDirectory?: string;
+ userDirectory?: string;
+ constructor({ netId, provider, graphApi, subgraphName, Echoer, deployedBlock, fetchDataOptions, cacheDirectory, userDirectory, }: NodeEchoServiceConstructor);
+ updateEventProgress({ type, fromBlock, toBlock, count }: Parameters[0]): void;
+ updateGraphProgress({ type, fromBlock, toBlock, count }: Parameters[0]): void;
+ getEventsFromDB(): Promise>;
+ getEventsFromCache(): Promise>;
+ saveEvents({ events, lastBlock }: BaseEvents): Promise;
+}
+export type NodeEncryptedNotesServiceConstructor = BaseEncryptedNotesServiceConstructor & {
+ cacheDirectory?: string;
+ userDirectory?: string;
+};
+export declare class NodeEncryptedNotesService extends BaseEncryptedNotesService {
+ cacheDirectory?: string;
+ userDirectory?: string;
+ constructor({ netId, provider, graphApi, subgraphName, Router, deployedBlock, fetchDataOptions, cacheDirectory, userDirectory, }: NodeEncryptedNotesServiceConstructor);
+ updateEventProgress({ type, fromBlock, toBlock, count }: Parameters[0]): void;
+ updateGraphProgress({ type, fromBlock, toBlock, count }: Parameters[0]): void;
+ getEventsFromDB(): Promise>;
+ getEventsFromCache(): Promise>;
+ saveEvents({ events, lastBlock }: BaseEvents): Promise;
+}
+export type NodeGovernanceServiceConstructor = BaseGovernanceServiceConstructor & {
+ cacheDirectory?: string;
+ userDirectory?: string;
+};
+export declare class NodeGovernanceService extends BaseGovernanceService {
+ cacheDirectory?: string;
+ userDirectory?: string;
+ constructor({ netId, provider, graphApi, subgraphName, Governance, deployedBlock, fetchDataOptions, cacheDirectory, userDirectory, }: NodeGovernanceServiceConstructor);
+ updateEventProgress({ type, fromBlock, toBlock, count }: Parameters[0]): void;
+ updateGraphProgress({ type, fromBlock, toBlock, count }: Parameters[0]): void;
+ updateTransactionProgress({ currentIndex, totalIndex }: Parameters[0]): void;
+ getEventsFromDB(): Promise>;
+ getEventsFromCache(): Promise>;
+ saveEvents({ events, lastBlock }: BaseEvents): Promise;
+}
+export type NodeRegistryServiceConstructor = BaseRegistryServiceConstructor & {
+ cacheDirectory?: string;
+ userDirectory?: string;
+};
+export declare class NodeRegistryService extends BaseRegistryService {
+ cacheDirectory?: string;
+ userDirectory?: string;
+ constructor({ netId, provider, graphApi, subgraphName, RelayerRegistry, deployedBlock, fetchDataOptions, cacheDirectory, userDirectory, }: NodeRegistryServiceConstructor);
+ updateEventProgress({ type, fromBlock, toBlock, count }: Parameters[0]): void;
+ updateGraphProgress({ type, fromBlock, toBlock, count }: Parameters[0]): void;
+ getEventsFromDB(): Promise>;
+ getEventsFromCache(): Promise>;
+ saveEvents({ events, lastBlock }: BaseEvents): Promise;
+}
diff --git a/dist/services/parser.d.ts b/dist/services/parser.d.ts
new file mode 100644
index 0000000..35da7f2
--- /dev/null
+++ b/dist/services/parser.d.ts
@@ -0,0 +1,10 @@
+export declare function parseNumber(value?: string | number): number;
+export declare function parseUrl(value?: string): string;
+export declare function parseRelayer(value?: string): string;
+export declare function parseAddress(value?: string): string;
+export declare function parseMnemonic(value?: string): string;
+export declare function parseKey(value?: string): string;
+/**
+ * Recovery key shouldn't have a 0x prefix (Also this is how the UI generates)
+ */
+export declare function parseRecoveryKey(value?: string): string;
diff --git a/dist/services/treeCache.d.ts b/dist/services/treeCache.d.ts
new file mode 100644
index 0000000..7ccfff8
--- /dev/null
+++ b/dist/services/treeCache.d.ts
@@ -0,0 +1,35 @@
+/**
+ * Create tree cache file from node.js
+ *
+ * Only works for node.js, modified from https://github.com/tornadocash/tornado-classic-ui/blob/master/scripts/updateTree.js
+ */
+import { MerkleTree } from '@tornado/fixed-merkle-tree';
+import { DepositsEvents } from '@tornado/core';
+import type { NetIdType } from '@tornado/core';
+export interface TreeCacheConstructor {
+ netId: NetIdType;
+ amount: string;
+ currency: string;
+ userDirectory: string;
+ PARTS_COUNT?: number;
+ LEAVES?: number;
+ zeroElement?: string;
+}
+export interface treeMetadata {
+ blockNumber: number;
+ logIndex: number;
+ transactionHash: string;
+ timestamp: number;
+ from: string;
+ leafIndex: number;
+}
+export declare class TreeCache {
+ netId: NetIdType;
+ amount: string;
+ currency: string;
+ userDirectory: string;
+ PARTS_COUNT: number;
+ constructor({ netId, amount, currency, userDirectory, PARTS_COUNT }: TreeCacheConstructor);
+ getInstanceName(): string;
+ createTree(events: DepositsEvents[], tree: MerkleTree): Promise;
+}
diff --git a/package.json b/package.json
index dc95265..9a043bd 100644
--- a/package.json
+++ b/package.json
@@ -1,5 +1,5 @@
{
- "name": "tornado-cli",
+ "name": "@tornado/cli",
"version": "1.0.3-alpha",
"description": "Modern Toolsets for Privacy Pools on Ethereum",
"main": "./dist/cli.js",
@@ -53,7 +53,7 @@
"@colors/colors": "1.5.0",
"@metamask/eth-sig-util": "^7.0.1",
"@tornado/contracts": "1.0.0",
- "@tornado/core": "git+https://git.tornado.ws/tornadocontrib/tornado-core.git#b5f57e20ee7de42c4af88fb417d887672a8d3582",
+ "@tornado/core": "git+https://git.tornado.ws/tornadocontrib/tornado-core.git#4fde41b10ce601bcf687e2e8b93785f86237ac6c",
"@tornado/fixed-merkle-tree": "0.7.3",
"@tornado/snarkjs": "0.1.20",
"@tornado/websnark": "0.0.4",
diff --git a/src/program.ts b/src/program.ts
index 52d9ff0..eb2ac7b 100644
--- a/src/program.ts
+++ b/src/program.ts
@@ -37,12 +37,6 @@ import {
Multicall__factory,
OffchainOracle__factory,
OvmGasPriceOracle__factory,
- parseUrl,
- parseRelayer,
- parseNumber,
- parseMnemonic,
- parseKey,
- parseAddress,
getProviderOptions,
getProviderWithNetId,
getTokenBalances,
@@ -50,19 +44,14 @@ import {
TornadoVoidSigner,
tokenBalances,
Deposit,
- NodeTornadoService,
DepositsEvents,
WithdrawalsEvents,
Relayer,
RelayerInfo,
RelayerError,
- NodeRegistryService,
TornadoFeeOracle,
TokenPriceOracle,
calculateSnarkProof,
- NodeEchoService,
- NodeEncryptedNotesService,
- NodeGovernanceService,
RelayerClient,
MerkleTreeService,
multicall,
@@ -78,12 +67,27 @@ import {
enabledChains,
substring,
NoteAccount,
- parseRecoveryKey,
getSupportedInstances,
- TreeCache,
initGroth16,
} from '@tornado/core';
import * as packageJson from '../package.json';
+import {
+ parseUrl,
+ parseRelayer,
+ parseNumber,
+ parseMnemonic,
+ parseKey,
+ parseAddress,
+ parseRecoveryKey,
+ NodeTornadoService,
+ NodeRegistryService,
+ NodeEchoService,
+ NodeEncryptedNotesService,
+ NodeGovernanceService,
+ TreeCache,
+} from './services';
+
+const EXEC_NAME = 'tornado-cli';
/**
* Static variables, shouldn't be modified by env unless you know what they are doing
@@ -448,11 +452,11 @@ export async function programSendTransaction({
}
export function tornadoProgram() {
- const { name, version, description } = packageJson as packageJson;
+ const { version, description } = packageJson as packageJson;
const program = new Command();
- program.name(name).version(version).description(description);
+ program.name(EXEC_NAME).version(version).description(description);
program
.command('create')
diff --git a/src/services/data.ts b/src/services/data.ts
new file mode 100644
index 0000000..6a1c3a2
--- /dev/null
+++ b/src/services/data.ts
@@ -0,0 +1,146 @@
+import path from 'path';
+import { stat, mkdir, readFile, writeFile } from 'fs/promises';
+import { zip, unzip, AsyncZippable, Unzipped } from 'fflate';
+import { BaseEvents, MinimalEvents } from '@tornado/core';
+
+export async function existsAsync(fileOrDir: string): Promise {
+ try {
+ await stat(fileOrDir);
+
+ return true;
+ } catch {
+ return false;
+ }
+}
+
+export function zipAsync(file: AsyncZippable): Promise {
+ return new Promise((res, rej) => {
+ zip(file, { mtime: new Date('1/1/1980') }, (err, data) => {
+ if (err) {
+ rej(err);
+ return;
+ }
+ res(data);
+ });
+ });
+}
+
+export function unzipAsync(data: Uint8Array): Promise {
+ return new Promise((res, rej) => {
+ unzip(data, {}, (err, data) => {
+ if (err) {
+ rej(err);
+ return;
+ }
+ res(data);
+ });
+ });
+}
+
+export async function saveUserFile({
+ fileName,
+ userDirectory,
+ dataString,
+}: {
+ fileName: string;
+ userDirectory: string;
+ dataString: string;
+}) {
+ fileName = fileName.toLowerCase();
+
+ const filePath = path.join(userDirectory, fileName);
+
+ const payload = await zipAsync({
+ [fileName]: new TextEncoder().encode(dataString),
+ });
+
+ if (!(await existsAsync(userDirectory))) {
+ await mkdir(userDirectory, { recursive: true });
+ }
+
+ await writeFile(filePath + '.zip', payload);
+ await writeFile(filePath, dataString);
+}
+
+export async function loadSavedEvents({
+ name,
+ userDirectory,
+ deployedBlock,
+}: {
+ name: string;
+ userDirectory: string;
+ deployedBlock: number;
+}): Promise> {
+ const filePath = path.join(userDirectory, `${name}.json`.toLowerCase());
+
+ if (!(await existsAsync(filePath))) {
+ return {
+ events: [] as T[],
+ lastBlock: null,
+ };
+ }
+
+ try {
+ const events = JSON.parse(await readFile(filePath, { encoding: 'utf8' })) as T[];
+
+ return {
+ events,
+ lastBlock: events && events.length ? events[events.length - 1].blockNumber : deployedBlock,
+ };
+ } catch (err) {
+ console.log('Method loadSavedEvents has error');
+ console.log(err);
+ return {
+ events: [],
+ lastBlock: deployedBlock,
+ };
+ }
+}
+
+export async function download({ name, cacheDirectory }: { name: string; cacheDirectory: string }) {
+ const fileName = `${name}.json`.toLowerCase();
+ const zipName = `${fileName}.zip`;
+ const zipPath = path.join(cacheDirectory, zipName);
+
+ const data = await readFile(zipPath);
+ const { [fileName]: content } = await unzipAsync(data);
+
+ return new TextDecoder().decode(content);
+}
+
+export async function loadCachedEvents({
+ name,
+ cacheDirectory,
+ deployedBlock,
+}: {
+ name: string;
+ cacheDirectory: string;
+ deployedBlock: number;
+}): Promise> {
+ try {
+ const module = await download({ cacheDirectory, name });
+
+ if (module) {
+ const events = JSON.parse(module);
+
+ const lastBlock = events && events.length ? events[events.length - 1].blockNumber : deployedBlock;
+
+ return {
+ events,
+ lastBlock,
+ };
+ }
+
+ return {
+ events: [],
+ lastBlock: deployedBlock,
+ };
+ } catch (err) {
+ console.log('Method loadCachedEvents has error');
+ console.log(err);
+ return {
+ events: [],
+ lastBlock: deployedBlock,
+ };
+ }
+}
diff --git a/src/services/index.ts b/src/services/index.ts
new file mode 100644
index 0000000..0deb98b
--- /dev/null
+++ b/src/services/index.ts
@@ -0,0 +1,4 @@
+export * from './data';
+export * from './nodeEvents';
+export * from './parser';
+export * from './treeCache';
diff --git a/src/services/nodeEvents.ts b/src/services/nodeEvents.ts
new file mode 100644
index 0000000..371a0e8
--- /dev/null
+++ b/src/services/nodeEvents.ts
@@ -0,0 +1,781 @@
+import Table from 'cli-table3';
+import moment from 'moment';
+import {
+ BatchBlockOnProgress,
+ BatchEventOnProgress,
+ BaseTornadoService,
+ BaseEncryptedNotesService,
+ BaseGovernanceService,
+ BaseRegistryService,
+ BaseTornadoServiceConstructor,
+ BaseEncryptedNotesServiceConstructor,
+ BaseGovernanceServiceConstructor,
+ BaseRegistryServiceConstructor,
+ BaseEchoServiceConstructor,
+ BaseEchoService,
+} from '@tornado/core';
+import type {
+ BaseEvents,
+ DepositsEvents,
+ WithdrawalsEvents,
+ EncryptedNotesEvents,
+ RegistersEvents,
+ AllGovernanceEvents,
+ EchoEvents,
+} from '@tornado/core';
+import { saveUserFile, loadSavedEvents, loadCachedEvents } from './data';
+
+export type NodeTornadoServiceConstructor = BaseTornadoServiceConstructor & {
+ cacheDirectory?: string;
+ userDirectory?: string;
+};
+
+export class NodeTornadoService extends BaseTornadoService {
+ cacheDirectory?: string;
+ userDirectory?: string;
+
+ constructor({
+ netId,
+ provider,
+ graphApi,
+ subgraphName,
+ Tornado,
+ type,
+ amount,
+ currency,
+ deployedBlock,
+ fetchDataOptions,
+ cacheDirectory,
+ userDirectory,
+ }: NodeTornadoServiceConstructor) {
+ super({
+ netId,
+ provider,
+ graphApi,
+ subgraphName,
+ Tornado,
+ type,
+ amount,
+ currency,
+ deployedBlock,
+ fetchDataOptions,
+ });
+
+ this.cacheDirectory = cacheDirectory;
+ this.userDirectory = userDirectory;
+ }
+
+ updateEventProgress({ type, fromBlock, toBlock, count }: Parameters[0]) {
+ if (toBlock) {
+ console.log(`fromBlock - ${fromBlock}`);
+ console.log(`toBlock - ${toBlock}`);
+
+ if (count) {
+ console.log(`downloaded ${type} events count - ${count}`);
+ console.log('____________________________________________');
+ console.log(`Fetched ${type} events from ${fromBlock} to ${toBlock}\n`);
+ }
+ }
+ }
+
+ updateTransactionProgress({ currentIndex, totalIndex }: Parameters[0]) {
+ if (totalIndex) {
+ console.log(`Fetched ${currentIndex} deposit txs of ${totalIndex}`);
+ }
+ }
+
+ updateBlockProgress({ currentIndex, totalIndex }: Parameters[0]) {
+ if (totalIndex) {
+ console.log(`Fetched ${currentIndex} withdrawal blocks of ${totalIndex}`);
+ }
+ }
+
+ updateGraphProgress({ type, fromBlock, toBlock, count }: Parameters[0]) {
+ if (toBlock) {
+ console.log(`fromBlock - ${fromBlock}`);
+ console.log(`toBlock - ${toBlock}`);
+
+ if (count) {
+ console.log(`downloaded ${type} events from graph node count - ${count}`);
+ console.log('____________________________________________');
+ console.log(`Fetched ${type} events from graph node ${fromBlock} to ${toBlock}\n`);
+ }
+ }
+ }
+
+ async getEventsFromDB() {
+ if (!this.userDirectory) {
+ console.log(
+ 'Updating events for',
+ this.amount,
+ this.currency.toUpperCase(),
+ `${this.getType().toLowerCase()}s\n`,
+ );
+ console.log(`savedEvents count - ${0}`);
+ console.log(`savedEvents lastBlock - ${this.deployedBlock}\n`);
+
+ return {
+ events: [],
+ lastBlock: this.deployedBlock,
+ };
+ }
+
+ const savedEvents = await loadSavedEvents({
+ name: this.getInstanceName(),
+ userDirectory: this.userDirectory,
+ deployedBlock: this.deployedBlock,
+ });
+
+ console.log('Updating events for', this.amount, this.currency.toUpperCase(), `${this.getType().toLowerCase()}s\n`);
+ console.log(`savedEvents count - ${savedEvents.events.length}`);
+ console.log(`savedEvents lastBlock - ${savedEvents.lastBlock}\n`);
+
+ return savedEvents;
+ }
+
+ async getEventsFromCache() {
+ if (!this.cacheDirectory) {
+ console.log(`cachedEvents count - ${0}`);
+ console.log(`cachedEvents lastBlock - ${this.deployedBlock}\n`);
+
+ return {
+ events: [],
+ lastBlock: this.deployedBlock,
+ };
+ }
+
+ const cachedEvents = await loadCachedEvents({
+ name: this.getInstanceName(),
+ cacheDirectory: this.cacheDirectory,
+ deployedBlock: this.deployedBlock,
+ });
+
+ console.log(`cachedEvents count - ${cachedEvents.events.length}`);
+ console.log(`cachedEvents lastBlock - ${cachedEvents.lastBlock}\n`);
+
+ return cachedEvents;
+ }
+
+ async saveEvents({ events, lastBlock }: BaseEvents) {
+ const instanceName = this.getInstanceName();
+
+ console.log('\ntotalEvents count - ', events.length);
+ console.log(
+ `totalEvents lastBlock - ${events[events.length - 1] ? events[events.length - 1].blockNumber : lastBlock}\n`,
+ );
+
+ const eventTable = new Table();
+
+ eventTable.push(
+ [{ colSpan: 2, content: `${this.getType()}s`, hAlign: 'center' }],
+ ['Instance', `${this.netId} chain ${this.amount} ${this.currency.toUpperCase()}`],
+ ['Anonymity set', `${events.length} equal user ${this.getType().toLowerCase()}s`],
+ [{ colSpan: 2, content: `Latest ${this.getType().toLowerCase()}s` }],
+ ...events
+ .slice(events.length - 10)
+ .reverse()
+ .map(({ timestamp }, index) => {
+ const eventIndex = events.length - index;
+ const eventTime = moment.unix(timestamp).fromNow();
+
+ return [eventIndex, eventTime];
+ }),
+ );
+
+ console.log(eventTable.toString() + '\n');
+
+ if (this.userDirectory) {
+ await saveUserFile({
+ fileName: instanceName + '.json',
+ userDirectory: this.userDirectory,
+ dataString: JSON.stringify(events, null, 2) + '\n',
+ });
+ }
+ }
+}
+
+export type NodeEchoServiceConstructor = BaseEchoServiceConstructor & {
+ cacheDirectory?: string;
+ userDirectory?: string;
+};
+
+export class NodeEchoService extends BaseEchoService {
+ cacheDirectory?: string;
+ userDirectory?: string;
+
+ constructor({
+ netId,
+ provider,
+ graphApi,
+ subgraphName,
+ Echoer,
+ deployedBlock,
+ fetchDataOptions,
+ cacheDirectory,
+ userDirectory,
+ }: NodeEchoServiceConstructor) {
+ super({
+ netId,
+ provider,
+ graphApi,
+ subgraphName,
+ Echoer,
+ deployedBlock,
+ fetchDataOptions,
+ });
+
+ this.cacheDirectory = cacheDirectory;
+ this.userDirectory = userDirectory;
+ }
+
+ updateEventProgress({ type, fromBlock, toBlock, count }: Parameters[0]) {
+ if (toBlock) {
+ console.log(`fromBlock - ${fromBlock}`);
+ console.log(`toBlock - ${toBlock}`);
+
+ if (count) {
+ console.log(`downloaded ${type} events count - ${count}`);
+ console.log('____________________________________________');
+ console.log(`Fetched ${type} events from ${fromBlock} to ${toBlock}\n`);
+ }
+ }
+ }
+
+ updateGraphProgress({ type, fromBlock, toBlock, count }: Parameters[0]) {
+ if (toBlock) {
+ console.log(`fromBlock - ${fromBlock}`);
+ console.log(`toBlock - ${toBlock}`);
+
+ if (count) {
+ console.log(`downloaded ${type} events from graph node count - ${count}`);
+ console.log('____________________________________________');
+ console.log(`Fetched ${type} events from graph node ${fromBlock} to ${toBlock}\n`);
+ }
+ }
+ }
+
+ async getEventsFromDB() {
+ if (!this.userDirectory) {
+ console.log(`Updating events for ${this.netId} chain echo events\n`);
+ console.log(`savedEvents count - ${0}`);
+ console.log(`savedEvents lastBlock - ${this.deployedBlock}\n`);
+
+ return {
+ events: [],
+ lastBlock: this.deployedBlock,
+ };
+ }
+
+ const savedEvents = await loadSavedEvents({
+ name: this.getInstanceName(),
+ userDirectory: this.userDirectory,
+ deployedBlock: this.deployedBlock,
+ });
+
+ console.log(`Updating events for ${this.netId} chain echo events\n`);
+ console.log(`savedEvents count - ${savedEvents.events.length}`);
+ console.log(`savedEvents lastBlock - ${savedEvents.lastBlock}\n`);
+
+ return savedEvents;
+ }
+
+ async getEventsFromCache() {
+ if (!this.cacheDirectory) {
+ console.log(`cachedEvents count - ${0}`);
+ console.log(`cachedEvents lastBlock - ${this.deployedBlock}\n`);
+
+ return {
+ events: [],
+ lastBlock: this.deployedBlock,
+ };
+ }
+
+ const cachedEvents = await loadCachedEvents({
+ name: this.getInstanceName(),
+ cacheDirectory: this.cacheDirectory,
+ deployedBlock: this.deployedBlock,
+ });
+
+ console.log(`cachedEvents count - ${cachedEvents.events.length}`);
+ console.log(`cachedEvents lastBlock - ${cachedEvents.lastBlock}\n`);
+
+ return cachedEvents;
+ }
+
+ async saveEvents({ events, lastBlock }: BaseEvents) {
+ const instanceName = this.getInstanceName();
+
+ console.log('\ntotalEvents count - ', events.length);
+ console.log(
+ `totalEvents lastBlock - ${events[events.length - 1] ? events[events.length - 1].blockNumber : lastBlock}\n`,
+ );
+
+ const eventTable = new Table();
+
+ eventTable.push(
+ [{ colSpan: 2, content: 'Echo Accounts', hAlign: 'center' }],
+ ['Network', `${this.netId} chain`],
+ ['Events', `${events.length} events`],
+ [{ colSpan: 2, content: 'Latest events' }],
+ ...events
+ .slice(events.length - 10)
+ .reverse()
+ .map(({ blockNumber }, index) => {
+ const eventIndex = events.length - index;
+
+ return [eventIndex, blockNumber];
+ }),
+ );
+
+ console.log(eventTable.toString() + '\n');
+
+ if (this.userDirectory) {
+ await saveUserFile({
+ fileName: instanceName + '.json',
+ userDirectory: this.userDirectory,
+ dataString: JSON.stringify(events, null, 2) + '\n',
+ });
+ }
+ }
+}
+
+export type NodeEncryptedNotesServiceConstructor = BaseEncryptedNotesServiceConstructor & {
+ cacheDirectory?: string;
+ userDirectory?: string;
+};
+
+export class NodeEncryptedNotesService extends BaseEncryptedNotesService {
+ cacheDirectory?: string;
+ userDirectory?: string;
+
+ constructor({
+ netId,
+ provider,
+ graphApi,
+ subgraphName,
+ Router,
+ deployedBlock,
+ fetchDataOptions,
+ cacheDirectory,
+ userDirectory,
+ }: NodeEncryptedNotesServiceConstructor) {
+ super({
+ netId,
+ provider,
+ graphApi,
+ subgraphName,
+ Router,
+ deployedBlock,
+ fetchDataOptions,
+ });
+
+ this.cacheDirectory = cacheDirectory;
+ this.userDirectory = userDirectory;
+ }
+
+ updateEventProgress({ type, fromBlock, toBlock, count }: Parameters[0]) {
+ if (toBlock) {
+ console.log(`fromBlock - ${fromBlock}`);
+ console.log(`toBlock - ${toBlock}`);
+
+ if (count) {
+ console.log(`downloaded ${type} events count - ${count}`);
+ console.log('____________________________________________');
+ console.log(`Fetched ${type} events from ${fromBlock} to ${toBlock}\n`);
+ }
+ }
+ }
+
+ updateGraphProgress({ type, fromBlock, toBlock, count }: Parameters[0]) {
+ if (toBlock) {
+ console.log(`fromBlock - ${fromBlock}`);
+ console.log(`toBlock - ${toBlock}`);
+
+ if (count) {
+ console.log(`downloaded ${type} events from graph node count - ${count}`);
+ console.log('____________________________________________');
+ console.log(`Fetched ${type} events from graph node ${fromBlock} to ${toBlock}\n`);
+ }
+ }
+ }
+
+ async getEventsFromDB() {
+ if (!this.userDirectory) {
+ console.log(`Updating events for ${this.netId} chain encrypted events\n`);
+ console.log(`savedEvents count - ${0}`);
+ console.log(`savedEvents lastBlock - ${this.deployedBlock}\n`);
+
+ return {
+ events: [],
+ lastBlock: this.deployedBlock,
+ };
+ }
+
+ const savedEvents = await loadSavedEvents({
+ name: this.getInstanceName(),
+ userDirectory: this.userDirectory,
+ deployedBlock: this.deployedBlock,
+ });
+
+ console.log(`Updating events for ${this.netId} chain encrypted events\n`);
+ console.log(`savedEvents count - ${savedEvents.events.length}`);
+ console.log(`savedEvents lastBlock - ${savedEvents.lastBlock}\n`);
+
+ return savedEvents;
+ }
+
+ async getEventsFromCache() {
+ if (!this.cacheDirectory) {
+ console.log(`cachedEvents count - ${0}`);
+ console.log(`cachedEvents lastBlock - ${this.deployedBlock}\n`);
+
+ return {
+ events: [],
+ lastBlock: this.deployedBlock,
+ };
+ }
+
+ const cachedEvents = await loadCachedEvents({
+ name: this.getInstanceName(),
+ cacheDirectory: this.cacheDirectory,
+ deployedBlock: this.deployedBlock,
+ });
+
+ console.log(`cachedEvents count - ${cachedEvents.events.length}`);
+ console.log(`cachedEvents lastBlock - ${cachedEvents.lastBlock}\n`);
+
+ return cachedEvents;
+ }
+
+ async saveEvents({ events, lastBlock }: BaseEvents) {
+ const instanceName = this.getInstanceName();
+
+ console.log('\ntotalEvents count - ', events.length);
+ console.log(
+ `totalEvents lastBlock - ${events[events.length - 1] ? events[events.length - 1].blockNumber : lastBlock}\n`,
+ );
+
+ const eventTable = new Table();
+
+ eventTable.push(
+ [{ colSpan: 2, content: 'Encrypted Notes', hAlign: 'center' }],
+ ['Network', `${this.netId} chain`],
+ ['Events', `${events.length} events`],
+ [{ colSpan: 2, content: 'Latest events' }],
+ ...events
+ .slice(events.length - 10)
+ .reverse()
+ .map(({ blockNumber }, index) => {
+ const eventIndex = events.length - index;
+
+ return [eventIndex, blockNumber];
+ }),
+ );
+
+ console.log(eventTable.toString() + '\n');
+
+ if (this.userDirectory) {
+ await saveUserFile({
+ fileName: instanceName + '.json',
+ userDirectory: this.userDirectory,
+ dataString: JSON.stringify(events, null, 2) + '\n',
+ });
+ }
+ }
+}
+
+export type NodeGovernanceServiceConstructor = BaseGovernanceServiceConstructor & {
+ cacheDirectory?: string;
+ userDirectory?: string;
+};
+
+export class NodeGovernanceService extends BaseGovernanceService {
+ cacheDirectory?: string;
+ userDirectory?: string;
+
+ constructor({
+ netId,
+ provider,
+ graphApi,
+ subgraphName,
+ Governance,
+ deployedBlock,
+ fetchDataOptions,
+ cacheDirectory,
+ userDirectory,
+ }: NodeGovernanceServiceConstructor) {
+ super({
+ netId,
+ provider,
+ graphApi,
+ subgraphName,
+ Governance,
+ deployedBlock,
+ fetchDataOptions,
+ });
+
+ this.cacheDirectory = cacheDirectory;
+ this.userDirectory = userDirectory;
+ }
+
+ updateEventProgress({ type, fromBlock, toBlock, count }: Parameters[0]) {
+ if (toBlock) {
+ console.log(`fromBlock - ${fromBlock}`);
+ console.log(`toBlock - ${toBlock}`);
+
+ if (count) {
+ console.log(`downloaded ${type} events count - ${count}`);
+ console.log('____________________________________________');
+ console.log(`Fetched ${type} events from ${fromBlock} to ${toBlock}\n`);
+ }
+ }
+ }
+
+ updateGraphProgress({ type, fromBlock, toBlock, count }: Parameters[0]) {
+ if (toBlock) {
+ console.log(`fromBlock - ${fromBlock}`);
+ console.log(`toBlock - ${toBlock}`);
+
+ if (count) {
+ console.log(`downloaded ${type} events from graph node count - ${count}`);
+ console.log('____________________________________________');
+ console.log(`Fetched ${type} events from graph node ${fromBlock} to ${toBlock}\n`);
+ }
+ }
+ }
+
+ updateTransactionProgress({ currentIndex, totalIndex }: Parameters[0]) {
+ if (totalIndex) {
+ console.log(`Fetched ${currentIndex} governance txs of ${totalIndex}`);
+ }
+ }
+
+ async getEventsFromDB() {
+ if (!this.userDirectory) {
+ console.log(`Updating events for ${this.netId} chain governance events\n`);
+ console.log(`savedEvents count - ${0}`);
+ console.log(`savedEvents lastBlock - ${this.deployedBlock}\n`);
+
+ return {
+ events: [],
+ lastBlock: this.deployedBlock,
+ };
+ }
+
+ const savedEvents = await loadSavedEvents({
+ name: this.getInstanceName(),
+ userDirectory: this.userDirectory,
+ deployedBlock: this.deployedBlock,
+ });
+
+ console.log(`Updating events for ${this.netId} chain governance events\n`);
+ console.log(`savedEvents count - ${savedEvents.events.length}`);
+ console.log(`savedEvents lastBlock - ${savedEvents.lastBlock}\n`);
+
+ return savedEvents;
+ }
+
+ async getEventsFromCache() {
+ if (!this.cacheDirectory) {
+ console.log(`cachedEvents count - ${0}`);
+ console.log(`cachedEvents lastBlock - ${this.deployedBlock}\n`);
+
+ return {
+ events: [],
+ lastBlock: this.deployedBlock,
+ };
+ }
+
+ const cachedEvents = await loadCachedEvents({
+ name: this.getInstanceName(),
+ cacheDirectory: this.cacheDirectory,
+ deployedBlock: this.deployedBlock,
+ });
+
+ console.log(`cachedEvents count - ${cachedEvents.events.length}`);
+ console.log(`cachedEvents lastBlock - ${cachedEvents.lastBlock}\n`);
+
+ return cachedEvents;
+ }
+
+ async saveEvents({ events, lastBlock }: BaseEvents) {
+ const instanceName = this.getInstanceName();
+
+ console.log('\ntotalEvents count - ', events.length);
+ console.log(
+ `totalEvents lastBlock - ${events[events.length - 1] ? events[events.length - 1].blockNumber : lastBlock}\n`,
+ );
+
+ const eventTable = new Table();
+
+ eventTable.push(
+ [{ colSpan: 2, content: 'Governance Events', hAlign: 'center' }],
+ ['Network', `${this.netId} chain`],
+ ['Events', `${events.length} events`],
+ [{ colSpan: 2, content: 'Latest events' }],
+ ...events
+ .slice(events.length - 10)
+ .reverse()
+ .map(({ blockNumber }, index) => {
+ const eventIndex = events.length - index;
+
+ return [eventIndex, blockNumber];
+ }),
+ );
+
+ console.log(eventTable.toString() + '\n');
+
+ if (this.userDirectory) {
+ await saveUserFile({
+ fileName: instanceName + '.json',
+ userDirectory: this.userDirectory,
+ dataString: JSON.stringify(events, null, 2) + '\n',
+ });
+ }
+ }
+}
+
+export type NodeRegistryServiceConstructor = BaseRegistryServiceConstructor & {
+ cacheDirectory?: string;
+ userDirectory?: string;
+};
+
+export class NodeRegistryService extends BaseRegistryService {
+ cacheDirectory?: string;
+ userDirectory?: string;
+
+ constructor({
+ netId,
+ provider,
+ graphApi,
+ subgraphName,
+ RelayerRegistry,
+ deployedBlock,
+ fetchDataOptions,
+ cacheDirectory,
+ userDirectory,
+ }: NodeRegistryServiceConstructor) {
+ super({
+ netId,
+ provider,
+ graphApi,
+ subgraphName,
+ RelayerRegistry,
+ deployedBlock,
+ fetchDataOptions,
+ });
+
+ this.cacheDirectory = cacheDirectory;
+ this.userDirectory = userDirectory;
+ }
+
+ updateEventProgress({ type, fromBlock, toBlock, count }: Parameters[0]) {
+ if (toBlock) {
+ console.log(`fromBlock - ${fromBlock}`);
+ console.log(`toBlock - ${toBlock}`);
+
+ if (count) {
+ console.log(`downloaded ${type} events count - ${count}`);
+ console.log('____________________________________________');
+ console.log(`Fetched ${type} events from ${fromBlock} to ${toBlock}\n`);
+ }
+ }
+ }
+
+ updateGraphProgress({ type, fromBlock, toBlock, count }: Parameters[0]) {
+ if (toBlock) {
+ console.log(`fromBlock - ${fromBlock}`);
+ console.log(`toBlock - ${toBlock}`);
+
+ if (count) {
+ console.log(`downloaded ${type} events from graph node count - ${count}`);
+ console.log('____________________________________________');
+ console.log(`Fetched ${type} events from graph node ${fromBlock} to ${toBlock}\n`);
+ }
+ }
+ }
+
+ async getEventsFromDB() {
+ if (!this.userDirectory) {
+ console.log(`Updating events for ${this.netId} chain registry events\n`);
+ console.log(`savedEvents count - ${0}`);
+ console.log(`savedEvents lastBlock - ${this.deployedBlock}\n`);
+
+ return {
+ events: [],
+ lastBlock: this.deployedBlock,
+ };
+ }
+
+ const savedEvents = await loadSavedEvents({
+ name: this.getInstanceName(),
+ userDirectory: this.userDirectory,
+ deployedBlock: this.deployedBlock,
+ });
+
+ console.log(`Updating events for ${this.netId} chain registry events\n`);
+ console.log(`savedEvents count - ${savedEvents.events.length}`);
+ console.log(`savedEvents lastBlock - ${savedEvents.lastBlock}\n`);
+
+ return savedEvents;
+ }
+
+ async getEventsFromCache() {
+ if (!this.cacheDirectory) {
+ console.log(`cachedEvents count - ${0}`);
+ console.log(`cachedEvents lastBlock - ${this.deployedBlock}\n`);
+
+ return {
+ events: [],
+ lastBlock: this.deployedBlock,
+ };
+ }
+
+ const cachedEvents = await loadCachedEvents({
+ name: this.getInstanceName(),
+ cacheDirectory: this.cacheDirectory,
+ deployedBlock: this.deployedBlock,
+ });
+
+ console.log(`cachedEvents count - ${cachedEvents.events.length}`);
+ console.log(`cachedEvents lastBlock - ${cachedEvents.lastBlock}\n`);
+
+ return cachedEvents;
+ }
+
+ async saveEvents({ events, lastBlock }: BaseEvents) {
+ const instanceName = this.getInstanceName();
+
+ console.log('\ntotalEvents count - ', events.length);
+ console.log(
+ `totalEvents lastBlock - ${events[events.length - 1] ? events[events.length - 1].blockNumber : lastBlock}\n`,
+ );
+
+ const eventTable = new Table();
+
+ eventTable.push(
+ [{ colSpan: 2, content: 'Registered Relayers', hAlign: 'center' }],
+ ['Network', `${this.netId} chain`],
+ ['Events', `${events.length} events`],
+ [{ colSpan: 2, content: 'Latest events' }],
+ ...events
+ .slice(events.length - 10)
+ .reverse()
+ .map(({ blockNumber }, index) => {
+ const eventIndex = events.length - index;
+
+ return [eventIndex, blockNumber];
+ }),
+ );
+
+ console.log(eventTable.toString() + '\n');
+
+ if (this.userDirectory) {
+ await saveUserFile({
+ fileName: instanceName + '.json',
+ userDirectory: this.userDirectory,
+ dataString: JSON.stringify(events, null, 2) + '\n',
+ });
+ }
+ }
+}
diff --git a/src/services/parser.ts b/src/services/parser.ts
new file mode 100644
index 0000000..3229b56
--- /dev/null
+++ b/src/services/parser.ts
@@ -0,0 +1,77 @@
+import { InvalidArgumentError } from 'commander';
+import { computeAddress, getAddress, Mnemonic } from 'ethers';
+import { validateUrl } from '@tornado/core';
+
+export function parseNumber(value?: string | number): number {
+ if (!value || isNaN(Number(value))) {
+ throw new InvalidArgumentError('Invalid Number');
+ }
+ return Number(value);
+}
+
+export function parseUrl(value?: string): string {
+ if (!value || !validateUrl(value, ['http:', 'https:'])) {
+ throw new InvalidArgumentError('Invalid URL');
+ }
+ return value;
+}
+
+export function parseRelayer(value?: string): string {
+ if (!value || !(value.endsWith('.eth') || validateUrl(value, ['http:', 'https:']))) {
+ throw new InvalidArgumentError('Invalid Relayer ETH address or URL');
+ }
+ return value;
+}
+
+export function parseAddress(value?: string): string {
+ if (!value) {
+ throw new InvalidArgumentError('Invalid Address');
+ }
+ try {
+ return getAddress(value);
+ } catch {
+ throw new InvalidArgumentError('Invalid Address');
+ }
+}
+
+export function parseMnemonic(value?: string): string {
+ if (!value) {
+ throw new InvalidArgumentError('Invalid Mnemonic');
+ }
+ try {
+ Mnemonic.fromPhrase(value);
+ } catch {
+ throw new InvalidArgumentError('Invalid Mnemonic');
+ }
+ return value;
+}
+
+export function parseKey(value?: string): string {
+ if (!value) {
+ throw new InvalidArgumentError('Invalid Private Key');
+ }
+ if (value.length === 64) {
+ value = '0x' + value;
+ }
+ try {
+ computeAddress(value);
+ } catch {
+ throw new InvalidArgumentError('Invalid Private Key');
+ }
+ return value;
+}
+
+/**
+ * Recovery key shouldn't have a 0x prefix (Also this is how the UI generates)
+ */
+export function parseRecoveryKey(value?: string): string {
+ if (!value) {
+ throw new InvalidArgumentError('Invalid Recovery Key');
+ }
+ try {
+ computeAddress('0x' + value);
+ } catch {
+ throw new InvalidArgumentError('Invalid Recovery Key');
+ }
+ return value;
+}
diff --git a/src/services/treeCache.ts b/src/services/treeCache.ts
new file mode 100644
index 0000000..3e4454b
--- /dev/null
+++ b/src/services/treeCache.ts
@@ -0,0 +1,113 @@
+/**
+ * Create tree cache file from node.js
+ *
+ * Only works for node.js, modified from https://github.com/tornadocash/tornado-classic-ui/blob/master/scripts/updateTree.js
+ */
+import { MerkleTree } from '@tornado/fixed-merkle-tree';
+import BloomFilter from 'bloomfilter.js';
+import { DepositsEvents } from '@tornado/core';
+import type { NetIdType } from '@tornado/core';
+import { saveUserFile } from './data';
+
+export interface TreeCacheConstructor {
+ netId: NetIdType;
+ amount: string;
+ currency: string;
+ userDirectory: string;
+ PARTS_COUNT?: number;
+ LEAVES?: number;
+ zeroElement?: string;
+}
+
+export interface treeMetadata {
+ blockNumber: number;
+ logIndex: number;
+ transactionHash: string;
+ timestamp: number;
+ from: string;
+ leafIndex: number;
+}
+
+export class TreeCache {
+ netId: NetIdType;
+ amount: string;
+ currency: string;
+ userDirectory: string;
+
+ PARTS_COUNT: number;
+
+ constructor({ netId, amount, currency, userDirectory, PARTS_COUNT = 4 }: TreeCacheConstructor) {
+ this.netId = netId;
+ this.amount = amount;
+ this.currency = currency;
+ this.userDirectory = userDirectory;
+
+ this.PARTS_COUNT = PARTS_COUNT;
+ }
+
+ getInstanceName(): string {
+ return `deposits_${this.netId}_${this.currency}_${this.amount}`;
+ }
+
+ async createTree(events: DepositsEvents[], tree: MerkleTree) {
+ const bloom = new BloomFilter(events.length);
+
+ console.log(`Creating cached tree for ${this.getInstanceName()}\n`);
+
+ // events indexed by commitment
+ const eventsData = events.reduce(
+ (acc, { leafIndex, commitment, ...rest }, i) => {
+ if (leafIndex !== i) {
+ throw new Error(`leafIndex (${leafIndex}) !== i (${i})`);
+ }
+
+ acc[commitment] = { ...rest, leafIndex };
+
+ return acc;
+ },
+ {} as { [key in string]: treeMetadata },
+ );
+
+ const slices = tree.getTreeSlices(this.PARTS_COUNT);
+
+ await Promise.all(
+ slices.map(async (slice, index) => {
+ const metadata = slice.elements.reduce((acc, curr) => {
+ if (index < this.PARTS_COUNT - 1) {
+ bloom.add(curr);
+ }
+ acc.push(eventsData[curr]);
+ return acc;
+ }, [] as treeMetadata[]);
+
+ const dataString =
+ JSON.stringify(
+ {
+ ...slice,
+ metadata,
+ },
+ null,
+ 2,
+ ) + '\n';
+
+ const fileName = `${this.getInstanceName()}_slice${index + 1}.json`;
+
+ await saveUserFile({
+ fileName,
+ userDirectory: this.userDirectory,
+ dataString,
+ });
+ }),
+ );
+
+ const dataString = bloom.serialize() + '\n';
+
+ const fileName = `${this.getInstanceName()}_bloom.json`;
+
+ await saveUserFile({
+ fileName,
+ userDirectory: this.userDirectory,
+ dataString,
+ });
+ }
+}
diff --git a/src/types/bloomfilter.js.d.ts b/src/types/bloomfilter.js.d.ts
new file mode 100644
index 0000000..925515d
--- /dev/null
+++ b/src/types/bloomfilter.js.d.ts
@@ -0,0 +1,25 @@
+/* eslint-disable @typescript-eslint/no-explicit-any */
+declare module 'bloomfilter.js' {
+ export default class BloomFilter {
+ m: number;
+ k: number;
+ size: number;
+ bitview: any;
+
+ constructor(n: number, false_postive_tolerance?: number);
+
+ calculateHash(x: number, m: number, i: number): number;
+
+ test(data: any): boolean;
+
+ add(data: any): void;
+
+ bytelength(): number;
+
+ view(): Uint8Array;
+
+ serialize(): string;
+
+ deserialize(serialized: string): BloomFilter;
+ }
+}
diff --git a/yarn.lock b/yarn.lock
index 3306743..b954692 100644
--- a/yarn.lock
+++ b/yarn.lock
@@ -771,9 +771,9 @@
"@openzeppelin/contracts-v3" "npm:@openzeppelin/contracts@3.2.0-rc.0"
ethers "^6.4.0"
-"@tornado/core@git+https://git.tornado.ws/tornadocontrib/tornado-core.git#b5f57e20ee7de42c4af88fb417d887672a8d3582":
- version "1.0.0"
- resolved "git+https://git.tornado.ws/tornadocontrib/tornado-core.git#b5f57e20ee7de42c4af88fb417d887672a8d3582"
+"@tornado/core@git+https://git.tornado.ws/tornadocontrib/tornado-core.git#4fde41b10ce601bcf687e2e8b93785f86237ac6c":
+ version "1.0.1"
+ resolved "git+https://git.tornado.ws/tornadocontrib/tornado-core.git#4fde41b10ce601bcf687e2e8b93785f86237ac6c"
dependencies:
"@metamask/eth-sig-util" "^7.0.1"
"@tornado/contracts" "^1.0.0"
@@ -781,21 +781,12 @@
"@tornado/snarkjs" "^0.1.20"
"@tornado/websnark" "^0.0.4"
ajv "^8.12.0"
- bloomfilter.js "^1.0.2"
bn.js "^5.2.1"
circomlibjs "0.1.7"
cross-fetch "^4.0.0"
ethers "^6.4.0"
ffjavascript "0.2.48"
fflate "^0.8.2"
- optionalDependencies:
- "@colors/colors" "1.5.0"
- cli-table3 "^0.6.4"
- commander "^12.0.0"
- http-proxy-agent "^7.0.2"
- https-proxy-agent "^7.0.4"
- moment "^2.30.1"
- socks-proxy-agent "^8.0.3"
"@tornado/fixed-merkle-tree@0.7.3", "@tornado/fixed-merkle-tree@^0.7.3":
version "0.7.3"