// Copies the @tornado/core UMD bundles into static/ and records their
// SRI (sha384) hashes in static/hashes.json.
import path from 'path';
import fs from 'fs/promises';
import { bytesToBase64, digest } from '@tornado/core';

// UMD bundles shipped with @tornado/core that must be served from static/.
const src = [
  'node_modules/@tornado/core/dist/tornado.umd.js',
  'node_modules/@tornado/core/dist/tornado.umd.min.js',
  'node_modules/@tornado/core/dist/tornadoContracts.umd.js',
  'node_modules/@tornado/core/dist/tornadoContracts.umd.min.js',
  'node_modules/@tornado/core/dist/merkleTreeWorker.umd.js',
  'node_modules/@tornado/core/dist/merkleTreeWorker.umd.min.js',
];

async function copyFiles() {
  for (const file of src) {
    const dst = path.join('static', path.basename(file));
    await fs.copyFile(file, dst);
    console.log(`Copied ${file} to ${dst}`);
  }
}

// Compute the SRI-style hash ("sha384-" + base64 digest) of a single file.
async function content(file: string) {
  const content = new Uint8Array(await fs.readFile(file));
  const hash = 'sha384-' + bytesToBase64(await digest(content));
  console.log(`${hash}: ${file}`);
  return hash;
}

async function update() {
  await copyFiles();

  const staticFiles = await fs.readdir('static', { recursive: true });

  const hashes = {} as { [key: string]: string };

  for (const filePath of staticFiles) {
    // Normalize to POSIX separators so the JSON keys are stable across platforms.
    const file = path.join('static', filePath).replaceAll(path.sep, path.posix.sep);

    if ((await fs.stat(file)).isDirectory() || file.includes('hashes.json')) {
      continue;
    }

    const hash = await content(file);
    hashes[file] = hash;
  }

  await fs.writeFile('static/hashes.json', JSON.stringify(hashes, null, 2));
}

update();