From f21732a8e5b9660d5f597b1317da5bfddd729dc6 Mon Sep 17 00:00:00 2001 From: Sean Bowe Date: Sun, 29 Oct 2017 18:58:34 -0600 Subject: [PATCH 01/23] Initial commit --- .gitignore | 5 + COPYRIGHT | 14 + Cargo.lock | 151 +++++++++ Cargo.toml | 23 ++ LICENSE-APACHE | 201 ++++++++++++ LICENSE-MIT | 23 ++ README.md | 23 ++ src/lib.rs | 859 +++++++++++++++++++++++++++++++++++++++++++++++++ 8 files changed, 1299 insertions(+) create mode 100644 .gitignore create mode 100644 COPYRIGHT create mode 100644 Cargo.lock create mode 100644 Cargo.toml create mode 100644 LICENSE-APACHE create mode 100644 LICENSE-MIT create mode 100644 README.md create mode 100644 src/lib.rs diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..d851696 --- /dev/null +++ b/.gitignore @@ -0,0 +1,5 @@ +/target/ +**/*.rs.bk +transcript +challenge +response diff --git a/COPYRIGHT b/COPYRIGHT new file mode 100644 index 0000000..0bcb41d --- /dev/null +++ b/COPYRIGHT @@ -0,0 +1,14 @@ +Copyrights in the "powersoftau" project are retained by their contributors. No +copyright assignment is required to contribute to the "powersoftau" project. + +The "powersoftau" project is licensed under either of + + * Apache License, Version 2.0, (see ./LICENSE-APACHE or http://www.apache.org/licenses/LICENSE-2.0) + * MIT license (see ./LICENSE-MIT or http://opensource.org/licenses/MIT) + +at your option. + +Unless you explicitly state otherwise, any contribution intentionally +submitted for inclusion in the work by you, as defined in the Apache-2.0 +license, shall be dual licensed as above, without any additional terms or +conditions. 
diff --git a/Cargo.lock b/Cargo.lock new file mode 100644 index 0000000..95a7464 --- /dev/null +++ b/Cargo.lock @@ -0,0 +1,151 @@ +[root] +name = "powersoftau" +version = "0.1.0" +dependencies = [ + "blake2 0.6.1 (registry+https://github.com/rust-lang/crates.io-index)", + "byteorder 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)", + "crossbeam 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)", + "generic-array 0.8.3 (registry+https://github.com/rust-lang/crates.io-index)", + "num_cpus 1.7.0 (registry+https://github.com/rust-lang/crates.io-index)", + "pairing 0.13.0 (registry+https://github.com/rust-lang/crates.io-index)", + "rand 0.3.17 (registry+https://github.com/rust-lang/crates.io-index)", + "typenum 1.9.0 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "bitflags" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "blake2" +version = "0.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "byte-tools 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", + "crypto-mac 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", + "digest 0.6.2 (registry+https://github.com/rust-lang/crates.io-index)", + "generic-array 0.8.3 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "byte-tools" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "byteorder" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "constant_time_eq" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "crossbeam" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "crypto-mac" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies 
= [ + "constant_time_eq 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)", + "generic-array 0.8.3 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "digest" +version = "0.6.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "generic-array 0.8.3 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "fuchsia-zircon" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "fuchsia-zircon-sys 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "fuchsia-zircon-sys" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "bitflags 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "generic-array" +version = "0.8.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "nodrop 0.1.12 (registry+https://github.com/rust-lang/crates.io-index)", + "typenum 1.9.0 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "libc" +version = "0.2.32" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "nodrop" +version = "0.1.12" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "num_cpus" +version = "1.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "libc 0.2.32 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "pairing" +version = "0.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "byteorder 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)", + "rand 0.3.17 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "rand" +version = "0.3.17" +source = "registry+https://github.com/rust-lang/crates.io-index" 
+dependencies = [ + "fuchsia-zircon 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.32 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "typenum" +version = "1.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[metadata] +"checksum bitflags 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "aad18937a628ec6abcd26d1489012cc0e18c21798210f491af69ded9b881106d" +"checksum blake2 0.6.1 (registry+https://github.com/rust-lang/crates.io-index)" = "53bf612c0f2839b7e764ebac65d6cb985f7c6812de399d0728038f4b1da141bc" +"checksum byte-tools 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "560c32574a12a89ecd91f5e742165893f86e3ab98d21f8ea548658eb9eef5f40" +"checksum byteorder 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ff81738b726f5d099632ceaffe7fb65b90212e8dce59d518729e7e8634032d3d" +"checksum constant_time_eq 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "8ff012e225ce166d4422e0e78419d901719760f62ae2b7969ca6b564d1b54a9e" +"checksum crossbeam 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "8837ab96533202c5b610ed44bc7f4183e7957c1c8f56e8cc78bb098593c8ba0a" +"checksum crypto-mac 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "779015233ac67d65098614aec748ac1c756ab6677fa2e14cf8b37c08dfed1198" +"checksum digest 0.6.2 (registry+https://github.com/rust-lang/crates.io-index)" = "e5b29bf156f3f4b3c4f610a25ff69370616ae6e0657d416de22645483e72af0a" +"checksum fuchsia-zircon 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "f6c0581a4e363262e52b87f59ee2afe3415361c6ec35e665924eb08afe8ff159" +"checksum fuchsia-zircon-sys 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "43f3795b4bae048dc6123a6b972cadde2e676f9ded08aef6bb77f5f157684a82" +"checksum generic-array 0.8.3 (registry+https://github.com/rust-lang/crates.io-index)" = 
"fceb69994e330afed50c93524be68c42fa898c2d9fd4ee8da03bd7363acd26f2" +"checksum libc 0.2.32 (registry+https://github.com/rust-lang/crates.io-index)" = "56cce3130fd040c28df6f495c8492e5ec5808fb4c9093c310df02b0c8f030148" +"checksum nodrop 0.1.12 (registry+https://github.com/rust-lang/crates.io-index)" = "9a2228dca57108069a5262f2ed8bd2e82496d2e074a06d1ccc7ce1687b6ae0a2" +"checksum num_cpus 1.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "514f0d73e64be53ff320680ca671b64fe3fb91da01e1ae2ddc99eb51d453b20d" +"checksum pairing 0.13.0 (registry+https://github.com/rust-lang/crates.io-index)" = "c6cecc13d1871265fea9be96aa9f1cc027edacd55d2fb1fbc164e19725b88d70" +"checksum rand 0.3.17 (registry+https://github.com/rust-lang/crates.io-index)" = "61efcbcd9fa8d8fbb07c84e34a8af18a1ff177b449689ad38a6e9457ecc7b2ae" +"checksum typenum 1.9.0 (registry+https://github.com/rust-lang/crates.io-index)" = "13a99dc6780ef33c78780b826cf9d2a78840b72cae9474de4bcaf9051e60ebbd" diff --git a/Cargo.toml b/Cargo.toml new file mode 100644 index 0000000..6e71d26 --- /dev/null +++ b/Cargo.toml @@ -0,0 +1,23 @@ +[package] +name = "powersoftau" +version = "0.1.0" +authors = ["Sean Bowe"] +license = "MIT/Apache-2.0" + +description = "Communal zk-SNARK MPC for Public Parameters" +documentation = "https://docs.rs/powersoftau/" +homepage = "https://github.com/ebfull/powersoftau" +repository = "https://github.com/ebfull/powersoftau" + +[dependencies] +rand = "0.3.17" +crossbeam = "0.3.0" +num_cpus = "1.7.0" +blake2 = "0.6.1" +generic-array = "0.8.3" +typenum = "1.9.0" +byteorder = "1.1.0" +pairing = "0.13.0" + +[features] +u128-support = ["pairing/u128-support"] diff --git a/LICENSE-APACHE b/LICENSE-APACHE new file mode 100644 index 0000000..16fe87b --- /dev/null +++ b/LICENSE-APACHE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + +TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + +1. Definitions. 
+ + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + +2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + +3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + +4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the 
following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + +5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + +6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + +7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + +8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + +9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ +END OF TERMS AND CONDITIONS + +APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + +Copyright [yyyy] [name of copyright owner] + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. diff --git a/LICENSE-MIT b/LICENSE-MIT new file mode 100644 index 0000000..31aa793 --- /dev/null +++ b/LICENSE-MIT @@ -0,0 +1,23 @@ +Permission is hereby granted, free of charge, to any +person obtaining a copy of this software and associated +documentation files (the "Software"), to deal in the +Software without restriction, including without +limitation the rights to use, copy, modify, merge, +publish, distribute, sublicense, and/or sell copies of +the Software, and to permit persons to whom the Software +is furnished to do so, subject to the following +conditions: + +The above copyright notice and this permission notice +shall be included in all copies or substantial portions +of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF +ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED +TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A +PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT +SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR +IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +DEALINGS IN THE SOFTWARE. diff --git a/README.md b/README.md new file mode 100644 index 0000000..a6d8f74 --- /dev/null +++ b/README.md @@ -0,0 +1,23 @@ +# Powers of Tau + +This is a [multi-party computation](https://en.wikipedia.org/wiki/Secure_multi-party_computation) (MPC) ceremony which constructs partial zk-SNARK parameters for _all_ circuits up to a depth of 221. It works by taking a step that is performed by all zk-SNARK MPCs and performing it in just one single ceremony. This makes individual zk-SNARK MPCs much cheaper and allows them to scale to practically unbounded numbers of participants. + +This protocol is described in a [forthcoming paper](https://eprint.iacr.org/2017/XXX). It produces parameters for an adaptation of [Jens Groth's 2016 pairing-based proving system](https://eprint.iacr.org/2016/260) using the [BLS12-381](https://github.com/ebfull/pairing/tree/master/src/bls12_381) elliptic curve construction. The security proof relies on a randomness beacon being applied at the end of the ceremony. + +**This is a work in progress.** + +## License + +Licensed under either of + + * Apache License, Version 2.0, ([LICENSE-APACHE](LICENSE-APACHE) or http://www.apache.org/licenses/LICENSE-2.0) + * MIT license ([LICENSE-MIT](LICENSE-MIT) or http://opensource.org/licenses/MIT) + +at your option. 
+ +### Contribution + +Unless you explicitly state otherwise, any contribution intentionally +submitted for inclusion in the work by you, as defined in the Apache-2.0 +license, shall be dual licensed as above, without any additional terms or +conditions. diff --git a/src/lib.rs b/src/lib.rs new file mode 100644 index 0000000..e445a5b --- /dev/null +++ b/src/lib.rs @@ -0,0 +1,859 @@ +//! This ceremony constructs the "powers of tau" for Jens Groth's 2016 zk-SNARK proving +//! system using the BLS12-381 pairing-friendly elliptic curve construction. +//! +//! # Overview +//! +//! Participants of the ceremony receive a "challenge" file containing: +//! +//! * the BLAKE2b hash of the last file entered into the transcript +//! * an `Accumulator` (with curve points encoded in uncompressed form for fast deserialization) +//! +//! The participant runs a tool which generates a random keypair (`PublicKey`, `PrivateKey`) +//! used for modifying the `Accumulator` from the "challenge" file. The keypair is then used to +//! transform the `Accumulator`, and a "response" file is generated containing: +//! +//! * the BLAKE2b hash of the "challenge" file (thus forming a hash chain over the entire transcript) +//! * an `Accumulator` (with curve points encoded in compressed form for fast uploading) +//! * the `PublicKey` +//! +//! This "challenge" file is entered into the protocol transcript. A given transcript is valid +//! if the transformations between consecutive `Accumulator`s verify with their respective +//! `PublicKey`s. Participants (and the public) can ensure that their contribution to the +//! `Accumulator` was accepted by ensuring the transcript contains their "response" file, ideally +//! by comparison of the BLAKE2b hash of the "response" file. +//! +//! After some time has elapsed for participants to contribute to the ceremony, a participant is +//! simulated with a randomness beacon. The resulting `Accumulator` contains partial zk-SNARK +//! 
public parameters for all circuits within a bounded size. + +extern crate pairing; +extern crate rand; +extern crate crossbeam; +extern crate num_cpus; +extern crate blake2; +extern crate generic_array; +extern crate typenum; +extern crate byteorder; + +use byteorder::{ReadBytesExt, BigEndian}; +use rand::{SeedableRng, Rng, Rand}; +use rand::chacha::ChaChaRng; +use pairing::bls12_381::*; +use pairing::*; +use std::io::{self, Read, Write}; +use std::sync::{Arc, Mutex}; +use generic_array::GenericArray; +use typenum::consts::U64; +use blake2::{Blake2b, Digest}; +use std::fmt; + +// This ceremony is based on the BLS12-381 elliptic curve construction. +const G1_UNCOMPRESSED_BYTE_SIZE: usize = 96; +const G2_UNCOMPRESSED_BYTE_SIZE: usize = 192; +const G1_COMPRESSED_BYTE_SIZE: usize = 48; +const G2_COMPRESSED_BYTE_SIZE: usize = 96; + +/// The accumulator supports circuits with 2^21 multiplication gates. +const TAU_POWERS_LENGTH: usize = (1 << 21); + +/// More tau powers are needed in G1 because the Groth16 H query +/// includes terms of the form tau^i * (tau^m - 1) = tau^(i+m) - tau^i +/// where the largest i = m - 2, requiring the computation of tau^(2m - 2) +/// and thus giving us a vector length of 2^22 - 1. +const TAU_POWERS_G1_LENGTH: usize = (TAU_POWERS_LENGTH << 1) - 1; + +/// The size of the accumulator on disk. +pub const ACCUMULATOR_BYTE_SIZE: usize = (TAU_POWERS_G1_LENGTH * G1_UNCOMPRESSED_BYTE_SIZE) + // g1 tau powers + (TAU_POWERS_LENGTH * G2_UNCOMPRESSED_BYTE_SIZE) + // g2 tau powers + (TAU_POWERS_LENGTH * G1_UNCOMPRESSED_BYTE_SIZE) + // alpha tau powers + (TAU_POWERS_LENGTH * G1_UNCOMPRESSED_BYTE_SIZE) // beta tau powers + + G2_UNCOMPRESSED_BYTE_SIZE // beta in g2 + + 64; // blake2b hash of previous contribution + +/// The "public key" is used to verify a contribution was correctly +/// computed. 
+pub const PUBLIC_KEY_SIZE: usize = 3 * G2_UNCOMPRESSED_BYTE_SIZE + // tau, alpha, and beta in g2 + 6 * G1_UNCOMPRESSED_BYTE_SIZE; // (s1, s1*tau), (s2, s2*alpha), (s3, s3*beta) in g1 + +/// The size of the contribution on disk. +pub const CONTRIBUTION_BYTE_SIZE: usize = (TAU_POWERS_G1_LENGTH * G1_COMPRESSED_BYTE_SIZE) + // g1 tau powers + (TAU_POWERS_LENGTH * G2_COMPRESSED_BYTE_SIZE) + // g2 tau powers + (TAU_POWERS_LENGTH * G1_COMPRESSED_BYTE_SIZE) + // alpha tau powers + (TAU_POWERS_LENGTH * G1_COMPRESSED_BYTE_SIZE) // beta tau powers + + G2_COMPRESSED_BYTE_SIZE // beta in g2 + + 64 // blake2b hash of input accumulator + + PUBLIC_KEY_SIZE; // public key + +/// Hashes to G2 using the first 32 bytes of `digest`. Panics if `digest` is less +/// than 32 bytes. +fn hash_to_g2(mut digest: &[u8]) -> G2 +{ + assert!(digest.len() >= 32); + + let mut seed = Vec::with_capacity(8); + + for _ in 0..8 { + seed.push(digest.read_u32::().expect("assertion above guarantees this to work")); + } + + ChaChaRng::from_seed(&seed).gen() +} + +#[test] +fn test_hash_to_g2() { + assert!( + hash_to_g2(&[1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33]) + == + hash_to_g2(&[1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,34]) + ); + + assert!( + hash_to_g2(&[1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32]) + != + hash_to_g2(&[1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,33]) + ); +} + +/// Contains terms of the form (s1, s1x, H(s1x)2, H(s1x)2x) +/// for all x in τ, α and β, and some s chosen randomly by its creator. The function H "hashes into" the group G2. No points in the public key may be the identity. +/// +/// The elements in G2 are used to verify transformations of the accumulator. By its nature, the public key proves +/// knowledge of τ, α and β. +/// +/// It is necessary to verify `same_ratio`((s1, s1x), (H(s1x)2, H(s1x)2x)). 
+#[derive(PartialEq, Eq)] +pub struct PublicKey { + tau_g1: (G1Affine, G1Affine), + alpha_g1: (G1Affine, G1Affine), + beta_g1: (G1Affine, G1Affine), + tau_g2: G2Affine, + alpha_g2: G2Affine, + beta_g2: G2Affine +} + +/// Contains the secrets τ, α and β that the participant of the ceremony must destroy. +pub struct PrivateKey { + tau: Fr, + alpha: Fr, + beta: Fr +} + +/// Constructs a keypair given an RNG and a 64-byte transcript `digest`. +pub fn keypair(rng: &mut R, digest: &[u8]) -> (PublicKey, PrivateKey) +{ + assert_eq!(digest.len(), 64); + + let tau = Fr::rand(rng); + let alpha = Fr::rand(rng); + let beta = Fr::rand(rng); + + let mut op = |x, personalization: u8| { + // Sample random g^s + let g1_s = G1::rand(rng).into_affine(); + // Compute g^{s*x} + let g1_s_x = g1_s.mul(x).into_affine(); + // Compute BLAKE2b(personalization | transcript | g^s | g^{s*x}) + let h = { + let mut h = Blake2b::default(); + h.input(&[personalization]); + h.input(digest); + h.input(g1_s.into_uncompressed().as_ref()); + h.input(g1_s_x.into_uncompressed().as_ref()); + h.result() + }; + // Hash into G2 as g^{s'} + let g2_s = hash_to_g2(h.as_ref()).into_affine(); + // Compute g^{s'*x} + let g2_s_x = g2_s.mul(x).into_affine(); + + ((g1_s, g1_s_x), g2_s_x) + }; + + let pk_tau = op(tau, 0); + let pk_alpha = op(alpha, 1); + let pk_beta = op(beta, 2); + + ( + PublicKey { + tau_g1: pk_tau.0, + alpha_g1: pk_alpha.0, + beta_g1: pk_beta.0, + tau_g2: pk_tau.1, + alpha_g2: pk_alpha.1, + beta_g2: pk_beta.1, + }, + PrivateKey { + tau: tau, + alpha: alpha, + beta: beta + } + ) +} + +/// Determines if point compression should be used. +#[derive(Copy, Clone)] +pub enum UseCompression { + Yes, + No +} + +/// Determines if points should be checked for correctness during deserialization. +/// This is not necessary for participants, because a transcript verifier can +/// check this theirself. 
+#[derive(Copy, Clone)] +pub enum CheckForCorrectness { + Yes, + No +} + +fn write_point( + writer: &mut W, + p: &G, + compression: UseCompression +) -> io::Result<()> + where W: Write, + G: CurveAffine +{ + match compression { + UseCompression::Yes => writer.write_all(p.into_compressed().as_ref()), + UseCompression::No => writer.write_all(p.into_uncompressed().as_ref()), + } +} + +/// Errors that might occur during deserialization. +#[derive(Debug)] +pub enum DeserializationError { + IoError(io::Error), + DecodingError(GroupDecodingError), + PointAtInfinity +} + +impl fmt::Display for DeserializationError { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + match *self { + DeserializationError::IoError(ref e) => write!(f, "Disk IO error: {}", e), + DeserializationError::DecodingError(ref e) => write!(f, "Decoding error: {}", e), + DeserializationError::PointAtInfinity => write!(f, "Point at infinity found") + } + } +} + +impl From for DeserializationError { + fn from(err: io::Error) -> DeserializationError { + DeserializationError::IoError(err) + } +} + +impl From for DeserializationError { + fn from(err: GroupDecodingError) -> DeserializationError { + DeserializationError::DecodingError(err) + } +} + +impl PublicKey { + /// Serialize the public key. Points are always in uncompressed form. + pub fn serialize(&self, writer: &mut W) -> io::Result<()> + { + write_point(writer, &self.tau_g1.0, UseCompression::No)?; + write_point(writer, &self.tau_g1.1, UseCompression::No)?; + + write_point(writer, &self.alpha_g1.0, UseCompression::No)?; + write_point(writer, &self.alpha_g1.1, UseCompression::No)?; + + write_point(writer, &self.beta_g1.0, UseCompression::No)?; + write_point(writer, &self.beta_g1.1, UseCompression::No)?; + + write_point(writer, &self.tau_g2, UseCompression::No)?; + write_point(writer, &self.alpha_g2, UseCompression::No)?; + write_point(writer, &self.beta_g2, UseCompression::No)?; + + Ok(()) + } + + /// Deserialize the public key. 
Points are always in uncompressed form, and + /// always checked, since there aren't very many of them. Does not allow any + /// points at infinity. + pub fn deserialize(reader: &mut R) -> Result + { + fn read_uncompressed(reader: &mut R) -> Result { + let mut repr = C::Uncompressed::empty(); + reader.read_exact(repr.as_mut())?; + let v = repr.into_affine()?; + + if v.is_zero() { + Err(DeserializationError::PointAtInfinity) + } else { + Ok(v) + } + } + + let tau_g1_s = read_uncompressed(reader)?; + let tau_g1_s_tau = read_uncompressed(reader)?; + + let alpha_g1_s = read_uncompressed(reader)?; + let alpha_g1_s_alpha = read_uncompressed(reader)?; + + let beta_g1_s = read_uncompressed(reader)?; + let beta_g1_s_beta = read_uncompressed(reader)?; + + let tau_g2 = read_uncompressed(reader)?; + let alpha_g2 = read_uncompressed(reader)?; + let beta_g2 = read_uncompressed(reader)?; + + Ok(PublicKey { + tau_g1: (tau_g1_s, tau_g1_s_tau), + alpha_g1: (alpha_g1_s, alpha_g1_s_alpha), + beta_g1: (beta_g1_s, beta_g1_s_beta), + tau_g2: tau_g2, + alpha_g2: alpha_g2, + beta_g2: beta_g2 + }) + } +} + +#[test] +fn test_pubkey_serialization() { + use rand::thread_rng; + + let rng = &mut thread_rng(); + let digest = (0..64).map(|_| rng.gen()).collect::>(); + let (pk, _) = keypair(rng, &digest); + let mut v = vec![]; + pk.serialize(&mut v).unwrap(); + assert_eq!(v.len(), PUBLIC_KEY_SIZE); + let deserialized = PublicKey::deserialize(&mut &v[..]).unwrap(); + assert!(pk == deserialized); +} + +/// The `Accumulator` is an object that participants of the ceremony contribute +/// randomness to. This object contains powers of trapdoor `tau` in G1 and in G2 over +/// fixed generators, and additionally in G1 over two other generators of exponents +/// `alpha` and `beta` over those fixed generators. 
In other words: +/// +/// * (τ, τ2, ..., τ222 - 2, α, ατ, ατ2, ..., ατ221 - 1, β, βτ, βτ2, ..., βτ221 - 1)1 +/// * (β, τ, τ2, ..., τ221 - 1)2 +#[derive(PartialEq, Eq, Clone)] +pub struct Accumulator { + /// tau^0, tau^1, tau^2, ..., tau^{TAU_POWERS_G1_LENGTH - 1} + tau_powers_g1: Vec, + /// tau^0, tau^1, tau^2, ..., tau^{TAU_POWERS_LENGTH - 1} + tau_powers_g2: Vec, + /// alpha * tau^0, alpha * tau^1, alpha * tau^2, ..., alpha * tau^{TAU_POWERS_LENGTH - 1} + alpha_tau_powers_g1: Vec, + /// beta * tau^0, beta * tau^1, beta * tau^2, ..., beta * tau^{TAU_POWERS_LENGTH - 1} + beta_tau_powers_g1: Vec, + /// beta + beta_g2: G2Affine +} + +impl Accumulator { + /// Constructs an "initial" accumulator with τ = 1, α = 1, β = 1. + pub fn new() -> Self { + Accumulator { + tau_powers_g1: vec![G1Affine::one(); TAU_POWERS_G1_LENGTH], + tau_powers_g2: vec![G2Affine::one(); TAU_POWERS_LENGTH], + alpha_tau_powers_g1: vec![G1Affine::one(); TAU_POWERS_LENGTH], + beta_tau_powers_g1: vec![G1Affine::one(); TAU_POWERS_LENGTH], + beta_g2: G2Affine::one() + } + } + + /// Write the accumulator with some compression behavior. + pub fn serialize( + &self, + writer: &mut W, + compression: UseCompression + ) -> io::Result<()> + { + fn write_all( + writer: &mut W, + c: &[C], + compression: UseCompression + ) -> io::Result<()> + { + for c in c { + write_point(writer, c, compression)?; + } + + Ok(()) + } + + write_all(writer, &self.tau_powers_g1, compression)?; + write_all(writer, &self.tau_powers_g2, compression)?; + write_all(writer, &self.alpha_tau_powers_g1, compression)?; + write_all(writer, &self.beta_tau_powers_g1, compression)?; + write_all(writer, &[self.beta_g2], compression)?; + + Ok(()) + } + + /// Read the accumulator from disk with some compression behavior. `checked` + /// indicates whether we should check it's a valid element of the group and + /// not the point at infinity. 
+ pub fn deserialize( + reader: &mut R, + compression: UseCompression, + checked: CheckForCorrectness + ) -> Result + { + fn read_all( + reader: &mut R, + size: usize, + compression: UseCompression, + checked: CheckForCorrectness + ) -> Result, DeserializationError> + { + fn decompress_all( + reader: &mut R, + size: usize, + checked: CheckForCorrectness + ) -> Result, DeserializationError> + { + // Read the encoded elements + let mut res = vec![E::empty(); size]; + + for encoded in &mut res { + reader.read_exact(encoded.as_mut())?; + } + + // Allocate space for the deserialized elements + let mut res_affine = vec![E::Affine::zero(); size]; + + let mut chunk_size = res.len() / num_cpus::get(); + if chunk_size == 0 { + chunk_size = 1; + } + + // If any of our threads encounter a deserialization/IO error, catch + // it with this. + let decoding_error = Arc::new(Mutex::new(None)); + + crossbeam::scope(|scope| { + for (source, target) in res.chunks(chunk_size).zip(res_affine.chunks_mut(chunk_size)) { + let decoding_error = decoding_error.clone(); + + scope.spawn(move || { + for (source, target) in source.iter().zip(target.iter_mut()) { + match { + // If we're a participant, we don't need to check all of the + // elements in the accumulator, which saves a lot of time. + // The hash chain prevents this from being a problem: the + // transcript guarantees that the accumulator was properly + // formed. 
+ match checked { + CheckForCorrectness::Yes => { + // Points at infinity are never expected in the accumulator + source.into_affine().map_err(|e| e.into()).and_then(|source| { + if source.is_zero() { + Err(DeserializationError::PointAtInfinity) + } else { + Ok(source) + } + }) + }, + CheckForCorrectness::No => source.into_affine_unchecked().map_err(|e| e.into()) + } + } + { + Ok(source) => { + *target = source; + }, + Err(e) => { + *decoding_error.lock().unwrap() = Some(e); + } + } + } + }); + } + }); + + match Arc::try_unwrap(decoding_error).unwrap().into_inner().unwrap() { + Some(e) => { + Err(e) + }, + None => { + Ok(res_affine) + } + } + } + + match compression { + UseCompression::Yes => decompress_all::<_, C::Compressed>(reader, size, checked), + UseCompression::No => decompress_all::<_, C::Uncompressed>(reader, size, checked) + } + } + + let tau_powers_g1 = read_all(reader, TAU_POWERS_G1_LENGTH, compression, checked)?; + let tau_powers_g2 = read_all(reader, TAU_POWERS_LENGTH, compression, checked)?; + let alpha_tau_powers_g1 = read_all(reader, TAU_POWERS_LENGTH, compression, checked)?; + let beta_tau_powers_g1 = read_all(reader, TAU_POWERS_LENGTH, compression, checked)?; + let beta_g2 = read_all(reader, 1, compression, checked)?[0]; + + Ok(Accumulator { + tau_powers_g1: tau_powers_g1, + tau_powers_g2: tau_powers_g2, + alpha_tau_powers_g1: alpha_tau_powers_g1, + beta_tau_powers_g1: beta_tau_powers_g1, + beta_g2: beta_g2 + }) + } + + /// Transforms the accumulator with a private key. 
+ pub fn transform(&mut self, key: &PrivateKey) + { + // Construct the powers of tau + let mut taupowers = vec![Fr::zero(); TAU_POWERS_G1_LENGTH]; + let chunk_size = TAU_POWERS_G1_LENGTH / num_cpus::get(); + + // Construct exponents in parallel + crossbeam::scope(|scope| { + for (i, taupowers) in taupowers.chunks_mut(chunk_size).enumerate() { + scope.spawn(move || { + let mut acc = key.tau.pow(&[(i * chunk_size) as u64]); + + for t in taupowers { + *t = acc; + acc.mul_assign(&key.tau); + } + }); + } + }); + + /// Exponentiate a large number of points, with an optional coefficient to be applied to the + /// exponent. + fn batch_exp(bases: &mut [C], exp: &[C::Scalar], coeff: Option<&C::Scalar>) { + assert_eq!(bases.len(), exp.len()); + let mut projective = vec![C::Projective::zero(); bases.len()]; + let chunk_size = bases.len() / num_cpus::get(); + + // Perform wNAF over multiple cores, placing results into `projective`. + crossbeam::scope(|scope| { + for ((bases, exp), projective) in bases.chunks_mut(chunk_size) + .zip(exp.chunks(chunk_size)) + .zip(projective.chunks_mut(chunk_size)) + { + scope.spawn(move || { + let mut wnaf = Wnaf::new(); + + for ((base, exp), projective) in bases.iter_mut() + .zip(exp.iter()) + .zip(projective.iter_mut()) + { + let mut exp = *exp; + if let Some(coeff) = coeff { + exp.mul_assign(coeff); + } + + *projective = wnaf.base(base.into_projective(), 1).scalar(exp.into_repr()); + } + }); + } + }); + + // Perform batch normalization + crossbeam::scope(|scope| { + for projective in projective.chunks_mut(chunk_size) + { + scope.spawn(move || { + C::Projective::batch_normalization(projective); + }); + } + }); + + // Turn it all back into affine points + for (projective, affine) in projective.iter().zip(bases.iter_mut()) { + *affine = projective.into_affine(); + } + } + + batch_exp(&mut self.tau_powers_g1, &taupowers[0..], None); + batch_exp(&mut self.tau_powers_g2, &taupowers[0..TAU_POWERS_LENGTH], None); + batch_exp(&mut 
self.alpha_tau_powers_g1, &taupowers[0..TAU_POWERS_LENGTH], Some(&key.alpha)); + batch_exp(&mut self.beta_tau_powers_g1, &taupowers[0..TAU_POWERS_LENGTH], Some(&key.beta)); + self.beta_g2 = self.beta_g2.mul(key.beta).into_affine(); + } +} + +/// Verifies a transformation of the `Accumulator` with the `PublicKey`, given a 64-byte transcript `digest`. +pub fn verify_transform(before: &Accumulator, after: &Accumulator, key: &PublicKey, digest: &[u8]) -> bool +{ + assert_eq!(digest.len(), 64); + + let compute_g2_s = |g1_s: G1Affine, g1_s_x: G1Affine, personalization: u8| { + let mut h = Blake2b::default(); + h.input(&[personalization]); + h.input(digest); + h.input(g1_s.into_uncompressed().as_ref()); + h.input(g1_s_x.into_uncompressed().as_ref()); + hash_to_g2(h.result().as_ref()).into_affine() + }; + + let tau_g2_s = compute_g2_s(key.tau_g1.0, key.tau_g1.1, 0); + let alpha_g2_s = compute_g2_s(key.alpha_g1.0, key.alpha_g1.1, 1); + let beta_g2_s = compute_g2_s(key.beta_g1.0, key.beta_g1.1, 2); + + // Check the proofs-of-knowledge for tau/alpha/beta + if !same_ratio(key.tau_g1, (tau_g2_s, key.tau_g2)) { + return false; + } + if !same_ratio(key.alpha_g1, (alpha_g2_s, key.alpha_g2)) { + return false; + } + if !same_ratio(key.beta_g1, (beta_g2_s, key.beta_g2)) { + return false; + } + + // Check the correctness of the generators for tau powers + if after.tau_powers_g1[0] != G1Affine::one() { + return false; + } + if after.tau_powers_g2[0] != G2Affine::one() { + return false; + } + + // Did the participant multiply the previous tau by the new one? + if !same_ratio((before.tau_powers_g1[1], after.tau_powers_g1[1]), (tau_g2_s, key.tau_g2)) { + return false; + } + + // Did the participant multiply the previous alpha by the new one? + if !same_ratio((before.alpha_tau_powers_g1[0], after.alpha_tau_powers_g1[0]), (alpha_g2_s, key.alpha_g2)) { + return false; + } + + // Did the participant multiply the previous beta by the new one? 
+ if !same_ratio((before.beta_tau_powers_g1[0], after.beta_tau_powers_g1[0]), (beta_g2_s, key.beta_g2)) { + return false; + } + if !same_ratio((before.beta_tau_powers_g1[0], after.beta_tau_powers_g1[0]), (before.beta_g2, after.beta_g2)) { + return false; + } + + // Are the powers of tau correct? + if !same_ratio(power_pairs(&after.tau_powers_g1), (after.tau_powers_g2[0], after.tau_powers_g2[1])) { + return false; + } + if !same_ratio(power_pairs(&after.tau_powers_g2), (after.tau_powers_g1[0], after.tau_powers_g1[1])) { + return false; + } + if !same_ratio(power_pairs(&after.alpha_tau_powers_g1), (after.tau_powers_g2[0], after.tau_powers_g2[1])) { + return false; + } + if !same_ratio(power_pairs(&after.beta_tau_powers_g1), (after.tau_powers_g2[0], after.tau_powers_g2[1])) { + return false; + } + + true +} + +/// Computes a random linear combination over v1/v2. +/// +/// Checking that many pairs of elements are exponentiated by +/// the same `x` can be achieved (with high probability) with +/// the following technique: +/// +/// Given v1 = [a, b, c] and v2 = [as, bs, cs], compute +/// (a*r1 + b*r2 + c*r3, (as)*r1 + (bs)*r2 + (cs)*r3) for some +/// random r1, r2, r3. Given (g, g^s)... +/// +/// e(g, (as)*r1 + (bs)*r2 + (cs)*r3) = e(g^s, a*r1 + b*r2 + c*r3) +/// +/// ... with high probability. +fn merge_pairs(v1: &[G], v2: &[G]) -> (G, G) +{ + use std::sync::{Arc, Mutex}; + use rand::{thread_rng}; + + assert_eq!(v1.len(), v2.len()); + + let chunk = (v1.len() / num_cpus::get()) + 1; + + let s = Arc::new(Mutex::new(G::Projective::zero())); + let sx = Arc::new(Mutex::new(G::Projective::zero())); + + crossbeam::scope(|scope| { + for (v1, v2) in v1.chunks(chunk).zip(v2.chunks(chunk)) { + let s = s.clone(); + let sx = sx.clone(); + + scope.spawn(move || { + // We do not need to be overly cautious of the RNG + // used for this check. 
+ let rng = &mut thread_rng(); + + let mut wnaf = Wnaf::new(); + let mut local_s = G::Projective::zero(); + let mut local_sx = G::Projective::zero(); + + for (v1, v2) in v1.iter().zip(v2.iter()) { + let rho = G::Scalar::rand(rng); + let mut wnaf = wnaf.scalar(rho.into_repr()); + let v1 = wnaf.base(v1.into_projective()); + let v2 = wnaf.base(v2.into_projective()); + + local_s.add_assign(&v1); + local_sx.add_assign(&v2); + } + + s.lock().unwrap().add_assign(&local_s); + sx.lock().unwrap().add_assign(&local_sx); + }); + } + }); + + let s = s.lock().unwrap().into_affine(); + let sx = sx.lock().unwrap().into_affine(); + + (s, sx) +} + +/// Construct a single pair (s, s^x) for a vector of +/// the form [1, x, x^2, x^3, ...]. +fn power_pairs(v: &[G]) -> (G, G) +{ + merge_pairs(&v[0..(v.len()-1)], &v[1..]) +} + +#[test] +fn test_power_pairs() { + use rand::thread_rng; + + let rng = &mut thread_rng(); + + let mut v = vec![]; + let x = Fr::rand(rng); + let mut acc = Fr::one(); + for _ in 0..100 { + v.push(G1Affine::one().mul(acc).into_affine()); + acc.mul_assign(&x); + } + + let gx = G2Affine::one().mul(x).into_affine(); + + assert!(same_ratio(power_pairs(&v), (G2Affine::one(), gx))); + + v[1] = v[1].mul(Fr::rand(rng)).into_affine(); + + assert!(!same_ratio(power_pairs(&v), (G2Affine::one(), gx))); +} + +/// Checks if pairs have the same ratio. 
+fn same_ratio( + g1: (G1, G1), + g2: (G1::Pair, G1::Pair) +) -> bool +{ + g1.0.pairing_with(&g2.1) == g1.1.pairing_with(&g2.0) +} + +#[test] +fn test_same_ratio() { + use rand::thread_rng; + + let rng = &mut thread_rng(); + + let s = Fr::rand(rng); + let g1 = G1Affine::one(); + let g2 = G2Affine::one(); + let g1_s = g1.mul(s).into_affine(); + let g2_s = g2.mul(s).into_affine(); + + assert!(same_ratio((g1, g1_s), (g2, g2_s))); + assert!(!same_ratio((g1_s, g1), (g2, g2_s))); +} + +#[test] +fn test_accumulator_serialization() { + use rand::thread_rng; + + let rng = &mut thread_rng(); + let mut digest = (0..64).map(|_| rng.gen()).collect::>(); + + let mut acc = Accumulator::new(); + let before = acc.clone(); + let (pk, sk) = keypair(rng, &digest); + acc.transform(&sk); + assert!(verify_transform(&before, &acc, &pk, &digest)); + digest[0] = !digest[0]; + assert!(!verify_transform(&before, &acc, &pk, &digest)); + let mut v = Vec::with_capacity(ACCUMULATOR_BYTE_SIZE - 64); + acc.serialize(&mut v, UseCompression::No).unwrap(); + assert_eq!(v.len(), ACCUMULATOR_BYTE_SIZE - 64); + let deserialized = Accumulator::deserialize(&mut &v[..], UseCompression::No, CheckForCorrectness::No).unwrap(); + assert!(acc == deserialized); +} + +/// Compute BLAKE2b("") +pub fn blank_hash() -> GenericArray { + Blake2b::new().result() +} + +/// Abstraction over a reader which hashes the data being read. +pub struct HashReader { + reader: R, + hasher: Blake2b +} + +impl HashReader { + /// Construct a new `HashReader` given an existing `reader` by value. + pub fn new(reader: R) -> Self { + HashReader { + reader: reader, + hasher: Blake2b::default() + } + } + + /// Destroy this reader and return the hash of what was read. 
+ pub fn into_hash(self) -> GenericArray { + self.hasher.result() + } +} + +impl Read for HashReader { + fn read(&mut self, buf: &mut [u8]) -> io::Result { + let bytes = self.reader.read(buf)?; + + if bytes > 0 { + self.hasher.input(&buf[0..bytes]); + } + + Ok(bytes) + } +} + +/// Abstraction over a writer which hashes the data being written. +pub struct HashWriter { + writer: W, + hasher: Blake2b +} + +impl HashWriter { + /// Construct a new `HashWriter` given an existing `writer` by value. + pub fn new(writer: W) -> Self { + HashWriter { + writer: writer, + hasher: Blake2b::default() + } + } + + /// Destroy this writer and return the hash of what was written. + pub fn into_hash(self) -> GenericArray { + self.hasher.result() + } +} + +impl Write for HashWriter { + fn write(&mut self, buf: &[u8]) -> io::Result { + let bytes = self.writer.write(buf)?; + + if bytes > 0 { + self.hasher.input(&buf[0..bytes]); + } + + Ok(bytes) + } + + fn flush(&mut self) -> io::Result<()> { + self.writer.flush() + } +} From 746f45f9e6103a24d2d1dc634a61dfa4cf32a8e6 Mon Sep 17 00:00:00 2001 From: Sean Bowe Date: Tue, 31 Oct 2017 09:35:46 -0600 Subject: [PATCH 02/23] Update paper link --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index a6d8f74..15c3d05 100644 --- a/README.md +++ b/README.md @@ -2,7 +2,7 @@ This is a [multi-party computation](https://en.wikipedia.org/wiki/Secure_multi-party_computation) (MPC) ceremony which constructs partial zk-SNARK parameters for _all_ circuits up to a depth of 221. It works by taking a step that is performed by all zk-SNARK MPCs and performing it in just one single ceremony. This makes individual zk-SNARK MPCs much cheaper and allows them to scale to practically unbounded numbers of participants. -This protocol is described in a [forthcoming paper](https://eprint.iacr.org/2017/XXX). 
It produces parameters for an adaptation of [Jens Groth's 2016 pairing-based proving system](https://eprint.iacr.org/2016/260) using the [BLS12-381](https://github.com/ebfull/pairing/tree/master/src/bls12_381) elliptic curve construction. The security proof relies on a randomness beacon being applied at the end of the ceremony. +This protocol is described in a [forthcoming paper](https://eprint.iacr.org/2017/1050). It produces parameters for an adaptation of [Jens Groth's 2016 pairing-based proving system](https://eprint.iacr.org/2016/260) using the [BLS12-381](https://github.com/ebfull/pairing/tree/master/src/bls12_381) elliptic curve construction. The security proof relies on a randomness beacon being applied at the end of the ceremony. **This is a work in progress.** From 7c8da6abb92c4b598c3b75c4fe3fe7e42dceb136 Mon Sep 17 00:00:00 2001 From: Sean Bowe Date: Wed, 8 Nov 2017 13:38:17 -0700 Subject: [PATCH 03/23] Implementation of `compute` tool for participants, along with README changes and a version bump. 
--- Cargo.lock | 2 +- Cargo.toml | 2 +- README.md | 26 ++++++++- src/bin/compute.rs | 128 +++++++++++++++++++++++++++++++++++++++++++++ 4 files changed, 155 insertions(+), 3 deletions(-) create mode 100644 src/bin/compute.rs diff --git a/Cargo.lock b/Cargo.lock index 95a7464..981f146 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1,6 +1,6 @@ [root] name = "powersoftau" -version = "0.1.0" +version = "0.1.1" dependencies = [ "blake2 0.6.1 (registry+https://github.com/rust-lang/crates.io-index)", "byteorder 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)", diff --git a/Cargo.toml b/Cargo.toml index 6e71d26..b491296 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "powersoftau" -version = "0.1.0" +version = "0.1.1" authors = ["Sean Bowe"] license = "MIT/Apache-2.0" diff --git a/README.md b/README.md index 15c3d05..5b5597a 100644 --- a/README.md +++ b/README.md @@ -4,7 +4,31 @@ This is a [multi-party computation](https://en.wikipedia.org/wiki/Secure_multi-p This protocol is described in a [forthcoming paper](https://eprint.iacr.org/2017/1050). It produces parameters for an adaptation of [Jens Groth's 2016 pairing-based proving system](https://eprint.iacr.org/2016/260) using the [BLS12-381](https://github.com/ebfull/pairing/tree/master/src/bls12_381) elliptic curve construction. The security proof relies on a randomness beacon being applied at the end of the ceremony. -**This is a work in progress.** +## Instructions + +If you've been asked to participate, you were sent a `challenge` file. Put that in the current directory and use your Rust toolchain execute the computation: + +``` +cargo run --release --bin compute +``` + +The process could take an hour or so. When it's finished, it will place a `response` file in the current directory. That's what you send back. It will also print a hash of the `response` file it produced. 
You need to write this hash down (or post it publicly) so that you and others can confirm that your contribution exists in the final transcript of the ceremony. + +## Recommendations + +Participants of the ceremony sample some randomness, perform a computation, and then destroy the randomness. **Only one participant needs to do this successfully to ensure the final parameters are secure.** In order to see that this randomness is truly destroyed, participants may take various kinds of precautions: + +* putting the machine in a Faraday cage +* destroying the machine afterwards +* running the software on secure hardware +* not connecting the hardware to any networks +* using multiple machines and randomly picking the result of one of them to use +* using different code than what we have provided +* using a secure operating system +* using an operating system that nobody would expect you to use (Rust can compile to Mac OS X and Windows) +* lots of other ideas we can't think of + +It is totally up to the participants. In general, participations should beware of side-channel attacks and assume that remnants of the randomness will be in RAM after the computation has finished. 
## License diff --git a/src/bin/compute.rs b/src/bin/compute.rs new file mode 100644 index 0000000..955dd83 --- /dev/null +++ b/src/bin/compute.rs @@ -0,0 +1,128 @@ +extern crate powersoftau; +extern crate rand; +extern crate blake2; +extern crate byteorder; + +use powersoftau::*; + +use std::fs::OpenOptions; +use std::io::{self, Read, BufReader, Write, BufWriter}; + +fn main() { + // Create an RNG based on a mixture of system randomness and user provided randomness + let mut rng = { + use byteorder::{ReadBytesExt, BigEndian}; + use blake2::{Blake2b, Digest}; + use rand::{SeedableRng, Rng, OsRng}; + use rand::chacha::ChaChaRng; + + let h = { + let mut system_rng = OsRng::new().unwrap(); + let mut h = Blake2b::default(); + + // Gather 1024 bytes of entropy from the system + for _ in 0..1024 { + let r: u8 = system_rng.gen(); + h.input(&[r]); + } + + // Ask the user to provide some information for additional entropy + let mut user_input = String::new(); + println!("Type some random text and press [ENTER] to provide additional entropy..."); + io::stdin().read_line(&mut user_input).expect("expected to read some random text from the user"); + + // Hash it all up to make a seed + h.input(&user_input.as_bytes()); + h.result() + }; + + let mut digest = &h[..]; + + // Interpret the first 32 bytes of the digest as 8 32-bit words + let mut seed = [0u32; 8]; + for i in 0..8 { + seed[i] = digest.read_u32::().expect("digest is large enough for this to work"); + } + + ChaChaRng::from_seed(&seed) + }; + + // Try to load `./challenge` from disk. 
+ let reader = OpenOptions::new() + .read(true) + .open("challenge").expect("unable open `./challenge` in this directory"); + + { + let metadata = reader.metadata().expect("unable to get filesystem metadata for `./challenge`"); + if metadata.len() != (ACCUMULATOR_BYTE_SIZE as u64) { + panic!("The size of `./challenge` should be {}, but it's {}, so something isn't right.", ACCUMULATOR_BYTE_SIZE, metadata.len()); + } + } + + let reader = BufReader::new(reader); + let mut reader = HashReader::new(reader); + + // Create `./response` in this directory + let writer = OpenOptions::new() + .read(false) + .write(true) + .create_new(true) + .open("response").expect("unable to create `./response` in this directory"); + + let writer = BufWriter::new(writer); + let mut writer = HashWriter::new(writer); + + println!("Reading `./challenge` into memory..."); + + // Read the BLAKE2b hash of the previous contribution + { + // We don't need to do anything with it, but it's important for + // the hash chain. 
+ let mut tmp = [0; 64]; + reader.read_exact(&mut tmp).expect("unable to read BLAKE2b hash of previous contribution"); + } + + // Load the current accumulator into memory + let mut current_accumulator = Accumulator::deserialize(&mut reader, UseCompression::No, CheckForCorrectness::No).expect("unable to read uncompressed accumulator"); + + // Get the hash of the current accumulator + let current_accumulator_hash = reader.into_hash(); + + // Construct our keypair using the RNG we created above + let (pubkey, privkey) = keypair(&mut rng, current_accumulator_hash.as_ref()); + + // Perform the transformation + println!("Computing, this could take a while..."); + current_accumulator.transform(&privkey); + println!("Writing your contribution to `./response`..."); + + // Write the hash of the input accumulator + writer.write_all(¤t_accumulator_hash.as_ref()).expect("unable to write BLAKE2b hash of input accumulator"); + + // Write the transformed accumulator (in compressed form, to save upload bandwidth for disadvantaged + // players.) + current_accumulator.serialize(&mut writer, UseCompression::Yes).expect("unable to write transformed accumulator"); + + // Write the public key + pubkey.serialize(&mut writer).expect("unable to write public key"); + + // Get the hash of the contribution, so the user can compare later + let contribution_hash = writer.into_hash(); + + print!("Done!\n\n\ + Your contribution has been written to `./response`\n\n\ + The BLAKE2b hash of `./response` is:\n"); + + for line in contribution_hash.as_slice().chunks(16) { + print!("\t"); + for section in line.chunks(4) { + for b in section { + print!("{:02x}", b); + } + print!(" "); + } + println!(""); + } + + println!("\n"); +} From 83583caa92a71c38488a030c21c27f3730ce0455 Mon Sep 17 00:00:00 2001 From: Sean Bowe Date: Wed, 8 Nov 2017 13:40:40 -0700 Subject: [PATCH 04/23] Add link to Rust in README. 
--- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 5b5597a..5337936 100644 --- a/README.md +++ b/README.md @@ -6,7 +6,7 @@ This protocol is described in a [forthcoming paper](https://eprint.iacr.org/2017 ## Instructions -If you've been asked to participate, you were sent a `challenge` file. Put that in the current directory and use your Rust toolchain execute the computation: +If you've been asked to participate, you were sent a `challenge` file. Put that in the current directory and use your [Rust toolchain](https://www.rust-lang.org/en-US/) execute the computation: ``` cargo run --release --bin compute From 654b446811017120b8746963b3a582942727d7e2 Mon Sep 17 00:00:00 2001 From: Peter Todd Date: Wed, 8 Nov 2017 21:53:32 -0500 Subject: [PATCH 05/23] Add alternate rust toolchain recommendation --- README.md | 1 + 1 file changed, 1 insertion(+) diff --git a/README.md b/README.md index 5337936..69599ee 100644 --- a/README.md +++ b/README.md @@ -26,6 +26,7 @@ Participants of the ceremony sample some randomness, perform a computation, and * using different code than what we have provided * using a secure operating system * using an operating system that nobody would expect you to use (Rust can compile to Mac OS X and Windows) +* using an unusual Rust toolchain or [alternate rust compiler](https://github.com/thepowersgang/mrustc) * lots of other ideas we can't think of It is totally up to the participants. In general, participations should beware of side-channel attacks and assume that remnants of the randomness will be in RAM after the computation has finished. From 60297403e68e8d996306f3f4883f6002051a1b5a Mon Sep 17 00:00:00 2001 From: Sean Bowe Date: Wed, 8 Nov 2017 21:09:43 -0700 Subject: [PATCH 06/23] Add `new` tool to repository for initializing the first challenge file. 
--- src/bin/new.rs | 24 ++++++++++++++++++++++++ 1 file changed, 24 insertions(+) create mode 100644 src/bin/new.rs diff --git a/src/bin/new.rs b/src/bin/new.rs new file mode 100644 index 0000000..a5bb301 --- /dev/null +++ b/src/bin/new.rs @@ -0,0 +1,24 @@ +extern crate powersoftau; +use powersoftau::*; + +use std::fs::OpenOptions; +use std::io::{Write, BufWriter}; + +fn main() { + let writer = OpenOptions::new() + .read(false) + .write(true) + .create_new(true) + .open("challenge").expect("unable to create `./challenge`"); + + let mut writer = BufWriter::new(writer); + + // Write a blank BLAKE2b hash: + writer.write_all(&blank_hash().as_slice()).expect("unable to write blank hash to `./challenge`"); + + let acc = Accumulator::new(); + acc.serialize(&mut writer, UseCompression::No).expect("unable to write fresh accumulator to `./challenge`"); + writer.flush().expect("unable to flush accumulator to disk"); + + println!("Wrote a fresh accumulator to `./challenge`"); +} From 416e6b61990599d2c82feebb551ad51f76d7082d Mon Sep 17 00:00:00 2001 From: Sean Bowe Date: Thu, 9 Nov 2017 12:42:24 -0700 Subject: [PATCH 07/23] Add `verify_transform` tool for solo verification. --- src/bin/verify_transform.rs | 117 ++++++++++++++++++++++++++++++++++++ 1 file changed, 117 insertions(+) create mode 100644 src/bin/verify_transform.rs diff --git a/src/bin/verify_transform.rs b/src/bin/verify_transform.rs new file mode 100644 index 0000000..cb66f6e --- /dev/null +++ b/src/bin/verify_transform.rs @@ -0,0 +1,117 @@ +extern crate powersoftau; +use powersoftau::*; + +use std::fs::OpenOptions; +use std::io::{Read, Write, BufWriter, BufReader}; + +fn main() { + // Try to load `./challenge` from disk. 
+ let challenge_reader = OpenOptions::new() + .read(true) + .open("challenge").expect("unable open `./challenge` in this directory"); + + { + let metadata = challenge_reader.metadata().expect("unable to get filesystem metadata for `./challenge`"); + if metadata.len() != (ACCUMULATOR_BYTE_SIZE as u64) { + panic!("The size of `./challenge` should be {}, but it's {}, so something isn't right.", ACCUMULATOR_BYTE_SIZE, metadata.len()); + } + } + + let challenge_reader = BufReader::new(challenge_reader); + let mut challenge_reader = HashReader::new(challenge_reader); + + // Try to load `./response` from disk. + let response_reader = OpenOptions::new() + .read(true) + .open("response").expect("unable open `./response` in this directory"); + + { + let metadata = response_reader.metadata().expect("unable to get filesystem metadata for `./response`"); + if metadata.len() != (CONTRIBUTION_BYTE_SIZE as u64) { + panic!("The size of `./response` should be {}, but it's {}, so something isn't right.", CONTRIBUTION_BYTE_SIZE, metadata.len()); + } + } + + let response_reader = BufReader::new(response_reader); + let mut response_reader = HashReader::new(response_reader); + + // Create new_challenge file + let writer = OpenOptions::new() + .read(false) + .write(true) + .create_new(true) + .open("new_challenge").expect("unable to create `./new_challenge`"); + + let mut writer = BufWriter::new(writer); + + // Deserialize the current challenge + + // Read the BLAKE2b hash of the previous contribution + { + // We don't need to do anything with it, but it's important for + // the hash chain. 
+ let mut tmp = [0; 64]; + challenge_reader.read_exact(&mut tmp).expect("unable to read BLAKE2b hash of previous contribution"); + } + + // Load the current accumulator into memory + let current_accumulator = Accumulator::deserialize( + &mut challenge_reader, + UseCompression::No, + CheckForCorrectness::No // no need to check since we constructed the challenge already + ).expect("unable to read uncompressed accumulator"); + + // Get the hash of the current accumulator + let current_accumulator_hash = challenge_reader.into_hash(); + + // Load the response into memory + + // Check the hash chain + { + let mut response_challenge_hash = [0; 64]; + response_reader.read_exact(&mut response_challenge_hash).expect("couldn't read hash of challenge file from response file"); + + if &response_challenge_hash[..] != current_accumulator_hash.as_slice() { + panic!("Hash chain failure. This is not the right response."); + } + } + + // Load the response's accumulator + let new_accumulator = Accumulator::deserialize(&mut response_reader, UseCompression::Yes, CheckForCorrectness::Yes) + .expect("wasn't able to deserialize the response file's accumulator"); + + // Load the response's pubkey + let public_key = PublicKey::deserialize(&mut response_reader) + .expect("wasn't able to deserialize the response file's public key"); + + // Get the hash of the response file + let response_hash = response_reader.into_hash(); + + if !verify_transform(¤t_accumulator, &new_accumulator, &public_key, current_accumulator_hash.as_slice()) { + println!("Verification failed, contribution was invalid somehow."); + panic!("INVALID CONTRIBUTION!!!"); + } else { + println!("Verification succeeded!"); + } + + println!("Here's the BLAKE2b hash of the participant's response file:"); + + for line in response_hash.as_slice().chunks(16) { + print!("\t"); + for section in line.chunks(4) { + for b in section { + print!("{:02x}", b); + } + print!(" "); + } + println!(""); + } + + println!("Verification succeeded! 
Writing to `./new_challenge`..."); + + writer.write_all(response_hash.as_slice()).expect("couldn't write response file's hash into the `./new_challenge` file"); + new_accumulator.serialize(&mut writer, UseCompression::No).expect("unable to write uncompressed accumulator into the `./new_challenge` file"); + + println!("Done! `./new_challenge` contains the new challenge file. The other files"); + println!("were left alone."); +} From 9e1553c437183540392a7231d0788318a19b18a3 Mon Sep 17 00:00:00 2001 From: Sean Bowe Date: Thu, 9 Nov 2017 16:31:35 -0700 Subject: [PATCH 08/23] Fix mistake in README. --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 69599ee..1274426 100644 --- a/README.md +++ b/README.md @@ -6,7 +6,7 @@ This protocol is described in a [forthcoming paper](https://eprint.iacr.org/2017 ## Instructions -If you've been asked to participate, you were sent a `challenge` file. Put that in the current directory and use your [Rust toolchain](https://www.rust-lang.org/en-US/) execute the computation: +If you've been asked to participate, you were sent a `challenge` file. Put that in the current directory and use your [Rust toolchain](https://www.rust-lang.org/en-US/) to execute the computation: ``` cargo run --release --bin compute From 22846cda9a651ab459167cffed80b1eb3ec52fb7 Mon Sep 17 00:00:00 2001 From: Gareth Davies Date: Wed, 15 Nov 2017 19:37:38 -0800 Subject: [PATCH 09/23] Fixing typo in README --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 1274426..4953996 100644 --- a/README.md +++ b/README.md @@ -29,7 +29,7 @@ Participants of the ceremony sample some randomness, perform a computation, and * using an unusual Rust toolchain or [alternate rust compiler](https://github.com/thepowersgang/mrustc) * lots of other ideas we can't think of -It is totally up to the participants. 
In general, participations should beware of side-channel attacks and assume that remnants of the randomness will be in RAM after the computation has finished. +It is totally up to the participants. In general, participants should beware of side-channel attacks and assume that remnants of the randomness will be in RAM after the computation has finished. ## License From 2537d11f17d427a1ecea99445182b32b69474888 Mon Sep 17 00:00:00 2001 From: Sean Bowe Date: Sun, 18 Mar 2018 12:39:08 -0600 Subject: [PATCH 10/23] Add random beacon query code. --- Cargo.lock | 128 ++++++++++++++++++++++++++++++++++++----- Cargo.toml | 2 + src/bin/beacon.rs | 142 ++++++++++++++++++++++++++++++++++++++++++++++ 3 files changed, 258 insertions(+), 14 deletions(-) create mode 100644 src/bin/beacon.rs diff --git a/Cargo.lock b/Cargo.lock index 981f146..c1d40c4 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1,17 +1,3 @@ -[root] -name = "powersoftau" -version = "0.1.1" -dependencies = [ - "blake2 0.6.1 (registry+https://github.com/rust-lang/crates.io-index)", - "byteorder 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)", - "crossbeam 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)", - "generic-array 0.8.3 (registry+https://github.com/rust-lang/crates.io-index)", - "num_cpus 1.7.0 (registry+https://github.com/rust-lang/crates.io-index)", - "pairing 0.13.0 (registry+https://github.com/rust-lang/crates.io-index)", - "rand 0.3.17 (registry+https://github.com/rust-lang/crates.io-index)", - "typenum 1.9.0 (registry+https://github.com/rust-lang/crates.io-index)", -] - [[package]] name = "bitflags" version = "0.7.0" @@ -81,6 +67,11 @@ dependencies = [ "bitflags 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)", ] +[[package]] +name = "gcc" +version = "0.3.54" +source = "registry+https://github.com/rust-lang/crates.io-index" + [[package]] name = "generic-array" version = "0.8.3" @@ -90,6 +81,23 @@ dependencies = [ "typenum 1.9.0 
(registry+https://github.com/rust-lang/crates.io-index)", ] +[[package]] +name = "hex-literal" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "hex-literal-impl 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", + "proc-macro-hack 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "hex-literal-impl" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "proc-macro-hack 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", +] + [[package]] name = "libc" version = "0.2.32" @@ -117,6 +125,35 @@ dependencies = [ "rand 0.3.17 (registry+https://github.com/rust-lang/crates.io-index)", ] +[[package]] +name = "powersoftau" +version = "0.1.1" +dependencies = [ + "blake2 0.6.1 (registry+https://github.com/rust-lang/crates.io-index)", + "byteorder 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)", + "crossbeam 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)", + "generic-array 0.8.3 (registry+https://github.com/rust-lang/crates.io-index)", + "hex-literal 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", + "num_cpus 1.7.0 (registry+https://github.com/rust-lang/crates.io-index)", + "pairing 0.13.0 (registry+https://github.com/rust-lang/crates.io-index)", + "rand 0.3.17 (registry+https://github.com/rust-lang/crates.io-index)", + "rust-crypto 0.2.36 (registry+https://github.com/rust-lang/crates.io-index)", + "typenum 1.9.0 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "proc-macro-hack" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "proc-macro-hack-impl 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "proc-macro-hack-impl" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" + [[package]] name = "rand" 
version = "0.3.17" @@ -126,11 +163,62 @@ dependencies = [ "libc 0.2.32 (registry+https://github.com/rust-lang/crates.io-index)", ] +[[package]] +name = "redox_syscall" +version = "0.1.37" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "rust-crypto" +version = "0.2.36" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "gcc 0.3.54 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.32 (registry+https://github.com/rust-lang/crates.io-index)", + "rand 0.3.17 (registry+https://github.com/rust-lang/crates.io-index)", + "rustc-serialize 0.3.24 (registry+https://github.com/rust-lang/crates.io-index)", + "time 0.1.39 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "rustc-serialize" +version = "0.3.24" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "time" +version = "0.1.39" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "libc 0.2.32 (registry+https://github.com/rust-lang/crates.io-index)", + "redox_syscall 0.1.37 (registry+https://github.com/rust-lang/crates.io-index)", + "winapi 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)", +] + [[package]] name = "typenum" version = "1.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" +[[package]] +name = "winapi" +version = "0.3.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "winapi-i686-pc-windows-gnu 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", + "winapi-x86_64-pc-windows-gnu 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "winapi-i686-pc-windows-gnu" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "winapi-x86_64-pc-windows-gnu" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" + [metadata] 
"checksum bitflags 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "aad18937a628ec6abcd26d1489012cc0e18c21798210f491af69ded9b881106d" "checksum blake2 0.6.1 (registry+https://github.com/rust-lang/crates.io-index)" = "53bf612c0f2839b7e764ebac65d6cb985f7c6812de399d0728038f4b1da141bc" @@ -142,10 +230,22 @@ source = "registry+https://github.com/rust-lang/crates.io-index" "checksum digest 0.6.2 (registry+https://github.com/rust-lang/crates.io-index)" = "e5b29bf156f3f4b3c4f610a25ff69370616ae6e0657d416de22645483e72af0a" "checksum fuchsia-zircon 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "f6c0581a4e363262e52b87f59ee2afe3415361c6ec35e665924eb08afe8ff159" "checksum fuchsia-zircon-sys 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "43f3795b4bae048dc6123a6b972cadde2e676f9ded08aef6bb77f5f157684a82" +"checksum gcc 0.3.54 (registry+https://github.com/rust-lang/crates.io-index)" = "5e33ec290da0d127825013597dbdfc28bee4964690c7ce1166cbc2a7bd08b1bb" "checksum generic-array 0.8.3 (registry+https://github.com/rust-lang/crates.io-index)" = "fceb69994e330afed50c93524be68c42fa898c2d9fd4ee8da03bd7363acd26f2" +"checksum hex-literal 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "4da5f0e01bd8a71a224a4eedecaacfcabda388dbb7a80faf04d3514287572d95" +"checksum hex-literal-impl 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "1d340b6514f232f6db1bd16db65302a5278a04fef9ce867cb932e7e5fa21130a" "checksum libc 0.2.32 (registry+https://github.com/rust-lang/crates.io-index)" = "56cce3130fd040c28df6f495c8492e5ec5808fb4c9093c310df02b0c8f030148" "checksum nodrop 0.1.12 (registry+https://github.com/rust-lang/crates.io-index)" = "9a2228dca57108069a5262f2ed8bd2e82496d2e074a06d1ccc7ce1687b6ae0a2" "checksum num_cpus 1.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "514f0d73e64be53ff320680ca671b64fe3fb91da01e1ae2ddc99eb51d453b20d" "checksum pairing 0.13.0 
(registry+https://github.com/rust-lang/crates.io-index)" = "c6cecc13d1871265fea9be96aa9f1cc027edacd55d2fb1fbc164e19725b88d70" +"checksum proc-macro-hack 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "3ba8d4f9257b85eb6cdf13f055cea3190520aab1409ca2ab43493ea4820c25f0" +"checksum proc-macro-hack-impl 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "d5cb6f960ad471404618e9817c0e5d10b1ae74cfdf01fab89ea0641fe7fb2892" "checksum rand 0.3.17 (registry+https://github.com/rust-lang/crates.io-index)" = "61efcbcd9fa8d8fbb07c84e34a8af18a1ff177b449689ad38a6e9457ecc7b2ae" +"checksum redox_syscall 0.1.37 (registry+https://github.com/rust-lang/crates.io-index)" = "0d92eecebad22b767915e4d529f89f28ee96dbbf5a4810d2b844373f136417fd" +"checksum rust-crypto 0.2.36 (registry+https://github.com/rust-lang/crates.io-index)" = "f76d05d3993fd5f4af9434e8e436db163a12a9d40e1a58a726f27a01dfd12a2a" +"checksum rustc-serialize 0.3.24 (registry+https://github.com/rust-lang/crates.io-index)" = "dcf128d1287d2ea9d80910b5f1120d0b8eede3fbf1abe91c40d39ea7d51e6fda" +"checksum time 0.1.39 (registry+https://github.com/rust-lang/crates.io-index)" = "a15375f1df02096fb3317256ce2cee6a1f42fc84ea5ad5fc8c421cfe40c73098" "checksum typenum 1.9.0 (registry+https://github.com/rust-lang/crates.io-index)" = "13a99dc6780ef33c78780b826cf9d2a78840b72cae9474de4bcaf9051e60ebbd" +"checksum winapi 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)" = "04e3bd221fcbe8a271359c04f21a76db7d0c6028862d1bb5512d85e1e2eb5bb3" +"checksum winapi-i686-pc-windows-gnu 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" +"checksum winapi-x86_64-pc-windows-gnu 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" diff --git a/Cargo.toml b/Cargo.toml index b491296..9b150fe 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -18,6 +18,8 @@ generic-array = 
"0.8.3" typenum = "1.9.0" byteorder = "1.1.0" pairing = "0.13.0" +hex-literal = "0.1" +rust-crypto = "0.2" [features] u128-support = ["pairing/u128-support"] diff --git a/src/bin/beacon.rs b/src/bin/beacon.rs new file mode 100644 index 0000000..a76d0d2 --- /dev/null +++ b/src/bin/beacon.rs @@ -0,0 +1,142 @@ +extern crate powersoftau; +extern crate rand; +extern crate blake2; +extern crate byteorder; + +#[macro_use] +extern crate hex_literal; + +extern crate crypto; + +use powersoftau::*; + +use std::fs::OpenOptions; +use std::io::{Read, BufReader, Write, BufWriter}; + +fn main() { + // Create an RNG based on the outcome of the random beacon + let mut rng = { + use byteorder::{ReadBytesExt, BigEndian}; + use rand::{SeedableRng}; + use rand::chacha::ChaChaRng; + use crypto::sha2::Sha256; + use crypto::digest::Digest; + + // Place block hash here (this will change) + let mut cur_hash: [u8; 32] = hex!("000000000000000000357b7d5040ee362f037bdf873eb4cde615b2f368f2f2a6"); + + // Performs 2^n hash iterations over it + const N: usize = 40; + + for i in 0..(1u64<().expect("digest is large enough for this to work"); + } + + ChaChaRng::from_seed(&seed) + }; + + // Try to load `./challenge` from disk. 
+ let reader = OpenOptions::new() + .read(true) + .open("challenge").expect("unable open `./challenge` in this directory"); + + { + let metadata = reader.metadata().expect("unable to get filesystem metadata for `./challenge`"); + if metadata.len() != (ACCUMULATOR_BYTE_SIZE as u64) { + panic!("The size of `./challenge` should be {}, but it's {}, so something isn't right.", ACCUMULATOR_BYTE_SIZE, metadata.len()); + } + } + + let reader = BufReader::new(reader); + let mut reader = HashReader::new(reader); + + // Create `./response` in this directory + let writer = OpenOptions::new() + .read(false) + .write(true) + .create_new(true) + .open("response").expect("unable to create `./response` in this directory"); + + let writer = BufWriter::new(writer); + let mut writer = HashWriter::new(writer); + + println!("Reading `./challenge` into memory..."); + + // Read the BLAKE2b hash of the previous contribution + { + // We don't need to do anything with it, but it's important for + // the hash chain. 
+ let mut tmp = [0; 64]; + reader.read_exact(&mut tmp).expect("unable to read BLAKE2b hash of previous contribution"); + } + + // Load the current accumulator into memory + let mut current_accumulator = Accumulator::deserialize(&mut reader, UseCompression::No, CheckForCorrectness::No).expect("unable to read uncompressed accumulator"); + + // Get the hash of the current accumulator + let current_accumulator_hash = reader.into_hash(); + + // Construct our keypair using the RNG we created above + let (pubkey, privkey) = keypair(&mut rng, current_accumulator_hash.as_ref()); + + // Perform the transformation + println!("Computing, this could take a while..."); + current_accumulator.transform(&privkey); + println!("Writing your contribution to `./response`..."); + + // Write the hash of the input accumulator + writer.write_all(¤t_accumulator_hash.as_ref()).expect("unable to write BLAKE2b hash of input accumulator"); + + // Write the transformed accumulator (in compressed form, to save upload bandwidth for disadvantaged + // players.) + current_accumulator.serialize(&mut writer, UseCompression::Yes).expect("unable to write transformed accumulator"); + + // Write the public key + pubkey.serialize(&mut writer).expect("unable to write public key"); + + // Get the hash of the contribution, so the user can compare later + let contribution_hash = writer.into_hash(); + + print!("Done!\n\n\ + Your contribution has been written to `./response`\n\n\ + The BLAKE2b hash of `./response` is:\n"); + + for line in contribution_hash.as_slice().chunks(16) { + print!("\t"); + for section in line.chunks(4) { + for b in section { + print!("{:02x}", b); + } + print!(" "); + } + println!(""); + } + + println!("\n"); +} From a0904e3a30c8afdc0dac1ab67c2b8c6571075c9d Mon Sep 17 00:00:00 2001 From: Sean Bowe Date: Sun, 18 Mar 2018 13:14:05 -0600 Subject: [PATCH 11/23] Use 2^42 iterations instead of 2^40. 
--- src/bin/beacon.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/bin/beacon.rs b/src/bin/beacon.rs index a76d0d2..762ca50 100644 --- a/src/bin/beacon.rs +++ b/src/bin/beacon.rs @@ -26,7 +26,7 @@ fn main() { let mut cur_hash: [u8; 32] = hex!("000000000000000000357b7d5040ee362f037bdf873eb4cde615b2f368f2f2a6"); // Performs 2^n hash iterations over it - const N: usize = 40; + const N: usize = 42; for i in 0..(1u64< Date: Tue, 20 Mar 2018 14:04:51 -0600 Subject: [PATCH 12/23] Block hash for the beacon. --- src/bin/beacon.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/bin/beacon.rs b/src/bin/beacon.rs index 762ca50..9e02015 100644 --- a/src/bin/beacon.rs +++ b/src/bin/beacon.rs @@ -23,7 +23,7 @@ fn main() { use crypto::digest::Digest; // Place block hash here (this will change) - let mut cur_hash: [u8; 32] = hex!("000000000000000000357b7d5040ee362f037bdf873eb4cde615b2f368f2f2a6"); + let mut cur_hash: [u8; 32] = hex!("00000000000000000034b33e842ac1c50456abe5fa92b60f6b3dfc5d247f7b58"); // Performs 2^n hash iterations over it const N: usize = 42; From aa8cbfe1a6deca989288ca3f7f45fecc0a11afa9 Mon Sep 17 00:00:00 2001 From: Sean Bowe Date: Tue, 20 Mar 2018 14:05:55 -0600 Subject: [PATCH 13/23] Fix comment for block hash. 
--- src/bin/beacon.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/bin/beacon.rs b/src/bin/beacon.rs index 9e02015..5f6f50d 100644 --- a/src/bin/beacon.rs +++ b/src/bin/beacon.rs @@ -22,7 +22,7 @@ fn main() { use crypto::sha2::Sha256; use crypto::digest::Digest; - // Place block hash here (this will change) + // Place block hash here (block number #514200) let mut cur_hash: [u8; 32] = hex!("00000000000000000034b33e842ac1c50456abe5fa92b60f6b3dfc5d247f7b58"); // Performs 2^n hash iterations over it From 5429415959175082207fd61c10319e47a6b56e87 Mon Sep 17 00:00:00 2001 From: Sean Bowe Date: Tue, 3 Apr 2018 18:59:39 -0600 Subject: [PATCH 14/23] Add tool for verifying the powersoftau transcript. --- Cargo.lock | 123 ++++++++++++----- Cargo.toml | 5 +- src/bin/verify.rs | 339 ++++++++++++++++++++++++++++++++++++++++++++++ src/lib.rs | 10 +- 4 files changed, 433 insertions(+), 44 deletions(-) create mode 100644 src/bin/verify.rs diff --git a/Cargo.lock b/Cargo.lock index c1d40c4..0a3f328 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1,6 +1,26 @@ +[[package]] +name = "bellman" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "bit-vec 0.4.4 (registry+https://github.com/rust-lang/crates.io-index)", + "byteorder 1.2.2 (registry+https://github.com/rust-lang/crates.io-index)", + "crossbeam 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)", + "futures 0.1.21 (registry+https://github.com/rust-lang/crates.io-index)", + "futures-cpupool 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)", + "num_cpus 1.8.0 (registry+https://github.com/rust-lang/crates.io-index)", + "pairing 0.14.1 (registry+https://github.com/rust-lang/crates.io-index)", + "rand 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "bit-vec" +version = "0.4.4" +source = "registry+https://github.com/rust-lang/crates.io-index" + [[package]] name = "bitflags" -version = "0.7.0" 
+version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] @@ -21,7 +41,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "byteorder" -version = "1.1.0" +version = "1.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] @@ -31,7 +51,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "crossbeam" -version = "0.3.0" +version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] @@ -53,18 +73,30 @@ dependencies = [ [[package]] name = "fuchsia-zircon" -version = "0.2.1" +version = "0.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "fuchsia-zircon-sys 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", + "bitflags 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)", + "fuchsia-zircon-sys 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "fuchsia-zircon-sys" -version = "0.2.0" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "futures" +version = "0.1.21" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "futures-cpupool" +version = "0.1.8" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "bitflags 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)", + "futures 0.1.21 (registry+https://github.com/rust-lang/crates.io-index)", + "num_cpus 1.8.0 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -78,7 +110,7 @@ version = "0.8.3" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "nodrop 0.1.12 (registry+https://github.com/rust-lang/crates.io-index)", - "typenum 1.9.0 (registry+https://github.com/rust-lang/crates.io-index)", + "typenum 1.10.0 (registry+https://github.com/rust-lang/crates.io-index)", ] 
[[package]] @@ -100,7 +132,7 @@ dependencies = [ [[package]] name = "libc" -version = "0.2.32" +version = "0.2.40" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] @@ -110,35 +142,36 @@ source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "num_cpus" -version = "1.7.0" +version = "1.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "libc 0.2.32 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.40 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "pairing" -version = "0.13.0" +version = "0.14.1" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "byteorder 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)", - "rand 0.3.17 (registry+https://github.com/rust-lang/crates.io-index)", + "byteorder 1.2.2 (registry+https://github.com/rust-lang/crates.io-index)", + "rand 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "powersoftau" version = "0.1.1" dependencies = [ + "bellman 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", "blake2 0.6.1 (registry+https://github.com/rust-lang/crates.io-index)", - "byteorder 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)", - "crossbeam 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)", + "byteorder 1.2.2 (registry+https://github.com/rust-lang/crates.io-index)", + "crossbeam 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)", "generic-array 0.8.3 (registry+https://github.com/rust-lang/crates.io-index)", "hex-literal 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", - "num_cpus 1.7.0 (registry+https://github.com/rust-lang/crates.io-index)", - "pairing 0.13.0 (registry+https://github.com/rust-lang/crates.io-index)", - "rand 0.3.17 (registry+https://github.com/rust-lang/crates.io-index)", + "num_cpus 1.8.0 
(registry+https://github.com/rust-lang/crates.io-index)", + "pairing 0.14.1 (registry+https://github.com/rust-lang/crates.io-index)", + "rand 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)", "rust-crypto 0.2.36 (registry+https://github.com/rust-lang/crates.io-index)", - "typenum 1.9.0 (registry+https://github.com/rust-lang/crates.io-index)", + "typenum 1.10.0 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -156,11 +189,22 @@ source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "rand" -version = "0.3.17" +version = "0.3.22" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "fuchsia-zircon 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.32 (registry+https://github.com/rust-lang/crates.io-index)", + "fuchsia-zircon 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.40 (registry+https://github.com/rust-lang/crates.io-index)", + "rand 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "rand" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "fuchsia-zircon 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.40 (registry+https://github.com/rust-lang/crates.io-index)", + "winapi 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -174,8 +218,8 @@ version = "0.2.36" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "gcc 0.3.54 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.32 (registry+https://github.com/rust-lang/crates.io-index)", - "rand 0.3.17 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.40 (registry+https://github.com/rust-lang/crates.io-index)", + "rand 0.3.22 (registry+https://github.com/rust-lang/crates.io-index)", "rustc-serialize 0.3.24 
(registry+https://github.com/rust-lang/crates.io-index)", "time 0.1.39 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -190,14 +234,14 @@ name = "time" version = "0.1.39" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "libc 0.2.32 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.40 (registry+https://github.com/rust-lang/crates.io-index)", "redox_syscall 0.1.37 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "typenum" -version = "1.9.0" +version = "1.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] @@ -220,32 +264,37 @@ version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" [metadata] -"checksum bitflags 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "aad18937a628ec6abcd26d1489012cc0e18c21798210f491af69ded9b881106d" +"checksum bellman 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "eae372472c7ea8f7c8fc6a62f7d5535db8302de7f1aafda2e13a97c4830d3bcf" +"checksum bit-vec 0.4.4 (registry+https://github.com/rust-lang/crates.io-index)" = "02b4ff8b16e6076c3e14220b39fbc1fabb6737522281a388998046859400895f" +"checksum bitflags 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "b3c30d3802dfb7281680d6285f2ccdaa8c2d8fee41f93805dba5c4cf50dc23cf" "checksum blake2 0.6.1 (registry+https://github.com/rust-lang/crates.io-index)" = "53bf612c0f2839b7e764ebac65d6cb985f7c6812de399d0728038f4b1da141bc" "checksum byte-tools 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "560c32574a12a89ecd91f5e742165893f86e3ab98d21f8ea548658eb9eef5f40" -"checksum byteorder 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ff81738b726f5d099632ceaffe7fb65b90212e8dce59d518729e7e8634032d3d" +"checksum byteorder 1.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = 
"73b5bdfe7ee3ad0b99c9801d58807a9dbc9e09196365b0203853b99889ab3c87" "checksum constant_time_eq 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "8ff012e225ce166d4422e0e78419d901719760f62ae2b7969ca6b564d1b54a9e" -"checksum crossbeam 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "8837ab96533202c5b610ed44bc7f4183e7957c1c8f56e8cc78bb098593c8ba0a" +"checksum crossbeam 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)" = "24ce9782d4d5c53674646a6a4c1863a21a8fc0cb649b3c94dfc16e45071dea19" "checksum crypto-mac 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "779015233ac67d65098614aec748ac1c756ab6677fa2e14cf8b37c08dfed1198" "checksum digest 0.6.2 (registry+https://github.com/rust-lang/crates.io-index)" = "e5b29bf156f3f4b3c4f610a25ff69370616ae6e0657d416de22645483e72af0a" -"checksum fuchsia-zircon 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "f6c0581a4e363262e52b87f59ee2afe3415361c6ec35e665924eb08afe8ff159" -"checksum fuchsia-zircon-sys 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "43f3795b4bae048dc6123a6b972cadde2e676f9ded08aef6bb77f5f157684a82" +"checksum fuchsia-zircon 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "2e9763c69ebaae630ba35f74888db465e49e259ba1bc0eda7d06f4a067615d82" +"checksum fuchsia-zircon-sys 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "3dcaa9ae7725d12cdb85b3ad99a434db70b468c09ded17e012d86b5c1010f7a7" +"checksum futures 0.1.21 (registry+https://github.com/rust-lang/crates.io-index)" = "1a70b146671de62ec8c8ed572219ca5d594d9b06c0b364d5e67b722fc559b48c" +"checksum futures-cpupool 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)" = "ab90cde24b3319636588d0c35fe03b1333857621051837ed769faefb4c2162e4" "checksum gcc 0.3.54 (registry+https://github.com/rust-lang/crates.io-index)" = "5e33ec290da0d127825013597dbdfc28bee4964690c7ce1166cbc2a7bd08b1bb" "checksum generic-array 0.8.3 
(registry+https://github.com/rust-lang/crates.io-index)" = "fceb69994e330afed50c93524be68c42fa898c2d9fd4ee8da03bd7363acd26f2" "checksum hex-literal 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "4da5f0e01bd8a71a224a4eedecaacfcabda388dbb7a80faf04d3514287572d95" "checksum hex-literal-impl 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "1d340b6514f232f6db1bd16db65302a5278a04fef9ce867cb932e7e5fa21130a" -"checksum libc 0.2.32 (registry+https://github.com/rust-lang/crates.io-index)" = "56cce3130fd040c28df6f495c8492e5ec5808fb4c9093c310df02b0c8f030148" +"checksum libc 0.2.40 (registry+https://github.com/rust-lang/crates.io-index)" = "6fd41f331ac7c5b8ac259b8bf82c75c0fb2e469bbf37d2becbba9a6a2221965b" "checksum nodrop 0.1.12 (registry+https://github.com/rust-lang/crates.io-index)" = "9a2228dca57108069a5262f2ed8bd2e82496d2e074a06d1ccc7ce1687b6ae0a2" -"checksum num_cpus 1.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "514f0d73e64be53ff320680ca671b64fe3fb91da01e1ae2ddc99eb51d453b20d" -"checksum pairing 0.13.0 (registry+https://github.com/rust-lang/crates.io-index)" = "c6cecc13d1871265fea9be96aa9f1cc027edacd55d2fb1fbc164e19725b88d70" +"checksum num_cpus 1.8.0 (registry+https://github.com/rust-lang/crates.io-index)" = "c51a3322e4bca9d212ad9a158a02abc6934d005490c054a2778df73a70aa0a30" +"checksum pairing 0.14.1 (registry+https://github.com/rust-lang/crates.io-index)" = "06f21a403a78257de696b59a5bfafad56a3b3ab8f27741c8122750bf0ebbb9fa" "checksum proc-macro-hack 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "3ba8d4f9257b85eb6cdf13f055cea3190520aab1409ca2ab43493ea4820c25f0" "checksum proc-macro-hack-impl 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "d5cb6f960ad471404618e9817c0e5d10b1ae74cfdf01fab89ea0641fe7fb2892" -"checksum rand 0.3.17 (registry+https://github.com/rust-lang/crates.io-index)" = "61efcbcd9fa8d8fbb07c84e34a8af18a1ff177b449689ad38a6e9457ecc7b2ae" +"checksum rand 0.3.22 
(registry+https://github.com/rust-lang/crates.io-index)" = "15a732abf9d20f0ad8eeb6f909bf6868722d9a06e1e50802b6a70351f40b4eb1" +"checksum rand 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)" = "eba5f8cb59cc50ed56be8880a5c7b496bfd9bd26394e176bc67884094145c2c5" "checksum redox_syscall 0.1.37 (registry+https://github.com/rust-lang/crates.io-index)" = "0d92eecebad22b767915e4d529f89f28ee96dbbf5a4810d2b844373f136417fd" "checksum rust-crypto 0.2.36 (registry+https://github.com/rust-lang/crates.io-index)" = "f76d05d3993fd5f4af9434e8e436db163a12a9d40e1a58a726f27a01dfd12a2a" "checksum rustc-serialize 0.3.24 (registry+https://github.com/rust-lang/crates.io-index)" = "dcf128d1287d2ea9d80910b5f1120d0b8eede3fbf1abe91c40d39ea7d51e6fda" "checksum time 0.1.39 (registry+https://github.com/rust-lang/crates.io-index)" = "a15375f1df02096fb3317256ce2cee6a1f42fc84ea5ad5fc8c421cfe40c73098" -"checksum typenum 1.9.0 (registry+https://github.com/rust-lang/crates.io-index)" = "13a99dc6780ef33c78780b826cf9d2a78840b72cae9474de4bcaf9051e60ebbd" +"checksum typenum 1.10.0 (registry+https://github.com/rust-lang/crates.io-index)" = "612d636f949607bdf9b123b4a6f6d966dedf3ff669f7f045890d3a4a73948169" "checksum winapi 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)" = "04e3bd221fcbe8a271359c04f21a76db7d0c6028862d1bb5512d85e1e2eb5bb3" "checksum winapi-i686-pc-windows-gnu 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" "checksum winapi-x86_64-pc-windows-gnu 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" diff --git a/Cargo.toml b/Cargo.toml index 9b150fe..2b7c201 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -10,16 +10,17 @@ homepage = "https://github.com/ebfull/powersoftau" repository = "https://github.com/ebfull/powersoftau" [dependencies] -rand = "0.3.17" +rand = "0.4" crossbeam = "0.3.0" num_cpus = "1.7.0" 
blake2 = "0.6.1" generic-array = "0.8.3" typenum = "1.9.0" byteorder = "1.1.0" -pairing = "0.13.0" +pairing = "0.14" hex-literal = "0.1" rust-crypto = "0.2" +bellman = "0.1" [features] u128-support = ["pairing/u128-support"] diff --git a/src/bin/verify.rs b/src/bin/verify.rs new file mode 100644 index 0000000..ece1fcc --- /dev/null +++ b/src/bin/verify.rs @@ -0,0 +1,339 @@ +extern crate pairing; +extern crate powersoftau; +extern crate rand; +extern crate blake2; +extern crate byteorder; +extern crate bellman; + +use pairing::{CurveAffine, CurveProjective}; +use pairing::bls12_381::{G1, G2}; +use powersoftau::*; + +use bellman::multicore::Worker; +use bellman::domain::{EvaluationDomain, Point}; + +use std::fs::OpenOptions; +use std::io::{self, BufReader, BufWriter, Write}; + +fn into_hex(h: &[u8]) -> String { + let mut f = String::new(); + + for byte in &h[..] { + f += &format!("{:02x}", byte); + } + + f +} + +// Computes the hash of the challenge file for the player, +// given the current state of the accumulator and the last +// response file hash. +fn get_challenge_file_hash( + acc: &Accumulator, + last_response_file_hash: &[u8; 64] +) -> [u8; 64] +{ + let sink = io::sink(); + let mut sink = HashWriter::new(sink); + + sink.write_all(last_response_file_hash) + .unwrap(); + + acc.serialize( + &mut sink, + UseCompression::No + ).unwrap(); + + let mut tmp = [0; 64]; + tmp.copy_from_slice(sink.into_hash().as_slice()); + + tmp +} + +// Computes the hash of the response file, given the new +// accumulator, the player's public key, and the challenge +// file's hash. 
+fn get_response_file_hash( + acc: &Accumulator, + pubkey: &PublicKey, + last_challenge_file_hash: &[u8; 64] +) -> [u8; 64] +{ + let sink = io::sink(); + let mut sink = HashWriter::new(sink); + + sink.write_all(last_challenge_file_hash) + .unwrap(); + + acc.serialize( + &mut sink, + UseCompression::Yes + ).unwrap(); + + pubkey.serialize(&mut sink).unwrap(); + + let mut tmp = [0; 64]; + tmp.copy_from_slice(sink.into_hash().as_slice()); + + tmp +} + +fn main() { + // Try to load `./transcript` from disk. + let reader = OpenOptions::new() + .read(true) + .open("transcript") + .expect("unable open `./transcript` in this directory"); + + let mut reader = BufReader::with_capacity(1024 * 1024, reader); + + // Initialize the accumulator + let mut current_accumulator = Accumulator::new(); + + // The "last response file hash" is just a blank BLAKE2b hash + // at the beginning of the hash chain. + let mut last_response_file_hash = [0; 64]; + last_response_file_hash.copy_from_slice(blank_hash().as_slice()); + + // There were 89 rounds. + for _ in 0..89 { + // Compute the hash of the challenge file that the player + // should have received. + let last_challenge_file_hash = get_challenge_file_hash( + ¤t_accumulator, + &last_response_file_hash + ); + + // Deserialize the accumulator provided by the player in + // their response file. It's stored in the transcript in + // uncompressed form so that we can more efficiently + // deserialize it. + let response_file_accumulator = Accumulator::deserialize( + &mut reader, + UseCompression::No, + CheckForCorrectness::Yes + ).expect("unable to read uncompressed accumulator"); + + // Deserialize the public key provided by the player. + let response_file_pubkey = PublicKey::deserialize(&mut reader) + .expect("wasn't able to deserialize the response file's public key"); + + // Compute the hash of the response file. (we had it in uncompressed + // form in the transcript, but the response file is compressed to save + // participants bandwidth.) 
+ last_response_file_hash = get_response_file_hash( + &response_file_accumulator, + &response_file_pubkey, + &last_challenge_file_hash + ); + + print!("{}", into_hex(&last_response_file_hash)); + + // Verify the transformation from the previous accumulator to the new + // one. This also verifies the correctness of the accumulators and the + // public keys, with respect to the transcript so far. + if !verify_transform( + ¤t_accumulator, + &response_file_accumulator, + &response_file_pubkey, + &last_challenge_file_hash + ) + { + println!(" ... FAILED"); + panic!("INVALID RESPONSE FILE!"); + } else { + println!(""); + } + + current_accumulator = response_file_accumulator; + } + + println!("Transcript OK!"); + + let worker = &Worker::new(); + + // Create the parameters for various 2^m circuit depths. + for m in 0..22 { + let paramname = format!("phase1radix2m{}", m); + println!("Creating {}", paramname); + + let degree = 1 << m; + + let mut g1_coeffs = EvaluationDomain::from_coeffs( + current_accumulator.tau_powers_g1[0..degree].iter() + .map(|e| Point(e.into_projective())) + .collect() + ).unwrap(); + + let mut g2_coeffs = EvaluationDomain::from_coeffs( + current_accumulator.tau_powers_g2[0..degree].iter() + .map(|e| Point(e.into_projective())) + .collect() + ).unwrap(); + + let mut g1_alpha_coeffs = EvaluationDomain::from_coeffs( + current_accumulator.alpha_tau_powers_g1[0..degree].iter() + .map(|e| Point(e.into_projective())) + .collect() + ).unwrap(); + + let mut g1_beta_coeffs = EvaluationDomain::from_coeffs( + current_accumulator.beta_tau_powers_g1[0..degree].iter() + .map(|e| Point(e.into_projective())) + .collect() + ).unwrap(); + + // This converts all of the elements into Lagrange coefficients + // for later construction of interpolation polynomials + g1_coeffs.ifft(&worker); + g2_coeffs.ifft(&worker); + g1_alpha_coeffs.ifft(&worker); + g1_beta_coeffs.ifft(&worker); + + let g1_coeffs = g1_coeffs.into_coeffs(); + let g2_coeffs = g2_coeffs.into_coeffs(); + let 
g1_alpha_coeffs = g1_alpha_coeffs.into_coeffs(); + let g1_beta_coeffs = g1_beta_coeffs.into_coeffs(); + + assert_eq!(g1_coeffs.len(), degree); + assert_eq!(g2_coeffs.len(), degree); + assert_eq!(g1_alpha_coeffs.len(), degree); + assert_eq!(g1_beta_coeffs.len(), degree); + + // Remove the Point() wrappers + + let mut g1_coeffs = g1_coeffs.into_iter() + .map(|e| e.0) + .collect::>(); + + let mut g2_coeffs = g2_coeffs.into_iter() + .map(|e| e.0) + .collect::>(); + + let mut g1_alpha_coeffs = g1_alpha_coeffs.into_iter() + .map(|e| e.0) + .collect::>(); + + let mut g1_beta_coeffs = g1_beta_coeffs.into_iter() + .map(|e| e.0) + .collect::>(); + + // Batch normalize + G1::batch_normalization(&mut g1_coeffs); + G2::batch_normalization(&mut g2_coeffs); + G1::batch_normalization(&mut g1_alpha_coeffs); + G1::batch_normalization(&mut g1_beta_coeffs); + + // H query of Groth16 needs... + // x^i * (x^m - 1) for i in 0..=(m-2) a.k.a. + // x^(i + m) - x^i for i in 0..=(m-2) + // for radix2 evaluation domains + let mut h = Vec::with_capacity(degree - 1); + for i in 0..(degree-1) { + let mut tmp = current_accumulator.tau_powers_g1[i + degree].into_projective(); + let mut tmp2 = current_accumulator.tau_powers_g1[i].into_projective(); + tmp2.negate(); + tmp.add_assign(&tmp2); + + h.push(tmp); + } + + // Batch normalize this as well + G1::batch_normalization(&mut h); + + // Create the parameter file + let writer = OpenOptions::new() + .read(false) + .write(true) + .create_new(true) + .open(paramname) + .expect("unable to create parameter file in this directory"); + + let mut writer = BufWriter::new(writer); + + // Write alpha (in g1) + // Needed by verifier for e(alpha, beta) + // Needed by prover for A and C elements of proof + writer.write_all( + current_accumulator.alpha_tau_powers_g1[0] + .into_uncompressed() + .as_ref() + ).unwrap(); + + // Write beta (in g1) + // Needed by prover for C element of proof + writer.write_all( + current_accumulator.beta_tau_powers_g1[0] + 
.into_uncompressed() + .as_ref() + ).unwrap(); + + // Write beta (in g2) + // Needed by verifier for e(alpha, beta) + // Needed by prover for B element of proof + writer.write_all( + current_accumulator.beta_g2 + .into_uncompressed() + .as_ref() + ).unwrap(); + + // Lagrange coefficients in G1 (for constructing + // LC/IC queries and precomputing polynomials for A) + for coeff in g1_coeffs { + // Was normalized earlier in parallel + let coeff = coeff.into_affine(); + + writer.write_all( + coeff.into_uncompressed() + .as_ref() + ).unwrap(); + } + + // Lagrange coefficients in G2 (for precomputing + // polynomials for B) + for coeff in g2_coeffs { + // Was normalized earlier in parallel + let coeff = coeff.into_affine(); + + writer.write_all( + coeff.into_uncompressed() + .as_ref() + ).unwrap(); + } + + // Lagrange coefficients in G1 with alpha (for + // LC/IC queries) + for coeff in g1_alpha_coeffs { + // Was normalized earlier in parallel + let coeff = coeff.into_affine(); + + writer.write_all( + coeff.into_uncompressed() + .as_ref() + ).unwrap(); + } + + // Lagrange coefficients in G1 with beta (for + // LC/IC queries) + for coeff in g1_beta_coeffs { + // Was normalized earlier in parallel + let coeff = coeff.into_affine(); + + writer.write_all( + coeff.into_uncompressed() + .as_ref() + ).unwrap(); + } + + // Bases for H polynomial computation + for coeff in h { + // Was normalized earlier in parallel + let coeff = coeff.into_affine(); + + writer.write_all( + coeff.into_uncompressed() + .as_ref() + ).unwrap(); + } + } +} diff --git a/src/lib.rs b/src/lib.rs index e445a5b..22c82d8 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -335,15 +335,15 @@ fn test_pubkey_serialization() { #[derive(PartialEq, Eq, Clone)] pub struct Accumulator { /// tau^0, tau^1, tau^2, ..., tau^{TAU_POWERS_G1_LENGTH - 1} - tau_powers_g1: Vec, + pub tau_powers_g1: Vec, /// tau^0, tau^1, tau^2, ..., tau^{TAU_POWERS_LENGTH - 1} - tau_powers_g2: Vec, + pub tau_powers_g2: Vec, /// alpha * tau^0, 
alpha * tau^1, alpha * tau^2, ..., alpha * tau^{TAU_POWERS_LENGTH - 1} - alpha_tau_powers_g1: Vec, + pub alpha_tau_powers_g1: Vec, /// beta * tau^0, beta * tau^1, beta * tau^2, ..., beta * tau^{TAU_POWERS_LENGTH - 1} - beta_tau_powers_g1: Vec, + pub beta_tau_powers_g1: Vec, /// beta - beta_g2: G2Affine + pub beta_g2: G2Affine } impl Accumulator { From 299c10a37d41a887146136ebe7b36f2065e79772 Mon Sep 17 00:00:00 2001 From: Alex Vlasov Date: Wed, 9 Jan 2019 20:30:42 +0200 Subject: [PATCH 15/23] prepare for testing for Ethereum related ceremony --- .gitignore | 7 +- Cargo.lock | 167 +++- Cargo.toml | 20 +- README.md | 16 +- src/accumulator.rs | 466 ++++++++++ src/batched_accumulator.rs | 832 +++++++++++++++++ src/bin/{beacon.rs => beacon.rs.nocompile} | 0 src/bin/beacon_constrained.rs | 192 ++++ src/bin/{compute.rs => compute.rs.nocompile} | 0 src/bin/compute_constrained.rs | 177 ++++ src/bin/new.rs | 14 +- src/bin/new.rs.nocompile | 24 + src/bin/new_constrained.rs | 81 ++ src/bin/{verify.rs => verify.rs.nocompile} | 0 ...sform.rs => verify_transform.rs.nocompile} | 0 src/bin/verify_transform_constrained.rs | 190 ++++ src/bls12_381/mod.rs | 859 +++++++++++++++++ src/bn256/mod.rs | 120 +++ src/keypair.rs | 307 +++++++ src/lib.rs | 869 +----------------- src/parameters.rs | 118 +++ src/small_bn256/mod.rs | 41 + src/utils.rs | 168 ++++ test.sh | 19 + 24 files changed, 3799 insertions(+), 888 deletions(-) create mode 100644 src/accumulator.rs create mode 100644 src/batched_accumulator.rs rename src/bin/{beacon.rs => beacon.rs.nocompile} (100%) create mode 100644 src/bin/beacon_constrained.rs rename src/bin/{compute.rs => compute.rs.nocompile} (100%) create mode 100644 src/bin/compute_constrained.rs create mode 100644 src/bin/new.rs.nocompile create mode 100644 src/bin/new_constrained.rs rename src/bin/{verify.rs => verify.rs.nocompile} (100%) rename src/bin/{verify_transform.rs => verify_transform.rs.nocompile} (100%) create mode 100644 
src/bin/verify_transform_constrained.rs create mode 100644 src/bls12_381/mod.rs create mode 100644 src/bn256/mod.rs create mode 100644 src/keypair.rs create mode 100644 src/parameters.rs create mode 100644 src/small_bn256/mod.rs create mode 100644 src/utils.rs create mode 100755 test.sh diff --git a/.gitignore b/.gitignore index d851696..b544111 100644 --- a/.gitignore +++ b/.gitignore @@ -1,5 +1,6 @@ /target/ **/*.rs.bk -transcript -challenge -response +transcript* +challenge* +response* +new_challenge* diff --git a/Cargo.lock b/Cargo.lock index 0a3f328..71f4a93 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1,16 +1,19 @@ [[package]] name = "bellman" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" +version = "0.1.1" +source = "git+https://github.com/matterinc/bellman#e544678a6abe2f97a9afbc02e2e590f2259f1e30" dependencies = [ "bit-vec 0.4.4 (registry+https://github.com/rust-lang/crates.io-index)", "byteorder 1.2.2 (registry+https://github.com/rust-lang/crates.io-index)", "crossbeam 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)", + "ff 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", "futures 0.1.21 (registry+https://github.com/rust-lang/crates.io-index)", "futures-cpupool 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)", "num_cpus 1.8.0 (registry+https://github.com/rust-lang/crates.io-index)", - "pairing 0.14.1 (registry+https://github.com/rust-lang/crates.io-index)", + "pairing 0.15.0 (git+https://github.com/matterinc/pairing)", + "pbr 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)", "rand 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)", + "time 0.1.39 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -71,6 +74,29 @@ dependencies = [ "generic-array 0.8.3 (registry+https://github.com/rust-lang/crates.io-index)", ] +[[package]] +name = "ff" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" 
+dependencies = [ + "byteorder 1.2.2 (registry+https://github.com/rust-lang/crates.io-index)", + "ff_derive 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)", + "rand 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "ff_derive" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "num-bigint 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)", + "num-integer 0.1.39 (registry+https://github.com/rust-lang/crates.io-index)", + "num-traits 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)", + "proc-macro2 0.4.24 (registry+https://github.com/rust-lang/crates.io-index)", + "quote 0.6.10 (registry+https://github.com/rust-lang/crates.io-index)", + "syn 0.14.9 (registry+https://github.com/rust-lang/crates.io-index)", +] + [[package]] name = "fuchsia-zircon" version = "0.3.3" @@ -130,6 +156,15 @@ dependencies = [ "proc-macro-hack 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", ] +[[package]] +name = "kernel32-sys" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", + "winapi-build 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", +] + [[package]] name = "libc" version = "0.2.40" @@ -140,6 +175,28 @@ name = "nodrop" version = "0.1.12" source = "registry+https://github.com/rust-lang/crates.io-index" +[[package]] +name = "num-bigint" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "num-integer 0.1.39 (registry+https://github.com/rust-lang/crates.io-index)", + "num-traits 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "num-integer" +version = "0.1.39" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "num-traits 0.2.6 
(registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "num-traits" +version = "0.2.6" +source = "registry+https://github.com/rust-lang/crates.io-index" + [[package]] name = "num_cpus" version = "1.8.0" @@ -150,25 +207,39 @@ dependencies = [ [[package]] name = "pairing" -version = "0.14.1" -source = "registry+https://github.com/rust-lang/crates.io-index" +version = "0.15.0" +source = "git+https://github.com/matterinc/pairing#1363d02170f1d98f1b9c8eec0e3fc6b1eea4ef9a" dependencies = [ "byteorder 1.2.2 (registry+https://github.com/rust-lang/crates.io-index)", + "ff 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", "rand 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] -name = "powersoftau" -version = "0.1.1" +name = "pbr" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "bellman 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", + "kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.40 (registry+https://github.com/rust-lang/crates.io-index)", + "termion 1.5.1 (registry+https://github.com/rust-lang/crates.io-index)", + "time 0.1.39 (registry+https://github.com/rust-lang/crates.io-index)", + "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "powersoftau" +version = "0.1.2" +dependencies = [ + "bellman 0.1.1 (git+https://github.com/matterinc/bellman)", "blake2 0.6.1 (registry+https://github.com/rust-lang/crates.io-index)", "byteorder 1.2.2 (registry+https://github.com/rust-lang/crates.io-index)", "crossbeam 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)", + "ff 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", "generic-array 0.8.3 (registry+https://github.com/rust-lang/crates.io-index)", "hex-literal 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", "num_cpus 1.8.0 
(registry+https://github.com/rust-lang/crates.io-index)", - "pairing 0.14.1 (registry+https://github.com/rust-lang/crates.io-index)", + "pairing 0.15.0 (git+https://github.com/matterinc/pairing)", "rand 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)", "rust-crypto 0.2.36 (registry+https://github.com/rust-lang/crates.io-index)", "typenum 1.10.0 (registry+https://github.com/rust-lang/crates.io-index)", @@ -187,6 +258,22 @@ name = "proc-macro-hack-impl" version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" +[[package]] +name = "proc-macro2" +version = "0.4.24" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "unicode-xid 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "quote" +version = "0.6.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "proc-macro2 0.4.24 (registry+https://github.com/rust-lang/crates.io-index)", +] + [[package]] name = "rand" version = "0.3.22" @@ -212,6 +299,14 @@ name = "redox_syscall" version = "0.1.37" source = "registry+https://github.com/rust-lang/crates.io-index" +[[package]] +name = "redox_termios" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "redox_syscall 0.1.37 (registry+https://github.com/rust-lang/crates.io-index)", +] + [[package]] name = "rust-crypto" version = "0.2.36" @@ -229,6 +324,26 @@ name = "rustc-serialize" version = "0.3.24" source = "registry+https://github.com/rust-lang/crates.io-index" +[[package]] +name = "syn" +version = "0.14.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "proc-macro2 0.4.24 (registry+https://github.com/rust-lang/crates.io-index)", + "quote 0.6.10 (registry+https://github.com/rust-lang/crates.io-index)", + "unicode-xid 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "termion" +version = "1.5.1" 
+source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "libc 0.2.40 (registry+https://github.com/rust-lang/crates.io-index)", + "redox_syscall 0.1.37 (registry+https://github.com/rust-lang/crates.io-index)", + "redox_termios 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", +] + [[package]] name = "time" version = "0.1.39" @@ -244,6 +359,16 @@ name = "typenum" version = "1.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" +[[package]] +name = "unicode-xid" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "winapi" +version = "0.2.8" +source = "registry+https://github.com/rust-lang/crates.io-index" + [[package]] name = "winapi" version = "0.3.4" @@ -253,6 +378,11 @@ dependencies = [ "winapi-x86_64-pc-windows-gnu 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", ] +[[package]] +name = "winapi-build" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" + [[package]] name = "winapi-i686-pc-windows-gnu" version = "0.4.0" @@ -264,7 +394,7 @@ version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" [metadata] -"checksum bellman 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "eae372472c7ea8f7c8fc6a62f7d5535db8302de7f1aafda2e13a97c4830d3bcf" +"checksum bellman 0.1.1 (git+https://github.com/matterinc/bellman)" = "" "checksum bit-vec 0.4.4 (registry+https://github.com/rust-lang/crates.io-index)" = "02b4ff8b16e6076c3e14220b39fbc1fabb6737522281a388998046859400895f" "checksum bitflags 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "b3c30d3802dfb7281680d6285f2ccdaa8c2d8fee41f93805dba5c4cf50dc23cf" "checksum blake2 0.6.1 (registry+https://github.com/rust-lang/crates.io-index)" = "53bf612c0f2839b7e764ebac65d6cb985f7c6812de399d0728038f4b1da141bc" @@ -274,6 +404,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index" "checksum crossbeam 0.3.2 
(registry+https://github.com/rust-lang/crates.io-index)" = "24ce9782d4d5c53674646a6a4c1863a21a8fc0cb649b3c94dfc16e45071dea19" "checksum crypto-mac 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "779015233ac67d65098614aec748ac1c756ab6677fa2e14cf8b37c08dfed1198" "checksum digest 0.6.2 (registry+https://github.com/rust-lang/crates.io-index)" = "e5b29bf156f3f4b3c4f610a25ff69370616ae6e0657d416de22645483e72af0a" +"checksum ff 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "eec81e2e423086589b224dbcfbab70e3732913de25479d05165b20d4aaed05f4" +"checksum ff_derive 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "70335090ee115d5716416ca38980cce7752f40923f41d22cf5a69a6269f9e2a2" "checksum fuchsia-zircon 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "2e9763c69ebaae630ba35f74888db465e49e259ba1bc0eda7d06f4a067615d82" "checksum fuchsia-zircon-sys 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "3dcaa9ae7725d12cdb85b3ad99a434db70b468c09ded17e012d86b5c1010f7a7" "checksum futures 0.1.21 (registry+https://github.com/rust-lang/crates.io-index)" = "1a70b146671de62ec8c8ed572219ca5d594d9b06c0b364d5e67b722fc559b48c" @@ -282,19 +414,32 @@ source = "registry+https://github.com/rust-lang/crates.io-index" "checksum generic-array 0.8.3 (registry+https://github.com/rust-lang/crates.io-index)" = "fceb69994e330afed50c93524be68c42fa898c2d9fd4ee8da03bd7363acd26f2" "checksum hex-literal 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "4da5f0e01bd8a71a224a4eedecaacfcabda388dbb7a80faf04d3514287572d95" "checksum hex-literal-impl 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "1d340b6514f232f6db1bd16db65302a5278a04fef9ce867cb932e7e5fa21130a" +"checksum kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "7507624b29483431c0ba2d82aece8ca6cdba9382bff4ddd0f7490560c056098d" "checksum libc 0.2.40 (registry+https://github.com/rust-lang/crates.io-index)" = 
"6fd41f331ac7c5b8ac259b8bf82c75c0fb2e469bbf37d2becbba9a6a2221965b" "checksum nodrop 0.1.12 (registry+https://github.com/rust-lang/crates.io-index)" = "9a2228dca57108069a5262f2ed8bd2e82496d2e074a06d1ccc7ce1687b6ae0a2" +"checksum num-bigint 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "10b8423ea72ec64751198856a853e07b37087cfc9b53a87ecb19bff67b6d1320" +"checksum num-integer 0.1.39 (registry+https://github.com/rust-lang/crates.io-index)" = "e83d528d2677f0518c570baf2b7abdcf0cd2d248860b68507bdcb3e91d4c0cea" +"checksum num-traits 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)" = "0b3a5d7cc97d6d30d8b9bc8fa19bf45349ffe46241e8816f50f62f6d6aaabee1" "checksum num_cpus 1.8.0 (registry+https://github.com/rust-lang/crates.io-index)" = "c51a3322e4bca9d212ad9a158a02abc6934d005490c054a2778df73a70aa0a30" -"checksum pairing 0.14.1 (registry+https://github.com/rust-lang/crates.io-index)" = "06f21a403a78257de696b59a5bfafad56a3b3ab8f27741c8122750bf0ebbb9fa" +"checksum pairing 0.15.0 (git+https://github.com/matterinc/pairing)" = "" +"checksum pbr 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "deb73390ab68d81992bd994d145f697451bb0b54fd39738e72eef32458ad6907" "checksum proc-macro-hack 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "3ba8d4f9257b85eb6cdf13f055cea3190520aab1409ca2ab43493ea4820c25f0" "checksum proc-macro-hack-impl 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "d5cb6f960ad471404618e9817c0e5d10b1ae74cfdf01fab89ea0641fe7fb2892" +"checksum proc-macro2 0.4.24 (registry+https://github.com/rust-lang/crates.io-index)" = "77619697826f31a02ae974457af0b29b723e5619e113e9397b8b82c6bd253f09" +"checksum quote 0.6.10 (registry+https://github.com/rust-lang/crates.io-index)" = "53fa22a1994bd0f9372d7a816207d8a2677ad0325b073f5c5332760f0fb62b5c" "checksum rand 0.3.22 (registry+https://github.com/rust-lang/crates.io-index)" = "15a732abf9d20f0ad8eeb6f909bf6868722d9a06e1e50802b6a70351f40b4eb1" "checksum rand 
0.4.2 (registry+https://github.com/rust-lang/crates.io-index)" = "eba5f8cb59cc50ed56be8880a5c7b496bfd9bd26394e176bc67884094145c2c5" "checksum redox_syscall 0.1.37 (registry+https://github.com/rust-lang/crates.io-index)" = "0d92eecebad22b767915e4d529f89f28ee96dbbf5a4810d2b844373f136417fd" +"checksum redox_termios 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "7e891cfe48e9100a70a3b6eb652fef28920c117d366339687bd5576160db0f76" "checksum rust-crypto 0.2.36 (registry+https://github.com/rust-lang/crates.io-index)" = "f76d05d3993fd5f4af9434e8e436db163a12a9d40e1a58a726f27a01dfd12a2a" "checksum rustc-serialize 0.3.24 (registry+https://github.com/rust-lang/crates.io-index)" = "dcf128d1287d2ea9d80910b5f1120d0b8eede3fbf1abe91c40d39ea7d51e6fda" +"checksum syn 0.14.9 (registry+https://github.com/rust-lang/crates.io-index)" = "261ae9ecaa397c42b960649561949d69311f08eeaea86a65696e6e46517cf741" +"checksum termion 1.5.1 (registry+https://github.com/rust-lang/crates.io-index)" = "689a3bdfaab439fd92bc87df5c4c78417d3cbe537487274e9b0b2dce76e92096" "checksum time 0.1.39 (registry+https://github.com/rust-lang/crates.io-index)" = "a15375f1df02096fb3317256ce2cee6a1f42fc84ea5ad5fc8c421cfe40c73098" "checksum typenum 1.10.0 (registry+https://github.com/rust-lang/crates.io-index)" = "612d636f949607bdf9b123b4a6f6d966dedf3ff669f7f045890d3a4a73948169" +"checksum unicode-xid 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "fc72304796d0818e357ead4e000d19c9c174ab23dc11093ac919054d20a6a7fc" +"checksum winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)" = "167dc9d6949a9b857f3451275e911c3f44255842c1f7a76f33c55103a909087a" "checksum winapi 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)" = "04e3bd221fcbe8a271359c04f21a76db7d0c6028862d1bb5512d85e1e2eb5bb3" +"checksum winapi-build 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "2d315eee3b34aca4797b2da6b13ed88266e6d612562a0c46390af8299fc699bc" "checksum 
winapi-i686-pc-windows-gnu 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" "checksum winapi-x86_64-pc-windows-gnu 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" diff --git a/Cargo.toml b/Cargo.toml index 2b7c201..c4fba99 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,8 +1,9 @@ [package] name = "powersoftau" -version = "0.1.1" -authors = ["Sean Bowe"] +version = "0.1.2" +authors = ["Sean Bowe", "Alex Vlasov"] license = "MIT/Apache-2.0" +edition = "2018" description = "Communal zk-SNARK MPC for Public Parameters" documentation = "https://docs.rs/powersoftau/" @@ -17,10 +18,19 @@ blake2 = "0.6.1" generic-array = "0.8.3" typenum = "1.9.0" byteorder = "1.1.0" -pairing = "0.14" hex-literal = "0.1" rust-crypto = "0.2" -bellman = "0.1" + +memmap = "0.7.0" +itertools = "0.8.0" + +#ff = { path = "../ff", features = ["derive"] } +#pairing = { path = "../pairing", features = ["expose-arith"]} +#bellman = { path = "../bellman" } + +ff = { git = 'https://github.com/matterinc/ff', features = ["derive"] } +pairing = { git = 'https://github.com/matterinc/pairing', features = ["expose-arith"] } +bellman = { git = 'https://github.com/matterinc/bellman'} [features] -u128-support = ["pairing/u128-support"] + diff --git a/README.md b/README.md index 4953996..01831ce 100644 --- a/README.md +++ b/README.md @@ -1,20 +1,20 @@ # Powers of Tau +## Original story + This is a [multi-party computation](https://en.wikipedia.org/wiki/Secure_multi-party_computation) (MPC) ceremony which constructs partial zk-SNARK parameters for _all_ circuits up to a depth of 221. It works by taking a step that is performed by all zk-SNARK MPCs and performing it in just one single ceremony. This makes individual zk-SNARK MPCs much cheaper and allows them to scale to practically unbounded numbers of participants. 
This protocol is described in a [forthcoming paper](https://eprint.iacr.org/2017/1050). It produces parameters for an adaptation of [Jens Groth's 2016 pairing-based proving system](https://eprint.iacr.org/2016/260) using the [BLS12-381](https://github.com/ebfull/pairing/tree/master/src/bls12_381) elliptic curve construction. The security proof relies on a randomness beacon being applied at the end of the ceremony. +## Contributions + +Extended to support Ethereum's BN256 curve and made it easier to change size of the ceremony. In addition proof generation process can be done in memory constrained environments now. Benchmark is around `1.3 Gb` of memory and `3 hours` for a `2^26` power of tau on BN256 curve on my personal laptop + ## Instructions -If you've been asked to participate, you were sent a `challenge` file. Put that in the current directory and use your [Rust toolchain](https://www.rust-lang.org/en-US/) to execute the computation: +Instructions for a planned ceremony will be posted when everything is tested and finalized. -``` -cargo run --release --bin compute -``` - -The process could take an hour or so. When it's finished, it will place a `response` file in the current directory. That's what you send back. It will also print a hash of the `response` file it produced. You need to write this hash down (or post it publicly) so that you and others can confirm that your contribution exists in the final transcript of the ceremony. - -## Recommendations +## Recommendations from original ceremony Participants of the ceremony sample some randomness, perform a computation, and then destroy the randomness. 
**Only one participant needs to do this successfully to ensure the final parameters are secure.** In order to see that this randomness is truly destroyed, participants may take various kinds of precautions: diff --git a/src/accumulator.rs b/src/accumulator.rs new file mode 100644 index 0000000..36828f4 --- /dev/null +++ b/src/accumulator.rs @@ -0,0 +1,466 @@ +//! This ceremony constructs the "powers of tau" for Jens Groth's 2016 zk-SNARK proving +//! system using the BLS12-381 pairing-friendly elliptic curve construction. +//! +//! # Overview +//! +//! Participants of the ceremony receive a "challenge" file containing: +//! +//! * the BLAKE2b hash of the last file entered into the transcript +//! * an `Accumulator` (with curve points encoded in uncompressed form for fast deserialization) +//! +//! The participant runs a tool which generates a random keypair (`PublicKey`, `PrivateKey`) +//! used for modifying the `Accumulator` from the "challenge" file. The keypair is then used to +//! transform the `Accumulator`, and a "response" file is generated containing: +//! +//! * the BLAKE2b hash of the "challenge" file (thus forming a hash chain over the entire transcript) +//! * an `Accumulator` (with curve points encoded in compressed form for fast uploading) +//! * the `PublicKey` +//! +//! This "challenge" file is entered into the protocol transcript. A given transcript is valid +//! if the transformations between consecutive `Accumulator`s verify with their respective +//! `PublicKey`s. Participants (and the public) can ensure that their contribution to the +//! `Accumulator` was accepted by ensuring the transcript contains their "response" file, ideally +//! by comparison of the BLAKE2b hash of the "response" file. +//! +//! After some time has elapsed for participants to contribute to the ceremony, a participant is +//! simulated with a randomness beacon. The resulting `Accumulator` contains partial zk-SNARK +//! 
public parameters for all circuits within a bounded size. + +extern crate pairing; +extern crate rand; +extern crate crossbeam; +extern crate num_cpus; +extern crate blake2; +extern crate generic_array; +extern crate typenum; +extern crate byteorder; +extern crate ff; +extern crate memmap; + +use memmap::{Mmap, MmapMut}; +use self::ff::{Field, PrimeField}; +use self::byteorder::{ReadBytesExt, BigEndian}; +use self::rand::{SeedableRng, Rng, Rand}; +use self::rand::chacha::ChaChaRng; +use self::pairing::bn256::{Bn256}; +use self::pairing::*; +use std::io::{self, Read, Write}; +use std::sync::{Arc, Mutex}; +use self::generic_array::GenericArray; +use self::typenum::consts::U64; +use self::blake2::{Blake2b, Digest}; +use std::fmt; + +use super::keypair::*; +use super::utils::*; +use super::parameters::*; + +/// The `Accumulator` is an object that participants of the ceremony contribute +/// randomness to. This object contains powers of trapdoor `tau` in G1 and in G2 over +/// fixed generators, and additionally in G1 over two other generators of exponents +/// `alpha` and `beta` over those fixed generators. 
In other words: +/// +/// * (τ, τ2, ..., τ222 - 2, α, ατ, ατ2, ..., ατ221 - 1, β, βτ, βτ2, ..., βτ221 - 1)1 +/// * (β, τ, τ2, ..., τ221 - 1)2 +#[derive(Eq, Clone)] +pub struct Accumulator { + /// tau^0, tau^1, tau^2, ..., tau^{TAU_POWERS_G1_LENGTH - 1} + pub tau_powers_g1: Vec, + /// tau^0, tau^1, tau^2, ..., tau^{TAU_POWERS_LENGTH - 1} + pub tau_powers_g2: Vec, + /// alpha * tau^0, alpha * tau^1, alpha * tau^2, ..., alpha * tau^{TAU_POWERS_LENGTH - 1} + pub alpha_tau_powers_g1: Vec, + /// beta * tau^0, beta * tau^1, beta * tau^2, ..., beta * tau^{TAU_POWERS_LENGTH - 1} + pub beta_tau_powers_g1: Vec, + /// beta + pub beta_g2: E::G2Affine, + /// Keep parameters here + pub parameters: P +} + +impl PartialEq for Accumulator { + fn eq(&self, other: &Accumulator) -> bool { + self.tau_powers_g1.eq(&other.tau_powers_g1) && + self.tau_powers_g2.eq(&other.tau_powers_g2) && + self.alpha_tau_powers_g1.eq(&other.alpha_tau_powers_g1) && + self.beta_tau_powers_g1.eq(&other.beta_tau_powers_g1) && + self.beta_g2 == other.beta_g2 + } +} + +impl Accumulator { + /// Constructs an "initial" accumulator with τ = 1, α = 1, β = 1. + pub fn new(parameters: P) -> Self { + Accumulator { + tau_powers_g1: vec![E::G1Affine::one(); P::TAU_POWERS_G1_LENGTH], + tau_powers_g2: vec![E::G2Affine::one(); P::TAU_POWERS_LENGTH], + alpha_tau_powers_g1: vec![E::G1Affine::one(); P::TAU_POWERS_LENGTH], + beta_tau_powers_g1: vec![E::G1Affine::one(); P::TAU_POWERS_LENGTH], + beta_g2: E::G2Affine::one(), + parameters: parameters + } + } + + /// Write the accumulator with some compression behavior. 
+ pub fn serialize( + &self, + writer: &mut W, + compression: UseCompression + ) -> io::Result<()> + { + fn write_all( + writer: &mut W, + c: &[C], + compression: UseCompression + ) -> io::Result<()> + { + for c in c { + write_point(writer, c, compression)?; + } + + Ok(()) + } + + write_all(writer, &self.tau_powers_g1, compression)?; + write_all(writer, &self.tau_powers_g2, compression)?; + write_all(writer, &self.alpha_tau_powers_g1, compression)?; + write_all(writer, &self.beta_tau_powers_g1, compression)?; + write_all(writer, &[self.beta_g2], compression)?; + + Ok(()) + } + + /// Read the accumulator from disk with some compression behavior. `checked` + /// indicates whether we should check it's a valid element of the group and + /// not the point at infinity. + pub fn deserialize( + reader: &mut R, + compression: UseCompression, + checked: CheckForCorrectness, + parameters: P + ) -> Result + { + fn read_all > ( + reader: &mut R, + size: usize, + compression: UseCompression, + checked: CheckForCorrectness + ) -> Result, DeserializationError> + { + fn decompress_all( + reader: &mut R, + size: usize, + checked: CheckForCorrectness + ) -> Result, DeserializationError> + { + // Read the encoded elements + let mut res = vec![ENC::empty(); size]; + + for encoded in &mut res { + reader.read_exact(encoded.as_mut())?; + } + + // Allocate space for the deserialized elements + let mut res_affine = vec![ENC::Affine::zero(); size]; + + let mut chunk_size = res.len() / num_cpus::get(); + if chunk_size == 0 { + chunk_size = 1; + } + + // If any of our threads encounter a deserialization/IO error, catch + // it with this. 
+ let decoding_error = Arc::new(Mutex::new(None)); + + crossbeam::scope(|scope| { + for (source, target) in res.chunks(chunk_size).zip(res_affine.chunks_mut(chunk_size)) { + let decoding_error = decoding_error.clone(); + + scope.spawn(move || { + for (source, target) in source.iter().zip(target.iter_mut()) { + match { + // If we're a participant, we don't need to check all of the + // elements in the accumulator, which saves a lot of time. + // The hash chain prevents this from being a problem: the + // transcript guarantees that the accumulator was properly + // formed. + match checked { + CheckForCorrectness::Yes => { + // Points at infinity are never expected in the accumulator + source.into_affine().map_err(|e| e.into()).and_then(|source| { + if source.is_zero() { + Err(DeserializationError::PointAtInfinity) + } else { + Ok(source) + } + }) + }, + CheckForCorrectness::No => source.into_affine_unchecked().map_err(|e| e.into()) + } + } + { + Ok(source) => { + *target = source; + }, + Err(e) => { + *decoding_error.lock().unwrap() = Some(e); + } + } + } + }); + } + }); + + match Arc::try_unwrap(decoding_error).unwrap().into_inner().unwrap() { + Some(e) => { + Err(e) + }, + None => { + Ok(res_affine) + } + } + } + + match compression { + UseCompression::Yes => decompress_all::<_, C::Compressed>(reader, size, checked), + UseCompression::No => decompress_all::<_, C::Uncompressed>(reader, size, checked) + } + } + + let tau_powers_g1 = read_all::(reader, P::TAU_POWERS_G1_LENGTH, compression, checked)?; + let tau_powers_g2 = read_all::(reader, P::TAU_POWERS_LENGTH, compression, checked)?; + let alpha_tau_powers_g1 = read_all::(reader, P::TAU_POWERS_LENGTH, compression, checked)?; + let beta_tau_powers_g1 = read_all::(reader, P::TAU_POWERS_LENGTH, compression, checked)?; + let beta_g2 = read_all::(reader, 1, compression, checked)?[0]; + + Ok(Accumulator { + tau_powers_g1: tau_powers_g1, + tau_powers_g2: tau_powers_g2, + alpha_tau_powers_g1: alpha_tau_powers_g1, + 
beta_tau_powers_g1: beta_tau_powers_g1, + beta_g2: beta_g2, + parameters: parameters + }) + } + + /// Transforms the accumulator with a private key. + pub fn transform(&mut self, key: &PrivateKey) + { + // Construct the powers of tau + let mut taupowers = vec![E::Fr::zero(); P::TAU_POWERS_G1_LENGTH]; + let chunk_size = P::TAU_POWERS_G1_LENGTH / num_cpus::get(); + + // Construct exponents in parallel + crossbeam::scope(|scope| { + for (i, taupowers) in taupowers.chunks_mut(chunk_size).enumerate() { + scope.spawn(move || { + let mut acc = key.tau.pow(&[(i * chunk_size) as u64]); + + for t in taupowers { + *t = acc; + acc.mul_assign(&key.tau); + } + }); + } + }); + + /// Exponentiate a large number of points, with an optional coefficient to be applied to the + /// exponent. + fn batch_exp >(bases: &mut [C], exp: &[C::Scalar], coeff: Option<&C::Scalar>) { + assert_eq!(bases.len(), exp.len()); + let mut projective = vec![C::Projective::zero(); bases.len()]; + let chunk_size = bases.len() / num_cpus::get(); + + // Perform wNAF over multiple cores, placing results into `projective`. 
+ crossbeam::scope(|scope| { + for ((bases, exp), projective) in bases.chunks_mut(chunk_size) + .zip(exp.chunks(chunk_size)) + .zip(projective.chunks_mut(chunk_size)) + { + scope.spawn(move || { + let mut wnaf = Wnaf::new(); + + for ((base, exp), projective) in bases.iter_mut() + .zip(exp.iter()) + .zip(projective.iter_mut()) + { + let mut exp = *exp; + if let Some(coeff) = coeff { + exp.mul_assign(coeff); + } + + *projective = wnaf.base(base.into_projective(), 1).scalar(exp.into_repr()); + } + }); + } + }); + + // Perform batch normalization + crossbeam::scope(|scope| { + for projective in projective.chunks_mut(chunk_size) + { + scope.spawn(move || { + C::Projective::batch_normalization(projective); + }); + } + }); + + // Turn it all back into affine points + for (projective, affine) in projective.iter().zip(bases.iter_mut()) { + *affine = projective.into_affine(); + } + } + + batch_exp::(&mut self.tau_powers_g1, &taupowers[0..], None); + batch_exp::(&mut self.tau_powers_g2, &taupowers[0..P::TAU_POWERS_LENGTH], None); + batch_exp::(&mut self.alpha_tau_powers_g1, &taupowers[0..P::TAU_POWERS_LENGTH], Some(&key.alpha)); + batch_exp::(&mut self.beta_tau_powers_g1, &taupowers[0..P::TAU_POWERS_LENGTH], Some(&key.beta)); + self.beta_g2 = self.beta_g2.mul(key.beta).into_affine(); + } +} + +/// Verifies a transformation of the `Accumulator` with the `PublicKey`, given a 64-byte transcript `digest`. 
+pub fn verify_transform(before: &Accumulator, after: &Accumulator, key: &PublicKey, digest: &[u8]) -> bool +{ + assert_eq!(digest.len(), 64); + + let compute_g2_s = |g1_s: E::G1Affine, g1_s_x: E::G1Affine, personalization: u8| { + let mut h = Blake2b::default(); + h.input(&[personalization]); + h.input(digest); + h.input(g1_s.into_uncompressed().as_ref()); + h.input(g1_s_x.into_uncompressed().as_ref()); + hash_to_g2::(h.result().as_ref()).into_affine() + }; + + let tau_g2_s = compute_g2_s(key.tau_g1.0, key.tau_g1.1, 0); + let alpha_g2_s = compute_g2_s(key.alpha_g1.0, key.alpha_g1.1, 1); + let beta_g2_s = compute_g2_s(key.beta_g1.0, key.beta_g1.1, 2); + + // Check the proofs-of-knowledge for tau/alpha/beta + + // g1^s / g1^(s*x) = g2^s / g2^(s*x) + if !same_ratio(key.tau_g1, (tau_g2_s, key.tau_g2)) { + return false; + } + if !same_ratio(key.alpha_g1, (alpha_g2_s, key.alpha_g2)) { + return false; + } + if !same_ratio(key.beta_g1, (beta_g2_s, key.beta_g2)) { + return false; + } + + // Check the correctness of the generators for tau powers + if after.tau_powers_g1[0] != E::G1Affine::one() { + return false; + } + if after.tau_powers_g2[0] != E::G2Affine::one() { + return false; + } + + // Did the participant multiply the previous tau by the new one? + if !same_ratio((before.tau_powers_g1[1], after.tau_powers_g1[1]), (tau_g2_s, key.tau_g2)) { + return false; + } + + // Did the participant multiply the previous alpha by the new one? + if !same_ratio((before.alpha_tau_powers_g1[0], after.alpha_tau_powers_g1[0]), (alpha_g2_s, key.alpha_g2)) { + return false; + } + + // Did the participant multiply the previous beta by the new one? + if !same_ratio((before.beta_tau_powers_g1[0], after.beta_tau_powers_g1[0]), (beta_g2_s, key.beta_g2)) { + return false; + } + if !same_ratio((before.beta_tau_powers_g1[0], after.beta_tau_powers_g1[0]), (before.beta_g2, after.beta_g2)) { + return false; + } + + // Are the powers of tau correct? 
+ if !same_ratio(power_pairs(&after.tau_powers_g1), (after.tau_powers_g2[0], after.tau_powers_g2[1])) { + return false; + } + if !same_ratio(power_pairs(&after.tau_powers_g2), (after.tau_powers_g1[0], after.tau_powers_g1[1])) { + return false; + } + if !same_ratio(power_pairs(&after.alpha_tau_powers_g1), (after.tau_powers_g2[0], after.tau_powers_g2[1])) { + return false; + } + if !same_ratio(power_pairs(&after.beta_tau_powers_g1), (after.tau_powers_g2[0], after.tau_powers_g2[1])) { + return false; + } + + true +} + + + +/// Abstraction over a reader which hashes the data being read. +pub struct HashReader { + reader: R, + hasher: Blake2b +} + +impl HashReader { + /// Construct a new `HashReader` given an existing `reader` by value. + pub fn new(reader: R) -> Self { + HashReader { + reader: reader, + hasher: Blake2b::default() + } + } + + /// Destroy this reader and return the hash of what was read. + pub fn into_hash(self) -> GenericArray { + self.hasher.result() + } +} + +impl Read for HashReader { + fn read(&mut self, buf: &mut [u8]) -> io::Result { + let bytes = self.reader.read(buf)?; + + if bytes > 0 { + self.hasher.input(&buf[0..bytes]); + } + + Ok(bytes) + } +} + +/// Abstraction over a writer which hashes the data being written. +pub struct HashWriter { + writer: W, + hasher: Blake2b +} + +impl HashWriter { + /// Construct a new `HashWriter` given an existing `writer` by value. + pub fn new(writer: W) -> Self { + HashWriter { + writer: writer, + hasher: Blake2b::default() + } + } + + /// Destroy this writer and return the hash of what was written. 
+ pub fn into_hash(self) -> GenericArray { + self.hasher.result() + } +} + +impl Write for HashWriter { + fn write(&mut self, buf: &[u8]) -> io::Result { + let bytes = self.writer.write(buf)?; + + if bytes > 0 { + self.hasher.input(&buf[0..bytes]); + } + + Ok(bytes) + } + + fn flush(&mut self) -> io::Result<()> { + self.writer.flush() + } +} diff --git a/src/batched_accumulator.rs b/src/batched_accumulator.rs new file mode 100644 index 0000000..6e98c4a --- /dev/null +++ b/src/batched_accumulator.rs @@ -0,0 +1,832 @@ +/// Memory constrained accumulator that checks parts of the initial information in parts that fit to memory +/// and then contributes to entropy in parts as well + +extern crate pairing; +extern crate rand; +extern crate crossbeam; +extern crate num_cpus; +extern crate blake2; +extern crate generic_array; +extern crate typenum; +extern crate byteorder; +extern crate ff; +extern crate memmap; +extern crate itertools; + +use itertools::Itertools; +use memmap::{Mmap, MmapMut}; +use self::ff::{Field, PrimeField}; +use self::byteorder::{ReadBytesExt, BigEndian}; +use self::rand::{SeedableRng, Rng, Rand}; +use self::rand::chacha::ChaChaRng; +use self::pairing::bn256::{Bn256}; +use self::pairing::*; +use std::io::{self, Read, Write}; +use std::sync::{Arc, Mutex}; +use self::generic_array::GenericArray; +use self::typenum::consts::U64; +use self::blake2::{Blake2b, Digest}; +use std::fmt; + +use super::keypair::*; +use super::utils::*; +use super::parameters::*; + +pub enum AccumulatorState{ + Empty, + NonEmpty, + Transformed, +} + +/// The `Accumulator` is an object that participants of the ceremony contribute +/// randomness to. This object contains powers of trapdoor `tau` in G1 and in G2 over +/// fixed generators, and additionally in G1 over two other generators of exponents +/// `alpha` and `beta` over those fixed generators. 
In other words: +/// +/// * (τ, τ2, ..., τ222 - 2, α, ατ, ατ2, ..., ατ221 - 1, β, βτ, βτ2, ..., βτ221 - 1)1 +/// * (β, τ, τ2, ..., τ221 - 1)2 +pub struct BachedAccumulator { + /// tau^0, tau^1, tau^2, ..., tau^{TAU_POWERS_G1_LENGTH - 1} + pub tau_powers_g1: Vec, + /// tau^0, tau^1, tau^2, ..., tau^{TAU_POWERS_LENGTH - 1} + pub tau_powers_g2: Vec, + /// alpha * tau^0, alpha * tau^1, alpha * tau^2, ..., alpha * tau^{TAU_POWERS_LENGTH - 1} + pub alpha_tau_powers_g1: Vec, + /// beta * tau^0, beta * tau^1, beta * tau^2, ..., beta * tau^{TAU_POWERS_LENGTH - 1} + pub beta_tau_powers_g1: Vec, + /// beta + pub beta_g2: E::G2Affine, + /// Hash chain hash + pub hash: GenericArray, + /// Keep parameters here as a marker + marker: std::marker::PhantomData

, +} + +impl BachedAccumulator { + /// Calculate the contribution hash from the resulting file. Original powers of tau implementation + /// used a specially formed writer to write to the file and calculate a hash on the fly, but memory-constrained + /// implementation now writes without a particular order, so plain recalculation at the end + /// of the procedure is more efficient + pub fn calculate_hash( + input_map: &Mmap + ) -> GenericArray { + let chunk_size = 1 << 30; // read by 1GB from map + let mut hasher = Blake2b::default(); + for chunk in input_map.chunks(chunk_size) { + hasher.input(&chunk); + } + + hasher.result() + } +} + +impl BachedAccumulator { + pub fn empty() -> Self { + Self { + tau_powers_g1: vec![], + tau_powers_g2: vec![], + alpha_tau_powers_g1: vec![], + beta_tau_powers_g1: vec![], + beta_g2: E::G2Affine::zero(), + hash: blank_hash(), + marker: std::marker::PhantomData::

{} + } + } +} + +impl BachedAccumulator { + fn g1_size(compression: UseCompression) -> usize { + match compression { + UseCompression::Yes => { + return P::G1_COMPRESSED_BYTE_SIZE; + }, + UseCompression::No => { + return P::G1_UNCOMPRESSED_BYTE_SIZE; + } + } + } + + fn g2_size(compression: UseCompression) -> usize { + match compression { + UseCompression::Yes => { + return P::G2_COMPRESSED_BYTE_SIZE; + }, + UseCompression::No => { + return P::G2_UNCOMPRESSED_BYTE_SIZE; + } + } + } + + fn get_size(element_type: ElementType, compression: UseCompression) -> usize { + let size = match element_type { + ElementType::AlphaG1 | ElementType::BetaG1 | ElementType::TauG1 => { Self::g1_size(compression) }, + ElementType::BetaG2 | ElementType::TauG2 => { Self::g2_size(compression) } + }; + + size + } + + /// File expected structure + /// HASH_SIZE bytes for the hash of the contribution + /// TAU_POWERS_G1_LENGTH of G1 points + /// TAU_POWERS_LENGTH of G2 points + /// TAU_POWERS_LENGTH of G1 points for alpha + /// TAU_POWERS_LENGTH of G1 points for beta + /// One G2 point for beta + /// Public key appended to the end of file, but it's irrelevant for an accumulator itself + + fn calculate_mmap_position(index: usize, element_type: ElementType, compression: UseCompression) -> usize { + let g1_size = Self::g1_size(compression); + let g2_size = Self::g2_size(compression); + let required_tau_g1_power = P::TAU_POWERS_G1_LENGTH; + let required_power = P::TAU_POWERS_LENGTH; + let position = match element_type { + ElementType::TauG1 => { + let mut position = 0; + position += g1_size * index; + assert!(index < P::TAU_POWERS_G1_LENGTH, format!("Index of TauG1 element written must not exceed {}, while it's {}", P::TAU_POWERS_G1_LENGTH, index)); + + position + }, + ElementType::TauG2 => { + let mut position = 0; + position += g1_size * required_tau_g1_power; + assert!(index < P::TAU_POWERS_LENGTH, format!("Index of TauG2 element written must not exceed {}, while it's {}", 
P::TAU_POWERS_LENGTH, index)); + position += g2_size * index; + + position + }, + ElementType::AlphaG1 => { + let mut position = 0; + position += g1_size * required_tau_g1_power; + position += g2_size * required_power; + assert!(index < P::TAU_POWERS_LENGTH, format!("Index of AlphaG1 element written must not exceed {}, while it's {}", P::TAU_POWERS_LENGTH, index)); + position += g1_size * index; + + position + }, + ElementType::BetaG1 => { + let mut position = 0; + position += g1_size * required_tau_g1_power; + position += g2_size * required_power; + position += g1_size * required_power; + assert!(index < P::TAU_POWERS_LENGTH, format!("Index of BetaG1 element written must not exceed {}, while it's {}", P::TAU_POWERS_LENGTH, index)); + position += g1_size * index; + + position + }, + ElementType::BetaG2 => { + let mut position = 0; + position += g1_size * required_tau_g1_power; + position += g2_size * required_power; + position += g1_size * required_power; + position += g1_size * required_power; + + position + } + }; + + position + P::HASH_SIZE + } +} + +impl BachedAccumulator { + /// Verifies a transformation of the `Accumulator` with the `PublicKey`, given a 64-byte transcript `digest`. 
+ pub fn verify_transformation( + input_map: &Mmap, + output_map: &Mmap, + key: &PublicKey, + digest: &[u8], + input_is_compressed: UseCompression, + output_is_compressed: UseCompression, + check_input_for_correctness: CheckForCorrectness, + check_output_for_correctness: CheckForCorrectness, + ) -> bool + { + use itertools::MinMaxResult::{MinMax}; + assert_eq!(digest.len(), 64); + + let tau_g2_s = compute_g2_s::(&digest, &key.tau_g1.0, &key.tau_g1.1, 0); + let alpha_g2_s = compute_g2_s::(&digest, &key.alpha_g1.0, &key.alpha_g1.1, 1); + let beta_g2_s = compute_g2_s::(&digest, &key.beta_g1.0, &key.beta_g1.1, 2); + + // Check the proofs-of-knowledge for tau/alpha/beta + + // g1^s / g1^(s*x) = g2^s / g2^(s*x) + if !same_ratio(key.tau_g1, (tau_g2_s, key.tau_g2)) { + println!("Invalid ratio key.tau_g1, (tau_g2_s, key.tau_g2)"); + return false; + } + if !same_ratio(key.alpha_g1, (alpha_g2_s, key.alpha_g2)) { + println!("Invalid ratio key.alpha_g1, (alpha_g2_s, key.alpha_g2)"); + return false; + } + if !same_ratio(key.beta_g1, (beta_g2_s, key.beta_g2)) { + println!("Invalid ratio key.beta_g1, (beta_g2_s, key.beta_g2)"); + return false; + } + + // Load accumulators AND perform computations + + let mut before = Self::empty(); + let mut after = Self::empty(); + + // these checks only touch a part of the accumulator, so read two elements + + { + let chunk_size = 2; + before.read_chunk(0, chunk_size, input_is_compressed, check_input_for_correctness, &input_map).expect("must read a first chunk"); + after.read_chunk(0, chunk_size, output_is_compressed, check_output_for_correctness, &output_map).expect("must read a first chunk"); + + // Check the correctness of the generators for tau powers + if after.tau_powers_g1[0] != E::G1Affine::one() { + println!("tau_powers_g1[0] != 1"); + return false; + } + if after.tau_powers_g2[0] != E::G2Affine::one() { + println!("tau_powers_g2[0] != 1"); + return false; + } + + // Did the participant multiply the previous tau by the new one? 
+ if !same_ratio((before.tau_powers_g1[1], after.tau_powers_g1[1]), (tau_g2_s, key.tau_g2)) { + println!("Invalid ratio (before.tau_powers_g1[1], after.tau_powers_g1[1]), (tau_g2_s, key.tau_g2)"); + return false; + } + + // Did the participant multiply the previous alpha by the new one? + if !same_ratio((before.alpha_tau_powers_g1[0], after.alpha_tau_powers_g1[0]), (alpha_g2_s, key.alpha_g2)) { + println!("Invalid ratio (before.alpha_tau_powers_g1[0], after.alpha_tau_powers_g1[0]), (alpha_g2_s, key.alpha_g2)"); + return false; + } + + // Did the participant multiply the previous beta by the new one? + if !same_ratio((before.beta_tau_powers_g1[0], after.beta_tau_powers_g1[0]), (beta_g2_s, key.beta_g2)) { + println!("Invalid ratio (before.beta_tau_powers_g1[0], after.beta_tau_powers_g1[0]), (beta_g2_s, key.beta_g2)"); + return false; + } + if !same_ratio((before.beta_tau_powers_g1[0], after.beta_tau_powers_g1[0]), (before.beta_g2, after.beta_g2)) { + println!("Invalid ratio (before.beta_tau_powers_g1[0], after.beta_tau_powers_g1[0]), (before.beta_g2, after.beta_g2)"); + return false; + } + + } + + let tau_powers_g2_0 = after.tau_powers_g2[0].clone(); + let tau_powers_g2_1 = after.tau_powers_g2[1].clone(); + let tau_powers_g1_0 = after.tau_powers_g1[0].clone(); + let tau_powers_g1_1 = after.tau_powers_g1[1].clone(); + + // Read by parts and just verify same ratios. Cause of two fixed variables above with tau_powers_g2_1 = tau_powers_g2_0 ^ s + // one does not need to care about some overlapping + + for chunk in &(0..P::TAU_POWERS_LENGTH).into_iter().chunks(P::EMPIRICAL_BATCH_SIZE) { + if let MinMax(start, end) = chunk.minmax() { + let size = end - start + 1; + before.read_chunk(start, size, input_is_compressed, check_input_for_correctness, &input_map).expect("must read a first chunk"); + after.read_chunk(start, size, output_is_compressed, check_output_for_correctness, &output_map).expect("must read a first chunk"); + + // Are the powers of tau correct? 
+ if !same_ratio(power_pairs(&after.tau_powers_g1), (tau_powers_g2_0, tau_powers_g2_1)) { + println!("Invalid ratio power_pairs(&after.tau_powers_g1), (tau_powers_g2_0, tau_powers_g2_1)"); + return false; + } + if !same_ratio(power_pairs(&after.tau_powers_g2), (tau_powers_g1_0, tau_powers_g1_1)) { + println!("Invalid ratio power_pairs(&after.tau_powers_g2), (tau_powers_g1_0, tau_powers_g1_1)"); + return false; + } + if !same_ratio(power_pairs(&after.alpha_tau_powers_g1), (tau_powers_g2_0, tau_powers_g2_1)) { + println!("Invalid ratio power_pairs(&after.alpha_tau_powers_g1), (tau_powers_g2_0, tau_powers_g2_1)"); + return false; + } + if !same_ratio(power_pairs(&after.beta_tau_powers_g1), (tau_powers_g2_0, tau_powers_g2_1)) { + println!("Invalid ratio power_pairs(&after.beta_tau_powers_g1), (tau_powers_g2_0, tau_powers_g2_1)"); + return false; + } + } else { + panic!("Chunk does not have a min and max"); + } + } + + for chunk in &(P::TAU_POWERS_LENGTH..P::TAU_POWERS_G1_LENGTH).into_iter().chunks(P::EMPIRICAL_BATCH_SIZE) { + if let MinMax(start, end) = chunk.minmax() { + let size = end - start + 1; + before.read_chunk(start, size, input_is_compressed, check_input_for_correctness, &input_map).expect("must read a first chunk"); + after.read_chunk(start, size, output_is_compressed, check_output_for_correctness, &output_map).expect("must read a first chunk"); + + assert_eq!(before.tau_powers_g2.len(), 0, "during rest of tau g1 generation tau g2 must be empty"); + assert_eq!(after.tau_powers_g2.len(), 0, "during rest of tau g1 generation tau g2 must be empty"); + + // Are the powers of tau correct? 
+ if !same_ratio(power_pairs(&after.tau_powers_g1), (tau_powers_g2_0, tau_powers_g2_1)) { + println!("Invalid ratio power_pairs(&after.tau_powers_g1), (tau_powers_g2_0, tau_powers_g2_1) in extra TauG1 contribution"); + return false; + } + } else { + panic!("Chunk does not have a min and max"); + } + } + + true + } + + pub fn decompress( + input_map: &Mmap, + output_map: &mut MmapMut, + check_input_for_correctness: CheckForCorrectness, + ) -> io::Result<()> + { + use itertools::MinMaxResult::{MinMax}; + + let mut accumulator = Self::empty(); + + for chunk in &(0..P::TAU_POWERS_LENGTH).into_iter().chunks(P::EMPIRICAL_BATCH_SIZE) { + if let MinMax(start, end) = chunk.minmax() { + let size = end - start + 1; + accumulator.read_chunk(start, size, UseCompression::Yes, check_input_for_correctness, &input_map).expect("Must read a chunk"); + accumulator.write_chunk(start, UseCompression::No, output_map)?; + } else { + panic!("Chunk does not have a min and max"); + } + } + + for chunk in &(P::TAU_POWERS_LENGTH..P::TAU_POWERS_G1_LENGTH).into_iter().chunks(P::EMPIRICAL_BATCH_SIZE) { + if let MinMax(start, end) = chunk.minmax() { + let size = end - start + 1; + accumulator.read_chunk(start, size, UseCompression::Yes, check_input_for_correctness, &input_map).expect("must read a chunk"); + assert_eq!(accumulator.tau_powers_g2.len(), 0, "during rest of tau g1 generation tau g2 must be empty"); + + accumulator.write_chunk(start, UseCompression::No, output_map)?; + + } else { + panic!("Chunk does not have a min and max"); + } + } + + Ok(()) + } +} + +impl BachedAccumulator { + pub fn read_chunk ( + &mut self, + from: usize, + size: usize, + compression: UseCompression, + checked: CheckForCorrectness, + input_map: &Mmap, + ) -> Result<(), DeserializationError> + { + self.tau_powers_g1 = match compression { + UseCompression::Yes => { + self.read_points_chunk::<::Compressed>(from, size, ElementType::TauG1, compression, checked, &input_map)? 
+ }, + UseCompression::No => { + self.read_points_chunk::<::Uncompressed>(from, size, ElementType::TauG1, compression, checked, &input_map)? + }, + + }; + + self.tau_powers_g2 = match compression { + UseCompression::Yes => { + self.read_points_chunk::<::Compressed>(from, size, ElementType::TauG2, compression, checked, &input_map)? + }, + UseCompression::No => { + self.read_points_chunk::<::Uncompressed>(from, size, ElementType::TauG2, compression, checked, &input_map)? + }, + + }; + + self.alpha_tau_powers_g1 = match compression { + UseCompression::Yes => { + self.read_points_chunk::<::Compressed>(from, size, ElementType::AlphaG1, compression, checked, &input_map)? + }, + UseCompression::No => { + self.read_points_chunk::<::Uncompressed>(from, size, ElementType::AlphaG1, compression, checked, &input_map)? + }, + + }; + + self.beta_tau_powers_g1 = match compression { + UseCompression::Yes => { + self.read_points_chunk::<::Compressed>(from, size, ElementType::BetaG1, compression, checked, &input_map)? + }, + UseCompression::No => { + self.read_points_chunk::<::Uncompressed>(from, size, ElementType::BetaG1, compression, checked, &input_map)? 
+ }, + }; + + self.beta_g2 = match compression { + UseCompression::Yes => { + let points = self.read_points_chunk::<::Compressed>(0, 1, ElementType::BetaG2, compression, checked, &input_map)?; + + points[0] + }, + UseCompression::No => { + let points = self.read_points_chunk::<::Uncompressed>(0, 1, ElementType::BetaG2, compression, checked, &input_map)?; + + points[0] + }, + }; + + Ok(()) + } + + // fn read_point( + + // ) -> + + fn read_points_chunk( + &mut self, + from: usize, + size: usize, + element_type: ElementType, + compression: UseCompression, + checked: CheckForCorrectness, + input_map: &Mmap, + ) -> Result, DeserializationError> + { + // Read the encoded elements + let mut res = vec![ENC::empty(); size]; + + for (i, encoded) in res.iter_mut().enumerate() { + let index = from + i; + match element_type { + ElementType::TauG1 => { + if index >= P::TAU_POWERS_G1_LENGTH { + return Ok(vec![]); + } + }, + ElementType::AlphaG1 | ElementType::BetaG1 | ElementType::BetaG2 | ElementType::TauG2 => { + if index >= P::TAU_POWERS_LENGTH { + return Ok(vec![]); + } + } + }; + let position = Self::calculate_mmap_position(index, element_type, compression); + let element_size = Self::get_size(element_type, compression); + let memory_slice = input_map.get(position..position+element_size).expect("must read point data from file"); + memory_slice.clone().read_exact(encoded.as_mut())?; + } + + // Allocate space for the deserialized elements + let mut res_affine = vec![ENC::Affine::zero(); size]; + + let mut chunk_size = res.len() / num_cpus::get(); + if chunk_size == 0 { + chunk_size = 1; + } + + // If any of our threads encounter a deserialization/IO error, catch + // it with this. 
+ let decoding_error = Arc::new(Mutex::new(None)); + + crossbeam::scope(|scope| { + for (source, target) in res.chunks(chunk_size).zip(res_affine.chunks_mut(chunk_size)) { + let decoding_error = decoding_error.clone(); + + scope.spawn(move || { + for (source, target) in source.iter().zip(target.iter_mut()) { + match { + // If we're a participant, we don't need to check all of the + // elements in the accumulator, which saves a lot of time. + // The hash chain prevents this from being a problem: the + // transcript guarantees that the accumulator was properly + // formed. + match checked { + CheckForCorrectness::Yes => { + // Points at infinity are never expected in the accumulator + source.into_affine().map_err(|e| e.into()).and_then(|source| { + if source.is_zero() { + Err(DeserializationError::PointAtInfinity) + } else { + Ok(source) + } + }) + }, + CheckForCorrectness::No => source.into_affine_unchecked().map_err(|e| e.into()) + } + } + { + Ok(source) => { + *target = source; + }, + Err(e) => { + *decoding_error.lock().unwrap() = Some(e); + } + } + } + }); + } + }); + + match Arc::try_unwrap(decoding_error).unwrap().into_inner().unwrap() { + Some(e) => { + Err(e) + }, + None => { + Ok(res_affine) + } + } + } +} + +impl BachedAccumulator { + fn write_all( + &mut self, + chunk_start: usize, + compression: UseCompression, + element_type: ElementType, + output_map: &mut MmapMut, + ) -> io::Result<()> + { + match element_type { + ElementType::TauG1 => { + for (i, c) in self.tau_powers_g1.clone().iter().enumerate() { + let index = chunk_start + i; + self.write_point(index, c, compression, element_type.clone(), output_map)?; + } + }, + ElementType::TauG2 => { + for (i, c) in self.tau_powers_g2.clone().iter().enumerate() { + let index = chunk_start + i; + self.write_point(index, c, compression, element_type.clone(), output_map)?; + } + }, + ElementType::AlphaG1 => { + for (i, c) in self.alpha_tau_powers_g1.clone().iter().enumerate() { + let index = chunk_start + i; + 
self.write_point(index, c, compression, element_type.clone(), output_map)?; + } + }, + ElementType::BetaG1 => { + for (i, c) in self.beta_tau_powers_g1.clone().iter().enumerate() { + let index = chunk_start + i; + self.write_point(index, c, compression, element_type.clone(), output_map)?; + } + }, + ElementType::BetaG2 => { + let index = chunk_start; + self.write_point(index, &self.beta_g2.clone(), compression, element_type.clone(), output_map)? + } + }; + + output_map.flush_async()?; + + Ok(()) + } + + fn write_point( + &mut self, + index: usize, + p: &C, + compression: UseCompression, + element_type: ElementType, + output_map: &mut MmapMut, + ) -> io::Result<()> + where C: CurveAffine + { + match element_type { + ElementType::TauG1 => { + if index >= P::TAU_POWERS_G1_LENGTH { + return Ok(()); + } + }, + ElementType::AlphaG1 | ElementType::BetaG1 | ElementType::BetaG2 | ElementType::TauG2 => { + if index >= P::TAU_POWERS_LENGTH { + return Ok(()); + } + } + }; + + match compression { + UseCompression::Yes => { + let position = Self::calculate_mmap_position(index, element_type, compression); + // let size = self.get_size(element_type, compression); + (&mut output_map[position..]).write(p.into_compressed().as_ref())?; + }, + UseCompression::No => { + let position = Self::calculate_mmap_position(index, element_type, compression); + // let size = self.get_size(element_type, compression); + (&mut output_map[position..]).write(p.into_uncompressed().as_ref())?; + }, + }; + + Ok(()) + } + + /// Write the accumulator with some compression behavior. 
+ pub fn write_chunk( + &mut self, + chunk_start: usize, + compression: UseCompression, + output_map: &mut MmapMut + ) -> io::Result<()> + { + self.write_all(chunk_start, compression, ElementType::TauG1, output_map)?; + if chunk_start < P::TAU_POWERS_LENGTH { + self.write_all(chunk_start, compression, ElementType::TauG2, output_map)?; + self.write_all(chunk_start, compression, ElementType::AlphaG1, output_map)?; + self.write_all(chunk_start, compression, ElementType::BetaG1, output_map)?; + self.write_all(chunk_start, compression, ElementType::BetaG2, output_map)?; + } + + Ok(()) + } + +} + +impl BachedAccumulator { + /// Transforms the accumulator with a private key. + /// Due to large amount of data in a previous accumulator even in the compressed form + /// this function can now work on compressed input. Output can be made in any form + /// WARNING: Contributor does not have to check that values from challenge file were serialized + /// corrently, but we may want to enforce it if a ceremony coordinator does not recompress the previous + /// contribution into the new challenge file + pub fn transform( + input_map: &Mmap, + output_map: &mut MmapMut, + input_is_compressed: UseCompression, + compress_the_output: UseCompression, + check_input_for_correctness: CheckForCorrectness, + key: &PrivateKey + ) -> io::Result<()> + { + + /// Exponentiate a large number of points, with an optional coefficient to be applied to the + /// exponent. + fn batch_exp >(bases: &mut [C], exp: &[C::Scalar], coeff: Option<&C::Scalar>) { + assert_eq!(bases.len(), exp.len()); + let mut projective = vec![C::Projective::zero(); bases.len()]; + let chunk_size = bases.len() / num_cpus::get(); + + // Perform wNAF over multiple cores, placing results into `projective`. 
+ crossbeam::scope(|scope| { + for ((bases, exp), projective) in bases.chunks_mut(chunk_size) + .zip(exp.chunks(chunk_size)) + .zip(projective.chunks_mut(chunk_size)) + { + scope.spawn(move || { + let mut wnaf = Wnaf::new(); + + for ((base, exp), projective) in bases.iter_mut() + .zip(exp.iter()) + .zip(projective.iter_mut()) + { + let mut exp = *exp; + if let Some(coeff) = coeff { + exp.mul_assign(coeff); + } + + *projective = wnaf.base(base.into_projective(), 1).scalar(exp.into_repr()); + } + }); + } + }); + + // Perform batch normalization + crossbeam::scope(|scope| { + for projective in projective.chunks_mut(chunk_size) + { + scope.spawn(move || { + C::Projective::batch_normalization(projective); + }); + } + }); + + // Turn it all back into affine points + for (projective, affine) in projective.iter().zip(bases.iter_mut()) { + *affine = projective.into_affine(); + } + } + + let mut accumulator = Self::empty(); + + use itertools::MinMaxResult::{MinMax}; + + for chunk in &(0..P::TAU_POWERS_LENGTH).into_iter().chunks(P::EMPIRICAL_BATCH_SIZE) { + if let MinMax(start, end) = chunk.minmax() { + let size = end - start + 1; + accumulator.read_chunk(start, size, input_is_compressed, check_input_for_correctness, &input_map).expect("must read a first chunk"); + + // Construct the powers of tau + let mut taupowers = vec![E::Fr::zero(); size]; + let chunk_size = size / num_cpus::get(); + + // Construct exponents in parallel + crossbeam::scope(|scope| { + for (i, taupowers) in taupowers.chunks_mut(chunk_size).enumerate() { + scope.spawn(move || { + let mut acc = key.tau.pow(&[(i * chunk_size) as u64]); + + for t in taupowers { + *t = acc; + acc.mul_assign(&key.tau); + } + }); + } + }); + + batch_exp::(&mut accumulator.tau_powers_g1, &taupowers[0..], None); + batch_exp::(&mut accumulator.tau_powers_g2, &taupowers[0..], None); + batch_exp::(&mut accumulator.alpha_tau_powers_g1, &taupowers[0..], Some(&key.alpha)); + batch_exp::(&mut accumulator.beta_tau_powers_g1, 
&taupowers[0..], Some(&key.beta)); + accumulator.beta_g2 = accumulator.beta_g2.mul(key.beta).into_affine(); + accumulator.write_chunk(start, compress_the_output, output_map)?; + + println!("Done processing {} powers of tau", end); + } else { + panic!("Chunk does not have a min and max"); + } + } + + for chunk in &(P::TAU_POWERS_LENGTH..P::TAU_POWERS_G1_LENGTH).into_iter().chunks(P::EMPIRICAL_BATCH_SIZE) { + if let MinMax(start, end) = chunk.minmax() { + let size = end - start + 1; + accumulator.read_chunk(start, size, input_is_compressed, check_input_for_correctness, &input_map).expect("must read a first chunk"); + assert_eq!(accumulator.tau_powers_g2.len(), 0, "during rest of tau g1 generation tau g2 must be empty"); + + // Construct the powers of tau + let mut taupowers = vec![E::Fr::zero(); size]; + let chunk_size = size / num_cpus::get(); + + // Construct exponents in parallel + crossbeam::scope(|scope| { + for (i, taupowers) in taupowers.chunks_mut(chunk_size).enumerate() { + scope.spawn(move || { + let mut acc = key.tau.pow(&[(i * chunk_size) as u64]); + + for t in taupowers { + *t = acc; + acc.mul_assign(&key.tau); + } + }); + } + }); + + batch_exp::(&mut accumulator.tau_powers_g1, &taupowers[0..], None); + accumulator.beta_g2 = accumulator.beta_g2.mul(key.beta).into_affine(); + accumulator.write_chunk(start, compress_the_output, output_map)?; + + println!("Done processing {} powers of tau", end); + } else { + panic!("Chunk does not have a min and max"); + } + } + + Ok(()) + } +} + +impl BachedAccumulator { + /// Transforms the accumulator with a private key. 
+ pub fn generate_initial( + output_map: &mut MmapMut, + compress_the_output: UseCompression, + ) -> io::Result<()> + { + use itertools::MinMaxResult::{MinMax}; + + for chunk in &(0..P::TAU_POWERS_LENGTH).into_iter().chunks(P::EMPIRICAL_BATCH_SIZE) { + if let MinMax(start, end) = chunk.minmax() { + let size = end - start + 1; + let mut accumulator = Self { + tau_powers_g1: vec![E::G1Affine::one(); size], + tau_powers_g2: vec![E::G2Affine::one(); size], + alpha_tau_powers_g1: vec![E::G1Affine::one(); size], + beta_tau_powers_g1: vec![E::G1Affine::one(); size], + beta_g2: E::G2Affine::one(), + hash: blank_hash(), + marker: std::marker::PhantomData::

{} + }; + + accumulator.write_chunk(start, compress_the_output, output_map)?; + println!("Done processing {} powers of tau", end); + } else { + panic!("Chunk does not have a min and max"); + } + } + + for chunk in &(P::TAU_POWERS_LENGTH..P::TAU_POWERS_G1_LENGTH).into_iter().chunks(P::EMPIRICAL_BATCH_SIZE) { + if let MinMax(start, end) = chunk.minmax() { + let size = end - start + 1; + let mut accumulator = Self { + tau_powers_g1: vec![E::G1Affine::one(); size], + tau_powers_g2: vec![], + alpha_tau_powers_g1: vec![], + beta_tau_powers_g1: vec![], + beta_g2: E::G2Affine::one(), + hash: blank_hash(), + marker: std::marker::PhantomData::

{} + }; + + accumulator.write_chunk(start, compress_the_output, output_map)?; + println!("Done processing {} powers of tau", end); + } else { + panic!("Chunk does not have a min and max"); + } + } + + Ok(()) + } +} diff --git a/src/bin/beacon.rs b/src/bin/beacon.rs.nocompile similarity index 100% rename from src/bin/beacon.rs rename to src/bin/beacon.rs.nocompile diff --git a/src/bin/beacon_constrained.rs b/src/bin/beacon_constrained.rs new file mode 100644 index 0000000..69844d2 --- /dev/null +++ b/src/bin/beacon_constrained.rs @@ -0,0 +1,192 @@ +extern crate powersoftau; +extern crate pairing; +extern crate memmap; +extern crate rand; +extern crate blake2; +extern crate byteorder; +extern crate crypto; + +// use powersoftau::bn256::{Bn256CeremonyParameters}; +use powersoftau::small_bn256::{Bn256CeremonyParameters}; +use powersoftau::batched_accumulator::{BachedAccumulator}; +use powersoftau::keypair::{keypair}; +use powersoftau::parameters::{UseCompression, CheckForCorrectness}; + +use std::fs::OpenOptions; +use pairing::bn256::Bn256; +use memmap::*; + +use std::io::Write; + +use powersoftau::parameters::PowersOfTauParameters; + +#[macro_use] +extern crate hex_literal; + +const input_is_compressed: UseCompression = UseCompression::No; +const compress_the_output: UseCompression = UseCompression::Yes; +const check_input_correctness: CheckForCorrectness = CheckForCorrectness::No; + + +fn main() { + println!("Will contribute a random beacon to accumulator for 2^{} powers of tau", Bn256CeremonyParameters::REQUIRED_POWER); + println!("In total will generate up to {} powers", Bn256CeremonyParameters::TAU_POWERS_G1_LENGTH); + + // Create an RNG based on the outcome of the random beacon + let mut rng = { + use byteorder::{ReadBytesExt, BigEndian}; + use rand::{SeedableRng}; + use rand::chacha::ChaChaRng; + use crypto::sha2::Sha256; + use crypto::digest::Digest; + + // Place block hash here (block number #514200) + let mut cur_hash: [u8; 32] = 
hex!("00000000000000000034b33e842ac1c50456abe5fa92b60f6b3dfc5d247f7b58"); + + // Performs 2^n hash iterations over it + // const N: usize = 42; + + const N: usize = 16; + + for i in 0..(1u64<().expect("digest is large enough for this to work"); + } + + ChaChaRng::from_seed(&seed) + }; + + println!("Done creating a beacon RNG"); + + // Try to load `./challenge` from disk. + let reader = OpenOptions::new() + .read(true) + .open("challenge").expect("unable open `./challenge` in this directory"); + + { + let metadata = reader.metadata().expect("unable to get filesystem metadata for `./challenge`"); + let expected_challenge_length = match input_is_compressed { + UseCompression::Yes => { + Bn256CeremonyParameters::CONTRIBUTION_BYTE_SIZE + }, + UseCompression::No => { + Bn256CeremonyParameters::ACCUMULATOR_BYTE_SIZE + } + }; + + if metadata.len() != (expected_challenge_length as u64) { + panic!("The size of `./challenge` should be {}, but it's {}, so something isn't right.", expected_challenge_length, metadata.len()); + } + } + + let readable_map = unsafe { MmapOptions::new().map(&reader).expect("unable to create a memory map for input") }; + + // Create `./response` in this directory + let writer = OpenOptions::new() + .read(true) + .write(true) + .create_new(true) + .open("response").expect("unable to create `./response` in this directory"); + + let required_output_length = match compress_the_output { + UseCompression::Yes => { + Bn256CeremonyParameters::CONTRIBUTION_BYTE_SIZE + }, + UseCompression::No => { + Bn256CeremonyParameters::ACCUMULATOR_BYTE_SIZE + Bn256CeremonyParameters::PUBLIC_KEY_SIZE + } + }; + + writer.set_len(required_output_length as u64).expect("must make output file large enough"); + + let mut writable_map = unsafe { MmapOptions::new().map_mut(&writer).expect("unable to create a memory map for output") }; + + println!("Calculating previous contribution hash..."); + + let current_accumulator_hash = BachedAccumulator::::calculate_hash(&readable_map); + 
+ { + println!("Contributing on top of the hash:"); + for line in current_accumulator_hash.as_slice().chunks(16) { + print!("\t"); + for section in line.chunks(4) { + for b in section { + print!("{:02x}", b); + } + print!(" "); + } + println!(""); + } + + (&mut writable_map[0..]).write(current_accumulator_hash.as_slice()).expect("unable to write a challenge hash to mmap"); + + writable_map.flush().expect("unable to write hash to `./response`"); + } + + // Construct our keypair using the RNG we created above + let (pubkey, privkey) = keypair(&mut rng, current_accumulator_hash.as_ref()); + + // Perform the transformation + println!("Computing and writing your contribution, this could take a while..."); + + // this computes a transformation and writes it + BachedAccumulator::::transform( + &readable_map, + &mut writable_map, + input_is_compressed, + compress_the_output, + check_input_correctness, + &privkey + ).expect("must transform with the key"); + println!("Finihsing writing your contribution to `./response`..."); + + // Write the public key + pubkey.write::(&mut writable_map, compress_the_output).expect("unable to write public key"); + + // Get the hash of the contribution, so the user can compare later + let output_readonly = writable_map.make_read_only().expect("must make a map readonly"); + let contribution_hash = BachedAccumulator::::calculate_hash(&output_readonly); + + print!("Done!\n\n\ + Your contribution has been written to `./response`\n\n\ + The BLAKE2b hash of `./response` is:\n"); + + for line in contribution_hash.as_slice().chunks(16) { + print!("\t"); + for section in line.chunks(4) { + for b in section { + print!("{:02x}", b); + } + print!(" "); + } + println!(""); + } + + println!("Thank you for your participation, much appreciated! 
:)"); +} diff --git a/src/bin/compute.rs b/src/bin/compute.rs.nocompile similarity index 100% rename from src/bin/compute.rs rename to src/bin/compute.rs.nocompile diff --git a/src/bin/compute_constrained.rs b/src/bin/compute_constrained.rs new file mode 100644 index 0000000..05f8f27 --- /dev/null +++ b/src/bin/compute_constrained.rs @@ -0,0 +1,177 @@ +extern crate powersoftau; +extern crate pairing; +extern crate memmap; +extern crate rand; +extern crate blake2; +extern crate byteorder; + +// use powersoftau::bn256::{Bn256CeremonyParameters}; +use powersoftau::small_bn256::{Bn256CeremonyParameters}; +use powersoftau::batched_accumulator::{BachedAccumulator}; +use powersoftau::keypair::{keypair}; +use powersoftau::parameters::{UseCompression, CheckForCorrectness}; + +use std::fs::OpenOptions; +use pairing::bn256::Bn256; +use memmap::*; + +use std::io::Write; + +use powersoftau::parameters::PowersOfTauParameters; + +const input_is_compressed: UseCompression = UseCompression::No; +const compress_the_output: UseCompression = UseCompression::Yes; +const check_input_correctness: CheckForCorrectness = CheckForCorrectness::No; + +fn main() { + println!("Will contribute to accumulator for 2^{} powers of tau", Bn256CeremonyParameters::REQUIRED_POWER); + println!("In total will generate up to {} powers", Bn256CeremonyParameters::TAU_POWERS_G1_LENGTH); + + // Create an RNG based on a mixture of system randomness and user provided randomness + let mut rng = { + use byteorder::{ReadBytesExt, BigEndian}; + use blake2::{Blake2b, Digest}; + use rand::{SeedableRng, Rng, OsRng}; + use rand::chacha::ChaChaRng; + + let h = { + let mut system_rng = OsRng::new().unwrap(); + let mut h = Blake2b::default(); + + // Gather 1024 bytes of entropy from the system + for _ in 0..1024 { + let r: u8 = system_rng.gen(); + h.input(&[r]); + } + + // Ask the user to provide some information for additional entropy + let mut user_input = String::new(); + println!("Type some random text and press [ENTER] 
to provide additional entropy..."); + std::io::stdin().read_line(&mut user_input).expect("expected to read some random text from the user"); + + // Hash it all up to make a seed + h.input(&user_input.as_bytes()); + h.result() + }; + + let mut digest = &h[..]; + + // Interpret the first 32 bytes of the digest as 8 32-bit words + let mut seed = [0u32; 8]; + for i in 0..8 { + seed[i] = digest.read_u32::().expect("digest is large enough for this to work"); + } + + ChaChaRng::from_seed(&seed) + }; + + // Try to load `./challenge` from disk. + let reader = OpenOptions::new() + .read(true) + .open("challenge").expect("unable open `./challenge` in this directory"); + + { + let metadata = reader.metadata().expect("unable to get filesystem metadata for `./challenge`"); + let expected_challenge_length = match input_is_compressed { + UseCompression::Yes => { + Bn256CeremonyParameters::CONTRIBUTION_BYTE_SIZE + }, + UseCompression::No => { + Bn256CeremonyParameters::ACCUMULATOR_BYTE_SIZE + } + }; + + if metadata.len() != (expected_challenge_length as u64) { + panic!("The size of `./challenge` should be {}, but it's {}, so something isn't right.", expected_challenge_length, metadata.len()); + } + } + + let readable_map = unsafe { MmapOptions::new().map(&reader).expect("unable to create a memory map for input") }; + + // Create `./response` in this directory + let writer = OpenOptions::new() + .read(true) + .write(true) + .create_new(true) + .open("response").expect("unable to create `./response` in this directory"); + + let required_output_length = match compress_the_output { + UseCompression::Yes => { + Bn256CeremonyParameters::CONTRIBUTION_BYTE_SIZE + }, + UseCompression::No => { + Bn256CeremonyParameters::ACCUMULATOR_BYTE_SIZE + Bn256CeremonyParameters::PUBLIC_KEY_SIZE + } + }; + + writer.set_len(required_output_length as u64).expect("must make output file large enough"); + + let mut writable_map = unsafe { MmapOptions::new().map_mut(&writer).expect("unable to create a memory 
map for output") }; + + println!("Calculating previous contribution hash..."); + + assert!(UseCompression::No == input_is_compressed, "Hashing the compressed file in not yet defined"); + let current_accumulator_hash = BachedAccumulator::::calculate_hash(&readable_map); + + { + println!("Contributing on top of the hash:"); + for line in current_accumulator_hash.as_slice().chunks(16) { + print!("\t"); + for section in line.chunks(4) { + for b in section { + print!("{:02x}", b); + } + print!(" "); + } + println!(""); + } + + (&mut writable_map[0..]).write(current_accumulator_hash.as_slice()).expect("unable to write a challenge hash to mmap"); + + writable_map.flush().expect("unable to write hash to `./response`"); + } + + // Construct our keypair using the RNG we created above + let (pubkey, privkey) = keypair(&mut rng, current_accumulator_hash.as_ref()); + + // Perform the transformation + println!("Computing and writing your contribution, this could take a while..."); + + // this computes a transformation and writes it + BachedAccumulator::::transform( + &readable_map, + &mut writable_map, + input_is_compressed, + compress_the_output, + check_input_correctness, + &privkey + ).expect("must transform with the key"); + + println!("Finihsing writing your contribution to `./response`..."); + + // Write the public key + pubkey.write::(&mut writable_map, compress_the_output).expect("unable to write public key"); + + writable_map.flush().expect("must flush a memory map"); + + // Get the hash of the contribution, so the user can compare later + let output_readonly = writable_map.make_read_only().expect("must make a map readonly"); + let contribution_hash = BachedAccumulator::::calculate_hash(&output_readonly); + + print!("Done!\n\n\ + Your contribution has been written to `./response`\n\n\ + The BLAKE2b hash of `./response` is:\n"); + + for line in contribution_hash.as_slice().chunks(16) { + print!("\t"); + for section in line.chunks(4) { + for b in section { + 
print!("{:02x}", b); + } + print!(" "); + } + println!(""); + } + + println!("Thank you for your participation, much appreciated! :)"); +} diff --git a/src/bin/new.rs b/src/bin/new.rs index a5bb301..fcf5408 100644 --- a/src/bin/new.rs +++ b/src/bin/new.rs @@ -1,8 +1,15 @@ extern crate powersoftau; -use powersoftau::*; +extern crate pairing; + +// use powersoftau::bn256::{Bn256CeremonyParameters}; +use powersoftau::small_bn256::{Bn256CeremonyParameters}; +use powersoftau::accumulator::{Accumulator}; +use powersoftau::utils::{blank_hash}; +use powersoftau::parameters::{UseCompression}; use std::fs::OpenOptions; use std::io::{Write, BufWriter}; +use pairing::bn256::Bn256; fn main() { let writer = OpenOptions::new() @@ -16,7 +23,10 @@ fn main() { // Write a blank BLAKE2b hash: writer.write_all(&blank_hash().as_slice()).expect("unable to write blank hash to `./challenge`"); - let acc = Accumulator::new(); + let parameters = Bn256CeremonyParameters{}; + + let acc: Accumulator = Accumulator::new(parameters); + println!("Writing an empty accumulator to disk"); acc.serialize(&mut writer, UseCompression::No).expect("unable to write fresh accumulator to `./challenge`"); writer.flush().expect("unable to flush accumulator to disk"); diff --git a/src/bin/new.rs.nocompile b/src/bin/new.rs.nocompile new file mode 100644 index 0000000..a5bb301 --- /dev/null +++ b/src/bin/new.rs.nocompile @@ -0,0 +1,24 @@ +extern crate powersoftau; +use powersoftau::*; + +use std::fs::OpenOptions; +use std::io::{Write, BufWriter}; + +fn main() { + let writer = OpenOptions::new() + .read(false) + .write(true) + .create_new(true) + .open("challenge").expect("unable to create `./challenge`"); + + let mut writer = BufWriter::new(writer); + + // Write a blank BLAKE2b hash: + writer.write_all(&blank_hash().as_slice()).expect("unable to write blank hash to `./challenge`"); + + let acc = Accumulator::new(); + acc.serialize(&mut writer, UseCompression::No).expect("unable to write fresh accumulator to 
`./challenge`"); + writer.flush().expect("unable to flush accumulator to disk"); + + println!("Wrote a fresh accumulator to `./challenge`"); +} diff --git a/src/bin/new_constrained.rs b/src/bin/new_constrained.rs new file mode 100644 index 0000000..f3f8699 --- /dev/null +++ b/src/bin/new_constrained.rs @@ -0,0 +1,81 @@ +extern crate powersoftau; +extern crate pairing; +extern crate memmap; + +// use powersoftau::bn256::{Bn256CeremonyParameters}; +use powersoftau::small_bn256::{Bn256CeremonyParameters}; +use powersoftau::batched_accumulator::{BachedAccumulator}; +use powersoftau::parameters::{UseCompression}; +use powersoftau::utils::{blank_hash}; + +use std::fs::OpenOptions; +use std::io::{Write}; +use pairing::bn256::Bn256; +use memmap::*; + +use powersoftau::parameters::PowersOfTauParameters; + +const compress_new_challenge: UseCompression = UseCompression::No; + +fn main() { + println!("Will generate an empty accumulator for 2^{} powers of tau", Bn256CeremonyParameters::REQUIRED_POWER); + println!("In total will generate up to {} powers", Bn256CeremonyParameters::TAU_POWERS_G1_LENGTH); + + let file = OpenOptions::new() + .read(true) + .write(true) + .create_new(true) + .open("challenge").expect("unable to create `./challenge`"); + + let expected_challenge_length = match compress_new_challenge { + UseCompression::Yes => { + Bn256CeremonyParameters::CONTRIBUTION_BYTE_SIZE - Bn256CeremonyParameters::PUBLIC_KEY_SIZE + }, + UseCompression::No => { + Bn256CeremonyParameters::ACCUMULATOR_BYTE_SIZE + } + }; + + file.set_len(expected_challenge_length as u64).expect("unable to allocate large enough file"); + + let mut writable_map = unsafe { MmapOptions::new().map_mut(&file).expect("unable to create a memory map") }; + + // Write a blank BLAKE2b hash: + let hash = blank_hash(); + (&mut writable_map[0..]).write(hash.as_slice()).expect("unable to write a default hash to mmap"); + writable_map.flush().expect("unable to write blank hash to `./challenge`"); + + println!("Blank 
hash for an empty challenge:"); + for line in hash.as_slice().chunks(16) { + print!("\t"); + for section in line.chunks(4) { + for b in section { + print!("{:02x}", b); + } + print!(" "); + } + println!(""); + } + + BachedAccumulator::::generate_initial(&mut writable_map, compress_new_challenge).expect("generation of initial accumulator is successful"); + writable_map.flush().expect("unable to flush memmap to disk"); + + // Get the hash of the contribution, so the user can compare later + let output_readonly = writable_map.make_read_only().expect("must make a map readonly"); + let contribution_hash = BachedAccumulator::::calculate_hash(&output_readonly); + + println!("Empty contribution is formed with a hash:"); + + for line in contribution_hash.as_slice().chunks(16) { + print!("\t"); + for section in line.chunks(4) { + for b in section { + print!("{:02x}", b); + } + print!(" "); + } + println!(""); + } + + println!("Wrote a fresh accumulator to `./challenge`"); +} diff --git a/src/bin/verify.rs b/src/bin/verify.rs.nocompile similarity index 100% rename from src/bin/verify.rs rename to src/bin/verify.rs.nocompile diff --git a/src/bin/verify_transform.rs b/src/bin/verify_transform.rs.nocompile similarity index 100% rename from src/bin/verify_transform.rs rename to src/bin/verify_transform.rs.nocompile diff --git a/src/bin/verify_transform_constrained.rs b/src/bin/verify_transform_constrained.rs new file mode 100644 index 0000000..61d5b34 --- /dev/null +++ b/src/bin/verify_transform_constrained.rs @@ -0,0 +1,190 @@ +extern crate powersoftau; +extern crate pairing; +extern crate memmap; +extern crate rand; +extern crate blake2; +extern crate byteorder; + +// use powersoftau::bn256::{Bn256CeremonyParameters}; +use powersoftau::small_bn256::{Bn256CeremonyParameters}; +use powersoftau::batched_accumulator::{BachedAccumulator}; +use powersoftau::keypair::{PublicKey}; +use powersoftau::parameters::{UseCompression, CheckForCorrectness}; + +use std::fs::OpenOptions; +use 
pairing::bn256::Bn256; +use memmap::*; + +use std::io::{Read, Write}; + +use powersoftau::parameters::PowersOfTauParameters; + +const previous_challenge_is_compressed: UseCompression = UseCompression::No; +const contribution_is_compressed: UseCompression = UseCompression::Yes; +const compress_new_challenge: UseCompression = UseCompression::No; + +fn main() { + println!("Will verify and decompress a contribution to accumulator for 2^{} powers of tau", Bn256CeremonyParameters::REQUIRED_POWER); + + // Try to load `./challenge` from disk. + let challenge_reader = OpenOptions::new() + .read(true) + .open("challenge").expect("unable open `./challenge` in this directory"); + + { + let metadata = challenge_reader.metadata().expect("unable to get filesystem metadata for `./challenge`"); + let expected_challenge_length = match previous_challenge_is_compressed { + UseCompression::Yes => { + Bn256CeremonyParameters::CONTRIBUTION_BYTE_SIZE + }, + UseCompression::No => { + Bn256CeremonyParameters::ACCUMULATOR_BYTE_SIZE + } + }; + if metadata.len() != (expected_challenge_length as u64) { + panic!("The size of `./challenge` should be {}, but it's {}, so something isn't right.", expected_challenge_length, metadata.len()); + } + } + + let challenge_readable_map = unsafe { MmapOptions::new().map(&challenge_reader).expect("unable to create a memory map for input") }; + + // Try to load `./response` from disk. 
+ let response_reader = OpenOptions::new() + .read(true) + .open("response").expect("unable open `./response` in this directory"); + + { + let metadata = response_reader.metadata().expect("unable to get filesystem metadata for `./response`"); + let expected_response_length = match contribution_is_compressed { + UseCompression::Yes => { + Bn256CeremonyParameters::CONTRIBUTION_BYTE_SIZE + }, + UseCompression::No => { + Bn256CeremonyParameters::ACCUMULATOR_BYTE_SIZE + Bn256CeremonyParameters::PUBLIC_KEY_SIZE + } + }; + if metadata.len() != (expected_response_length as u64) { + panic!("The size of `./response` should be {}, but it's {}, so something isn't right.", expected_response_length, metadata.len()); + } + } + + let response_readable_map = unsafe { MmapOptions::new().map(&response_reader).expect("unable to create a memory map for input") }; + + println!("Calculating previous challenge hash..."); + + // Check that contribution is correct + + let current_accumulator_hash = BachedAccumulator::::calculate_hash(&challenge_readable_map); + + println!("Previous challenge hash"); + for line in current_accumulator_hash.as_slice().chunks(16) { + print!("\t"); + for section in line.chunks(4) { + for b in section { + print!("{:02x}", b); + } + print!(" "); + } + println!(""); + } + + // Check the hash chain - a new response must be based on the previous challenge! + { + let mut response_challenge_hash = [0; 64]; + let memory_slice = response_readable_map.get(0..64).expect("must read point data from file"); + memory_slice.clone().read_exact(&mut response_challenge_hash).expect("couldn't read hash of challenge file from response file"); + + println!("Response was based on the hash"); + for line in response_challenge_hash.chunks(16) { + print!("\t"); + for section in line.chunks(4) { + for b in section { + print!("{:02x}", b); + } + print!(" "); + } + println!(""); + } + + if &response_challenge_hash[..] != current_accumulator_hash.as_slice() { + panic!("Hash chain failure. 
This is not the right response."); + } + } + + // get the contributor's public key + let public_key = PublicKey::::read::(&response_readable_map, contribution_is_compressed) + .expect("wasn't able to deserialize the response file's public key"); + + + // check that it follows the protocol + + let valid = BachedAccumulator::::verify_transformation( + &challenge_readable_map, + &response_readable_map, + &public_key, + current_accumulator_hash.as_slice(), + previous_challenge_is_compressed, + contribution_is_compressed, + CheckForCorrectness::No, + CheckForCorrectness::Yes, + ); + + if !valid { + println!("Verification failed, contribution was invalid somehow."); + panic!("INVALID CONTRIBUTION!!!"); + } else { + println!("Verification succeeded!"); + } + + + let response_hash = BachedAccumulator::::calculate_hash(&response_readable_map); + + println!("Here's the BLAKE2b hash of the participant's response file:"); + + for line in response_hash.as_slice().chunks(16) { + print!("\t"); + for section in line.chunks(4) { + for b in section { + print!("{:02x}", b); + } + print!(" "); + } + println!(""); + } + + if compress_new_challenge == UseCompression::Yes { + println!("Don't need to recompress the contribution, please copy `./response` as `./new_challenge`"); + } else { + println!("Verification succeeded! 
Writing to `./new_challenge`..."); + + // Create `./new_challenge` in this directory + let writer = OpenOptions::new() + .read(true) + .write(true) + .create_new(true) + .open("new_challenge").expect("unable to create `./new_challenge` in this directory"); + + + + // Recomputation stips the public key and uses hashing to link with the previous contibution after decompression + writer.set_len(Bn256CeremonyParameters::ACCUMULATOR_BYTE_SIZE as u64).expect("must make output file large enough"); + + let mut writable_map = unsafe { MmapOptions::new().map_mut(&writer).expect("unable to create a memory map for output") }; + + { + (&mut writable_map[0..]).write(response_hash.as_slice()).expect("unable to write a default hash to mmap"); + + writable_map.flush().expect("unable to write hash to `./new_challenge`"); + } + + BachedAccumulator::::decompress( + &response_readable_map, + &mut writable_map, + CheckForCorrectness::No).expect("must decompress a response for a new challenge"); + + writable_map.flush().expect("must flush the memory map"); + + println!("Done! `./new_challenge` contains the new challenge file. The other files"); + println!("were left alone."); + } +} diff --git a/src/bls12_381/mod.rs b/src/bls12_381/mod.rs new file mode 100644 index 0000000..22c82d8 --- /dev/null +++ b/src/bls12_381/mod.rs @@ -0,0 +1,859 @@ +//! This ceremony constructs the "powers of tau" for Jens Groth's 2016 zk-SNARK proving +//! system using the BLS12-381 pairing-friendly elliptic curve construction. +//! +//! # Overview +//! +//! Participants of the ceremony receive a "challenge" file containing: +//! +//! * the BLAKE2b hash of the last file entered into the transcript +//! * an `Accumulator` (with curve points encoded in uncompressed form for fast deserialization) +//! +//! The participant runs a tool which generates a random keypair (`PublicKey`, `PrivateKey`) +//! used for modifying the `Accumulator` from the "challenge" file. The keypair is then used to +//! 
transform the `Accumulator`, and a "response" file is generated containing: +//! +//! * the BLAKE2b hash of the "challenge" file (thus forming a hash chain over the entire transcript) +//! * an `Accumulator` (with curve points encoded in compressed form for fast uploading) +//! * the `PublicKey` +//! +//! This "challenge" file is entered into the protocol transcript. A given transcript is valid +//! if the transformations between consecutive `Accumulator`s verify with their respective +//! `PublicKey`s. Participants (and the public) can ensure that their contribution to the +//! `Accumulator` was accepted by ensuring the transcript contains their "response" file, ideally +//! by comparison of the BLAKE2b hash of the "response" file. +//! +//! After some time has elapsed for participants to contribute to the ceremony, a participant is +//! simulated with a randomness beacon. The resulting `Accumulator` contains partial zk-SNARK +//! public parameters for all circuits within a bounded size. + +extern crate pairing; +extern crate rand; +extern crate crossbeam; +extern crate num_cpus; +extern crate blake2; +extern crate generic_array; +extern crate typenum; +extern crate byteorder; + +use byteorder::{ReadBytesExt, BigEndian}; +use rand::{SeedableRng, Rng, Rand}; +use rand::chacha::ChaChaRng; +use pairing::bls12_381::*; +use pairing::*; +use std::io::{self, Read, Write}; +use std::sync::{Arc, Mutex}; +use generic_array::GenericArray; +use typenum::consts::U64; +use blake2::{Blake2b, Digest}; +use std::fmt; + +// This ceremony is based on the BLS12-381 elliptic curve construction. +const G1_UNCOMPRESSED_BYTE_SIZE: usize = 96; +const G2_UNCOMPRESSED_BYTE_SIZE: usize = 192; +const G1_COMPRESSED_BYTE_SIZE: usize = 48; +const G2_COMPRESSED_BYTE_SIZE: usize = 96; + +/// The accumulator supports circuits with 2^21 multiplication gates. 
+const TAU_POWERS_LENGTH: usize = (1 << 21); + +/// More tau powers are needed in G1 because the Groth16 H query +/// includes terms of the form tau^i * (tau^m - 1) = tau^(i+m) - tau^i +/// where the largest i = m - 2, requiring the computation of tau^(2m - 2) +/// and thus giving us a vector length of 2^22 - 1. +const TAU_POWERS_G1_LENGTH: usize = (TAU_POWERS_LENGTH << 1) - 1; + +/// The size of the accumulator on disk. +pub const ACCUMULATOR_BYTE_SIZE: usize = (TAU_POWERS_G1_LENGTH * G1_UNCOMPRESSED_BYTE_SIZE) + // g1 tau powers + (TAU_POWERS_LENGTH * G2_UNCOMPRESSED_BYTE_SIZE) + // g2 tau powers + (TAU_POWERS_LENGTH * G1_UNCOMPRESSED_BYTE_SIZE) + // alpha tau powers + (TAU_POWERS_LENGTH * G1_UNCOMPRESSED_BYTE_SIZE) // beta tau powers + + G2_UNCOMPRESSED_BYTE_SIZE // beta in g2 + + 64; // blake2b hash of previous contribution + +/// The "public key" is used to verify a contribution was correctly +/// computed. +pub const PUBLIC_KEY_SIZE: usize = 3 * G2_UNCOMPRESSED_BYTE_SIZE + // tau, alpha, and beta in g2 + 6 * G1_UNCOMPRESSED_BYTE_SIZE; // (s1, s1*tau), (s2, s2*alpha), (s3, s3*beta) in g1 + +/// The size of the contribution on disk. +pub const CONTRIBUTION_BYTE_SIZE: usize = (TAU_POWERS_G1_LENGTH * G1_COMPRESSED_BYTE_SIZE) + // g1 tau powers + (TAU_POWERS_LENGTH * G2_COMPRESSED_BYTE_SIZE) + // g2 tau powers + (TAU_POWERS_LENGTH * G1_COMPRESSED_BYTE_SIZE) + // alpha tau powers + (TAU_POWERS_LENGTH * G1_COMPRESSED_BYTE_SIZE) // beta tau powers + + G2_COMPRESSED_BYTE_SIZE // beta in g2 + + 64 // blake2b hash of input accumulator + + PUBLIC_KEY_SIZE; // public key + +/// Hashes to G2 using the first 32 bytes of `digest`. Panics if `digest` is less +/// than 32 bytes. 
+fn hash_to_g2(mut digest: &[u8]) -> G2 +{ + assert!(digest.len() >= 32); + + let mut seed = Vec::with_capacity(8); + + for _ in 0..8 { + seed.push(digest.read_u32::().expect("assertion above guarantees this to work")); + } + + ChaChaRng::from_seed(&seed).gen() +} + +#[test] +fn test_hash_to_g2() { + assert!( + hash_to_g2(&[1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33]) + == + hash_to_g2(&[1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,34]) + ); + + assert!( + hash_to_g2(&[1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32]) + != + hash_to_g2(&[1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,33]) + ); +} + +/// Contains terms of the form (s1, s1x, H(s1x)2, H(s1x)2x) +/// for all x in τ, α and β, and some s chosen randomly by its creator. The function H "hashes into" the group G2. No points in the public key may be the identity. +/// +/// The elements in G2 are used to verify transformations of the accumulator. By its nature, the public key proves +/// knowledge of τ, α and β. +/// +/// It is necessary to verify `same_ratio`((s1, s1x), (H(s1x)2, H(s1x)2x)). +#[derive(PartialEq, Eq)] +pub struct PublicKey { + tau_g1: (G1Affine, G1Affine), + alpha_g1: (G1Affine, G1Affine), + beta_g1: (G1Affine, G1Affine), + tau_g2: G2Affine, + alpha_g2: G2Affine, + beta_g2: G2Affine +} + +/// Contains the secrets τ, α and β that the participant of the ceremony must destroy. +pub struct PrivateKey { + tau: Fr, + alpha: Fr, + beta: Fr +} + +/// Constructs a keypair given an RNG and a 64-byte transcript `digest`. 
+pub fn keypair(rng: &mut R, digest: &[u8]) -> (PublicKey, PrivateKey) +{ + assert_eq!(digest.len(), 64); + + let tau = Fr::rand(rng); + let alpha = Fr::rand(rng); + let beta = Fr::rand(rng); + + let mut op = |x, personalization: u8| { + // Sample random g^s + let g1_s = G1::rand(rng).into_affine(); + // Compute g^{s*x} + let g1_s_x = g1_s.mul(x).into_affine(); + // Compute BLAKE2b(personalization | transcript | g^s | g^{s*x}) + let h = { + let mut h = Blake2b::default(); + h.input(&[personalization]); + h.input(digest); + h.input(g1_s.into_uncompressed().as_ref()); + h.input(g1_s_x.into_uncompressed().as_ref()); + h.result() + }; + // Hash into G2 as g^{s'} + let g2_s = hash_to_g2(h.as_ref()).into_affine(); + // Compute g^{s'*x} + let g2_s_x = g2_s.mul(x).into_affine(); + + ((g1_s, g1_s_x), g2_s_x) + }; + + let pk_tau = op(tau, 0); + let pk_alpha = op(alpha, 1); + let pk_beta = op(beta, 2); + + ( + PublicKey { + tau_g1: pk_tau.0, + alpha_g1: pk_alpha.0, + beta_g1: pk_beta.0, + tau_g2: pk_tau.1, + alpha_g2: pk_alpha.1, + beta_g2: pk_beta.1, + }, + PrivateKey { + tau: tau, + alpha: alpha, + beta: beta + } + ) +} + +/// Determines if point compression should be used. +#[derive(Copy, Clone)] +pub enum UseCompression { + Yes, + No +} + +/// Determines if points should be checked for correctness during deserialization. +/// This is not necessary for participants, because a transcript verifier can +/// check this theirself. +#[derive(Copy, Clone)] +pub enum CheckForCorrectness { + Yes, + No +} + +fn write_point( + writer: &mut W, + p: &G, + compression: UseCompression +) -> io::Result<()> + where W: Write, + G: CurveAffine +{ + match compression { + UseCompression::Yes => writer.write_all(p.into_compressed().as_ref()), + UseCompression::No => writer.write_all(p.into_uncompressed().as_ref()), + } +} + +/// Errors that might occur during deserialization. 
+#[derive(Debug)] +pub enum DeserializationError { + IoError(io::Error), + DecodingError(GroupDecodingError), + PointAtInfinity +} + +impl fmt::Display for DeserializationError { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + match *self { + DeserializationError::IoError(ref e) => write!(f, "Disk IO error: {}", e), + DeserializationError::DecodingError(ref e) => write!(f, "Decoding error: {}", e), + DeserializationError::PointAtInfinity => write!(f, "Point at infinity found") + } + } +} + +impl From for DeserializationError { + fn from(err: io::Error) -> DeserializationError { + DeserializationError::IoError(err) + } +} + +impl From for DeserializationError { + fn from(err: GroupDecodingError) -> DeserializationError { + DeserializationError::DecodingError(err) + } +} + +impl PublicKey { + /// Serialize the public key. Points are always in uncompressed form. + pub fn serialize(&self, writer: &mut W) -> io::Result<()> + { + write_point(writer, &self.tau_g1.0, UseCompression::No)?; + write_point(writer, &self.tau_g1.1, UseCompression::No)?; + + write_point(writer, &self.alpha_g1.0, UseCompression::No)?; + write_point(writer, &self.alpha_g1.1, UseCompression::No)?; + + write_point(writer, &self.beta_g1.0, UseCompression::No)?; + write_point(writer, &self.beta_g1.1, UseCompression::No)?; + + write_point(writer, &self.tau_g2, UseCompression::No)?; + write_point(writer, &self.alpha_g2, UseCompression::No)?; + write_point(writer, &self.beta_g2, UseCompression::No)?; + + Ok(()) + } + + /// Deserialize the public key. Points are always in uncompressed form, and + /// always checked, since there aren't very many of them. Does not allow any + /// points at infinity. 
+ pub fn deserialize(reader: &mut R) -> Result + { + fn read_uncompressed(reader: &mut R) -> Result { + let mut repr = C::Uncompressed::empty(); + reader.read_exact(repr.as_mut())?; + let v = repr.into_affine()?; + + if v.is_zero() { + Err(DeserializationError::PointAtInfinity) + } else { + Ok(v) + } + } + + let tau_g1_s = read_uncompressed(reader)?; + let tau_g1_s_tau = read_uncompressed(reader)?; + + let alpha_g1_s = read_uncompressed(reader)?; + let alpha_g1_s_alpha = read_uncompressed(reader)?; + + let beta_g1_s = read_uncompressed(reader)?; + let beta_g1_s_beta = read_uncompressed(reader)?; + + let tau_g2 = read_uncompressed(reader)?; + let alpha_g2 = read_uncompressed(reader)?; + let beta_g2 = read_uncompressed(reader)?; + + Ok(PublicKey { + tau_g1: (tau_g1_s, tau_g1_s_tau), + alpha_g1: (alpha_g1_s, alpha_g1_s_alpha), + beta_g1: (beta_g1_s, beta_g1_s_beta), + tau_g2: tau_g2, + alpha_g2: alpha_g2, + beta_g2: beta_g2 + }) + } +} + +#[test] +fn test_pubkey_serialization() { + use rand::thread_rng; + + let rng = &mut thread_rng(); + let digest = (0..64).map(|_| rng.gen()).collect::>(); + let (pk, _) = keypair(rng, &digest); + let mut v = vec![]; + pk.serialize(&mut v).unwrap(); + assert_eq!(v.len(), PUBLIC_KEY_SIZE); + let deserialized = PublicKey::deserialize(&mut &v[..]).unwrap(); + assert!(pk == deserialized); +} + +/// The `Accumulator` is an object that participants of the ceremony contribute +/// randomness to. This object contains powers of trapdoor `tau` in G1 and in G2 over +/// fixed generators, and additionally in G1 over two other generators of exponents +/// `alpha` and `beta` over those fixed generators. 
In other words: +/// +/// * (τ, τ2, ..., τ222 - 2, α, ατ, ατ2, ..., ατ221 - 1, β, βτ, βτ2, ..., βτ221 - 1)1 +/// * (β, τ, τ2, ..., τ221 - 1)2 +#[derive(PartialEq, Eq, Clone)] +pub struct Accumulator { + /// tau^0, tau^1, tau^2, ..., tau^{TAU_POWERS_G1_LENGTH - 1} + pub tau_powers_g1: Vec, + /// tau^0, tau^1, tau^2, ..., tau^{TAU_POWERS_LENGTH - 1} + pub tau_powers_g2: Vec, + /// alpha * tau^0, alpha * tau^1, alpha * tau^2, ..., alpha * tau^{TAU_POWERS_LENGTH - 1} + pub alpha_tau_powers_g1: Vec, + /// beta * tau^0, beta * tau^1, beta * tau^2, ..., beta * tau^{TAU_POWERS_LENGTH - 1} + pub beta_tau_powers_g1: Vec, + /// beta + pub beta_g2: G2Affine +} + +impl Accumulator { + /// Constructs an "initial" accumulator with τ = 1, α = 1, β = 1. + pub fn new() -> Self { + Accumulator { + tau_powers_g1: vec![G1Affine::one(); TAU_POWERS_G1_LENGTH], + tau_powers_g2: vec![G2Affine::one(); TAU_POWERS_LENGTH], + alpha_tau_powers_g1: vec![G1Affine::one(); TAU_POWERS_LENGTH], + beta_tau_powers_g1: vec![G1Affine::one(); TAU_POWERS_LENGTH], + beta_g2: G2Affine::one() + } + } + + /// Write the accumulator with some compression behavior. + pub fn serialize( + &self, + writer: &mut W, + compression: UseCompression + ) -> io::Result<()> + { + fn write_all( + writer: &mut W, + c: &[C], + compression: UseCompression + ) -> io::Result<()> + { + for c in c { + write_point(writer, c, compression)?; + } + + Ok(()) + } + + write_all(writer, &self.tau_powers_g1, compression)?; + write_all(writer, &self.tau_powers_g2, compression)?; + write_all(writer, &self.alpha_tau_powers_g1, compression)?; + write_all(writer, &self.beta_tau_powers_g1, compression)?; + write_all(writer, &[self.beta_g2], compression)?; + + Ok(()) + } + + /// Read the accumulator from disk with some compression behavior. `checked` + /// indicates whether we should check it's a valid element of the group and + /// not the point at infinity. 
+ pub fn deserialize( + reader: &mut R, + compression: UseCompression, + checked: CheckForCorrectness + ) -> Result + { + fn read_all( + reader: &mut R, + size: usize, + compression: UseCompression, + checked: CheckForCorrectness + ) -> Result, DeserializationError> + { + fn decompress_all( + reader: &mut R, + size: usize, + checked: CheckForCorrectness + ) -> Result, DeserializationError> + { + // Read the encoded elements + let mut res = vec![E::empty(); size]; + + for encoded in &mut res { + reader.read_exact(encoded.as_mut())?; + } + + // Allocate space for the deserialized elements + let mut res_affine = vec![E::Affine::zero(); size]; + + let mut chunk_size = res.len() / num_cpus::get(); + if chunk_size == 0 { + chunk_size = 1; + } + + // If any of our threads encounter a deserialization/IO error, catch + // it with this. + let decoding_error = Arc::new(Mutex::new(None)); + + crossbeam::scope(|scope| { + for (source, target) in res.chunks(chunk_size).zip(res_affine.chunks_mut(chunk_size)) { + let decoding_error = decoding_error.clone(); + + scope.spawn(move || { + for (source, target) in source.iter().zip(target.iter_mut()) { + match { + // If we're a participant, we don't need to check all of the + // elements in the accumulator, which saves a lot of time. + // The hash chain prevents this from being a problem: the + // transcript guarantees that the accumulator was properly + // formed. 
+ match checked { + CheckForCorrectness::Yes => { + // Points at infinity are never expected in the accumulator + source.into_affine().map_err(|e| e.into()).and_then(|source| { + if source.is_zero() { + Err(DeserializationError::PointAtInfinity) + } else { + Ok(source) + } + }) + }, + CheckForCorrectness::No => source.into_affine_unchecked().map_err(|e| e.into()) + } + } + { + Ok(source) => { + *target = source; + }, + Err(e) => { + *decoding_error.lock().unwrap() = Some(e); + } + } + } + }); + } + }); + + match Arc::try_unwrap(decoding_error).unwrap().into_inner().unwrap() { + Some(e) => { + Err(e) + }, + None => { + Ok(res_affine) + } + } + } + + match compression { + UseCompression::Yes => decompress_all::<_, C::Compressed>(reader, size, checked), + UseCompression::No => decompress_all::<_, C::Uncompressed>(reader, size, checked) + } + } + + let tau_powers_g1 = read_all(reader, TAU_POWERS_G1_LENGTH, compression, checked)?; + let tau_powers_g2 = read_all(reader, TAU_POWERS_LENGTH, compression, checked)?; + let alpha_tau_powers_g1 = read_all(reader, TAU_POWERS_LENGTH, compression, checked)?; + let beta_tau_powers_g1 = read_all(reader, TAU_POWERS_LENGTH, compression, checked)?; + let beta_g2 = read_all(reader, 1, compression, checked)?[0]; + + Ok(Accumulator { + tau_powers_g1: tau_powers_g1, + tau_powers_g2: tau_powers_g2, + alpha_tau_powers_g1: alpha_tau_powers_g1, + beta_tau_powers_g1: beta_tau_powers_g1, + beta_g2: beta_g2 + }) + } + + /// Transforms the accumulator with a private key. 
+ pub fn transform(&mut self, key: &PrivateKey) + { + // Construct the powers of tau + let mut taupowers = vec![Fr::zero(); TAU_POWERS_G1_LENGTH]; + let chunk_size = TAU_POWERS_G1_LENGTH / num_cpus::get(); + + // Construct exponents in parallel + crossbeam::scope(|scope| { + for (i, taupowers) in taupowers.chunks_mut(chunk_size).enumerate() { + scope.spawn(move || { + let mut acc = key.tau.pow(&[(i * chunk_size) as u64]); + + for t in taupowers { + *t = acc; + acc.mul_assign(&key.tau); + } + }); + } + }); + + /// Exponentiate a large number of points, with an optional coefficient to be applied to the + /// exponent. + fn batch_exp(bases: &mut [C], exp: &[C::Scalar], coeff: Option<&C::Scalar>) { + assert_eq!(bases.len(), exp.len()); + let mut projective = vec![C::Projective::zero(); bases.len()]; + let chunk_size = bases.len() / num_cpus::get(); + + // Perform wNAF over multiple cores, placing results into `projective`. + crossbeam::scope(|scope| { + for ((bases, exp), projective) in bases.chunks_mut(chunk_size) + .zip(exp.chunks(chunk_size)) + .zip(projective.chunks_mut(chunk_size)) + { + scope.spawn(move || { + let mut wnaf = Wnaf::new(); + + for ((base, exp), projective) in bases.iter_mut() + .zip(exp.iter()) + .zip(projective.iter_mut()) + { + let mut exp = *exp; + if let Some(coeff) = coeff { + exp.mul_assign(coeff); + } + + *projective = wnaf.base(base.into_projective(), 1).scalar(exp.into_repr()); + } + }); + } + }); + + // Perform batch normalization + crossbeam::scope(|scope| { + for projective in projective.chunks_mut(chunk_size) + { + scope.spawn(move || { + C::Projective::batch_normalization(projective); + }); + } + }); + + // Turn it all back into affine points + for (projective, affine) in projective.iter().zip(bases.iter_mut()) { + *affine = projective.into_affine(); + } + } + + batch_exp(&mut self.tau_powers_g1, &taupowers[0..], None); + batch_exp(&mut self.tau_powers_g2, &taupowers[0..TAU_POWERS_LENGTH], None); + batch_exp(&mut 
self.alpha_tau_powers_g1, &taupowers[0..TAU_POWERS_LENGTH], Some(&key.alpha)); + batch_exp(&mut self.beta_tau_powers_g1, &taupowers[0..TAU_POWERS_LENGTH], Some(&key.beta)); + self.beta_g2 = self.beta_g2.mul(key.beta).into_affine(); + } +} + +/// Verifies a transformation of the `Accumulator` with the `PublicKey`, given a 64-byte transcript `digest`. +pub fn verify_transform(before: &Accumulator, after: &Accumulator, key: &PublicKey, digest: &[u8]) -> bool +{ + assert_eq!(digest.len(), 64); + + let compute_g2_s = |g1_s: G1Affine, g1_s_x: G1Affine, personalization: u8| { + let mut h = Blake2b::default(); + h.input(&[personalization]); + h.input(digest); + h.input(g1_s.into_uncompressed().as_ref()); + h.input(g1_s_x.into_uncompressed().as_ref()); + hash_to_g2(h.result().as_ref()).into_affine() + }; + + let tau_g2_s = compute_g2_s(key.tau_g1.0, key.tau_g1.1, 0); + let alpha_g2_s = compute_g2_s(key.alpha_g1.0, key.alpha_g1.1, 1); + let beta_g2_s = compute_g2_s(key.beta_g1.0, key.beta_g1.1, 2); + + // Check the proofs-of-knowledge for tau/alpha/beta + if !same_ratio(key.tau_g1, (tau_g2_s, key.tau_g2)) { + return false; + } + if !same_ratio(key.alpha_g1, (alpha_g2_s, key.alpha_g2)) { + return false; + } + if !same_ratio(key.beta_g1, (beta_g2_s, key.beta_g2)) { + return false; + } + + // Check the correctness of the generators for tau powers + if after.tau_powers_g1[0] != G1Affine::one() { + return false; + } + if after.tau_powers_g2[0] != G2Affine::one() { + return false; + } + + // Did the participant multiply the previous tau by the new one? + if !same_ratio((before.tau_powers_g1[1], after.tau_powers_g1[1]), (tau_g2_s, key.tau_g2)) { + return false; + } + + // Did the participant multiply the previous alpha by the new one? + if !same_ratio((before.alpha_tau_powers_g1[0], after.alpha_tau_powers_g1[0]), (alpha_g2_s, key.alpha_g2)) { + return false; + } + + // Did the participant multiply the previous beta by the new one? 
+ if !same_ratio((before.beta_tau_powers_g1[0], after.beta_tau_powers_g1[0]), (beta_g2_s, key.beta_g2)) { + return false; + } + if !same_ratio((before.beta_tau_powers_g1[0], after.beta_tau_powers_g1[0]), (before.beta_g2, after.beta_g2)) { + return false; + } + + // Are the powers of tau correct? + if !same_ratio(power_pairs(&after.tau_powers_g1), (after.tau_powers_g2[0], after.tau_powers_g2[1])) { + return false; + } + if !same_ratio(power_pairs(&after.tau_powers_g2), (after.tau_powers_g1[0], after.tau_powers_g1[1])) { + return false; + } + if !same_ratio(power_pairs(&after.alpha_tau_powers_g1), (after.tau_powers_g2[0], after.tau_powers_g2[1])) { + return false; + } + if !same_ratio(power_pairs(&after.beta_tau_powers_g1), (after.tau_powers_g2[0], after.tau_powers_g2[1])) { + return false; + } + + true +} + +/// Computes a random linear combination over v1/v2. +/// +/// Checking that many pairs of elements are exponentiated by +/// the same `x` can be achieved (with high probability) with +/// the following technique: +/// +/// Given v1 = [a, b, c] and v2 = [as, bs, cs], compute +/// (a*r1 + b*r2 + c*r3, (as)*r1 + (bs)*r2 + (cs)*r3) for some +/// random r1, r2, r3. Given (g, g^s)... +/// +/// e(g, (as)*r1 + (bs)*r2 + (cs)*r3) = e(g^s, a*r1 + b*r2 + c*r3) +/// +/// ... with high probability. +fn merge_pairs(v1: &[G], v2: &[G]) -> (G, G) +{ + use std::sync::{Arc, Mutex}; + use rand::{thread_rng}; + + assert_eq!(v1.len(), v2.len()); + + let chunk = (v1.len() / num_cpus::get()) + 1; + + let s = Arc::new(Mutex::new(G::Projective::zero())); + let sx = Arc::new(Mutex::new(G::Projective::zero())); + + crossbeam::scope(|scope| { + for (v1, v2) in v1.chunks(chunk).zip(v2.chunks(chunk)) { + let s = s.clone(); + let sx = sx.clone(); + + scope.spawn(move || { + // We do not need to be overly cautious of the RNG + // used for this check. 
+ let rng = &mut thread_rng(); + + let mut wnaf = Wnaf::new(); + let mut local_s = G::Projective::zero(); + let mut local_sx = G::Projective::zero(); + + for (v1, v2) in v1.iter().zip(v2.iter()) { + let rho = G::Scalar::rand(rng); + let mut wnaf = wnaf.scalar(rho.into_repr()); + let v1 = wnaf.base(v1.into_projective()); + let v2 = wnaf.base(v2.into_projective()); + + local_s.add_assign(&v1); + local_sx.add_assign(&v2); + } + + s.lock().unwrap().add_assign(&local_s); + sx.lock().unwrap().add_assign(&local_sx); + }); + } + }); + + let s = s.lock().unwrap().into_affine(); + let sx = sx.lock().unwrap().into_affine(); + + (s, sx) +} + +/// Construct a single pair (s, s^x) for a vector of +/// the form [1, x, x^2, x^3, ...]. +fn power_pairs(v: &[G]) -> (G, G) +{ + merge_pairs(&v[0..(v.len()-1)], &v[1..]) +} + +#[test] +fn test_power_pairs() { + use rand::thread_rng; + + let rng = &mut thread_rng(); + + let mut v = vec![]; + let x = Fr::rand(rng); + let mut acc = Fr::one(); + for _ in 0..100 { + v.push(G1Affine::one().mul(acc).into_affine()); + acc.mul_assign(&x); + } + + let gx = G2Affine::one().mul(x).into_affine(); + + assert!(same_ratio(power_pairs(&v), (G2Affine::one(), gx))); + + v[1] = v[1].mul(Fr::rand(rng)).into_affine(); + + assert!(!same_ratio(power_pairs(&v), (G2Affine::one(), gx))); +} + +/// Checks if pairs have the same ratio. 
+fn same_ratio( + g1: (G1, G1), + g2: (G1::Pair, G1::Pair) +) -> bool +{ + g1.0.pairing_with(&g2.1) == g1.1.pairing_with(&g2.0) +} + +#[test] +fn test_same_ratio() { + use rand::thread_rng; + + let rng = &mut thread_rng(); + + let s = Fr::rand(rng); + let g1 = G1Affine::one(); + let g2 = G2Affine::one(); + let g1_s = g1.mul(s).into_affine(); + let g2_s = g2.mul(s).into_affine(); + + assert!(same_ratio((g1, g1_s), (g2, g2_s))); + assert!(!same_ratio((g1_s, g1), (g2, g2_s))); +} + +#[test] +fn test_accumulator_serialization() { + use rand::thread_rng; + + let rng = &mut thread_rng(); + let mut digest = (0..64).map(|_| rng.gen()).collect::>(); + + let mut acc = Accumulator::new(); + let before = acc.clone(); + let (pk, sk) = keypair(rng, &digest); + acc.transform(&sk); + assert!(verify_transform(&before, &acc, &pk, &digest)); + digest[0] = !digest[0]; + assert!(!verify_transform(&before, &acc, &pk, &digest)); + let mut v = Vec::with_capacity(ACCUMULATOR_BYTE_SIZE - 64); + acc.serialize(&mut v, UseCompression::No).unwrap(); + assert_eq!(v.len(), ACCUMULATOR_BYTE_SIZE - 64); + let deserialized = Accumulator::deserialize(&mut &v[..], UseCompression::No, CheckForCorrectness::No).unwrap(); + assert!(acc == deserialized); +} + +/// Compute BLAKE2b("") +pub fn blank_hash() -> GenericArray { + Blake2b::new().result() +} + +/// Abstraction over a reader which hashes the data being read. +pub struct HashReader { + reader: R, + hasher: Blake2b +} + +impl HashReader { + /// Construct a new `HashReader` given an existing `reader` by value. + pub fn new(reader: R) -> Self { + HashReader { + reader: reader, + hasher: Blake2b::default() + } + } + + /// Destroy this reader and return the hash of what was read. 
+ pub fn into_hash(self) -> GenericArray { + self.hasher.result() + } +} + +impl Read for HashReader { + fn read(&mut self, buf: &mut [u8]) -> io::Result { + let bytes = self.reader.read(buf)?; + + if bytes > 0 { + self.hasher.input(&buf[0..bytes]); + } + + Ok(bytes) + } +} + +/// Abstraction over a writer which hashes the data being written. +pub struct HashWriter { + writer: W, + hasher: Blake2b +} + +impl HashWriter { + /// Construct a new `HashWriter` given an existing `writer` by value. + pub fn new(writer: W) -> Self { + HashWriter { + writer: writer, + hasher: Blake2b::default() + } + } + + /// Destroy this writer and return the hash of what was written. + pub fn into_hash(self) -> GenericArray { + self.hasher.result() + } +} + +impl Write for HashWriter { + fn write(&mut self, buf: &[u8]) -> io::Result { + let bytes = self.writer.write(buf)?; + + if bytes > 0 { + self.hasher.input(&buf[0..bytes]); + } + + Ok(bytes) + } + + fn flush(&mut self) -> io::Result<()> { + self.writer.flush() + } +} diff --git a/src/bn256/mod.rs b/src/bn256/mod.rs new file mode 100644 index 0000000..b2fafa9 --- /dev/null +++ b/src/bn256/mod.rs @@ -0,0 +1,120 @@ +extern crate pairing; +extern crate rand; +extern crate crossbeam; +extern crate num_cpus; +extern crate blake2; +extern crate generic_array; +extern crate typenum; +extern crate byteorder; +extern crate ff; + +use self::ff::{Field, PrimeField}; +use self::byteorder::{ReadBytesExt, BigEndian}; +use self::rand::{SeedableRng, Rng, Rand}; +use self::rand::chacha::ChaChaRng; +use self::pairing::bn256::{Bn256}; +use self::pairing::*; +use std::io::{self, Read, Write}; +use std::sync::{Arc, Mutex}; +use self::generic_array::GenericArray; +use self::typenum::consts::U64; +use self::blake2::{Blake2b, Digest}; +use std::fmt; + +use crate::parameters::*; +use crate::keypair::*; +use crate::utils::*; + +#[derive(Clone)] +pub struct Bn256CeremonyParameters { + +} + +impl PowersOfTauParameters for Bn256CeremonyParameters { + const 
REQUIRED_POWER: usize = 26; // generate to have roughly 64 million constraints + + // This ceremony is based on the BN256 elliptic curve construction. + const G1_UNCOMPRESSED_BYTE_SIZE: usize = 64; + const G2_UNCOMPRESSED_BYTE_SIZE: usize = 128; + const G1_COMPRESSED_BYTE_SIZE: usize = 32; + const G2_COMPRESSED_BYTE_SIZE: usize = 64; +} + +#[test] +fn test_pubkey_serialization() { + use self::rand::thread_rng; + + let rng = &mut thread_rng(); + let digest = (0..64).map(|_| rng.gen()).collect::>(); + let (pk, _) = keypair::<_, Bn256>(rng, &digest); + let mut v = vec![]; + pk.serialize(&mut v).unwrap(); + assert_eq!(v.len(), Bn256CeremonyParameters::PUBLIC_KEY_SIZE); + let deserialized = PublicKey::::deserialize(&mut &v[..]).unwrap(); + assert!(pk == deserialized); +} + +#[test] +fn test_power_pairs() { + use self::rand::thread_rng; + use self::pairing::bn256::{Fr, G1Affine, G2Affine}; + let rng = &mut thread_rng(); + + let mut v = vec![]; + let x = Fr::rand(rng); + let mut acc = Fr::one(); + for _ in 0..100 { + v.push(G1Affine::one().mul(acc).into_affine()); + acc.mul_assign(&x); + } + + let gx = G2Affine::one().mul(x).into_affine(); + + assert!(same_ratio(power_pairs(&v), (G2Affine::one(), gx))); + + v[1] = v[1].mul(Fr::rand(rng)).into_affine(); + + assert!(!same_ratio(power_pairs(&v), (G2Affine::one(), gx))); +} + +#[test] +fn test_same_ratio() { + use self::rand::thread_rng; + use self::pairing::bn256::{Fr, G1Affine, G2Affine}; + + let rng = &mut thread_rng(); + + let s = Fr::rand(rng); + let g1 = G1Affine::one(); + let g2 = G2Affine::one(); + let g1_s = g1.mul(s).into_affine(); + let g2_s = g2.mul(s).into_affine(); + + assert!(same_ratio((g1, g1_s), (g2, g2_s))); + assert!(!same_ratio((g1_s, g1), (g2, g2_s))); +} + +#[test] +fn test_accumulator_serialization() { + use crate::accumulator::*; + + use self::rand::thread_rng; + use self::pairing::bn256::{Bn256, Fr, G1Affine, G2Affine}; + use self::PowersOfTauParameters; + + let rng = &mut thread_rng(); + let mut 
digest = (0..64).map(|_| rng.gen()).collect::>(); + let params = Bn256CeremonyParameters{}; + let mut acc = Accumulator::::new(params.clone()); + let before = acc.clone(); + let (pk, sk) = keypair::<_, Bn256>(rng, &digest); + acc.transform(&sk); + assert!(verify_transform(&before, &acc, &pk, &digest)); + digest[0] = !digest[0]; + assert!(!verify_transform(&before, &acc, &pk, &digest)); + let mut v = Vec::with_capacity(Bn256CeremonyParameters::ACCUMULATOR_BYTE_SIZE - 64); + acc.serialize(&mut v, UseCompression::No).unwrap(); + assert_eq!(v.len(), Bn256CeremonyParameters::ACCUMULATOR_BYTE_SIZE - 64); + let deserialized = Accumulator::deserialize(&mut &v[..], UseCompression::No, CheckForCorrectness::No, params).unwrap(); + assert!(acc == deserialized); +} \ No newline at end of file diff --git a/src/keypair.rs b/src/keypair.rs new file mode 100644 index 0000000..a49591d --- /dev/null +++ b/src/keypair.rs @@ -0,0 +1,307 @@ +extern crate pairing; +extern crate rand; +extern crate crossbeam; +extern crate num_cpus; +extern crate blake2; +extern crate generic_array; +extern crate typenum; +extern crate byteorder; +extern crate ff; +extern crate memmap; +extern crate itertools; + +use itertools::Itertools; +use memmap::{Mmap, MmapMut}; +use self::ff::{Field, PrimeField}; +use self::byteorder::{ReadBytesExt, BigEndian}; +use self::rand::{SeedableRng, Rng, Rand}; +use self::rand::chacha::ChaChaRng; +use self::pairing::bn256::{Bn256}; +use self::pairing::*; +use std::io::{self, Read, Write}; +use std::sync::{Arc, Mutex}; +use self::generic_array::GenericArray; +use self::typenum::consts::U64; +use self::blake2::{Blake2b, Digest}; +use std::fmt; + +use super::utils::*; +use super::parameters::*; + +/// Contains terms of the form (s1, s1x, H(s1x)2, H(s1x)2x) +/// for all x in τ, α and β, and some s chosen randomly by its creator. The function H "hashes into" the group G2. No points in the public key may be the identity. 
+/// +/// The elements in G2 are used to verify transformations of the accumulator. By its nature, the public key proves +/// knowledge of τ, α and β. +/// +/// It is necessary to verify `same_ratio`((s1, s1x), (H(s1x)2, H(s1x)2x)). +#[derive(Eq)] +pub struct PublicKey { + pub tau_g1: (E::G1Affine, E::G1Affine), + pub alpha_g1: (E::G1Affine, E::G1Affine), + pub beta_g1: (E::G1Affine, E::G1Affine), + pub tau_g2: E::G2Affine, + pub alpha_g2: E::G2Affine, + pub beta_g2: E::G2Affine +} + +impl PartialEq for PublicKey { + fn eq(&self, other: &PublicKey) -> bool { + self.tau_g1.0 == other.tau_g1.0 && + self.tau_g1.1 == other.tau_g1.1 && + self.alpha_g1.0 == other.alpha_g1.0 && + self.alpha_g1.1 == other.alpha_g1.1 && + self.beta_g1.0 == other.beta_g1.0 && + self.beta_g1.1 == other.beta_g1.1 && + self.tau_g2 == other.tau_g2 && + self.alpha_g2 == other.alpha_g2 && + self.beta_g2 == other.beta_g2 + } +} + +/// Contains the secrets τ, α and β that the participant of the ceremony must destroy. +pub struct PrivateKey { + pub tau: E::Fr, + pub alpha: E::Fr, + pub beta: E::Fr +} + +/// Constructs a keypair given an RNG and a 64-byte transcript `digest`. 
+pub fn keypair(rng: &mut R, digest: &[u8]) -> (PublicKey, PrivateKey) +{ + assert_eq!(digest.len(), 64); + + // tau is a conribution to the "powers of tau", in a set of points of the form "tau^i * G" + let tau = E::Fr::rand(rng); + // alpha and beta are a set of conrtibuitons in a form "alpha * tau^i * G" and that are required + // for construction of the polynomials + let alpha = E::Fr::rand(rng); + let beta = E::Fr::rand(rng); + + let mut op = |x: E::Fr, personalization: u8| { + // Sample random g^s + let g1_s = E::G1::rand(rng).into_affine(); + // Compute g^{s*x} + let g1_s_x = g1_s.mul(x).into_affine(); + // Compute BLAKE2b(personalization | transcript | g^s | g^{s*x}) + let h: generic_array::GenericArray = { + let mut h = Blake2b::default(); + h.input(&[personalization]); + h.input(digest); + h.input(g1_s.into_uncompressed().as_ref()); + h.input(g1_s_x.into_uncompressed().as_ref()); + h.result() + }; + // Hash into G2 as g^{s'} + let g2_s: E::G2Affine = hash_to_g2::(h.as_ref()).into_affine(); + // Compute g^{s'*x} + let g2_s_x = g2_s.mul(x).into_affine(); + + ((g1_s, g1_s_x), g2_s_x) + }; + + // these "public keys" are requried for for next participants to check that points are in fact + // sequential powers + let pk_tau = op(tau, 0); + let pk_alpha = op(alpha, 1); + let pk_beta = op(beta, 2); + + ( + PublicKey { + tau_g1: pk_tau.0, + alpha_g1: pk_alpha.0, + beta_g1: pk_beta.0, + tau_g2: pk_tau.1, + alpha_g2: pk_alpha.1, + beta_g2: pk_beta.1, + }, + PrivateKey { + tau: tau, + alpha: alpha, + beta: beta + } + ) +} + +impl PublicKey { + /// Serialize the public key. Points are always in uncompressed form. 
+ pub fn serialize(&self, writer: &mut W) -> io::Result<()> + { + write_point(writer, &self.tau_g1.0, UseCompression::No)?; + write_point(writer, &self.tau_g1.1, UseCompression::No)?; + + write_point(writer, &self.alpha_g1.0, UseCompression::No)?; + write_point(writer, &self.alpha_g1.1, UseCompression::No)?; + + write_point(writer, &self.beta_g1.0, UseCompression::No)?; + write_point(writer, &self.beta_g1.1, UseCompression::No)?; + + write_point(writer, &self.tau_g2, UseCompression::No)?; + write_point(writer, &self.alpha_g2, UseCompression::No)?; + write_point(writer, &self.beta_g2, UseCompression::No)?; + + Ok(()) + } + + /// Deserialize the public key. Points are always in uncompressed form, and + /// always checked, since there aren't very many of them. Does not allow any + /// points at infinity. + pub fn deserialize(reader: &mut R) -> Result, DeserializationError> + { + fn read_uncompressed, R: Read>(reader: &mut R) -> Result { + let mut repr = C::Uncompressed::empty(); + reader.read_exact(repr.as_mut())?; + let v = repr.into_affine()?; + + if v.is_zero() { + Err(DeserializationError::PointAtInfinity) + } else { + Ok(v) + } + } + + let tau_g1_s = read_uncompressed::(reader)?; + let tau_g1_s_tau = read_uncompressed::(reader)?; + + let alpha_g1_s = read_uncompressed::(reader)?; + let alpha_g1_s_alpha = read_uncompressed::(reader)?; + + let beta_g1_s = read_uncompressed::(reader)?; + let beta_g1_s_beta = read_uncompressed::(reader)?; + + let tau_g2 = read_uncompressed::(reader)?; + let alpha_g2 = read_uncompressed::(reader)?; + let beta_g2 = read_uncompressed::(reader)?; + + Ok(PublicKey { + tau_g1: (tau_g1_s, tau_g1_s_tau), + alpha_g1: (alpha_g1_s, alpha_g1_s_alpha), + beta_g1: (beta_g1_s, beta_g1_s_beta), + tau_g2: tau_g2, + alpha_g2: alpha_g2, + beta_g2: beta_g2 + }) + } +} + +impl PublicKey { + + /// This function is intended to write the key to the memory map and calculates + /// a position for writing into the file itself based on information whether + /// 
contribution was output in compressed on uncompressed form + pub fn write

( + &self, + output_map: &mut MmapMut, + accumulator_was_compressed: UseCompression + ) + -> io::Result<()> + where P: PowersOfTauParameters + { + let mut position = match accumulator_was_compressed { + UseCompression::Yes => { + P::CONTRIBUTION_BYTE_SIZE - P::PUBLIC_KEY_SIZE + }, + UseCompression::No => { + P::ACCUMULATOR_BYTE_SIZE + } + }; + + (&mut output_map[position..]).write(&self.tau_g1.0.into_uncompressed().as_ref())?; + position += P::G1_UNCOMPRESSED_BYTE_SIZE; + + (&mut output_map[position..]).write(&self.tau_g1.1.into_uncompressed().as_ref())?; + position += P::G1_UNCOMPRESSED_BYTE_SIZE; + + (&mut output_map[position..]).write(&self.alpha_g1.0.into_uncompressed().as_ref())?; + position += P::G1_UNCOMPRESSED_BYTE_SIZE; + + (&mut output_map[position..]).write(&self.alpha_g1.1.into_uncompressed().as_ref())?; + position += P::G1_UNCOMPRESSED_BYTE_SIZE; + + (&mut output_map[position..]).write(&self.beta_g1.0.into_uncompressed().as_ref())?; + position += P::G1_UNCOMPRESSED_BYTE_SIZE; + + (&mut output_map[position..]).write(&self.beta_g1.1.into_uncompressed().as_ref())?; + position += P::G1_UNCOMPRESSED_BYTE_SIZE; + + (&mut output_map[position..]).write(&self.tau_g2.into_uncompressed().as_ref())?; + position += P::G2_UNCOMPRESSED_BYTE_SIZE; + + (&mut output_map[position..]).write(&self.alpha_g2.into_uncompressed().as_ref())?; + position += P::G2_UNCOMPRESSED_BYTE_SIZE; + + (&mut output_map[position..]).write(&self.beta_g2.into_uncompressed().as_ref())?; + + output_map.flush()?; + + Ok(()) + } + + /// Deserialize the public key. Points are always in uncompressed form, and + /// always checked, since there aren't very many of them. Does not allow any + /// points at infinity. + pub fn read

( + input_map: &Mmap, + accumulator_was_compressed: UseCompression + ) -> Result + where P: PowersOfTauParameters + { + fn read_uncompressed>(input_map: &Mmap, position: usize) -> Result { + let mut repr = C::Uncompressed::empty(); + let element_size = C::Uncompressed::size(); + let memory_slice = input_map.get(position..position+element_size).expect("must read point data from file"); + memory_slice.clone().read_exact(repr.as_mut())?; + let v = repr.into_affine()?; + + if v.is_zero() { + Err(DeserializationError::PointAtInfinity) + } else { + Ok(v) + } + } + + let mut position = match accumulator_was_compressed { + UseCompression::Yes => { + P::CONTRIBUTION_BYTE_SIZE - P::PUBLIC_KEY_SIZE + }, + UseCompression::No => { + P::ACCUMULATOR_BYTE_SIZE + } + }; + + let tau_g1_s = read_uncompressed::(input_map, position)?; + position += P::G1_UNCOMPRESSED_BYTE_SIZE; + + let tau_g1_s_tau = read_uncompressed::(input_map, position)?; + position += P::G1_UNCOMPRESSED_BYTE_SIZE; + + let alpha_g1_s = read_uncompressed::(input_map, position)?; + position += P::G1_UNCOMPRESSED_BYTE_SIZE; + + let alpha_g1_s_alpha = read_uncompressed::(input_map, position)?; + position += P::G1_UNCOMPRESSED_BYTE_SIZE; + + let beta_g1_s = read_uncompressed::(input_map, position)?; + position += P::G1_UNCOMPRESSED_BYTE_SIZE; + + let beta_g1_s_beta = read_uncompressed::(input_map, position)?; + position += P::G1_UNCOMPRESSED_BYTE_SIZE; + + let tau_g2 = read_uncompressed::(input_map, position)?; + position += P::G2_UNCOMPRESSED_BYTE_SIZE; + + let alpha_g2 = read_uncompressed::(input_map, position)?; + position += P::G2_UNCOMPRESSED_BYTE_SIZE; + + let beta_g2 = read_uncompressed::(input_map, position)?; + + Ok(PublicKey { + tau_g1: (tau_g1_s, tau_g1_s_tau), + alpha_g1: (alpha_g1_s, alpha_g1_s_alpha), + beta_g1: (beta_g1_s, beta_g1_s_beta), + tau_g2: tau_g2, + alpha_g2: alpha_g2, + beta_g2: beta_g2 + }) + } +} diff --git a/src/lib.rs b/src/lib.rs index 22c82d8..d3ff170 100644 --- a/src/lib.rs +++ 
b/src/lib.rs @@ -1,859 +1,10 @@ -//! This ceremony constructs the "powers of tau" for Jens Groth's 2016 zk-SNARK proving -//! system using the BLS12-381 pairing-friendly elliptic curve construction. -//! -//! # Overview -//! -//! Participants of the ceremony receive a "challenge" file containing: -//! -//! * the BLAKE2b hash of the last file entered into the transcript -//! * an `Accumulator` (with curve points encoded in uncompressed form for fast deserialization) -//! -//! The participant runs a tool which generates a random keypair (`PublicKey`, `PrivateKey`) -//! used for modifying the `Accumulator` from the "challenge" file. The keypair is then used to -//! transform the `Accumulator`, and a "response" file is generated containing: -//! -//! * the BLAKE2b hash of the "challenge" file (thus forming a hash chain over the entire transcript) -//! * an `Accumulator` (with curve points encoded in compressed form for fast uploading) -//! * the `PublicKey` -//! -//! This "challenge" file is entered into the protocol transcript. A given transcript is valid -//! if the transformations between consecutive `Accumulator`s verify with their respective -//! `PublicKey`s. Participants (and the public) can ensure that their contribution to the -//! `Accumulator` was accepted by ensuring the transcript contains their "response" file, ideally -//! by comparison of the BLAKE2b hash of the "response" file. -//! -//! After some time has elapsed for participants to contribute to the ceremony, a participant is -//! simulated with a randomness beacon. The resulting `Accumulator` contains partial zk-SNARK -//! public parameters for all circuits within a bounded size. 
- -extern crate pairing; -extern crate rand; -extern crate crossbeam; -extern crate num_cpus; -extern crate blake2; -extern crate generic_array; -extern crate typenum; -extern crate byteorder; - -use byteorder::{ReadBytesExt, BigEndian}; -use rand::{SeedableRng, Rng, Rand}; -use rand::chacha::ChaChaRng; -use pairing::bls12_381::*; -use pairing::*; -use std::io::{self, Read, Write}; -use std::sync::{Arc, Mutex}; -use generic_array::GenericArray; -use typenum::consts::U64; -use blake2::{Blake2b, Digest}; -use std::fmt; - -// This ceremony is based on the BLS12-381 elliptic curve construction. -const G1_UNCOMPRESSED_BYTE_SIZE: usize = 96; -const G2_UNCOMPRESSED_BYTE_SIZE: usize = 192; -const G1_COMPRESSED_BYTE_SIZE: usize = 48; -const G2_COMPRESSED_BYTE_SIZE: usize = 96; - -/// The accumulator supports circuits with 2^21 multiplication gates. -const TAU_POWERS_LENGTH: usize = (1 << 21); - -/// More tau powers are needed in G1 because the Groth16 H query -/// includes terms of the form tau^i * (tau^m - 1) = tau^(i+m) - tau^i -/// where the largest i = m - 2, requiring the computation of tau^(2m - 2) -/// and thus giving us a vector length of 2^22 - 1. -const TAU_POWERS_G1_LENGTH: usize = (TAU_POWERS_LENGTH << 1) - 1; - -/// The size of the accumulator on disk. -pub const ACCUMULATOR_BYTE_SIZE: usize = (TAU_POWERS_G1_LENGTH * G1_UNCOMPRESSED_BYTE_SIZE) + // g1 tau powers - (TAU_POWERS_LENGTH * G2_UNCOMPRESSED_BYTE_SIZE) + // g2 tau powers - (TAU_POWERS_LENGTH * G1_UNCOMPRESSED_BYTE_SIZE) + // alpha tau powers - (TAU_POWERS_LENGTH * G1_UNCOMPRESSED_BYTE_SIZE) // beta tau powers - + G2_UNCOMPRESSED_BYTE_SIZE // beta in g2 - + 64; // blake2b hash of previous contribution - -/// The "public key" is used to verify a contribution was correctly -/// computed. 
-pub const PUBLIC_KEY_SIZE: usize = 3 * G2_UNCOMPRESSED_BYTE_SIZE + // tau, alpha, and beta in g2 - 6 * G1_UNCOMPRESSED_BYTE_SIZE; // (s1, s1*tau), (s2, s2*alpha), (s3, s3*beta) in g1 - -/// The size of the contribution on disk. -pub const CONTRIBUTION_BYTE_SIZE: usize = (TAU_POWERS_G1_LENGTH * G1_COMPRESSED_BYTE_SIZE) + // g1 tau powers - (TAU_POWERS_LENGTH * G2_COMPRESSED_BYTE_SIZE) + // g2 tau powers - (TAU_POWERS_LENGTH * G1_COMPRESSED_BYTE_SIZE) + // alpha tau powers - (TAU_POWERS_LENGTH * G1_COMPRESSED_BYTE_SIZE) // beta tau powers - + G2_COMPRESSED_BYTE_SIZE // beta in g2 - + 64 // blake2b hash of input accumulator - + PUBLIC_KEY_SIZE; // public key - -/// Hashes to G2 using the first 32 bytes of `digest`. Panics if `digest` is less -/// than 32 bytes. -fn hash_to_g2(mut digest: &[u8]) -> G2 -{ - assert!(digest.len() >= 32); - - let mut seed = Vec::with_capacity(8); - - for _ in 0..8 { - seed.push(digest.read_u32::().expect("assertion above guarantees this to work")); - } - - ChaChaRng::from_seed(&seed).gen() -} - -#[test] -fn test_hash_to_g2() { - assert!( - hash_to_g2(&[1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33]) - == - hash_to_g2(&[1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,34]) - ); - - assert!( - hash_to_g2(&[1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32]) - != - hash_to_g2(&[1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,33]) - ); -} - -/// Contains terms of the form (s1, s1x, H(s1x)2, H(s1x)2x) -/// for all x in τ, α and β, and some s chosen randomly by its creator. The function H "hashes into" the group G2. No points in the public key may be the identity. -/// -/// The elements in G2 are used to verify transformations of the accumulator. By its nature, the public key proves -/// knowledge of τ, α and β. -/// -/// It is necessary to verify `same_ratio`((s1, s1x), (H(s1x)2, H(s1x)2x)). 
-#[derive(PartialEq, Eq)] -pub struct PublicKey { - tau_g1: (G1Affine, G1Affine), - alpha_g1: (G1Affine, G1Affine), - beta_g1: (G1Affine, G1Affine), - tau_g2: G2Affine, - alpha_g2: G2Affine, - beta_g2: G2Affine -} - -/// Contains the secrets τ, α and β that the participant of the ceremony must destroy. -pub struct PrivateKey { - tau: Fr, - alpha: Fr, - beta: Fr -} - -/// Constructs a keypair given an RNG and a 64-byte transcript `digest`. -pub fn keypair(rng: &mut R, digest: &[u8]) -> (PublicKey, PrivateKey) -{ - assert_eq!(digest.len(), 64); - - let tau = Fr::rand(rng); - let alpha = Fr::rand(rng); - let beta = Fr::rand(rng); - - let mut op = |x, personalization: u8| { - // Sample random g^s - let g1_s = G1::rand(rng).into_affine(); - // Compute g^{s*x} - let g1_s_x = g1_s.mul(x).into_affine(); - // Compute BLAKE2b(personalization | transcript | g^s | g^{s*x}) - let h = { - let mut h = Blake2b::default(); - h.input(&[personalization]); - h.input(digest); - h.input(g1_s.into_uncompressed().as_ref()); - h.input(g1_s_x.into_uncompressed().as_ref()); - h.result() - }; - // Hash into G2 as g^{s'} - let g2_s = hash_to_g2(h.as_ref()).into_affine(); - // Compute g^{s'*x} - let g2_s_x = g2_s.mul(x).into_affine(); - - ((g1_s, g1_s_x), g2_s_x) - }; - - let pk_tau = op(tau, 0); - let pk_alpha = op(alpha, 1); - let pk_beta = op(beta, 2); - - ( - PublicKey { - tau_g1: pk_tau.0, - alpha_g1: pk_alpha.0, - beta_g1: pk_beta.0, - tau_g2: pk_tau.1, - alpha_g2: pk_alpha.1, - beta_g2: pk_beta.1, - }, - PrivateKey { - tau: tau, - alpha: alpha, - beta: beta - } - ) -} - -/// Determines if point compression should be used. -#[derive(Copy, Clone)] -pub enum UseCompression { - Yes, - No -} - -/// Determines if points should be checked for correctness during deserialization. -/// This is not necessary for participants, because a transcript verifier can -/// check this theirself. 
-#[derive(Copy, Clone)] -pub enum CheckForCorrectness { - Yes, - No -} - -fn write_point( - writer: &mut W, - p: &G, - compression: UseCompression -) -> io::Result<()> - where W: Write, - G: CurveAffine -{ - match compression { - UseCompression::Yes => writer.write_all(p.into_compressed().as_ref()), - UseCompression::No => writer.write_all(p.into_uncompressed().as_ref()), - } -} - -/// Errors that might occur during deserialization. -#[derive(Debug)] -pub enum DeserializationError { - IoError(io::Error), - DecodingError(GroupDecodingError), - PointAtInfinity -} - -impl fmt::Display for DeserializationError { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - DeserializationError::IoError(ref e) => write!(f, "Disk IO error: {}", e), - DeserializationError::DecodingError(ref e) => write!(f, "Decoding error: {}", e), - DeserializationError::PointAtInfinity => write!(f, "Point at infinity found") - } - } -} - -impl From for DeserializationError { - fn from(err: io::Error) -> DeserializationError { - DeserializationError::IoError(err) - } -} - -impl From for DeserializationError { - fn from(err: GroupDecodingError) -> DeserializationError { - DeserializationError::DecodingError(err) - } -} - -impl PublicKey { - /// Serialize the public key. Points are always in uncompressed form. - pub fn serialize(&self, writer: &mut W) -> io::Result<()> - { - write_point(writer, &self.tau_g1.0, UseCompression::No)?; - write_point(writer, &self.tau_g1.1, UseCompression::No)?; - - write_point(writer, &self.alpha_g1.0, UseCompression::No)?; - write_point(writer, &self.alpha_g1.1, UseCompression::No)?; - - write_point(writer, &self.beta_g1.0, UseCompression::No)?; - write_point(writer, &self.beta_g1.1, UseCompression::No)?; - - write_point(writer, &self.tau_g2, UseCompression::No)?; - write_point(writer, &self.alpha_g2, UseCompression::No)?; - write_point(writer, &self.beta_g2, UseCompression::No)?; - - Ok(()) - } - - /// Deserialize the public key. 
Points are always in uncompressed form, and - /// always checked, since there aren't very many of them. Does not allow any - /// points at infinity. - pub fn deserialize(reader: &mut R) -> Result - { - fn read_uncompressed(reader: &mut R) -> Result { - let mut repr = C::Uncompressed::empty(); - reader.read_exact(repr.as_mut())?; - let v = repr.into_affine()?; - - if v.is_zero() { - Err(DeserializationError::PointAtInfinity) - } else { - Ok(v) - } - } - - let tau_g1_s = read_uncompressed(reader)?; - let tau_g1_s_tau = read_uncompressed(reader)?; - - let alpha_g1_s = read_uncompressed(reader)?; - let alpha_g1_s_alpha = read_uncompressed(reader)?; - - let beta_g1_s = read_uncompressed(reader)?; - let beta_g1_s_beta = read_uncompressed(reader)?; - - let tau_g2 = read_uncompressed(reader)?; - let alpha_g2 = read_uncompressed(reader)?; - let beta_g2 = read_uncompressed(reader)?; - - Ok(PublicKey { - tau_g1: (tau_g1_s, tau_g1_s_tau), - alpha_g1: (alpha_g1_s, alpha_g1_s_alpha), - beta_g1: (beta_g1_s, beta_g1_s_beta), - tau_g2: tau_g2, - alpha_g2: alpha_g2, - beta_g2: beta_g2 - }) - } -} - -#[test] -fn test_pubkey_serialization() { - use rand::thread_rng; - - let rng = &mut thread_rng(); - let digest = (0..64).map(|_| rng.gen()).collect::>(); - let (pk, _) = keypair(rng, &digest); - let mut v = vec![]; - pk.serialize(&mut v).unwrap(); - assert_eq!(v.len(), PUBLIC_KEY_SIZE); - let deserialized = PublicKey::deserialize(&mut &v[..]).unwrap(); - assert!(pk == deserialized); -} - -/// The `Accumulator` is an object that participants of the ceremony contribute -/// randomness to. This object contains powers of trapdoor `tau` in G1 and in G2 over -/// fixed generators, and additionally in G1 over two other generators of exponents -/// `alpha` and `beta` over those fixed generators. 
In other words: -/// -/// * (τ, τ2, ..., τ222 - 2, α, ατ, ατ2, ..., ατ221 - 1, β, βτ, βτ2, ..., βτ221 - 1)1 -/// * (β, τ, τ2, ..., τ221 - 1)2 -#[derive(PartialEq, Eq, Clone)] -pub struct Accumulator { - /// tau^0, tau^1, tau^2, ..., tau^{TAU_POWERS_G1_LENGTH - 1} - pub tau_powers_g1: Vec, - /// tau^0, tau^1, tau^2, ..., tau^{TAU_POWERS_LENGTH - 1} - pub tau_powers_g2: Vec, - /// alpha * tau^0, alpha * tau^1, alpha * tau^2, ..., alpha * tau^{TAU_POWERS_LENGTH - 1} - pub alpha_tau_powers_g1: Vec, - /// beta * tau^0, beta * tau^1, beta * tau^2, ..., beta * tau^{TAU_POWERS_LENGTH - 1} - pub beta_tau_powers_g1: Vec, - /// beta - pub beta_g2: G2Affine -} - -impl Accumulator { - /// Constructs an "initial" accumulator with τ = 1, α = 1, β = 1. - pub fn new() -> Self { - Accumulator { - tau_powers_g1: vec![G1Affine::one(); TAU_POWERS_G1_LENGTH], - tau_powers_g2: vec![G2Affine::one(); TAU_POWERS_LENGTH], - alpha_tau_powers_g1: vec![G1Affine::one(); TAU_POWERS_LENGTH], - beta_tau_powers_g1: vec![G1Affine::one(); TAU_POWERS_LENGTH], - beta_g2: G2Affine::one() - } - } - - /// Write the accumulator with some compression behavior. - pub fn serialize( - &self, - writer: &mut W, - compression: UseCompression - ) -> io::Result<()> - { - fn write_all( - writer: &mut W, - c: &[C], - compression: UseCompression - ) -> io::Result<()> - { - for c in c { - write_point(writer, c, compression)?; - } - - Ok(()) - } - - write_all(writer, &self.tau_powers_g1, compression)?; - write_all(writer, &self.tau_powers_g2, compression)?; - write_all(writer, &self.alpha_tau_powers_g1, compression)?; - write_all(writer, &self.beta_tau_powers_g1, compression)?; - write_all(writer, &[self.beta_g2], compression)?; - - Ok(()) - } - - /// Read the accumulator from disk with some compression behavior. `checked` - /// indicates whether we should check it's a valid element of the group and - /// not the point at infinity. 
- pub fn deserialize( - reader: &mut R, - compression: UseCompression, - checked: CheckForCorrectness - ) -> Result - { - fn read_all( - reader: &mut R, - size: usize, - compression: UseCompression, - checked: CheckForCorrectness - ) -> Result, DeserializationError> - { - fn decompress_all( - reader: &mut R, - size: usize, - checked: CheckForCorrectness - ) -> Result, DeserializationError> - { - // Read the encoded elements - let mut res = vec![E::empty(); size]; - - for encoded in &mut res { - reader.read_exact(encoded.as_mut())?; - } - - // Allocate space for the deserialized elements - let mut res_affine = vec![E::Affine::zero(); size]; - - let mut chunk_size = res.len() / num_cpus::get(); - if chunk_size == 0 { - chunk_size = 1; - } - - // If any of our threads encounter a deserialization/IO error, catch - // it with this. - let decoding_error = Arc::new(Mutex::new(None)); - - crossbeam::scope(|scope| { - for (source, target) in res.chunks(chunk_size).zip(res_affine.chunks_mut(chunk_size)) { - let decoding_error = decoding_error.clone(); - - scope.spawn(move || { - for (source, target) in source.iter().zip(target.iter_mut()) { - match { - // If we're a participant, we don't need to check all of the - // elements in the accumulator, which saves a lot of time. - // The hash chain prevents this from being a problem: the - // transcript guarantees that the accumulator was properly - // formed. 
- match checked { - CheckForCorrectness::Yes => { - // Points at infinity are never expected in the accumulator - source.into_affine().map_err(|e| e.into()).and_then(|source| { - if source.is_zero() { - Err(DeserializationError::PointAtInfinity) - } else { - Ok(source) - } - }) - }, - CheckForCorrectness::No => source.into_affine_unchecked().map_err(|e| e.into()) - } - } - { - Ok(source) => { - *target = source; - }, - Err(e) => { - *decoding_error.lock().unwrap() = Some(e); - } - } - } - }); - } - }); - - match Arc::try_unwrap(decoding_error).unwrap().into_inner().unwrap() { - Some(e) => { - Err(e) - }, - None => { - Ok(res_affine) - } - } - } - - match compression { - UseCompression::Yes => decompress_all::<_, C::Compressed>(reader, size, checked), - UseCompression::No => decompress_all::<_, C::Uncompressed>(reader, size, checked) - } - } - - let tau_powers_g1 = read_all(reader, TAU_POWERS_G1_LENGTH, compression, checked)?; - let tau_powers_g2 = read_all(reader, TAU_POWERS_LENGTH, compression, checked)?; - let alpha_tau_powers_g1 = read_all(reader, TAU_POWERS_LENGTH, compression, checked)?; - let beta_tau_powers_g1 = read_all(reader, TAU_POWERS_LENGTH, compression, checked)?; - let beta_g2 = read_all(reader, 1, compression, checked)?[0]; - - Ok(Accumulator { - tau_powers_g1: tau_powers_g1, - tau_powers_g2: tau_powers_g2, - alpha_tau_powers_g1: alpha_tau_powers_g1, - beta_tau_powers_g1: beta_tau_powers_g1, - beta_g2: beta_g2 - }) - } - - /// Transforms the accumulator with a private key. 
- pub fn transform(&mut self, key: &PrivateKey) - { - // Construct the powers of tau - let mut taupowers = vec![Fr::zero(); TAU_POWERS_G1_LENGTH]; - let chunk_size = TAU_POWERS_G1_LENGTH / num_cpus::get(); - - // Construct exponents in parallel - crossbeam::scope(|scope| { - for (i, taupowers) in taupowers.chunks_mut(chunk_size).enumerate() { - scope.spawn(move || { - let mut acc = key.tau.pow(&[(i * chunk_size) as u64]); - - for t in taupowers { - *t = acc; - acc.mul_assign(&key.tau); - } - }); - } - }); - - /// Exponentiate a large number of points, with an optional coefficient to be applied to the - /// exponent. - fn batch_exp(bases: &mut [C], exp: &[C::Scalar], coeff: Option<&C::Scalar>) { - assert_eq!(bases.len(), exp.len()); - let mut projective = vec![C::Projective::zero(); bases.len()]; - let chunk_size = bases.len() / num_cpus::get(); - - // Perform wNAF over multiple cores, placing results into `projective`. - crossbeam::scope(|scope| { - for ((bases, exp), projective) in bases.chunks_mut(chunk_size) - .zip(exp.chunks(chunk_size)) - .zip(projective.chunks_mut(chunk_size)) - { - scope.spawn(move || { - let mut wnaf = Wnaf::new(); - - for ((base, exp), projective) in bases.iter_mut() - .zip(exp.iter()) - .zip(projective.iter_mut()) - { - let mut exp = *exp; - if let Some(coeff) = coeff { - exp.mul_assign(coeff); - } - - *projective = wnaf.base(base.into_projective(), 1).scalar(exp.into_repr()); - } - }); - } - }); - - // Perform batch normalization - crossbeam::scope(|scope| { - for projective in projective.chunks_mut(chunk_size) - { - scope.spawn(move || { - C::Projective::batch_normalization(projective); - }); - } - }); - - // Turn it all back into affine points - for (projective, affine) in projective.iter().zip(bases.iter_mut()) { - *affine = projective.into_affine(); - } - } - - batch_exp(&mut self.tau_powers_g1, &taupowers[0..], None); - batch_exp(&mut self.tau_powers_g2, &taupowers[0..TAU_POWERS_LENGTH], None); - batch_exp(&mut 
self.alpha_tau_powers_g1, &taupowers[0..TAU_POWERS_LENGTH], Some(&key.alpha)); - batch_exp(&mut self.beta_tau_powers_g1, &taupowers[0..TAU_POWERS_LENGTH], Some(&key.beta)); - self.beta_g2 = self.beta_g2.mul(key.beta).into_affine(); - } -} - -/// Verifies a transformation of the `Accumulator` with the `PublicKey`, given a 64-byte transcript `digest`. -pub fn verify_transform(before: &Accumulator, after: &Accumulator, key: &PublicKey, digest: &[u8]) -> bool -{ - assert_eq!(digest.len(), 64); - - let compute_g2_s = |g1_s: G1Affine, g1_s_x: G1Affine, personalization: u8| { - let mut h = Blake2b::default(); - h.input(&[personalization]); - h.input(digest); - h.input(g1_s.into_uncompressed().as_ref()); - h.input(g1_s_x.into_uncompressed().as_ref()); - hash_to_g2(h.result().as_ref()).into_affine() - }; - - let tau_g2_s = compute_g2_s(key.tau_g1.0, key.tau_g1.1, 0); - let alpha_g2_s = compute_g2_s(key.alpha_g1.0, key.alpha_g1.1, 1); - let beta_g2_s = compute_g2_s(key.beta_g1.0, key.beta_g1.1, 2); - - // Check the proofs-of-knowledge for tau/alpha/beta - if !same_ratio(key.tau_g1, (tau_g2_s, key.tau_g2)) { - return false; - } - if !same_ratio(key.alpha_g1, (alpha_g2_s, key.alpha_g2)) { - return false; - } - if !same_ratio(key.beta_g1, (beta_g2_s, key.beta_g2)) { - return false; - } - - // Check the correctness of the generators for tau powers - if after.tau_powers_g1[0] != G1Affine::one() { - return false; - } - if after.tau_powers_g2[0] != G2Affine::one() { - return false; - } - - // Did the participant multiply the previous tau by the new one? - if !same_ratio((before.tau_powers_g1[1], after.tau_powers_g1[1]), (tau_g2_s, key.tau_g2)) { - return false; - } - - // Did the participant multiply the previous alpha by the new one? - if !same_ratio((before.alpha_tau_powers_g1[0], after.alpha_tau_powers_g1[0]), (alpha_g2_s, key.alpha_g2)) { - return false; - } - - // Did the participant multiply the previous beta by the new one? 
- if !same_ratio((before.beta_tau_powers_g1[0], after.beta_tau_powers_g1[0]), (beta_g2_s, key.beta_g2)) { - return false; - } - if !same_ratio((before.beta_tau_powers_g1[0], after.beta_tau_powers_g1[0]), (before.beta_g2, after.beta_g2)) { - return false; - } - - // Are the powers of tau correct? - if !same_ratio(power_pairs(&after.tau_powers_g1), (after.tau_powers_g2[0], after.tau_powers_g2[1])) { - return false; - } - if !same_ratio(power_pairs(&after.tau_powers_g2), (after.tau_powers_g1[0], after.tau_powers_g1[1])) { - return false; - } - if !same_ratio(power_pairs(&after.alpha_tau_powers_g1), (after.tau_powers_g2[0], after.tau_powers_g2[1])) { - return false; - } - if !same_ratio(power_pairs(&after.beta_tau_powers_g1), (after.tau_powers_g2[0], after.tau_powers_g2[1])) { - return false; - } - - true -} - -/// Computes a random linear combination over v1/v2. -/// -/// Checking that many pairs of elements are exponentiated by -/// the same `x` can be achieved (with high probability) with -/// the following technique: -/// -/// Given v1 = [a, b, c] and v2 = [as, bs, cs], compute -/// (a*r1 + b*r2 + c*r3, (as)*r1 + (bs)*r2 + (cs)*r3) for some -/// random r1, r2, r3. Given (g, g^s)... -/// -/// e(g, (as)*r1 + (bs)*r2 + (cs)*r3) = e(g^s, a*r1 + b*r2 + c*r3) -/// -/// ... with high probability. -fn merge_pairs(v1: &[G], v2: &[G]) -> (G, G) -{ - use std::sync::{Arc, Mutex}; - use rand::{thread_rng}; - - assert_eq!(v1.len(), v2.len()); - - let chunk = (v1.len() / num_cpus::get()) + 1; - - let s = Arc::new(Mutex::new(G::Projective::zero())); - let sx = Arc::new(Mutex::new(G::Projective::zero())); - - crossbeam::scope(|scope| { - for (v1, v2) in v1.chunks(chunk).zip(v2.chunks(chunk)) { - let s = s.clone(); - let sx = sx.clone(); - - scope.spawn(move || { - // We do not need to be overly cautious of the RNG - // used for this check. 
- let rng = &mut thread_rng(); - - let mut wnaf = Wnaf::new(); - let mut local_s = G::Projective::zero(); - let mut local_sx = G::Projective::zero(); - - for (v1, v2) in v1.iter().zip(v2.iter()) { - let rho = G::Scalar::rand(rng); - let mut wnaf = wnaf.scalar(rho.into_repr()); - let v1 = wnaf.base(v1.into_projective()); - let v2 = wnaf.base(v2.into_projective()); - - local_s.add_assign(&v1); - local_sx.add_assign(&v2); - } - - s.lock().unwrap().add_assign(&local_s); - sx.lock().unwrap().add_assign(&local_sx); - }); - } - }); - - let s = s.lock().unwrap().into_affine(); - let sx = sx.lock().unwrap().into_affine(); - - (s, sx) -} - -/// Construct a single pair (s, s^x) for a vector of -/// the form [1, x, x^2, x^3, ...]. -fn power_pairs(v: &[G]) -> (G, G) -{ - merge_pairs(&v[0..(v.len()-1)], &v[1..]) -} - -#[test] -fn test_power_pairs() { - use rand::thread_rng; - - let rng = &mut thread_rng(); - - let mut v = vec![]; - let x = Fr::rand(rng); - let mut acc = Fr::one(); - for _ in 0..100 { - v.push(G1Affine::one().mul(acc).into_affine()); - acc.mul_assign(&x); - } - - let gx = G2Affine::one().mul(x).into_affine(); - - assert!(same_ratio(power_pairs(&v), (G2Affine::one(), gx))); - - v[1] = v[1].mul(Fr::rand(rng)).into_affine(); - - assert!(!same_ratio(power_pairs(&v), (G2Affine::one(), gx))); -} - -/// Checks if pairs have the same ratio. 
-fn same_ratio( - g1: (G1, G1), - g2: (G1::Pair, G1::Pair) -) -> bool -{ - g1.0.pairing_with(&g2.1) == g1.1.pairing_with(&g2.0) -} - -#[test] -fn test_same_ratio() { - use rand::thread_rng; - - let rng = &mut thread_rng(); - - let s = Fr::rand(rng); - let g1 = G1Affine::one(); - let g2 = G2Affine::one(); - let g1_s = g1.mul(s).into_affine(); - let g2_s = g2.mul(s).into_affine(); - - assert!(same_ratio((g1, g1_s), (g2, g2_s))); - assert!(!same_ratio((g1_s, g1), (g2, g2_s))); -} - -#[test] -fn test_accumulator_serialization() { - use rand::thread_rng; - - let rng = &mut thread_rng(); - let mut digest = (0..64).map(|_| rng.gen()).collect::>(); - - let mut acc = Accumulator::new(); - let before = acc.clone(); - let (pk, sk) = keypair(rng, &digest); - acc.transform(&sk); - assert!(verify_transform(&before, &acc, &pk, &digest)); - digest[0] = !digest[0]; - assert!(!verify_transform(&before, &acc, &pk, &digest)); - let mut v = Vec::with_capacity(ACCUMULATOR_BYTE_SIZE - 64); - acc.serialize(&mut v, UseCompression::No).unwrap(); - assert_eq!(v.len(), ACCUMULATOR_BYTE_SIZE - 64); - let deserialized = Accumulator::deserialize(&mut &v[..], UseCompression::No, CheckForCorrectness::No).unwrap(); - assert!(acc == deserialized); -} - -/// Compute BLAKE2b("") -pub fn blank_hash() -> GenericArray { - Blake2b::new().result() -} - -/// Abstraction over a reader which hashes the data being read. -pub struct HashReader { - reader: R, - hasher: Blake2b -} - -impl HashReader { - /// Construct a new `HashReader` given an existing `reader` by value. - pub fn new(reader: R) -> Self { - HashReader { - reader: reader, - hasher: Blake2b::default() - } - } - - /// Destroy this reader and return the hash of what was read. 
- pub fn into_hash(self) -> GenericArray { - self.hasher.result() - } -} - -impl Read for HashReader { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - let bytes = self.reader.read(buf)?; - - if bytes > 0 { - self.hasher.input(&buf[0..bytes]); - } - - Ok(bytes) - } -} - -/// Abstraction over a writer which hashes the data being written. -pub struct HashWriter { - writer: W, - hasher: Blake2b -} - -impl HashWriter { - /// Construct a new `HashWriter` given an existing `writer` by value. - pub fn new(writer: W) -> Self { - HashWriter { - writer: writer, - hasher: Blake2b::default() - } - } - - /// Destroy this writer and return the hash of what was written. - pub fn into_hash(self) -> GenericArray { - self.hasher.result() - } -} - -impl Write for HashWriter { - fn write(&mut self, buf: &[u8]) -> io::Result { - let bytes = self.writer.write(buf)?; - - if bytes > 0 { - self.hasher.input(&buf[0..bytes]); - } - - Ok(bytes) - } - - fn flush(&mut self) -> io::Result<()> { - self.writer.flush() - } -} +#![allow(unused_imports)] + +// pub mod bls12_381; +pub mod bn256; +pub mod small_bn256; +pub mod accumulator; +pub mod batched_accumulator; +pub mod keypair; +pub mod parameters; +pub mod utils; \ No newline at end of file diff --git a/src/parameters.rs b/src/parameters.rs new file mode 100644 index 0000000..1222fd9 --- /dev/null +++ b/src/parameters.rs @@ -0,0 +1,118 @@ +extern crate pairing; +extern crate rand; +extern crate crossbeam; +extern crate num_cpus; +extern crate blake2; +extern crate generic_array; +extern crate typenum; +extern crate byteorder; +extern crate ff; + +use self::ff::{Field, PrimeField}; +use self::byteorder::{ReadBytesExt, BigEndian}; +use self::rand::{SeedableRng, Rng, Rand}; +use self::rand::chacha::ChaChaRng; +use self::pairing::bn256::{Bn256}; +use self::pairing::*; +use std::io::{self, Read, Write}; +use std::sync::{Arc, Mutex}; +use self::generic_array::GenericArray; +use self::typenum::consts::U64; +use self::blake2::{Blake2b, Digest}; 
+use std::fmt; + +use super::keypair::*; + +pub trait PowersOfTauParameters: Clone { + const REQUIRED_POWER: usize; + + const G1_UNCOMPRESSED_BYTE_SIZE: usize; + const G2_UNCOMPRESSED_BYTE_SIZE: usize; + const G1_COMPRESSED_BYTE_SIZE: usize; + const G2_COMPRESSED_BYTE_SIZE: usize; + + const TAU_POWERS_LENGTH: usize = (1 << Self::REQUIRED_POWER); + + const TAU_POWERS_G1_LENGTH: usize = (Self::TAU_POWERS_LENGTH << 1) - 1; + + const ACCUMULATOR_BYTE_SIZE: usize = (Self::TAU_POWERS_G1_LENGTH * Self::G1_UNCOMPRESSED_BYTE_SIZE) + // g1 tau powers + (Self::TAU_POWERS_LENGTH * Self::G2_UNCOMPRESSED_BYTE_SIZE) + // g2 tau powers + (Self::TAU_POWERS_LENGTH * Self::G1_UNCOMPRESSED_BYTE_SIZE) + // alpha tau powers + (Self::TAU_POWERS_LENGTH * Self::G1_UNCOMPRESSED_BYTE_SIZE) // beta tau powers + + Self::G2_UNCOMPRESSED_BYTE_SIZE // beta in g2 + + Self::HASH_SIZE; // blake2b hash of previous contribution + + const PUBLIC_KEY_SIZE: usize = 3 * Self::G2_UNCOMPRESSED_BYTE_SIZE + // tau, alpha, and beta in g2 + 6 * Self::G1_UNCOMPRESSED_BYTE_SIZE; // (s1, s1*tau), (s2, s2*alpha), (s3, s3*beta) in g1 + + const CONTRIBUTION_BYTE_SIZE: usize = (Self::TAU_POWERS_G1_LENGTH * Self::G1_COMPRESSED_BYTE_SIZE) + // g1 tau powers + (Self::TAU_POWERS_LENGTH * Self::G2_COMPRESSED_BYTE_SIZE) + // g2 tau powers + (Self::TAU_POWERS_LENGTH * Self::G1_COMPRESSED_BYTE_SIZE) + // alpha tau powers + (Self::TAU_POWERS_LENGTH * Self::G1_COMPRESSED_BYTE_SIZE) // beta tau powers + + Self::G2_COMPRESSED_BYTE_SIZE // beta in g2 + + Self::HASH_SIZE // blake2b hash of input accumulator + + Self::PUBLIC_KEY_SIZE; // public key + + // Blake2b hash size + const HASH_SIZE: usize = 64; + + const EMPIRICAL_BATCH_SIZE: usize = 1 << 21; +} + + + +/// Determines if point compression should be used. +#[derive(Copy, Clone, PartialEq)] +pub enum UseCompression { + Yes, + No +} + +/// Determines if points should be checked for correctness during deserialization. 
+/// This is not necessary for participants, because a transcript verifier can +/// check this theirself. +#[derive(Copy, Clone, PartialEq)] +pub enum CheckForCorrectness { + Yes, + No +} + + +/// Errors that might occur during deserialization. +#[derive(Debug)] +pub enum DeserializationError { + IoError(io::Error), + DecodingError(GroupDecodingError), + PointAtInfinity +} + +impl fmt::Display for DeserializationError { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + match *self { + DeserializationError::IoError(ref e) => write!(f, "Disk IO error: {}", e), + DeserializationError::DecodingError(ref e) => write!(f, "Decoding error: {}", e), + DeserializationError::PointAtInfinity => write!(f, "Point at infinity found") + } + } +} + +impl From for DeserializationError { + fn from(err: io::Error) -> DeserializationError { + DeserializationError::IoError(err) + } +} + +impl From for DeserializationError { + fn from(err: GroupDecodingError) -> DeserializationError { + DeserializationError::DecodingError(err) + } +} + +#[derive(Copy, Clone, Debug, PartialEq)] +pub enum ElementType { + TauG1, + TauG2, + AlphaG1, + BetaG1, + BetaG2 +} \ No newline at end of file diff --git a/src/small_bn256/mod.rs b/src/small_bn256/mod.rs new file mode 100644 index 0000000..8c47568 --- /dev/null +++ b/src/small_bn256/mod.rs @@ -0,0 +1,41 @@ +extern crate pairing; +extern crate rand; +extern crate crossbeam; +extern crate num_cpus; +extern crate blake2; +extern crate generic_array; +extern crate typenum; +extern crate byteorder; +extern crate ff; + +use self::ff::{Field, PrimeField}; +use self::byteorder::{ReadBytesExt, BigEndian}; +use self::rand::{SeedableRng, Rng, Rand}; +use self::rand::chacha::ChaChaRng; +use self::pairing::bn256::{Bn256}; +use self::pairing::*; +use std::io::{self, Read, Write}; +use std::sync::{Arc, Mutex}; +use self::generic_array::GenericArray; +use self::typenum::consts::U64; +use self::blake2::{Blake2b, Digest}; +use std::fmt; + +use 
crate::parameters::*; +use crate::keypair::*; +use crate::utils::*; + +#[derive(Clone)] +pub struct Bn256CeremonyParameters { + +} + +impl PowersOfTauParameters for Bn256CeremonyParameters { + const REQUIRED_POWER: usize = 21; // generate to have roughly 2 million constraints + + // This ceremony is based on the BN256 elliptic curve construction. + const G1_UNCOMPRESSED_BYTE_SIZE: usize = 64; + const G2_UNCOMPRESSED_BYTE_SIZE: usize = 128; + const G1_COMPRESSED_BYTE_SIZE: usize = 32; + const G2_COMPRESSED_BYTE_SIZE: usize = 64; +} diff --git a/src/utils.rs b/src/utils.rs new file mode 100644 index 0000000..4e4ed51 --- /dev/null +++ b/src/utils.rs @@ -0,0 +1,168 @@ +extern crate pairing; +extern crate rand; +extern crate crossbeam; +extern crate num_cpus; +extern crate blake2; +extern crate generic_array; +extern crate typenum; +extern crate byteorder; +extern crate ff; + +use self::ff::{Field, PrimeField}; +use self::byteorder::{ReadBytesExt, BigEndian}; +use self::rand::{SeedableRng, Rng, Rand}; +use self::rand::chacha::ChaChaRng; +use self::pairing::bn256::{Bn256}; +use self::pairing::*; +use std::io::{self, Read, Write}; +use std::sync::{Arc, Mutex}; +use self::generic_array::GenericArray; +use self::typenum::consts::U64; +use self::blake2::{Blake2b, Digest}; +use std::fmt; + +use super::parameters::*; + +/// Hashes to G2 using the first 32 bytes of `digest`. Panics if `digest` is less +/// than 32 bytes. 
+pub fn hash_to_g2(mut digest: &[u8]) -> E::G2 +{ + assert!(digest.len() >= 32); + + let mut seed = Vec::with_capacity(8); + + for _ in 0..8 { + seed.push(digest.read_u32::().expect("assertion above guarantees this to work")); + } + + ChaChaRng::from_seed(&seed).gen() +} + +#[test] +fn test_hash_to_g2() { + assert!( + hash_to_g2::(&[1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33]) + == + hash_to_g2::(&[1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,34]) + ); + + assert!( + hash_to_g2::(&[1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32]) + != + hash_to_g2::(&[1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,33]) + ); +} + +/// Computes a random linear combination over v1/v2. +/// +/// Checking that many pairs of elements are exponentiated by +/// the same `x` can be achieved (with high probability) with +/// the following technique: +/// +/// Given v1 = [a, b, c] and v2 = [as, bs, cs], compute +/// (a*r1 + b*r2 + c*r3, (as)*r1 + (bs)*r2 + (cs)*r3) for some +/// random r1, r2, r3. Given (g, g^s)... +/// +/// e(g, (as)*r1 + (bs)*r2 + (cs)*r3) = e(g^s, a*r1 + b*r2 + c*r3) +/// +/// ... with high probability. +fn merge_pairs>(v1: &[G], v2: &[G]) -> (G, G) +{ + use std::sync::{Arc, Mutex}; + use self::rand::{thread_rng}; + + assert_eq!(v1.len(), v2.len()); + + let chunk = (v1.len() / num_cpus::get()) + 1; + + let s = Arc::new(Mutex::new(G::Projective::zero())); + let sx = Arc::new(Mutex::new(G::Projective::zero())); + + crossbeam::scope(|scope| { + for (v1, v2) in v1.chunks(chunk).zip(v2.chunks(chunk)) { + let s = s.clone(); + let sx = sx.clone(); + + scope.spawn(move || { + // We do not need to be overly cautious of the RNG + // used for this check. 
+ let rng = &mut thread_rng(); + + let mut wnaf = Wnaf::new(); + let mut local_s = G::Projective::zero(); + let mut local_sx = G::Projective::zero(); + + for (v1, v2) in v1.iter().zip(v2.iter()) { + let rho = G::Scalar::rand(rng); + let mut wnaf = wnaf.scalar(rho.into_repr()); + let v1 = wnaf.base(v1.into_projective()); + let v2 = wnaf.base(v2.into_projective()); + + local_s.add_assign(&v1); + local_sx.add_assign(&v2); + } + + s.lock().unwrap().add_assign(&local_s); + sx.lock().unwrap().add_assign(&local_sx); + }); + } + }); + + let s = s.lock().unwrap().into_affine(); + let sx = sx.lock().unwrap().into_affine(); + + (s, sx) +} + +/// Construct a single pair (s, s^x) for a vector of +/// the form [1, x, x^2, x^3, ...]. +pub fn power_pairs>(v: &[G]) -> (G, G) +{ + merge_pairs::(&v[0..(v.len()-1)], &v[1..]) +} + +/// Compute BLAKE2b("") +pub fn blank_hash() -> GenericArray { + Blake2b::new().result() +} + +/// Checks if pairs have the same ratio. +/// Under the hood uses pairing to check +/// x1/x2 = y1/y2 => x1*y2 = x2*y1 +pub fn same_ratio>( + g1: (G1, G1), + g2: (G1::Pair, G1::Pair) +) -> bool +{ + g1.0.pairing_with(&g2.1) == g1.1.pairing_with(&g2.0) +} + +pub fn write_point( + writer: &mut W, + p: &G, + compression: UseCompression +) -> io::Result<()> + where W: Write, + G: CurveAffine +{ + match compression { + UseCompression::Yes => writer.write_all(p.into_compressed().as_ref()), + UseCompression::No => writer.write_all(p.into_uncompressed().as_ref()), + } +} + +pub fn compute_g2_s ( + digest: &[u8], + g1_s: &E::G1Affine, + g1_s_x: &E::G1Affine, + personalization: u8 +) -> E::G2Affine +{ + let mut h = Blake2b::default(); + h.input(&[personalization]); + h.input(digest); + h.input(g1_s.into_uncompressed().as_ref()); + h.input(g1_s_x.into_uncompressed().as_ref()); + + hash_to_g2::(h.result().as_ref()).into_affine() +} \ No newline at end of file diff --git a/test.sh b/test.sh new file mode 100755 index 0000000..035c085 --- /dev/null +++ b/test.sh @@ -0,0 +1,19 @@ 
+#!/bin/sh + +rm challenge +rm response +rm new_challenge +rm challenge_old +rm response_old + +cargo run --release --bin new_constrained +cargo run --release --bin beacon_constrained +cargo run --release --bin verify_transform_constrained + +mv challenge challenge_old +mv response response_old + +mv new_challenge challenge + +cargo run --release --bin compute_constrained +cargo run --release --bin verify_transform_constrained \ No newline at end of file From 03ec5d5ffad72e7daea755b2e43908182e9deb7c Mon Sep 17 00:00:00 2001 From: Alex Vlasov Date: Sat, 19 Jan 2019 16:52:19 +0300 Subject: [PATCH 16/23] fix imports and build --- Cargo.lock | 282 ++++++++++++++++-------- Cargo.toml | 4 +- src/accumulator.rs | 18 +- src/batched_accumulator.rs | 18 +- src/bin/beacon_constrained.rs | 18 +- src/bin/new_constrained.rs | 6 +- src/bin/verify_transform_constrained.rs | 18 +- src/parameters.rs | 18 +- src/utils.rs | 18 +- 9 files changed, 253 insertions(+), 147 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 71f4a93..80cf983 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1,19 +1,19 @@ [[package]] name = "bellman" -version = "0.1.1" -source = "git+https://github.com/matterinc/bellman#e544678a6abe2f97a9afbc02e2e590f2259f1e30" +version = "0.1.2" +source = "git+https://github.com/matterinc/bellman#6e5cfe211feec9f0e612a47a8a19d31f37ea9071" dependencies = [ "bit-vec 0.4.4 (registry+https://github.com/rust-lang/crates.io-index)", - "byteorder 1.2.2 (registry+https://github.com/rust-lang/crates.io-index)", + "byteorder 1.2.7 (registry+https://github.com/rust-lang/crates.io-index)", "crossbeam 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)", - "ff 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", - "futures 0.1.21 (registry+https://github.com/rust-lang/crates.io-index)", + "ff 0.5.0 (git+https://github.com/matterinc/ff)", + "futures 0.1.25 (registry+https://github.com/rust-lang/crates.io-index)", "futures-cpupool 0.1.8 
(registry+https://github.com/rust-lang/crates.io-index)", - "num_cpus 1.8.0 (registry+https://github.com/rust-lang/crates.io-index)", - "pairing 0.15.0 (git+https://github.com/matterinc/pairing)", + "num_cpus 1.9.0 (registry+https://github.com/rust-lang/crates.io-index)", + "pairing 0.15.1 (git+https://github.com/matterinc/pairing)", "pbr 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)", - "rand 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)", - "time 0.1.39 (registry+https://github.com/rust-lang/crates.io-index)", + "rand 0.4.5 (registry+https://github.com/rust-lang/crates.io-index)", + "time 0.1.42 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -23,7 +23,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "bitflags" -version = "1.0.1" +version = "1.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] @@ -44,7 +44,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "byteorder" -version = "1.2.2" +version = "1.2.7" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] @@ -75,25 +75,31 @@ dependencies = [ ] [[package]] -name = "ff" -version = "0.4.0" +name = "either" +version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "ff" +version = "0.5.0" +source = "git+https://github.com/matterinc/ff#056a13b95f4b971a9ae2c6fbb5fbc9f1e4f4828e" dependencies = [ - "byteorder 1.2.2 (registry+https://github.com/rust-lang/crates.io-index)", - "ff_derive 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)", - "rand 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)", + "byteorder 1.2.7 (registry+https://github.com/rust-lang/crates.io-index)", + "ff_derive 0.4.0 (git+https://github.com/matterinc/ff)", + "rand 0.4.5 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "ff_derive" -version = "0.3.0" -source = 
"registry+https://github.com/rust-lang/crates.io-index" +version = "0.4.0" +source = "git+https://github.com/matterinc/ff#056a13b95f4b971a9ae2c6fbb5fbc9f1e4f4828e" dependencies = [ - "num-bigint 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)", + "num-bigint 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", "num-integer 0.1.39 (registry+https://github.com/rust-lang/crates.io-index)", "num-traits 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)", - "proc-macro2 0.4.24 (registry+https://github.com/rust-lang/crates.io-index)", + "proc-macro2 0.4.25 (registry+https://github.com/rust-lang/crates.io-index)", "quote 0.6.10 (registry+https://github.com/rust-lang/crates.io-index)", + "serde_derive 1.0.85 (registry+https://github.com/rust-lang/crates.io-index)", "syn 0.14.9 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -102,7 +108,7 @@ name = "fuchsia-zircon" version = "0.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "bitflags 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)", + "bitflags 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)", "fuchsia-zircon-sys 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -113,7 +119,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "futures" -version = "0.1.21" +version = "0.1.25" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] @@ -121,13 +127,13 @@ name = "futures-cpupool" version = "0.1.8" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "futures 0.1.21 (registry+https://github.com/rust-lang/crates.io-index)", - "num_cpus 1.8.0 (registry+https://github.com/rust-lang/crates.io-index)", + "futures 0.1.25 (registry+https://github.com/rust-lang/crates.io-index)", + "num_cpus 1.9.0 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "gcc" -version = "0.3.54" +version = 
"0.3.55" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] @@ -135,17 +141,22 @@ name = "generic-array" version = "0.8.3" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "nodrop 0.1.12 (registry+https://github.com/rust-lang/crates.io-index)", + "nodrop 0.1.13 (registry+https://github.com/rust-lang/crates.io-index)", "typenum 1.10.0 (registry+https://github.com/rust-lang/crates.io-index)", ] +[[package]] +name = "hex" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" + [[package]] name = "hex-literal" version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "hex-literal-impl 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", - "proc-macro-hack 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", + "proc-macro-hack 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -153,9 +164,22 @@ name = "hex-literal-impl" version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "proc-macro-hack 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", + "proc-macro-hack 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)", ] +[[package]] +name = "itertools" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "either 1.5.0 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "itoa" +version = "0.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" + [[package]] name = "kernel32-sys" version = "0.2.2" @@ -167,17 +191,26 @@ dependencies = [ [[package]] name = "libc" -version = "0.2.40" +version = "0.2.47" source = "registry+https://github.com/rust-lang/crates.io-index" +[[package]] +name = "memmap" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "libc 0.2.47 
(registry+https://github.com/rust-lang/crates.io-index)", + "winapi 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)", +] + [[package]] name = "nodrop" -version = "0.1.12" +version = "0.1.13" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "num-bigint" -version = "0.2.1" +version = "0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "num-integer 0.1.39 (registry+https://github.com/rust-lang/crates.io-index)", @@ -199,20 +232,24 @@ source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "num_cpus" -version = "1.8.0" +version = "1.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "libc 0.2.40 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.47 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "pairing" -version = "0.15.0" -source = "git+https://github.com/matterinc/pairing#1363d02170f1d98f1b9c8eec0e3fc6b1eea4ef9a" +version = "0.15.1" +source = "git+https://github.com/matterinc/pairing#3279e322eb9239e7f6e98f0abb9421e4e7f37c25" dependencies = [ - "byteorder 1.2.2 (registry+https://github.com/rust-lang/crates.io-index)", - "ff 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", - "rand 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)", + "byteorder 1.2.7 (registry+https://github.com/rust-lang/crates.io-index)", + "ff 0.5.0 (git+https://github.com/matterinc/ff)", + "hex 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)", + "rand 0.4.5 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.85 (registry+https://github.com/rust-lang/crates.io-index)", + "serde_derive 1.0.85 (registry+https://github.com/rust-lang/crates.io-index)", + "serde_json 1.0.36 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -221,9 +258,9 @@ version = "1.0.1" source = 
"registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.40 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.47 (registry+https://github.com/rust-lang/crates.io-index)", "termion 1.5.1 (registry+https://github.com/rust-lang/crates.io-index)", - "time 0.1.39 (registry+https://github.com/rust-lang/crates.io-index)", + "time 0.1.42 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -231,36 +268,38 @@ dependencies = [ name = "powersoftau" version = "0.1.2" dependencies = [ - "bellman 0.1.1 (git+https://github.com/matterinc/bellman)", + "bellman 0.1.2 (git+https://github.com/matterinc/bellman)", "blake2 0.6.1 (registry+https://github.com/rust-lang/crates.io-index)", - "byteorder 1.2.2 (registry+https://github.com/rust-lang/crates.io-index)", + "byteorder 1.2.7 (registry+https://github.com/rust-lang/crates.io-index)", "crossbeam 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)", - "ff 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", + "ff 0.5.0 (git+https://github.com/matterinc/ff)", "generic-array 0.8.3 (registry+https://github.com/rust-lang/crates.io-index)", "hex-literal 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", - "num_cpus 1.8.0 (registry+https://github.com/rust-lang/crates.io-index)", - "pairing 0.15.0 (git+https://github.com/matterinc/pairing)", - "rand 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)", + "itertools 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)", + "memmap 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)", + "num_cpus 1.9.0 (registry+https://github.com/rust-lang/crates.io-index)", + "pairing 0.15.1 (git+https://github.com/matterinc/pairing)", + "rand 0.4.5 (registry+https://github.com/rust-lang/crates.io-index)", "rust-crypto 0.2.36 
(registry+https://github.com/rust-lang/crates.io-index)", "typenum 1.10.0 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "proc-macro-hack" -version = "0.4.0" +version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "proc-macro-hack-impl 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", + "proc-macro-hack-impl 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "proc-macro-hack-impl" -version = "0.4.0" +version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "proc-macro2" -version = "0.4.24" +version = "0.4.25" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "unicode-xid 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", @@ -271,7 +310,7 @@ name = "quote" version = "0.6.10" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "proc-macro2 0.4.24 (registry+https://github.com/rust-lang/crates.io-index)", + "proc-macro2 0.4.25 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -280,23 +319,38 @@ version = "0.3.22" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "fuchsia-zircon 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.40 (registry+https://github.com/rust-lang/crates.io-index)", - "rand 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.47 (registry+https://github.com/rust-lang/crates.io-index)", + "rand 0.4.5 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "rand" -version = "0.4.2" +version = "0.4.5" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "fuchsia-zircon 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.40 (registry+https://github.com/rust-lang/crates.io-index)", - "winapi 0.3.4 
(registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.47 (registry+https://github.com/rust-lang/crates.io-index)", + "rand_core 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)", + "rdrand 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", + "winapi 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "rand_core" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "rdrand" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "rand_core 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "redox_syscall" -version = "0.1.37" +version = "0.1.50" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] @@ -304,7 +358,7 @@ name = "redox_termios" version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "redox_syscall 0.1.37 (registry+https://github.com/rust-lang/crates.io-index)", + "redox_syscall 0.1.50 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -312,11 +366,11 @@ name = "rust-crypto" version = "0.2.36" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "gcc 0.3.54 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.40 (registry+https://github.com/rust-lang/crates.io-index)", + "gcc 0.3.55 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.47 (registry+https://github.com/rust-lang/crates.io-index)", "rand 0.3.22 (registry+https://github.com/rust-lang/crates.io-index)", "rustc-serialize 0.3.24 (registry+https://github.com/rust-lang/crates.io-index)", - "time 0.1.39 (registry+https://github.com/rust-lang/crates.io-index)", + "time 0.1.42 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -324,12 +378,52 @@ name = "rustc-serialize" version = "0.3.24" source = 
"registry+https://github.com/rust-lang/crates.io-index" +[[package]] +name = "ryu" +version = "0.2.7" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "serde" +version = "1.0.85" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "serde_derive" +version = "1.0.85" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "proc-macro2 0.4.25 (registry+https://github.com/rust-lang/crates.io-index)", + "quote 0.6.10 (registry+https://github.com/rust-lang/crates.io-index)", + "syn 0.15.26 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "serde_json" +version = "1.0.36" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "itoa 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)", + "ryu 0.2.7 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.85 (registry+https://github.com/rust-lang/crates.io-index)", +] + [[package]] name = "syn" version = "0.14.9" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "proc-macro2 0.4.24 (registry+https://github.com/rust-lang/crates.io-index)", + "proc-macro2 0.4.25 (registry+https://github.com/rust-lang/crates.io-index)", + "quote 0.6.10 (registry+https://github.com/rust-lang/crates.io-index)", + "unicode-xid 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "syn" +version = "0.15.26" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "proc-macro2 0.4.25 (registry+https://github.com/rust-lang/crates.io-index)", "quote 0.6.10 (registry+https://github.com/rust-lang/crates.io-index)", "unicode-xid 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -339,19 +433,19 @@ name = "termion" version = "1.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "libc 0.2.40 
(registry+https://github.com/rust-lang/crates.io-index)", - "redox_syscall 0.1.37 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.47 (registry+https://github.com/rust-lang/crates.io-index)", + "redox_syscall 0.1.50 (registry+https://github.com/rust-lang/crates.io-index)", "redox_termios 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "time" -version = "0.1.39" +version = "0.1.42" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "libc 0.2.40 (registry+https://github.com/rust-lang/crates.io-index)", - "redox_syscall 0.1.37 (registry+https://github.com/rust-lang/crates.io-index)", - "winapi 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.47 (registry+https://github.com/rust-lang/crates.io-index)", + "redox_syscall 0.1.50 (registry+https://github.com/rust-lang/crates.io-index)", + "winapi 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -371,7 +465,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "winapi" -version = "0.3.4" +version = "0.3.6" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "winapi-i686-pc-windows-gnu 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", @@ -394,52 +488,64 @@ version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" [metadata] -"checksum bellman 0.1.1 (git+https://github.com/matterinc/bellman)" = "" +"checksum bellman 0.1.2 (git+https://github.com/matterinc/bellman)" = "" "checksum bit-vec 0.4.4 (registry+https://github.com/rust-lang/crates.io-index)" = "02b4ff8b16e6076c3e14220b39fbc1fabb6737522281a388998046859400895f" -"checksum bitflags 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "b3c30d3802dfb7281680d6285f2ccdaa8c2d8fee41f93805dba5c4cf50dc23cf" +"checksum bitflags 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)" = 
"228047a76f468627ca71776ecdebd732a3423081fcf5125585bcd7c49886ce12" "checksum blake2 0.6.1 (registry+https://github.com/rust-lang/crates.io-index)" = "53bf612c0f2839b7e764ebac65d6cb985f7c6812de399d0728038f4b1da141bc" "checksum byte-tools 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "560c32574a12a89ecd91f5e742165893f86e3ab98d21f8ea548658eb9eef5f40" -"checksum byteorder 1.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "73b5bdfe7ee3ad0b99c9801d58807a9dbc9e09196365b0203853b99889ab3c87" +"checksum byteorder 1.2.7 (registry+https://github.com/rust-lang/crates.io-index)" = "94f88df23a25417badc922ab0f5716cc1330e87f71ddd9203b3a3ccd9cedf75d" "checksum constant_time_eq 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "8ff012e225ce166d4422e0e78419d901719760f62ae2b7969ca6b564d1b54a9e" "checksum crossbeam 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)" = "24ce9782d4d5c53674646a6a4c1863a21a8fc0cb649b3c94dfc16e45071dea19" "checksum crypto-mac 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "779015233ac67d65098614aec748ac1c756ab6677fa2e14cf8b37c08dfed1198" "checksum digest 0.6.2 (registry+https://github.com/rust-lang/crates.io-index)" = "e5b29bf156f3f4b3c4f610a25ff69370616ae6e0657d416de22645483e72af0a" -"checksum ff 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "eec81e2e423086589b224dbcfbab70e3732913de25479d05165b20d4aaed05f4" -"checksum ff_derive 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "70335090ee115d5716416ca38980cce7752f40923f41d22cf5a69a6269f9e2a2" +"checksum either 1.5.0 (registry+https://github.com/rust-lang/crates.io-index)" = "3be565ca5c557d7f59e7cfcf1844f9e3033650c929c6566f511e8005f205c1d0" +"checksum ff 0.5.0 (git+https://github.com/matterinc/ff)" = "" +"checksum ff_derive 0.4.0 (git+https://github.com/matterinc/ff)" = "" "checksum fuchsia-zircon 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = 
"2e9763c69ebaae630ba35f74888db465e49e259ba1bc0eda7d06f4a067615d82" "checksum fuchsia-zircon-sys 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "3dcaa9ae7725d12cdb85b3ad99a434db70b468c09ded17e012d86b5c1010f7a7" -"checksum futures 0.1.21 (registry+https://github.com/rust-lang/crates.io-index)" = "1a70b146671de62ec8c8ed572219ca5d594d9b06c0b364d5e67b722fc559b48c" +"checksum futures 0.1.25 (registry+https://github.com/rust-lang/crates.io-index)" = "49e7653e374fe0d0c12de4250f0bdb60680b8c80eed558c5c7538eec9c89e21b" "checksum futures-cpupool 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)" = "ab90cde24b3319636588d0c35fe03b1333857621051837ed769faefb4c2162e4" -"checksum gcc 0.3.54 (registry+https://github.com/rust-lang/crates.io-index)" = "5e33ec290da0d127825013597dbdfc28bee4964690c7ce1166cbc2a7bd08b1bb" +"checksum gcc 0.3.55 (registry+https://github.com/rust-lang/crates.io-index)" = "8f5f3913fa0bfe7ee1fd8248b6b9f42a5af4b9d65ec2dd2c3c26132b950ecfc2" "checksum generic-array 0.8.3 (registry+https://github.com/rust-lang/crates.io-index)" = "fceb69994e330afed50c93524be68c42fa898c2d9fd4ee8da03bd7363acd26f2" +"checksum hex 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)" = "805026a5d0141ffc30abb3be3173848ad46a1b1664fe632428479619a3644d77" "checksum hex-literal 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "4da5f0e01bd8a71a224a4eedecaacfcabda388dbb7a80faf04d3514287572d95" "checksum hex-literal-impl 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "1d340b6514f232f6db1bd16db65302a5278a04fef9ce867cb932e7e5fa21130a" +"checksum itertools 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)" = "5b8467d9c1cebe26feb08c640139247fac215782d35371ade9a2136ed6085358" +"checksum itoa 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)" = "1306f3464951f30e30d12373d31c79fbd52d236e5e896fd92f96ec7babbbe60b" "checksum kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = 
"7507624b29483431c0ba2d82aece8ca6cdba9382bff4ddd0f7490560c056098d" -"checksum libc 0.2.40 (registry+https://github.com/rust-lang/crates.io-index)" = "6fd41f331ac7c5b8ac259b8bf82c75c0fb2e469bbf37d2becbba9a6a2221965b" -"checksum nodrop 0.1.12 (registry+https://github.com/rust-lang/crates.io-index)" = "9a2228dca57108069a5262f2ed8bd2e82496d2e074a06d1ccc7ce1687b6ae0a2" -"checksum num-bigint 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "10b8423ea72ec64751198856a853e07b37087cfc9b53a87ecb19bff67b6d1320" +"checksum libc 0.2.47 (registry+https://github.com/rust-lang/crates.io-index)" = "48450664a984b25d5b479554c29cc04e3150c97aa4c01da5604a2d4ed9151476" +"checksum memmap 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "6585fd95e7bb50d6cc31e20d4cf9afb4e2ba16c5846fc76793f11218da9c475b" +"checksum nodrop 0.1.13 (registry+https://github.com/rust-lang/crates.io-index)" = "2f9667ddcc6cc8a43afc9b7917599d7216aa09c463919ea32c59ed6cac8bc945" +"checksum num-bigint 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "57450397855d951f1a41305e54851b1a7b8f5d2e349543a02a2effe25459f718" "checksum num-integer 0.1.39 (registry+https://github.com/rust-lang/crates.io-index)" = "e83d528d2677f0518c570baf2b7abdcf0cd2d248860b68507bdcb3e91d4c0cea" "checksum num-traits 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)" = "0b3a5d7cc97d6d30d8b9bc8fa19bf45349ffe46241e8816f50f62f6d6aaabee1" -"checksum num_cpus 1.8.0 (registry+https://github.com/rust-lang/crates.io-index)" = "c51a3322e4bca9d212ad9a158a02abc6934d005490c054a2778df73a70aa0a30" -"checksum pairing 0.15.0 (git+https://github.com/matterinc/pairing)" = "" +"checksum num_cpus 1.9.0 (registry+https://github.com/rust-lang/crates.io-index)" = "5a69d464bdc213aaaff628444e99578ede64e9c854025aa43b9796530afa9238" +"checksum pairing 0.15.1 (git+https://github.com/matterinc/pairing)" = "" "checksum pbr 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = 
"deb73390ab68d81992bd994d145f697451bb0b54fd39738e72eef32458ad6907" -"checksum proc-macro-hack 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "3ba8d4f9257b85eb6cdf13f055cea3190520aab1409ca2ab43493ea4820c25f0" -"checksum proc-macro-hack-impl 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "d5cb6f960ad471404618e9817c0e5d10b1ae74cfdf01fab89ea0641fe7fb2892" -"checksum proc-macro2 0.4.24 (registry+https://github.com/rust-lang/crates.io-index)" = "77619697826f31a02ae974457af0b29b723e5619e113e9397b8b82c6bd253f09" +"checksum proc-macro-hack 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)" = "2c725b36c99df7af7bf9324e9c999b9e37d92c8f8caf106d82e1d7953218d2d8" +"checksum proc-macro-hack-impl 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)" = "2b753ad9ed99dd8efeaa7d2fb8453c8f6bc3e54b97966d35f1bc77ca6865254a" +"checksum proc-macro2 0.4.25 (registry+https://github.com/rust-lang/crates.io-index)" = "d3797b7142c9aa74954e351fc089bbee7958cebbff6bf2815e7ffff0b19f547d" "checksum quote 0.6.10 (registry+https://github.com/rust-lang/crates.io-index)" = "53fa22a1994bd0f9372d7a816207d8a2677ad0325b073f5c5332760f0fb62b5c" "checksum rand 0.3.22 (registry+https://github.com/rust-lang/crates.io-index)" = "15a732abf9d20f0ad8eeb6f909bf6868722d9a06e1e50802b6a70351f40b4eb1" -"checksum rand 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)" = "eba5f8cb59cc50ed56be8880a5c7b496bfd9bd26394e176bc67884094145c2c5" -"checksum redox_syscall 0.1.37 (registry+https://github.com/rust-lang/crates.io-index)" = "0d92eecebad22b767915e4d529f89f28ee96dbbf5a4810d2b844373f136417fd" +"checksum rand 0.4.5 (registry+https://github.com/rust-lang/crates.io-index)" = "dee497e66d8d76bf08ce20c8d36e16f93749ab0bf89975b4f8ae5cee660c2da2" +"checksum rand_core 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "0905b6b7079ec73b314d4c748701f6931eb79fd97c668caa3f1899b22b32c6db" +"checksum rdrand 0.4.0 
(registry+https://github.com/rust-lang/crates.io-index)" = "678054eb77286b51581ba43620cc911abf02758c91f93f479767aed0f90458b2" +"checksum redox_syscall 0.1.50 (registry+https://github.com/rust-lang/crates.io-index)" = "52ee9a534dc1301776eff45b4fa92d2c39b1d8c3d3357e6eb593e0d795506fc2" "checksum redox_termios 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "7e891cfe48e9100a70a3b6eb652fef28920c117d366339687bd5576160db0f76" "checksum rust-crypto 0.2.36 (registry+https://github.com/rust-lang/crates.io-index)" = "f76d05d3993fd5f4af9434e8e436db163a12a9d40e1a58a726f27a01dfd12a2a" "checksum rustc-serialize 0.3.24 (registry+https://github.com/rust-lang/crates.io-index)" = "dcf128d1287d2ea9d80910b5f1120d0b8eede3fbf1abe91c40d39ea7d51e6fda" +"checksum ryu 0.2.7 (registry+https://github.com/rust-lang/crates.io-index)" = "eb9e9b8cde282a9fe6a42dd4681319bfb63f121b8a8ee9439c6f4107e58a46f7" +"checksum serde 1.0.85 (registry+https://github.com/rust-lang/crates.io-index)" = "534b8b91a95e0f71bca3ed5824752d558da048d4248c91af873b63bd60519752" +"checksum serde_derive 1.0.85 (registry+https://github.com/rust-lang/crates.io-index)" = "a915306b0f1ac5607797697148c223bedeaa36bcc2e28a01441cd638cc6567b4" +"checksum serde_json 1.0.36 (registry+https://github.com/rust-lang/crates.io-index)" = "574378d957d6dcdf1bbb5d562a15cbd5e644159432f84634b94e485267abbcc7" "checksum syn 0.14.9 (registry+https://github.com/rust-lang/crates.io-index)" = "261ae9ecaa397c42b960649561949d69311f08eeaea86a65696e6e46517cf741" +"checksum syn 0.15.26 (registry+https://github.com/rust-lang/crates.io-index)" = "f92e629aa1d9c827b2bb8297046c1ccffc57c99b947a680d3ccff1f136a3bee9" "checksum termion 1.5.1 (registry+https://github.com/rust-lang/crates.io-index)" = "689a3bdfaab439fd92bc87df5c4c78417d3cbe537487274e9b0b2dce76e92096" -"checksum time 0.1.39 (registry+https://github.com/rust-lang/crates.io-index)" = "a15375f1df02096fb3317256ce2cee6a1f42fc84ea5ad5fc8c421cfe40c73098" +"checksum time 0.1.42 
(registry+https://github.com/rust-lang/crates.io-index)" = "db8dcfca086c1143c9270ac42a2bbd8a7ee477b78ac8e45b19abfb0cbede4b6f" "checksum typenum 1.10.0 (registry+https://github.com/rust-lang/crates.io-index)" = "612d636f949607bdf9b123b4a6f6d966dedf3ff669f7f045890d3a4a73948169" "checksum unicode-xid 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "fc72304796d0818e357ead4e000d19c9c174ab23dc11093ac919054d20a6a7fc" "checksum winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)" = "167dc9d6949a9b857f3451275e911c3f44255842c1f7a76f33c55103a909087a" -"checksum winapi 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)" = "04e3bd221fcbe8a271359c04f21a76db7d0c6028862d1bb5512d85e1e2eb5bb3" +"checksum winapi 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)" = "92c1eb33641e276cfa214a0522acad57be5c56b10cb348b3c5117db75f3ac4b0" "checksum winapi-build 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "2d315eee3b34aca4797b2da6b13ed88266e6d612562a0c46390af8299fc699bc" "checksum winapi-i686-pc-windows-gnu 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" "checksum winapi-x86_64-pc-windows-gnu 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" diff --git a/Cargo.toml b/Cargo.toml index c4fba99..ce063d1 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -7,8 +7,8 @@ edition = "2018" description = "Communal zk-SNARK MPC for Public Parameters" documentation = "https://docs.rs/powersoftau/" -homepage = "https://github.com/ebfull/powersoftau" -repository = "https://github.com/ebfull/powersoftau" +homepage = "https://github.com/matterinc/powersoftau" +repository = "https://github.com/matterinc/powersoftau" [dependencies] rand = "0.4" diff --git a/src/accumulator.rs b/src/accumulator.rs index 36828f4..4032911 100644 --- a/src/accumulator.rs +++ b/src/accumulator.rs @@ 
-38,17 +38,17 @@ extern crate ff; extern crate memmap; use memmap::{Mmap, MmapMut}; -use self::ff::{Field, PrimeField}; -use self::byteorder::{ReadBytesExt, BigEndian}; -use self::rand::{SeedableRng, Rng, Rand}; -use self::rand::chacha::ChaChaRng; -use self::pairing::bn256::{Bn256}; -use self::pairing::*; +use ff::{Field, PrimeField}; +use byteorder::{ReadBytesExt, BigEndian}; +use rand::{SeedableRng, Rng, Rand}; +use rand::chacha::ChaChaRng; +use pairing::bn256::{Bn256}; +use pairing::*; use std::io::{self, Read, Write}; use std::sync::{Arc, Mutex}; -use self::generic_array::GenericArray; -use self::typenum::consts::U64; -use self::blake2::{Blake2b, Digest}; +use generic_array::GenericArray; +use typenum::consts::U64; +use blake2::{Blake2b, Digest}; use std::fmt; use super::keypair::*; diff --git a/src/batched_accumulator.rs b/src/batched_accumulator.rs index 6e98c4a..ff4d583 100644 --- a/src/batched_accumulator.rs +++ b/src/batched_accumulator.rs @@ -15,17 +15,17 @@ extern crate itertools; use itertools::Itertools; use memmap::{Mmap, MmapMut}; -use self::ff::{Field, PrimeField}; -use self::byteorder::{ReadBytesExt, BigEndian}; -use self::rand::{SeedableRng, Rng, Rand}; -use self::rand::chacha::ChaChaRng; -use self::pairing::bn256::{Bn256}; -use self::pairing::*; +use ff::{Field, PrimeField}; +use byteorder::{ReadBytesExt, BigEndian}; +use rand::{SeedableRng, Rng, Rand}; +use rand::chacha::ChaChaRng; +use pairing::bn256::{Bn256}; +use pairing::*; use std::io::{self, Read, Write}; use std::sync::{Arc, Mutex}; -use self::generic_array::GenericArray; -use self::typenum::consts::U64; -use self::blake2::{Blake2b, Digest}; +use generic_array::GenericArray; +use typenum::consts::U64; +use blake2::{Blake2b, Digest}; use std::fmt; use super::keypair::*; diff --git a/src/bin/beacon_constrained.rs b/src/bin/beacon_constrained.rs index 69844d2..b4dcdf7 100644 --- a/src/bin/beacon_constrained.rs +++ b/src/bin/beacon_constrained.rs @@ -23,9 +23,9 @@ use 
powersoftau::parameters::PowersOfTauParameters; #[macro_use] extern crate hex_literal; -const input_is_compressed: UseCompression = UseCompression::No; -const compress_the_output: UseCompression = UseCompression::Yes; -const check_input_correctness: CheckForCorrectness = CheckForCorrectness::No; +const INPUT_IS_COMPRESSED: UseCompression = UseCompression::No; +const COMPRESS_THE_OUTPUT: UseCompression = UseCompression::Yes; +const CHECK_INPUT_CORRECTNESS: CheckForCorrectness = CheckForCorrectness::No; fn main() { @@ -91,7 +91,7 @@ fn main() { { let metadata = reader.metadata().expect("unable to get filesystem metadata for `./challenge`"); - let expected_challenge_length = match input_is_compressed { + let expected_challenge_length = match INPUT_IS_COMPRESSED { UseCompression::Yes => { Bn256CeremonyParameters::CONTRIBUTION_BYTE_SIZE }, @@ -114,7 +114,7 @@ fn main() { .create_new(true) .open("response").expect("unable to create `./response` in this directory"); - let required_output_length = match compress_the_output { + let required_output_length = match COMPRESS_THE_OUTPUT { UseCompression::Yes => { Bn256CeremonyParameters::CONTRIBUTION_BYTE_SIZE }, @@ -159,15 +159,15 @@ fn main() { BachedAccumulator::::transform( &readable_map, &mut writable_map, - input_is_compressed, - compress_the_output, - check_input_correctness, + INPUT_IS_COMPRESSED, + COMPRESS_THE_OUTPUT, + CHECK_INPUT_CORRECTNESS, &privkey ).expect("must transform with the key"); println!("Finihsing writing your contribution to `./response`..."); // Write the public key - pubkey.write::(&mut writable_map, compress_the_output).expect("unable to write public key"); + pubkey.write::(&mut writable_map, COMPRESS_THE_OUTPUT).expect("unable to write public key"); // Get the hash of the contribution, so the user can compare later let output_readonly = writable_map.make_read_only().expect("must make a map readonly"); diff --git a/src/bin/new_constrained.rs b/src/bin/new_constrained.rs index f3f8699..4d63365 100644 
--- a/src/bin/new_constrained.rs +++ b/src/bin/new_constrained.rs @@ -15,7 +15,7 @@ use memmap::*; use powersoftau::parameters::PowersOfTauParameters; -const compress_new_challenge: UseCompression = UseCompression::No; +const COMPRESS_NEW_CHALLENGE: UseCompression = UseCompression::No; fn main() { println!("Will generate an empty accumulator for 2^{} powers of tau", Bn256CeremonyParameters::REQUIRED_POWER); @@ -27,7 +27,7 @@ fn main() { .create_new(true) .open("challenge").expect("unable to create `./challenge`"); - let expected_challenge_length = match compress_new_challenge { + let expected_challenge_length = match COMPRESS_NEW_CHALLENGE { UseCompression::Yes => { Bn256CeremonyParameters::CONTRIBUTION_BYTE_SIZE - Bn256CeremonyParameters::PUBLIC_KEY_SIZE }, @@ -57,7 +57,7 @@ fn main() { println!(""); } - BachedAccumulator::::generate_initial(&mut writable_map, compress_new_challenge).expect("generation of initial accumulator is successful"); + BachedAccumulator::::generate_initial(&mut writable_map, COMPRESS_NEW_CHALLENGE).expect("generation of initial accumulator is successful"); writable_map.flush().expect("unable to flush memmap to disk"); // Get the hash of the contribution, so the user can compare later diff --git a/src/bin/verify_transform_constrained.rs b/src/bin/verify_transform_constrained.rs index 61d5b34..8727f66 100644 --- a/src/bin/verify_transform_constrained.rs +++ b/src/bin/verify_transform_constrained.rs @@ -19,9 +19,9 @@ use std::io::{Read, Write}; use powersoftau::parameters::PowersOfTauParameters; -const previous_challenge_is_compressed: UseCompression = UseCompression::No; -const contribution_is_compressed: UseCompression = UseCompression::Yes; -const compress_new_challenge: UseCompression = UseCompression::No; +const PREVIOUS_CHALLENGE_IS_COMPRESSED: UseCompression = UseCompression::No; +const CONTRIBUTION_IS_COMPRESSED: UseCompression = UseCompression::Yes; +const COMPRESS_NEW_CHALLENGE: UseCompression = UseCompression::No; fn main() { 
println!("Will verify and decompress a contribution to accumulator for 2^{} powers of tau", Bn256CeremonyParameters::REQUIRED_POWER); @@ -33,7 +33,7 @@ fn main() { { let metadata = challenge_reader.metadata().expect("unable to get filesystem metadata for `./challenge`"); - let expected_challenge_length = match previous_challenge_is_compressed { + let expected_challenge_length = match PREVIOUS_CHALLENGE_IS_COMPRESSED { UseCompression::Yes => { Bn256CeremonyParameters::CONTRIBUTION_BYTE_SIZE }, @@ -55,7 +55,7 @@ fn main() { { let metadata = response_reader.metadata().expect("unable to get filesystem metadata for `./response`"); - let expected_response_length = match contribution_is_compressed { + let expected_response_length = match CONTRIBUTION_IS_COMPRESSED { UseCompression::Yes => { Bn256CeremonyParameters::CONTRIBUTION_BYTE_SIZE }, @@ -112,7 +112,7 @@ fn main() { } // get the contributor's public key - let public_key = PublicKey::::read::(&response_readable_map, contribution_is_compressed) + let public_key = PublicKey::::read::(&response_readable_map, CONTRIBUTION_IS_COMPRESSED) .expect("wasn't able to deserialize the response file's public key"); @@ -123,8 +123,8 @@ fn main() { &response_readable_map, &public_key, current_accumulator_hash.as_slice(), - previous_challenge_is_compressed, - contribution_is_compressed, + PREVIOUS_CHALLENGE_IS_COMPRESSED, + CONTRIBUTION_IS_COMPRESSED, CheckForCorrectness::No, CheckForCorrectness::Yes, ); @@ -152,7 +152,7 @@ fn main() { println!(""); } - if compress_new_challenge == UseCompression::Yes { + if COMPRESS_NEW_CHALLENGE == UseCompression::Yes { println!("Don't need to recompress the contribution, please copy `./response` as `./new_challenge`"); } else { println!("Verification succeeded! 
Writing to `./new_challenge`..."); diff --git a/src/parameters.rs b/src/parameters.rs index 1222fd9..ab35351 100644 --- a/src/parameters.rs +++ b/src/parameters.rs @@ -8,17 +8,17 @@ extern crate typenum; extern crate byteorder; extern crate ff; -use self::ff::{Field, PrimeField}; -use self::byteorder::{ReadBytesExt, BigEndian}; -use self::rand::{SeedableRng, Rng, Rand}; -use self::rand::chacha::ChaChaRng; -use self::pairing::bn256::{Bn256}; -use self::pairing::*; +use ff::{Field, PrimeField}; +use byteorder::{ReadBytesExt, BigEndian}; +use rand::{SeedableRng, Rng, Rand}; +use rand::chacha::ChaChaRng; +use pairing::bn256::{Bn256}; +use pairing::*; use std::io::{self, Read, Write}; use std::sync::{Arc, Mutex}; -use self::generic_array::GenericArray; -use self::typenum::consts::U64; -use self::blake2::{Blake2b, Digest}; +use generic_array::GenericArray; +use typenum::consts::U64; +use blake2::{Blake2b, Digest}; use std::fmt; use super::keypair::*; diff --git a/src/utils.rs b/src/utils.rs index 4e4ed51..48336bd 100644 --- a/src/utils.rs +++ b/src/utils.rs @@ -8,17 +8,17 @@ extern crate typenum; extern crate byteorder; extern crate ff; -use self::ff::{Field, PrimeField}; -use self::byteorder::{ReadBytesExt, BigEndian}; -use self::rand::{SeedableRng, Rng, Rand}; -use self::rand::chacha::ChaChaRng; -use self::pairing::bn256::{Bn256}; -use self::pairing::*; +use ff::{Field, PrimeField}; +use byteorder::{ReadBytesExt, BigEndian}; +use rand::{SeedableRng, Rng, Rand}; +use rand::chacha::ChaChaRng; +use pairing::bn256::{Bn256}; +use pairing::*; use std::io::{self, Read, Write}; use std::sync::{Arc, Mutex}; -use self::generic_array::GenericArray; -use self::typenum::consts::U64; -use self::blake2::{Blake2b, Digest}; +use generic_array::GenericArray; +use typenum::consts::U64; +use blake2::{Blake2b, Digest}; use std::fmt; use super::parameters::*; From 4db0a32f5b5e573494fbb43597f299b0a3e2d367 Mon Sep 17 00:00:00 2001 From: Alex Vlasov Date: Wed, 27 Feb 2019 19:10:19 +0300 
Subject: [PATCH 17/23] cleanup for freeze --- Cargo.lock | 243 ++++++++++---------------- Cargo.toml | 22 ++- src/batched_accumulator.rs | 4 - src/bin/beacon.rs.nocompile | 142 --------------- src/bin/beacon_constrained.rs | 22 ++- src/bin/compute.rs.nocompile | 128 -------------- src/bin/compute_constrained.rs | 20 +-- src/bin/new.rs.nocompile | 24 --- src/bin/verify_transform.rs.nocompile | 117 ------------- test.sh | 7 +- 10 files changed, 126 insertions(+), 603 deletions(-) delete mode 100644 src/bin/beacon.rs.nocompile delete mode 100644 src/bin/compute.rs.nocompile delete mode 100644 src/bin/new.rs.nocompile delete mode 100644 src/bin/verify_transform.rs.nocompile diff --git a/Cargo.lock b/Cargo.lock index 80cf983..968b511 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1,19 +1,17 @@ [[package]] name = "bellman" -version = "0.1.2" -source = "git+https://github.com/matterinc/bellman#6e5cfe211feec9f0e612a47a8a19d31f37ea9071" +version = "0.1.3" +source = "git+https://github.com/matterinc/bellman#e775b47d99562243f4ed3ab432eb3a56ad9493a3" dependencies = [ "bit-vec 0.4.4 (registry+https://github.com/rust-lang/crates.io-index)", - "byteorder 1.2.7 (registry+https://github.com/rust-lang/crates.io-index)", + "byteorder 1.3.1 (registry+https://github.com/rust-lang/crates.io-index)", "crossbeam 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)", "ff 0.5.0 (git+https://github.com/matterinc/ff)", "futures 0.1.25 (registry+https://github.com/rust-lang/crates.io-index)", "futures-cpupool 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)", - "num_cpus 1.9.0 (registry+https://github.com/rust-lang/crates.io-index)", - "pairing 0.15.1 (git+https://github.com/matterinc/pairing)", - "pbr 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)", - "rand 0.4.5 (registry+https://github.com/rust-lang/crates.io-index)", - "time 0.1.42 (registry+https://github.com/rust-lang/crates.io-index)", + "num_cpus 1.10.0 
(registry+https://github.com/rust-lang/crates.io-index)", + "pairing 0.15.2 (git+https://github.com/matterinc/pairing)", + "rand 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -21,11 +19,6 @@ name = "bit-vec" version = "0.4.4" source = "registry+https://github.com/rust-lang/crates.io-index" -[[package]] -name = "bitflags" -version = "1.0.4" -source = "registry+https://github.com/rust-lang/crates.io-index" - [[package]] name = "blake2" version = "0.6.1" @@ -44,7 +37,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "byteorder" -version = "1.2.7" +version = "1.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] @@ -76,7 +69,7 @@ dependencies = [ [[package]] name = "either" -version = "1.5.0" +version = "1.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] @@ -84,9 +77,9 @@ name = "ff" version = "0.5.0" source = "git+https://github.com/matterinc/ff#056a13b95f4b971a9ae2c6fbb5fbc9f1e4f4828e" dependencies = [ - "byteorder 1.2.7 (registry+https://github.com/rust-lang/crates.io-index)", + "byteorder 1.3.1 (registry+https://github.com/rust-lang/crates.io-index)", "ff_derive 0.4.0 (git+https://github.com/matterinc/ff)", - "rand 0.4.5 (registry+https://github.com/rust-lang/crates.io-index)", + "rand 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -97,24 +90,15 @@ dependencies = [ "num-bigint 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", "num-integer 0.1.39 (registry+https://github.com/rust-lang/crates.io-index)", "num-traits 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)", - "proc-macro2 0.4.25 (registry+https://github.com/rust-lang/crates.io-index)", - "quote 0.6.10 (registry+https://github.com/rust-lang/crates.io-index)", - "serde_derive 1.0.85 (registry+https://github.com/rust-lang/crates.io-index)", + "proc-macro2 0.4.27 
(registry+https://github.com/rust-lang/crates.io-index)", + "quote 0.6.11 (registry+https://github.com/rust-lang/crates.io-index)", + "serde_derive 1.0.88 (registry+https://github.com/rust-lang/crates.io-index)", "syn 0.14.9 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] -name = "fuchsia-zircon" -version = "0.3.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "bitflags 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)", - "fuchsia-zircon-sys 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "fuchsia-zircon-sys" -version = "0.3.3" +name = "fuchsia-cprng" +version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] @@ -128,7 +112,7 @@ version = "0.1.8" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "futures 0.1.25 (registry+https://github.com/rust-lang/crates.io-index)", - "num_cpus 1.9.0 (registry+https://github.com/rust-lang/crates.io-index)", + "num_cpus 1.10.0 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -152,7 +136,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "hex-literal" -version = "0.1.1" +version = "0.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "hex-literal-impl 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", @@ -172,7 +156,7 @@ name = "itertools" version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "either 1.5.0 (registry+https://github.com/rust-lang/crates.io-index)", + "either 1.5.1 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -180,18 +164,9 @@ name = "itoa" version = "0.4.3" source = "registry+https://github.com/rust-lang/crates.io-index" -[[package]] -name = "kernel32-sys" -version = "0.2.2" -source = "registry+https://github.com/rust-lang/crates.io-index" 
-dependencies = [ - "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", - "winapi-build 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", -] - [[package]] name = "libc" -version = "0.2.47" +version = "0.2.49" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] @@ -199,7 +174,7 @@ name = "memmap" version = "0.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "libc 0.2.47 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.49 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -232,54 +207,42 @@ source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "num_cpus" -version = "1.9.0" +version = "1.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "libc 0.2.47 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.49 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "pairing" -version = "0.15.1" -source = "git+https://github.com/matterinc/pairing#3279e322eb9239e7f6e98f0abb9421e4e7f37c25" +version = "0.15.2" +source = "git+https://github.com/matterinc/pairing#84b57df3259c2f41d97744792cc89269e261d44e" dependencies = [ - "byteorder 1.2.7 (registry+https://github.com/rust-lang/crates.io-index)", + "byteorder 1.3.1 (registry+https://github.com/rust-lang/crates.io-index)", "ff 0.5.0 (git+https://github.com/matterinc/ff)", "hex 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)", - "rand 0.4.5 (registry+https://github.com/rust-lang/crates.io-index)", - "serde 1.0.85 (registry+https://github.com/rust-lang/crates.io-index)", - "serde_derive 1.0.85 (registry+https://github.com/rust-lang/crates.io-index)", - "serde_json 1.0.36 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "pbr" -version = "1.0.1" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.47 (registry+https://github.com/rust-lang/crates.io-index)", - "termion 1.5.1 (registry+https://github.com/rust-lang/crates.io-index)", - "time 0.1.42 (registry+https://github.com/rust-lang/crates.io-index)", - "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", + "rand 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.88 (registry+https://github.com/rust-lang/crates.io-index)", + "serde_derive 1.0.88 (registry+https://github.com/rust-lang/crates.io-index)", + "serde_json 1.0.38 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "powersoftau" -version = "0.1.2" +version = "0.2.0" dependencies = [ - "bellman 0.1.2 (git+https://github.com/matterinc/bellman)", + "bellman 0.1.3 (git+https://github.com/matterinc/bellman)", "blake2 0.6.1 (registry+https://github.com/rust-lang/crates.io-index)", - "byteorder 1.2.7 (registry+https://github.com/rust-lang/crates.io-index)", + "byteorder 1.3.1 (registry+https://github.com/rust-lang/crates.io-index)", "crossbeam 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)", "ff 0.5.0 (git+https://github.com/matterinc/ff)", "generic-array 0.8.3 (registry+https://github.com/rust-lang/crates.io-index)", - "hex-literal 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", + "hex-literal 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)", "itertools 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)", "memmap 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)", - "num_cpus 1.9.0 (registry+https://github.com/rust-lang/crates.io-index)", - "pairing 0.15.1 (git+https://github.com/matterinc/pairing)", - "rand 0.4.5 (registry+https://github.com/rust-lang/crates.io-index)", + "num_cpus 1.10.0 (registry+https://github.com/rust-lang/crates.io-index)", + 
"pairing 0.15.2 (git+https://github.com/matterinc/pairing)", + "rand 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)", "rust-crypto 0.2.36 (registry+https://github.com/rust-lang/crates.io-index)", "typenum 1.10.0 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -299,7 +262,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "proc-macro2" -version = "0.4.25" +version = "0.4.27" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "unicode-xid 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", @@ -307,37 +270,44 @@ dependencies = [ [[package]] name = "quote" -version = "0.6.10" +version = "0.6.11" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "proc-macro2 0.4.25 (registry+https://github.com/rust-lang/crates.io-index)", + "proc-macro2 0.4.27 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "rand" -version = "0.3.22" +version = "0.3.23" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "fuchsia-zircon 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.47 (registry+https://github.com/rust-lang/crates.io-index)", - "rand 0.4.5 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.49 (registry+https://github.com/rust-lang/crates.io-index)", + "rand 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "rand" -version = "0.4.5" +version = "0.4.6" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "fuchsia-zircon 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.47 (registry+https://github.com/rust-lang/crates.io-index)", - "rand_core 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)", + "fuchsia-cprng 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.49 
(registry+https://github.com/rust-lang/crates.io-index)", + "rand_core 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", "rdrand 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "rand_core" -version = "0.3.0" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "rand_core 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "rand_core" +version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] @@ -345,30 +315,22 @@ name = "rdrand" version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "rand_core 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)", + "rand_core 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "redox_syscall" -version = "0.1.50" +version = "0.1.51" source = "registry+https://github.com/rust-lang/crates.io-index" -[[package]] -name = "redox_termios" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "redox_syscall 0.1.50 (registry+https://github.com/rust-lang/crates.io-index)", -] - [[package]] name = "rust-crypto" version = "0.2.36" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "gcc 0.3.55 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.47 (registry+https://github.com/rust-lang/crates.io-index)", - "rand 0.3.22 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.49 (registry+https://github.com/rust-lang/crates.io-index)", + "rand 0.3.23 (registry+https://github.com/rust-lang/crates.io-index)", "rustc-serialize 0.3.24 (registry+https://github.com/rust-lang/crates.io-index)", "time 0.1.42 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -385,27 +347,27 @@ source = 
"registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "serde" -version = "1.0.85" +version = "1.0.88" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "serde_derive" -version = "1.0.85" +version = "1.0.88" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "proc-macro2 0.4.25 (registry+https://github.com/rust-lang/crates.io-index)", - "quote 0.6.10 (registry+https://github.com/rust-lang/crates.io-index)", + "proc-macro2 0.4.27 (registry+https://github.com/rust-lang/crates.io-index)", + "quote 0.6.11 (registry+https://github.com/rust-lang/crates.io-index)", "syn 0.15.26 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "serde_json" -version = "1.0.36" +version = "1.0.38" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "itoa 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)", "ryu 0.2.7 (registry+https://github.com/rust-lang/crates.io-index)", - "serde 1.0.85 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.88 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -413,8 +375,8 @@ name = "syn" version = "0.14.9" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "proc-macro2 0.4.25 (registry+https://github.com/rust-lang/crates.io-index)", - "quote 0.6.10 (registry+https://github.com/rust-lang/crates.io-index)", + "proc-macro2 0.4.27 (registry+https://github.com/rust-lang/crates.io-index)", + "quote 0.6.11 (registry+https://github.com/rust-lang/crates.io-index)", "unicode-xid 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -423,28 +385,18 @@ name = "syn" version = "0.15.26" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "proc-macro2 0.4.25 (registry+https://github.com/rust-lang/crates.io-index)", - "quote 0.6.10 (registry+https://github.com/rust-lang/crates.io-index)", + 
"proc-macro2 0.4.27 (registry+https://github.com/rust-lang/crates.io-index)", + "quote 0.6.11 (registry+https://github.com/rust-lang/crates.io-index)", "unicode-xid 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", ] -[[package]] -name = "termion" -version = "1.5.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "libc 0.2.47 (registry+https://github.com/rust-lang/crates.io-index)", - "redox_syscall 0.1.50 (registry+https://github.com/rust-lang/crates.io-index)", - "redox_termios 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", -] - [[package]] name = "time" version = "0.1.42" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "libc 0.2.47 (registry+https://github.com/rust-lang/crates.io-index)", - "redox_syscall 0.1.50 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.49 (registry+https://github.com/rust-lang/crates.io-index)", + "redox_syscall 0.1.51 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -458,11 +410,6 @@ name = "unicode-xid" version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -[[package]] -name = "winapi" -version = "0.2.8" -source = "registry+https://github.com/rust-lang/crates.io-index" - [[package]] name = "winapi" version = "0.3.6" @@ -472,11 +419,6 @@ dependencies = [ "winapi-x86_64-pc-windows-gnu 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", ] -[[package]] -name = "winapi-build" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" - [[package]] name = "winapi-i686-pc-windows-gnu" version = "0.4.0" @@ -488,64 +430,57 @@ version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" [metadata] -"checksum bellman 0.1.2 (git+https://github.com/matterinc/bellman)" = "" +"checksum bellman 0.1.3 (git+https://github.com/matterinc/bellman)" = "" 
"checksum bit-vec 0.4.4 (registry+https://github.com/rust-lang/crates.io-index)" = "02b4ff8b16e6076c3e14220b39fbc1fabb6737522281a388998046859400895f" -"checksum bitflags 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)" = "228047a76f468627ca71776ecdebd732a3423081fcf5125585bcd7c49886ce12" "checksum blake2 0.6.1 (registry+https://github.com/rust-lang/crates.io-index)" = "53bf612c0f2839b7e764ebac65d6cb985f7c6812de399d0728038f4b1da141bc" "checksum byte-tools 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "560c32574a12a89ecd91f5e742165893f86e3ab98d21f8ea548658eb9eef5f40" -"checksum byteorder 1.2.7 (registry+https://github.com/rust-lang/crates.io-index)" = "94f88df23a25417badc922ab0f5716cc1330e87f71ddd9203b3a3ccd9cedf75d" +"checksum byteorder 1.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "a019b10a2a7cdeb292db131fc8113e57ea2a908f6e7894b0c3c671893b65dbeb" "checksum constant_time_eq 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "8ff012e225ce166d4422e0e78419d901719760f62ae2b7969ca6b564d1b54a9e" "checksum crossbeam 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)" = "24ce9782d4d5c53674646a6a4c1863a21a8fc0cb649b3c94dfc16e45071dea19" "checksum crypto-mac 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "779015233ac67d65098614aec748ac1c756ab6677fa2e14cf8b37c08dfed1198" "checksum digest 0.6.2 (registry+https://github.com/rust-lang/crates.io-index)" = "e5b29bf156f3f4b3c4f610a25ff69370616ae6e0657d416de22645483e72af0a" -"checksum either 1.5.0 (registry+https://github.com/rust-lang/crates.io-index)" = "3be565ca5c557d7f59e7cfcf1844f9e3033650c929c6566f511e8005f205c1d0" +"checksum either 1.5.1 (registry+https://github.com/rust-lang/crates.io-index)" = "c67353c641dc847124ea1902d69bd753dee9bb3beff9aa3662ecf86c971d1fac" "checksum ff 0.5.0 (git+https://github.com/matterinc/ff)" = "" "checksum ff_derive 0.4.0 (git+https://github.com/matterinc/ff)" = "" -"checksum fuchsia-zircon 0.3.3 
(registry+https://github.com/rust-lang/crates.io-index)" = "2e9763c69ebaae630ba35f74888db465e49e259ba1bc0eda7d06f4a067615d82" -"checksum fuchsia-zircon-sys 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "3dcaa9ae7725d12cdb85b3ad99a434db70b468c09ded17e012d86b5c1010f7a7" +"checksum fuchsia-cprng 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "a06f77d526c1a601b7c4cdd98f54b5eaabffc14d5f2f0296febdc7f357c6d3ba" "checksum futures 0.1.25 (registry+https://github.com/rust-lang/crates.io-index)" = "49e7653e374fe0d0c12de4250f0bdb60680b8c80eed558c5c7538eec9c89e21b" "checksum futures-cpupool 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)" = "ab90cde24b3319636588d0c35fe03b1333857621051837ed769faefb4c2162e4" "checksum gcc 0.3.55 (registry+https://github.com/rust-lang/crates.io-index)" = "8f5f3913fa0bfe7ee1fd8248b6b9f42a5af4b9d65ec2dd2c3c26132b950ecfc2" "checksum generic-array 0.8.3 (registry+https://github.com/rust-lang/crates.io-index)" = "fceb69994e330afed50c93524be68c42fa898c2d9fd4ee8da03bd7363acd26f2" "checksum hex 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)" = "805026a5d0141ffc30abb3be3173848ad46a1b1664fe632428479619a3644d77" -"checksum hex-literal 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "4da5f0e01bd8a71a224a4eedecaacfcabda388dbb7a80faf04d3514287572d95" +"checksum hex-literal 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "27455ce8b4a6666c87220e4b59c9a83995476bdadc10197905e61dbe906e36fa" "checksum hex-literal-impl 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "1d340b6514f232f6db1bd16db65302a5278a04fef9ce867cb932e7e5fa21130a" "checksum itertools 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)" = "5b8467d9c1cebe26feb08c640139247fac215782d35371ade9a2136ed6085358" "checksum itoa 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)" = "1306f3464951f30e30d12373d31c79fbd52d236e5e896fd92f96ec7babbbe60b" -"checksum kernel32-sys 
0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "7507624b29483431c0ba2d82aece8ca6cdba9382bff4ddd0f7490560c056098d" -"checksum libc 0.2.47 (registry+https://github.com/rust-lang/crates.io-index)" = "48450664a984b25d5b479554c29cc04e3150c97aa4c01da5604a2d4ed9151476" +"checksum libc 0.2.49 (registry+https://github.com/rust-lang/crates.io-index)" = "413f3dfc802c5dc91dc570b05125b6cda9855edfaa9825c9849807876376e70e" "checksum memmap 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "6585fd95e7bb50d6cc31e20d4cf9afb4e2ba16c5846fc76793f11218da9c475b" "checksum nodrop 0.1.13 (registry+https://github.com/rust-lang/crates.io-index)" = "2f9667ddcc6cc8a43afc9b7917599d7216aa09c463919ea32c59ed6cac8bc945" "checksum num-bigint 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "57450397855d951f1a41305e54851b1a7b8f5d2e349543a02a2effe25459f718" "checksum num-integer 0.1.39 (registry+https://github.com/rust-lang/crates.io-index)" = "e83d528d2677f0518c570baf2b7abdcf0cd2d248860b68507bdcb3e91d4c0cea" "checksum num-traits 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)" = "0b3a5d7cc97d6d30d8b9bc8fa19bf45349ffe46241e8816f50f62f6d6aaabee1" -"checksum num_cpus 1.9.0 (registry+https://github.com/rust-lang/crates.io-index)" = "5a69d464bdc213aaaff628444e99578ede64e9c854025aa43b9796530afa9238" -"checksum pairing 0.15.1 (git+https://github.com/matterinc/pairing)" = "" -"checksum pbr 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "deb73390ab68d81992bd994d145f697451bb0b54fd39738e72eef32458ad6907" +"checksum num_cpus 1.10.0 (registry+https://github.com/rust-lang/crates.io-index)" = "1a23f0ed30a54abaa0c7e83b1d2d87ada7c3c23078d1d87815af3e3b6385fbba" +"checksum pairing 0.15.2 (git+https://github.com/matterinc/pairing)" = "" "checksum proc-macro-hack 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)" = "2c725b36c99df7af7bf9324e9c999b9e37d92c8f8caf106d82e1d7953218d2d8" "checksum proc-macro-hack-impl 0.4.1 
(registry+https://github.com/rust-lang/crates.io-index)" = "2b753ad9ed99dd8efeaa7d2fb8453c8f6bc3e54b97966d35f1bc77ca6865254a" -"checksum proc-macro2 0.4.25 (registry+https://github.com/rust-lang/crates.io-index)" = "d3797b7142c9aa74954e351fc089bbee7958cebbff6bf2815e7ffff0b19f547d" -"checksum quote 0.6.10 (registry+https://github.com/rust-lang/crates.io-index)" = "53fa22a1994bd0f9372d7a816207d8a2677ad0325b073f5c5332760f0fb62b5c" -"checksum rand 0.3.22 (registry+https://github.com/rust-lang/crates.io-index)" = "15a732abf9d20f0ad8eeb6f909bf6868722d9a06e1e50802b6a70351f40b4eb1" -"checksum rand 0.4.5 (registry+https://github.com/rust-lang/crates.io-index)" = "dee497e66d8d76bf08ce20c8d36e16f93749ab0bf89975b4f8ae5cee660c2da2" -"checksum rand_core 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "0905b6b7079ec73b314d4c748701f6931eb79fd97c668caa3f1899b22b32c6db" +"checksum proc-macro2 0.4.27 (registry+https://github.com/rust-lang/crates.io-index)" = "4d317f9caece796be1980837fd5cb3dfec5613ebdb04ad0956deea83ce168915" +"checksum quote 0.6.11 (registry+https://github.com/rust-lang/crates.io-index)" = "cdd8e04bd9c52e0342b406469d494fcb033be4bdbe5c606016defbb1681411e1" +"checksum rand 0.3.23 (registry+https://github.com/rust-lang/crates.io-index)" = "64ac302d8f83c0c1974bf758f6b041c6c8ada916fbb44a609158ca8b064cc76c" +"checksum rand 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)" = "552840b97013b1a26992c11eac34bdd778e464601a4c2054b5f0bff7c6761293" +"checksum rand_core 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "7a6fdeb83b075e8266dcc8762c22776f6877a63111121f5f8c7411e5be7eed4b" +"checksum rand_core 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "d0e7a549d590831370895ab7ba4ea0c1b6b011d106b5ff2da6eee112615e6dc0" "checksum rdrand 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "678054eb77286b51581ba43620cc911abf02758c91f93f479767aed0f90458b2" -"checksum redox_syscall 0.1.50 
(registry+https://github.com/rust-lang/crates.io-index)" = "52ee9a534dc1301776eff45b4fa92d2c39b1d8c3d3357e6eb593e0d795506fc2" -"checksum redox_termios 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "7e891cfe48e9100a70a3b6eb652fef28920c117d366339687bd5576160db0f76" +"checksum redox_syscall 0.1.51 (registry+https://github.com/rust-lang/crates.io-index)" = "423e376fffca3dfa06c9e9790a9ccd282fafb3cc6e6397d01dbf64f9bacc6b85" "checksum rust-crypto 0.2.36 (registry+https://github.com/rust-lang/crates.io-index)" = "f76d05d3993fd5f4af9434e8e436db163a12a9d40e1a58a726f27a01dfd12a2a" "checksum rustc-serialize 0.3.24 (registry+https://github.com/rust-lang/crates.io-index)" = "dcf128d1287d2ea9d80910b5f1120d0b8eede3fbf1abe91c40d39ea7d51e6fda" "checksum ryu 0.2.7 (registry+https://github.com/rust-lang/crates.io-index)" = "eb9e9b8cde282a9fe6a42dd4681319bfb63f121b8a8ee9439c6f4107e58a46f7" -"checksum serde 1.0.85 (registry+https://github.com/rust-lang/crates.io-index)" = "534b8b91a95e0f71bca3ed5824752d558da048d4248c91af873b63bd60519752" -"checksum serde_derive 1.0.85 (registry+https://github.com/rust-lang/crates.io-index)" = "a915306b0f1ac5607797697148c223bedeaa36bcc2e28a01441cd638cc6567b4" -"checksum serde_json 1.0.36 (registry+https://github.com/rust-lang/crates.io-index)" = "574378d957d6dcdf1bbb5d562a15cbd5e644159432f84634b94e485267abbcc7" +"checksum serde 1.0.88 (registry+https://github.com/rust-lang/crates.io-index)" = "9f301d728f2b94c9a7691c90f07b0b4e8a4517181d9461be94c04bddeb4bd850" +"checksum serde_derive 1.0.88 (registry+https://github.com/rust-lang/crates.io-index)" = "beed18e6f5175aef3ba670e57c60ef3b1b74d250d962a26604bff4c80e970dd4" +"checksum serde_json 1.0.38 (registry+https://github.com/rust-lang/crates.io-index)" = "27dce848e7467aa0e2fcaf0a413641499c0b745452aaca1194d24dedde9e13c9" "checksum syn 0.14.9 (registry+https://github.com/rust-lang/crates.io-index)" = "261ae9ecaa397c42b960649561949d69311f08eeaea86a65696e6e46517cf741" "checksum syn 0.15.26 
(registry+https://github.com/rust-lang/crates.io-index)" = "f92e629aa1d9c827b2bb8297046c1ccffc57c99b947a680d3ccff1f136a3bee9" -"checksum termion 1.5.1 (registry+https://github.com/rust-lang/crates.io-index)" = "689a3bdfaab439fd92bc87df5c4c78417d3cbe537487274e9b0b2dce76e92096" "checksum time 0.1.42 (registry+https://github.com/rust-lang/crates.io-index)" = "db8dcfca086c1143c9270ac42a2bbd8a7ee477b78ac8e45b19abfb0cbede4b6f" "checksum typenum 1.10.0 (registry+https://github.com/rust-lang/crates.io-index)" = "612d636f949607bdf9b123b4a6f6d966dedf3ff669f7f045890d3a4a73948169" "checksum unicode-xid 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "fc72304796d0818e357ead4e000d19c9c174ab23dc11093ac919054d20a6a7fc" -"checksum winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)" = "167dc9d6949a9b857f3451275e911c3f44255842c1f7a76f33c55103a909087a" "checksum winapi 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)" = "92c1eb33641e276cfa214a0522acad57be5c56b10cb348b3c5117db75f3ac4b0" -"checksum winapi-build 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "2d315eee3b34aca4797b2da6b13ed88266e6d612562a0c46390af8299fc699bc" "checksum winapi-i686-pc-windows-gnu 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" "checksum winapi-x86_64-pc-windows-gnu 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" diff --git a/Cargo.toml b/Cargo.toml index ce063d1..9edb8d2 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,14 +1,14 @@ [package] name = "powersoftau" -version = "0.1.2" +version = "0.2.0" authors = ["Sean Bowe", "Alex Vlasov"] license = "MIT/Apache-2.0" edition = "2018" description = "Communal zk-SNARK MPC for Public Parameters" documentation = "https://docs.rs/powersoftau/" -homepage = "https://github.com/matterinc/powersoftau" -repository = 
"https://github.com/matterinc/powersoftau" +homepage = "https://github.com/matter-labs/powersoftau" +repository = "https://github.com/matter-labs/powersoftau" [dependencies] rand = "0.4" @@ -24,13 +24,19 @@ rust-crypto = "0.2" memmap = "0.7.0" itertools = "0.8.0" -#ff = { path = "../ff", features = ["derive"] } -#pairing = { path = "../pairing", features = ["expose-arith"]} -#bellman = { path = "../bellman" } - ff = { git = 'https://github.com/matterinc/ff', features = ["derive"] } -pairing = { git = 'https://github.com/matterinc/pairing', features = ["expose-arith"] } +pairing = { git = 'https://github.com/matterinc/pairing', features = ["expose-arith"]} bellman = { git = 'https://github.com/matterinc/bellman'} +#ff = { git = 'https://github.com/matterinc/ff', features = ["derive"], rev = "056a13b9" } +#pairing = { git = 'https://github.com/matterinc/pairing', features = ["expose-arith"], rev = "84b57df3" } +#bellman = { git = 'https://github.com/matterinc/bellman', rev = "e775b47d"} + +#[patch.'https://https://github.com/matterinc/ff'] +#ff = { git = 'https://github.com/matterinc/ff', features = ["derive"], rev = "056a13b9" } + +#[patch.'https://https://github.com/matterinc/pairing'] +#pairing = { git = 'https://github.com/matterinc/pairing', features = ["expose-arith"], rev = "84b57df3" } + [features] diff --git a/src/batched_accumulator.rs b/src/batched_accumulator.rs index ff4d583..e840e91 100644 --- a/src/batched_accumulator.rs +++ b/src/batched_accumulator.rs @@ -432,10 +432,6 @@ impl BachedAccumulator { Ok(()) } - // fn read_point( - - // ) -> - fn read_points_chunk( &mut self, from: usize, diff --git a/src/bin/beacon.rs.nocompile b/src/bin/beacon.rs.nocompile deleted file mode 100644 index 5f6f50d..0000000 --- a/src/bin/beacon.rs.nocompile +++ /dev/null @@ -1,142 +0,0 @@ -extern crate powersoftau; -extern crate rand; -extern crate blake2; -extern crate byteorder; - -#[macro_use] -extern crate hex_literal; - -extern crate crypto; - -use powersoftau::*; - 
-use std::fs::OpenOptions; -use std::io::{Read, BufReader, Write, BufWriter}; - -fn main() { - // Create an RNG based on the outcome of the random beacon - let mut rng = { - use byteorder::{ReadBytesExt, BigEndian}; - use rand::{SeedableRng}; - use rand::chacha::ChaChaRng; - use crypto::sha2::Sha256; - use crypto::digest::Digest; - - // Place block hash here (block number #514200) - let mut cur_hash: [u8; 32] = hex!("00000000000000000034b33e842ac1c50456abe5fa92b60f6b3dfc5d247f7b58"); - - // Performs 2^n hash iterations over it - const N: usize = 42; - - for i in 0..(1u64<().expect("digest is large enough for this to work"); - } - - ChaChaRng::from_seed(&seed) - }; - - // Try to load `./challenge` from disk. - let reader = OpenOptions::new() - .read(true) - .open("challenge").expect("unable open `./challenge` in this directory"); - - { - let metadata = reader.metadata().expect("unable to get filesystem metadata for `./challenge`"); - if metadata.len() != (ACCUMULATOR_BYTE_SIZE as u64) { - panic!("The size of `./challenge` should be {}, but it's {}, so something isn't right.", ACCUMULATOR_BYTE_SIZE, metadata.len()); - } - } - - let reader = BufReader::new(reader); - let mut reader = HashReader::new(reader); - - // Create `./response` in this directory - let writer = OpenOptions::new() - .read(false) - .write(true) - .create_new(true) - .open("response").expect("unable to create `./response` in this directory"); - - let writer = BufWriter::new(writer); - let mut writer = HashWriter::new(writer); - - println!("Reading `./challenge` into memory..."); - - // Read the BLAKE2b hash of the previous contribution - { - // We don't need to do anything with it, but it's important for - // the hash chain. 
- let mut tmp = [0; 64]; - reader.read_exact(&mut tmp).expect("unable to read BLAKE2b hash of previous contribution"); - } - - // Load the current accumulator into memory - let mut current_accumulator = Accumulator::deserialize(&mut reader, UseCompression::No, CheckForCorrectness::No).expect("unable to read uncompressed accumulator"); - - // Get the hash of the current accumulator - let current_accumulator_hash = reader.into_hash(); - - // Construct our keypair using the RNG we created above - let (pubkey, privkey) = keypair(&mut rng, current_accumulator_hash.as_ref()); - - // Perform the transformation - println!("Computing, this could take a while..."); - current_accumulator.transform(&privkey); - println!("Writing your contribution to `./response`..."); - - // Write the hash of the input accumulator - writer.write_all(¤t_accumulator_hash.as_ref()).expect("unable to write BLAKE2b hash of input accumulator"); - - // Write the transformed accumulator (in compressed form, to save upload bandwidth for disadvantaged - // players.) 
- current_accumulator.serialize(&mut writer, UseCompression::Yes).expect("unable to write transformed accumulator"); - - // Write the public key - pubkey.serialize(&mut writer).expect("unable to write public key"); - - // Get the hash of the contribution, so the user can compare later - let contribution_hash = writer.into_hash(); - - print!("Done!\n\n\ - Your contribution has been written to `./response`\n\n\ - The BLAKE2b hash of `./response` is:\n"); - - for line in contribution_hash.as_slice().chunks(16) { - print!("\t"); - for section in line.chunks(4) { - for b in section { - print!("{:02x}", b); - } - print!(" "); - } - println!(""); - } - - println!("\n"); -} diff --git a/src/bin/beacon_constrained.rs b/src/bin/beacon_constrained.rs index b4dcdf7..8e0438d 100644 --- a/src/bin/beacon_constrained.rs +++ b/src/bin/beacon_constrained.rs @@ -40,26 +40,24 @@ fn main() { use crypto::sha2::Sha256; use crypto::digest::Digest; - // Place block hash here (block number #514200) - let mut cur_hash: [u8; 32] = hex!("00000000000000000034b33e842ac1c50456abe5fa92b60f6b3dfc5d247f7b58"); + // Place block hash here (block number #564321) + let mut cur_hash: [u8; 32] = hex!("0000000000000000000a558a61ddc8ee4e488d647a747fe4dcc362fe2026c620"); // Performs 2^n hash iterations over it - // const N: usize = 42; - - const N: usize = 16; + const N: usize = 31; for i in 0..(1u64<().expect("digest is large enough for this to work"); - } - - ChaChaRng::from_seed(&seed) - }; - - // Try to load `./challenge` from disk. 
- let reader = OpenOptions::new() - .read(true) - .open("challenge").expect("unable open `./challenge` in this directory"); - - { - let metadata = reader.metadata().expect("unable to get filesystem metadata for `./challenge`"); - if metadata.len() != (ACCUMULATOR_BYTE_SIZE as u64) { - panic!("The size of `./challenge` should be {}, but it's {}, so something isn't right.", ACCUMULATOR_BYTE_SIZE, metadata.len()); - } - } - - let reader = BufReader::new(reader); - let mut reader = HashReader::new(reader); - - // Create `./response` in this directory - let writer = OpenOptions::new() - .read(false) - .write(true) - .create_new(true) - .open("response").expect("unable to create `./response` in this directory"); - - let writer = BufWriter::new(writer); - let mut writer = HashWriter::new(writer); - - println!("Reading `./challenge` into memory..."); - - // Read the BLAKE2b hash of the previous contribution - { - // We don't need to do anything with it, but it's important for - // the hash chain. 
- let mut tmp = [0; 64]; - reader.read_exact(&mut tmp).expect("unable to read BLAKE2b hash of previous contribution"); - } - - // Load the current accumulator into memory - let mut current_accumulator = Accumulator::deserialize(&mut reader, UseCompression::No, CheckForCorrectness::No).expect("unable to read uncompressed accumulator"); - - // Get the hash of the current accumulator - let current_accumulator_hash = reader.into_hash(); - - // Construct our keypair using the RNG we created above - let (pubkey, privkey) = keypair(&mut rng, current_accumulator_hash.as_ref()); - - // Perform the transformation - println!("Computing, this could take a while..."); - current_accumulator.transform(&privkey); - println!("Writing your contribution to `./response`..."); - - // Write the hash of the input accumulator - writer.write_all(¤t_accumulator_hash.as_ref()).expect("unable to write BLAKE2b hash of input accumulator"); - - // Write the transformed accumulator (in compressed form, to save upload bandwidth for disadvantaged - // players.) 
- current_accumulator.serialize(&mut writer, UseCompression::Yes).expect("unable to write transformed accumulator"); - - // Write the public key - pubkey.serialize(&mut writer).expect("unable to write public key"); - - // Get the hash of the contribution, so the user can compare later - let contribution_hash = writer.into_hash(); - - print!("Done!\n\n\ - Your contribution has been written to `./response`\n\n\ - The BLAKE2b hash of `./response` is:\n"); - - for line in contribution_hash.as_slice().chunks(16) { - print!("\t"); - for section in line.chunks(4) { - for b in section { - print!("{:02x}", b); - } - print!(" "); - } - println!(""); - } - - println!("\n"); -} diff --git a/src/bin/compute_constrained.rs b/src/bin/compute_constrained.rs index 05f8f27..6d904d1 100644 --- a/src/bin/compute_constrained.rs +++ b/src/bin/compute_constrained.rs @@ -19,9 +19,9 @@ use std::io::Write; use powersoftau::parameters::PowersOfTauParameters; -const input_is_compressed: UseCompression = UseCompression::No; -const compress_the_output: UseCompression = UseCompression::Yes; -const check_input_correctness: CheckForCorrectness = CheckForCorrectness::No; +const INPUT_IS_COMPRESSED: UseCompression = UseCompression::No; +const COMPRESS_THE_OUTPUT: UseCompression = UseCompression::Yes; +const CHECK_INPUT_CORRECTNESS: CheckForCorrectness = CheckForCorrectness::No; fn main() { println!("Will contribute to accumulator for 2^{} powers of tau", Bn256CeremonyParameters::REQUIRED_POWER); @@ -72,7 +72,7 @@ fn main() { { let metadata = reader.metadata().expect("unable to get filesystem metadata for `./challenge`"); - let expected_challenge_length = match input_is_compressed { + let expected_challenge_length = match INPUT_IS_COMPRESSED { UseCompression::Yes => { Bn256CeremonyParameters::CONTRIBUTION_BYTE_SIZE }, @@ -95,7 +95,7 @@ fn main() { .create_new(true) .open("response").expect("unable to create `./response` in this directory"); - let required_output_length = match compress_the_output { + 
let required_output_length = match COMPRESS_THE_OUTPUT { UseCompression::Yes => { Bn256CeremonyParameters::CONTRIBUTION_BYTE_SIZE }, @@ -110,7 +110,7 @@ fn main() { println!("Calculating previous contribution hash..."); - assert!(UseCompression::No == input_is_compressed, "Hashing the compressed file in not yet defined"); + assert!(UseCompression::No == INPUT_IS_COMPRESSED, "Hashing the compressed file in not yet defined"); let current_accumulator_hash = BachedAccumulator::::calculate_hash(&readable_map); { @@ -141,16 +141,16 @@ fn main() { BachedAccumulator::::transform( &readable_map, &mut writable_map, - input_is_compressed, - compress_the_output, - check_input_correctness, + INPUT_IS_COMPRESSED, + COMPRESS_THE_OUTPUT, + CHECK_INPUT_CORRECTNESS, &privkey ).expect("must transform with the key"); println!("Finihsing writing your contribution to `./response`..."); // Write the public key - pubkey.write::(&mut writable_map, compress_the_output).expect("unable to write public key"); + pubkey.write::(&mut writable_map, COMPRESS_THE_OUTPUT).expect("unable to write public key"); writable_map.flush().expect("must flush a memory map"); diff --git a/src/bin/new.rs.nocompile b/src/bin/new.rs.nocompile deleted file mode 100644 index a5bb301..0000000 --- a/src/bin/new.rs.nocompile +++ /dev/null @@ -1,24 +0,0 @@ -extern crate powersoftau; -use powersoftau::*; - -use std::fs::OpenOptions; -use std::io::{Write, BufWriter}; - -fn main() { - let writer = OpenOptions::new() - .read(false) - .write(true) - .create_new(true) - .open("challenge").expect("unable to create `./challenge`"); - - let mut writer = BufWriter::new(writer); - - // Write a blank BLAKE2b hash: - writer.write_all(&blank_hash().as_slice()).expect("unable to write blank hash to `./challenge`"); - - let acc = Accumulator::new(); - acc.serialize(&mut writer, UseCompression::No).expect("unable to write fresh accumulator to `./challenge`"); - writer.flush().expect("unable to flush accumulator to disk"); - - 
println!("Wrote a fresh accumulator to `./challenge`"); -} diff --git a/src/bin/verify_transform.rs.nocompile b/src/bin/verify_transform.rs.nocompile deleted file mode 100644 index cb66f6e..0000000 --- a/src/bin/verify_transform.rs.nocompile +++ /dev/null @@ -1,117 +0,0 @@ -extern crate powersoftau; -use powersoftau::*; - -use std::fs::OpenOptions; -use std::io::{Read, Write, BufWriter, BufReader}; - -fn main() { - // Try to load `./challenge` from disk. - let challenge_reader = OpenOptions::new() - .read(true) - .open("challenge").expect("unable open `./challenge` in this directory"); - - { - let metadata = challenge_reader.metadata().expect("unable to get filesystem metadata for `./challenge`"); - if metadata.len() != (ACCUMULATOR_BYTE_SIZE as u64) { - panic!("The size of `./challenge` should be {}, but it's {}, so something isn't right.", ACCUMULATOR_BYTE_SIZE, metadata.len()); - } - } - - let challenge_reader = BufReader::new(challenge_reader); - let mut challenge_reader = HashReader::new(challenge_reader); - - // Try to load `./response` from disk. 
- let response_reader = OpenOptions::new() - .read(true) - .open("response").expect("unable open `./response` in this directory"); - - { - let metadata = response_reader.metadata().expect("unable to get filesystem metadata for `./response`"); - if metadata.len() != (CONTRIBUTION_BYTE_SIZE as u64) { - panic!("The size of `./response` should be {}, but it's {}, so something isn't right.", CONTRIBUTION_BYTE_SIZE, metadata.len()); - } - } - - let response_reader = BufReader::new(response_reader); - let mut response_reader = HashReader::new(response_reader); - - // Create new_challenge file - let writer = OpenOptions::new() - .read(false) - .write(true) - .create_new(true) - .open("new_challenge").expect("unable to create `./new_challenge`"); - - let mut writer = BufWriter::new(writer); - - // Deserialize the current challenge - - // Read the BLAKE2b hash of the previous contribution - { - // We don't need to do anything with it, but it's important for - // the hash chain. - let mut tmp = [0; 64]; - challenge_reader.read_exact(&mut tmp).expect("unable to read BLAKE2b hash of previous contribution"); - } - - // Load the current accumulator into memory - let current_accumulator = Accumulator::deserialize( - &mut challenge_reader, - UseCompression::No, - CheckForCorrectness::No // no need to check since we constructed the challenge already - ).expect("unable to read uncompressed accumulator"); - - // Get the hash of the current accumulator - let current_accumulator_hash = challenge_reader.into_hash(); - - // Load the response into memory - - // Check the hash chain - { - let mut response_challenge_hash = [0; 64]; - response_reader.read_exact(&mut response_challenge_hash).expect("couldn't read hash of challenge file from response file"); - - if &response_challenge_hash[..] != current_accumulator_hash.as_slice() { - panic!("Hash chain failure. 
This is not the right response."); - } - } - - // Load the response's accumulator - let new_accumulator = Accumulator::deserialize(&mut response_reader, UseCompression::Yes, CheckForCorrectness::Yes) - .expect("wasn't able to deserialize the response file's accumulator"); - - // Load the response's pubkey - let public_key = PublicKey::deserialize(&mut response_reader) - .expect("wasn't able to deserialize the response file's public key"); - - // Get the hash of the response file - let response_hash = response_reader.into_hash(); - - if !verify_transform(¤t_accumulator, &new_accumulator, &public_key, current_accumulator_hash.as_slice()) { - println!("Verification failed, contribution was invalid somehow."); - panic!("INVALID CONTRIBUTION!!!"); - } else { - println!("Verification succeeded!"); - } - - println!("Here's the BLAKE2b hash of the participant's response file:"); - - for line in response_hash.as_slice().chunks(16) { - print!("\t"); - for section in line.chunks(4) { - for b in section { - print!("{:02x}", b); - } - print!(" "); - } - println!(""); - } - - println!("Verification succeeded! Writing to `./new_challenge`..."); - - writer.write_all(response_hash.as_slice()).expect("couldn't write response file's hash into the `./new_challenge` file"); - new_accumulator.serialize(&mut writer, UseCompression::No).expect("unable to write uncompressed accumulator into the `./new_challenge` file"); - - println!("Done! `./new_challenge` contains the new challenge file. 
The other files"); - println!("were left alone."); -} diff --git a/test.sh b/test.sh index 035c085..78eaa4a 100755 --- a/test.sh +++ b/test.sh @@ -7,13 +7,12 @@ rm challenge_old rm response_old cargo run --release --bin new_constrained -cargo run --release --bin beacon_constrained +cargo run --release --bin compute_constrained cargo run --release --bin verify_transform_constrained mv challenge challenge_old mv response response_old mv new_challenge challenge - -cargo run --release --bin compute_constrained -cargo run --release --bin verify_transform_constrained \ No newline at end of file +cargo run --release --bin beacon_constrained +cargo run --release --bin verify_transform_constrained From e62cb6a09bc76d8fda3726555b701e561ee7db3f Mon Sep 17 00:00:00 2001 From: Alex Vlasov Date: Tue, 5 Mar 2019 10:35:30 +0100 Subject: [PATCH 18/23] use versioning for reproducible builds --- Cargo.lock | 151 +++++++++++++++++++++--- Cargo.toml | 14 +-- src/accumulator.rs | 10 +- src/batched_accumulator.rs | 9 +- src/bin/beacon_constrained.rs | 4 +- src/bin/compute_constrained.rs | 4 +- src/bin/new.rs | 4 +- src/bin/new_constrained.rs | 4 +- src/bin/verify.rs.nocompile | 4 +- src/bin/verify_transform_constrained.rs | 4 +- src/bls12_381/mod.rs | 5 +- src/bn256/mod.rs | 9 +- src/keypair.rs | 9 +- src/parameters.rs | 9 +- src/small_bn256/mod.rs | 9 +- src/utils.rs | 9 +- 16 files changed, 177 insertions(+), 81 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 968b511..ec72ebc 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1,16 +1,24 @@ +[[package]] +name = "arrayvec" +version = "0.4.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "nodrop 0.1.13 (registry+https://github.com/rust-lang/crates.io-index)", +] + [[package]] name = "bellman" -version = "0.1.3" -source = "git+https://github.com/matterinc/bellman#e775b47d99562243f4ed3ab432eb3a56ad9493a3" +version = "0.2.0" +source = 
"git+https://github.com/matterinc/bellman?tag=0.2.0#6e45a4b233e97a71f4a8a0565c8f8d753c04c08f" dependencies = [ "bit-vec 0.4.4 (registry+https://github.com/rust-lang/crates.io-index)", + "blake2-rfc 0.2.18 (git+https://github.com/gtank/blake2-rfc?rev=7a5b5fc99ae483a0043db7547fb79a6fa44b88a9)", "byteorder 1.3.1 (registry+https://github.com/rust-lang/crates.io-index)", - "crossbeam 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)", - "ff 0.5.0 (git+https://github.com/matterinc/ff)", + "crossbeam 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", "futures 0.1.25 (registry+https://github.com/rust-lang/crates.io-index)", "futures-cpupool 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)", "num_cpus 1.10.0 (registry+https://github.com/rust-lang/crates.io-index)", - "pairing 0.15.2 (git+https://github.com/matterinc/pairing)", + "pairing 0.16.2 (git+https://github.com/matterinc/pairing?tag=0.16.2)", "rand 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -30,6 +38,16 @@ dependencies = [ "generic-array 0.8.3 (registry+https://github.com/rust-lang/crates.io-index)", ] +[[package]] +name = "blake2-rfc" +version = "0.2.18" +source = "git+https://github.com/gtank/blake2-rfc?rev=7a5b5fc99ae483a0043db7547fb79a6fa44b88a9#7a5b5fc99ae483a0043db7547fb79a6fa44b88a9" +dependencies = [ + "arrayvec 0.4.10 (registry+https://github.com/rust-lang/crates.io-index)", + "byteorder 1.3.1 (registry+https://github.com/rust-lang/crates.io-index)", + "constant_time_eq 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)", +] + [[package]] name = "byte-tools" version = "0.2.0" @@ -40,6 +58,11 @@ name = "byteorder" version = "1.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" +[[package]] +name = "cfg-if" +version = "0.1.7" +source = "registry+https://github.com/rust-lang/crates.io-index" + [[package]] name = "constant_time_eq" version = "0.1.3" @@ -50,6 +73,67 @@ name = "crossbeam" version = "0.3.2" source = 
"registry+https://github.com/rust-lang/crates.io-index" +[[package]] +name = "crossbeam" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "cfg-if 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)", + "crossbeam-channel 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", + "crossbeam-deque 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", + "crossbeam-epoch 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", + "crossbeam-queue 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)", + "crossbeam-utils 0.6.5 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "crossbeam-channel" +version = "0.3.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "crossbeam-utils 0.6.5 (registry+https://github.com/rust-lang/crates.io-index)", + "smallvec 0.6.9 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "crossbeam-deque" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "crossbeam-epoch 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", + "crossbeam-utils 0.6.5 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "crossbeam-epoch" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "arrayvec 0.4.10 (registry+https://github.com/rust-lang/crates.io-index)", + "cfg-if 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)", + "crossbeam-utils 0.6.5 (registry+https://github.com/rust-lang/crates.io-index)", + "lazy_static 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)", + "memoffset 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)", + "scopeguard 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "crossbeam-queue" +version = "0.1.2" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "crossbeam-utils 0.6.5 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "crossbeam-utils" +version = "0.6.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "cfg-if 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)", + "lazy_static 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)", +] + [[package]] name = "crypto-mac" version = "0.4.0" @@ -75,17 +159,17 @@ source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "ff" version = "0.5.0" -source = "git+https://github.com/matterinc/ff#056a13b95f4b971a9ae2c6fbb5fbc9f1e4f4828e" +source = "git+https://github.com/matterinc/ff?tag=0.5#056a13b95f4b971a9ae2c6fbb5fbc9f1e4f4828e" dependencies = [ "byteorder 1.3.1 (registry+https://github.com/rust-lang/crates.io-index)", - "ff_derive 0.4.0 (git+https://github.com/matterinc/ff)", + "ff_derive 0.4.0 (git+https://github.com/matterinc/ff?tag=0.5)", "rand 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "ff_derive" version = "0.4.0" -source = "git+https://github.com/matterinc/ff#056a13b95f4b971a9ae2c6fbb5fbc9f1e4f4828e" +source = "git+https://github.com/matterinc/ff?tag=0.5#056a13b95f4b971a9ae2c6fbb5fbc9f1e4f4828e" dependencies = [ "num-bigint 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", "num-integer 0.1.39 (registry+https://github.com/rust-lang/crates.io-index)", @@ -164,6 +248,11 @@ name = "itoa" version = "0.4.3" source = "registry+https://github.com/rust-lang/crates.io-index" +[[package]] +name = "lazy_static" +version = "1.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" + [[package]] name = "libc" version = "0.2.49" @@ -178,6 +267,11 @@ dependencies = [ "winapi 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)", ] +[[package]] +name = "memoffset" +version = "0.2.1" +source = 
"registry+https://github.com/rust-lang/crates.io-index" + [[package]] name = "nodrop" version = "0.1.13" @@ -215,11 +309,11 @@ dependencies = [ [[package]] name = "pairing" -version = "0.15.2" -source = "git+https://github.com/matterinc/pairing#84b57df3259c2f41d97744792cc89269e261d44e" +version = "0.16.2" +source = "git+https://github.com/matterinc/pairing?tag=0.16.2#c2af46cac3e6ebc8e1e1f37bb993e5e6c7f689d1" dependencies = [ "byteorder 1.3.1 (registry+https://github.com/rust-lang/crates.io-index)", - "ff 0.5.0 (git+https://github.com/matterinc/ff)", + "ff 0.5.0 (git+https://github.com/matterinc/ff?tag=0.5)", "hex 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)", "rand 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)", "serde 1.0.88 (registry+https://github.com/rust-lang/crates.io-index)", @@ -231,17 +325,15 @@ dependencies = [ name = "powersoftau" version = "0.2.0" dependencies = [ - "bellman 0.1.3 (git+https://github.com/matterinc/bellman)", + "bellman 0.2.0 (git+https://github.com/matterinc/bellman?tag=0.2.0)", "blake2 0.6.1 (registry+https://github.com/rust-lang/crates.io-index)", "byteorder 1.3.1 (registry+https://github.com/rust-lang/crates.io-index)", "crossbeam 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)", - "ff 0.5.0 (git+https://github.com/matterinc/ff)", "generic-array 0.8.3 (registry+https://github.com/rust-lang/crates.io-index)", "hex-literal 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)", "itertools 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)", "memmap 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)", "num_cpus 1.10.0 (registry+https://github.com/rust-lang/crates.io-index)", - "pairing 0.15.2 (git+https://github.com/matterinc/pairing)", "rand 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)", "rust-crypto 0.2.36 (registry+https://github.com/rust-lang/crates.io-index)", "typenum 1.10.0 (registry+https://github.com/rust-lang/crates.io-index)", @@ 
-345,6 +437,11 @@ name = "ryu" version = "0.2.7" source = "registry+https://github.com/rust-lang/crates.io-index" +[[package]] +name = "scopeguard" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" + [[package]] name = "serde" version = "1.0.88" @@ -370,6 +467,11 @@ dependencies = [ "serde 1.0.88 (registry+https://github.com/rust-lang/crates.io-index)", ] +[[package]] +name = "smallvec" +version = "0.6.9" +source = "registry+https://github.com/rust-lang/crates.io-index" + [[package]] name = "syn" version = "0.14.9" @@ -430,18 +532,27 @@ version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" [metadata] -"checksum bellman 0.1.3 (git+https://github.com/matterinc/bellman)" = "" +"checksum arrayvec 0.4.10 (registry+https://github.com/rust-lang/crates.io-index)" = "92c7fb76bc8826a8b33b4ee5bb07a247a81e76764ab4d55e8f73e3a4d8808c71" +"checksum bellman 0.2.0 (git+https://github.com/matterinc/bellman?tag=0.2.0)" = "" "checksum bit-vec 0.4.4 (registry+https://github.com/rust-lang/crates.io-index)" = "02b4ff8b16e6076c3e14220b39fbc1fabb6737522281a388998046859400895f" "checksum blake2 0.6.1 (registry+https://github.com/rust-lang/crates.io-index)" = "53bf612c0f2839b7e764ebac65d6cb985f7c6812de399d0728038f4b1da141bc" +"checksum blake2-rfc 0.2.18 (git+https://github.com/gtank/blake2-rfc?rev=7a5b5fc99ae483a0043db7547fb79a6fa44b88a9)" = "" "checksum byte-tools 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "560c32574a12a89ecd91f5e742165893f86e3ab98d21f8ea548658eb9eef5f40" "checksum byteorder 1.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "a019b10a2a7cdeb292db131fc8113e57ea2a908f6e7894b0c3c671893b65dbeb" +"checksum cfg-if 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)" = "11d43355396e872eefb45ce6342e4374ed7bc2b3a502d1b28e36d6e23c05d1f4" "checksum constant_time_eq 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = 
"8ff012e225ce166d4422e0e78419d901719760f62ae2b7969ca6b564d1b54a9e" "checksum crossbeam 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)" = "24ce9782d4d5c53674646a6a4c1863a21a8fc0cb649b3c94dfc16e45071dea19" +"checksum crossbeam 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)" = "b14492071ca110999a20bf90e3833406d5d66bfd93b4e52ec9539025ff43fe0d" +"checksum crossbeam-channel 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)" = "0f0ed1a4de2235cabda8558ff5840bffb97fcb64c97827f354a451307df5f72b" +"checksum crossbeam-deque 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)" = "b18cd2e169ad86297e6bc0ad9aa679aee9daa4f19e8163860faf7c164e4f5a71" +"checksum crossbeam-epoch 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)" = "04c9e3102cc2d69cd681412141b390abd55a362afc1540965dad0ad4d34280b4" +"checksum crossbeam-queue 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "7c979cd6cfe72335896575c6b5688da489e420d36a27a0b9eb0c73db574b4a4b" +"checksum crossbeam-utils 0.6.5 (registry+https://github.com/rust-lang/crates.io-index)" = "f8306fcef4a7b563b76b7dd949ca48f52bc1141aa067d2ea09565f3e2652aa5c" "checksum crypto-mac 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "779015233ac67d65098614aec748ac1c756ab6677fa2e14cf8b37c08dfed1198" "checksum digest 0.6.2 (registry+https://github.com/rust-lang/crates.io-index)" = "e5b29bf156f3f4b3c4f610a25ff69370616ae6e0657d416de22645483e72af0a" "checksum either 1.5.1 (registry+https://github.com/rust-lang/crates.io-index)" = "c67353c641dc847124ea1902d69bd753dee9bb3beff9aa3662ecf86c971d1fac" -"checksum ff 0.5.0 (git+https://github.com/matterinc/ff)" = "" -"checksum ff_derive 0.4.0 (git+https://github.com/matterinc/ff)" = "" +"checksum ff 0.5.0 (git+https://github.com/matterinc/ff?tag=0.5)" = "" +"checksum ff_derive 0.4.0 (git+https://github.com/matterinc/ff?tag=0.5)" = "" "checksum fuchsia-cprng 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = 
"a06f77d526c1a601b7c4cdd98f54b5eaabffc14d5f2f0296febdc7f357c6d3ba" "checksum futures 0.1.25 (registry+https://github.com/rust-lang/crates.io-index)" = "49e7653e374fe0d0c12de4250f0bdb60680b8c80eed558c5c7538eec9c89e21b" "checksum futures-cpupool 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)" = "ab90cde24b3319636588d0c35fe03b1333857621051837ed769faefb4c2162e4" @@ -452,14 +563,16 @@ source = "registry+https://github.com/rust-lang/crates.io-index" "checksum hex-literal-impl 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "1d340b6514f232f6db1bd16db65302a5278a04fef9ce867cb932e7e5fa21130a" "checksum itertools 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)" = "5b8467d9c1cebe26feb08c640139247fac215782d35371ade9a2136ed6085358" "checksum itoa 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)" = "1306f3464951f30e30d12373d31c79fbd52d236e5e896fd92f96ec7babbbe60b" +"checksum lazy_static 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "bc5729f27f159ddd61f4df6228e827e86643d4d3e7c32183cb30a1c08f604a14" "checksum libc 0.2.49 (registry+https://github.com/rust-lang/crates.io-index)" = "413f3dfc802c5dc91dc570b05125b6cda9855edfaa9825c9849807876376e70e" "checksum memmap 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "6585fd95e7bb50d6cc31e20d4cf9afb4e2ba16c5846fc76793f11218da9c475b" +"checksum memoffset 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "0f9dc261e2b62d7a622bf416ea3c5245cdd5d9a7fcc428c0d06804dfce1775b3" "checksum nodrop 0.1.13 (registry+https://github.com/rust-lang/crates.io-index)" = "2f9667ddcc6cc8a43afc9b7917599d7216aa09c463919ea32c59ed6cac8bc945" "checksum num-bigint 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "57450397855d951f1a41305e54851b1a7b8f5d2e349543a02a2effe25459f718" "checksum num-integer 0.1.39 (registry+https://github.com/rust-lang/crates.io-index)" = "e83d528d2677f0518c570baf2b7abdcf0cd2d248860b68507bdcb3e91d4c0cea" "checksum 
num-traits 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)" = "0b3a5d7cc97d6d30d8b9bc8fa19bf45349ffe46241e8816f50f62f6d6aaabee1" "checksum num_cpus 1.10.0 (registry+https://github.com/rust-lang/crates.io-index)" = "1a23f0ed30a54abaa0c7e83b1d2d87ada7c3c23078d1d87815af3e3b6385fbba" -"checksum pairing 0.15.2 (git+https://github.com/matterinc/pairing)" = "" +"checksum pairing 0.16.2 (git+https://github.com/matterinc/pairing?tag=0.16.2)" = "" "checksum proc-macro-hack 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)" = "2c725b36c99df7af7bf9324e9c999b9e37d92c8f8caf106d82e1d7953218d2d8" "checksum proc-macro-hack-impl 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)" = "2b753ad9ed99dd8efeaa7d2fb8453c8f6bc3e54b97966d35f1bc77ca6865254a" "checksum proc-macro2 0.4.27 (registry+https://github.com/rust-lang/crates.io-index)" = "4d317f9caece796be1980837fd5cb3dfec5613ebdb04ad0956deea83ce168915" @@ -473,9 +586,11 @@ source = "registry+https://github.com/rust-lang/crates.io-index" "checksum rust-crypto 0.2.36 (registry+https://github.com/rust-lang/crates.io-index)" = "f76d05d3993fd5f4af9434e8e436db163a12a9d40e1a58a726f27a01dfd12a2a" "checksum rustc-serialize 0.3.24 (registry+https://github.com/rust-lang/crates.io-index)" = "dcf128d1287d2ea9d80910b5f1120d0b8eede3fbf1abe91c40d39ea7d51e6fda" "checksum ryu 0.2.7 (registry+https://github.com/rust-lang/crates.io-index)" = "eb9e9b8cde282a9fe6a42dd4681319bfb63f121b8a8ee9439c6f4107e58a46f7" +"checksum scopeguard 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "94258f53601af11e6a49f722422f6e3425c52b06245a5cf9bc09908b174f5e27" "checksum serde 1.0.88 (registry+https://github.com/rust-lang/crates.io-index)" = "9f301d728f2b94c9a7691c90f07b0b4e8a4517181d9461be94c04bddeb4bd850" "checksum serde_derive 1.0.88 (registry+https://github.com/rust-lang/crates.io-index)" = "beed18e6f5175aef3ba670e57c60ef3b1b74d250d962a26604bff4c80e970dd4" "checksum serde_json 1.0.38 
(registry+https://github.com/rust-lang/crates.io-index)" = "27dce848e7467aa0e2fcaf0a413641499c0b745452aaca1194d24dedde9e13c9" +"checksum smallvec 0.6.9 (registry+https://github.com/rust-lang/crates.io-index)" = "c4488ae950c49d403731982257768f48fada354a5203fe81f9bb6f43ca9002be" "checksum syn 0.14.9 (registry+https://github.com/rust-lang/crates.io-index)" = "261ae9ecaa397c42b960649561949d69311f08eeaea86a65696e6e46517cf741" "checksum syn 0.15.26 (registry+https://github.com/rust-lang/crates.io-index)" = "f92e629aa1d9c827b2bb8297046c1ccffc57c99b947a680d3ccff1f136a3bee9" "checksum time 0.1.42 (registry+https://github.com/rust-lang/crates.io-index)" = "db8dcfca086c1143c9270ac42a2bbd8a7ee477b78ac8e45b19abfb0cbede4b6f" diff --git a/Cargo.toml b/Cargo.toml index 9edb8d2..98c49b7 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -24,19 +24,7 @@ rust-crypto = "0.2" memmap = "0.7.0" itertools = "0.8.0" -ff = { git = 'https://github.com/matterinc/ff', features = ["derive"] } -pairing = { git = 'https://github.com/matterinc/pairing', features = ["expose-arith"]} -bellman = { git = 'https://github.com/matterinc/bellman'} - -#ff = { git = 'https://github.com/matterinc/ff', features = ["derive"], rev = "056a13b9" } -#pairing = { git = 'https://github.com/matterinc/pairing', features = ["expose-arith"], rev = "84b57df3" } -#bellman = { git = 'https://github.com/matterinc/bellman', rev = "e775b47d"} - -#[patch.'https://https://github.com/matterinc/ff'] -#ff = { git = 'https://github.com/matterinc/ff', features = ["derive"], rev = "056a13b9" } - -#[patch.'https://https://github.com/matterinc/pairing'] -#pairing = { git = 'https://github.com/matterinc/pairing', features = ["expose-arith"], rev = "84b57df3" } +bellman = { git = 'https://github.com/matterinc/bellman', tag = "0.2.0"} [features] diff --git a/src/accumulator.rs b/src/accumulator.rs index 4032911..cce9d3a 100644 --- a/src/accumulator.rs +++ b/src/accumulator.rs @@ -25,8 +25,6 @@ //! 
After some time has elapsed for participants to contribute to the ceremony, a participant is //! simulated with a randomness beacon. The resulting `Accumulator` contains partial zk-SNARK //! public parameters for all circuits within a bounded size. - -extern crate pairing; extern crate rand; extern crate crossbeam; extern crate num_cpus; @@ -34,16 +32,16 @@ extern crate blake2; extern crate generic_array; extern crate typenum; extern crate byteorder; -extern crate ff; +extern crate bellman; extern crate memmap; use memmap::{Mmap, MmapMut}; -use ff::{Field, PrimeField}; +use bellman::pairing::ff::{Field, PrimeField}; use byteorder::{ReadBytesExt, BigEndian}; use rand::{SeedableRng, Rng, Rand}; use rand::chacha::ChaChaRng; -use pairing::bn256::{Bn256}; -use pairing::*; +use bellman::pairing::bn256::{Bn256}; +use bellman::pairing::*; use std::io::{self, Read, Write}; use std::sync::{Arc, Mutex}; use generic_array::GenericArray; diff --git a/src/batched_accumulator.rs b/src/batched_accumulator.rs index e840e91..9bd9b16 100644 --- a/src/batched_accumulator.rs +++ b/src/batched_accumulator.rs @@ -1,7 +1,6 @@ /// Memory constrained accumulator that checks parts of the initial information in parts that fit to memory /// and then contributes to entropy in parts as well -extern crate pairing; extern crate rand; extern crate crossbeam; extern crate num_cpus; @@ -9,18 +8,18 @@ extern crate blake2; extern crate generic_array; extern crate typenum; extern crate byteorder; -extern crate ff; +extern crate bellman; extern crate memmap; extern crate itertools; use itertools::Itertools; use memmap::{Mmap, MmapMut}; -use ff::{Field, PrimeField}; +use bellman::pairing::ff::{Field, PrimeField}; use byteorder::{ReadBytesExt, BigEndian}; use rand::{SeedableRng, Rng, Rand}; use rand::chacha::ChaChaRng; -use pairing::bn256::{Bn256}; -use pairing::*; +use bellman::pairing::bn256::{Bn256}; +use bellman::pairing::*; use std::io::{self, Read, Write}; use std::sync::{Arc, Mutex}; use 
generic_array::GenericArray; diff --git a/src/bin/beacon_constrained.rs b/src/bin/beacon_constrained.rs index 8e0438d..bcedef3 100644 --- a/src/bin/beacon_constrained.rs +++ b/src/bin/beacon_constrained.rs @@ -1,5 +1,5 @@ extern crate powersoftau; -extern crate pairing; +extern crate bellman; extern crate memmap; extern crate rand; extern crate blake2; @@ -13,7 +13,7 @@ use powersoftau::keypair::{keypair}; use powersoftau::parameters::{UseCompression, CheckForCorrectness}; use std::fs::OpenOptions; -use pairing::bn256::Bn256; +use bellman::pairing::bn256::Bn256; use memmap::*; use std::io::Write; diff --git a/src/bin/compute_constrained.rs b/src/bin/compute_constrained.rs index 6d904d1..46d4ad3 100644 --- a/src/bin/compute_constrained.rs +++ b/src/bin/compute_constrained.rs @@ -1,5 +1,5 @@ extern crate powersoftau; -extern crate pairing; +extern crate bellman; extern crate memmap; extern crate rand; extern crate blake2; @@ -12,7 +12,7 @@ use powersoftau::keypair::{keypair}; use powersoftau::parameters::{UseCompression, CheckForCorrectness}; use std::fs::OpenOptions; -use pairing::bn256::Bn256; +use bellman::pairing::bn256::Bn256; use memmap::*; use std::io::Write; diff --git a/src/bin/new.rs b/src/bin/new.rs index fcf5408..b04d176 100644 --- a/src/bin/new.rs +++ b/src/bin/new.rs @@ -1,5 +1,5 @@ extern crate powersoftau; -extern crate pairing; +extern crate bellman; // use powersoftau::bn256::{Bn256CeremonyParameters}; use powersoftau::small_bn256::{Bn256CeremonyParameters}; @@ -9,7 +9,7 @@ use powersoftau::parameters::{UseCompression}; use std::fs::OpenOptions; use std::io::{Write, BufWriter}; -use pairing::bn256::Bn256; +use bellman::pairing::bn256::Bn256; fn main() { let writer = OpenOptions::new() diff --git a/src/bin/new_constrained.rs b/src/bin/new_constrained.rs index 4d63365..36c6b5a 100644 --- a/src/bin/new_constrained.rs +++ b/src/bin/new_constrained.rs @@ -1,5 +1,5 @@ extern crate powersoftau; -extern crate pairing; +extern crate bellman; extern crate 
memmap; // use powersoftau::bn256::{Bn256CeremonyParameters}; @@ -10,7 +10,7 @@ use powersoftau::utils::{blank_hash}; use std::fs::OpenOptions; use std::io::{Write}; -use pairing::bn256::Bn256; +use bellman::pairing::bn256::Bn256; use memmap::*; use powersoftau::parameters::PowersOfTauParameters; diff --git a/src/bin/verify.rs.nocompile b/src/bin/verify.rs.nocompile index ece1fcc..cdf8158 100644 --- a/src/bin/verify.rs.nocompile +++ b/src/bin/verify.rs.nocompile @@ -5,8 +5,8 @@ extern crate blake2; extern crate byteorder; extern crate bellman; -use pairing::{CurveAffine, CurveProjective}; -use pairing::bls12_381::{G1, G2}; +use bellman::pairing::{CurveAffine, CurveProjective}; +use bellman::pairing::bls12_381::{G1, G2}; use powersoftau::*; use bellman::multicore::Worker; diff --git a/src/bin/verify_transform_constrained.rs b/src/bin/verify_transform_constrained.rs index 8727f66..2a3a375 100644 --- a/src/bin/verify_transform_constrained.rs +++ b/src/bin/verify_transform_constrained.rs @@ -1,5 +1,5 @@ extern crate powersoftau; -extern crate pairing; +extern crate bellman; extern crate memmap; extern crate rand; extern crate blake2; @@ -12,7 +12,7 @@ use powersoftau::keypair::{PublicKey}; use powersoftau::parameters::{UseCompression, CheckForCorrectness}; use std::fs::OpenOptions; -use pairing::bn256::Bn256; +use bellman::pairing::bn256::Bn256; use memmap::*; use std::io::{Read, Write}; diff --git a/src/bls12_381/mod.rs b/src/bls12_381/mod.rs index 22c82d8..665c23b 100644 --- a/src/bls12_381/mod.rs +++ b/src/bls12_381/mod.rs @@ -34,12 +34,13 @@ extern crate blake2; extern crate generic_array; extern crate typenum; extern crate byteorder; +extern crate bellman; use byteorder::{ReadBytesExt, BigEndian}; use rand::{SeedableRng, Rng, Rand}; use rand::chacha::ChaChaRng; -use pairing::bls12_381::*; -use pairing::*; +use bellman::pairing::bls12_381::*; +use bellman::pairing::*; use std::io::{self, Read, Write}; use std::sync::{Arc, Mutex}; use generic_array::GenericArray; 
diff --git a/src/bn256/mod.rs b/src/bn256/mod.rs index b2fafa9..a59285a 100644 --- a/src/bn256/mod.rs +++ b/src/bn256/mod.rs @@ -1,4 +1,3 @@ -extern crate pairing; extern crate rand; extern crate crossbeam; extern crate num_cpus; @@ -6,14 +5,14 @@ extern crate blake2; extern crate generic_array; extern crate typenum; extern crate byteorder; -extern crate ff; +extern crate bellman; -use self::ff::{Field, PrimeField}; +use self::bellman::pairing::ff::{Field, PrimeField}; use self::byteorder::{ReadBytesExt, BigEndian}; use self::rand::{SeedableRng, Rng, Rand}; use self::rand::chacha::ChaChaRng; -use self::pairing::bn256::{Bn256}; -use self::pairing::*; +use self::bellman::pairing::bn256::{Bn256}; +use self::bellman::pairing::*; use std::io::{self, Read, Write}; use std::sync::{Arc, Mutex}; use self::generic_array::GenericArray; diff --git a/src/keypair.rs b/src/keypair.rs index a49591d..e3bb9ac 100644 --- a/src/keypair.rs +++ b/src/keypair.rs @@ -1,4 +1,3 @@ -extern crate pairing; extern crate rand; extern crate crossbeam; extern crate num_cpus; @@ -6,18 +5,18 @@ extern crate blake2; extern crate generic_array; extern crate typenum; extern crate byteorder; -extern crate ff; +extern crate bellman; extern crate memmap; extern crate itertools; use itertools::Itertools; use memmap::{Mmap, MmapMut}; -use self::ff::{Field, PrimeField}; +use self::bellman::pairing::ff::{Field, PrimeField}; use self::byteorder::{ReadBytesExt, BigEndian}; use self::rand::{SeedableRng, Rng, Rand}; use self::rand::chacha::ChaChaRng; -use self::pairing::bn256::{Bn256}; -use self::pairing::*; +use self::bellman::pairing::bn256::{Bn256}; +use self::bellman::pairing::*; use std::io::{self, Read, Write}; use std::sync::{Arc, Mutex}; use self::generic_array::GenericArray; diff --git a/src/parameters.rs b/src/parameters.rs index ab35351..0b56013 100644 --- a/src/parameters.rs +++ b/src/parameters.rs @@ -1,4 +1,3 @@ -extern crate pairing; extern crate rand; extern crate crossbeam; extern crate num_cpus; 
@@ -6,14 +5,14 @@ extern crate blake2; extern crate generic_array; extern crate typenum; extern crate byteorder; -extern crate ff; +extern crate bellman; -use ff::{Field, PrimeField}; +use bellman::pairing::ff::{Field, PrimeField}; use byteorder::{ReadBytesExt, BigEndian}; use rand::{SeedableRng, Rng, Rand}; use rand::chacha::ChaChaRng; -use pairing::bn256::{Bn256}; -use pairing::*; +use bellman::pairing::bn256::{Bn256}; +use bellman::pairing::*; use std::io::{self, Read, Write}; use std::sync::{Arc, Mutex}; use generic_array::GenericArray; diff --git a/src/small_bn256/mod.rs b/src/small_bn256/mod.rs index 8c47568..0d9876d 100644 --- a/src/small_bn256/mod.rs +++ b/src/small_bn256/mod.rs @@ -1,4 +1,3 @@ -extern crate pairing; extern crate rand; extern crate crossbeam; extern crate num_cpus; @@ -6,14 +5,14 @@ extern crate blake2; extern crate generic_array; extern crate typenum; extern crate byteorder; -extern crate ff; +extern crate bellman; -use self::ff::{Field, PrimeField}; +use self::bellman::pairing::ff::{Field, PrimeField}; use self::byteorder::{ReadBytesExt, BigEndian}; use self::rand::{SeedableRng, Rng, Rand}; use self::rand::chacha::ChaChaRng; -use self::pairing::bn256::{Bn256}; -use self::pairing::*; +use self::bellman::pairing::bn256::{Bn256}; +use self::bellman::pairing::*; use std::io::{self, Read, Write}; use std::sync::{Arc, Mutex}; use self::generic_array::GenericArray; diff --git a/src/utils.rs b/src/utils.rs index 48336bd..fddd40c 100644 --- a/src/utils.rs +++ b/src/utils.rs @@ -1,4 +1,3 @@ -extern crate pairing; extern crate rand; extern crate crossbeam; extern crate num_cpus; @@ -6,14 +5,14 @@ extern crate blake2; extern crate generic_array; extern crate typenum; extern crate byteorder; -extern crate ff; +extern crate bellman; -use ff::{Field, PrimeField}; +use bellman::pairing::ff::{Field, PrimeField}; use byteorder::{ReadBytesExt, BigEndian}; use rand::{SeedableRng, Rng, Rand}; use rand::chacha::ChaChaRng; -use pairing::bn256::{Bn256}; -use 
pairing::*; +use bellman::pairing::bn256::{Bn256}; +use bellman::pairing::*; use std::io::{self, Read, Write}; use std::sync::{Arc, Mutex}; use generic_array::GenericArray; From 2edd90248c830e7587fde10889c5aef7b371a1b2 Mon Sep 17 00:00:00 2001 From: Alex Vlasov Date: Tue, 5 Mar 2019 11:41:00 +0100 Subject: [PATCH 19/23] choose 2^25 powers for test ceremony in Paris --- src/bn256/mod.rs | 6 +++--- src/small_bn256/mod.rs | 2 +- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/src/bn256/mod.rs b/src/bn256/mod.rs index a59285a..9a289f1 100644 --- a/src/bn256/mod.rs +++ b/src/bn256/mod.rs @@ -56,7 +56,7 @@ fn test_pubkey_serialization() { #[test] fn test_power_pairs() { use self::rand::thread_rng; - use self::pairing::bn256::{Fr, G1Affine, G2Affine}; + use self::bellman::pairing::bn256::{Fr, G1Affine, G2Affine}; let rng = &mut thread_rng(); let mut v = vec![]; @@ -79,7 +79,7 @@ fn test_power_pairs() { #[test] fn test_same_ratio() { use self::rand::thread_rng; - use self::pairing::bn256::{Fr, G1Affine, G2Affine}; + use self::bellman::pairing::bn256::{Fr, G1Affine, G2Affine}; let rng = &mut thread_rng(); @@ -98,7 +98,7 @@ fn test_accumulator_serialization() { use crate::accumulator::*; use self::rand::thread_rng; - use self::pairing::bn256::{Bn256, Fr, G1Affine, G2Affine}; + use self::bellman::pairing::bn256::{Bn256, Fr, G1Affine, G2Affine}; use self::PowersOfTauParameters; let rng = &mut thread_rng(); diff --git a/src/small_bn256/mod.rs b/src/small_bn256/mod.rs index 0d9876d..a5311a6 100644 --- a/src/small_bn256/mod.rs +++ b/src/small_bn256/mod.rs @@ -30,7 +30,7 @@ pub struct Bn256CeremonyParameters { } impl PowersOfTauParameters for Bn256CeremonyParameters { - const REQUIRED_POWER: usize = 21; // generate to have roughly 2 million constraints + const REQUIRED_POWER: usize = 25; // generate to have roughly 2 million constraints // This ceremony is based on the BN256 elliptic curve construction. 
const G1_UNCOMPRESSED_BYTE_SIZE: usize = 64; From 8260b7dfa92d69fa8d4aa685c7835762cc96a8c3 Mon Sep 17 00:00:00 2001 From: Alex Vlasov Date: Fri, 8 Mar 2019 01:23:02 +0100 Subject: [PATCH 20/23] update tools for more clear messages about hashes of files --- Cargo.lock | 2 + src/batched_accumulator.rs | 2 + src/bin/compute_constrained.rs | 22 ++- src/bin/verify_transform_constrained.rs | 26 ++- src/utils.rs | 211 +++++++++++++++++++----- 5 files changed, 218 insertions(+), 45 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index ec72ebc..1b98e54 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1,3 +1,5 @@ +# This file is automatically @generated by Cargo. +# It is not intended for manual editing. [[package]] name = "arrayvec" version = "0.4.10" diff --git a/src/batched_accumulator.rs b/src/batched_accumulator.rs index 9bd9b16..6bc8c07 100644 --- a/src/batched_accumulator.rs +++ b/src/batched_accumulator.rs @@ -302,6 +302,7 @@ impl BachedAccumulator { println!("Invalid ratio power_pairs(&after.beta_tau_powers_g1), (tau_powers_g2_0, tau_powers_g2_1)"); return false; } + println!("Done processing {} powers of tau", end); } else { panic!("Chunk does not have a min and max"); } @@ -321,6 +322,7 @@ impl BachedAccumulator { println!("Invalid ratio power_pairs(&after.tau_powers_g1), (tau_powers_g2_0, tau_powers_g2_1) in extra TauG1 contribution"); return false; } + println!("Done processing {} powers of tau", end); } else { panic!("Chunk does not have a min and max"); } diff --git a/src/bin/compute_constrained.rs b/src/bin/compute_constrained.rs index 46d4ad3..007dad2 100644 --- a/src/bin/compute_constrained.rs +++ b/src/bin/compute_constrained.rs @@ -15,7 +15,7 @@ use std::fs::OpenOptions; use bellman::pairing::bn256::Bn256; use memmap::*; -use std::io::Write; +use std::io::{Read, Write}; use powersoftau::parameters::PowersOfTauParameters; @@ -114,7 +114,7 @@ fn main() { let current_accumulator_hash = BachedAccumulator::::calculate_hash(&readable_map); { - 
println!("Contributing on top of the hash:"); + println!("`challenge` file contains decompressed points and has a hash:"); for line in current_accumulator_hash.as_slice().chunks(16) { print!("\t"); for section in line.chunks(4) { @@ -131,6 +131,24 @@ fn main() { writable_map.flush().expect("unable to write hash to `./response`"); } + { + let mut challenge_hash = [0; 64]; + let memory_slice = readable_map.get(0..64).expect("must read point data from file"); + memory_slice.clone().read_exact(&mut challenge_hash).expect("couldn't read hash of challenge file from response file"); + + println!("`challenge` file claims (!!! Must not be blindly trusted) that it was based on the original contribution with a hash:"); + for line in challenge_hash.chunks(16) { + print!("\t"); + for section in line.chunks(4) { + for b in section { + print!("{:02x}", b); + } + print!(" "); + } + println!(""); + } + } + // Construct our keypair using the RNG we created above let (pubkey, privkey) = keypair(&mut rng, current_accumulator_hash.as_ref()); diff --git a/src/bin/verify_transform_constrained.rs b/src/bin/verify_transform_constrained.rs index 2a3a375..278f749 100644 --- a/src/bin/verify_transform_constrained.rs +++ b/src/bin/verify_transform_constrained.rs @@ -76,7 +76,7 @@ fn main() { let current_accumulator_hash = BachedAccumulator::::calculate_hash(&challenge_readable_map); - println!("Previous challenge hash"); + println!("Hash of the `challenge` file for verification:"); for line in current_accumulator_hash.as_slice().chunks(16) { print!("\t"); for section in line.chunks(4) { @@ -94,7 +94,7 @@ fn main() { let memory_slice = response_readable_map.get(0..64).expect("must read point data from file"); memory_slice.clone().read_exact(&mut response_challenge_hash).expect("couldn't read hash of challenge file from response file"); - println!("Response was based on the hash"); + println!("`response` was based on the hash:"); for line in response_challenge_hash.chunks(16) { print!("\t"); for 
section in line.chunks(4) { @@ -118,6 +118,8 @@ fn main() { // check that it follows the protocol + println!("Verifying a contribution to contain proper powers and correspond to the public key..."); + let valid = BachedAccumulator::::verify_transformation( &challenge_readable_map, &response_readable_map, @@ -136,10 +138,9 @@ fn main() { println!("Verification succeeded!"); } - let response_hash = BachedAccumulator::::calculate_hash(&response_readable_map); - println!("Here's the BLAKE2b hash of the participant's response file:"); + println!("Here's the BLAKE2b hash of the participant's original compressed `response` file:"); for line in response_hash.as_slice().chunks(16) { print!("\t"); @@ -184,6 +185,23 @@ fn main() { writable_map.flush().expect("must flush the memory map"); + let new_challenge_readable_map = writable_map.make_read_only().expect("must make a map readonly"); + + let recompressed_hash = BachedAccumulator::::calculate_hash(&new_challenge_readable_map); + + println!("Here's the BLAKE2b hash of the decompressed participant's response as `new_challenge` file:"); + + for line in recompressed_hash.as_slice().chunks(16) { + print!("\t"); + for section in line.chunks(4) { + for b in section { + print!("{:02x}", b); + } + print!(" "); + } + println!(""); + } + println!("Done! `./new_challenge` contains the new challenge file. The other files"); println!("were left alone."); } diff --git a/src/utils.rs b/src/utils.rs index fddd40c..c309833 100644 --- a/src/utils.rs +++ b/src/utils.rs @@ -7,7 +7,7 @@ extern crate typenum; extern crate byteorder; extern crate bellman; -use bellman::pairing::ff::{Field, PrimeField}; +use bellman::pairing::ff::{Field, PrimeField, PrimeFieldRepr}; use byteorder::{ReadBytesExt, BigEndian}; use rand::{SeedableRng, Rng, Rand}; use rand::chacha::ChaChaRng; @@ -65,50 +65,65 @@ fn test_hash_to_g2() { /// e(g, (as)*r1 + (bs)*r2 + (cs)*r3) = e(g^s, a*r1 + b*r2 + c*r3) /// /// ... with high probability. 
+// fn merge_pairs>(v1: &[G], v2: &[G]) -> (G, G) +// { +// use std::sync::{Arc, Mutex}; +// use self::rand::{thread_rng}; + +// assert_eq!(v1.len(), v2.len()); + +// let chunk = (v1.len() / num_cpus::get()) + 1; + +// let s = Arc::new(Mutex::new(G::Projective::zero())); +// let sx = Arc::new(Mutex::new(G::Projective::zero())); + +// crossbeam::scope(|scope| { +// for (v1, v2) in v1.chunks(chunk).zip(v2.chunks(chunk)) { +// let s = s.clone(); +// let sx = sx.clone(); + +// scope.spawn(move || { +// // We do not need to be overly cautious of the RNG +// // used for this check. +// let rng = &mut thread_rng(); + +// let mut wnaf = Wnaf::new(); +// let mut local_s = G::Projective::zero(); +// let mut local_sx = G::Projective::zero(); + +// for (v1, v2) in v1.iter().zip(v2.iter()) { +// let rho = G::Scalar::rand(rng); +// let mut wnaf = wnaf.scalar(rho.into_repr()); +// let v1 = wnaf.base(v1.into_projective()); +// let v2 = wnaf.base(v2.into_projective()); + +// local_s.add_assign(&v1); +// local_sx.add_assign(&v2); +// } + +// s.lock().unwrap().add_assign(&local_s); +// sx.lock().unwrap().add_assign(&local_sx); +// }); +// } +// }); + +// let s = s.lock().unwrap().into_affine(); +// let sx = sx.lock().unwrap().into_affine(); + +// (s, sx) +// } + fn merge_pairs>(v1: &[G], v2: &[G]) -> (G, G) { - use std::sync::{Arc, Mutex}; use self::rand::{thread_rng}; - + assert_eq!(v1.len(), v2.len()); + let rng = &mut thread_rng(); - let chunk = (v1.len() / num_cpus::get()) + 1; + let randomness: Vec<::Repr> = (0..v1.len()).map(|_| G::Scalar::rand(rng).into_repr()).collect(); - let s = Arc::new(Mutex::new(G::Projective::zero())); - let sx = Arc::new(Mutex::new(G::Projective::zero())); - - crossbeam::scope(|scope| { - for (v1, v2) in v1.chunks(chunk).zip(v2.chunks(chunk)) { - let s = s.clone(); - let sx = sx.clone(); - - scope.spawn(move || { - // We do not need to be overly cautious of the RNG - // used for this check. 
- let rng = &mut thread_rng(); - - let mut wnaf = Wnaf::new(); - let mut local_s = G::Projective::zero(); - let mut local_sx = G::Projective::zero(); - - for (v1, v2) in v1.iter().zip(v2.iter()) { - let rho = G::Scalar::rand(rng); - let mut wnaf = wnaf.scalar(rho.into_repr()); - let v1 = wnaf.base(v1.into_projective()); - let v2 = wnaf.base(v2.into_projective()); - - local_s.add_assign(&v1); - local_sx.add_assign(&v2); - } - - s.lock().unwrap().add_assign(&local_s); - sx.lock().unwrap().add_assign(&local_sx); - }); - } - }); - - let s = s.lock().unwrap().into_affine(); - let sx = sx.lock().unwrap().into_affine(); + let s = dense_multiexp(&v1, &randomness[..]).into_affine(); + let sx = dense_multiexp(&v2, &randomness[..]).into_affine(); (s, sx) } @@ -164,4 +179,122 @@ pub fn compute_g2_s ( h.input(g1_s_x.into_uncompressed().as_ref()); hash_to_g2::(h.result().as_ref()).into_affine() +} + +/// Perform multi-exponentiation. The caller is responsible for ensuring that +/// the number of bases is the same as the number of exponents. +#[allow(dead_code)] +pub fn dense_multiexp( + bases: & [G], + exponents: & [::Repr] +) -> ::Projective +{ + if exponents.len() != bases.len() { + panic!("invalid length") + } + let c = if exponents.len() < 32 { + 3u32 + } else { + (f64::from(exponents.len() as u32)).ln().ceil() as u32 + }; + + dense_multiexp_inner(bases, exponents, 0, c, true) +} + +fn dense_multiexp_inner( + bases: & [G], + exponents: & [::Repr], + mut skip: u32, + c: u32, + handle_trivial: bool +) -> ::Projective +{ + use std::sync::{Mutex}; + // Perform this region of the multiexp. We use a different strategy - go over region in parallel, + // then over another region, etc. 
No Arc required + let chunk = (bases.len() / num_cpus::get()) + 1; + let this = { + // let mask = (1u64 << c) - 1u64; + let this_region = Mutex::new(::Projective::zero()); + let arc = Arc::new(this_region); + crossbeam::scope(|scope| { + for (base, exp) in bases.chunks(chunk).zip(exponents.chunks(chunk)) { + let this_region_rwlock = arc.clone(); + // let handle = + scope.spawn(move || { + let mut buckets = vec![::Projective::zero(); (1 << c) - 1]; + // Accumulate the result + let mut acc = G::Projective::zero(); + let zero = G::Scalar::zero().into_repr(); + let one = G::Scalar::one().into_repr(); + + for (base, &exp) in base.iter().zip(exp.iter()) { + // let index = (exp.as_ref()[0] & mask) as usize; + + // if index != 0 { + // buckets[index - 1].add_assign_mixed(base); + // } + + // exp.shr(c as u32); + + if exp != zero { + if exp == one { + if handle_trivial { + acc.add_assign_mixed(base); + } + } else { + let mut exp = exp; + exp.shr(skip); + let exp = exp.as_ref()[0] % (1 << c); + if exp != 0 { + buckets[(exp - 1) as usize].add_assign_mixed(base); + } + } + } + } + + // buckets are filled with the corresponding accumulated value, now sum + let mut running_sum = G::Projective::zero(); + for exp in buckets.into_iter().rev() { + running_sum.add_assign(&exp); + acc.add_assign(&running_sum); + } + + let mut guard = match this_region_rwlock.lock() { + Ok(guard) => guard, + Err(_) => { + panic!("poisoned!"); + // poisoned.into_inner() + } + }; + + (*guard).add_assign(&acc); + }); + + } + }); + + let this_region = Arc::try_unwrap(arc).unwrap(); + let this_region = this_region.into_inner().unwrap(); + + this_region + }; + + skip += c; + + if skip >= ::NUM_BITS { + // There isn't another region, and this will be the highest region + return this; + } else { + // next region is actually higher than this one, so double it enough times + let mut next_region = dense_multiexp_inner( + bases, exponents, skip, c, false); + for _ in 0..c { + next_region.double(); + } + + 
next_region.add_assign(&this); + + return next_region; + } } \ No newline at end of file From 847b22826f6b84c2e96eb25e8b87ad9471c7a029 Mon Sep 17 00:00:00 2001 From: Alex Vlasov Date: Fri, 8 Mar 2019 15:37:36 +0100 Subject: [PATCH 21/23] add explicit non-infinity checks --- src/batched_accumulator.rs | 32 ++++++++++++++++++------- src/bin/verify_transform_constrained.rs | 29 +++++++++++----------- 2 files changed, 37 insertions(+), 24 deletions(-) diff --git a/src/batched_accumulator.rs b/src/batched_accumulator.rs index 6bc8c07..c0b1c87 100644 --- a/src/batched_accumulator.rs +++ b/src/batched_accumulator.rs @@ -234,8 +234,8 @@ impl BachedAccumulator { { let chunk_size = 2; - before.read_chunk(0, chunk_size, input_is_compressed, check_input_for_correctness, &input_map).expect("must read a first chunk"); - after.read_chunk(0, chunk_size, output_is_compressed, check_output_for_correctness, &output_map).expect("must read a first chunk"); + before.read_chunk(0, chunk_size, input_is_compressed, check_input_for_correctness, &input_map).expect("must read a first chunk from `challenge`"); + after.read_chunk(0, chunk_size, output_is_compressed, check_output_for_correctness, &output_map).expect("must read a first chunk from `response`"); // Check the correctness of the generators for tau powers if after.tau_powers_g1[0] != E::G1Affine::one() { @@ -282,8 +282,8 @@ impl BachedAccumulator { for chunk in &(0..P::TAU_POWERS_LENGTH).into_iter().chunks(P::EMPIRICAL_BATCH_SIZE) { if let MinMax(start, end) = chunk.minmax() { let size = end - start + 1; - before.read_chunk(start, size, input_is_compressed, check_input_for_correctness, &input_map).expect("must read a first chunk"); - after.read_chunk(start, size, output_is_compressed, check_output_for_correctness, &output_map).expect("must read a first chunk"); + before.read_chunk(start, size, input_is_compressed, check_input_for_correctness, &input_map).expect(&format!("must read a chunk from {} to {} from `challenge`", start, end)); 
+ after.read_chunk(start, size, output_is_compressed, check_output_for_correctness, &output_map).expect(&format!("must read a chunk from {} to {} from `response`", start, end)); // Are the powers of tau correct? if !same_ratio(power_pairs(&after.tau_powers_g1), (tau_powers_g2_0, tau_powers_g2_1)) { @@ -311,8 +311,8 @@ impl BachedAccumulator { for chunk in &(P::TAU_POWERS_LENGTH..P::TAU_POWERS_G1_LENGTH).into_iter().chunks(P::EMPIRICAL_BATCH_SIZE) { if let MinMax(start, end) = chunk.minmax() { let size = end - start + 1; - before.read_chunk(start, size, input_is_compressed, check_input_for_correctness, &input_map).expect("must read a first chunk"); - after.read_chunk(start, size, output_is_compressed, check_output_for_correctness, &output_map).expect("must read a first chunk"); + before.read_chunk(start, size, input_is_compressed, check_input_for_correctness, &input_map).expect(&format!("must read a chunk from {} to {} from `challenge`", start, end)); + after.read_chunk(start, size, output_is_compressed, check_output_for_correctness, &output_map).expect(&format!("must read a chunk from {} to {} from `response`", start, end)); assert_eq!(before.tau_powers_g2.len(), 0, "during rest of tau g1 generation tau g2 must be empty"); assert_eq!(after.tau_powers_g2.len(), 0, "during rest of tau g1 generation tau g2 must be empty"); @@ -344,7 +344,7 @@ impl BachedAccumulator { for chunk in &(0..P::TAU_POWERS_LENGTH).into_iter().chunks(P::EMPIRICAL_BATCH_SIZE) { if let MinMax(start, end) = chunk.minmax() { let size = end - start + 1; - accumulator.read_chunk(start, size, UseCompression::Yes, check_input_for_correctness, &input_map).expect("Must read a chunk"); + accumulator.read_chunk(start, size, UseCompression::Yes, check_input_for_correctness, &input_map).expect(&format!("must read a chunk from {} to {} from source of decompression", start, end)); accumulator.write_chunk(start, UseCompression::No, output_map)?; } else { panic!("Chunk does not have a min and max"); @@ -354,9 
+354,11 @@ impl BachedAccumulator { for chunk in &(P::TAU_POWERS_LENGTH..P::TAU_POWERS_G1_LENGTH).into_iter().chunks(P::EMPIRICAL_BATCH_SIZE) { if let MinMax(start, end) = chunk.minmax() { let size = end - start + 1; - accumulator.read_chunk(start, size, UseCompression::Yes, check_input_for_correctness, &input_map).expect("must read a chunk"); + accumulator.read_chunk(start, size, UseCompression::Yes, check_input_for_correctness, &input_map).expect(&format!("must read a chunk from {} to {} from source of decompression", start, end)); assert_eq!(accumulator.tau_powers_g2.len(), 0, "during rest of tau g1 generation tau g2 must be empty"); - + assert_eq!(accumulator.alpha_tau_powers_g1.len(), 0, "during rest of tau g1 generation alpha*tau in g1 must be empty"); + assert_eq!(accumulator.beta_tau_powers_g1.len(), 0, "during rest of tau g1 generation beta*tau in g1 must be empty"); + accumulator.write_chunk(start, UseCompression::No, output_map)?; } else { @@ -483,6 +485,7 @@ impl BachedAccumulator { let decoding_error = decoding_error.clone(); scope.spawn(move || { + assert_eq!(source.len(), target.len()); for (source, target) in source.iter().zip(target.iter_mut()) { match { // If we're a participant, we don't need to check all of the @@ -517,6 +520,14 @@ impl BachedAccumulator { } }); + // extra check that during the decompression all the initially initialized points at infinity + // were replaced with something + for decoded in res_affine.iter() { + if decoded.is_zero() { + return Err(DeserializationError::PointAtInfinity); + } + } + match Arc::try_unwrap(decoding_error).unwrap().into_inner().unwrap() { Some(e) => { Err(e) @@ -694,6 +705,7 @@ impl BachedAccumulator { // Turn it all back into affine points for (projective, affine) in projective.iter().zip(bases.iter_mut()) { *affine = projective.into_affine(); + assert!(!affine.is_zero(), "your contribution happened to produce a point at infinity, please re-run"); } } @@ -729,6 +741,7 @@ impl BachedAccumulator {
batch_exp::(&mut accumulator.alpha_tau_powers_g1, &taupowers[0..], Some(&key.alpha)); batch_exp::(&mut accumulator.beta_tau_powers_g1, &taupowers[0..], Some(&key.beta)); accumulator.beta_g2 = accumulator.beta_g2.mul(key.beta).into_affine(); + assert!(!accumulator.beta_g2.is_zero(), "your contribution happened to produce a point at infinity, please re-run"); accumulator.write_chunk(start, compress_the_output, output_map)?; println!("Done processing {} powers of tau", end); @@ -763,6 +776,7 @@ impl BachedAccumulator { batch_exp::(&mut accumulator.tau_powers_g1, &taupowers[0..], None); accumulator.beta_g2 = accumulator.beta_g2.mul(key.beta).into_affine(); + assert!(!accumulator.beta_g2.is_zero(), "your contribution happened to produce a point at infinity, please re-run"); accumulator.write_chunk(start, compress_the_output, output_map)?; println!("Done processing {} powers of tau", end); diff --git a/src/bin/verify_transform_constrained.rs b/src/bin/verify_transform_constrained.rs index 278f749..07e5833 100644 --- a/src/bin/verify_transform_constrained.rs +++ b/src/bin/verify_transform_constrained.rs @@ -111,6 +111,20 @@ fn main() { } } + let response_hash = BachedAccumulator::::calculate_hash(&response_readable_map); + + println!("Hash of the `response` file for verification:"); + for line in response_hash.as_slice().chunks(16) { + print!("\t"); + for section in line.chunks(4) { + for b in section { + print!("{:02x}", b); + } + print!(" "); + } + println!(""); + } + // get the contributor's public key let public_key = PublicKey::::read::(&response_readable_map, CONTRIBUTION_IS_COMPRESSED) .expect("wasn't able to deserialize the response file's public key"); @@ -138,21 +152,6 @@ fn main() { println!("Verification succeeded!"); } - let response_hash = BachedAccumulator::::calculate_hash(&response_readable_map); - - println!("Here's the BLAKE2b hash of the participant's original compressed `response` file:"); - - for line in response_hash.as_slice().chunks(16) { - 
print!("\t"); - for section in line.chunks(4) { - for b in section { - print!("{:02x}", b); - } - print!(" "); - } - println!(""); - } - if COMPRESS_NEW_CHALLENGE == UseCompression::Yes { println!("Don't need to recompress the contribution, please copy `./response` as `./new_challenge`"); } else { From c46bc9b4ad8a79fd956e92b267932eef11d5d498 Mon Sep 17 00:00:00 2001 From: Alex Vlasov Date: Fri, 8 Mar 2019 21:31:59 +0100 Subject: [PATCH 22/23] disable async flush of memmap for now, before further investigation --- src/batched_accumulator.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/batched_accumulator.rs b/src/batched_accumulator.rs index c0b1c87..ca01671 100644 --- a/src/batched_accumulator.rs +++ b/src/batched_accumulator.rs @@ -579,7 +579,7 @@ impl BachedAccumulator { } }; - output_map.flush_async()?; + output_map.flush()?; Ok(()) } From 812e4082fe97e7005880b41facc5a22a5a053405 Mon Sep 17 00:00:00 2001 From: Petr Korolev Date: Tue, 26 Mar 2019 23:52:14 +0200 Subject: [PATCH 23/23] add instructions for ceremony --- README.md | 37 +++++++++++++++++++++++++++++++++++++ 1 file changed, 37 insertions(+) diff --git a/README.md b/README.md index 01831ce..09bc9a8 100644 --- a/README.md +++ b/README.md @@ -14,6 +14,43 @@ Extended to support Ethereum's BN256 curve and made it easier to change size of Instructions for a planned ceremony will be posted when everything is tested and finalized. +--- +## To run the ceremony on your laptop: + +1. Preparation: + +``` +rustup update # tested on rustup 1.17.0 +cargo build +``` + +2. Put `response` file from the previous ceremony to root directory. +3. To generate `new_challenge` run: + +``` +cargo run --release --bin verify_transform_constrained # this will generate new_challenge from response file +``` + +4. Backup old files and replace `challenge` file: + +``` +mv challenge challenge_old +mv response response_old +mv new_challenge challenge +``` + +5. 
Run ceremony: + +``` +cargo run --release --bin compute_constrained # generate response file +``` + +Put the hash from your output response into a private gist (example: https://gist.github.com/skywinder/c35ab03c66c6b200b33ea2f388a6df89) + +6. Reboot laptop to clean up toxic waste. + +7. Save `response` file and give it to the next participant. + ## Recommendations from original ceremony Participants of the ceremony sample some randomness, perform a computation, and then destroy the randomness. **Only one participant needs to do this successfully to ensure the final parameters are secure.** In order to see that this randomness is truly destroyed, participants may take various kinds of precautions: