Merge pull request #42 from garious/genesis

Make num_hashes more intuitive
Greg Fitzgerald
2018-03-04 13:05:38 -07:00
committed by GitHub
11 changed files with 91 additions and 111 deletions

View File

@@ -71,9 +71,9 @@ fn main() {
 Running the program should produce a log similar to:
 ```rust
-Entry { num_hashes: 0, end_hash: [0, ...], event: Tick }
-Entry { num_hashes: 2, end_hash: [67, ...], event: Transaction { data: [37, ...] } }
-Entry { num_hashes: 3, end_hash: [123, ...], event: Tick }
+Entry { num_hashes: 0, id: [0, ...], event: Tick }
+Entry { num_hashes: 3, id: [67, ...], event: Transaction { data: [37, ...] } }
+Entry { num_hashes: 3, id: [123, ...], event: Tick }
 ```
 Proof-of-History
@@ -86,7 +86,7 @@ assert!(verify_slice(&entries, &seed));
 ```
 [It's a proof!](https://en.wikipedia.org/wiki/Curry–Howard_correspondence) For each entry returned by the
-historian, we can verify that `end_hash` is the result of applying a sha256 hash to the previous `end_hash`
+historian, we can verify that `id` is the result of applying a sha256 hash to the previous `id`
 exactly `num_hashes` times, and then hashing the event data on top of that. Because the event data is
 included in the hash, the events cannot be reordered without regenerating all the hashes.
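That check can be spelled out in a few lines. The sketch below is illustrative rather than code from this change: it borrows the `sha2` crate's `input`/`result` API used by the log module's `hash` helper later in this diff, the names `sha` and `expected_id` are made up, and it follows the convention introduced here that the hash folding in an event's signature counts as one of `num_hashes`.

```rust
use sha2::{Digest, Sha256};

type Hash = Vec<u8>;

// SHA-256 of a byte slice (same shape as the log module's `hash` helper).
fn sha(val: &[u8]) -> Hash {
    let mut hasher = Sha256::default();
    hasher.input(val);
    hasher.result().to_vec()
}

/// Recompute the id expected to follow `prev_id` after `num_hashes` hashes.
/// When the event carries a signature, folding it in counts as the final hash.
fn expected_id(prev_id: &[u8], num_hashes: u64, sig: Option<&[u8]>) -> Hash {
    let plain_hashes = if sig.is_some() {
        num_hashes.saturating_sub(1)
    } else {
        num_hashes
    };
    let mut id = prev_id.to_vec();
    for _ in 0..plain_hashes {
        id = sha(&id);
    }
    if let Some(sig) = sig {
        let mut data = id.clone();
        data.extend_from_slice(sig);
        id = sha(&data);
    }
    id
}

// Verifying an entry then reduces to a single comparison:
//   entry.id == expected_id(&previous.id, entry.num_hashes, signature_of(&entry.event))
```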

View File

@@ -1,17 +1,17 @@
 msc {
 client,historian,logger;
-logger=>historian [ label = "e0 = Entry{hash: h0, n: 0, event: Tick}" ] ;
+logger=>historian [ label = "e0 = Entry{id: h0, n: 0, event: Tick}" ] ;
 logger=>logger [ label = "h1 = hash(h0)" ] ;
 logger=>logger [ label = "h2 = hash(h1)" ] ;
-client=>historian [ label = "Claim(d0)" ] ;
-historian=>logger [ label = "Claim(d0)" ] ;
+client=>historian [ label = "Transaction(d0)" ] ;
+historian=>logger [ label = "Transaction(d0)" ] ;
 logger=>logger [ label = "h3 = hash(h2 + d0)" ] ;
-logger=>historian [ label = "e1 = Entry{hash: hash(h3), n: 2, event: Claim(d0)}" ] ;
+logger=>historian [ label = "e1 = Entry{id: hash(h3), n: 3, event: Transaction(d0)}" ] ;
 logger=>logger [ label = "h4 = hash(h3)" ] ;
 logger=>logger [ label = "h5 = hash(h4)" ] ;
 logger=>logger [ label = "h6 = hash(h5)" ] ;
-logger=>historian [ label = "e2 = Entry{hash: h6, n: 3, event: Tick}" ] ;
+logger=>historian [ label = "e2 = Entry{id: h6, n: 3, event: Tick}" ] ;
 client=>historian [ label = "collect()" ] ;
 historian=>client [ label = "entries = [e0, e1, e2]" ] ;
 client=>client [ label = "verify_slice(entries, h0)" ] ;
 }
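Read as code, the chart traces the following hash chain. This is a self-contained sketch, not code from the change: the seed and transaction bytes are placeholders, and the two helpers mirror the log module's `hash` and `extend_and_hash` using the `sha2` `input`/`result` API. The hash that folds in `d0` now counts toward `n`, which is why `e1`'s count moves from `n: 2` to `n: 3`.

```rust
use sha2::{Digest, Sha256};

type Hash = Vec<u8>;

// Stand-ins for the log module's `hash` and `extend_and_hash` helpers.
fn hash(val: &[u8]) -> Hash {
    let mut hasher = Sha256::default();
    hasher.input(val);
    hasher.result().to_vec()
}

fn extend_and_hash(id: &[u8], val: &[u8]) -> Hash {
    let mut data = id.to_vec();
    data.extend_from_slice(val);
    hash(&data)
}

fn main() {
    let h0 = hash(b"genesis seed");       // starting hash known to the client
    let h1 = hash(&h0);                   // the logger keeps hashing while idle
    let h2 = hash(&h1);
    let d0 = b"signed transaction data";  // Transaction(d0) arrives
    let h3 = extend_and_hash(&h2, d0);    // d0 is folded into the chain
    // e1 = Entry { id: hash(h3), num_hashes: 3, event: Transaction(d0) }
    let h4 = hash(&h3);                   // back to plain hashing
    let h5 = hash(&h4);
    let h6 = hash(&h5);
    // e2 = Entry { id: h6, num_hashes: 3, event: Tick }
    println!("e2 id: {:?}", h6);
}
```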

View File

@@ -3,13 +3,15 @@
 //! transfer funds to other users.
 use log::{hash, Entry, Sha256Hash};
-use event::{Event, PublicKey, Signature};
+use event::{get_pubkey, sign_transaction_data, Event, PublicKey, Signature};
 use genesis::Genesis;
 use historian::Historian;
 use ring::signature::Ed25519KeyPair;
 use std::sync::mpsc::SendError;
 use std::collections::HashMap;
 use std::result;
+use std::thread::sleep;
+use std::time::Duration;
 #[derive(Debug, PartialEq, Eq)]
 pub enum AccountingError {
@@ -23,7 +25,7 @@ pub type Result<T> = result::Result<T, AccountingError>;
 pub struct Accountant {
 pub historian: Historian<u64>,
 pub balances: HashMap<PublicKey, u64>,
-pub end_hash: Sha256Hash,
+pub last_id: Sha256Hash,
 }
 impl Accountant {
@@ -33,7 +35,7 @@ impl Accountant {
 let mut acc = Accountant {
 historian: hist,
 balances: HashMap::new(),
-end_hash: start_hash,
+last_id: start_hash,
 };
 for (i, event) in gen.create_events().into_iter().enumerate() {
 acc.process_verified_event(event, i < 2).unwrap();
@@ -48,23 +50,12 @@ impl Accountant {
 }
 if let Some(last_entry) = entries.last() {
-self.end_hash = last_entry.end_hash;
+self.last_id = last_entry.id;
 }
 entries
 }
-pub fn deposit(self: &mut Self, n: u64, keypair: &Ed25519KeyPair) -> Result<Signature> {
-use event::{get_pubkey, sign_claim_data};
-let to = get_pubkey(keypair);
-let sig = sign_claim_data(&n, keypair);
-let event = Event::new_claim(to, n, sig);
-if !self.historian.verify_event(&event) {
-return Err(AccountingError::InvalidEvent);
-}
-self.process_verified_event(event, true).map(|_| sig)
-}
 fn is_deposit(allow_deposits: bool, from: &PublicKey, to: &PublicKey) -> bool {
 allow_deposits && from == to
 }
@@ -118,7 +109,6 @@ impl Accountant {
 keypair: &Ed25519KeyPair,
 to: PublicKey,
 ) -> Result<Signature> {
-use event::{get_pubkey, sign_transaction_data};
 let from = get_pubkey(keypair);
 let sig = sign_transaction_data(&n, keypair, &to);
 let event = Event::Transaction {
@@ -135,8 +125,6 @@ impl Accountant {
 }
 pub fn wait_on_signature(self: &mut Self, wait_sig: &Signature) {
-use std::thread::sleep;
-use std::time::Duration;
 let mut entries = self.sync();
 let mut found = false;
 while !found {
@@ -158,6 +146,8 @@ mod tests {
 use event::{generate_keypair, get_pubkey};
 use logger::ExitReason;
 use genesis::Creator;
+use std::thread::sleep;
+use std::time::Duration;
 #[test]
 fn test_accountant() {
@@ -180,8 +170,6 @@ mod tests {
 #[test]
 fn test_invalid_transfer() {
-use std::thread::sleep;
-use std::time::Duration;
 let bob = Creator::new(1_000);
 let bob_pubkey = bob.pubkey;
 let alice = Genesis::new(11_000, vec![bob]);
@@ -210,9 +198,6 @@ mod tests {
 let mut acc = Accountant::new(&alice, Some(2));
 let alice_keypair = alice.get_keypair();
 let bob_keypair = generate_keypair();
-let sig = acc.deposit(10_000, &alice_keypair).unwrap();
-acc.wait_on_signature(&sig);
 let bob_pubkey = get_pubkey(&bob_keypair);
 let sig = acc.transfer(500, &alice_keypair, bob_pubkey).unwrap();
 acc.wait_on_signature(&sig);

View File

@@ -1,6 +1,8 @@
 use std::io;
 use accountant::Accountant;
 use event::{Event, PublicKey, Signature};
+use std::net::UdpSocket;
+use bincode::{deserialize, serialize};
 pub struct AccountantSkel {
 pub obj: Accountant,
@@ -60,8 +62,6 @@ impl AccountantSkel {
 /// UDP Server that forwards messages to Accountant methods.
 pub fn serve(self: &mut Self, addr: &str) -> io::Result<()> {
-use std::net::UdpSocket;
-use bincode::{deserialize, serialize};
 let socket = UdpSocket::bind(addr)?;
 let mut buf = vec![0u8; 1024];
 loop {

View File

@@ -5,7 +5,7 @@
 use std::net::UdpSocket;
 use std::io;
 use bincode::{deserialize, serialize};
-use event::{PublicKey, Signature};
+use event::{get_pubkey, sign_transaction_data, PublicKey, Signature};
 use ring::signature::Ed25519KeyPair;
 use accountant_skel::{Request, Response};
@@ -40,7 +40,6 @@ impl AccountantStub {
 keypair: &Ed25519KeyPair,
 to: PublicKey,
 ) -> io::Result<Signature> {
-use event::{get_pubkey, sign_transaction_data};
 let from = get_pubkey(keypair);
 let sig = sign_transaction_data(&n, keypair, &to);
 self.transfer_signed(from, to, n, sig).map(|_| sig)

View File

@@ -1,19 +1,18 @@
 //extern crate serde_json;
 extern crate silk;
+use silk::accountant_stub::AccountantStub;
+use silk::accountant_skel::AccountantSkel;
+use silk::accountant::Accountant;
+use silk::event::{generate_keypair, get_pubkey, sign_transaction_data, verify_event, Event};
+use silk::genesis::Genesis;
+use std::time::Instant;
+use std::net::UdpSocket;
+use std::thread::{sleep, spawn};
+use std::time::Duration;
 //use std::io::stdin;
 fn main() {
-use silk::accountant_stub::AccountantStub;
-use silk::accountant_skel::AccountantSkel;
-use silk::accountant::Accountant;
-use silk::event::{generate_keypair, get_pubkey, sign_transaction_data};
-use silk::genesis::Genesis;
-use std::time::Instant;
-use std::net::UdpSocket;
-use std::thread::{sleep, spawn};
-use std::time::Duration;
 let addr = "127.0.0.1:8000";
 let send_addr = "127.0.0.1:8001";
@@ -53,7 +52,6 @@ fn main() {
 );
 println!("Verify signatures...");
-use silk::event::{verify_event, Event};
 let now = Instant::now();
 for &(k, s) in &sigs {
 let e = Event::Transaction {

View File

@@ -22,7 +22,7 @@ fn main() {
 drop(logger.sender);
 let entries = receiver.iter().collect::<Vec<_>>();
-verify_slice_u64(&entries, &entries[0].end_hash);
+verify_slice_u64(&entries, &entries[0].id);
 println!("[");
 let len = entries.len();
 for (i, x) in entries.iter().enumerate() {

View File

@ -2,9 +2,9 @@
//! an ordered log of events in time. //! an ordered log of events in time.
/// Each log entry contains three pieces of data. The 'num_hashes' field is the number /// Each log entry contains three pieces of data. The 'num_hashes' field is the number
/// of hashes performed since the previous entry. The 'end_hash' field is the result /// of hashes performed since the previous entry. The 'id' field is the result
/// of hashing 'end_hash' from the previous entry 'num_hashes' times. The 'event' /// of hashing 'id' from the previous entry 'num_hashes' times. The 'event'
/// field points to an Event that took place shortly after 'end_hash' was generated. /// field points to an Event that took place shortly after 'id' was generated.
/// ///
/// If you divide 'num_hashes' by the amount of time it takes to generate a new hash, you /// If you divide 'num_hashes' by the amount of time it takes to generate a new hash, you
/// get a duration estimate since the last event. Since processing power increases /// get a duration estimate since the last event. Since processing power increases
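As a concrete reading of that estimate (a sketch only; the hash rate below is a made-up figure, not something measured in this change):

```rust
// num_hashes divided by the hash rate gives a rough time-since-last-event.
// The 2M hashes/sec figure is a placeholder; a real caller would measure it.
fn estimated_micros(num_hashes: u64, hashes_per_sec: f64) -> f64 {
    num_hashes as f64 / hashes_per_sec * 1_000_000.0
}

fn main() {
    // An entry with num_hashes = 3 at ~2M hashes/sec: about 1.5 microseconds.
    println!("{:.2} us", estimated_micros(3, 2_000_000.0));
}
```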
@@ -16,7 +16,10 @@
 use generic_array::GenericArray;
 use generic_array::typenum::{U32, U64};
 use ring::signature::Ed25519KeyPair;
+use ring::{rand, signature};
+use untrusted;
 use serde::Serialize;
+use bincode::serialize;
 pub type PublicKey = GenericArray<u8, U32>;
 pub type Signature = GenericArray<u8, U64>;
@@ -24,7 +27,7 @@ pub type Signature = GenericArray<u8, U64>;
 /// When 'event' is Tick, the event represents a simple clock tick, and exists for the
 /// sole purpose of improving the performance of event log verification. A tick can
 /// be generated in 'num_hashes' hashes and verified in 'num_hashes' hashes. By logging
-/// a hash alongside the tick, each tick can be verified in parallel using the 'end_hash'
+/// a hash alongside the tick, each tick can be verified in parallel using the 'id'
 /// of the preceding tick to seed its hashing.
 #[derive(Serialize, Deserialize, Debug, PartialEq, Eq, Clone)]
 pub enum Event<T> {
@@ -50,8 +53,6 @@ impl<T> Event<T> {
 /// Return a new ED25519 keypair
 pub fn generate_keypair() -> Ed25519KeyPair {
-use ring::{rand, signature};
-use untrusted;
 let rng = rand::SystemRandom::new();
 let pkcs8_bytes = signature::Ed25519KeyPair::generate_pkcs8(&rng).unwrap();
 signature::Ed25519KeyPair::from_pkcs8(untrusted::Input::from(&pkcs8_bytes)).unwrap()
@@ -64,7 +65,6 @@ pub fn get_pubkey(keypair: &Ed25519KeyPair) -> PublicKey {
 /// Return a signature for the given data using the private key from the given keypair.
 fn sign_serialized<T: Serialize>(data: &T, keypair: &Ed25519KeyPair) -> Signature {
-use bincode::serialize;
 let serialized = serialize(data).unwrap();
 GenericArray::clone_from_slice(keypair.sign(&serialized).as_ref())
 }
@@ -86,8 +86,6 @@ pub fn sign_claim_data<T: Serialize>(data: &T, keypair: &Ed25519KeyPair) -> Sign
 /// Verify a signed message with the given public key.
 pub fn verify_signature(peer_public_key_bytes: &[u8], msg_bytes: &[u8], sig_bytes: &[u8]) -> bool {
-use untrusted;
-use ring::signature;
 let peer_public_key = untrusted::Input::from(peer_public_key_bytes);
 let msg = untrusted::Input::from(msg_bytes);
 let sig = untrusted::Input::from(sig_bytes);
@@ -102,7 +100,6 @@ pub fn get_signature<T>(event: &Event<T>) -> Option<Signature> {
 }
 pub fn verify_event<T: Serialize>(event: &Event<T>) -> bool {
-use bincode::serialize;
 if let Event::Transaction {
 from,
 to,

View File

@@ -3,13 +3,14 @@
 use std::thread::JoinHandle;
 use std::collections::HashSet;
-use std::sync::mpsc::{Receiver, SyncSender};
+use std::sync::mpsc::{sync_channel, Receiver, SyncSender};
 use std::time::Instant;
 use log::{hash, Entry, Sha256Hash};
 use logger::{verify_event_and_reserve_signature, ExitReason, Logger};
 use event::{Event, Signature};
 use serde::Serialize;
 use std::fmt::Debug;
+use std::thread;
 pub struct Historian<T> {
 pub sender: SyncSender<Event<T>>,
@@ -20,7 +21,6 @@ pub struct Historian<T> {
 impl<T: 'static + Serialize + Clone + Debug + Send> Historian<T> {
 pub fn new(start_hash: &Sha256Hash, ms_per_tick: Option<u64>) -> Self {
-use std::sync::mpsc::sync_channel;
 let (sender, event_receiver) = sync_channel(1000);
 let (entry_sender, receiver) = sync_channel(1000);
 let thread_hdl =
@@ -46,7 +46,6 @@ impl<T: 'static + Serialize + Clone + Debug + Send> Historian<T> {
 receiver: Receiver<Event<T>>,
 sender: SyncSender<Entry<T>>,
 ) -> JoinHandle<(Entry<T>, ExitReason)> {
-use std::thread;
 thread::spawn(move || {
 let mut logger = Logger::new(receiver, sender, start_hash);
 let now = Instant::now();
@@ -54,7 +53,7 @@ impl<T: 'static + Serialize + Clone + Debug + Send> Historian<T> {
 if let Err(err) = logger.log_events(now, ms_per_tick) {
 return err;
 }
-logger.end_hash = hash(&logger.end_hash);
+logger.last_id = hash(&logger.last_id);
 logger.num_hashes += 1;
 }
 })

View File

@@ -2,9 +2,9 @@
 //! an ordered log of events in time.
 /// Each log entry contains three pieces of data. The 'num_hashes' field is the number
-/// of hashes performed since the previous entry. The 'end_hash' field is the result
-/// of hashing 'end_hash' from the previous entry 'num_hashes' times. The 'event'
-/// field points to an Event that took place shortly after 'end_hash' was generated.
+/// of hashes performed since the previous entry. The 'id' field is the result
+/// of hashing 'id' from the previous entry 'num_hashes' times. The 'event'
+/// field points to an Event that took place shortly after 'id' was generated.
 ///
 /// If you divide 'num_hashes' by the amount of time it takes to generate a new hash, you
 /// get a duration estimate since the last event. Since processing power increases
@@ -16,24 +16,27 @@
 use generic_array::GenericArray;
 use generic_array::typenum::U32;
 use serde::Serialize;
-use event::*;
+use event::{get_signature, verify_event, Event};
+use sha2::{Digest, Sha256};
+use rayon::prelude::*;
+use std::iter;
 pub type Sha256Hash = GenericArray<u8, U32>;
 #[derive(Serialize, Deserialize, Debug, PartialEq, Eq, Clone)]
 pub struct Entry<T> {
 pub num_hashes: u64,
-pub end_hash: Sha256Hash,
+pub id: Sha256Hash,
 pub event: Event<T>,
 }
 impl<T> Entry<T> {
 /// Creates a Entry from the number of hashes 'num_hashes' since the previous event
-/// and that resulting 'end_hash'.
-pub fn new_tick(num_hashes: u64, end_hash: &Sha256Hash) -> Self {
+/// and that resulting 'id'.
+pub fn new_tick(num_hashes: u64, id: &Sha256Hash) -> Self {
 Entry {
 num_hashes,
-end_hash: *end_hash,
+id: *id,
 event: Event::Tick,
 }
 }
@@ -41,40 +44,39 @@ impl<T> Entry<T> {
 /// Return a Sha256 hash for the given data.
 pub fn hash(val: &[u8]) -> Sha256Hash {
-use sha2::{Digest, Sha256};
 let mut hasher = Sha256::default();
 hasher.input(val);
 hasher.result()
 }
 /// Return the hash of the given hash extended with the given value.
-pub fn extend_and_hash(end_hash: &Sha256Hash, val: &[u8]) -> Sha256Hash {
-let mut hash_data = end_hash.to_vec();
+pub fn extend_and_hash(id: &Sha256Hash, val: &[u8]) -> Sha256Hash {
+let mut hash_data = id.to_vec();
 hash_data.extend_from_slice(val);
 hash(&hash_data)
 }
-pub fn hash_event<T>(end_hash: &Sha256Hash, event: &Event<T>) -> Sha256Hash {
-match get_signature(event) {
-None => *end_hash,
-Some(sig) => extend_and_hash(end_hash, &sig),
-}
-}
-/// Creates the hash 'num_hashes' after start_hash, plus an additional hash for any event data.
+/// Creates the hash 'num_hashes' after start_hash. If the event contains
+/// signature, the final hash will be a hash of both the previous ID and
+/// the signature.
 pub fn next_hash<T: Serialize>(
 start_hash: &Sha256Hash,
 num_hashes: u64,
 event: &Event<T>,
 ) -> Sha256Hash {
-let mut end_hash = *start_hash;
-for _ in 0..num_hashes {
-end_hash = hash(&end_hash);
+let mut id = *start_hash;
+let sig = get_signature(event);
+let start_index = if sig.is_some() { 1 } else { 0 };
+for _ in start_index..num_hashes {
+id = hash(&id);
 }
-hash_event(&end_hash, event)
+if let Some(sig) = sig {
+id = extend_and_hash(&id, &sig);
+}
+id
 }
-/// Creates the next Tick Entry 'num_hashes' after 'start_hash'.
+/// Creates the next Entry 'num_hashes' after 'start_hash'.
 pub fn next_entry<T: Serialize>(
 start_hash: &Sha256Hash,
 num_hashes: u64,
@@ -82,7 +84,7 @@ pub fn next_entry<T: Serialize>(
 ) -> Entry<T> {
 Entry {
 num_hashes,
-end_hash: next_hash(start_hash, num_hashes, &event),
+id: next_hash(start_hash, num_hashes, &event),
 event,
 }
 }
@@ -94,7 +96,7 @@ pub fn next_entry_mut<T: Serialize>(
 event: Event<T>,
 ) -> Entry<T> {
 let entry = next_entry(start_hash, num_hashes, event);
-*start_hash = entry.end_hash;
+*start_hash = entry.id;
 entry
 }
@@ -103,36 +105,34 @@ pub fn next_tick<T: Serialize>(start_hash: &Sha256Hash, num_hashes: u64) -> Entr
 next_entry(start_hash, num_hashes, Event::Tick)
 }
-/// Verifies self.end_hash is the result of hashing a 'start_hash' 'self.num_hashes' times.
+/// Verifies self.id is the result of hashing a 'start_hash' 'self.num_hashes' times.
 /// If the event is not a Tick, then hash that as well.
 pub fn verify_entry<T: Serialize>(entry: &Entry<T>, start_hash: &Sha256Hash) -> bool {
 if !verify_event(&entry.event) {
 return false;
 }
-entry.end_hash == next_hash(start_hash, entry.num_hashes, &entry.event)
+entry.id == next_hash(start_hash, entry.num_hashes, &entry.event)
 }
 /// Verifies the hashes and counts of a slice of events are all consistent.
 pub fn verify_slice(events: &[Entry<Sha256Hash>], start_hash: &Sha256Hash) -> bool {
-use rayon::prelude::*;
 let genesis = [Entry::new_tick(Default::default(), start_hash)];
 let event_pairs = genesis.par_iter().chain(events).zip(events);
-event_pairs.all(|(x0, x1)| verify_entry(&x1, &x0.end_hash))
+event_pairs.all(|(x0, x1)| verify_entry(&x1, &x0.id))
 }
 /// Verifies the hashes and counts of a slice of events are all consistent.
 pub fn verify_slice_u64(events: &[Entry<u64>], start_hash: &Sha256Hash) -> bool {
-use rayon::prelude::*;
 let genesis = [Entry::new_tick(Default::default(), start_hash)];
 let event_pairs = genesis.par_iter().chain(events).zip(events);
-event_pairs.all(|(x0, x1)| verify_entry(&x1, &x0.end_hash))
+event_pairs.all(|(x0, x1)| verify_entry(&x1, &x0.id))
 }
 /// Verifies the hashes and events serially. Exists only for reference.
 pub fn verify_slice_seq<T: Serialize>(events: &[Entry<T>], start_hash: &Sha256Hash) -> bool {
 let genesis = [Entry::new_tick(0, start_hash)];
 let mut event_pairs = genesis.iter().chain(events).zip(events);
-event_pairs.all(|(x0, x1)| verify_entry(&x1, &x0.end_hash))
+event_pairs.all(|(x0, x1)| verify_entry(&x1, &x0.id))
 }
 pub fn create_entries<T: Serialize>(
@@ -140,10 +140,10 @@ pub fn create_entries<T: Serialize>(
 num_hashes: u64,
 events: Vec<Event<T>>,
 ) -> Vec<Entry<T>> {
-let mut end_hash = *start_hash;
+let mut id = *start_hash;
 events
 .into_iter()
-.map(|event| next_entry_mut(&mut end_hash, num_hashes, event))
+.map(|event| next_entry_mut(&mut id, num_hashes, event))
 .collect()
 }
@@ -153,17 +153,17 @@ pub fn create_ticks(
 num_hashes: u64,
 len: usize,
 ) -> Vec<Entry<Sha256Hash>> {
-use std::iter;
-let mut end_hash = *start_hash;
+let mut id = *start_hash;
 iter::repeat(Event::Tick)
 .take(len)
-.map(|event| next_entry_mut(&mut end_hash, num_hashes, event))
+.map(|event| next_entry_mut(&mut id, num_hashes, event))
 .collect()
 }
 #[cfg(test)]
 mod tests {
 use super::*;
+use event::{generate_keypair, get_pubkey, sign_claim_data, sign_transaction_data};
 #[test]
 fn test_event_verify() {
@@ -190,7 +190,7 @@ mod tests {
 assert!(verify_slice(&create_ticks(&zero, 0, 2), &zero)); // inductive step
 let mut bad_ticks = create_ticks(&zero, 0, 2);
-bad_ticks[1].end_hash = one;
+bad_ticks[1].id = one;
 assert!(!verify_slice(&bad_ticks, &zero)); // inductive step, bad
 }

View File

@@ -6,9 +6,9 @@
 //! The resulting stream of entries represents ordered events in time.
 use std::collections::HashSet;
-use std::sync::mpsc::{Receiver, SyncSender};
+use std::sync::mpsc::{Receiver, SyncSender, TryRecvError};
 use std::time::{Duration, Instant};
-use log::{hash_event, Entry, Sha256Hash};
+use log::{extend_and_hash, Entry, Sha256Hash};
 use event::{get_signature, verify_event, Event, Signature};
 use serde::Serialize;
 use std::fmt::Debug;
@@ -22,7 +22,7 @@ pub enum ExitReason {
 pub struct Logger<T> {
 pub sender: SyncSender<Entry<T>>,
 pub receiver: Receiver<Event<T>>,
-pub end_hash: Sha256Hash,
+pub last_id: Sha256Hash,
 pub num_hashes: u64,
 pub num_ticks: u64,
 }
@@ -52,16 +52,19 @@ impl<T: Serialize + Clone + Debug> Logger<T> {
 Logger {
 receiver,
 sender,
-end_hash: start_hash,
+last_id: start_hash,
 num_hashes: 0,
 num_ticks: 0,
 }
 }
 pub fn log_event(&mut self, event: Event<T>) -> Result<(), (Entry<T>, ExitReason)> {
-self.end_hash = hash_event(&self.end_hash, &event);
+if let Some(sig) = get_signature(&event) {
+self.last_id = extend_and_hash(&self.last_id, &sig);
+self.num_hashes += 1;
+}
 let entry = Entry {
-end_hash: self.end_hash,
+id: self.last_id,
 num_hashes: self.num_hashes,
 event,
 };
@@ -77,7 +80,6 @@ impl<T: Serialize + Clone + Debug> Logger<T> {
 epoch: Instant,
 ms_per_tick: Option<u64>,
 ) -> Result<(), (Entry<T>, ExitReason)> {
-use std::sync::mpsc::TryRecvError;
 loop {
 if let Some(ms) = ms_per_tick {
 if epoch.elapsed() > Duration::from_millis((self.num_ticks + 1) * ms) {
@@ -94,7 +96,7 @@ impl<T: Serialize + Clone + Debug> Logger<T> {
 }
 Err(TryRecvError::Disconnected) => {
 let entry = Entry {
-end_hash: self.end_hash,
+id: self.last_id,
 num_hashes: self.num_hashes,
 event: Event::Tick,
 };
@@ -149,12 +151,12 @@ mod tests {
 #[test]
 fn test_genesis_no_creators() {
 let entries = run_genesis(Genesis::new(100, vec![]));
-assert!(verify_slice_u64(&entries, &entries[0].end_hash));
+assert!(verify_slice_u64(&entries, &entries[0].id));
 }
 #[test]
 fn test_genesis() {
 let entries = run_genesis(Genesis::new(100, vec![Creator::new(42)]));
-assert!(verify_slice_u64(&entries, &entries[0].end_hash));
+assert!(verify_slice_u64(&entries, &entries[0].id));
 }
 }