Rename end_hash to id
@@ -71,9 +71,9 @@ fn main() {
 Running the program should produce a log similar to:
 
 ```rust
-Entry { num_hashes: 0, end_hash: [0, ...], event: Tick }
-Entry { num_hashes: 2, end_hash: [67, ...], event: Transaction { data: [37, ...] } }
-Entry { num_hashes: 3, end_hash: [123, ...], event: Tick }
+Entry { num_hashes: 0, id: [0, ...], event: Tick }
+Entry { num_hashes: 2, id: [67, ...], event: Transaction { data: [37, ...] } }
+Entry { num_hashes: 3, id: [123, ...], event: Tick }
 ```
 
 Proof-of-History
@@ -86,7 +86,7 @@ assert!(verify_slice(&entries, &seed));
 ```
 
 [It's a proof!](https://en.wikipedia.org/wiki/Curry–Howard_correspondence) For each entry returned by the
-historian, we can verify that `end_hash` is the result of applying a sha256 hash to the previous `end_hash`
+historian, we can verify that `id` is the result of applying a sha256 hash to the previous `id`
 exactly `num_hashes` times, and then hashing the event data on top of that. Because the event data is
 included in the hash, the events cannot be reordered without regenerating all the hashes.
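A minimal sketch of that check, assuming the `sha2` crate (the `Entry` type and the raw event bytes here are simplifications of the real definitions in src/log.rs, which mix in the event's signature):

```rust
use sha2::{Digest, Sha256};

/// Simplified stand-in for the Entry type in src/log.rs.
struct Entry {
    num_hashes: u64,
    id: [u8; 32],
    event_data: Option<Vec<u8>>, // None models a Tick
}

/// Check that `entry.id` extends `prev_id` by exactly `num_hashes` rounds
/// of sha256, with the event data (if any) hashed in on top.
fn verify(prev_id: &[u8; 32], entry: &Entry) -> bool {
    let mut id = *prev_id;
    for _ in 0..entry.num_hashes {
        id = Sha256::digest(&id).into();
    }
    if let Some(data) = &entry.event_data {
        let mut hasher = Sha256::new();
        hasher.update(&id);
        hasher.update(data);
        id = hasher.finalize().into();
    }
    id == entry.id
}
```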

@@ -1,17 +1,17 @@
 msc {
   client,historian,logger;
 
-  logger=>historian [ label = "e0 = Entry{hash: h0, n: 0, event: Tick}" ] ;
+  logger=>historian [ label = "e0 = Entry{id: h0, n: 0, event: Tick}" ] ;
   logger=>logger [ label = "h1 = hash(h0)" ] ;
   logger=>logger [ label = "h2 = hash(h1)" ] ;
   client=>historian [ label = "Claim(d0)" ] ;
   historian=>logger [ label = "Claim(d0)" ] ;
   logger=>logger [ label = "h3 = hash(h2 + d0)" ] ;
-  logger=>historian [ label = "e1 = Entry{hash: hash(h3), n: 2, event: Claim(d0)}" ] ;
+  logger=>historian [ label = "e1 = Entry{id: hash(h3), n: 2, event: Claim(d0)}" ] ;
   logger=>logger [ label = "h4 = hash(h3)" ] ;
   logger=>logger [ label = "h5 = hash(h4)" ] ;
   logger=>logger [ label = "h6 = hash(h5)" ] ;
-  logger=>historian [ label = "e2 = Entry{hash: h6, n: 3, event: Tick}" ] ;
+  logger=>historian [ label = "e2 = Entry{id: h6, n: 3, event: Tick}" ] ;
   client=>historian [ label = "collect()" ] ;
   historian=>client [ label = "entries = [e0, e1, e2]" ] ;
   client=>client [ label = "verify_slice(entries, h0)" ] ;
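The hash chain in this diagram can be replayed by hand. A small sketch, assuming the `sha2` crate and arbitrary bytes standing in for the claim payload `d0`, reproducing `e1.id = hash(h3)` where `h3 = hash(h2 + d0)`:

```rust
use sha2::{Digest, Sha256};

fn hash(val: &[u8]) -> [u8; 32] {
    Sha256::digest(val).into()
}

fn main() {
    let h0 = [0u8; 32]; // seed, as in the diagram
    let d0 = b"d0"; // stand-in bytes for the Claim(d0) payload
    let h1 = hash(&h0);
    let h2 = hash(&h1); // two plain hashes, so e1 reports n = 2
    let mut buf = h2.to_vec();
    buf.extend_from_slice(d0);
    let h3 = hash(&buf); // h3 = hash(h2 + d0)
    let e1_id = hash(&h3); // e1 = Entry{id: hash(h3), n: 2, event: Claim(d0)}
    println!("e1.id starts with {:?}", &e1_id[..4]);
}
```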

@@ -25,7 +25,7 @@ pub type Result<T> = result::Result<T, AccountingError>;
 pub struct Accountant {
     pub historian: Historian<u64>,
     pub balances: HashMap<PublicKey, u64>,
-    pub end_hash: Sha256Hash,
+    pub last_id: Sha256Hash,
 }
 
 impl Accountant {
@@ -35,7 +35,7 @@ impl Accountant {
         let mut acc = Accountant {
             historian: hist,
             balances: HashMap::new(),
-            end_hash: start_hash,
+            last_id: start_hash,
         };
         for (i, event) in gen.create_events().into_iter().enumerate() {
             acc.process_verified_event(event, i < 2).unwrap();
@@ -50,7 +50,7 @@ impl Accountant {
         }
 
         if let Some(last_entry) = entries.last() {
-            self.end_hash = last_entry.end_hash;
+            self.last_id = last_entry.id;
         }
 
         entries

@@ -22,7 +22,7 @@ fn main() {
     drop(logger.sender);
 
     let entries = receiver.iter().collect::<Vec<_>>();
-    verify_slice_u64(&entries, &entries[0].end_hash);
+    verify_slice_u64(&entries, &entries[0].id);
     println!("[");
     let len = entries.len();
     for (i, x) in entries.iter().enumerate() {

@@ -2,9 +2,9 @@
 //! an ordered log of events in time.
 
 /// Each log entry contains three pieces of data. The 'num_hashes' field is the number
-/// of hashes performed since the previous entry.  The 'end_hash' field is the result
-/// of hashing 'end_hash' from the previous entry 'num_hashes' times.  The 'event'
-/// field points to an Event that took place shortly after 'end_hash' was generated.
+/// of hashes performed since the previous entry.  The 'id' field is the result
+/// of hashing 'id' from the previous entry 'num_hashes' times.  The 'event'
+/// field points to an Event that took place shortly after 'id' was generated.
 ///
 /// If you divide 'num_hashes' by the amount of time it takes to generate a new hash, you
 /// get a duration estimate since the last event. Since processing power increases
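The duration estimate described in that comment is just a ratio. A trivial illustration, where the hash rate is an assumed figure for the example, not something measured here:

```rust
/// Estimated seconds since the previous entry, given a measured hash rate.
fn estimated_duration_secs(num_hashes: u64, hashes_per_sec: u64) -> f64 {
    num_hashes as f64 / hashes_per_sec as f64
}

fn main() {
    // e.g. 500_000 hashes at an assumed 2_000_000 sha256 hashes/sec = 0.25 s
    assert_eq!(estimated_duration_secs(500_000, 2_000_000), 0.25);
}
```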
@@ -27,7 +27,7 @@ pub type Signature = GenericArray<u8, U64>;
 /// When 'event' is Tick, the event represents a simple clock tick, and exists for the
 /// sole purpose of improving the performance of event log verification. A tick can
 /// be generated in 'num_hashes' hashes and verified in 'num_hashes' hashes.  By logging
-/// a hash alongside the tick, each tick can be verified in parallel using the 'end_hash'
+/// a hash alongside the tick, each tick can be verified in parallel using the 'id'
 /// of the preceding tick to seed its hashing.
 #[derive(Serialize, Deserialize, Debug, PartialEq, Eq, Clone)]
 pub enum Event<T> {
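Since every entry carries the `id` that seeds the next one, ticks can be checked pairwise and in parallel, which is what `verify_slice` in src/log.rs does with rayon. A hedged sketch of that pattern for plain ticks, assuming the `sha2` and `rayon` crates, with `Tick` as an illustrative stand-in for `Entry`:

```rust
use rayon::prelude::*;
use sha2::{Digest, Sha256};

struct Tick {
    num_hashes: u64,
    id: [u8; 32],
}

/// Replay `num_hashes` rounds of sha256 starting from `start`.
fn next_id(start: &[u8; 32], num_hashes: u64) -> [u8; 32] {
    let mut id = *start;
    for _ in 0..num_hashes {
        id = Sha256::digest(&id).into();
    }
    id
}

/// Each (predecessor, tick) pair is independent, so rayon can verify
/// all of them in parallel, seeded by the preceding tick's id.
fn verify_ticks(ticks: &[Tick], seed: &[u8; 32]) -> bool {
    let genesis = [Tick { num_hashes: 0, id: *seed }];
    genesis
        .par_iter()
        .chain(ticks)
        .zip(ticks)
        .all(|(prev, t)| t.id == next_id(&prev.id, t.num_hashes))
}
```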

@@ -53,7 +53,7 @@ impl<T: 'static + Serialize + Clone + Debug + Send> Historian<T> {
                 if let Err(err) = logger.log_events(now, ms_per_tick) {
                     return err;
                 }
-                logger.end_hash = hash(&logger.end_hash);
+                logger.last_id = hash(&logger.last_id);
                 logger.num_hashes += 1;
             }
         })
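Between events, this thread does nothing but extend the chain: hash the previous `last_id`, bump `num_hashes`, repeat. A freestanding sketch of that inner loop, assuming the `sha2` crate; the loop bound is arbitrary here, whereas the real loop runs until `log_events` reports an exit:

```rust
use sha2::{Digest, Sha256};

fn main() {
    let mut last_id = [0u8; 32];
    let mut num_hashes = 0u64;
    // The historian thread keeps squeezing hashes out of last_id between
    // events; num_hashes is what later turns into a duration estimate.
    for _ in 0..1_000 {
        last_id = Sha256::digest(&last_id).into();
        num_hashes += 1;
    }
    println!("after {num_hashes} hashes: {:?}", &last_id[..4]);
}
```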

src/log.rs

@@ -2,9 +2,9 @@
 //! an ordered log of events in time.
 
 /// Each log entry contains three pieces of data. The 'num_hashes' field is the number
-/// of hashes performed since the previous entry.  The 'end_hash' field is the result
-/// of hashing 'end_hash' from the previous entry 'num_hashes' times.  The 'event'
-/// field points to an Event that took place shortly after 'end_hash' was generated.
+/// of hashes performed since the previous entry.  The 'id' field is the result
+/// of hashing 'id' from the previous entry 'num_hashes' times.  The 'event'
+/// field points to an Event that took place shortly after 'id' was generated.
 ///
 /// If you divide 'num_hashes' by the amount of time it takes to generate a new hash, you
 /// get a duration estimate since the last event. Since processing power increases
@@ -26,17 +26,17 @@ pub type Sha256Hash = GenericArray<u8, U32>;
 #[derive(Serialize, Deserialize, Debug, PartialEq, Eq, Clone)]
 pub struct Entry<T> {
     pub num_hashes: u64,
-    pub end_hash: Sha256Hash,
+    pub id: Sha256Hash,
     pub event: Event<T>,
 }
 
 impl<T> Entry<T> {
     /// Creates an Entry from the number of hashes 'num_hashes' since the previous event
-    /// and that resulting 'end_hash'.
-    pub fn new_tick(num_hashes: u64, end_hash: &Sha256Hash) -> Self {
+    /// and that resulting 'id'.
+    pub fn new_tick(num_hashes: u64, id: &Sha256Hash) -> Self {
         Entry {
             num_hashes,
-            end_hash: *end_hash,
+            id: *id,
             event: Event::Tick,
         }
     }
@@ -50,16 +50,16 @@ pub fn hash(val: &[u8]) -> Sha256Hash {
 }
 
 /// Return the hash of the given hash extended with the given value.
-pub fn extend_and_hash(end_hash: &Sha256Hash, val: &[u8]) -> Sha256Hash {
-    let mut hash_data = end_hash.to_vec();
+pub fn extend_and_hash(id: &Sha256Hash, val: &[u8]) -> Sha256Hash {
+    let mut hash_data = id.to_vec();
     hash_data.extend_from_slice(val);
     hash(&hash_data)
 }
 
-pub fn hash_event<T>(end_hash: &Sha256Hash, event: &Event<T>) -> Sha256Hash {
+pub fn hash_event<T>(id: &Sha256Hash, event: &Event<T>) -> Sha256Hash {
     match get_signature(event) {
-        None => *end_hash,
-        Some(sig) => extend_and_hash(end_hash, &sig),
+        None => *id,
+        Some(sig) => extend_and_hash(id, &sig),
     }
 }
 
@@ -69,11 +69,11 @@ pub fn next_hash<T: Serialize>(
     num_hashes: u64,
     event: &Event<T>,
 ) -> Sha256Hash {
-    let mut end_hash = *start_hash;
+    let mut id = *start_hash;
     for _ in 0..num_hashes {
-        end_hash = hash(&end_hash);
+        id = hash(&id);
     }
-    hash_event(&end_hash, event)
+    hash_event(&id, event)
 }
 
 /// Creates the next Tick Entry 'num_hashes' after 'start_hash'.
@@ -84,7 +84,7 @@ pub fn next_entry<T: Serialize>(
 ) -> Entry<T> {
     Entry {
         num_hashes,
-        end_hash: next_hash(start_hash, num_hashes, &event),
+        id: next_hash(start_hash, num_hashes, &event),
         event,
     }
 }
@@ -96,7 +96,7 @@ pub fn next_entry_mut<T: Serialize>(
     event: Event<T>,
 ) -> Entry<T> {
     let entry = next_entry(start_hash, num_hashes, event);
-    *start_hash = entry.end_hash;
+    *start_hash = entry.id;
     entry
 }
 
@@ -105,34 +105,34 @@ pub fn next_tick<T: Serialize>(start_hash: &Sha256Hash, num_hashes: u64) -> Entr
     next_entry(start_hash, num_hashes, Event::Tick)
 }
 
-/// Verifies self.end_hash is the result of hashing a 'start_hash' 'self.num_hashes' times.
+/// Verifies self.id is the result of hashing a 'start_hash' 'self.num_hashes' times.
 /// If the event is not a Tick, then hash that as well.
 pub fn verify_entry<T: Serialize>(entry: &Entry<T>, start_hash: &Sha256Hash) -> bool {
     if !verify_event(&entry.event) {
         return false;
     }
-    entry.end_hash == next_hash(start_hash, entry.num_hashes, &entry.event)
+    entry.id == next_hash(start_hash, entry.num_hashes, &entry.event)
 }
 
 /// Verifies the hashes and counts of a slice of events are all consistent.
 pub fn verify_slice(events: &[Entry<Sha256Hash>], start_hash: &Sha256Hash) -> bool {
     let genesis = [Entry::new_tick(Default::default(), start_hash)];
     let event_pairs = genesis.par_iter().chain(events).zip(events);
-    event_pairs.all(|(x0, x1)| verify_entry(&x1, &x0.end_hash))
+    event_pairs.all(|(x0, x1)| verify_entry(&x1, &x0.id))
 }
 
 /// Verifies the hashes and counts of a slice of events are all consistent.
 pub fn verify_slice_u64(events: &[Entry<u64>], start_hash: &Sha256Hash) -> bool {
     let genesis = [Entry::new_tick(Default::default(), start_hash)];
     let event_pairs = genesis.par_iter().chain(events).zip(events);
-    event_pairs.all(|(x0, x1)| verify_entry(&x1, &x0.end_hash))
+    event_pairs.all(|(x0, x1)| verify_entry(&x1, &x0.id))
 }
 
 /// Verifies the hashes and events serially. Exists only for reference.
 pub fn verify_slice_seq<T: Serialize>(events: &[Entry<T>], start_hash: &Sha256Hash) -> bool {
     let genesis = [Entry::new_tick(0, start_hash)];
     let mut event_pairs = genesis.iter().chain(events).zip(events);
-    event_pairs.all(|(x0, x1)| verify_entry(&x1, &x0.end_hash))
+    event_pairs.all(|(x0, x1)| verify_entry(&x1, &x0.id))
 }
 
 pub fn create_entries<T: Serialize>(
@@ -140,10 +140,10 @@ pub fn create_entries<T: Serialize>(
     num_hashes: u64,
     events: Vec<Event<T>>,
 ) -> Vec<Entry<T>> {
-    let mut end_hash = *start_hash;
+    let mut id = *start_hash;
     events
         .into_iter()
-        .map(|event| next_entry_mut(&mut end_hash, num_hashes, event))
+        .map(|event| next_entry_mut(&mut id, num_hashes, event))
         .collect()
 }
 
@@ -153,10 +153,10 @@ pub fn create_ticks(
     num_hashes: u64,
     len: usize,
 ) -> Vec<Entry<Sha256Hash>> {
-    let mut end_hash = *start_hash;
+    let mut id = *start_hash;
     iter::repeat(Event::Tick)
         .take(len)
-        .map(|event| next_entry_mut(&mut end_hash, num_hashes, event))
+        .map(|event| next_entry_mut(&mut id, num_hashes, event))
         .collect()
 }
 
@@ -189,7 +189,7 @@ mod tests {
         assert!(verify_slice(&create_ticks(&zero, 0, 2), &zero)); // inductive step
 
         let mut bad_ticks = create_ticks(&zero, 0, 2);
-        bad_ticks[1].end_hash = one;
+        bad_ticks[1].id = one;
         assert!(!verify_slice(&bad_ticks, &zero)); // inductive step, bad
     }
 
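Putting the renamed pieces together, usage looks much like the test above. A hedged sketch, where the `use` path is an assumption about where this crate exports these items:

```rust
use log::{create_ticks, verify_slice, Sha256Hash}; // assumed import path

fn main() {
    let zero = Sha256Hash::default();
    // 16 ticks, each 1_000 hashes after the previous one.
    let ticks = create_ticks(&zero, 1_000, 16);
    assert!(verify_slice(&ticks, &zero));

    // Reordering entries breaks the chain, since each id seeds the next.
    let mut bad = create_ticks(&zero, 1_000, 16);
    bad.swap(3, 4);
    assert!(!verify_slice(&bad, &zero));
}
```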

@@ -22,7 +22,7 @@ pub enum ExitReason {
 pub struct Logger<T> {
     pub sender: SyncSender<Entry<T>>,
     pub receiver: Receiver<Event<T>>,
-    pub end_hash: Sha256Hash,
+    pub last_id: Sha256Hash,
     pub num_hashes: u64,
     pub num_ticks: u64,
 }
@@ -52,16 +52,16 @@ impl<T: Serialize + Clone + Debug> Logger<T> {
         Logger {
             receiver,
             sender,
-            end_hash: start_hash,
+            last_id: start_hash,
             num_hashes: 0,
             num_ticks: 0,
         }
     }
 
     pub fn log_event(&mut self, event: Event<T>) -> Result<(), (Entry<T>, ExitReason)> {
-        self.end_hash = hash_event(&self.end_hash, &event);
+        self.last_id = hash_event(&self.last_id, &event);
         let entry = Entry {
-            end_hash: self.end_hash,
+            id: self.last_id,
             num_hashes: self.num_hashes,
            event,
         };
@@ -93,7 +93,7 @@ impl<T: Serialize + Clone + Debug> Logger<T> {
                 }
                 Err(TryRecvError::Disconnected) => {
                     let entry = Entry {
-                        end_hash: self.end_hash,
+                        id: self.last_id,
                         num_hashes: self.num_hashes,
                         event: Event::Tick,
                     };
@@ -148,12 +148,12 @@ mod tests {
     #[test]
     fn test_genesis_no_creators() {
         let entries = run_genesis(Genesis::new(100, vec![]));
-        assert!(verify_slice_u64(&entries, &entries[0].end_hash));
+        assert!(verify_slice_u64(&entries, &entries[0].id));
     }
 
     #[test]
     fn test_genesis() {
         let entries = run_genesis(Genesis::new(100, vec![Creator::new(42)]));
-        assert!(verify_slice_u64(&entries, &entries[0].end_hash));
+        assert!(verify_slice_u64(&entries, &entries[0].id));
     }
 }