Unverified commit 0dae2f19 authored by Roman Zeyde

Re-format latest Rust version

parent e49cef1b
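Note for readers skimming the diff: this commit appears to be a pure re-format, presumably produced by running a newer rustfmt, whose style moves a method call that used to hang off a closing brace or parenthesis onto its own line. A minimal sketch of that style change follows; `apply` is a hypothetical helper used only for illustration and is not part of this codebase.

// Hypothetical helper, only to demonstrate the formatting change.
fn apply<F: Fn(u32) -> u32>(f: F) -> Option<u32> {
    Some(f(1))
}

fn main() {
    // Old style: the chained call sits directly on the closing parenthesis.
    let old = apply(|x| {
        x + 1
    }).unwrap();

    // New style: rustfmt breaks the chained call onto its own line.
    let new = apply(|x| {
        x + 1
    })
    .unwrap();

    assert_eq!(old, new); // behavior is unchanged; only the layout differs
}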
@@ -51,7 +51,8 @@ fn run_server(config: &Config) -> Result<()> {
             index.reload(&store); // make sure the block header index is up-to-date
             store
         }
-    }.enable_compaction(); // enable auto compactions before starting incremental index updates.
+    }
+    .enable_compaction(); // enable auto compactions before starting incremental index updates.
     let app = App::new(store, index, daemon)?;
     let tx_cache = TransactionCache::new(config.tx_cache_size);

@@ -241,6 +241,7 @@ pub fn index_blk_files(
         });
         store.write(vec![parser.last_indexed_row()]);
         store
-    }).join()
+    })
+    .join()
     .expect("writer panicked"))
 }

@@ -26,7 +26,8 @@ fn parse_hash(value: &Value) -> Result<Sha256dHash> {
         value
             .as_str()
             .chain_err(|| format!("non-string value: {}", value))?,
-    ).chain_err(|| format!("non-hex value: {}", value))?)
+    )
+    .chain_err(|| format!("non-hex value: {}", value))?)
 }
 fn header_from_value(value: Value) -> Result<BlockHeader> {

@@ -206,7 +207,8 @@ impl Connection {
             .next()
             .chain_err(|| {
                 ErrorKind::Connection("disconnected from daemon while receiving".to_owned())
-            })?.chain_err(|| "failed to read status")?;
+            })?
+            .chain_err(|| "failed to read status")?;
         let mut headers = HashMap::new();
         for line in iter {
             let line = line.chain_err(|| ErrorKind::Connection("failed to read".to_owned()))?;

@@ -478,7 +480,8 @@ impl Daemon {
         self.request(
             "getblock",
             json!([blockhash.be_hex_string(), /*verbose=*/ 1]),
-        )?.get("tx")
+        )?
+        .get("tx")
         .chain_err(|| "block missing txids")?
         .as_array()
         .chain_err(|| "invalid block txids")?

 use chan_signal::Signal;
-error_chain!{
+error_chain! {
     types {
         Error, ErrorKind, ResultExt, Result;
     }

@@ -51,7 +51,8 @@ impl TxInRow
             code: b'I',
             prev_hash_prefix: hash_prefix(&txid[..]),
             prev_index: output_index as u16,
-        }).unwrap()
+        })
+        .unwrap()
     }
     pub fn to_row(&self) -> Row {

@@ -93,7 +94,8 @@ impl TxOutRow
         bincode::serialize(&TxOutKey {
             code: b'O',
             script_hash_prefix: hash_prefix(&script_hash[..HASH_PREFIX_LEN]),
-        }).unwrap()
+        })
+        .unwrap()
     }
     pub fn to_row(&self) -> Row {

@@ -194,7 +196,8 @@ pub fn index_block(block: &Block, height: usize) -> Vec<Row> {
         key: bincode::serialize(&BlockKey {
             code: b'B',
             hash: full_hash(&blockhash[..]),
-        }).unwrap(),
+        })
+        .unwrap(),
         value: serialize(&block.header),
     });
     rows

@@ -205,7 +205,8 @@ impl Tracker
                     None // ignore this transaction for now
                 }
             }
-        }).collect();
+        })
+        .collect();
         if entries.is_empty() {
             return Ok(());
         }

@@ -44,7 +44,8 @@ fn unspent_from_status(status: &Status) -> Value {
             "tx_pos": out.output_index,
             "tx_hash": out.txn_id.be_hex_string(),
             "value": out.value,
-        })).collect()
+        }))
+        .collect()
     ))
 }

@@ -492,14 +493,16 @@ impl RPC
             for msg in notification.receiver().iter() {
                 let mut senders = senders.lock().unwrap();
                 match msg {
-                    Notification::Periodic => for sender in senders.split_off(0) {
-                        if let Err(TrySendError::Disconnected(_)) =
-                            sender.try_send(Message::PeriodicUpdate)
-                        {
-                            continue;
+                    Notification::Periodic => {
+                        for sender in senders.split_off(0) {
+                            if let Err(TrySendError::Disconnected(_)) =
+                                sender.try_send(Message::PeriodicUpdate)
+                            {
+                                continue;
+                            }
+                            senders.push(sender);
                         }
-                        senders.push(sender);
-                    },
+                    }
                     Notification::Exit => acceptor.send(None).unwrap(),
                 }
             }

@@ -113,7 +113,8 @@ impl HeaderList
                 height: height,
                 hash: hashed_header.blockhash,
                 header: hashed_header.header,
-            }).collect()
+            })
+            .collect()
     }
     pub fn apply(&mut self, new_headers: Vec<HeaderEntry>) {