Commit bd52eae7 authored by kenshin-samourai's avatar kenshin-samourai
Browse files

renamings and comments

parent 2efc9182
......@@ -3,6 +3,9 @@ use std::sync::{Arc, Mutex};
use crate::{daemon, errors::*, index, signal::Waiter, store};
//
// Application
//
pub struct App {
store: store::DBStore,
index: index::Index,
......@@ -27,13 +30,16 @@ impl App {
// Private accessor exposing the DB store through its write interface.
fn write_store(&self) -> &impl store::WriteStore {
&self.store
}
// TODO: use index for queries.
// Accessor exposing the DB store through its read-only interface.
pub fn read_store(&self) -> &dyn store::ReadStore {
&self.store
}
// Accessor for the block index.
pub fn index(&self) -> &index::Index {
&self.index
}
// Accessor for the bitcoind RPC daemon handle.
pub fn daemon(&self) -> &daemon::Daemon {
&self.daemon
}
......
......@@ -20,7 +20,9 @@ use crate::signal::Waiter;
use crate::store::{DBStore, Row, WriteStore};
use crate::util::{spawn_thread, HeaderList, SyncChannel};
//
// Blockchain parser (bulk mode)
//
struct Parser {
magic: u32,
current_headers: HeaderList,
......@@ -79,6 +81,9 @@ impl Parser {
}
}
//
// Parse the bitcoin blocks
//
fn parse_blocks(blob: Vec<u8>, magic: u32) -> Result<Vec<Block>> {
let mut cursor = Cursor::new(&blob);
let mut blocks = vec![];
......@@ -125,6 +130,9 @@ fn parse_blocks(blob: Vec<u8>, magic: u32) -> Result<Vec<Block>> {
Ok(blocks)
}
//
// Retrieve the block headers
//
fn load_headers(daemon: &Daemon) -> Result<HeaderList> {
let tip = daemon.getbestblockhash()?;
let mut headers = HeaderList::empty();
......@@ -133,6 +141,9 @@ fn load_headers(daemon: &Daemon) -> Result<HeaderList> {
Ok(headers)
}
//
// Manage open file limits
//
fn set_open_files_limit(limit: libc::rlim_t) {
let resource = libc::RLIMIT_NOFILE;
let mut rlim = libc::rlimit {
......@@ -150,9 +161,13 @@ fn set_open_files_limit(limit: libc::rlim_t) {
}
}
// Handle of a worker thread that reports success/failure on join.
type JoinHandle = thread::JoinHandle<Result<()>>;
// Shared, lockable receiver yielding (raw file contents, file path) pairs.
type BlobReceiver = Arc<Mutex<Receiver<(Vec<u8>, PathBuf)>>>;
//
// Start the reader thread: reads the blk files and sends
// their raw contents through a channel for parsing
//
fn start_reader(blk_files: Vec<PathBuf>, parser: Arc<Parser>) -> (BlobReceiver, JoinHandle) {
let chan = SyncChannel::new(0);
let blobs = chan.sender();
......@@ -167,6 +182,9 @@ fn start_reader(blk_files: Vec<PathBuf>, parser: Arc<Parser>) -> (BlobReceiver,
(Arc::new(Mutex::new(chan.into_receiver())), handle)
}
//
// Bulk indexing of blocks
//
fn start_indexer(
blobs: BlobReceiver,
parser: Arc<Parser>,
......@@ -191,6 +209,9 @@ fn start_indexer(
})
}
//
// Index block files of bitcoind
//
pub fn index_blk_files(
daemon: &Daemon,
index_threads: usize,
......
......@@ -5,6 +5,9 @@ use std::hash::Hash;
use std::sync::Mutex;
//
// LRU cache with a fixed size
//
struct SizedLruCache<K, V> {
map: LruCache<K, (V, usize)>,
bytes_usage: usize,
......@@ -45,6 +48,9 @@ impl<K: Hash + Eq, V> SizedLruCache<K, V> {
}
}
//
// Cache storing the txids of transactions included in a block
//
pub struct BlockTxIDsCache {
// blockhash -> txids of that block; Mutex-guarded LRU bounded by byte size
map: Mutex<SizedLruCache<Sha256dHash /* blockhash */, Vec<Sha256dHash /* txid */>>>,
}
......
......@@ -15,14 +15,20 @@ use stderrlog;
use crate::daemon::CookieGetter;
use crate::errors::*;
//
// Default IP address of the RPC server
//
const DEFAULT_SERVER_ADDRESS: [u8; 4] = [127, 0, 0, 1]; // by default, serve on IPv4 localhost
// Configuration parsing code generated at build time by configure_me
// (pulled in from the build script's OUT_DIR).
mod internal {
#![allow(unused)]
include!(concat!(env!("OUT_DIR"), "/configure_me_config.rs"));
}
/// A simple error type representing invalid UTF-8 input.
//
// A simple error type representing invalid UTF-8 input.
//
pub struct InvalidUtf8(OsString); // keeps the offending non-UTF-8 input for error reporting
impl fmt::Display for InvalidUtf8 {
......@@ -31,7 +37,9 @@ impl fmt::Display for InvalidUtf8 {
}
}
/// An error that might happen when resolving an address
//
// An error that might happen when resolving an address
//
pub enum AddressError {
ResolvError { addr: String, err: std::io::Error },
NoAddrError(String),
......@@ -48,10 +56,10 @@ impl fmt::Display for AddressError {
}
}
/// Newtype for an address that is parsed as `String`
///
/// The main point of this newtype is to provide better description than what `String` type
/// provides.
// Newtype for an address that is parsed as `String`
//
// The main point of this newtype is to provide better description than what `String` type
// provides.
#[derive(Deserialize)]
pub struct ResolvAddr(String); // newtype: raw address string, resolved later
......@@ -89,7 +97,9 @@ impl ResolvAddr {
}
}
/// This newtype implements `ParseArg` for `Network`.
//
// This newtype implements `ParseArg` for `Network`.
//
#[derive(Deserialize)]
pub struct BitcoinNetwork(Network); // newtype wrapper so `ParseArg` can be implemented for `Network`
......@@ -119,7 +129,9 @@ impl Into<Network> for BitcoinNetwork {
}
}
/// Parsed and post-processed configuration
//
// Parsed and post-processed configuration
//
#[derive(Debug)]
pub struct Config {
// See below for the documentation of each field:
......@@ -265,6 +277,9 @@ impl Config {
}
}
//
// Auth cookie for bitcoind
//
struct StaticCookie {
// raw cookie bytes used to authenticate RPC calls to bitcoind
value: Vec<u8>,
}
......
......@@ -14,17 +14,24 @@ use crate::errors::*;
use crate::signal::Waiter;
use crate::store::{ReadStore, Row, WriteStore};
use crate::util::{
full_hash, hash_prefix, spawn_thread, Bytes, FullHash, HashPrefix, HeaderEntry, HeaderList,
full_hash, hash_prefix, spawn_thread, Bytes,
FullHash, HashPrefix, HeaderEntry, HeaderList,
HeaderMap, SyncChannel, HASH_PREFIX_LEN,
};
//
// Key of a row storing an input of a transaction
//
#[derive(Serialize, Deserialize)]
pub struct TxInKey {
pub code: u8,
pub prev_hash_prefix: HashPrefix,
pub prev_index: u16,
pub prev_txid_prefix: HashPrefix,
pub prev_vout: u16,
}
//
// Row storing an input of a transaction
//
#[derive(Serialize, Deserialize)]
pub struct TxInRow {
key: TxInKey,
......@@ -36,18 +43,18 @@ impl TxInRow {
TxInRow {
key: TxInKey {
code: b'I',
prev_hash_prefix: hash_prefix(&input.previous_output.txid[..]),
prev_index: input.previous_output.vout as u16,
prev_txid_prefix: hash_prefix(&input.previous_output.txid[..]),
prev_vout: input.previous_output.vout as u16,
},
txid_prefix: hash_prefix(&txid[..]),
}
}
pub fn filter(txid: &Sha256dHash, output_index: usize) -> Bytes {
pub fn filter(txid: &Sha256dHash, vout: usize) -> Bytes {
bincode::serialize(&TxInKey {
code: b'I',
prev_hash_prefix: hash_prefix(&txid[..]),
prev_index: output_index as u16,
prev_txid_prefix: hash_prefix(&txid[..]),
prev_vout: vout as u16,
})
.unwrap()
}
......@@ -64,12 +71,18 @@ impl TxInRow {
}
}
//
// Key of a row storing an output of a transaction
//
#[derive(Serialize, Deserialize)]
pub struct TxOutKey {
code: u8, // row-kind tag byte (inputs use b'I'; outputs presumably use their own tag — see TxOutRow::filter)
script_hash_prefix: HashPrefix, // truncated hash of the output script, for prefix scans
}
//
// Row storing an output of a transaction
//
#[derive(Serialize, Deserialize)]
pub struct TxOutRow {
key: TxOutKey,
......@@ -109,12 +122,18 @@ impl TxOutRow {
}
}
//
// Key of a row storing a transaction
//
#[derive(Serialize, Deserialize)]
pub struct TxKey {
code: u8, // row-kind tag byte distinguishing transaction rows
pub txid: FullHash, // full (untruncated) transaction id
}
//
// Row storing a transaction
//
#[derive(Serialize, Deserialize)]
pub struct TxRow {
pub key: TxKey,
......@@ -150,12 +169,18 @@ impl TxRow {
}
}
//
// Key of a row storing a block
//
#[derive(Serialize, Deserialize)]
struct BlockKey {
code: u8, // row-kind tag byte; block rows are scanned with the b"B" prefix
hash: FullHash, // full block hash
}
//
// Compute the script hash of a scriptpubkey
//
pub fn compute_script_hash(data: &[u8]) -> FullHash {
let mut hash = FullHash::default();
let mut sha2 = Sha256::new();
......@@ -164,6 +189,9 @@ pub fn compute_script_hash(data: &[u8]) -> FullHash {
hash
}
//
// Index a transaction
//
pub fn index_transaction<'a>(txn: &'a Transaction) -> impl 'a + Iterator<Item = Row> {
let null_hash = Sha256dHash::default();
let txid: Sha256dHash = txn.txid();
......@@ -187,6 +215,9 @@ pub fn index_transaction<'a>(txn: &'a Transaction) -> impl 'a + Iterator<Item =
.chain(std::iter::once(TxRow::new(&txid).to_row()))
}
//
// Index a block
//
pub fn index_block<'a>(block: &'a Block) -> impl 'a + Iterator<Item = Row> {
let blockhash = block.bitcoin_hash();
// Persist block hash and header
......@@ -205,6 +236,9 @@ pub fn index_block<'a>(block: &'a Block) -> impl 'a + Iterator<Item = Row> {
.chain(std::iter::once(row))
}
//
// Retrieve the last indexed block
//
pub fn last_indexed_block(blockhash: &Sha256dHash) -> Row {
// Store last indexed block (i.e. all previous blocks were indexed)
Row {
......@@ -213,6 +247,9 @@ pub fn last_indexed_block(blockhash: &Sha256dHash) -> Row {
}
}
//
// Retrieve the hashes of all the indexed blocks
//
pub fn read_indexed_blockhashes(store: &dyn ReadStore) -> HashSet<Sha256dHash> {
let mut result = HashSet::new();
for row in store.scan(b"B") {
......@@ -222,6 +259,9 @@ pub fn read_indexed_blockhashes(store: &dyn ReadStore) -> HashSet<Sha256dHash> {
result
}
//
// Retrieve the headers of all the indexed blocks
//
fn read_indexed_headers(store: &dyn ReadStore) -> HeaderList {
let latest_blockhash: Sha256dHash = match store.get(b"L") {
// latest blockheader persisted in the DB.
......@@ -229,15 +269,18 @@ fn read_indexed_headers(store: &dyn ReadStore) -> HeaderList {
None => Sha256dHash::default(),
};
trace!("lastest indexed blockhash: {}", latest_blockhash);
let mut map = HeaderMap::new();
for row in store.scan(b"B") {
let key: BlockKey = bincode::deserialize(&row.key).unwrap();
let header: BlockHeader = deserialize(&row.value).unwrap();
map.insert(deserialize(&key.hash).unwrap(), header);
}
let mut headers = vec![];
let null_hash = Sha256dHash::default();
let mut blockhash = latest_blockhash;
while blockhash != null_hash {
let header = map
.remove(&blockhash)
......@@ -245,7 +288,9 @@ fn read_indexed_headers(store: &dyn ReadStore) -> HeaderList {
blockhash = header.prev_blockhash;
headers.push(header);
}
headers.reverse();
assert_eq!(
headers
.first()
......@@ -253,6 +298,7 @@ fn read_indexed_headers(store: &dyn ReadStore) -> HeaderList {
.unwrap_or(null_hash),
null_hash
);
assert_eq!(
headers
.last()
......@@ -260,12 +306,16 @@ fn read_indexed_headers(store: &dyn ReadStore) -> HeaderList {
.unwrap_or(null_hash),
latest_blockhash
);
let mut result = HeaderList::empty();
let entries = result.order(headers);
result.apply(entries, latest_blockhash);
result
}
//
// Indexer
//
pub struct Index {
// TODO: store also latest snapshot.
headers: RwLock<HeaderList>,
......
......@@ -12,6 +12,10 @@ use crate::store::{ReadStore, Row};
use crate::util::Bytes;
//
// BTree emulating a db store
// for mempool transactions
//
struct MempoolStore {
// key -> list of row values; BTreeMap keeps keys ordered, enabling prefix scans
map: BTreeMap<Bytes, Vec<Bytes>>,
}
......@@ -85,6 +89,9 @@ impl ReadStore for MempoolStore {
}
}
//
// Tracker managing mempool transactions
//
pub struct Tracker {
items: HashMap<Sha256dHash, Transaction>,
index: MempoolStore,
......
......@@ -9,26 +9,35 @@ use crate::mempool::Tracker;
use crate::store::ReadStore;
use crate::util::HashPrefix;
pub struct FundingOutput {
pub txn_id: Sha256dHash,
pub output_index: usize,
//
// Output of a Transaction
//
pub struct Txo {
pub txid: Sha256dHash,
pub vout: usize,
}
type OutPoint = (Sha256dHash, usize); // (txid, output_index)
//
// Input of a Transaction
//
type OutPoint = (Sha256dHash, usize); // (txid, vout)
struct SpendingInput {
txn_id: Sha256dHash,
funding_output: OutPoint,
txid: Sha256dHash,
outpoint: OutPoint,
}
//
// Status of an Address
// (vectors of confirmed and unconfirmed outputs and inputs)
//
pub struct Status {
confirmed: (Vec<FundingOutput>, Vec<SpendingInput>),
mempool: (Vec<FundingOutput>, Vec<SpendingInput>),
confirmed: (Vec<Txo>, Vec<SpendingInput>),
mempool: (Vec<Txo>, Vec<SpendingInput>),
}
impl Status {
fn funding(&self) -> impl Iterator<Item = &FundingOutput> {
fn funding(&self) -> impl Iterator<Item = &Txo> {
self.confirmed.0.iter().chain(self.mempool.0.iter())
}
......@@ -39,50 +48,19 @@ impl Status {
pub fn history(&self) -> Vec<Sha256dHash> {
let mut txns = vec![];
for f in self.funding() {
txns.push(f.txn_id);
txns.push(f.txid);
}
for s in self.spending() {
txns.push(s.txn_id);
txns.push(s.txid);
}
txns.sort_unstable();
txns
}
}
fn txrows_by_prefix(
store: &dyn ReadStore,
txid_prefix: HashPrefix
) -> Vec<TxRow> {
store
.scan(&TxRow::filter_prefix(txid_prefix))
.iter()
.map(|row| TxRow::from_row(row))
.collect()
}
fn txoutrows_by_script_hash(
store: &dyn ReadStore,
script_hash: &[u8]
) -> Vec<TxOutRow> {
store
.scan(&TxOutRow::filter(script_hash))
.iter()
.map(|row| TxOutRow::from_row(row))
.collect()
}
fn txids_by_funding_output(
store: &dyn ReadStore,
txn_id: &Sha256dHash,
output_index: usize,
) -> Vec<HashPrefix> {
store
.scan(&TxInRow::filter(&txn_id, output_index))
.iter()
.map(|row| TxInRow::from_row(row).txid_prefix)
.collect()
}
//
// Query tool for the indexer
//
pub struct Query {
app: Arc<App>,
tracker: RwLock<Tracker>,
......@@ -101,14 +79,51 @@ impl Query {
})
}
fn load_txns_by_prefix(
fn get_txrows_by_prefix(
&self,
store: &dyn ReadStore,
prefix: HashPrefix
) -> Vec<TxRow> {
store
.scan(&TxRow::filter_prefix(prefix))
.iter()
.map(|row| TxRow::from_row(row))
.collect()
}
fn get_txoutrows_by_script_hash(
&self,
store: &dyn ReadStore,
script_hash: &[u8]
) -> Vec<TxOutRow> {
store
.scan(&TxOutRow::filter(script_hash))
.iter()
.map(|row| TxOutRow::from_row(row))
.collect()
}
fn get_prefixes_by_funding_txo(
&self,
store: &dyn ReadStore,
txid: &Sha256dHash,
vout: usize,
) -> Vec<HashPrefix> {
store
.scan(&TxInRow::filter(&txid, vout))
.iter()
.map(|row| TxInRow::from_row(row).txid_prefix)
.collect()
}
fn get_txids_by_prefix(
&self,
store: &dyn ReadStore,
prefixes: Vec<HashPrefix>,
) -> Result<Vec<Sha256dHash>> {
let mut txns = vec![];
for txid_prefix in prefixes {
for tx_row in txrows_by_prefix(store, txid_prefix) {
for prefix in prefixes {
for tx_row in self.get_txrows_by_prefix(store, prefix) {
let txid: Sha256dHash = deserialize(&tx_row.key.txid).unwrap();
txns.push(txid)
}
......@@ -119,26 +134,24 @@ impl Query {
fn find_spending_input(
&self,
store: &dyn ReadStore,
funding: &FundingOutput,
txo: &Txo,
) -> Result<Option<SpendingInput>> {
let mut spending_inputs = vec![];
let prefixes = txids_by_funding_output(store, &funding.txn_id, funding.output_index);
let spending_txns = self.load_txns_by_prefix(store, prefixes)?;
let mut spendings = vec![];
let prefixes = self.get_prefixes_by_funding_txo(store, &txo.txid, txo.vout);
let txids = self.get_txids_by_prefix(store, prefixes)?;
for t in &spending_txns {
if *t == funding.txn_id {
spending_inputs.push(SpendingInput {
txn_id: *t,
funding_output: (funding.txn_id, funding.output_index),
})
}
for txid in &txids {
spendings.push(SpendingInput {
txid: *txid,
outpoint: (txo.txid, txo.vout),
})
}
assert!(spending_inputs.len() <= 1);
assert!(spendings.len() <= 1);
Ok(if spending_inputs.len() == 1 {
Some(spending_inputs.remove(0))
Ok(if spendings.len() == 1 {
Some(spendings.remove(0))
} else {
None
})
......@@ -148,17 +161,17 @@ impl Query {
&self,
store: &dyn ReadStore,
script_hash: &[u8]
) -> Result<Vec<FundingOutput>> {
let txout_rows = txoutrows_by_script_hash(store, script_hash);
) -> Result<Vec<Txo>> {
let txout_rows = self.get_txoutrows_by_script_hash(store, script_hash);
let mut result = vec![];
for row in &txout_rows {
let funding_txns = self.load_txns_by_prefix(store, vec![row.txid_prefix])?;
for t in &funding_txns {
result.push(FundingOutput {
txn_id: *t,
output_index: row.vout as usize,
let txids = self.get_txids_by_prefix(store, vec![row.txid_prefix])?;
for txid in &txids {
result.push(Txo {
txid: *txid,
vout: row.vout as usize,
})