Created
August 11, 2021 12:07
-
-
Save itzmeanjan/58b238ebce464290922de1f843c080e0 to your computer and use it in GitHub Desktop.
IPLD + Polygon Avail --- Exploration
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
[package]
name = "avail-ipld"
version = "0.1.0"
edition = "2018"

# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

[dependencies]
libipld = {version = "0.12.0", features = ["unleashed"]}
multihash = "0.14.0"
fnv = "1.0.7"
rand = "0.8.4"
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
extern crate fnv; | |
extern crate libipld; | |
extern crate multihash; | |
extern crate rand; | |
use fnv::FnvHashSet; | |
use libipld::codec_impl::IpldCodec; | |
use libipld::Cid; | |
use libipld::Ipld; | |
use libipld::{Block, DefaultParams}; | |
use multihash::Code; | |
use rand::prelude::random; | |
use std::collections::{BTreeMap, HashMap}; | |
type IpldBlock = Block<DefaultParams>; | |
// random byte data for filling up cell's of data matrix | |
fn random_data(n: u64) -> Vec<u8> { | |
let mut data = Vec::with_capacity(n as usize); | |
for _ in 0..n { | |
data.push(random::<u8>()); | |
} | |
data | |
} | |
fn construct_cell(n: u64, s_ipfs: &mut HashMap<Cid, IpldBlock>) -> Cid { | |
let cell = Ipld::Bytes(random_data(n)); | |
let coded_cell = IpldBlock::encode(IpldCodec::DagJson, Code::Blake3_256, &cell).unwrap(); | |
// this simulates some specific cell of data matrix being pushed to IPFS | |
s_ipfs.insert(*coded_cell.cid(), coded_cell.clone()); | |
*coded_cell.cid() | |
} | |
fn construct_colwise(rows: u64, cell_size: u64, s_ipfs: &mut HashMap<Cid, IpldBlock>) -> Cid { | |
let mut col = Vec::new(); | |
for _ in 0..rows { | |
// stores CID of specific cell, instead of whole serialised block, because it's | |
// already pushed to IPFS | |
col.push(Ipld::Link(construct_cell(cell_size, s_ipfs))); | |
} | |
let col = Ipld::List(col); | |
let coded_col = IpldBlock::encode(IpldCodec::DagJson, Code::Blake3_256, &col).unwrap(); | |
// simulates DAG constructed column-wise being pushed to IPFS | |
s_ipfs.insert(*coded_col.cid(), coded_col.clone()); | |
*coded_col.cid() | |
} | |
fn construct_rowwise( | |
rows: u64, | |
cols: u64, | |
cell_size: u64, | |
block_num: i128, | |
prev_block: Option<Cid>, | |
s_ipfs: &mut HashMap<Cid, IpldBlock>, | |
) -> Cid { | |
let mut row = Vec::new(); | |
for _ in 0..cols { | |
// just adding CIDs, for easy column-wise traversal | |
row.push(Ipld::Link(construct_colwise(rows, cell_size, s_ipfs))); | |
} | |
let mut map = BTreeMap::new(); | |
// list of CIDs of column-wise DAGs | |
map.insert("columns".to_owned(), Ipld::List(row)); | |
// current Avail block number | |
map.insert("block".to_owned(), Ipld::Integer(block_num)); | |
// link to previous block | |
map.insert( | |
"prev".to_owned(), | |
match prev_block { | |
Some(v) => Ipld::Link(v), | |
None => Ipld::Null, | |
}, | |
); | |
let map = Ipld::StringMap(map); | |
let coded_row = IpldBlock::encode(IpldCodec::DagJson, Code::Blake3_256, &map).unwrap(); | |
// simulating DAG constructed by encoding whole block ( multi-level though ) | |
// being pushed to IPFS | |
s_ipfs.insert(*coded_row.cid(), coded_row.clone()); | |
*coded_row.cid() | |
} | |
fn simulate( | |
rows: u64, | |
cols: u64, | |
cell_size: u64, | |
upto: i128, | |
s_ipfs: &mut HashMap<Cid, IpldBlock>, | |
) -> BTreeMap<i128, Cid> { | |
let mut cur_block = 0; | |
let mut prev_block = None; | |
let mut entries = BTreeMap::new(); | |
while cur_block <= upto { | |
let current_cid = construct_rowwise(rows, cols, cell_size, cur_block, prev_block, s_ipfs); | |
prev_block = Some(current_cid); | |
if let Some(_) = entries.insert(cur_block, current_cid) { | |
panic!("block {} entry must not be present !", cur_block); | |
} | |
cur_block += 1; | |
} | |
entries | |
} | |
// Recursive backwards DAG traversal | |
fn traverse(cid: Cid, s_ipfs: &HashMap<Cid, IpldBlock>, cids: &mut Vec<Cid>) { | |
if let Some(coded_data) = s_ipfs.get(&cid) { | |
cids.push(cid); | |
let decoded_data = coded_data.ipld().unwrap(); | |
let mut links = FnvHashSet::default(); | |
decoded_data.references(&mut links); | |
links.iter().for_each(|cid| traverse(*cid, s_ipfs, cids)); | |
} | |
return; | |
} | |
fn main() { | |
// simulated IPFS | |
let mut s_ipfs: HashMap<Cid, IpldBlock> = HashMap::new(); | |
// original data matrix size: 4 x 4 | |
// erasure coded data matrix size: 8 x 4 | |
// each cell of matrix consists of random byte array of length 2 | |
// | |
// simluate Avail Chain creating 3 blocks, which are interlinked | |
// by putting last block's CID in current block's `prev` field | |
// so that whole data structure can be traversed by just having latest block's | |
// CID | |
let (rows, cols, cell_size, upto_block) = (4 << 1, 1 << 2, 1 << 1, 3); | |
let tracker = simulate(rows, cols, cell_size, upto_block, &mut s_ipfs); | |
// keeping track of CID of each block mined in Avail --- needed for starting traversal | |
// from any position of chain | |
tracker.iter().for_each(|(k, v)| { | |
println!("block: {}\tCID: {:?}", k, v); | |
}); | |
// take CID of last block --- block number: 3 | |
let cid = tracker.get(&upto_block).unwrap(); | |
let mut cids: Vec<Cid> = Vec::new(); | |
// simulate traversal of whole DAG just from CID of latest | |
// block --- recursively, no doubt ! | |
traverse(*cid, &s_ipfs, &mut cids); | |
println!("\nTraversed DAG of {} CIDs", cids.len()); | |
} |
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment
Background
I was interested in understanding how Polygon Avail blockchain's data can be pushed to IPFS.
After a first round of exploration, I show how Polygon Avail's erasure-coded block data matrices can be formulated into an easily traversable MerkleDAG data structure using IPLD, which can finally be pushed to IPFS.
Usage
With the cargo toolchain installed, run `cargo run`. Tweak the `upto_block` variable to change how many blocks are simulated.