From cd2c97edf096c79d3f60654fbb6eda2e7522b096 Mon Sep 17 00:00:00 2001 From: Jie You Date: Fri, 12 Oct 2018 14:09:53 -0700 Subject: [PATCH 1/3] stake trie --- fixtures/TrieTests/staketrietest.json | 59 ++ quarkchain/evm/stake_trie.py | 857 ++++++++++++++++++++++++ quarkchain/evm/tests/test_stake_trie.py | 156 +++++ 3 files changed, 1072 insertions(+) create mode 100644 fixtures/TrieTests/staketrietest.json create mode 100644 quarkchain/evm/stake_trie.py create mode 100644 quarkchain/evm/tests/test_stake_trie.py diff --git a/fixtures/TrieTests/staketrietest.json b/fixtures/TrieTests/staketrietest.json new file mode 100644 index 000000000..ab58b934c --- /dev/null +++ b/fixtures/TrieTests/staketrietest.json @@ -0,0 +1,59 @@ +{ + "randomSelectTests": { + "in":[ + ["0x04110d816c380812a427968ece99b1c963dfbce6", 1], + ["0x095e7baea6a6c7c4c2dfeb977efac326af552d87", 2], + ["0x0a517d755cebbf66312b30fff713666a9cb917e0", 3], + ["0x24dd378f51adc67a50e339e8031fe9bd4aafab36", 4], + ["0x293f982d000532a7861ab122bdc4bbfd26bf9030", 5], + ["0x2cf5732f017b0cf1b1f13a1478e10239716bf6b5", 1], + ["0x31c640b92c21a1f1465c91070b4b3b4d6854195f", 2], + ["0x37f998764813b136ddf5a754f34063fd03065e36", 3], + ["0x37fa399a749c121f8a15ce77e3d9f9bec8020d7a", 4], + ["0x4f36659fa632310b6ec438dea4085b522a2dd077", 5], + ["0x62c01474f089b07dae603491675dc5b5748f7049", 1], + ["0x729af7294be595a0efd7d891c9e51f89c07950c7", 2], + ["0x83e3e5a16d3b696a0314b30b2534804dd5e11197", 3], + ["0x8703df2417e0d7c59d063caa9583cb10a4d20532", 4], + ["0x8dffcd74e5b5923512916c6a64b502689cfa65e1", 5], + ["0x95a4d7cccb5204733874fa87285a176fe1e9e240", 1], + ["0x99b2fcba8120bedd048fe79f5262a6690ed38c39", 2], + ["0xa4202b8b8afd5354e3e40a219bdc17f6001bf2cf", 3], + ["0xa94f5374fce5edbc8e2a8697c15331677e6ebf0b", 4], + ["0xa9647f4a0a14042d91dc33c0328030a7157c93ae", 5], + ["0xaa6cffe5185732689c18f37a7f86170cb7304c2a", 1], + ["0xaae4a2e3c51c04606dcb3723456e58f3ed214f45", 2], + ["0xc37a43e940dfb5baf581a0b82b351d48305fc885", 3], + ["0xd2571607e241ecf590ed94b12d87c94babe36db6", 4], + ["0xf735071cbee190d76b704ce68384fc21e389fbe7", 5], + ["0x04110d816c380812a427968ece99b1c963dfbce6", null], + ["0x095e7baea6a6c7c4c2dfeb977efac326af552d87", null], + ["0x0a517d755cebbf66312b30fff713666a9cb917e0", null], + ["0x24dd378f51adc67a50e339e8031fe9bd4aafab36", null], + ["0x293f982d000532a7861ab122bdc4bbfd26bf9030", null], + ["0x2cf5732f017b0cf1b1f13a1478e10239716bf6b5", null], + ["0x31c640b92c21a1f1465c91070b4b3b4d6854195f", null], + ["0x37f998764813b136ddf5a754f34063fd03065e36", null], + ["0x37fa399a749c121f8a15ce77e3d9f9bec8020d7a", null], + ["0x4f36659fa632310b6ec438dea4085b522a2dd077", null], + ["0x62c01474f089b07dae603491675dc5b5748f7049", null], + ["0x729af7294be595a0efd7d891c9e51f89c07950c7", null], + ["0x83e3e5a16d3b696a0314b30b2534804dd5e11197", null], + ["0x8703df2417e0d7c59d063caa9583cb10a4d20532", null], + ["0x8dffcd74e5b5923512916c6a64b502689cfa65e1", null], + ["0x95a4d7cccb5204733874fa87285a176fe1e9e240", null], + ["0x99b2fcba8120bedd048fe79f5262a6690ed38c39", null], + ["0xa4202b8b8afd5354e3e40a219bdc17f6001bf2cf", null], + ["0xa94f5374fce5edbc8e2a8697c15331677e6ebf0b", null], + ["0xa9647f4a0a14042d91dc33c0328030a7157c93ae", null], + ["0xaa6cffe5185732689c18f37a7f86170cb7304c2a", null], + ["0xaae4a2e3c51c04606dcb3723456e58f3ed214f45", null], + ["0xc37a43e940dfb5baf581a0b82b351d48305fc885", null], + ["0xd2571607e241ecf590ed94b12d87c94babe36db6", null], + ["0xf735071cbee190d76b704ce68384fc21e389fbe7", null] + ], + "root": 
"0x25d946ba89d4510afdd0b376be458cdf58c19c7349702ba9d5f76a1abd97cf9a" + } + +} + diff --git a/quarkchain/evm/stake_trie.py b/quarkchain/evm/stake_trie.py new file mode 100644 index 000000000..a36873720 --- /dev/null +++ b/quarkchain/evm/stake_trie.py @@ -0,0 +1,857 @@ +#!/usr/bin/python3 +# trie.py from ethereum under MIT license +# use rlp to encode/decode a node as the original code +import rlp +from quarkchain import utils +from quarkchain.evm.fast_rlp import encode_optimized +from rlp.sedes import big_endian_int +from quarkchain.evm.utils import int_to_big_endian, big_endian_to_int +from quarkchain.evm.trie import ( + to_bytes, + bin_to_nibbles, + nibbles_to_bin, + with_terminator, + without_terminator, + adapt_terminator, + pack_nibbles, + unpack_to_nibbles, + starts_with, + is_key_value_type, +) + +rlp_encode = encode_optimized + +bin_to_nibbles_cache = {} + + +hti = {} +for i, c in enumerate("0123456789abcdef"): + hti[c] = i + +itoh = {} +for i, c in enumerate("0123456789abcdef"): + itoh[i] = c + + +def nibbles_to_address(nibbles): + """convert nibbles to address (hex public key address starting with "0x") + >>> nibbles_to_address([2, 4, 13, 13]) + "0x24dd" + """ + if any(x > 15 or x < 0 for x in nibbles): + raise Exception("nibbles can only be [0,..15]") + res = "0x" + for n in nibbles: + res += itoh[n] + return res + + +(NODE_TYPE_BLANK, NODE_TYPE_LEAF, NODE_TYPE_EXTENSION, NODE_TYPE_BRANCH) = tuple( + range(4) +) + + +NIBBLE_TERMINATOR = 16 +BLANK_NODE = b"" +BLANK_ROOT = utils.sha3_256(rlp.encode(b"")) + + +class Stake_Trie(object): + def __init__(self, db, root_hash=BLANK_ROOT): + """it also present a dictionary like interface + + :param db key value database + :root: blank or stake trie node in form of [key, [value, token]] or [[v0, token],[v1, token]..[v15, token],[v, token]] + :token: the total numbers of tokens rooted at that node (i.e., the number of tokens below it) + All operations that modify the trie must adjust the token information + """ + self.db = db # Pass in a database object directly + self.set_root_hash(root_hash) + self.deletes = [] + + # def __init__(self, dbfile, root_hash=BLANK_ROOT): + # """it also present a dictionary like interface + + # :param dbfile: key value database + # :root: blank or trie node in form of [key, value] or [v0,v1..v15,v] + # """ + # if isinstance(dbfile, str): + # dbfile = os.path.abspath(dbfile) + # self.db = DB(dbfile) + # else: + # self.db = dbfile # Pass in a database object directly + # self.set_root_hash(root_hash) + + @property + def root_hash(self): + """always empty or a 32 bytes + """ + return self._root_hash + + def get_root_hash(self): + return self._root_hash + + def _update_root_hash(self): + val = rlp_encode(self.root_node) + key = utils.sha3_256(val) + self.db.put(key, val) + self._root_hash = key + + @root_hash.setter + def root_hash(self, value): + self.set_root_hash(value) + + def set_root_hash(self, root_hash): + assert isinstance(root_hash, bytes) + assert len(root_hash) in [0, 32] + if root_hash == BLANK_ROOT: + self.root_node = BLANK_NODE + self._root_hash = BLANK_ROOT + return + self.root_node = self._decode_to_node(root_hash) + self._root_hash = root_hash + + def clear(self): + """ clear all tree data + """ + self._delete_child_storage(self.root_node) + self._delete_node_storage(self.root_node) + self.root_node = BLANK_NODE + self._root_hash = BLANK_ROOT + + def _delete_child_storage(self, node): + node_type = self._get_node_type(node) + if node_type == NODE_TYPE_BRANCH: + for item in node[:16]: + 
self._delete_child_storage(self._decode_to_node(item)) + elif node_type == NODE_TYPE_EXTENSION: + self._delete_child_storage(self._decode_to_node(node[1])) + + def _encode_node(self, node, put_in_db=True): + """ + All the operations that modify the trie must adjust the stake information + """ + if node == BLANK_NODE: + return BLANK_NODE + # assert isinstance(node, list) + node_type = self._get_node_type(node) + stake_sum = 0 + if is_key_value_type(node_type): + stake_sum = big_endian_to_int(node[1][1]) + elif node_type == NODE_TYPE_BRANCH: + for i in range(17): + if node[i] != BLANK_NODE: + stake_sum += big_endian_to_int(node[i][1]) + + rlpnode = rlp_encode(node) + # may fix + # if len(rlpnode) < 32: + # return node + + hashkey = utils.sha3_256(rlpnode) + if put_in_db: + self.db.put(hashkey, rlpnode) + return [hashkey, int_to_big_endian(stake_sum)] + + def _decode_to_node(self, encoded): + if encoded == BLANK_NODE: + return BLANK_NODE + + encoded = encoded[0] + + if isinstance(encoded, list): + return encoded + o = rlp.decode(self.db[encoded]) + return o + + def _get_node_type(self, node): + """ get node type and content + + :param node: node in form of list, or BLANK_NODE + :return: node type + """ + if node == BLANK_NODE: + return NODE_TYPE_BLANK + + if len(node) == 2: + nibbles = unpack_to_nibbles(node[0]) + has_terminator = nibbles and nibbles[-1] == NIBBLE_TERMINATOR + return NODE_TYPE_LEAF if has_terminator else NODE_TYPE_EXTENSION + if len(node) == 17: + return NODE_TYPE_BRANCH + + def _get(self, node, key): + """ get value inside a node + + :param node: node in form of list, or BLANK_NODE + :param key: nibble list without terminator + :return: + BLANK_NODE if does not exist, otherwise value or hash + """ + node_type = self._get_node_type(node) + + if node_type == NODE_TYPE_BLANK: + return BLANK_NODE + + if node_type == NODE_TYPE_BRANCH: + # already reach the expected node + if not key: + return node[-1][1] + sub_node = self._decode_to_node(node[key[0]]) + return self._get(sub_node, key[1:]) + + # key value node + curr_key = without_terminator(unpack_to_nibbles(node[0])) + if node_type == NODE_TYPE_LEAF: + return node[1][1] if key == curr_key else BLANK_NODE + + if node_type == NODE_TYPE_EXTENSION: + # traverse child nodes + if starts_with(key, curr_key): + sub_node = self._decode_to_node(node[1]) + return self._get(sub_node, key[len(curr_key) :]) + else: + return BLANK_NODE + + def _update(self, node, key, value): + """ update item inside a node + + :param node: node in form of list, or BLANK_NODE + :param key: nibble list without terminator + .. 
note:: key may be [] + :param value: value bytes + :return: new node + + if this node is changed to a new node, it's parent will take the + responsibility to *store* the new node storage, and delete the old + node storage + """ + node_type = self._get_node_type(node) + + if node_type == NODE_TYPE_BLANK: + return [pack_nibbles(with_terminator(key)), [value, value]] + + elif node_type == NODE_TYPE_BRANCH: + if not key: + node[-1] = [value, value] + else: + new_node = self._update_and_delete_storage( + self._decode_to_node(node[key[0]]), key[1:], value + ) + node[key[0]] = self._encode_node(new_node) + return node + + elif is_key_value_type(node_type): + return self._update_kv_node(node, key, value) + + def _update_and_delete_storage(self, node, key, value): + old_node = node[:] + new_node = self._update(node, key, value) + if old_node != new_node: + self._delete_node_storage(old_node) + return new_node + + def _update_kv_node(self, node, key, value): + node_type = self._get_node_type(node) + curr_key = without_terminator(unpack_to_nibbles(node[0])) + is_inner = node_type == NODE_TYPE_EXTENSION + + # find longest common prefix + prefix_length = 0 + for i in range(min(len(curr_key), len(key))): + if key[i] != curr_key[i]: + break + prefix_length = i + 1 + + remain_key = key[prefix_length:] + remain_curr_key = curr_key[prefix_length:] + + if remain_key == [] == remain_curr_key: + if not is_inner: + return [node[0], [value, value]] + new_node = self._update_and_delete_storage( + self._decode_to_node(node[1]), remain_key, value + ) + + elif remain_curr_key == []: + if is_inner: + new_node = self._update_and_delete_storage( + self._decode_to_node(node[1]), remain_key, value + ) + else: + new_node = [BLANK_NODE] * 17 + new_node[-1] = node[1] + new_node[remain_key[0]] = self._encode_node( + [pack_nibbles(with_terminator(remain_key[1:])), [value, value]] + ) + else: + new_node = [BLANK_NODE] * 17 + if len(remain_curr_key) == 1 and is_inner: + new_node[remain_curr_key[0]] = node[1] + else: + new_node[remain_curr_key[0]] = self._encode_node( + [ + pack_nibbles( + adapt_terminator(remain_curr_key[1:], not is_inner) + ), + node[1], + ] + ) + + if remain_key == []: + new_node[-1] = [value, value] + else: + new_node[remain_key[0]] = self._encode_node( + [pack_nibbles(with_terminator(remain_key[1:])), [value, value]] + ) + + if prefix_length: + # create node for key prefix + return [pack_nibbles(curr_key[:prefix_length]), self._encode_node(new_node)] + else: + return new_node + + def _getany(self, node, reverse=False, path=[]): + # print('getany', node, 'reverse=', reverse, path) + node_type = self._get_node_type(node) + if node_type == NODE_TYPE_BLANK: + return None + if node_type == NODE_TYPE_BRANCH: + if node[16] and not reverse: + # print('found!', [16], path) + return [16] + scan_range = list(range(16)) + if reverse: + scan_range.reverse() + for i in scan_range: + o = self._getany( + self._decode_to_node(node[i]), reverse=reverse, path=path + [i] + ) + if o is not None: + # print('found@', [i] + o, path) + return [i] + o + if node[16] and reverse: + # print('found!', [16], path) + return [16] + return None + curr_key = without_terminator(unpack_to_nibbles(node[0])) + if node_type == NODE_TYPE_LEAF: + # print('found#', curr_key, path) + return curr_key + + if node_type == NODE_TYPE_EXTENSION: + sub_node = self._decode_to_node(node[1]) + return curr_key + self._getany( + sub_node, reverse=reverse, path=path + curr_key + ) + + def _iter(self, node, key, reverse=False, path=[]): + # print('iter', node, 
key, 'reverse =', reverse, 'path =', path) + node_type = self._get_node_type(node) + + if node_type == NODE_TYPE_BLANK: + return None + + elif node_type == NODE_TYPE_BRANCH: + # print('b') + if len(key): + sub_node = self._decode_to_node(node[key[0]]) + o = self._iter(sub_node, key[1:], reverse, path + [key[0]]) + if o is not None: + # print('returning', [key[0]] + o, path) + return [key[0]] + o + if reverse: + scan_range = reversed(list(range(key[0] if len(key) else 0))) + else: + scan_range = list(range(key[0] + 1 if len(key) else 0, 16)) + for i in scan_range: + sub_node = self._decode_to_node(node[i]) + # print('prelim getany', path+[i]) + o = self._getany(sub_node, reverse, path + [i]) + if o is not None: + # print('returning', [i] + o, path) + return [i] + o + if reverse and key and node[16]: + # print('o') + return [16] + return None + + descend_key = without_terminator(unpack_to_nibbles(node[0])) + if node_type == NODE_TYPE_LEAF: + if reverse: + # print('L', descend_key, key, descend_key if descend_key < key else None, path) + return descend_key if descend_key < key else None + else: + # print('L', descend_key, key, descend_key if descend_key > key else None, path) + return descend_key if descend_key > key else None + + if node_type == NODE_TYPE_EXTENSION: + # traverse child nodes + sub_node = self._decode_to_node(node[1]) + sub_key = key[len(descend_key) :] + # print('amhere', key, descend_key, descend_key > key[:len(descend_key)]) + if starts_with(key, descend_key): + o = self._iter(sub_node, sub_key, reverse, path + descend_key) + elif descend_key > key[: len(descend_key)] and not reverse: + # print(1) + # print('prelim getany', path+descend_key) + o = self._getany(sub_node, False, path + descend_key) + elif descend_key < key[: len(descend_key)] and reverse: + # print(2) + # print('prelim getany', path+descend_key) + o = self._getany(sub_node, True, path + descend_key) + else: + o = None + # print('returning@', descend_key + o if o else None, path) + return descend_key + o if o else None + + def next(self, key): + # print('nextting') + key = bin_to_nibbles(key) + o = self._iter(self.root_node, key) + # print('answer', o) + return nibbles_to_bin(without_terminator(o)) if o else None + + def prev(self, key): + # print('prevving') + key = bin_to_nibbles(key) + o = self._iter(self.root_node, key, reverse=True) + # print('answer', o) + return nibbles_to_bin(without_terminator(o)) if o else None + + def _delete_node_storage(self, node): + """delete storage + :param node: node in form of list, or BLANK_NODE + """ + if node == BLANK_NODE: + return + # assert isinstance(node, list) + encoded = self._encode_node(node, put_in_db=False) + encoded = encoded[0] + if len(encoded) < 32: + return + """ + ===== FIXME ==== + in the current trie implementation two nodes can share identical subtrees + thus we can not safely delete nodes for now + """ + self.deletes.append(encoded) + # print('del', encoded, self.db.get_refcount(encoded)) + + def _delete(self, node, key): + """ update item inside a node + + :param node: node in form of list, or BLANK_NODE + :param key: nibble list without terminator + .. 
note:: key may be [] + :return: new node + + if this node is changed to a new node, it's parent will take the + responsibility to *store* the new node storage, and delete the old + node storage + """ + node_type = self._get_node_type(node) + if node_type == NODE_TYPE_BLANK: + return BLANK_NODE + + if node_type == NODE_TYPE_BRANCH: + return self._delete_branch_node(node, key) + + if is_key_value_type(node_type): + return self._delete_kv_node(node, key) + + def _normalize_branch_node(self, node): + """node should have only one item changed + """ + not_blank_items_count = sum(1 for x in range(17) if node[x]) + assert not_blank_items_count >= 1 + + if not_blank_items_count > 1: + return node + + # now only one item is not blank + not_blank_index = [i for i, item in enumerate(node) if item][0] + + # the value item is not blank + if not_blank_index == 16: + return [pack_nibbles(with_terminator([])), node[16]] + + # normal item is not blank + sub_node = self._decode_to_node(node[not_blank_index]) + sub_node_type = self._get_node_type(sub_node) + + if is_key_value_type(sub_node_type): + # collape subnode to this node, not this node will have same + # terminator with the new sub node, and value does not change + new_key = [not_blank_index] + unpack_to_nibbles(sub_node[0]) + return [pack_nibbles(new_key), sub_node[1]] + if sub_node_type == NODE_TYPE_BRANCH: + return [pack_nibbles([not_blank_index]), self._encode_node(sub_node)] + assert False + + def _delete_and_delete_storage(self, node, key): + old_node = node[:] + new_node = self._delete(node, key) + if old_node != new_node: + self._delete_node_storage(old_node) + return new_node + + def _delete_branch_node(self, node, key): + # already reach the expected node + if not key: + node[-1] = BLANK_NODE + return self._normalize_branch_node(node) + + encoded_new_sub_node = self._encode_node( + self._delete_and_delete_storage(self._decode_to_node(node[key[0]]), key[1:]) + ) + + if encoded_new_sub_node == node[key[0]]: + return node + + node[key[0]] = encoded_new_sub_node + if encoded_new_sub_node == BLANK_NODE: + return self._normalize_branch_node(node) + + return node + + def _delete_kv_node(self, node, key): + node_type = self._get_node_type(node) + assert is_key_value_type(node_type) + curr_key = without_terminator(unpack_to_nibbles(node[0])) + + if not starts_with(key, curr_key): + # key not found + return node + + if node_type == NODE_TYPE_LEAF: + return BLANK_NODE if key == curr_key else node + + # for inner key value type + new_sub_node = self._delete_and_delete_storage( + self._decode_to_node(node[1]), key[len(curr_key) :] + ) + + if self._encode_node(new_sub_node) == node[1]: + return node + + # new sub node is BLANK_NODE + if new_sub_node == BLANK_NODE: + return BLANK_NODE + + # assert isinstance(new_sub_node, list) + + # new sub node not blank, not value and has changed + new_sub_node_type = self._get_node_type(new_sub_node) + + if is_key_value_type(new_sub_node_type): + # collape subnode to this node, not this node will have same + # terminator with the new sub node, and value does not change + new_key = curr_key + unpack_to_nibbles(new_sub_node[0]) + return [pack_nibbles(new_key), new_sub_node[1]] + + if new_sub_node_type == NODE_TYPE_BRANCH: + return [pack_nibbles(curr_key), self._encode_node(new_sub_node)] + + # should be no more cases + assert False + + def delete(self, key): + """ + :param key: a bytes with length of [0, 32] + """ + if not isinstance(key, bytes): + raise Exception("Key must be bytes") + + if len(key) > 32: + raise 
Exception("Max key length is 32") + + self.root_node = self._delete_and_delete_storage( + self.root_node, bin_to_nibbles(key) + ) + self._update_root_hash() + + def _get_size(self, node): + """Get counts of (key, value) stored in this and the descendant nodes + + :param node: node in form of list, or BLANK_NODE + """ + if node == BLANK_NODE: + return 0 + + node_type = self._get_node_type(node) + + if is_key_value_type(node_type): + value_is_node = node_type == NODE_TYPE_EXTENSION + if value_is_node: + return self._get_size(self._decode_to_node(node[1])) + else: + return 1 + elif node_type == NODE_TYPE_BRANCH: + sizes = [self._get_size(self._decode_to_node(node[x])) for x in range(16)] + sizes = sizes + [1 if node[-1] else 0] + return sum(sizes) + + def _iter_branch(self, node): + """yield (key, value) stored in this and the descendant nodes + :param node: node in form of list, or BLANK_NODE + + .. note:: + Here key is in full form, rather than key of the individual node + """ + if node == BLANK_NODE: + raise StopIteration + + node_type = self._get_node_type(node) + + if is_key_value_type(node_type): + nibbles = without_terminator(unpack_to_nibbles(node[0])) + key = b"+".join([to_bytes(x) for x in nibbles]) + if node_type == NODE_TYPE_EXTENSION: + sub_tree = self._iter_branch(self._decode_to_node(node[1])) + else: + sub_tree = [(to_bytes(NIBBLE_TERMINATOR), node[1][1])] + + # prepend key of this node to the keys of children + for sub_key, sub_value in sub_tree: + full_key = (key + b"+" + sub_key).strip(b"+") + yield (full_key, sub_value) + + elif node_type == NODE_TYPE_BRANCH: + for i in range(16): + sub_tree = self._iter_branch(self._decode_to_node(node[i])) + for sub_key, sub_value in sub_tree: + full_key = (bytes(str(i), "ascii") + b"+" + sub_key).strip(b"+") + yield (full_key, sub_value) + if node[16]: + yield (to_bytes(NIBBLE_TERMINATOR), node[-1][1]) + + def iter_branch(self): + for key_str, value in self._iter_branch(self.root_node): + if key_str: + nibbles = [int(x) for x in key_str.split(b"+")] + else: + nibbles = [] + key = nibbles_to_bin(without_terminator(nibbles)) + yield key, value + + def _to_dict(self, node): + """convert (key, value) stored in this and the descendant nodes + to dict items. + + :param node: node in form of list, or BLANK_NODE + + .. 
note:: + + Here key is in full form, rather than key of the individual node + """ + if node == BLANK_NODE: + return {} + + node_type = self._get_node_type(node) + + if is_key_value_type(node_type): + nibbles = without_terminator(unpack_to_nibbles(node[0])) + key = b"+".join([to_bytes(x) for x in nibbles]) + if node_type == NODE_TYPE_EXTENSION: + sub_dict = self._to_dict(self._decode_to_node(node[1])) + else: + sub_dict = {to_bytes(NIBBLE_TERMINATOR): node[1][1]} + + # prepend key of this node to the keys of children + res = {} + for sub_key, sub_value in sub_dict.items(): + full_key = (key + b"+" + sub_key).strip(b"+") + res[full_key] = sub_value + return res + + elif node_type == NODE_TYPE_BRANCH: + res = {} + for i in range(16): + sub_dict = self._to_dict(self._decode_to_node(node[i])) + + for sub_key, sub_value in sub_dict.items(): + full_key = (bytes(str(i), "ascii") + b"+" + sub_key).strip(b"+") + res[full_key] = sub_value + + if node[16]: + res[to_bytes(NIBBLE_TERMINATOR)] = node[-1][1] + return res + + def to_dict(self): + d = self._to_dict(self.root_node) + res = {} + for key_str, value in d.items(): + if key_str: + nibbles = [int(x) for x in key_str.split(b"+")] + else: + nibbles = [] + key = nibbles_to_bin(without_terminator(nibbles)) + res[key] = value + return res + + def get(self, key): + if not isinstance(key, bytes): + raise Exception("Key must be bytes") + return self._get(self.root_node, bin_to_nibbles(to_bytes(key))) + + def update(self, key, value): + """ + :param key: a bytes + :value: a bytes + """ + if not isinstance(key, bytes): + raise Exception("Key must be bytes") + + # if len(key) > 32: + # raise Exception("Max key length is 32") + + if not isinstance(value, bytes): + raise Exception("Value must be bytes") + + # if value == '': + # return self.delete(key) + self.root_node = self._update_and_delete_storage( + self.root_node, bin_to_nibbles(key), value + ) + self._update_root_hash() + + def root_hash_valid(self): + if self.root_hash == BLANK_ROOT: + return True + return self.root_hash in self.db + + # New functions for POS (Photon) + def _get_total_stake(self, node): + """Get the total stake + + :param node: node in form of list, or BLANK_NODE + """ + if node == BLANK_NODE: + return 0 + + node_type = self._get_node_type(node) + + if is_key_value_type(node_type): + stake_is_node = node_type == NODE_TYPE_EXTENSION + if stake_is_node: + return self._get_total_stake(self._decode_to_node(node[1])) + else: + return big_endian_to_int(node[1][1]) + elif node_type == NODE_TYPE_BRANCH: + tokens = [ + self._get_total_stake(self._decode_to_node(node[x])) for x in range(16) + ] + tokens = tokens + [big_endian_to_int(node[-1][1]) if node[-1] else 0] + return sum(tokens) + + def _get_total_stake_from_root_node(self, node): + """ Get the total stake directly from root node informaiton + + :param node: node in form of list, or BLANK_NODE + """ + if node == BLANK_NODE: + return 0 + + node_type = self._get_node_type(node) + + if is_key_value_type(node_type): + return big_endian_to_int(node[1][1]) + else: + stake_sum = 0 + for i in range(17): + if node[i] != BLANK_NODE: + stake_sum += big_endian_to_int(node[i][1]) + return stake_sum + + def _select_staker(self, node, value): + """ Get the selected staker address given the pseudo-randomly value + + :param node: node in form of list, or BLANK_NODE + :value: pseudo-randomly selected value + """ + node_type = self._get_node_type(node) + assert value >= 0 + + if node_type == NODE_TYPE_BLANK: + return None + + if node_type == 
NODE_TYPE_BRANCH: + scan_range = list(range(17)) + for i in scan_range: + if node[i] != BLANK_NODE: + if big_endian_to_int(node[i][1]) >= value: + sub_node = self._decode_to_node(node[i]) + o = self._select_staker(sub_node, value) + return [i] + o if o is not None else None + else: + value = value - big_endian_to_int(node[i][1]) + return None + + if node_type == NODE_TYPE_LEAF: + descend_key = without_terminator(unpack_to_nibbles(node[0])) + return descend_key if value <= big_endian_to_int(node[1][1]) else None + + elif node_type == NODE_TYPE_EXTENSION: + descend_key = without_terminator(unpack_to_nibbles(node[0])) + if value <= big_endian_to_int(node[1][1]): + sub_node = self._decode_to_node(node[1]) + o = self._select_staker(sub_node, value) + return descend_key + o if o else None + else: + return None + + return None + + def _check_total_tokens(self): + if self.root_node != BLANK_NODE: + assert self._get_total_stake( + self.root_node + ) == self._get_total_stake_from_root_node(self.root_node) + + def get_total_stake(self): + return self._get_total_stake_from_root_node(self.root_node) + + def select_staker(self, value): + o = self._select_staker(self.root_node, value) + return nibbles_to_address(o) + + def __len__(self): + return self._get_size(self.root_node) + + def __getitem__(self, key): + return self.get(key) + + def __setitem__(self, key, value): + return self.update(key, value) + + def __delitem__(self, key): + return self.delete(key) + + def __iter__(self): + return iter(self.to_dict()) + + def __contains__(self, key): + return self.get(key) != BLANK_NODE + + +if __name__ == "__main__": + import sys + from quarkchain.db import PersistentDb + + _db = PersistentDb(path=sys.argv[2]) + + def encode_node(nd): + if isinstance(nd, bytes): + return nd.hex() + else: + return rlp_encode(nd).hex() + + if len(sys.argv) >= 2: + if sys.argv[1] == "insert": + t = Stake_Trie(_db, bytes.fromhex(sys.argv[3])) + t.update(bytes(sys.argv[4], "ascii"), bytes(sys.argv[5], "ascii")) + print(encode_node(t.root_hash)) + elif sys.argv[1] == "get": + t = Stake_Trie(_db, bytes.fromhex(sys.argv[3])) + print(t.get(bytes(sys.argv[4], "ascii"))) diff --git a/quarkchain/evm/tests/test_stake_trie.py b/quarkchain/evm/tests/test_stake_trie.py new file mode 100644 index 000000000..3bae5200c --- /dev/null +++ b/quarkchain/evm/tests/test_stake_trie.py @@ -0,0 +1,156 @@ +import os +import json +import quarkchain.evm.stake_trie as stake_trie +from quarkchain.db import InMemoryDb +import itertools +from quarkchain.utils import Logger +import unittest +import random + +from rlp.sedes import big_endian_int +from quarkchain.evm.utils import int_to_big_endian, big_endian_to_int + + +# customize VM log output to your needs +# hint: use 'py.test' with the '-s' option to dump logs to the console +# configure_logging(':trace') + + +def check_testdata(data_keys, expected_keys): + assert set(data_keys) == set( + expected_keys + ), "test data changed, please adjust tests" + + +fixture_path = os.path.join(os.path.dirname(__file__), "..", "..", "..", "fixtures") + + +def load_tests_dict(): + fixture = {} + testdir = os.path.join(fixture_path, "TrieTests") + for f in os.listdir(testdir): + if f != "staketrietest.json": + continue + sub_fixture = json.load(open(os.path.join(testdir, f))) + for k, v in sub_fixture.items(): + fixture[f + "_" + k] = v + return fixture + + +def load_tests(loader, tests, pattern): + # python3 unittest interface + suite = unittest.TestSuite() + for key, pairs in load_tests_dict().items(): + test = 
unittest.FunctionTestCase( + (lambda key, pairs: lambda: run_test(key, pairs))(key, pairs), + description=key, + ) + suite.addTests([test]) + return suite + + +def run_test(name, pairs): + Logger.debug("testing %s" % name) + # random seed + seed = "000000000000000000139dba4d2169f991f9867dd3e996f5e0a86f1ae985308f" + random.seed(seed) + + def _dec(x): + if isinstance(x, str) and x.startswith("0x"): + return bytes.fromhex(str(x[2:])) + + if isinstance(x, int): + return int_to_big_endian(x) + + if isinstance(x, str): + return bytes(x, "ascii") + return x + + pairs["in"] = [(_dec(k), _dec(v)) for k, v in pairs["in"]] + stakerlist = [(k, v) for k, v in pairs["in"] if v is not None] + deletes = [(k, v) for k, v in pairs["in"] if v is None] + + N_PERMUTATIONS = 100 + N_RANDOM = 300 + for i, permut in enumerate(itertools.permutations(pairs["in"])): + if i > N_PERMUTATIONS: + break + if pairs.get("nopermute", None) is not None and pairs["nopermute"]: + permut = pairs["in"] + N_PERMUTATIONS = 1 + t = stake_trie.Stake_Trie(InMemoryDb()) + # insert the account + for k, v in permut: + # logger.debug('updating with (%s, %s)' %(k, v)) + if v is not None: + t.update(k, v) + + # check the total account of stakes + if t._check_total_tokens() == False: + raise Exception( + "Mismatch_total_stake: %r %r %r" + % (name, t.root_node, t.get_total_stake()) + ) + + if t.get_total_stake() != 75: + raise Exception( + "Mismatch_total_stake: %r %r %r" + % (name, t.root_node, t.get_total_stake()) + ) + + # check the root hash + if pairs["root"] != "0x" + t.root_hash.hex(): + raise Exception( + "Mismatch_root_hash: %r %r %r %r" + % ( + name, + pairs["root"], + "0x" + t.root_hash.hex(), + (i, list(permut) + deletes), + ) + ) + + for _ in range(N_RANDOM): + # pseudo-randomly select a stake + chosen_num = random.random() * t.get_total_stake() + chosen = chosen_num + + for k, v in stakerlist: + curr_val = big_endian_to_int(v) + if chosen <= curr_val: + result = k + break + chosen -= curr_val + + if _dec(t.select_staker(chosen_num)) != result: + raise Exception( + "Mismatch_random_selection: %r %r %r %r %r" + % ( + name, + t.root_node, + chosen_num, + t.select_staker(chosen_num), + result, + ) + ) + + # delete the corresponding account + for k, v in permut: + # logger.debug('updating with (%s, %s)' %(k, v)) + if v is None: + t.delete(k) + + # make sure we have deletes at the end + for k, v in deletes: + t.delete(k) + + # make sure we have deleted all the stakes + if t.get_total_stake() != 0: + raise Exception( + "Mismatch: %r %r %r" % (name, t.root_node, t.get_total_stake()) + ) + + +if __name__ == "__main__": + for name, pairs in load_tests_dict().items(): + run_test(name, pairs) From b40fc97607748141c4be537c7288edfdc27fa91b Mon Sep 17 00:00:00 2001 From: Jie You Date: Tue, 16 Oct 2018 10:13:42 -0700 Subject: [PATCH 2/3] original trie --- .DS_Store | Bin 0 -> 8196 bytes fixtures/.DS_Store | Bin 0 -> 6148 bytes fixtures/TrieTests/staketrietest.json | 54 +-- quarkchain/.DS_Store | Bin 0 -> 8196 bytes quarkchain/evm/.DS_Store | Bin 0 -> 8196 bytes quarkchain/evm/stake_trie.py | 472 +++++++++++++++--------- quarkchain/evm/tests/test_stake_trie.py | 83 +---- 7 files changed, 331 insertions(+), 278 deletions(-) create mode 100644 .DS_Store create mode 100644 fixtures/.DS_Store create mode 100644 quarkchain/.DS_Store create mode 100644 quarkchain/evm/.DS_Store diff --git a/.DS_Store b/.DS_Store new file mode 100644 index 0000000000000000000000000000000000000000..4fb83ef42fad83210419163c834930ed85aac096 GIT binary patch literal 8196 
zcmeHMPfr_16n{eqSv#RD{}Lc5oC$Y;Xx~;W_6H5*sESZ@lClsb;$Ne2;PAsprsSN{$fnEk=xC~K^ zJldyb>ijOpmQilJR-(WbEMwY39ip_T(RJRJ_CNL5?~+fCfN3d2blRRd|72e|h?>d! zgx@O7z>nhD+27f~xxw>87c47lWrwr(%X{LW9J_HVD!Ad6yehfzUL|R-3lW^O-O4@R zJJ_)&mIRMopL?MK`!x?H+h6;sQg*tw&lx%v5N=l1N}@w9Wa zurNFA%zyOp@o~nwa{bn)#RrYU$45_&pMH-;L0FQjXhdJ1)IXqQQHq%Ta>xV0>k^1^ zfxe(3Ezt^nMP*t^(e?}$-H#!odXyepHvcxm{(5qorOd~{-Ta&q$WmC4-L)oVNUc*U(gtcTK1mP8=JLeTPd?2!nb zSF8$N3cY=J*+%R=ip&0E42`7#YhLY}H(l$8P1sDl=}h5mj#T#;l zYAAxBBqBfd1>fu%-P~5Q$6lkzcLipY-(a;V&7yl=7(w^oH#Xfcba~w?9NrJyiWjW% zf{0_$*zmCm`OVb?<)&Npd41~6AMhYZW-5O}w``iy4fwOo*16yw21~kcDt|W_wCE-+ z!i%@zrEPjnKhiJsE4`ps^e4S$!|XEqfZb%%>^56qdA7)w*ypUs)--yc6GiWLrqY^i zLHB*EsYn{{7aKH#^i{{_Lm4TEMR7V>RO0)tEAeG$4Y9b0{x6|ts6n+9LZ-fMgsl>u zj7wj&8kmtI5k^4AG9{kIGbe?&U@4Pr3yFCKSYkzbBfl>g zzJZV86Zkm#&6$bVrbWCFv2&I)-A%ea$v( z%N9^@jyAoZF%9A6+Gy9{6mSasYYOPK+ogb?4*j}*ONyzWQ?w0ff;gumUe|3>B+;6o zrhsEQp<|%pUWJw4rYXjkcec_XG27Eo1TN8!wYaTijXhn zoP$-~a~Zyz%TT8QxG6?o!VufPXC(7-YJcU|WSXU=3W9H9W3#q(?YieRyvEJOyJ#Zk zk&4x{ON@*F literal 0 HcmV?d00001 diff --git a/fixtures/TrieTests/staketrietest.json b/fixtures/TrieTests/staketrietest.json index ab58b934c..f1a12144b 100644 --- a/fixtures/TrieTests/staketrietest.json +++ b/fixtures/TrieTests/staketrietest.json @@ -1,31 +1,31 @@ { - "randomSelectTests": { + "branchingTests": { "in":[ - ["0x04110d816c380812a427968ece99b1c963dfbce6", 1], - ["0x095e7baea6a6c7c4c2dfeb977efac326af552d87", 2], - ["0x0a517d755cebbf66312b30fff713666a9cb917e0", 3], - ["0x24dd378f51adc67a50e339e8031fe9bd4aafab36", 4], - ["0x293f982d000532a7861ab122bdc4bbfd26bf9030", 5], - ["0x2cf5732f017b0cf1b1f13a1478e10239716bf6b5", 1], - ["0x31c640b92c21a1f1465c91070b4b3b4d6854195f", 2], - ["0x37f998764813b136ddf5a754f34063fd03065e36", 3], - ["0x37fa399a749c121f8a15ce77e3d9f9bec8020d7a", 4], - ["0x4f36659fa632310b6ec438dea4085b522a2dd077", 5], - ["0x62c01474f089b07dae603491675dc5b5748f7049", 1], - ["0x729af7294be595a0efd7d891c9e51f89c07950c7", 2], - ["0x83e3e5a16d3b696a0314b30b2534804dd5e11197", 3], - ["0x8703df2417e0d7c59d063caa9583cb10a4d20532", 4], - ["0x8dffcd74e5b5923512916c6a64b502689cfa65e1", 5], - ["0x95a4d7cccb5204733874fa87285a176fe1e9e240", 1], - ["0x99b2fcba8120bedd048fe79f5262a6690ed38c39", 2], - ["0xa4202b8b8afd5354e3e40a219bdc17f6001bf2cf", 3], - ["0xa94f5374fce5edbc8e2a8697c15331677e6ebf0b", 4], - ["0xa9647f4a0a14042d91dc33c0328030a7157c93ae", 5], - ["0xaa6cffe5185732689c18f37a7f86170cb7304c2a", 1], - ["0xaae4a2e3c51c04606dcb3723456e58f3ed214f45", 2], - ["0xc37a43e940dfb5baf581a0b82b351d48305fc885", 3], - ["0xd2571607e241ecf590ed94b12d87c94babe36db6", 4], - ["0xf735071cbee190d76b704ce68384fc21e389fbe7", 5], + ["0x04110d816c380812a427968ece99b1c963dfbce6", "something"], + ["0x095e7baea6a6c7c4c2dfeb977efac326af552d87", "something"], + ["0x0a517d755cebbf66312b30fff713666a9cb917e0", "something"], + ["0x24dd378f51adc67a50e339e8031fe9bd4aafab36", "something"], + ["0x293f982d000532a7861ab122bdc4bbfd26bf9030", "something"], + ["0x2cf5732f017b0cf1b1f13a1478e10239716bf6b5", "something"], + ["0x31c640b92c21a1f1465c91070b4b3b4d6854195f", "something"], + ["0x37f998764813b136ddf5a754f34063fd03065e36", "something"], + ["0x37fa399a749c121f8a15ce77e3d9f9bec8020d7a", "something"], + ["0x4f36659fa632310b6ec438dea4085b522a2dd077", "something"], + ["0x62c01474f089b07dae603491675dc5b5748f7049", "something"], + ["0x729af7294be595a0efd7d891c9e51f89c07950c7", "something"], + ["0x83e3e5a16d3b696a0314b30b2534804dd5e11197", "something"], + ["0x8703df2417e0d7c59d063caa9583cb10a4d20532", "something"], + 
["0x8dffcd74e5b5923512916c6a64b502689cfa65e1", "something"], + ["0x95a4d7cccb5204733874fa87285a176fe1e9e240", "something"], + ["0x99b2fcba8120bedd048fe79f5262a6690ed38c39", "something"], + ["0xa4202b8b8afd5354e3e40a219bdc17f6001bf2cf", "something"], + ["0xa94f5374fce5edbc8e2a8697c15331677e6ebf0b", "something"], + ["0xa9647f4a0a14042d91dc33c0328030a7157c93ae", "something"], + ["0xaa6cffe5185732689c18f37a7f86170cb7304c2a", "something"], + ["0xaae4a2e3c51c04606dcb3723456e58f3ed214f45", "something"], + ["0xc37a43e940dfb5baf581a0b82b351d48305fc885", "something"], + ["0xd2571607e241ecf590ed94b12d87c94babe36db6", "something"], + ["0xf735071cbee190d76b704ce68384fc21e389fbe7", "something"], ["0x04110d816c380812a427968ece99b1c963dfbce6", null], ["0x095e7baea6a6c7c4c2dfeb977efac326af552d87", null], ["0x0a517d755cebbf66312b30fff713666a9cb917e0", null], @@ -52,7 +52,7 @@ ["0xd2571607e241ecf590ed94b12d87c94babe36db6", null], ["0xf735071cbee190d76b704ce68384fc21e389fbe7", null] ], - "root": "0x25d946ba89d4510afdd0b376be458cdf58c19c7349702ba9d5f76a1abd97cf9a" + "root": "0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421" } } diff --git a/quarkchain/.DS_Store b/quarkchain/.DS_Store new file mode 100644 index 0000000000000000000000000000000000000000..51ef04fd2b576839007505a414acba53e6f2e53d GIT binary patch literal 8196 zcmeHML2nyH6n+yY&Du%HrcIhqfn;GJRceIN5am6%% zY{v~jzHsFX{Q*e)15VsH@C%T*aOJ=O35hGbH#<&u*UqUDs_0BK^PXqF_h#N3zc+px z0AQ7F;|9Pg09e?G+H*KeX!M-=Sew!_B+O7Azyk?2{Cf*JV`wvBJYYOvJYYOvJYYQV zB6tArY--jy_kR4;jK%}T123fqC_faj6D?~xjHyZ;IH(c;=`>EOhCJ2*N~Wl3S<_)m zl_=_rs|ThudbQsfE99S$4Os~vzg~HV8C|6W)VC9&a(RjdkVB7;# zyU(eb-Gd-izYn&2QFndeB7%bIJ?z*1Q4P6~R0jr72M7BRaMF{J`hHA`uyqj7#q}ga z>Rmx*Ew+-8nnej8B_fpI5T@5)UuDpFr9Xfs+S8h-|F4V$=@$^9ZhCW(*^RRNn}p>v z&;WNLlB`W4XA$Y)KAMxz%E~1A0h{7d2@ZOxcTA*q<;@$nwhr5aCnsN=e)%n$f{p2X4-4_eZ{S)$GfzQ~$gb(pK*XUxXFE%lEvx2&&!_z9;#yB&KFx z%HZzt#^YyGyMqp5mYz*0T_1*=2M4XtYiu{05tn-je!pffMD4{+fVT2yv zlDxeYGzG5MLl19v=uvc^hY`{-nZ11{44t6GD}zVA<8psHs0b-Vn~>rnBQ&2|G3@O-{h{zN~u;5xhyA43&`z7Jo+ zQ}_;kfS=%3_yhiC)9ea+gI#Bv>=wJtK4PD;du*5Ov(MN;EI&0m)7P~=O;`gNeWtx? 
z82BP+;S8bY@#}t!p^3qd0o7K4A~Egw=>VWmWmY0o3zz^Xs5qt$@BCzwl9|uQ)G&x0 zOgv6zEe!UQTn_iQ0i868l2Zybho}&fjl@n#$DbJP7|1E9yh=`e+((GJIVVZ6;gyJE=mdmtC9Sf%>^gY)12KmS5TgYkgzzzgdEHhs5xw}RUn-OJgXQmtLYeu$kK z;f66)A~>jW9H}bDk)Hj-5cwiPNv5pnFs3+z?Z1B!kokE}Kf_aZt-1eQJQpnalxLE6 NwYmS*SN4A~{0pKNUkm^M literal 0 HcmV?d00001 diff --git a/quarkchain/evm/.DS_Store b/quarkchain/evm/.DS_Store new file mode 100644 index 0000000000000000000000000000000000000000..de167ec12334dbb54824dc8b0a6b133ff73518c7 GIT binary patch literal 8196 zcmeHM&2G~`5T5N1by8LBp{PCjg2bUhX({x8kfKlyy&(m`0g!7qX=~xwCU%;J5R?n= z0K5T@!V~Zc+<5@_W_KIeuH73Hsk_$hZ0z}FcE6pqJ6j?WtwCp-Xp@L6RF-SYXi5q{ z=X$Bk={p;c0(c^yQgU$Jqxh1xxnUSE3>XFs1BL;^!2iGizS&%?itoO9YFfj9VcitQj^{S3LpHUX zDyiTk6r6d^f2%;k|h&*WUrh0^W z_qQzQU7%tb(Gj_*r_^OT1=P0!$|8>fig5MhDM7)D;PGU=)L@x)duYQ_#HUh3NXMAX zNh#u20kMLSpTkcVrxqzt>?tWjN)A5fI5@`K6M5><$GqZF6myVA_24VPO-bzeaJFz%t;ox3?#492`-L|QYM)XVP*KZ4O9f2 zPaV`0vEg8j9{LW(z}yDqvn6npv&8;mId&fwDo`9PITX(~h5lP0oh#=HrgBJ2BhzYr za{7y~&MTKvQiyow=H?d`Z!K9?-KwwD-?UGoVLNrx zLDF*julUz9H$C-q|9KPzv%c%S@Vi52ZT(3Urmi1$`x5MTx=?xj(hob)uoVr$P9p8J z)dS0_Sv6;Eb3ESJ-QBVuGj=ctc^R{j}H!eqxa(vlaF5z7$`!2 zcWG7qp0!`m%S`dC-4BB(JYfjxx?BWTZYX|9Tx>6g-XS2mvX+FS42LX+^cU%@{ADJ$$InbC%t$jG!@%`3Ft3KN$@%}&>hu5WAE+6D zVZbo(*BB5B`|bS}%wK%ti=H^wwowmJxv<_;NeMwC({V_djziA=VTisBRmL>6oGOVs VDF6N-p!kQj`Tom=FwKo(;18hzrDFg9 literal 0 HcmV?d00001 diff --git a/quarkchain/evm/stake_trie.py b/quarkchain/evm/stake_trie.py index a36873720..78a5a97f7 100644 --- a/quarkchain/evm/stake_trie.py +++ b/quarkchain/evm/stake_trie.py @@ -4,46 +4,126 @@ import rlp from quarkchain import utils from quarkchain.evm.fast_rlp import encode_optimized -from rlp.sedes import big_endian_int -from quarkchain.evm.utils import int_to_big_endian, big_endian_to_int -from quarkchain.evm.trie import ( - to_bytes, - bin_to_nibbles, - nibbles_to_bin, - with_terminator, - without_terminator, - adapt_terminator, - pack_nibbles, - unpack_to_nibbles, - starts_with, - is_key_value_type, -) rlp_encode = encode_optimized bin_to_nibbles_cache = {} +def to_bytes(value): + if isinstance(value, bytes): + return value + if isinstance(value, str): + return bytes(value, "utf-8") + if isinstance(value, int): + return bytes(str(value), "utf-8") + + hti = {} for i, c in enumerate("0123456789abcdef"): hti[c] = i -itoh = {} -for i, c in enumerate("0123456789abcdef"): - itoh[i] = c +def bin_to_nibbles(s): + """convert bytes s to nibbles (half-bytes) -def nibbles_to_address(nibbles): - """convert nibbles to address (hex public key address starting with "0x") - >>> nibbles_to_address([2, 4, 13, 13]) - "0x24dd" + >>> bin_to_nibbles("") + [] + >>> bin_to_nibbles("h") + [6, 8] + >>> bin_to_nibbles("he") + [6, 8, 6, 5] + >>> bin_to_nibbles("hello") + [6, 8, 6, 5, 6, 12, 6, 12, 6, 15] """ + return [hti[c] for c in s.hex()] + + +def nibbles_to_bin(nibbles): if any(x > 15 or x < 0 for x in nibbles): raise Exception("nibbles can only be [0,..15]") - res = "0x" - for n in nibbles: - res += itoh[n] - return res + + if len(nibbles) % 2: + raise Exception("nibbles must be of even numbers") + + res = bytearray() + for i in range(0, len(nibbles), 2): + res.append(16 * nibbles[i] + nibbles[i + 1]) + return bytes(res) + + +NIBBLE_TERMINATOR = 16 + + +def with_terminator(nibbles): + nibbles = nibbles[:] + if not nibbles or nibbles[-1] != NIBBLE_TERMINATOR: + nibbles.append(NIBBLE_TERMINATOR) + return nibbles + + +def without_terminator(nibbles): + nibbles = nibbles[:] + if nibbles and nibbles[-1] == NIBBLE_TERMINATOR: + del nibbles[-1] + return nibbles + + +def 
adapt_terminator(nibbles, has_terminator): + if has_terminator: + return with_terminator(nibbles) + else: + return without_terminator(nibbles) + + +def pack_nibbles(nibbles): + """pack nibbles to binary + + :param nibbles: a nibbles sequence. may have a terminator + """ + + if nibbles[-1:] == [NIBBLE_TERMINATOR]: + flags = 2 + nibbles = nibbles[:-1] + else: + flags = 0 + + oddlen = len(nibbles) % 2 + flags |= oddlen # set lowest bit if odd number of nibbles + if oddlen: + nibbles = [flags] + nibbles + else: + nibbles = [flags, 0] + nibbles + o = bytearray() + for i in range(0, len(nibbles), 2): + o.append(16 * nibbles[i] + nibbles[i + 1]) + return bytes(o) + + +def unpack_to_nibbles(bindata): + """unpack packed binary data to nibbles + + :param bindata: binary packed from nibbles + :return: nibbles sequence, may have a terminator + """ + o = bin_to_nibbles(bindata) + flags = o[0] + if flags & 2: + o.append(NIBBLE_TERMINATOR) + if flags & 1 == 1: + o = o[1:] + else: + o = o[2:] + return o + + +def starts_with(full, part): + """ test whether the items in the part is + the leading items of the full + """ + if len(full) < len(part): + return False + return full[: len(part)] == part (NODE_TYPE_BLANK, NODE_TYPE_LEAF, NODE_TYPE_EXTENSION, NODE_TYPE_BRANCH) = tuple( @@ -51,19 +131,20 @@ def nibbles_to_address(nibbles): ) -NIBBLE_TERMINATOR = 16 +def is_key_value_type(node_type): + return node_type in [NODE_TYPE_LEAF, NODE_TYPE_EXTENSION] + + BLANK_NODE = b"" BLANK_ROOT = utils.sha3_256(rlp.encode(b"")) -class Stake_Trie(object): +class Trie(object): def __init__(self, db, root_hash=BLANK_ROOT): """it also present a dictionary like interface :param db key value database - :root: blank or stake trie node in form of [key, [value, token]] or [[v0, token],[v1, token]..[v15, token],[v, token]] - :token: the total numbers of tokens rooted at that node (i.e., the number of tokens below it) - All operations that modify the trie must adjust the token information + :root: blank or trie node in form of [key, value] or [v0,v1..v15,v] """ self.db = db # Pass in a database object directly self.set_root_hash(root_hash) @@ -128,37 +209,21 @@ def _delete_child_storage(self, node): self._delete_child_storage(self._decode_to_node(node[1])) def _encode_node(self, node, put_in_db=True): - """ - All the operations that modify the trie must adjust the stake information - """ if node == BLANK_NODE: return BLANK_NODE # assert isinstance(node, list) - node_type = self._get_node_type(node) - stake_sum = 0 - if is_key_value_type(node_type): - stake_sum = big_endian_to_int(node[1][1]) - elif node_type == NODE_TYPE_BRANCH: - for i in range(17): - if node[i] != BLANK_NODE: - stake_sum += big_endian_to_int(node[i][1]) - rlpnode = rlp_encode(node) - # may fix - # if len(rlpnode) < 32: - # return node + if len(rlpnode) < 32: + return node hashkey = utils.sha3_256(rlpnode) if put_in_db: self.db.put(hashkey, rlpnode) - return [hashkey, int_to_big_endian(stake_sum)] + return hashkey def _decode_to_node(self, encoded): if encoded == BLANK_NODE: return BLANK_NODE - - encoded = encoded[0] - if isinstance(encoded, list): return encoded o = rlp.decode(self.db[encoded]) @@ -196,14 +261,14 @@ def _get(self, node, key): if node_type == NODE_TYPE_BRANCH: # already reach the expected node if not key: - return node[-1][1] + return node[-1] sub_node = self._decode_to_node(node[key[0]]) return self._get(sub_node, key[1:]) # key value node curr_key = without_terminator(unpack_to_nibbles(node[0])) if node_type == NODE_TYPE_LEAF: - return node[1][1] 
if key == curr_key else BLANK_NODE + return node[1] if key == curr_key else BLANK_NODE if node_type == NODE_TYPE_EXTENSION: # traverse child nodes @@ -229,11 +294,11 @@ def _update(self, node, key, value): node_type = self._get_node_type(node) if node_type == NODE_TYPE_BLANK: - return [pack_nibbles(with_terminator(key)), [value, value]] + return [pack_nibbles(with_terminator(key)), value] elif node_type == NODE_TYPE_BRANCH: if not key: - node[-1] = [value, value] + node[-1] = value else: new_node = self._update_and_delete_storage( self._decode_to_node(node[key[0]]), key[1:], value @@ -268,7 +333,7 @@ def _update_kv_node(self, node, key, value): if remain_key == [] == remain_curr_key: if not is_inner: - return [node[0], [value, value]] + return [node[0], value] new_node = self._update_and_delete_storage( self._decode_to_node(node[1]), remain_key, value ) @@ -282,7 +347,7 @@ def _update_kv_node(self, node, key, value): new_node = [BLANK_NODE] * 17 new_node[-1] = node[1] new_node[remain_key[0]] = self._encode_node( - [pack_nibbles(with_terminator(remain_key[1:])), [value, value]] + [pack_nibbles(with_terminator(remain_key[1:])), value] ) else: new_node = [BLANK_NODE] * 17 @@ -299,10 +364,10 @@ def _update_kv_node(self, node, key, value): ) if remain_key == []: - new_node[-1] = [value, value] + new_node[-1] = value else: new_node[remain_key[0]] = self._encode_node( - [pack_nibbles(with_terminator(remain_key[1:])), [value, value]] + [pack_nibbles(with_terminator(remain_key[1:])), value] ) if prefix_length: @@ -340,11 +405,160 @@ def _getany(self, node, reverse=False, path=[]): return curr_key if node_type == NODE_TYPE_EXTENSION: + curr_key = without_terminator(unpack_to_nibbles(node[0])) sub_node = self._decode_to_node(node[1]) return curr_key + self._getany( sub_node, reverse=reverse, path=path + curr_key ) + def _split(self, node, key): + node_type = self._get_node_type(node) + if node_type == NODE_TYPE_BLANK: + return BLANK_NODE, BLANK_NODE + elif not key: + return BLANK_NODE, node + elif node_type == NODE_TYPE_BRANCH: + b1 = node[: key[0]] + b1 += [""] * (17 - len(b1)) + b2 = node[key[0] + 1 :] + b2 = [""] * (17 - len(b2)) + b2 + b1[16], b2[16] = b2[16], b1[16] + sub = self._decode_to_node(node[key[0]]) + sub1, sub2 = self._split(sub, key[1:]) + b1[key[0]] = self._encode_node(sub1) if sub1 else "" + b2[key[0]] = self._encode_node(sub2) if sub2 else "" + return ( + self._normalize_branch_node(b1) + if len([x for x in b1 if x]) + else BLANK_NODE, + self._normalize_branch_node(b2) + if len([x for x in b2 if x]) + else BLANK_NODE, + ) + + descend_key = without_terminator(unpack_to_nibbles(node[0])) + if node_type == NODE_TYPE_LEAF: + if descend_key < key: + return node, BLANK_NODE + else: + return BLANK_NODE, node + elif node_type == NODE_TYPE_EXTENSION: + sub_node = self._decode_to_node(node[1]) + sub_key = key[len(descend_key) :] + if starts_with(key, descend_key): + sub1, sub2 = self._split(sub_node, sub_key) + subtype1 = self._get_node_type(sub1) + subtype2 = self._get_node_type(sub2) + if not sub1: + o1 = BLANK_NODE + elif subtype1 in (NODE_TYPE_LEAF, NODE_TYPE_EXTENSION): + new_key = key[: len(descend_key)] + unpack_to_nibbles(sub1[0]) + o1 = [pack_nibbles(new_key), sub1[1]] + else: + o1 = [ + pack_nibbles(key[: len(descend_key)]), + self._encode_node(sub1), + ] + if not sub2: + o2 = BLANK_NODE + elif subtype2 in (NODE_TYPE_LEAF, NODE_TYPE_EXTENSION): + new_key = key[: len(descend_key)] + unpack_to_nibbles(sub2[0]) + o2 = [pack_nibbles(new_key), sub2[1]] + else: + o2 = [ + 
pack_nibbles(key[: len(descend_key)]), + self._encode_node(sub2), + ] + return o1, o2 + elif descend_key < key[: len(descend_key)]: + return node, BLANK_NODE + elif descend_key > key[: len(descend_key)]: + return BLANK_NODE, node + else: + return BLANK_NODE, BLANK_NODE + + def split(self, key): + key = bin_to_nibbles(key) + r1, r2 = self._split(self.root_node, key) + t1, t2 = Trie(self.db), Trie(self.db) + t1.root_node, t2.root_node = r1, r2 + return t1, t2 + + def _merge(self, node1, node2): + # assert isinstance(node1, list) or not node1 + # assert isinstance(node2, list) or not node2 + node_type1 = self._get_node_type(node1) + node_type2 = self._get_node_type(node2) + if not node1: + return node2 + if not node2: + return node1 + if node_type1 != NODE_TYPE_BRANCH and node_type2 != NODE_TYPE_BRANCH: + descend_key1 = unpack_to_nibbles(node1[0]) + descend_key2 = unpack_to_nibbles(node2[0]) + # find longest common prefix + prefix_length = 0 + for i in range(min(len(descend_key1), len(descend_key2))): + if descend_key1[i] != descend_key2[i]: + break + prefix_length = i + 1 + if prefix_length: + sub1 = ( + self._decode_to_node(node1[1]) + if node_type1 == NODE_TYPE_EXTENSION + else node1[1] + ) + new_sub1 = ( + [pack_nibbles(descend_key1[prefix_length:]), sub1] + if descend_key1[prefix_length:] + else sub1 + ) + sub2 = ( + self._decode_to_node(node2[1]) + if node_type2 == NODE_TYPE_EXTENSION + else node2[1] + ) + new_sub2 = ( + [pack_nibbles(descend_key2[prefix_length:]), sub2] + if descend_key2[prefix_length:] + else sub2 + ) + return [ + pack_nibbles(descend_key1[:prefix_length]), + self._encode_node(self._merge(new_sub1, new_sub2)), + ] + + nodes = [[node1], [node2]] + for (node, node_type) in zip(nodes, [node_type1, node_type2]): + if node_type != NODE_TYPE_BRANCH: + new_node = [BLANK_NODE] * 17 + curr_key = unpack_to_nibbles(node[0][0]) + new_node[curr_key[0]] = ( + self._encode_node([pack_nibbles(curr_key[1:]), node[0][1]]) + if curr_key[0] < 16 and curr_key[1:] + else node[0][1] + ) + node[0] = new_node + node1, node2 = nodes[0][0], nodes[1][0] + assert len([i for i in range(17) if node1[i] and node2[i]]) <= 1 + new_node = [ + self._encode_node( + self._merge( + self._decode_to_node(node1[i]), self._decode_to_node(node2[i]) + ) + ) + if node1[i] and node2[i] + else node1[i] or node2[i] + for i in range(17) + ] + return new_node + + @classmethod + def unsafe_merge(cls, trie1, trie2): + t = Trie(trie1.db) + t.root_node = t._merge(trie1.root_node, trie2.root_node) + return t + def _iter(self, node, key, reverse=False, path=[]): # print('iter', node, key, 'reverse =', reverse, 'path =', path) node_type = self._get_node_type(node) @@ -427,7 +641,6 @@ def _delete_node_storage(self, node): return # assert isinstance(node, list) encoded = self._encode_node(node, put_in_db=False) - encoded = encoded[0] if len(encoded) < 32: return """ @@ -610,7 +823,7 @@ def _iter_branch(self, node): if node_type == NODE_TYPE_EXTENSION: sub_tree = self._iter_branch(self._decode_to_node(node[1])) else: - sub_tree = [(to_bytes(NIBBLE_TERMINATOR), node[1][1])] + sub_tree = [(to_bytes(NIBBLE_TERMINATOR), node[1])] # prepend key of this node to the keys of children for sub_key, sub_value in sub_tree: @@ -624,7 +837,7 @@ def _iter_branch(self, node): full_key = (bytes(str(i), "ascii") + b"+" + sub_key).strip(b"+") yield (full_key, sub_value) if node[16]: - yield (to_bytes(NIBBLE_TERMINATOR), node[-1][1]) + yield (to_bytes(NIBBLE_TERMINATOR), node[-1]) def iter_branch(self): for key_str, value in 
self._iter_branch(self.root_node): @@ -656,7 +869,7 @@ def _to_dict(self, node): if node_type == NODE_TYPE_EXTENSION: sub_dict = self._to_dict(self._decode_to_node(node[1])) else: - sub_dict = {to_bytes(NIBBLE_TERMINATOR): node[1][1]} + sub_dict = {to_bytes(NIBBLE_TERMINATOR): node[1]} # prepend key of this node to the keys of children res = {} @@ -675,7 +888,7 @@ def _to_dict(self, node): res[full_key] = sub_value if node[16]: - res[to_bytes(NIBBLE_TERMINATOR)] = node[-1][1] + res[to_bytes(NIBBLE_TERMINATOR)] = node[-1] return res def to_dict(self): @@ -695,6 +908,24 @@ def get(self, key): raise Exception("Key must be bytes") return self._get(self.root_node, bin_to_nibbles(to_bytes(key))) + def __len__(self): + return self._get_size(self.root_node) + + def __getitem__(self, key): + return self.get(key) + + def __setitem__(self, key, value): + return self.update(key, value) + + def __delitem__(self, key): + return self.delete(key) + + def __iter__(self): + return iter(self.to_dict()) + + def __contains__(self, key): + return self.get(key) != BLANK_NODE + def update(self, key, value): """ :param key: a bytes @@ -721,119 +952,6 @@ def root_hash_valid(self): return True return self.root_hash in self.db - # New functions for POS (Photon) - def _get_total_stake(self, node): - """Get the total stake - - :param node: node in form of list, or BLANK_NODE - """ - if node == BLANK_NODE: - return 0 - - node_type = self._get_node_type(node) - - if is_key_value_type(node_type): - stake_is_node = node_type == NODE_TYPE_EXTENSION - if stake_is_node: - return self._get_total_stake(self._decode_to_node(node[1])) - else: - return big_endian_to_int(node[1][1]) - elif node_type == NODE_TYPE_BRANCH: - tokens = [ - self._get_total_stake(self._decode_to_node(node[x])) for x in range(16) - ] - tokens = tokens + [big_endian_to_int(node[-1][1]) if node[-1] else 0] - return sum(tokens) - - def _get_total_stake_from_root_node(self, node): - """ Get the total stake directly from root node informaiton - - :param node: node in form of list, or BLANK_NODE - """ - if node == BLANK_NODE: - return 0 - - node_type = self._get_node_type(node) - - if is_key_value_type(node_type): - return big_endian_to_int(node[1][1]) - else: - stake_sum = 0 - for i in range(17): - if node[i] != BLANK_NODE: - stake_sum += big_endian_to_int(node[i][1]) - return stake_sum - - def _select_staker(self, node, value): - """ Get the selected staker address given the pseudo-randomly value - - :param node: node in form of list, or BLANK_NODE - :value: pseudo-randomly selected value - """ - node_type = self._get_node_type(node) - assert value >= 0 - - if node_type == NODE_TYPE_BLANK: - return None - - if node_type == NODE_TYPE_BRANCH: - scan_range = list(range(17)) - for i in scan_range: - if node[i] != BLANK_NODE: - if big_endian_to_int(node[i][1]) >= value: - sub_node = self._decode_to_node(node[i]) - o = self._select_staker(sub_node, value) - return [i] + o if o is not None else None - else: - value = value - big_endian_to_int(node[i][1]) - return None - - if node_type == NODE_TYPE_LEAF: - descend_key = without_terminator(unpack_to_nibbles(node[0])) - return descend_key if value <= big_endian_to_int(node[1][1]) else None - - elif node_type == NODE_TYPE_EXTENSION: - descend_key = without_terminator(unpack_to_nibbles(node[0])) - if value <= big_endian_to_int(node[1][1]): - sub_node = self._decode_to_node(node[1]) - o = self._select_staker(sub_node, value) - return descend_key + o if o else None - else: - return None - - return None - - def 
_check_total_tokens(self): - if self.root_node != BLANK_NODE: - assert self._get_total_stake( - self.root_node - ) == self._get_total_stake_from_root_node(self.root_node) - - def get_total_stake(self): - return self._get_total_stake_from_root_node(self.root_node) - - def select_staker(self, value): - o = self._select_staker(self.root_node, value) - return nibbles_to_address(o) - - def __len__(self): - return self._get_size(self.root_node) - - def __getitem__(self, key): - return self.get(key) - - def __setitem__(self, key, value): - return self.update(key, value) - - def __delitem__(self, key): - return self.delete(key) - - def __iter__(self): - return iter(self.to_dict()) - - def __contains__(self, key): - return self.get(key) != BLANK_NODE - if __name__ == "__main__": import sys @@ -849,9 +967,9 @@ def encode_node(nd): if len(sys.argv) >= 2: if sys.argv[1] == "insert": - t = Stake_Trie(_db, bytes.fromhex(sys.argv[3])) + t = Trie(_db, bytes.fromhex(sys.argv[3])) t.update(bytes(sys.argv[4], "ascii"), bytes(sys.argv[5], "ascii")) print(encode_node(t.root_hash)) elif sys.argv[1] == "get": - t = Stake_Trie(_db, bytes.fromhex(sys.argv[3])) + t = Trie(_db, bytes.fromhex(sys.argv[3])) print(t.get(bytes(sys.argv[4], "ascii"))) diff --git a/quarkchain/evm/tests/test_stake_trie.py b/quarkchain/evm/tests/test_stake_trie.py index 3bae5200c..809541555 100644 --- a/quarkchain/evm/tests/test_stake_trie.py +++ b/quarkchain/evm/tests/test_stake_trie.py @@ -1,15 +1,10 @@ import os import json -import quarkchain.evm.stake_trie as stake_trie +import quarkchain.evm.trie as trie from quarkchain.db import InMemoryDb import itertools from quarkchain.utils import Logger import unittest -import random - -from rlp.sedes import big_endian_int -from quarkchain.evm.utils import int_to_big_endian, big_endian_to_int - # customize VM log output to your needs # hint: use 'py.test' with the '-s' option to dump logs to the console @@ -29,7 +24,7 @@ def load_tests_dict(): fixture = {} testdir = os.path.join(fixture_path, "TrieTests") for f in os.listdir(testdir): - if f != "staketrietest.json": + if f != "trietest.json": continue sub_fixture = json.load(open(os.path.join(testdir, f))) for k, v in sub_fixture.items(): @@ -51,57 +46,37 @@ def load_tests(loader, tests, pattern): def run_test(name, pairs): Logger.debug("testing %s" % name) - # random seed - seed = "000000000000000000139dba4d2169f991f9867dd3e996f5e0a86f1ae985308f" - random.seed(seed) def _dec(x): if isinstance(x, str) and x.startswith("0x"): return bytes.fromhex(str(x[2:])) - - if isinstance(x, int): - return int_to_big_endian(x) - if isinstance(x, str): return bytes(x, "ascii") return x pairs["in"] = [(_dec(k), _dec(v)) for k, v in pairs["in"]] - stakerlist = [(k, v) for k, v in pairs["in"] if v is not None] deletes = [(k, v) for k, v in pairs["in"] if v is None] N_PERMUTATIONS = 100 - N_RANDOM = 300 for i, permut in enumerate(itertools.permutations(pairs["in"])): if i > N_PERMUTATIONS: break if pairs.get("nopermute", None) is not None and pairs["nopermute"]: permut = pairs["in"] N_PERMUTATIONS = 1 - t = stake_trie.Stake_Trie(InMemoryDb()) - # insert the account + t = trie.Trie(InMemoryDb()) for k, v in permut: # logger.debug('updating with (%s, %s)' %(k, v)) if v is not None: t.update(k, v) - - # check the total account of stakes - if t._check_total_tokens() == False: - raise Exception( - "Mismatch_total_stake: %r %r %r" - % (name, t.root_node, t.get_total_stake()) - ) - - if t.get_total_stake() != 75: - raise Exception( - "Mismatch_total_stake: %r %r %r" - % 
(name, t.root_node, t.get_total_stake()) - ) - - # check the root hash + else: + t.delete(k) + # make sure we have deletes at the end + for k, v in deletes: + t.delete(k) if pairs["root"] != "0x" + t.root_hash.hex(): raise Exception( - "Mismatch_root_hash: %r %r %r %r" + "Mismatch: %r %r %r %r" % ( name, pairs["root"], @@ -110,46 +85,6 @@ def _dec(x): ) ) - for _ in range(N_RANDOM): - # pseudo-randomly select a stake - chosen_num = random.random() * t.get_total_stake() - chosen = chosen_num - - for k, v in stakerlist: - curr_val = big_endian_to_int(v) - if chosen <= curr_val: - result = k - break - chosen -= curr_val - - if _dec(t.select_staker(chosen_num)) != result: - raise Exception( - "Mismatch_random_selection: %r %r %r %r %r" - % ( - name, - t.root_node, - chosen_num, - t.select_staker(chosen_num), - result, - ) - ) - - # delete the corresponding account - for k, v in permut: - # logger.debug('updating with (%s, %s)' %(k, v)) - if v is None: - t.delete(k) - - # make sure we have deletes at the end - for k, v in deletes: - t.delete(k) - - # make sure we have deleted all the stakes - if t.get_total_stake() != 0: - raise Exception( - "Mismatch: %r %r %r" % (name, t.root_node, t.get_total_stake()) - ) - if __name__ == "__main__": for name, pairs in load_tests_dict().items(): From e6f2c58a3311bf0c60cce30955d1cdf42189eb9d Mon Sep 17 00:00:00 2001 From: Jie You Date: Tue, 16 Oct 2018 10:28:09 -0700 Subject: [PATCH 3/3] stake trie comparision --- fixtures/TrieTests/.DS_Store | Bin 0 -> 6148 bytes fixtures/TrieTests/staketrietest.json | 54 +-- quarkchain/.DS_Store | Bin 8196 -> 8196 bytes quarkchain/evm/.DS_Store | Bin 8196 -> 8196 bytes quarkchain/evm/stake_trie.py | 472 +++++++++--------------- quarkchain/evm/tests/.DS_Store | Bin 0 -> 6148 bytes quarkchain/evm/tests/test_stake_trie.py | 83 ++++- 7 files changed, 278 insertions(+), 331 deletions(-) create mode 100644 fixtures/TrieTests/.DS_Store create mode 100644 quarkchain/evm/tests/.DS_Store diff --git a/fixtures/TrieTests/.DS_Store b/fixtures/TrieTests/.DS_Store new file mode 100644 index 0000000000000000000000000000000000000000..a87a09f8b49f225444bc163a10fec060accf0744 GIT binary patch literal 6148 zcmeHKy-EW?5T4P1K@p-Lrm)!3O0c#$LM*JckCz|tC*&k161!`B2VcRLv+)7+o7o{= zavs4(B+S6fw>!HtyYq$H%@Pr}ru|c*1`*Yvg0*d!A4JAQOR`~}Eg)--0gb3f8TDyU zn(Yo}z!~^&49K?|=my{Gj5@I2-)|rNM|gTPr7>^!;Y`GgckzDKoCx1-~FeaTiCG85Zlj zvN|iO{(?_cT+tgc6?cZN%i1%5 zJ6oi_6Va_R;0!nea|Yym2vEVqFpsE52aG-efHk;P(3Zad5)%v)!#pBJARI%1F_i6z z!7&{EVDl2gJYozdw#NtC%4{zb*42?e=yc*FqFZOc8CYasGjj8V H{3!!ppEi=5 literal 0 HcmV?d00001 diff --git a/fixtures/TrieTests/staketrietest.json b/fixtures/TrieTests/staketrietest.json index f1a12144b..ab58b934c 100644 --- a/fixtures/TrieTests/staketrietest.json +++ b/fixtures/TrieTests/staketrietest.json @@ -1,31 +1,31 @@ { - "branchingTests": { + "randomSelectTests": { "in":[ - ["0x04110d816c380812a427968ece99b1c963dfbce6", "something"], - ["0x095e7baea6a6c7c4c2dfeb977efac326af552d87", "something"], - ["0x0a517d755cebbf66312b30fff713666a9cb917e0", "something"], - ["0x24dd378f51adc67a50e339e8031fe9bd4aafab36", "something"], - ["0x293f982d000532a7861ab122bdc4bbfd26bf9030", "something"], - ["0x2cf5732f017b0cf1b1f13a1478e10239716bf6b5", "something"], - ["0x31c640b92c21a1f1465c91070b4b3b4d6854195f", "something"], - ["0x37f998764813b136ddf5a754f34063fd03065e36", "something"], - ["0x37fa399a749c121f8a15ce77e3d9f9bec8020d7a", "something"], - ["0x4f36659fa632310b6ec438dea4085b522a2dd077", "something"], - ["0x62c01474f089b07dae603491675dc5b5748f7049", "something"], 
- ["0x729af7294be595a0efd7d891c9e51f89c07950c7", "something"], - ["0x83e3e5a16d3b696a0314b30b2534804dd5e11197", "something"], - ["0x8703df2417e0d7c59d063caa9583cb10a4d20532", "something"], - ["0x8dffcd74e5b5923512916c6a64b502689cfa65e1", "something"], - ["0x95a4d7cccb5204733874fa87285a176fe1e9e240", "something"], - ["0x99b2fcba8120bedd048fe79f5262a6690ed38c39", "something"], - ["0xa4202b8b8afd5354e3e40a219bdc17f6001bf2cf", "something"], - ["0xa94f5374fce5edbc8e2a8697c15331677e6ebf0b", "something"], - ["0xa9647f4a0a14042d91dc33c0328030a7157c93ae", "something"], - ["0xaa6cffe5185732689c18f37a7f86170cb7304c2a", "something"], - ["0xaae4a2e3c51c04606dcb3723456e58f3ed214f45", "something"], - ["0xc37a43e940dfb5baf581a0b82b351d48305fc885", "something"], - ["0xd2571607e241ecf590ed94b12d87c94babe36db6", "something"], - ["0xf735071cbee190d76b704ce68384fc21e389fbe7", "something"], + ["0x04110d816c380812a427968ece99b1c963dfbce6", 1], + ["0x095e7baea6a6c7c4c2dfeb977efac326af552d87", 2], + ["0x0a517d755cebbf66312b30fff713666a9cb917e0", 3], + ["0x24dd378f51adc67a50e339e8031fe9bd4aafab36", 4], + ["0x293f982d000532a7861ab122bdc4bbfd26bf9030", 5], + ["0x2cf5732f017b0cf1b1f13a1478e10239716bf6b5", 1], + ["0x31c640b92c21a1f1465c91070b4b3b4d6854195f", 2], + ["0x37f998764813b136ddf5a754f34063fd03065e36", 3], + ["0x37fa399a749c121f8a15ce77e3d9f9bec8020d7a", 4], + ["0x4f36659fa632310b6ec438dea4085b522a2dd077", 5], + ["0x62c01474f089b07dae603491675dc5b5748f7049", 1], + ["0x729af7294be595a0efd7d891c9e51f89c07950c7", 2], + ["0x83e3e5a16d3b696a0314b30b2534804dd5e11197", 3], + ["0x8703df2417e0d7c59d063caa9583cb10a4d20532", 4], + ["0x8dffcd74e5b5923512916c6a64b502689cfa65e1", 5], + ["0x95a4d7cccb5204733874fa87285a176fe1e9e240", 1], + ["0x99b2fcba8120bedd048fe79f5262a6690ed38c39", 2], + ["0xa4202b8b8afd5354e3e40a219bdc17f6001bf2cf", 3], + ["0xa94f5374fce5edbc8e2a8697c15331677e6ebf0b", 4], + ["0xa9647f4a0a14042d91dc33c0328030a7157c93ae", 5], + ["0xaa6cffe5185732689c18f37a7f86170cb7304c2a", 1], + ["0xaae4a2e3c51c04606dcb3723456e58f3ed214f45", 2], + ["0xc37a43e940dfb5baf581a0b82b351d48305fc885", 3], + ["0xd2571607e241ecf590ed94b12d87c94babe36db6", 4], + ["0xf735071cbee190d76b704ce68384fc21e389fbe7", 5], ["0x04110d816c380812a427968ece99b1c963dfbce6", null], ["0x095e7baea6a6c7c4c2dfeb977efac326af552d87", null], ["0x0a517d755cebbf66312b30fff713666a9cb917e0", null], @@ -52,7 +52,7 @@ ["0xd2571607e241ecf590ed94b12d87c94babe36db6", null], ["0xf735071cbee190d76b704ce68384fc21e389fbe7", null] ], - "root": "0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421" + "root": "0x25d946ba89d4510afdd0b376be458cdf58c19c7349702ba9d5f76a1abd97cf9a" } } diff --git a/quarkchain/.DS_Store b/quarkchain/.DS_Store index 51ef04fd2b576839007505a414acba53e6f2e53d..ffdbe4288d54abdf705ee531b1a36312490418e6 100644 GIT binary patch delta 211 zcmZp1XmQx^SIAIIR9r$-Qb<}RUO+f0F*!TED8Do>#W_DGzsM=4G&Np;zc?em+%q{p zFQ6#3EHkxyvZ`=^EJ!%HC_g7BAip@XBr`uRGCI8|GsQnGtvI!0@+4tj&WKE)mf*~) z)X5)(MHx9K^NWZ@g&kl30S*Diki7i7R8D1w0I&dqvV%8-!N9?w&EUuo%n-v+$k4zr zonbM#W_DGzsM=4G&LeKIX^ErvnqA6 zo^XI{bb3){iho*KacW6qya0c3a#4OxPC$NfW=Up#-sIWBzCs|;jQny>pss+T)UwRf z^2r~CMeCIv0vNzR*+B!s5MWS-Fc=t=9nurPYz_g&ki7i7R8CH12XCk>2ZJ_)BZC)1 zFhd4IIYS@AEQVzaI~a~K++ujc@Qsm$QH)WEQJc|((U#Gk(StFNF^DmQF_JNgF&d(u Ni4jC^ZWXEL1^`)-IiLUl diff --git a/quarkchain/evm/.DS_Store b/quarkchain/evm/.DS_Store index de167ec12334dbb54824dc8b0a6b133ff73518c7..2468d7546218952a4d3c5d315383129b1aeb2c58 100644 GIT binary patch delta 13 UcmZp1XmQxUEX>%om_?Wu03JO9p#T5? 
delta 13 UcmZp1XmQxUEX>%tm_?Wu03IX*oB#j- diff --git a/quarkchain/evm/stake_trie.py b/quarkchain/evm/stake_trie.py index 78a5a97f7..a36873720 100644 --- a/quarkchain/evm/stake_trie.py +++ b/quarkchain/evm/stake_trie.py @@ -4,126 +4,46 @@ import rlp from quarkchain import utils from quarkchain.evm.fast_rlp import encode_optimized +from rlp.sedes import big_endian_int +from quarkchain.evm.utils import int_to_big_endian, big_endian_to_int +from quarkchain.evm.trie import ( + to_bytes, + bin_to_nibbles, + nibbles_to_bin, + with_terminator, + without_terminator, + adapt_terminator, + pack_nibbles, + unpack_to_nibbles, + starts_with, + is_key_value_type, +) rlp_encode = encode_optimized bin_to_nibbles_cache = {} -def to_bytes(value): - if isinstance(value, bytes): - return value - if isinstance(value, str): - return bytes(value, "utf-8") - if isinstance(value, int): - return bytes(str(value), "utf-8") - - hti = {} for i, c in enumerate("0123456789abcdef"): hti[c] = i +itoh = {} +for i, c in enumerate("0123456789abcdef"): + itoh[i] = c -def bin_to_nibbles(s): - """convert bytes s to nibbles (half-bytes) - >>> bin_to_nibbles("") - [] - >>> bin_to_nibbles("h") - [6, 8] - >>> bin_to_nibbles("he") - [6, 8, 6, 5] - >>> bin_to_nibbles("hello") - [6, 8, 6, 5, 6, 12, 6, 12, 6, 15] +def nibbles_to_address(nibbles): + """convert nibbles to address (hex public key address starting with "0x") + >>> nibbles_to_address([2, 4, 13, 13]) + "0x24dd" """ - return [hti[c] for c in s.hex()] - - -def nibbles_to_bin(nibbles): if any(x > 15 or x < 0 for x in nibbles): raise Exception("nibbles can only be [0,..15]") - - if len(nibbles) % 2: - raise Exception("nibbles must be of even numbers") - - res = bytearray() - for i in range(0, len(nibbles), 2): - res.append(16 * nibbles[i] + nibbles[i + 1]) - return bytes(res) - - -NIBBLE_TERMINATOR = 16 - - -def with_terminator(nibbles): - nibbles = nibbles[:] - if not nibbles or nibbles[-1] != NIBBLE_TERMINATOR: - nibbles.append(NIBBLE_TERMINATOR) - return nibbles - - -def without_terminator(nibbles): - nibbles = nibbles[:] - if nibbles and nibbles[-1] == NIBBLE_TERMINATOR: - del nibbles[-1] - return nibbles - - -def adapt_terminator(nibbles, has_terminator): - if has_terminator: - return with_terminator(nibbles) - else: - return without_terminator(nibbles) - - -def pack_nibbles(nibbles): - """pack nibbles to binary - - :param nibbles: a nibbles sequence. 
may have a terminator - """ - - if nibbles[-1:] == [NIBBLE_TERMINATOR]: - flags = 2 - nibbles = nibbles[:-1] - else: - flags = 0 - - oddlen = len(nibbles) % 2 - flags |= oddlen # set lowest bit if odd number of nibbles - if oddlen: - nibbles = [flags] + nibbles - else: - nibbles = [flags, 0] + nibbles - o = bytearray() - for i in range(0, len(nibbles), 2): - o.append(16 * nibbles[i] + nibbles[i + 1]) - return bytes(o) - - -def unpack_to_nibbles(bindata): - """unpack packed binary data to nibbles - - :param bindata: binary packed from nibbles - :return: nibbles sequence, may have a terminator - """ - o = bin_to_nibbles(bindata) - flags = o[0] - if flags & 2: - o.append(NIBBLE_TERMINATOR) - if flags & 1 == 1: - o = o[1:] - else: - o = o[2:] - return o - - -def starts_with(full, part): - """ test whether the items in the part is - the leading items of the full - """ - if len(full) < len(part): - return False - return full[: len(part)] == part + res = "0x" + for n in nibbles: + res += itoh[n] + return res (NODE_TYPE_BLANK, NODE_TYPE_LEAF, NODE_TYPE_EXTENSION, NODE_TYPE_BRANCH) = tuple( @@ -131,20 +51,19 @@ def starts_with(full, part): ) -def is_key_value_type(node_type): - return node_type in [NODE_TYPE_LEAF, NODE_TYPE_EXTENSION] - - +NIBBLE_TERMINATOR = 16 BLANK_NODE = b"" BLANK_ROOT = utils.sha3_256(rlp.encode(b"")) -class Trie(object): +class Stake_Trie(object): def __init__(self, db, root_hash=BLANK_ROOT): """it also present a dictionary like interface :param db key value database - :root: blank or trie node in form of [key, value] or [v0,v1..v15,v] + :root: blank or stake trie node in form of [key, [value, token]] or [[v0, token],[v1, token]..[v15, token],[v, token]] + :token: the total numbers of tokens rooted at that node (i.e., the number of tokens below it) + All operations that modify the trie must adjust the token information """ self.db = db # Pass in a database object directly self.set_root_hash(root_hash) @@ -209,21 +128,37 @@ def _delete_child_storage(self, node): self._delete_child_storage(self._decode_to_node(node[1])) def _encode_node(self, node, put_in_db=True): + """ + All the operations that modify the trie must adjust the stake information + """ if node == BLANK_NODE: return BLANK_NODE # assert isinstance(node, list) + node_type = self._get_node_type(node) + stake_sum = 0 + if is_key_value_type(node_type): + stake_sum = big_endian_to_int(node[1][1]) + elif node_type == NODE_TYPE_BRANCH: + for i in range(17): + if node[i] != BLANK_NODE: + stake_sum += big_endian_to_int(node[i][1]) + rlpnode = rlp_encode(node) - if len(rlpnode) < 32: - return node + # may fix + # if len(rlpnode) < 32: + # return node hashkey = utils.sha3_256(rlpnode) if put_in_db: self.db.put(hashkey, rlpnode) - return hashkey + return [hashkey, int_to_big_endian(stake_sum)] def _decode_to_node(self, encoded): if encoded == BLANK_NODE: return BLANK_NODE + + encoded = encoded[0] + if isinstance(encoded, list): return encoded o = rlp.decode(self.db[encoded]) @@ -261,14 +196,14 @@ def _get(self, node, key): if node_type == NODE_TYPE_BRANCH: # already reach the expected node if not key: - return node[-1] + return node[-1][1] sub_node = self._decode_to_node(node[key[0]]) return self._get(sub_node, key[1:]) # key value node curr_key = without_terminator(unpack_to_nibbles(node[0])) if node_type == NODE_TYPE_LEAF: - return node[1] if key == curr_key else BLANK_NODE + return node[1][1] if key == curr_key else BLANK_NODE if node_type == NODE_TYPE_EXTENSION: # traverse child nodes @@ -294,11 +229,11 @@ def 
_update(self, node, key, value): node_type = self._get_node_type(node) if node_type == NODE_TYPE_BLANK: - return [pack_nibbles(with_terminator(key)), value] + return [pack_nibbles(with_terminator(key)), [value, value]] elif node_type == NODE_TYPE_BRANCH: if not key: - node[-1] = value + node[-1] = [value, value] else: new_node = self._update_and_delete_storage( self._decode_to_node(node[key[0]]), key[1:], value @@ -333,7 +268,7 @@ def _update_kv_node(self, node, key, value): if remain_key == [] == remain_curr_key: if not is_inner: - return [node[0], value] + return [node[0], [value, value]] new_node = self._update_and_delete_storage( self._decode_to_node(node[1]), remain_key, value ) @@ -347,7 +282,7 @@ def _update_kv_node(self, node, key, value): new_node = [BLANK_NODE] * 17 new_node[-1] = node[1] new_node[remain_key[0]] = self._encode_node( - [pack_nibbles(with_terminator(remain_key[1:])), value] + [pack_nibbles(with_terminator(remain_key[1:])), [value, value]] ) else: new_node = [BLANK_NODE] * 17 @@ -364,10 +299,10 @@ def _update_kv_node(self, node, key, value): ) if remain_key == []: - new_node[-1] = value + new_node[-1] = [value, value] else: new_node[remain_key[0]] = self._encode_node( - [pack_nibbles(with_terminator(remain_key[1:])), value] + [pack_nibbles(with_terminator(remain_key[1:])), [value, value]] ) if prefix_length: @@ -405,160 +340,11 @@ def _getany(self, node, reverse=False, path=[]): return curr_key if node_type == NODE_TYPE_EXTENSION: - curr_key = without_terminator(unpack_to_nibbles(node[0])) sub_node = self._decode_to_node(node[1]) return curr_key + self._getany( sub_node, reverse=reverse, path=path + curr_key ) - def _split(self, node, key): - node_type = self._get_node_type(node) - if node_type == NODE_TYPE_BLANK: - return BLANK_NODE, BLANK_NODE - elif not key: - return BLANK_NODE, node - elif node_type == NODE_TYPE_BRANCH: - b1 = node[: key[0]] - b1 += [""] * (17 - len(b1)) - b2 = node[key[0] + 1 :] - b2 = [""] * (17 - len(b2)) + b2 - b1[16], b2[16] = b2[16], b1[16] - sub = self._decode_to_node(node[key[0]]) - sub1, sub2 = self._split(sub, key[1:]) - b1[key[0]] = self._encode_node(sub1) if sub1 else "" - b2[key[0]] = self._encode_node(sub2) if sub2 else "" - return ( - self._normalize_branch_node(b1) - if len([x for x in b1 if x]) - else BLANK_NODE, - self._normalize_branch_node(b2) - if len([x for x in b2 if x]) - else BLANK_NODE, - ) - - descend_key = without_terminator(unpack_to_nibbles(node[0])) - if node_type == NODE_TYPE_LEAF: - if descend_key < key: - return node, BLANK_NODE - else: - return BLANK_NODE, node - elif node_type == NODE_TYPE_EXTENSION: - sub_node = self._decode_to_node(node[1]) - sub_key = key[len(descend_key) :] - if starts_with(key, descend_key): - sub1, sub2 = self._split(sub_node, sub_key) - subtype1 = self._get_node_type(sub1) - subtype2 = self._get_node_type(sub2) - if not sub1: - o1 = BLANK_NODE - elif subtype1 in (NODE_TYPE_LEAF, NODE_TYPE_EXTENSION): - new_key = key[: len(descend_key)] + unpack_to_nibbles(sub1[0]) - o1 = [pack_nibbles(new_key), sub1[1]] - else: - o1 = [ - pack_nibbles(key[: len(descend_key)]), - self._encode_node(sub1), - ] - if not sub2: - o2 = BLANK_NODE - elif subtype2 in (NODE_TYPE_LEAF, NODE_TYPE_EXTENSION): - new_key = key[: len(descend_key)] + unpack_to_nibbles(sub2[0]) - o2 = [pack_nibbles(new_key), sub2[1]] - else: - o2 = [ - pack_nibbles(key[: len(descend_key)]), - self._encode_node(sub2), - ] - return o1, o2 - elif descend_key < key[: len(descend_key)]: - return node, BLANK_NODE - elif descend_key > key[: 
len(descend_key)]: - return BLANK_NODE, node - else: - return BLANK_NODE, BLANK_NODE - - def split(self, key): - key = bin_to_nibbles(key) - r1, r2 = self._split(self.root_node, key) - t1, t2 = Trie(self.db), Trie(self.db) - t1.root_node, t2.root_node = r1, r2 - return t1, t2 - - def _merge(self, node1, node2): - # assert isinstance(node1, list) or not node1 - # assert isinstance(node2, list) or not node2 - node_type1 = self._get_node_type(node1) - node_type2 = self._get_node_type(node2) - if not node1: - return node2 - if not node2: - return node1 - if node_type1 != NODE_TYPE_BRANCH and node_type2 != NODE_TYPE_BRANCH: - descend_key1 = unpack_to_nibbles(node1[0]) - descend_key2 = unpack_to_nibbles(node2[0]) - # find longest common prefix - prefix_length = 0 - for i in range(min(len(descend_key1), len(descend_key2))): - if descend_key1[i] != descend_key2[i]: - break - prefix_length = i + 1 - if prefix_length: - sub1 = ( - self._decode_to_node(node1[1]) - if node_type1 == NODE_TYPE_EXTENSION - else node1[1] - ) - new_sub1 = ( - [pack_nibbles(descend_key1[prefix_length:]), sub1] - if descend_key1[prefix_length:] - else sub1 - ) - sub2 = ( - self._decode_to_node(node2[1]) - if node_type2 == NODE_TYPE_EXTENSION - else node2[1] - ) - new_sub2 = ( - [pack_nibbles(descend_key2[prefix_length:]), sub2] - if descend_key2[prefix_length:] - else sub2 - ) - return [ - pack_nibbles(descend_key1[:prefix_length]), - self._encode_node(self._merge(new_sub1, new_sub2)), - ] - - nodes = [[node1], [node2]] - for (node, node_type) in zip(nodes, [node_type1, node_type2]): - if node_type != NODE_TYPE_BRANCH: - new_node = [BLANK_NODE] * 17 - curr_key = unpack_to_nibbles(node[0][0]) - new_node[curr_key[0]] = ( - self._encode_node([pack_nibbles(curr_key[1:]), node[0][1]]) - if curr_key[0] < 16 and curr_key[1:] - else node[0][1] - ) - node[0] = new_node - node1, node2 = nodes[0][0], nodes[1][0] - assert len([i for i in range(17) if node1[i] and node2[i]]) <= 1 - new_node = [ - self._encode_node( - self._merge( - self._decode_to_node(node1[i]), self._decode_to_node(node2[i]) - ) - ) - if node1[i] and node2[i] - else node1[i] or node2[i] - for i in range(17) - ] - return new_node - - @classmethod - def unsafe_merge(cls, trie1, trie2): - t = Trie(trie1.db) - t.root_node = t._merge(trie1.root_node, trie2.root_node) - return t - def _iter(self, node, key, reverse=False, path=[]): # print('iter', node, key, 'reverse =', reverse, 'path =', path) node_type = self._get_node_type(node) @@ -641,6 +427,7 @@ def _delete_node_storage(self, node): return # assert isinstance(node, list) encoded = self._encode_node(node, put_in_db=False) + encoded = encoded[0] if len(encoded) < 32: return """ @@ -823,7 +610,7 @@ def _iter_branch(self, node): if node_type == NODE_TYPE_EXTENSION: sub_tree = self._iter_branch(self._decode_to_node(node[1])) else: - sub_tree = [(to_bytes(NIBBLE_TERMINATOR), node[1])] + sub_tree = [(to_bytes(NIBBLE_TERMINATOR), node[1][1])] # prepend key of this node to the keys of children for sub_key, sub_value in sub_tree: @@ -837,7 +624,7 @@ def _iter_branch(self, node): full_key = (bytes(str(i), "ascii") + b"+" + sub_key).strip(b"+") yield (full_key, sub_value) if node[16]: - yield (to_bytes(NIBBLE_TERMINATOR), node[-1]) + yield (to_bytes(NIBBLE_TERMINATOR), node[-1][1]) def iter_branch(self): for key_str, value in self._iter_branch(self.root_node): @@ -869,7 +656,7 @@ def _to_dict(self, node): if node_type == NODE_TYPE_EXTENSION: sub_dict = self._to_dict(self._decode_to_node(node[1])) else: - sub_dict = 
{to_bytes(NIBBLE_TERMINATOR): node[1]} + sub_dict = {to_bytes(NIBBLE_TERMINATOR): node[1][1]} # prepend key of this node to the keys of children res = {} @@ -888,7 +675,7 @@ def _to_dict(self, node): res[full_key] = sub_value if node[16]: - res[to_bytes(NIBBLE_TERMINATOR)] = node[-1] + res[to_bytes(NIBBLE_TERMINATOR)] = node[-1][1] return res def to_dict(self): @@ -908,24 +695,6 @@ def get(self, key): raise Exception("Key must be bytes") return self._get(self.root_node, bin_to_nibbles(to_bytes(key))) - def __len__(self): - return self._get_size(self.root_node) - - def __getitem__(self, key): - return self.get(key) - - def __setitem__(self, key, value): - return self.update(key, value) - - def __delitem__(self, key): - return self.delete(key) - - def __iter__(self): - return iter(self.to_dict()) - - def __contains__(self, key): - return self.get(key) != BLANK_NODE - def update(self, key, value): """ :param key: a bytes @@ -952,6 +721,119 @@ def root_hash_valid(self): return True return self.root_hash in self.db + # New functions for POS (Photon) + def _get_total_stake(self, node): + """Get the total stake + + :param node: node in form of list, or BLANK_NODE + """ + if node == BLANK_NODE: + return 0 + + node_type = self._get_node_type(node) + + if is_key_value_type(node_type): + stake_is_node = node_type == NODE_TYPE_EXTENSION + if stake_is_node: + return self._get_total_stake(self._decode_to_node(node[1])) + else: + return big_endian_to_int(node[1][1]) + elif node_type == NODE_TYPE_BRANCH: + tokens = [ + self._get_total_stake(self._decode_to_node(node[x])) for x in range(16) + ] + tokens = tokens + [big_endian_to_int(node[-1][1]) if node[-1] else 0] + return sum(tokens) + + def _get_total_stake_from_root_node(self, node): + """ Get the total stake directly from root node informaiton + + :param node: node in form of list, or BLANK_NODE + """ + if node == BLANK_NODE: + return 0 + + node_type = self._get_node_type(node) + + if is_key_value_type(node_type): + return big_endian_to_int(node[1][1]) + else: + stake_sum = 0 + for i in range(17): + if node[i] != BLANK_NODE: + stake_sum += big_endian_to_int(node[i][1]) + return stake_sum + + def _select_staker(self, node, value): + """ Get the selected staker address given the pseudo-randomly value + + :param node: node in form of list, or BLANK_NODE + :value: pseudo-randomly selected value + """ + node_type = self._get_node_type(node) + assert value >= 0 + + if node_type == NODE_TYPE_BLANK: + return None + + if node_type == NODE_TYPE_BRANCH: + scan_range = list(range(17)) + for i in scan_range: + if node[i] != BLANK_NODE: + if big_endian_to_int(node[i][1]) >= value: + sub_node = self._decode_to_node(node[i]) + o = self._select_staker(sub_node, value) + return [i] + o if o is not None else None + else: + value = value - big_endian_to_int(node[i][1]) + return None + + if node_type == NODE_TYPE_LEAF: + descend_key = without_terminator(unpack_to_nibbles(node[0])) + return descend_key if value <= big_endian_to_int(node[1][1]) else None + + elif node_type == NODE_TYPE_EXTENSION: + descend_key = without_terminator(unpack_to_nibbles(node[0])) + if value <= big_endian_to_int(node[1][1]): + sub_node = self._decode_to_node(node[1]) + o = self._select_staker(sub_node, value) + return descend_key + o if o else None + else: + return None + + return None + + def _check_total_tokens(self): + if self.root_node != BLANK_NODE: + assert self._get_total_stake( + self.root_node + ) == self._get_total_stake_from_root_node(self.root_node) + + def get_total_stake(self): + 
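# ---- Editorial note: illustration only, not part of this patch ----------------
# The _select_staker() walk above is a cumulative-weight descent: every child
# reference in the stake trie is a [hash, stake] pair whose second element
# caches the total stake of that subtree, so the search subtracts child weights
# from the drawn value until it falls inside one subtree and then recurses.
# Below is a minimal stand-alone sketch of the same idea over a plain weighted
# tree; pick_weighted() and subtree_weight() are hypothetical names used only
# here, and the subtree weight is recomputed for brevity where the trie instead
# reads it from the cached [hash, stake] pair next to each child.

def subtree_weight(node):
    # node is ("leaf", name, weight) or ("branch", [child, ...])
    if node[0] == "leaf":
        return node[2]
    return sum(subtree_weight(c) for c in node[1])

def pick_weighted(node, r):
    # r is drawn from (0, total weight]; returns the selected leaf's name
    if node[0] == "leaf":
        return node[1] if r <= node[2] else None
    for child in node[1]:
        w = subtree_weight(child)
        if w >= r:
            return pick_weighted(child, r)
        r -= w
    return None

# Example: with leaves ("a", 1), ("b", 2), ("c", 3), the draws r = 1, r in
# {2, 3} and r in {4, 5, 6} select "a", "b" and "c" respectively, i.e. each
# staker is chosen with probability proportional to its stake, which is the
# property select_staker() above is meant to provide.
# --------------------------------------------------------------------------------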
return self._get_total_stake_from_root_node(self.root_node) + + def select_staker(self, value): + o = self._select_staker(self.root_node, value) + return nibbles_to_address(o) + + def __len__(self): + return self._get_size(self.root_node) + + def __getitem__(self, key): + return self.get(key) + + def __setitem__(self, key, value): + return self.update(key, value) + + def __delitem__(self, key): + return self.delete(key) + + def __iter__(self): + return iter(self.to_dict()) + + def __contains__(self, key): + return self.get(key) != BLANK_NODE + if __name__ == "__main__": import sys @@ -967,9 +849,9 @@ def encode_node(nd): if len(sys.argv) >= 2: if sys.argv[1] == "insert": - t = Trie(_db, bytes.fromhex(sys.argv[3])) + t = Stake_Trie(_db, bytes.fromhex(sys.argv[3])) t.update(bytes(sys.argv[4], "ascii"), bytes(sys.argv[5], "ascii")) print(encode_node(t.root_hash)) elif sys.argv[1] == "get": - t = Trie(_db, bytes.fromhex(sys.argv[3])) + t = Stake_Trie(_db, bytes.fromhex(sys.argv[3])) print(t.get(bytes(sys.argv[4], "ascii"))) diff --git a/quarkchain/evm/tests/.DS_Store b/quarkchain/evm/tests/.DS_Store new file mode 100644 index 0000000000000000000000000000000000000000..b8c5d0c471bd48439ee835d31b678dcd5dd494f8 GIT binary patch literal 6148 zcmeHKO-sW-5S^_Bt5ql>e%x~OBzP0Ngi;Tl{Q)&;0|rZ5O^fuFi~qsD;IGs-v!iU2 z)T5}(l$p0*Z#Mf%cC$p}`pfYJQICi^aK`p7LW6NXd(B$fvIBIIBc}xo@jIt^ZL~F* z0;a%kQ-I&yuDr83O(}eTS2vT{D4onwjNt9z3V!x3hSDc%PAeKw0>7t>^Hf-G71j}E z$>Ogg^HRxku$G6P z>Ot1BpeJO=nsT(Zk>fH@W51SzSJp7Q==d5iPXi{;m_VTdafJ39tD0ghk5DEPZ_3CT zhZ*-O88@s!^lavR|39#PArDovKd1oCY_?9 N_PERMUTATIONS: break if pairs.get("nopermute", None) is not None and pairs["nopermute"]: permut = pairs["in"] N_PERMUTATIONS = 1 - t = trie.Trie(InMemoryDb()) + t = stake_trie.Stake_Trie(InMemoryDb()) + # insert the account for k, v in permut: # logger.debug('updating with (%s, %s)' %(k, v)) if v is not None: t.update(k, v) - else: - t.delete(k) - # make sure we have deletes at the end - for k, v in deletes: - t.delete(k) + + # check the total account of stakes + if t._check_total_tokens() == False: + raise Exception( + "Mismatch_total_stake: %r %r %r" + % (name, t.root_node, t.get_total_stake()) + ) + + if t.get_total_stake() != 75: + raise Exception( + "Mismatch_total_stake: %r %r %r" + % (name, t.root_node, t.get_total_stake()) + ) + + # check the root hash if pairs["root"] != "0x" + t.root_hash.hex(): raise Exception( - "Mismatch: %r %r %r %r" + "Mismatch_root_hash: %r %r %r %r" % ( name, pairs["root"], @@ -85,6 +110,46 @@ def _dec(x): ) ) + for _ in range(N_RANDOM): + # pseudo-randomly select a stake + chosen_num = random.random() * t.get_total_stake() + chosen = chosen_num + + for k, v in stakerlist: + curr_val = big_endian_to_int(v) + if chosen <= curr_val: + result = k + break + chosen -= curr_val + + if _dec(t.select_staker(chosen_num)) != result: + raise Exception( + "Mismatch_random_selection: %r %r %r %r %r" + % ( + name, + t.root_node, + chosen_num, + t.select_staker(chosen_num), + result, + ) + ) + + # delete the corresponding account + for k, v in permut: + # logger.debug('updating with (%s, %s)' %(k, v)) + if v is None: + t.delete(k) + + # make sure we have deletes at the end + for k, v in deletes: + t.delete(k) + + # make sure we have deleted all the stakes + if t.get_total_stake() != 0: + raise Exception( + "Mismatch: %r %r %r" % (name, t.root_node, t.get_total_stake()) + ) + if __name__ == "__main__": for name, pairs in load_tests_dict().items():
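# ---- Editorial sketch: not part of this patch -----------------------------------
# A minimal, self-contained example of the Stake_Trie API exercised by
# run_test() above, using only names the patch itself defines or imports
# (Stake_Trie, update, get_total_stake, select_staker, InMemoryDb,
# int_to_big_endian) and addresses taken from the JSON fixture. The helpers
# cumulative_pick() and demo() are illustrative, not patch code, and the
# reference scan assumes the staker list is sorted by address so that it
# matches the trie's nibble ordering, just as the fixture is.
import quarkchain.evm.stake_trie as stake_trie
from quarkchain.db import InMemoryDb
from quarkchain.evm.utils import int_to_big_endian

def cumulative_pick(stakers, r):
    # Linear reference implementation: subtract each stake from r until the
    # remaining value fits inside the current staker's stake.
    for addr, stake in stakers:
        if r <= stake:
            return addr
        r -= stake
    return None

def demo():
    stakers = [  # (20-byte address, integer stake), sorted by address
        (bytes.fromhex("04110d816c380812a427968ece99b1c963dfbce6"), 1),
        (bytes.fromhex("095e7baea6a6c7c4c2dfeb977efac326af552d87"), 2),
        (bytes.fromhex("0a517d755cebbf66312b30fff713666a9cb917e0"), 3),
    ]
    t = stake_trie.Stake_Trie(InMemoryDb())
    for addr, stake in stakers:
        t.update(addr, int_to_big_endian(stake))  # stakes stored big-endian
    assert t.get_total_stake() == 6
    for r in (1, 2, 4, 6):
        picked = bytes.fromhex(t.select_staker(r)[2:])  # "0x..." hex string
        assert picked == cumulative_pick(stakers, r)
# ----------------------------------------------------------------------------------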