New blockchain class implementation
mariano54 committed Sep 24, 2019
1 parent a5688f9 commit 55063e0
Showing 17 changed files with 252 additions and 191 deletions.
1 change: 1 addition & 0 deletions .flake8
@@ -1,2 +1,3 @@
[flake8]
max-line-length = 120
exclude = ./typings/**/*
278 changes: 152 additions & 126 deletions src/blockchain.py

Large diffs are not rendered by default.

11 changes: 11 additions & 0 deletions src/config/farmer.yaml
@@ -4,9 +4,20 @@ port: 8001
# Private key used for spending farming rewards
farmer_sk: "43aead2006e8c67126adb596ce8f47aa01b956daa0214e06cd1a82ebd73aa88d"

# Address where the money will be sent
# sha256(farmer_sk.get_public_key()).digest()
farmer_target: "c28592540bd281f603bb479b3692139339f10464413b2d307a25d8d145ce1833"

# Private keys of pool operators
# [PrivateKey.from_seed(b'pool key 0'), PrivateKey.from_seed(b'pool key 1')]
pool_sks:
- "4c7c4ca5c7cf99e1e6dbd64a61d20698270a6e1939b2b2eb2e1e034f9568712a"
- "0c225913cb84b4601f83c89e369dd3ac696e37facce3ebfab4b87abe611db309"

# Address of pool operator
# sha256(PrivateKey.from_seed(b'0').get_public_key().serialize()).digest()
pool_target: "9940b95222a1d19abb73c192f2c10dc65b32bcc7a703db1b40456f2dbf1e416e"

pool_share_threshold: 50 # To send to pool, must be expected to take less than these seconds
propagate_threshold: 30 # To propagate to network, must be expected to take less than these seconds
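The comments above describe how the target values are derived from keys. A minimal sketch, not part of this config file, that regenerates pool_target from the seed named in the comment, assuming the blspy PrivateKey API used elsewhere in this commit:

from hashlib import sha256
from blspy import PrivateKey

# Rebuild the pool target exactly as the comment describes:
# sha256(PrivateKey.from_seed(b'0').get_public_key().serialize()).digest()
pool_sk = PrivateKey.from_seed(b'0')
pool_target_hex = sha256(pool_sk.get_public_key().serialize()).digest().hex()
# pool_target_hex should match the pool_target string above.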

14 changes: 8 additions & 6 deletions src/farmer.py
@@ -18,8 +18,6 @@

class Database:
lock = asyncio.Lock()
pool_sks = [PrivateKey.from_seed(b'pool key 0'), PrivateKey.from_seed(b'pool key 1')]
pool_target = sha256(PrivateKey.from_seed(b'0').get_public_key().serialize()).digest()
plotter_responses_header_hash: Dict[bytes32, bytes32] = {}
plotter_responses_challenge: Dict[bytes32, bytes32] = {}
plotter_responses_proofs: Dict[bytes32, ProofOfSpace] = {}
@@ -84,7 +82,8 @@ async def respond_proof_of_space(response: plotter_protocol.RespondProofOfSpace)
"""

async with db.lock:
assert response.proof.pool_pubkey in [sk.get_public_key() for sk in db.pool_sks]
pool_sks: List[PrivateKey] = [PrivateKey.from_bytes(bytes.fromhex(ce)) for ce in config["pool_sks"]]
assert response.proof.pool_pubkey in [sk.get_public_key() for sk in pool_sks]

challenge_hash: bytes32 = db.plotter_responses_challenge[response.quality]
challenge_height: uint32 = db.challenge_to_height[challenge_hash]
@@ -199,9 +198,12 @@ async def proof_of_space_finalized(proof_of_space_finalized: farmer_protocol.Pro
db.current_height = proof_of_space_finalized.height

# TODO: ask the pool for this information
coinbase: CoinbaseInfo = CoinbaseInfo(db.current_height + 1, calculate_block_reward(db.current_height),
db.pool_target)
coinbase_signature: PrependSignature = db.pool_sks[0].sign_prepend(coinbase.serialize())
coinbase: CoinbaseInfo = CoinbaseInfo(uint32(db.current_height + 1),
calculate_block_reward(db.current_height),
bytes.fromhex(config["pool_target"]))

pool_sks: List[PrivateKey] = [PrivateKey.from_bytes(bytes.fromhex(ce)) for ce in config["pool_sks"]]
coinbase_signature: PrependSignature = pool_sks[0].sign_prepend(coinbase.serialize())
db.coinbase_rewards[uint32(db.current_height + 1)] = (coinbase, coinbase_signature)

log.info(f"Current height set to {db.current_height}")
20 changes: 4 additions & 16 deletions src/full_node.py
@@ -201,6 +201,7 @@ async def sync():
async with db.lock:
fork_point: TrunkBlock = db.blockchain.find_fork_point(trunks)

# TODO: optimize, send many requests at once, and for more blocks
for height in range(fork_point.challenge.height + 1, tip_height + 1):
# Only download from fork point (what we don't have)
async with db.lock:
@@ -368,7 +369,6 @@ async def request_header_hash(request: farmer_protocol.RequestHeaderHash) -> Asy

# Creates the block header
prev_header_hash: bytes32 = target_head.header.get_hash()
# prev_header_hash: bytes32 = bytes32([0] * 32)
timestamp: uint64 = uint64(time.time())

# TODO: use a real BIP158 filter based on transactions
@@ -524,18 +524,6 @@ async def unfinished_block(unfinished_block: peer_protocol.UnfinishedBlock) -> A
async def block(block: peer_protocol.Block) -> AsyncGenerator[OutboundMessage, None]:
"""
Receive a full block from a peer full node (or ourselves).
Pseudocode:
if we have block:
return
if we don't care about block:
return
if block invalid:
return
Store block
if block actually good:
propagate to other full nodes
propagate challenge to farmers
propagate challenge to timelords
"""
header_hash = block.block.trunk_block.header.get_hash()

@@ -595,9 +583,9 @@ async def block(block: peer_protocol.Block) -> AsyncGenerator[OutboundMessage, N
block.block.trunk_block.proof_of_time.output.challenge_hash
)
farmer_request = farmer_protocol.ProofOfSpaceFinalized(block.block.trunk_block.challenge.get_hash(),
block.block.trunk_block.challenge.height,
pos_quality,
difficulty)
block.block.trunk_block.challenge.height,
pos_quality,
difficulty)
timelord_request = timelord_protocol.ChallengeStart(block.block.trunk_block.challenge.get_hash())
timelord_request_end = timelord_protocol.ChallengeStart(block.block.trunk_block.proof_of_time.
output.challenge_hash)
5 changes: 2 additions & 3 deletions src/plotter.py
@@ -1,4 +1,3 @@
from hashlib import sha256
import logging
import os
import os.path
@@ -75,8 +74,8 @@ async def new_challenge(new_challenge: plotter_protocol.NewChallenge):
log.warning("Error using prover object. Reinitializing prover object.")
db.provers[filename] = DiskProver(filename)
quality_strings = prover.get_qualities_for_challenge(new_challenge.challenge_hash)
for index, quality_string in enumerate(quality_strings):
quality = sha256(new_challenge.challenge_hash + quality_string).digest()
for index, quality_str in enumerate(quality_strings):
quality = ProofOfSpace.quality_str_to_quality(new_challenge.challenge_hash, quality_str)
db.challenge_hashes[quality] = (new_challenge.challenge_hash, filename, index)
response: plotter_protocol.ChallengeResponse = plotter_protocol.ChallengeResponse(
new_challenge.challenge_hash,
16 changes: 10 additions & 6 deletions src/server/server.py
@@ -4,7 +4,11 @@
from typing import Tuple, AsyncGenerator, Callable, Optional
from types import ModuleType
from lib.aiter.aiter.server import start_server_aiter
from lib.aiter.aiter import parallel_map_aiter, map_aiter, join_aiters, iter_to_aiter, aiter_forker
from lib.aiter.aiter.map_aiter import map_aiter
from lib.aiter.aiter.join_aiters import join_aiters
from lib.aiter.aiter.parallel_map_aiter import parallel_map_aiter
from lib.aiter.aiter.iter_to_aiter import iter_to_aiter
from lib.aiter.aiter.aiter_forker import aiter_forker
from lib.aiter.aiter.push_aiter import push_aiter
from src.types.peer_info import PeerInfo
from src.types.sized_bytes import bytes32
@@ -43,7 +47,7 @@ async def stream_reader_writer_to_connection(pair: Tuple[asyncio.StreamReader, a

async def connection_to_outbound(connection: Connection,
on_connect: Callable[[], AsyncGenerator[OutboundMessage, None]]) -> AsyncGenerator[
OutboundMessage, None]:
Tuple[Connection, OutboundMessage], None]:
"""
Async generator which calls the on_connect async generator method, and yields any outbound messages.
"""
@@ -115,7 +119,7 @@ async def connection_to_message(connection: Connection) -> AsyncGenerator[Tuple[
connection.close()


async def handle_message(pair: Tuple[Connection, bytes], api: ModuleType) -> AsyncGenerator[
async def handle_message(pair: Tuple[Connection, Message], api: ModuleType) -> AsyncGenerator[
Tuple[Connection, OutboundMessage], None]:
"""
Async generator which takes messages, parses them, executes the right
@@ -175,11 +179,11 @@ async def expand_outbound_messages(pair: Tuple[Connection, OutboundMessage]) ->
yield item


async def initialize_pipeline(aiter: AsyncGenerator[Tuple[asyncio.StreamReader, asyncio.StreamWriter], None],
async def initialize_pipeline(aiter,
api: ModuleType, connection_type: NodeType,
on_connect: Callable[[], AsyncGenerator[OutboundMessage, None]] = None,
outbound_aiter: AsyncGenerator[OutboundMessage, None] = None,
wait_for_handshake=False) -> None:
outbound_aiter=None,
wait_for_handshake=False) -> asyncio.Task:

# Maps a stream reader and writer to connection object
connections_aiter = map_aiter(partial_func.partial_async(stream_reader_writer_to_connection,
8 changes: 5 additions & 3 deletions src/server/start_farmer.py
@@ -1,8 +1,9 @@
import asyncio
import logging
from src.types.peer_info import PeerInfo

from typing import List
from blspy import PrivateKey
from src import farmer
from src.types.peer_info import PeerInfo
from src.server.server import start_chia_client, start_chia_server
from src.protocols.plotter_protocol import PlotterHandshake
from src.server.outbound_message import OutboundMessage, Message, Delivery, NodeType
@@ -18,7 +19,8 @@ async def main():
plotter_con_task, plotter_client = await start_chia_client(plotter_peer, farmer, NodeType.PLOTTER)

# Sends a handshake to the plotter
msg = PlotterHandshake([sk.get_public_key() for sk in farmer.db.pool_sks])
pool_sks: List[PrivateKey] = [PrivateKey.from_bytes(bytes.fromhex(ce)) for ce in farmer.config["pool_sks"]]
msg = PlotterHandshake([sk.get_public_key() for sk in pool_sks])
plotter_client.push(OutboundMessage(NodeType.PLOTTER, Message("plotter_handshake", msg), Delivery.BROADCAST))

# Starts the farmer server (which full nodes can connect to)
Empty file added src/types/__init__.py
Empty file.
3 changes: 1 addition & 2 deletions src/types/block_header.py
@@ -1,5 +1,4 @@
from blspy import PrependSignature
from hashlib import sha256
from src.util.streamable import streamable
from src.util.ints import uint64
from src.types.sized_bytes import bytes32
@@ -22,4 +21,4 @@ class BlockHeader:

@property
def header_hash(self):
return bytes32(sha256(self.serialize()).digest())
return self.get_hash()
4 changes: 2 additions & 2 deletions src/types/challenge.py
@@ -8,5 +8,5 @@ class Challenge:
proof_of_space_hash: bytes32
proof_of_time_output_hash: bytes32
height: uint32
total_weight: uint64
total_iters: uint64
total_weight: uint64 # Total weight up to this point, not counting
total_iters: uint64 # Total iterations done up to this point, counting new PoT
4 changes: 2 additions & 2 deletions src/types/classgroup.py
@@ -1,5 +1,5 @@
from ..util.streamable import streamable
from ..util.ints import int1024
from src.util.streamable import streamable
from src.util.ints import int1024


@streamable
27 changes: 14 additions & 13 deletions src/types/full_block.py
@@ -1,3 +1,5 @@
from src.util.ints import uint32, uint64
from src.types.sized_bytes import bytes32
from src.util.streamable import streamable
from src.types.block_body import BlockBody
from src.types.trunk_block import TrunkBlock
@@ -8,25 +10,24 @@ class FullBlock:
trunk_block: TrunkBlock
body: BlockBody

def is_valid(self):
# TODO(alex): review, recursively. A lot of things are not verified.
body_hash = self.body.get_hash()
return (self.trunk_block.header.data.body_hash == body_hash
and self.trunk_block.is_valid()
and self.body.is_valid())

@property
def prev_hash(self):
def prev_header_hash(self) -> bytes32:
return self.trunk_block.header.data.prev_header_hash

@property
def height(self):
return self.trunk_block.challenge.height
def height(self) -> uint32:
if (self.trunk_block.challenge):
return self.trunk_block.challenge.height
else:
return uint32(0)

@property
def weight(self):
return self.trunk_block.challenge.total_weight
def weight(self) -> uint64:
if (self.trunk_block.challenge):
return self.trunk_block.challenge.total_weight
else:
return uint64(0)

@property
def header_hash(self):
def header_hash(self) -> bytes32:
return self.trunk_block.header.header_hash
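With the guards added above, a FullBlock whose trunk_block has no challenge attached reports height 0 and weight 0 instead of raising on the missing attribute. A hedged usage sketch (the helper below is hypothetical, not part of the diff):

from typing import List
from src.types.full_block import FullBlock

def heaviest_tip(tips: List[FullBlock]) -> FullBlock:
    # Blocks without a challenge fall back to weight 0 and height 0,
    # so they never win against a finalized block.
    return max(tips, key=lambda b: (b.weight, b.height))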
8 changes: 7 additions & 1 deletion src/types/proof_of_space.py
@@ -14,6 +14,8 @@ class ProofOfSpace:
size: uint8
proof: List[uint8]

_cached_quality = None

def get_plot_seed(self) -> bytes32:
return self.calculate_plot_seed(self.pool_pubkey, self.plot_pubkey)

@@ -26,10 +28,14 @@ def verify_and_get_quality(self, challenge_hash: bytes32) -> Optional[bytes32]:
bytes(self.proof))
if not quality_str:
return None
self._cached_quality = sha256(challenge_hash + quality_str).digest()
self._cached_quality: bytes32 = self.quality_str_to_quality(challenge_hash, quality_str)
return self._cached_quality

@staticmethod
def calculate_plot_seed(pool_pubkey: PublicKey, plot_pubkey: PublicKey) -> bytes32:
return bytes32(sha256(pool_pubkey.serialize() +
plot_pubkey.serialize()).digest())

@staticmethod
def quality_str_to_quality(challenge_hash: bytes32, quality_str: bytes) -> bytes32:
return bytes32(sha256(challenge_hash + quality_str).digest())
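The new quality_str_to_quality helper centralizes the computation that plotter.py previously inlined. A quick sketch, not in the diff, showing the equivalence with placeholder inputs:

from hashlib import sha256
from src.types.proof_of_space import ProofOfSpace
from src.types.sized_bytes import bytes32

challenge_hash = bytes32(sha256(b"example challenge").digest())  # placeholder value
quality_str = b"example quality string"                          # placeholder value
assert ProofOfSpace.quality_str_to_quality(challenge_hash, quality_str) == \
    bytes32(sha256(challenge_hash + quality_str).digest())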
2 changes: 1 addition & 1 deletion src/types/trunk_block.py
@@ -14,7 +14,7 @@ class TrunkBlock:
header: BlockHeader

@property
def prev_hash(self):
def prev_header_hash(self):
return self.header.data.prev_header_hash

@property
