
Commit

Merge pull request #1 from ElixiumNetwork/forking
Forking
alexdovzhanyn committed Dec 6, 2018
2 parents 6c5767a + f1ac42e commit 95bd3d0
Showing 18 changed files with 442 additions and 149 deletions.
11 changes: 0 additions & 11 deletions config/config.exs
@@ -1,11 +0,0 @@
use Mix.Config

config :logger, backends: [:console, {LoggerFileBackend, :info}]

config :logger, :info,
path: "./log/info.log",
level: :info

if File.exists?("config/#{Mix.env}.exs") do
import_config "#{Mix.env}.exs"
end
4 changes: 0 additions & 4 deletions config/dev.exs.sample

This file was deleted.

100 changes: 0 additions & 100 deletions lib/elixium_node.ex

This file was deleted.

25 changes: 0 additions & 25 deletions lib/elixium_node_app.ex

This file was deleted.

45 changes: 45 additions & 0 deletions lib/node.ex
@@ -0,0 +1,45 @@
defmodule ElixiumNode do
use Application

def start(_type, _args) do
print_ascii_header()
Elixium.Store.Ledger.initialize()

# TODO: Make genesis block mined rather than hard-coded
if !Elixium.Store.Ledger.empty?() do
Elixium.Store.Ledger.hydrate()
end

Elixium.Store.Utxo.initialize()
Elixium.Store.Oracle.start_link(Elixium.Store.Utxo)
Elixium.Pool.Orphan.initialize()
ElixiumNode.Supervisor.start_link()
end

def print_ascii_header do
IO.puts "\e[34m
EEEEEEEEEEEEEEEEEEEEEElllllll iiii iiii
E::::::::::::::::::::El:::::l i::::i i::::i
E::::::::::::::::::::El:::::l iiii iiii
EE::::::EEEEEEEEE::::El:::::l
E:::::E EEEEEE l::::l iiiiiii xxxxxxx xxxxxxxiiiiiii uuuuuu uuuuuu mmmmmmm mmmmmmm
E:::::E l::::l i:::::i x:::::x x:::::x i:::::i u::::u u::::u mm:::::::m m:::::::mm
E::::::EEEEEEEEEE l::::l i::::i x:::::x x:::::x i::::i u::::u u::::u m::::::::::mm::::::::::m
E:::::::::::::::E l::::l i::::i x:::::xx:::::x i::::i u::::u u::::u m::::::::::::::::::::::m
E:::::::::::::::E l::::l i::::i x::::::::::x i::::i u::::u u::::u m:::::mmm::::::mmm:::::m
E::::::EEEEEEEEEE l::::l i::::i x::::::::x i::::i u::::u u::::u m::::m m::::m m::::m
E:::::E l::::l i::::i x::::::::x i::::i u::::u u::::u m::::m m::::m m::::m
E:::::E EEEEEE l::::l i::::i x::::::::::x i::::i u:::::uuuu:::::u m::::m m::::m m::::m
EE::::::EEEEEEEE:::::El::::::li::::::i x:::::xx:::::x i::::::iu:::::::::::::::uum::::m m::::m m::::m
E::::::::::::::::::::El::::::li::::::i x:::::x x:::::x i::::::i u:::::::::::::::um::::m m::::m m::::m
E::::::::::::::::::::El::::::li::::::i x:::::x x:::::x i::::::i uu::::::::uu:::um::::m m::::m m::::m
EEEEEEEEEEEEEEEEEEEEEElllllllliiiiiiiixxxxxxx xxxxxxxiiiiiiii uuuuuuuu uuuummmmmm mmmmmm mmmmmm
\e[32m
Elixium Core Version #{Application.spec(:elixium_core, :vsn)} Node version #{Application.spec(:elixium_node, :vsn)}
\e[0m
\n
"

end

end
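
The top-level ElixiumNode.Supervisor that start/2 hands off to is not shown in this excerpt. As a rough sketch (an assumption about its shape, not the file from this commit), it would only need to supervise the peer router supervisor added below:

defmodule ElixiumNode.Supervisor do
  # Hypothetical sketch; the module actually committed is not rendered on this page.
  use Supervisor

  def start_link do
    Supervisor.start_link(__MODULE__, [], name: __MODULE__)
  end

  def init(_args) do
    # Assumes the only child is the peer router supervisor from lib/peer/supervisor.ex.
    children = [ElixiumNode.PeerRouter.Supervisor]

    Supervisor.init(children, strategy: :one_for_one)
  end
end
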
189 changes: 189 additions & 0 deletions lib/peer/peer_router.ex
@@ -0,0 +1,189 @@
defmodule ElixiumNode.PeerRouter do
use GenServer
require Logger
alias Elixium.Node.Supervisor, as: Peer
alias Elixium.Node.LedgerManager
alias Elixium.Store.Ledger
alias Elixium.Pool.Orphan
alias Elixium.Block
alias Elixium.Transaction
alias Elixium.Validator

def start_link(_args) do
GenServer.start_link(__MODULE__, [], name: __MODULE__)
end

def init(_args), do: {:ok, %{known_transactions: []}}

# Handles received blocks
def handle_info({block = %{type: "BLOCK"}, caller}, state) do
block = Block.sanitize(block)

case LedgerManager.handle_new_block(block) do
:ok ->
# We've received a valid block. We need to stop mining the block we're
# currently working on and start mining the new one. We also need to gossip
# this block to all the nodes we know of.
Logger.info("Received valid block #{block.hash} at index #{:binary.decode_unsigned(block.index)}.")
Peer.gossip("BLOCK", block)

:gossip ->
# For one reason or another, we want to gossip this block without
# restarting our current block calculation. (Perhaps this is a fork block)
Peer.gossip("BLOCK", block)

{:missing_blocks, fork_chain} ->
# We've discovered a fork, but we can't rebuild the fork chain without
# some blocks. Let's request them from our peer.
query_block(:binary.decode_unsigned(hd(fork_chain).index) - 1, caller)

:ignore -> :ignore # We already know of this block. Ignore it
:invalid -> Logger.info("Received invalid block at index #{:binary.decode_unsigned(block.index)}.")
end

{:noreply, state}
end

def handle_info({block_query_request = %{type: "BLOCK_QUERY_REQUEST"}, caller}, state) do
send(caller, {"BLOCK_QUERY_RESPONSE", Ledger.block_at_height(block_query_request.index)})

{:noreply, state}
end

def handle_info({%{type: "BLOCK_QUERY_RESPONSE"}, _caller}, state) do
orphans_ahead =
Ledger.last_block().index
|> :binary.decode_unsigned()
|> Kernel.+(1)
|> Orphan.blocks_at_height()
|> length()

if orphans_ahead > 0 do
# If we have an orphan with an index that is greater than our current latest
# block, we're likely here trying to rebuild the fork chain and have requested
# a block that we're missing.
# TODO: FETCH BLOCKS
end

{:noreply, state}
end

# Handles a batch block query request, where another peer has asked this node to send
# all the blocks it has since a given index.
def handle_info({block_query_request = %{type: "BLOCK_BATCH_QUERY_REQUEST"}, caller}, state) do
# TODO: This is a possible DoS vulnerability if an attacker requests a very
# large number of blocks. We need to figure out a better way to do this; maybe
# limit the maximum number of blocks a peer is allowed to request.
last_block = Ledger.last_block()

blocks =
if last_block != :err && block_query_request.starting_at <= :binary.decode_unsigned(last_block.index) do
block_query_request.starting_at
|> Range.new(:binary.decode_unsigned(last_block.index))
|> Enum.map(&Ledger.block_at_height/1)
|> Enum.filter(&(&1 != :none))
else
[]
end

send(caller, {"BLOCK_BATCH_QUERY_RESPONSE", %{blocks: blocks}})

{:noreply, state}
end

# Handles a batch block query response, where we've requested new blocks and are
# now receiving a response that may contain blocks we don't yet have.
def handle_info({block_query_response = %{type: "BLOCK_BATCH_QUERY_RESPONSE"}, _caller}, state) do
if length(block_query_response.blocks) > 0 do
Logger.info("Received #{length(block_query_response.blocks)} blocks from peer.")

block_query_response.blocks
|> Enum.with_index()
|> Enum.each(fn {block, i} ->
block = Block.sanitize(block)

if LedgerManager.handle_new_block(block) == :ok do
IO.write("Syncing blocks #{round(((i + 1) / length(block_query_response.blocks)) * 100)}% [#{i + 1}/#{length(block_query_response.blocks)}]\r")
end
end)

IO.write("Block Sync Complete")
end

{:noreply, state}
end

def handle_info({transaction = %{type: "TRANSACTION"}, _caller}, state) do
transaction = Transaction.sanitize(transaction)

state =
  if Validator.valid_transaction?(transaction) do
    if transaction not in state.known_transactions do
      <<shortid::bytes-size(20), _rest::binary>> = transaction.id
      Logger.info("Received transaction \e[32m#{shortid}...\e[0m")
      Peer.gossip("TRANSACTION", transaction)

      %{state | known_transactions: [transaction | state.known_transactions]}
    else
      # Already known; keep state unchanged instead of falling through to nil.
      state
    end
  else
    Logger.info("Received invalid transaction. Ignoring.")
    state
  end

{:noreply, state}
end

def handle_info({:new_outbound_connection, handler_pid}, state) do
# Let's ask our peer for new blocks, if there are any. We'll ask for all
# blocks starting from our current index minus 120 (4 hours' worth of blocks
# before we disconnected), just in case there was a fork after we disconnected.

starting_at =
case Ledger.last_block() do
:err -> 0
last_block ->
# Current index minus 120 or 0, whichever is greater.
max(0, :binary.decode_unsigned(last_block.index) - 120)
end

send(handler_pid, {"BLOCK_BATCH_QUERY_REQUEST", %{starting_at: starting_at}})

send(handler_pid, {"PEER_QUERY_REQUEST", %{}})

{:noreply, state}
end

def handle_info({:new_inbound_connection, handler_pid}, state) do
send(handler_pid, {"PEER_QUERY_REQUEST", %{}})

{:noreply, state}
end

def handle_info({%{type: "PEER_QUERY_REQUEST"}, handler_pid}, state) do
peers =
  Elixium.Store.PeerOracle
  |> GenServer.call({:load_known_peers, []})
  |> Enum.take(8)

send(handler_pid, {"PEER_QUERY_RESPONSE", %{peers: peers}})

{:noreply, state}
end

def handle_info({%{type: "PEER_QUERY_RESPONSE", peers: peers}, _caller}, state) do
Enum.each(peers, fn peer ->
GenServer.call(Elixium.Store.PeerOracle, {:save_known_peer, [peer]})
end)

{:noreply, state}
end

def handle_info(_, state) do
Logger.warn("Received message that isn't handled by any other case.")

{:noreply, state}
end

def query_block(index, caller), do: send(caller, {"BLOCK_QUERY_REQUEST", %{index: index}})
end
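
Because the router is driven by plain message passing rather than GenServer.call/2, its handlers can be exercised by hand from an iex session attached to a running node. The snippet below is an illustrative sketch, not part of this commit; it assumes the application (and therefore the Ledger store) has already started, and it impersonates a connection handler by passing self() as the caller:

# Ask the router for the block at height 0, the way a peer connection
# handler would, then wait for the reply it sends back to the caller pid.
send(ElixiumNode.PeerRouter, {%{type: "BLOCK_QUERY_REQUEST", index: 0}, self()})

receive do
  {"BLOCK_QUERY_RESPONSE", block} -> block
after
  1_000 -> :no_response
end
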
13 changes: 13 additions & 0 deletions lib/peer/supervisor.ex
@@ -0,0 +1,13 @@
defmodule ElixiumNode.PeerRouter.Supervisor do
use Supervisor

def start_link(_args) do
Supervisor.start_link(__MODULE__, [], name: __MODULE__)
end

def init(_args) do
children = [ElixiumNode.PeerRouter]

Supervisor.init(children, strategy: :one_for_one)
end
end
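
A brief usage note, again a sketch rather than anything in this commit: the supervisor takes no meaningful arguments and registers both itself and the router under their module names, so it can be started in isolation (for instance in a test) and the router looked up by name. With the :one_for_one strategy, a crashed router is simply restarted.

# Start the router under its supervisor and confirm it registered itself.
{:ok, _sup} = ElixiumNode.PeerRouter.Supervisor.start_link([])
true = ElixiumNode.PeerRouter |> Process.whereis() |> is_pid()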