Skip to content
This repository has been archived by the owner on Oct 29, 2021. It is now read-only.

Refactor ethash #100

Merged
merged 1 commit into from
May 13, 2019
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
479 changes: 121 additions & 358 deletions consensus/ethash/algorithm.go

Large diffs are not rendered by default.

56 changes: 56 additions & 0 deletions consensus/ethash/algorithm_test.go
Original file line number Diff line number Diff line change
@@ -0,0 +1,56 @@
package ethash

import (
"testing"

"github.com/ethereum/go-ethereum/common/hexutil"
"github.com/stretchr/testify/assert"
)

// TestSizes verifies that the computed cache and dataset sizes agree with the
// precomputed lookup tables for every known epoch.
func TestSizes(t *testing.T) {
	t.Run("cache", func(t *testing.T) {
		for epoch, val := range cacheSizes {
			// assert.Equal takes the expected value first: val is the
			// precomputed reference, the function result is the actual.
			assert.Equal(t, val, getCacheSizeByEpoch(epoch))
		}
	})
	t.Run("dataset", func(t *testing.T) {
		for epoch, val := range datasetSizes {
			assert.Equal(t, val, getDatasetSizeByEpoch(epoch))
		}
	})
}

// TestBlockSeed checks that seed hashes derived from block numbers — and from
// the equivalent epoch numbers — match known reference values.
func TestBlockSeed(t *testing.T) {
	cases := []struct {
		block int
		seed  string
	}{
		{block: 0, seed: "0x0000000000000000000000000000000000000000000000000000000000000000"},
		{block: 30000, seed: "0x290decd9548b62a8d60345a988386fc84ba6bc95484008f6362f93160ef3e563"},
		{block: 60000, seed: "0x510e4e770828ddbf7f7b00ab00a9f6adaf81c0dc9cc85f1f8249c256942d61d9"},
		// Block 80000 sits inside the same epoch as 60000, so it shares a seed.
		{block: 80000, seed: "0x510e4e770828ddbf7f7b00ab00a9f6adaf81c0dc9cc85f1f8249c256942d61d9"},
		{block: 90000, seed: "0x356e5a2cc1eba076e650ac7473fccc37952b46bc2e419a200cec0c451dce2336"},
	}

	for _, tc := range cases {
		t.Run("", func(t *testing.T) {
			// Both lookup paths must agree with the reference seed.
			assert.Equal(t, tc.seed, hexutil.Encode(getSeedHash(tc.block)))
			assert.Equal(t, tc.seed, hexutil.Encode(getSeedHashByEpoch(tc.block/epochLength)))
		})
	}
}
128 changes: 128 additions & 0 deletions consensus/ethash/cache.go
Original file line number Diff line number Diff line change
@@ -0,0 +1,128 @@
package ethash

import (
"encoding/binary"
"fmt"
"hash"

"golang.org/x/crypto/sha3"
)

// Cache is a 16 MB pseudorandom cache.
type Cache struct {
cacheSize uint32
datasetSize int
cache [][]uint32
sha512 hash.Hash
sha256 hash.Hash
}

// newCache builds the verification cache for the given epoch: it derives the
// epoch seed, generates the cache rows, and records the sizes needed by the
// hashimoto lookups.
func newCache(epoch int) *Cache {
	c := &Cache{
		sha512:      sha3.NewLegacyKeccak512(),
		sha256:      sha3.NewLegacyKeccak256(),
		datasetSize: int(getDatasetSizeByEpoch(epoch)),
	}

	// Generate the cache rows from the epoch seed, then remember the row
	// count for modular indexing in calcDatasetItem.
	c.mkcache(int(getCacheSizeByEpoch(epoch)), getSeedHashByEpoch(epoch))
	c.cacheSize = uint32(len(c.cache))
	return c
}

// calcDatasetItem computes the i-th dataset row (16 uint32 words) on demand
// from the cache, avoiding materializing the full dataset.
func (c *Cache) calcDatasetItem(i uint32) []uint32 {
	n := c.cacheSize
	r := hashBytes / wordBytes // words per hash row

	// Initialize the mix as a copy of the (i mod n)-th cache row, perturbed
	// by the item index, then hash it in place.
	mix := make([]uint32, len(c.cache[0]))
	copy(mix[:], c.cache[i%n])
	mix[0] ^= i
	c.sha512Int(mix)

	// Fold datasetParents pseudo-randomly selected cache rows into the mix.
	for j := 0; j < datasetParents; j++ {
		cacheIndex := fnvOp(i^uint32(j), mix[j%r])

		// fnv map
		for o := 0; o < 16; o++ {
			mix[o] = fnvOp(mix[o], c.cache[cacheIndex%n][o])
		}
	}

	// Final in-place keccak512 pass over the mix.
	c.sha512Int(mix)
	return mix
}

// sha512Aux returns the legacy Keccak-512 digest of p, reusing the cache's
// hasher instance (reset before each use).
func (c *Cache) sha512Aux(p []byte) []byte {
	hasher := c.sha512
	hasher.Reset()
	hasher.Write(p)
	return hasher.Sum(nil)
}

// sha256Aux returns the legacy Keccak-256 digest of p, reusing the cache's
// hasher instance (reset before each use).
func (c *Cache) sha256Aux(p []byte) []byte {
	hasher := c.sha256
	hasher.Reset()
	hasher.Write(p)
	return hasher.Sum(nil)
}

// sha512Int replaces p, in place, with the Keccak-512 digest of its
// little-endian byte serialization, decoded back into little-endian words.
// len(p) must be 16 so the 64-byte digest fills it exactly.
func (c *Cache) sha512Int(p []uint32) {
	// Serialize all words into one buffer and hash it in a single write;
	// hash.Hash streaming semantics make this equivalent to word-by-word
	// writes.
	buf := make([]byte, 4*len(p))
	for i, v := range p {
		binary.LittleEndian.PutUint32(buf[4*i:], v)
	}

	c.sha512.Reset()
	c.sha512.Write(buf)
	digest := c.sha512.Sum(nil)

	for i := range p {
		p[i] = binary.LittleEndian.Uint32(digest[4*i:])
	}
}

// mkcache generates the verification cache in place on c.cache: a sequential
// keccak512 hash chain seeded by seed, followed by cacheRounds passes of
// memory-hard mixing, finally decoded into rows of 16 little-endian uint32
// words. cacheSize is the target size in bytes; hashBytes per row.
func (c *Cache) mkcache(cacheSize int, seed []byte) {
	n := cacheSize / hashBytes // number of rows

	// Sequentially produce the initial rows: row 0 hashes the seed, each
	// subsequent row hashes its predecessor.
	res := [][]byte{}
	res = append(res, c.sha512Aux(seed))
	for i := 1; i < n; i++ {
		aux := c.sha512Aux(res[i-1])
		res = append(res, aux)
	}

	// Mixing passes: each row is replaced by the hash of the XOR of its
	// predecessor (wrapping) and a pseudo-randomly selected row. Rows are
	// updated in place, so later rows in a pass see earlier updates —
	// statement order here is consensus-critical.
	for j := 0; j < cacheRounds; j++ {
		for i := 0; i < n; i++ {
			v := binary.LittleEndian.Uint32(res[i]) % uint32(n)
			temp := xorBytes(res[(i-1+n)%n], res[v])
			res[i] = c.sha512Aux(temp)
		}
	}

	// Convert bytes to words
	resInt := [][]uint32{}
	for _, i := range res {
		entry := make([]uint32, 16)
		for indx := range entry {
			entry[indx] = binary.LittleEndian.Uint32(i[indx*4:])
		}
		resInt = append(resInt, entry)
	}

	c.cache = resInt
}

// hashimoto runs the ethash hashimoto function for the given header and nonce
// against this cache, computing dataset items on demand via calcDatasetItem.
// The two return values are passed through from the underlying hashimoto
// implementation (defined elsewhere in this package).
func (c *Cache) hashimoto(header []byte, nonce uint64) ([]byte, []byte) {
	return hashimoto(header, nonce, c.datasetSize, c.sha512Aux, c.sha256Aux, c.calcDatasetItem)
}

// xorBytes returns a new slice holding the element-wise XOR of a and b.
// The inputs must have equal length; a mismatch is a programmer error and
// panics.
func xorBytes(a, b []byte) []byte {
	if len(a) != len(b) {
		panic(fmt.Sprintf("length of byte slices is not equivalent: %d != %d", len(a), len(b)))
	}
	out := make([]byte, len(a))
	for i, av := range a {
		out[i] = av ^ b[i]
	}
	return out
}
49 changes: 49 additions & 0 deletions consensus/ethash/cache_test.go
Original file line number Diff line number Diff line change
@@ -0,0 +1,49 @@
package ethash

import (
"encoding/binary"
"hash/fnv"
"testing"

"github.com/ethereum/go-ethereum/common/hexutil"
"github.com/stretchr/testify/assert"
)

// TestCache regression-checks cache generation: for several epochs the full
// cache contents are folded into an FNV-128 fingerprint (big-endian word
// serialization) and compared against known-good values.
func TestCache(t *testing.T) {
	cases := []struct {
		epoch int
		hash  string
	}{
		{epoch: 0, hash: "0xbdae398aa93d3e0593f55180d4d9e14a"},
		{epoch: 1, hash: "0xfd9ef335c5dc2f3831abe21fa248747b"},
		{epoch: 100, hash: "0x19bea946d49edfcfc01e34a58495921b"},
		{epoch: 1000, hash: "0xe42941588426211fb7d56eaba630a687"},
	}

	for _, tc := range cases {
		t.Run("", func(t *testing.T) {
			fingerprint := fnv.New128()
			word := make([]byte, 4)
			for _, row := range newCache(tc.epoch).cache {
				for _, w := range row {
					binary.BigEndian.PutUint32(word, w)
					fingerprint.Write(word)
				}
			}
			assert.Equal(t, tc.hash, hexutil.Encode(fingerprint.Sum(nil)))
		})
	}
}
Loading