Making it so LevelDB works, changing website to create and save a wallet if one does not exist in browser storage but reuse the wallet in storage if one exists (#469)
willmeister committed Sep 25, 2019
1 parent cef49e9 commit a3cc851
Showing 8 changed files with 133 additions and 23 deletions.
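
The behavior described in the commit title boils down to a small create-or-reuse flow over a persistent LevelDB-backed store. A minimal sketch, based on the packages/example-rollup/index.ts changes below — the BaseDB constructor arguments and binary level encodings are taken from the diff, while the storage path and the standalone helper function are illustrative:

import * as Level from 'level'
import { BaseDB } from '@pigi/core'
import { ethers } from 'ethers'

// Sketch: reuse a persisted wallet if its mnemonic is already in browser
// storage, otherwise create a new wallet and save its mnemonic for next time.
async function loadOrCreateWallet(): Promise<ethers.Wallet> {
  const walletDB = new BaseDB(
    (await Level('build/level/wallet', {
      keyEncoding: 'binary',
      valueEncoding: 'binary',
    })) as any,
    256
  )

  const mnemonicKey: Buffer = Buffer.from('mnemonic')
  const storedMnemonic: Buffer = await walletDB.get(mnemonicKey)
  if (!!storedMnemonic) {
    // A mnemonic was persisted earlier -- rebuild the same wallet from it.
    return ethers.Wallet.fromMnemonic(storedMnemonic.toString())
  }

  // Nothing persisted yet -- create a wallet and store its mnemonic for reuse.
  const wallet = ethers.Wallet.createRandom()
  await walletDB.put(mnemonicKey, Buffer.from(wallet.mnemonic))
  return wallet
}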
4 changes: 4 additions & 0 deletions .gitignore
@@ -8,6 +8,7 @@ node_modules/
/.idea
/.awcache
/.vscode
*.iml

# misc
*.swp
@@ -17,3 +18,6 @@ npm-debug.log
/**/npm-debug.log
/packages/**/LICENSE.txt
*.test.tmp
/**/package-lock.json

/**/*.log
3 changes: 2 additions & 1 deletion package.json
@@ -52,6 +52,7 @@
"dependencies": {
"bignumber.js": "^9.0.0",
"chai-bignumber": "^3.0.0",
"ganache-core": "2.5.7"
"ganache-core": "2.5.7",
"level": "^5.0.1"
}
}
38 changes: 34 additions & 4 deletions packages/core/src/app/block-production/merkle-tree.ts
@@ -17,7 +17,9 @@ import {
TWO,
ZERO,
} from '../../types'
import { keccak256, runInDomain } from '../utils'
import { getLogger, keccak256, runInDomain } from '../utils'

const log = getLogger('merkle-tree')

/**
* SparseMerkleTree implementation assuming a 256-bit hash algorithm is used.
@@ -56,18 +58,40 @@ export class SparseMerkleTreeImpl implements SparseMerkleTree {
}

public async getLeaf(leafKey: BigNumber, rootHash?: Buffer): Promise<Buffer> {
log.debug(`Trying to get leaf [${leafKey.toString(10)}]`)
return this.treeLock.acquire(SparseMerkleTreeImpl.lockKey, async () => {
if (!!rootHash && !rootHash.equals(this.root.hash)) {
log.debug(
`Cannot get Leaf [${leafKey.toString(
10
)}] because root hash does not match.`
)
return undefined
}

const nodesInPath: MerkleTreeNode[] = await this.getNodesInPath(leafKey)
if (!nodesInPath || !nodesInPath.length) {
if (!nodesInPath || nodesInPath.length !== this.height) {
log.debug(
`Cannot get Leaf [${leafKey.toString(
10
)}] because nodes in path does not equal tree height.`
)
return undefined
}
const leaf: MerkleTreeNode = nodesInPath[nodesInPath.length - 1]

// Will only match if we were able to traverse all the way to the leaf
return leaf.key.equals(leafKey) ? leaf.value : undefined
if (!leaf.key.equals(leafKey)) {
log.debug(
`Cannot get Leaf because leaf key does not match. Path: [${leafKey.toString(
10
)}], leaf key: ${leaf.key.toString(10)}.`
)
return undefined
}

log.debug(`Returning leaf value: [${leaf.value.toString()}].`)
return leaf.value
})
}

@@ -557,7 +581,13 @@ export class SparseMerkleTreeImpl implements SparseMerkleTree {
* @param node The node in question
*/
private getNodeID(node: MerkleTreeNode): Buffer {
return this.getNodeIDFromHashAndKey(node.hash, node.key)
const id: Buffer = this.getNodeIDFromHashAndKey(node.hash, node.key)
log.debug(
`Node ID for key [${node.key}] is ${id.toString(
'hex'
)}. (node hash: ${node.hash.toString('hex')}`
)
return id
}

private getNodeIDFromHashAndKey(
43 changes: 37 additions & 6 deletions packages/core/src/app/db/db.ts
@@ -22,19 +22,16 @@ import {
Iterator,
Bucket,
RangeBucket,
KeyValueStore,
PutBatch,
PUT_BATCH_TYPE,
DEL_BATCH_TYPE,
BigNumber,
ZERO,
ONE,
} from '../../types'
import { BaseIterator } from './iterator'
import { BaseBucket } from './bucket'
import { BaseRangeBucket } from './range-bucket'
import { bufferUtils } from '../../app'
import { bufferUtils, getLogger } from '../utils'

const log = getLogger('db')
export const DEFAULT_PREFIX_LENGTH = 3

/**
@@ -73,6 +70,7 @@ export class BaseDB implements DB {
this.db.open(options, (err) => {
if (err) {
reject(err)
log.error(`Error opening DB: ${err.message}, ${err.stack}`)
return
}
resolve()
@@ -92,6 +90,7 @@
this.db.close((err) => {
if (err) {
reject(err)
log.error(`Error closing DB: ${err.message}, ${err.stack}`)
return
}
resolve()
@@ -112,12 +111,23 @@
this.db.get(key, (err, value) => {
if (err) {
if (isNotFound(err)) {
log.debug(`Key ${key.toString('hex')} not found.`)
resolve(null)
return
}
reject(err)
log.error(
`Error getting key ${key.toString('hex')}: ${err.message}, ${
err.stack
}`
)
return
}
log.debug(
`Key ${key.toString(
'hex'
)} fetched. Returning value [${value.toString('hex')}].`
)
resolve(value)
})
})
@@ -133,8 +143,16 @@
this.db.put(key, value, (err) => {
if (err) {
reject(err)
log.error(
`Error putting key / value ${key.toString(
'hex'
)} / ${value.toString('hex')}: ${err.message}, ${err.stack}`
)
return
}
log.debug(
`Put key / value [${key.toString('hex')} / ${value.toString('hex')}]`
)
resolve()
})
})
@@ -149,6 +167,11 @@
this.db.del(key, (err) => {
if (err) {
reject(err)
log.error(
`Error deleting key ${key.toString('hex')}: ${err.message}, ${
err.stack
}`
)
return
}
resolve()
@@ -165,7 +188,12 @@
try {
await this.get(key)
return true
} catch {
} catch (err) {
log.error(
`Error checking key existence: key ${key.toString('hex')}: ${
err.message
}, ${err.stack}`
)
return false
}
}
@@ -179,6 +207,9 @@
this.db.batch(operations, (err) => {
if (err) {
reject(err)
log.error(
`Error executing batch operation: ${err.message}, ${err.stack}`
)
return
}
resolve()
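
For context on how this wrapper is used after the change: a minimal usage sketch, assuming the constructor arguments and binary level encodings shown in the example-rollup changes below (the store path, keys, and values here are illustrative):

import * as Level from 'level'
import { BaseDB, getLogger } from '@pigi/core'

const log = getLogger('db-demo')

async function demo(): Promise<void> {
  // Wrap a persistent LevelDB store (binary key/value encodings) in BaseDB.
  const db = new BaseDB(
    (await Level('build/level/demo', {
      keyEncoding: 'binary',
      valueEncoding: 'binary',
    })) as any,
    256
  )

  await db.put(Buffer.from('greeting'), Buffer.from('hello')) // logged at debug level
  const found = await db.get(Buffer.from('greeting'))
  const missing = await db.get(Buffer.from('absent')) // resolves null; logs "not found"
  log.info(`found: ${found.toString()}, missing: ${missing}`)
}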
43 changes: 38 additions & 5 deletions packages/example-rollup/index.ts
@@ -1,11 +1,13 @@
import * as Level from 'level'

/* Imports */
import {
DB,
newInMemoryDB,
SignedByDB,
SignedByDecider,
SimpleClient,
getLogger,
BaseDB,
} from '@pigi/core'
import {
UNI_TOKEN_TYPE,
@@ -69,9 +71,30 @@ let unipigWallet
let wallet: ethers.Wallet

async function initialize() {
wallet = ethers.Wallet.createRandom()
const levelOptions = {
keyEncoding: 'binary',
valueEncoding: 'binary',
}

const walletDB = new BaseDB(
(await Level('build/level/wallet', levelOptions)) as any,
256
)
const mnemonicKey: Buffer = Buffer.from('mnemonic')
const mnemonic: Buffer = await walletDB.get(mnemonicKey)
if (!!mnemonic) {
log.info('mnemonic found. Initializing existing wallet.')
wallet = ethers.Wallet.fromMnemonic(mnemonic.toString())
} else {
log.info('mnemonic not found. Generating new wallet.')
wallet = ethers.Wallet.createRandom()
await walletDB.put(mnemonicKey, Buffer.from(wallet.mnemonic))
}

const signatureDB: DB = newInMemoryDB()
const signatureDB: DB = new BaseDB(
(await Level('build/level/signatures', levelOptions)) as any,
256
)
const signedByDB: SignedByDB = new SignedByDB(signatureDB)
const signedByDecider: SignedByDecider = new SignedByDecider(
signedByDB,
@@ -81,9 +104,19 @@ async function initialize() {
signedByDB,
signedByDecider
)
const rollupClient: RollupClient = new RollupClient(newInMemoryDB())

const clientDB: DB = new BaseDB(
(await Level('build/level/client', levelOptions)) as any,
256
)
const transitionerDB: DB = new BaseDB(
(await Level('build/level/transitioner', levelOptions)) as any,
256
)

const rollupClient: RollupClient = new RollupClient(clientDB)
unipigWallet = new UnipigTransitioner(
newInMemoryDB(),
transitionerDB,
rollupStateSolver,
rollupClient,
undefined,
6 changes: 3 additions & 3 deletions packages/example-rollup/package.json
@@ -9,7 +9,7 @@
"scripts": {
"lint": "tslint --format stylish --project .",
"fix": "prettier --config ./.prettierrc.js --write 'index.ts' '{src,test}/**/*.ts'",
"build": "tsc -p . && browserify index.ts -p [ tsify ] > public/bundle.js",
"build": "tsc -p . && browserify index.ts -p [ tsify ] > public/bundle.js && mkdir build/level",
"serve": "http-server ./public",
"aggregator": "node ./build/src/mock-aggregator.js",
"clean": "rimraf build/"
@@ -30,7 +30,8 @@
"@pigi/core": "^0.0.1-alpha.1",
"@pigi/wallet": "^0.0.1-alpha.1",
"cors": "^2.8.5",
"http-server": "^0.11.1"
"http-server": "^0.11.1",
"level": "^5.0.1"
},
"devDependencies": {
"@pigi/core": "^0.0.1-alpha.1",
@@ -43,7 +44,6 @@
"chai-as-promised": "^7.1.1",
"debug": "^4.1.1",
"ethers": "^4.0.30",
"memdown": "^5.0.0",
"mocha": "^6.1.2",
"prettier": "^1.16.4",
"rimraf": "^2.6.3",
17 changes: 14 additions & 3 deletions packages/example-rollup/src/mock-aggregator.ts
@@ -1,5 +1,6 @@
/* External Imports */
import MemDown from 'memdown'
import * as Level from 'level'

import { BaseDB } from '@pigi/core'
import {
State,
@@ -39,8 +40,18 @@ const host = '0.0.0.0'
const port = 3000

async function runAggregator() {
const stateDB = new BaseDB(new MemDown('state') as any)
const blockDB = new BaseDB(new MemDown('blocks') as any, 4)
const levelOptions = {
keyEncoding: 'binary',
valueEncoding: 'binary',
}
const stateDB = new BaseDB((await Level(
'build/level/state',
levelOptions
)) as any)
const blockDB = new BaseDB(
(await Level('build/level/blocks', levelOptions)) as any,
4
)

const rollupStateMachine: RollupStateMachine = await DefaultRollupStateMachine.create(
genesisState,
2 changes: 1 addition & 1 deletion yarn.lock
@@ -9323,7 +9323,7 @@ typewiselite@~1.0.0:
resolved "https://registry.yarnpkg.com/typewiselite/-/typewiselite-1.0.0.tgz#c8882fa1bb1092c06005a97f34ef5c8508e3664e"
integrity sha1-yIgvobsQksBgBal/NO9chQjjZk4=

uglify-js@^3.1.4:
uglify-js@^3.1.4, uglify-js@^3.6.0:
version "3.6.0"
resolved "https://registry.yarnpkg.com/uglify-js/-/uglify-js-3.6.0.tgz#704681345c53a8b2079fb6cec294b05ead242ff5"
integrity sha512-W+jrUHJr3DXKhrsS7NUVxn3zqMOFn0hL/Ei6v0anCIMoKC93TjcflTagwIHLW7SfMFfiQuktQyFVCFHGUE0+yg==