
Commit

works
fireduck64 committed Jun 30, 2018
1 parent f68a92c commit f87fd2c
Showing 4 changed files with 38 additions and 17 deletions.
18 changes: 11 additions & 7 deletions miner/src/Arktika.java
@@ -32,10 +32,12 @@
import java.util.Set;
import java.util.TreeSet;
import java.util.TreeMap;
import java.util.Queue;

import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.MinMaxPriorityQueue;
import org.junit.Assert;

public class Arktika
{
@@ -90,8 +92,8 @@ public static void main(String args[]) throws Exception

private FieldSource all_sources[];
private ImmutableMap<Integer, Integer> chunk_to_layer_map;
private ImmutableMap<Integer, MinMaxPriorityQueue<PartialWork> > chunk_to_queue_map;
private ImmutableMap<Integer, MinMaxPriorityQueue<PartialWork> > layer_to_queue_map;
private ImmutableMap<Integer, Queue<PartialWork> > chunk_to_queue_map;
private ImmutableMap<Integer, Queue<PartialWork> > layer_to_queue_map;
protected FieldSource composit_source;

public Arktika(Config config) throws Exception
@@ -314,7 +316,7 @@ public void onNext(WorkUnit wu)
// If the block number changes, clear the queues
if (last_block != wu_new.getHeader().getBlockHeight())
{
for(MinMaxPriorityQueue<PartialWork> q : layer_to_queue_map.values())
for(Queue<PartialWork> q : layer_to_queue_map.values())
{
synchronized(q)
{
@@ -404,7 +406,7 @@ private void loadField() throws Exception
logger.info(String.format("Found %d chunks", found.size()));

TreeMap<Integer, Integer> chunk_to_source_map = new TreeMap<>();
TreeMap<Integer, MinMaxPriorityQueue<PartialWork> > layer_to_queue=new TreeMap();
TreeMap<Integer, Queue<PartialWork> > layer_to_queue=new TreeMap();

for(int i=0; i<layer_count; i++)
{
@@ -420,15 +422,15 @@ private void loadField() throws Exception
chunk_to_source_map.put(x,i);
}
}
layer_to_queue.put(i, MinMaxPriorityQueue.expectedSize(2048).maximumSize(2048).create());
layer_to_queue.put(i, MinMaxPriorityQueue.maximumSize(2000).expectedSize(2000).create());

logger.info(String.format("Layer %d - %s", i, fs.toString()));
}

chunk_to_layer_map = ImmutableMap.copyOf(chunk_to_source_map);
layer_to_queue_map = ImmutableMap.copyOf(layer_to_queue);

TreeMap<Integer, MinMaxPriorityQueue<PartialWork>> chunk_to_queue=new TreeMap<>();
TreeMap<Integer, Queue<PartialWork>> chunk_to_queue=new TreeMap<>();
for(int x : chunk_to_layer_map.keySet())
{
int layer = chunk_to_layer_map.get(x);
@@ -460,7 +462,9 @@ private void loadField() throws Exception

public void enqueue(int chunk, PartialWork work)
{
MinMaxPriorityQueue<PartialWork> q = chunk_to_queue_map.get(chunk);
Assert.assertNotNull(work);

Queue<PartialWork> q = chunk_to_queue_map.get(chunk);
if (q == null) return;
synchronized(q)
{
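
Note (not part of the commit): this file now declares the per-chunk and per-layer work queues as java.util.Queue rather than the concrete Guava type, and bounds each layer queue at 2000 entries. Below is a minimal sketch of that pattern; the class name and the Integer element type are illustrative stand-ins for the real PartialWork plumbing.

import java.util.Queue;
import com.google.common.collect.MinMaxPriorityQueue;

// Sketch only: class name and element type are illustrative, not from the commit.
class BoundedQueueSketch
{
  // Declared against the Queue interface so callers do not depend on Guava.
  private final Queue<Integer> queue =
    MinMaxPriorityQueue.maximumSize(2000).expectedSize(2000).create();

  void enqueue(Integer item)
  {
    // MinMaxPriorityQueue is not thread-safe; guard it externally,
    // as Arktika.enqueue() does with synchronized(q).
    synchronized (queue)
    {
      // At maximumSize, offering evicts the greatest element under the
      // comparator (possibly the one just offered), keeping the queue bounded.
      queue.offer(item);
    }
  }

  Integer take()
  {
    synchronized (queue)
    {
      return queue.poll(); // least element first, or null when empty
    }
  }
}
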
3 changes: 3 additions & 0 deletions miner/src/FieldSourceFile.java
@@ -24,6 +24,7 @@
import java.util.TreeSet;
import java.util.logging.Logger;

import org.junit.Assert;

public class FieldSourceFile extends FieldSource
{
@@ -130,6 +131,8 @@ public void bulkRead(long word_index, ByteBuffer bb) throws java.io.IOException
int chunk = (int)(word_index / words_per_chunk);
long word_offset = word_index % words_per_chunk;

Assert.assertTrue(hasChunk(chunk));

long read_offset = word_offset * SnowMerkle.HASH_LEN_LONG;
ChannelUtil.readFully( snow_file_channel[chunk], bb, read_offset);
}
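
Note (not part of the commit): the new assert makes bulkRead fail immediately when the requested chunk is not held by this source, instead of failing later inside the channel read. A rough sketch of the arithmetic it guards follows; the constants are illustrative, not the repository's real values from SnowMerkle or the field configuration.

// Sketch of the chunk/offset math guarded by the new assert.
public class ChunkMathSketch
{
  static final long WORDS_PER_CHUNK = 1L << 20; // illustrative
  static final int  BYTES_PER_WORD  = 16;       // illustrative

  public static void main(String[] args)
  {
    long word_index  = 5_000_000L;
    int  chunk       = (int) (word_index / WORDS_PER_CHUNK);  // which chunk file holds the word
    long word_offset = word_index % WORDS_PER_CHUNK;          // word position inside that chunk
    long read_offset = word_offset * BYTES_PER_WORD;          // byte offset for the channel read

    // The commit's assert corresponds to: only read if this source holds 'chunk'.
    System.out.println("chunk=" + chunk + " read_offset=" + read_offset);
  }
}
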
22 changes: 13 additions & 9 deletions miner/src/LayerWorkThread.java
@@ -10,6 +10,7 @@
import duckutil.TimeRecordAuto;

import java.util.Queue;
import org.junit.Assert;


import java.util.logging.Level;
@@ -25,10 +26,6 @@ public class LayerWorkThread extends Thread
Random rnd;
MessageDigest md = DigestUtil.getMD();

byte[] word_buff = new byte[SnowMerkle.HASH_LEN];
ByteBuffer word_bb = ByteBuffer.wrap(word_buff);
int proof_field;
byte[] nonce = new byte[Globals.NONCE_LENGTH];
FieldSource fs;
Arktika arktika;
Queue<PartialWork> queue;
@@ -48,7 +45,8 @@ public LayerWorkThread(Arktika arktika, FieldSource fs, Queue<PartialWork> queue

private void runPass() throws Exception
{
PartialWork pw = queue.poll();
PartialWork pw = null;
synchronized(queue){pw=queue.poll();}
if (pw == null)
{
WorkUnit wu = arktika.last_work_unit;
@@ -71,6 +69,9 @@ private void processPw(PartialWork pw)
{
if (pw.passes_done == Globals.POW_LOOK_PASSES)
{
Assert.assertNotNull(pw);
Assert.assertNotNull(pw.context);
Assert.assertNotNull(pw.wu);
if (PowUtil.lessThanTarget(pw.context, pw.wu.getReportTarget()))
{
String str = HexUtil.getHexString(pw.context);
@@ -98,24 +99,26 @@ private void processPw(PartialWork pw)

private void submitWork(PartialWork pw) throws Exception
{


WorkUnit wu = pw.wu;
byte[] first_hash = PowUtil.hashHeaderBits(wu.getHeader(), nonce);
byte[] first_hash = PowUtil.hashHeaderBits(wu.getHeader(), pw.nonce);
byte[] context = first_hash;



BlockHeader.Builder header = BlockHeader.newBuilder();
header.mergeFrom(wu.getHeader());
header.setNonce(ByteString.copyFrom(pw.nonce));

byte[] word_buff = new byte[SnowMerkle.HASH_LEN];
ByteBuffer word_bb = ByteBuffer.wrap(word_buff);




for (int pass = 0; pass < Globals.POW_LOOK_PASSES; pass++)
{
word_bb.clear();
long word_idx = PowUtil.getNextSnowFieldIndex(context, total_words);

arktika.composit_source.readWord(word_idx, word_bb);
SnowPowProof proof = ProofGen.getProof(arktika.composit_source, arktika.deck_source, word_idx, total_words);
header.addPowProof(proof);
@@ -159,6 +162,7 @@ public void run()
{
err = true;
logger.warning("Error: " + t);
t.printStackTrace();
}

if (err)
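
Note (not part of the commit): two things change in this thread. The shared work queue is now polled under the same lock the producer uses, and the scratch buffers that used to be thread fields are created where they are needed, so the state carried by a PartialWork (its nonce and hashing context) travels with the work item instead of living in the thread. A small sketch of that shape, with hypothetical names; Work stands in for PartialWork and the buffer sizes are illustrative.

import java.util.Queue;
import java.util.Random;

// Hypothetical stand-in for PartialWork.
class Work { byte[] nonce; }

class PassLoopSketch
{
  private final Queue<Work> queue;
  private final Random rnd = new Random();

  PassLoopSketch(Queue<Work> queue) { this.queue = queue; }

  void runPass()
  {
    Work w;
    // Poll under the queue's own lock, matching the producer's synchronized(q).
    synchronized (queue)
    {
      w = queue.poll();
    }
    if (w == null)
    {
      // Nothing queued: start a fresh work item with its own nonce,
      // rather than reusing a buffer shared across the whole thread.
      w = new Work();
      w.nonce = new byte[12]; // illustrative length
      rnd.nextBytes(w.nonce);
    }
    submit(w);
  }

  private void submit(Work w)
  {
    // Scratch buffers are method-local, so concurrent submissions
    // cannot stomp on each other's word buffer.
    byte[] word_buff = new byte[16]; // illustrative length
    // ... hashing passes over word_buff would go here ...
  }
}
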
12 changes: 11 additions & 1 deletion miner/src/PartialWork.java
@@ -21,9 +21,9 @@ public class PartialWork implements Comparable<PartialWork>
byte[] word_buff = new byte[SnowMerkle.HASH_LEN];
ByteBuffer word_bb = ByteBuffer.wrap(word_buff);


public PartialWork(WorkUnit wu, Random rnd, MessageDigest md, long total_words)
{
this.wu = wu;
nonce = new byte[Globals.NONCE_LENGTH];
rnd.nextBytes(nonce);
wu.getHeader().getNonce().copyTo(nonce, 0);
@@ -39,8 +39,16 @@ public int compareTo(PartialWork o)
Assert.assertNotNull(o);
if (passes_done > o.passes_done) return -1;
if (passes_done < o.passes_done) return 1;

//if (sort < o.sort) return -1;
//if (sort > o.sort) return 1;
return 0;
}
public boolean equals(Object o)
{
System.out.println("Equals called");
return super.equals(o);
}

public long getNextWordIdx()
{
@@ -50,6 +58,8 @@ public long getNextWordIdx()
public void doPass(FieldSource fs, MessageDigest md, long total_words)
throws java.io.IOException
{
//System.out.println("Pass: " + passes_done);
Assert.assertTrue(next_word_idx >= 0);
word_bb.clear();
fs.readWord(next_word_idx, word_bb);
context = PowUtil.getNextContext(context, word_buff, md);
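
Note (not part of the commit): PartialWork sorts so that more-completed work compares as smaller, which means poll() on a priority queue hands back the most advanced item and a size cap evicts the least advanced one. A self-contained sketch of that ordering, with an illustrative Work class standing in for PartialWork:

import com.google.common.collect.MinMaxPriorityQueue;

// Sketch: how descending passes_done ordering interacts with a bounded priority queue.
class Work implements Comparable<Work>
{
  final int passesDone;
  Work(int passesDone) { this.passesDone = passesDone; }

  @Override
  public int compareTo(Work o)
  {
    // More passes done sorts first, so poll() returns the most advanced
    // work and eviction drops the least advanced.
    return Integer.compare(o.passesDone, passesDone);
  }
}

class OrderingDemo
{
  public static void main(String[] args)
  {
    MinMaxPriorityQueue<Work> q = MinMaxPriorityQueue.maximumSize(2).create();
    q.offer(new Work(1));
    q.offer(new Work(5));
    q.offer(new Work(3));                    // evicts the Work with 1 pass
    System.out.println(q.poll().passesDone); // prints 5
    System.out.println(q.poll().passesDone); // prints 3
  }
}
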
