Commit

zomg

fireduck64 committed Jun 30, 2018
1 parent 9c84fbe commit 10ed946
Showing 6 changed files with 223 additions and 19 deletions.
7 changes: 5 additions & 2 deletions miner/src/Arktika.java
@@ -34,6 +34,7 @@
import java.util.TreeMap;

import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.MinMaxPriorityQueue;

public class Arktika
@@ -65,7 +66,7 @@ public static void main(String args[]) throws Exception
protected volatile WorkUnit last_work_unit;

private MiningPoolServiceStub asyncStub;
private MiningPoolServiceBlockingStub blockingStub;
protected MiningPoolServiceBlockingStub blockingStub;

private final NetworkParams params;

@@ -85,12 +86,13 @@ public static void main(String args[]) throws Exception

private final int selected_field;

private FieldSource deck_source;
protected FieldSource deck_source;

private FieldSource all_sources[];
private ImmutableMap<Integer, Integer> chunk_to_layer_map;
private ImmutableMap<Integer, MinMaxPriorityQueue<PartialWork> > chunk_to_queue_map;
private ImmutableMap<Integer, MinMaxPriorityQueue<PartialWork> > layer_to_queue_map;
protected FieldSource composit_source;

public Arktika(Config config) throws Exception
{
@@ -438,6 +440,7 @@ private void loadField() throws Exception
{
throw new RuntimeException("No sources seem to have the deck files.");
}
composit_source = new FieldSourceComposit(ImmutableList.copyOf(all_sources));
}
public void enqueue(int chunk, PartialWork work)
{
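The body of enqueue() is collapsed in this view. Given the chunk_to_queue_map declared above, the routing presumably looks something like this hypothetical sketch (not the actual collapsed code):

    // Hypothetical sketch only; the real enqueue() body is collapsed above.
    // Route a piece of partial work to the queue for its chunk.
    MinMaxPriorityQueue<PartialWork> q = chunk_to_queue_map.get(chunk);
    synchronized (q)
    {
      // If built with a maximumSize, MinMaxPriorityQueue evicts its
      // greatest element when full, keeping the backlog bounded.
      q.add(work);
    }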
6 changes: 6 additions & 0 deletions miner/src/FieldSource.java
@@ -3,6 +3,8 @@

import com.google.common.collect.ImmutableSet;
import java.util.Set;
import java.util.Map;
import java.nio.channels.FileChannel;

import java.nio.ByteBuffer;
import snowblossom.lib.Globals;
@@ -41,5 +43,9 @@ public boolean hasDeckFiles()
{
return false;
}
public Map<Long, FileChannel> getDeckFiles()
{
return null;
}

}
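The base class pairs the existing hasDeckFiles() check with a null default for getDeckFiles(); only sources that actually carry deck files override both (FieldSourceFile, below). A minimal caller-side sketch:

    // Sketch: callers should gate on hasDeckFiles() before touching the map.
    if (fs.hasDeckFiles())
    {
      // Maps subtree word-count (dist) to the deck file holding those subtree hashes.
      Map<Long, FileChannel> decks = fs.getDeckFiles();
    }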
47 changes: 47 additions & 0 deletions miner/src/FieldSourceComposit.java
@@ -0,0 +1,47 @@

package snowblossom.miner;

import com.google.common.collect.ImmutableSet;
import java.util.Set;
import java.util.List;
import java.util.TreeSet;

import java.nio.ByteBuffer;
import snowblossom.lib.Globals;
import snowblossom.lib.SnowMerkle;
import java.util.logging.Logger;

public class FieldSourceComposit extends FieldSource
{
private List<FieldSource> sources;

public FieldSourceComposit(List<FieldSource> sources)
{
this.sources = sources;
TreeSet<Integer> total = new TreeSet<>();

for(FieldSource fs : sources)
{
total.addAll(fs.getHoldingSet());
}

holding_set = ImmutableSet.copyOf(total);
}

public void bulkRead(long word_index, ByteBuffer bb) throws java.io.IOException
{
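// Find the chunk holding this word and delegate to the first source that has it.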
int chunk = (int)(word_index / words_per_chunk);
for(FieldSource fs : sources)
{
if (fs.hasChunk(chunk))
{
fs.bulkRead(word_index, bb);
return;
}
}
throw new RuntimeException("oh noes");

}


}
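FieldSourceComposit is a straightforward delegating union: its holding set is the union of its children's, and each read is served by the first child holding the relevant chunk. A sketch of its use, mirroring Arktika.loadField() above (word_index stands for any word covered by some source):

    // Merge all configured sources into one view, then read a word
    // without caring which layer serves it.
    FieldSource composit = new FieldSourceComposit(ImmutableList.copyOf(all_sources));

    byte[] word_buff = new byte[SnowMerkle.HASH_LEN];
    composit.bulkRead(word_index, ByteBuffer.wrap(word_buff));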
7 changes: 7 additions & 0 deletions miner/src/FieldSourceFile.java
@@ -20,6 +20,7 @@
import java.util.List;
import java.util.TreeMap;
import java.util.Set;
import java.util.Map;
import java.util.TreeSet;
import java.util.logging.Logger;

@@ -115,6 +116,12 @@ public boolean hasDeckFiles()
return (deck_files != null);
}

@Override
public Map<Long, FileChannel> getDeckFiles()
{
return deck_files;
}


@Override
public void bulkRead(long word_index, ByteBuffer bb) throws java.io.IOException
40 changes: 23 additions & 17 deletions miner/src/LayerWorkThread.java
@@ -5,9 +5,12 @@
import java.nio.ByteBuffer;
import snowblossom.lib.*;
import snowblossom.proto.*;
import snowblossom.mining.proto.*;
import duckutil.TimeRecord;
import duckutil.TimeRecordAuto;

import java.util.Queue;


import java.util.logging.Level;
import java.util.logging.Logger;
@@ -29,12 +32,14 @@ public class LayerWorkThread extends Thread
FieldSource fs;
Arktika arktika;
Queue<PartialWork> queue;
long total_words;

public LayerWorkThread(Arktika arktika, FieldSource fs, Queue<PartialWork> queue)
public LayerWorkThread(Arktika arktika, FieldSource fs, Queue<PartialWork> queue, long total_words)
{
this.fs = fs;
this.arktika = arktika;
this.queue = queue;
this.total_words = total_words;
setName("LayerWorkThread(" + fs.toString() + ")");
setDaemon(true);
rnd = new Random();
@@ -62,12 +67,13 @@ private void runPass() throws Exception
}

private void processPw(PartialWork pw)
throws Exception
{
if (pw.passes_done == Globals.POW_LOOK_PASSES)
{
if (PowUtil.lessThanTarget(found_hash, wu.getReportTarget()))
if (PowUtil.lessThanTarget(pw.context, pw.wu.getReportTarget()))
{
String str = HashUtils.getHexString(found_hash);
String str = HexUtil.getHexString(pw.context);
logger.info("Found passable solution: " + str);
submitWork(pw);
}
@@ -80,7 +86,7 @@ private void processPw(PartialWork pw)
if (fs.skipQueueOnRehit() && (fs.hasChunk(chunk)))
{
pw.doPass(fs, md, total_words);
processPW(pw);
processPw(pw);
}
else
{
@@ -92,26 +98,26 @@ private void processPw(PartialWork pw)

private void submitWork(PartialWork pw) throws Exception
{
WorkUnit wu = pw.wu;
byte[] first_hash = PowUtil.hashHeaderBits(wu.getHeader(), nonce);
byte[] context = first_hash;


BlockHeader.Builder header = BlockHeader.newBuilder();
header.mergeFrom(wu.getHeader());
header.setNonce(ByteString.copyFrom(nonce));
header.setNonce(ByteString.copyFrom(pw.nonce));

byte[] word_buff = new byte[SnowMerkle.HASH_LEN];
ByteBuffer word_bb = ByteBuffer.wrap(word_buff);



for (int pass = 0; pass < Globals.POW_LOOK_PASSES; pass++)
{
word_bb.clear();

long word_idx = PowUtil.getNextSnowFieldIndex(context, merkle_proof.getTotalWords());
boolean gotData = merkle_proof.readWord(word_idx, word_bb, pass);
if (!gotData)
{
logger.log(Level.SEVERE, "readWord returned false on pass " + pass);
}
SnowPowProof proof = merkle_proof.getProof(word_idx);
long word_idx = PowUtil.getNextSnowFieldIndex(context, total_words);
arktika.composit_source.readWord(word_idx, word_bb);
SnowPowProof proof = ProofGen.getProof(arktika.composit_source, arktika.deck_source, word_idx, total_words);
header.addPowProof(proof);
context = PowUtil.getNextContext(context, word_buff);
}
@@ -124,17 +130,17 @@ private void submitWork(PartialWork pw) throws Exception
req.setWorkId(wu.getWorkId());
req.setHeader(header.build());

SubmitReply reply = blockingStub.submitWork( req.build());
SubmitReply reply = arktika.blockingStub.submitWork( req.build());

if (PowUtil.lessThanTarget(found_hash, header.getTarget()))
{
share_block_count.getAndIncrement();
arktika.share_block_count.getAndIncrement();
}
logger.info("Work submit: " + reply);
share_submit_count.getAndIncrement();
arktika.share_submit_count.getAndIncrement();
if (!reply.getSuccess())
{
share_reject_count.getAndIncrement();
arktika.share_reject_count.getAndIncrement();
}

}
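The constructor now takes the field's total word count instead of each thread deriving it, and the thread no longer owns its own stub or counters; it reaches through its Arktika reference for submission and stats. Hypothetical wiring, consistent with the all_sources and layer_to_queue_map fields declared in Arktika above (the actual thread setup is outside this diff; total_words stands for the selected field's word count):

    // One group of worker threads per storage layer, each bound to that
    // layer's field source and work queue.
    for (int layer = 0; layer < all_sources.length; layer++)
    {
      FieldSource fs = all_sources[layer];
      Queue<PartialWork> q = layer_to_queue_map.get(layer);
      new LayerWorkThread(this, fs, q, total_words).start();
    }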
135 changes: 135 additions & 0 deletions miner/src/ProofGen.java
@@ -0,0 +1,135 @@
package snowblossom.miner;

import java.security.MessageDigest;

import duckutil.TimeRecord;
import duckutil.TimeRecordAuto;
import java.nio.ByteBuffer;

import com.google.protobuf.ByteString;

import org.junit.Assert;

import snowblossom.lib.*;
import snowblossom.proto.*;

import java.util.LinkedList;
import java.util.List;


public class ProofGen
{
public static SnowPowProof getProof(FieldSource field_source, FieldSource deck_source, long word_index, long total_words) throws java.io.IOException
{
try (TimeRecordAuto tra = TimeRecord.openAuto("SnowMerkleProof.getProof"))
{
LinkedList<ByteString> partners = new LinkedList<ByteString>();

MessageDigest md;
try
{
md = MessageDigest.getInstance(Globals.SNOW_MERKLE_HASH_ALGO);
}
catch (java.security.NoSuchAlgorithmException e)
{
throw new RuntimeException(e);
}
getInnerProof(field_source, deck_source, md, partners, word_index, 0, total_words);

SnowPowProof.Builder builder = SnowPowProof.newBuilder();
builder.setWordIdx(word_index);
builder.addAllMerkleComponent(partners);

return builder.build();
}
}

/**
* If the target is not in specified subtree, return hash of subtree
* If the target is in the specified subtree, return null and add hash partner from
* opposite subtree to partners
*/
private static ByteString getInnerProof(
FieldSource field_source, FieldSource deck_source,
MessageDigest md, List<ByteString> partners,
long target_word_index, long start, long end) throws java.io.IOException
{
boolean inside = false;
if ((start <= target_word_index) && (target_word_index < end))
{
inside = true;
}

long dist = end - start;
if (!inside)
{
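// Deck files hold precomputed hashes of aligned subtrees, keyed by subtree
// size (dist); a hit lets us return the subtree hash without recursing.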
if (deck_source.getDeckFiles().containsKey(dist))
{
long deck_pos = SnowMerkle.HASH_LEN_LONG * (start / dist);
byte[] buff = new byte[SnowMerkle.HASH_LEN];
ByteBuffer bb = ByteBuffer.wrap(buff);

ChannelUtil.readFully(deck_source.getDeckFiles().get(dist), bb, deck_pos);

return ByteString.copyFrom(buff);
}

if (dist == 1)
{
byte[] buff = new byte[SnowMerkle.HASH_LEN];
ByteBuffer bb = ByteBuffer.wrap(buff);
field_source.bulkRead(start, bb);

return ByteString.copyFrom(buff);

}
long mid = (start + end) / 2;

ByteString left = getInnerProof(field_source, deck_source, md, partners, target_word_index, start, mid);
ByteString right = getInnerProof(field_source, deck_source, md, partners, target_word_index, mid, end);

md.update(left.toByteArray());
md.update(right.toByteArray());

byte[] hash = md.digest();

return ByteString.copyFrom(hash);

}
else
{ // we are inside

if (dist == 1)
{
byte[] buff = new byte[SnowMerkle.HASH_LEN];
ByteBuffer bb = ByteBuffer.wrap(buff);
field_source.bulkRead(start, bb);

partners.add(ByteString.copyFrom(buff));
return null;
}

long mid = (start + end) / 2;

ByteString left = getInnerProof(field_source, deck_source, md, partners, target_word_index, start, mid);
ByteString right = getInnerProof(field_source, deck_source, md, partners, target_word_index, mid, end);

if (target_word_index < mid)
{
partners.add(right);
Assert.assertNull(left);
}
else
{
partners.add(left);
Assert.assertNull(right);
}
return null;

}

}



}

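To make the recursion concrete, here is a toy trace (a worked example, not code from this commit) for an 8-word field proving word 5:

    // getInnerProof(..., target=5, 0, 8)  inside  -> recurse both halves
    //   (0,4) outside -> deck hit at dist=4 if that level has a deck file
    //                    (deck_pos = HASH_LEN_LONG * (0 / 4) = 0), else hash of words 0..3
    //   (4,8) inside  -> recurse
    //     (4,6) inside  -> recurse
    //       (4,5) outside, dist=1 -> word 4
    //       (5,6) inside,  dist=1 -> adds word 5 itself to partners
    //     (6,8) outside -> hash(word 6, word 7)
    // partners end up as [word5, word4, hash(6..7), hash(0..3)]:
    // exactly the merkle path a verifier needs to rebuild the root.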