[plot] Add plot.html
tanghaibao committed Jul 7, 2018
1 parent efc52af commit a1cafed
Showing 2 changed files with 128 additions and 0 deletions.
7 changes: 7 additions & 0 deletions README.md
@@ -86,4 +86,11 @@ allhic build T4_Chr/{prunning.sub.tour,seq.fasta}
## WIP features

- [x] Add restriction enzyme for better normalization of contig lengths
- [ ] Translate "prune" from C++ code to golang
- [ ] Add partition split inside "partition"
- [ ] Isolate matrix generation to "plot"
- [ ] Add "pipeline" to simplify execution
- [ ] Use clustering when k = 1
- [ ] Compare numerical output with Lachesis
- [ ] Improve Ler0 results
- [ ] Add test suites
121 changes: 121 additions & 0 deletions plot.html
@@ -0,0 +1,121 @@
<html>
<script src="https://d3js.org/d3.v5.min.js"></script>
<script>

// Borrowed from gist: https://gist.github.com/nvictus/88b3b5bfe587d32ac1ab519fd0009607
// Client-side parser for .npy files
// See the specification: http://docs.scipy.org/doc/numpy-dev/neps/npy-format.html
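// An .npy (format version 1.x) file starts with a 10-byte preamble: the magic
// string "\x93NUMPY", a 2-byte version, and a 2-byte little-endian header length.
// A Python-dict header giving 'descr', 'fortran_order' and 'shape' follows, then
// the raw array data. fromArrayBuffer() below walks this layout.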
var NumpyLoader = (function () {
function asciiDecode(buf) {
return String.fromCharCode.apply(null, new Uint8Array(buf));
}

function readUint16LE(buffer) {
var view = new DataView(buffer);
var val = view.getUint8(0);
val |= view.getUint8(1) << 8;
return val;
}

function fromArrayBuffer(buf) {
// Check the magic number
var magic = asciiDecode(buf.slice(0,6));
if (magic.slice(1,6) != 'NUMPY') {
throw new Error('unknown file type');
}

var version = new Uint8Array(buf.slice(6,8)),
headerLength = readUint16LE(buf.slice(8,10)),
headerStr = asciiDecode(buf.slice(10, 10+headerLength)),
offsetBytes = 10 + headerLength;
//rest = buf.slice(10+headerLength); XXX -- This makes a copy!!! https://www.khronos.org/registry/typedarray/specs/latest/#5

// Hacky conversion of dict literal string to JS Object
eval("var info = " + headerStr.toLowerCase().replace('(','[').replace('),',']'));

// Interpret the bytes according to the specified dtype
var data;
if (info.descr === "|u1") {
data = new Uint8Array(buf, offsetBytes);
} else if (info.descr === "|i1") {
data = new Int8Array(buf, offsetBytes);
} else if (info.descr === "<u2") {
data = new Uint16Array(buf, offsetBytes);
} else if (info.descr === "<i2") {
data = new Int16Array(buf, offsetBytes);
} else if (info.descr === "<u4") {
data = new Uint32Array(buf, offsetBytes);
} else if (info.descr === "<i4") {
data = new Int32Array(buf, offsetBytes);
} else if (info.descr === "<f4") {
data = new Float32Array(buf, offsetBytes);
} else if (info.descr === "<f8") {
data = new Float64Array(buf, offsetBytes);
} else {
throw new Error('unknown numeric dtype')
}

return {
shape: info.shape,
fortran_order: info.fortran_order,
data: data
};
}

function open(file, callback) {
var reader = new FileReader();
reader.onload = function() {
// the file contents have been read as an array buffer
var buf = reader.result;
var ndarray = fromArrayBuffer(buf);
callback(ndarray);
};
reader.readAsArrayBuffer(file);
}

function ajax(url, callback) {
var xhr = new XMLHttpRequest();
xhr.onload = function(e) {
var buf = xhr.response; // not responseText
var ndarray = fromArrayBuffer(buf);
callback(ndarray);
};
xhr.open("GET", url, true);
xhr.responseType = "arraybuffer";
xhr.send(null);
}

return {
open: open,
ajax: ajax
};
})();
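
// Usage sketch (not part of the original commit): besides the fetch()/FileReader
// path used in main() below, NumpyLoader.ajax() can load and parse a .npy file
// directly over XHR, e.g.
//   NumpyLoader.ajax("data.npy", ndarray => console.log(ndarray.shape, ndarray.data));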

function openFile(filename) {
// Parse .json responses as JSON; return anything else (e.g. .npy) as a Blob.
if (filename.endsWith(".json")) {
return fetch(filename)
.then(response => response.json());
} else {
return fetch(filename)
.then(response => response.blob());
}
};

async function main() {
let genome = await openFile("genome.json");
let blob = await openFile("data.npy");
// NumpyLoader.open() is callback-based, so wrap it in a Promise before awaiting the parsed array
let data = await new Promise(resolve => NumpyLoader.open(blob, resolve));
console.log(genome);
console.log(data);
}

</script>
<head>
</head>
<body>
<button onclick="main()">
Plot
</button>
</body>
</html>
