chore: jsr deployment
load1n9 committed Feb 8, 2024
1 parent d24b78c commit c0c6586
Showing 45 changed files with 576 additions and 313 deletions.
6 changes: 3 additions & 3 deletions Cargo.lock

Some generated files are not rendered by default.

5 changes: 3 additions & 2 deletions README.md
@@ -55,7 +55,8 @@
### QuickStart

This example shows how to train a neural network to predict the output of the
-XOR function our speedy CPU backend written in [Rust](https://www.rust-lang.org/).
+XOR function our speedy CPU backend written in
+[Rust](https://www.rust-lang.org/).

```typescript
import {
@@ -253,7 +254,7 @@ console.log(`1 xor 1 = ${out4[0]} (should be close to 0)`);
### Documentation

The full documentation for Netsaur can be found
-[here](https://deno.land/x/netsaur@0.3.0/mod.ts).
+[here](https://deno.land/x/netsaur@0.3.1/mod.ts).

### License

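The QuickStart code block that this README hunk reflows is collapsed in the diff view above. For context, here is a minimal sketch of what an XOR training script against netsaur 0.3.1 roughly looks like, pieced together from the imports and train/predict calls visible elsewhere in this commit; the layer configuration, epoch count, and variable names are illustrative assumptions, not the exact contents of the collapsed block.

```typescript
// Sketch only: reconstructed from imports/calls visible in this commit.
// Layer sizes, Cost.MSE, and the epoch count are assumptions, not the
// repository's exact QuickStart example.
import {
  AUTO,
  Cost,
  DenseLayer,
  Sequential,
  setupBackend,
  SigmoidLayer,
  tensor1D,
  tensor2D,
} from "https://deno.land/x/netsaur@0.3.1/mod.ts";

// Pick the best available backend (CPU FFI, WASM, or GPU).
await setupBackend(AUTO);

// A tiny network: 2 inputs -> 3 hidden sigmoid units -> 1 sigmoid output.
const net = new Sequential({
  size: [4, 2],
  layers: [
    DenseLayer({ size: [3] }),
    SigmoidLayer(),
    DenseLayer({ size: [1] }),
    SigmoidLayer(),
  ],
  cost: Cost.MSE,
});

// Train on the four XOR input/output pairs.
net.train(
  [
    {
      inputs: tensor2D([[0, 0], [1, 0], [0, 1], [1, 1]]),
      outputs: tensor2D([[0], [1], [1], [0]]),
    },
  ],
  10000,
);

// Predict a single sample; [1, 1] should map to a value close to 0.
const out = (await net.predict(tensor1D([1, 1]))).data;
console.log(`1 xor 1 = ${out[0]} (should be close to 0)`);
```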
8 changes: 5 additions & 3 deletions crates/README.md
@@ -4,6 +4,8 @@ This directory contains the source code for the Netsaur Rust crates.

## Crates

-* [core](/core) - The main crate for the Netsaur FFI and wasm bindings.
-* [core-gpu](/core-gpu) - The main crate for the Netsaur GPU FFI and wasm bindings.
-* [tokenizers](/tokenizers) - The main crate for the Netsaur tokenizers wasm bindings.
+- [core](/core) - The main crate for the Netsaur FFI and wasm bindings.
+- [core-gpu](/core-gpu) - The main crate for the Netsaur GPU FFI and wasm
+  bindings.
+- [tokenizers](/tokenizers) - The main crate for the Netsaur tokenizers wasm
+  bindings.
2 changes: 1 addition & 1 deletion crates/core-gpu/Cargo.toml
@@ -1,7 +1,7 @@
[package]
edition = "2021"
name = "netsaur-gpu"
version = "0.3.0"
version = "0.3.1"

[lib]
crate-type = ["cdylib"]
2 changes: 1 addition & 1 deletion crates/core/Cargo.toml
@@ -1,7 +1,7 @@
[package]
edition = "2021"
name = "netsaur"
version = "0.3.0"
version = "0.3.1"

[lib]
crate-type = ["cdylib"]
2 changes: 1 addition & 1 deletion crates/tokenizers/Cargo.toml
@@ -1,7 +1,7 @@
[package]
edition = "2021"
name = "netsaur-tokenizers"
version = "0.3.0"
version = "0.3.1"

[lib]
crate-type = ["cdylib"]
2 changes: 1 addition & 1 deletion data/data.ts
@@ -21,7 +21,7 @@ export class Data {
/**
* Load data from a CSV file or URL containing CSV data.
*/
-static async csv(url: string | URL, config?: CsvLoaderConfig) {
+static async csv(url: string | URL, config?: CsvLoaderConfig): Promise<Data> {
return new Data(await loadCsv(url, config));
}
}
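The added `Promise<Data>` annotation makes the return type of `Data.csv` explicit, presumably to satisfy JSR's explicit-type ("no slow types") requirement for published packages. A hypothetical usage sketch; the URL and the `Species` column are placeholders, and `Data` is assumed to be re-exported from `data/mod.ts` (the `./data` entry point added in `deno.json` below):

```typescript
// Hypothetical usage of Data.csv after this change. The URL and "Species"
// column are made up; the label flag mirrors the config.columns?.[col]?.label
// check visible in data/datasets/csv.ts below.
import { Data } from "https://deno.land/x/netsaur@0.3.1/data/mod.ts";

const data: Data = await Data.csv("https://example.com/iris.csv", {
  columns: { Species: { label: true } },
});
```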
4 changes: 2 additions & 2 deletions data/datasets/csv.ts
@@ -33,7 +33,7 @@ export async function loadCsv(
let columnIndices!: Record<string, number>;
for await (const row of data) {
if (!columnNames) {
-columnNames = row;
+columnNames = row as string[];
columnIndices = columnNames.reduce((acc, col, i) => {
acc[col] = i;
return acc;
@@ -45,7 +45,7 @@
for (const col in columnIndices) {
const colConfig = config.columns?.[col];
const i = columnIndices[col];
-const value = row[i];
+const value = (row as string[])[i];
if (colConfig?.label) {
y.push(Number(value));
} else {
2 changes: 1 addition & 1 deletion data/deps.ts
@@ -1 +1 @@
export { CsvParseStream } from "https://deno.land/std@0.214.0/csv/csv_parse_stream.ts";
export { CsvParseStream } from "jsr:@std/csv@0.214.0";
7 changes: 7 additions & 0 deletions deno.json
@@ -1,4 +1,11 @@
{
"name": "@denosaurs/netsaur",
"version": "0.3.1",
"exports": {
".": "./mod.ts",
"./tokenizers": "./tokenizers/mod.ts",
"./data": "./data/mod.ts"
},
"tasks": {
"example:xor": "deno run -A --unstable-ffi ./examples/xor_auto.ts",
"example:xor-option": "deno run -A --unstable-ffi ./examples/xor_option.ts",
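With `name`, `version`, and `exports` declared above, the package can be consumed through JSR specifiers once published. A consumer-side sketch; the imported symbols are assumptions about what each entry point re-exports, not something this diff confirms:

```typescript
// Hypothetical consumer imports once @denosaurs/netsaur is published to JSR.
// The specifiers mirror the "exports" map added above; the imported names are
// assumed to be re-exported by mod.ts, tokenizers/mod.ts, and data/mod.ts.
import { AUTO, Sequential, setupBackend } from "jsr:@denosaurs/netsaur";
import { Tokenizer } from "jsr:@denosaurs/netsaur/tokenizers";
import { Data } from "jsr:@denosaurs/netsaur/data";

await setupBackend(AUTO);
```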
195 changes: 194 additions & 1 deletion deno.lock

Large diffs are not rendered by default.

4 changes: 2 additions & 2 deletions deps.ts
@@ -1,2 +1,2 @@
export { dlopen } from "https://deno.land/x/plug@1.0.3/mod.ts";
export type { FetchOptions } from "https://deno.land/x/plug@1.0.3/mod.ts";
export { dlopen } from "jsr:@denosaurs/plug@1.0.3";
export type { FetchOptions } from "jsr:@denosaurs/plug@1.0.3";
5 changes: 3 additions & 2 deletions examples/classification/README.md
@@ -1,3 +1,4 @@
# Binary Classification
-This example showcases binary classification on the Iris dataset.
-The `Iris Virginica` class is omitted for this example.
+
+This example showcases binary classification on the Iris dataset. The
+`Iris Virginica` class is omitted for this example.
2 changes: 1 addition & 1 deletion examples/classification/binary_iris.ts
@@ -75,7 +75,7 @@ net.train(
150,
1,
// Use a smaller learning rate
-0.02
+0.02,
);

console.log(`training time: ${performance.now() - time}ms`);
16 changes: 8 additions & 8 deletions examples/classification/iris.ts
@@ -16,12 +16,12 @@ import { parse } from "https://deno.land/std@0.204.0/csv/parse.ts";

// Import helpers for metrics
import {
-ClassificationReport,
-// Split the dataset
-useSplit,
// One-hot encoding of targets
CategoricalEncoder,
+ClassificationReport,
Matrix,
+// Split the dataset
+useSplit,
} from "https://deno.land/x/vectorizer@v0.3.7/mod.ts";

// Read the training dataset
@@ -40,7 +40,7 @@ const y = encoder.fit(y_pre).transform(y_pre, "f32");
// @ts-ignore Matrices can be split
const [train, test] = useSplit({ ratio: [7, 3], shuffle: true }, x, y) as [
[typeof x, typeof y],
-[typeof x, typeof y]
+[typeof x, typeof y],
];

// Setup the CPU backend for Netsaur
@@ -87,7 +87,7 @@ net.train(
// Train for 300 epochs
400,
1,
-0.02
+0.02,
);

console.log(`training time: ${performance.now() - time}ms`);
@@ -96,8 +96,8 @@ console.log(`training time: ${performance.now() - time}ms`);
const res = await net.predict(tensor2D(test[0]));
const y1 = encoder.untransform(
CategoricalEncoder.fromSoftmax(
-new Matrix(res.data, [res.shape[0], res.shape[1]])
-)
+new Matrix(res.data, [res.shape[0], res.shape[1]]),
+),
);
const y0 = encoder.untransform(test[1]);

@@ -106,5 +106,5 @@ const cMatrix = new ClassificationReport(y0, y1);
console.log(cMatrix);
console.log(
"Total Accuracy: ",
-y1.filter((x, i) => x === y0[i]).length / y1.length
+y1.filter((x, i) => x === y0[i]).length / y1.length,
);
7 changes: 3 additions & 4 deletions examples/classification/spam.ts
@@ -35,7 +35,7 @@ const y = data.map((msg) => ymap.indexOf(msg[0]));
// Split the dataset for training and testing
const [train, test] = useSplit({ ratio: [7, 3], shuffle: true }, x, y) as [
[typeof x, typeof y],
-[typeof x, typeof y]
+[typeof x, typeof y],
];

// Vectorize the text messages
@@ -88,7 +88,7 @@ net.train(
// Train for 20 epochs
20,
2,
-0.01
+0.01,
);

console.log(`training time: ${performance.now() - time}ms`);
@@ -97,7 +97,6 @@ const x_vec_test = vec.transform(test[0]);

// Calculate metrics
const res = await net.predict(tensor(x_vec_test.data, x_vec_test.shape));
-const y1 = res.data.map(i => i < 0.5 ? 0 : 1)
+const y1 = res.data.map((i) => i < 0.5 ? 0 : 1);
const cMatrix = new ClassificationReport(test[1], y1);
console.log("Confusion Matrix: ", cMatrix);

4 changes: 2 additions & 2 deletions examples/linear.ts
@@ -70,7 +70,7 @@ network.train(
/**
* The learning rate is set to 0.01.
*/
-0.01
+0.01,
);

console.log("training time", performance.now() - start, " milliseconds");
@@ -84,6 +84,6 @@ for (const [i, res] of predicted.data.entries()) {
console.log(
`input: ${testData[i]}\noutput: ${res.toFixed(2)}\nexpected: ${
2 * testData[i] + 1
-}\n`
+}\n`,
);
}
4 changes: 2 additions & 2 deletions examples/mnist/predict.ts
@@ -32,8 +32,8 @@ let correct = 0;
for (const test of testSet) {
const prediction = argmax(
await network.predict(
-tensor(test.inputs.data, [1, ...test.inputs.shape] as Shape[keyof Shape])
-)
+tensor(test.inputs.data, [1, ...test.inputs.shape] as Shape[keyof Shape]),
+),
);
const expected = argmax(test.outputs as Tensor<Rank>);
if (expected === prediction) {
2 changes: 1 addition & 1 deletion examples/mnist/train_batchnorm.ts
@@ -55,4 +55,4 @@ const start = performance.now();
network.train(trainSet, epochs, 1, 0.01);
console.log("Training complete!", performance.now() - start);

-network.saveFile("examples/mnist/mnist.test.st");
\ No newline at end of file
+network.saveFile("examples/mnist/mnist.test.st");
19 changes: 9 additions & 10 deletions examples/visualize.ipynb
@@ -63,19 +63,19 @@
}
],
"source": [
"import { tensor1D } from \"https://deno.land/x/netsaur@0.3.0/mod.ts\";\n",
"import { Visualizer } from \"https://deno.land/x/netsaur@0.3.0/visualizer/mod.ts\";\n",
"import { tensor1D } from \"https://deno.land/x/netsaur@0.3.1/mod.ts\";\n",
"import { Visualizer } from \"https://deno.land/x/netsaur@0.3.1/visualizer/mod.ts\";\n",
"\n",
"import {\n",
" Cost,\n",
" AUTO,\n",
" Cost,\n",
" DenseLayer,\n",
" Sequential,\n",
" setupBackend,\n",
" SigmoidLayer,\n",
" tensor2D,\n",
"} from \"https://deno.land/x/netsaur@0.3.0/mod.ts\";\n",
" \n",
"} from \"https://deno.land/x/netsaur@0.3.1/mod.ts\";\n",
"\n",
"await setupBackend(AUTO);\n",
"\n",
"const net = new Sequential({\n",
@@ -107,10 +107,9 @@
" ],\n",
" 1000000,\n",
");\n",
" \n",
"\n",
"const visualizer = new Visualizer(\"XOR Example\");\n",
"await visualizer.graph(net,\n",
" [\n",
"await visualizer.graph(net, [\n",
" tensor1D([0, 0]),\n",
" tensor1D([1, 0]),\n",
" tensor1D([0, 1]),\n",
@@ -119,8 +118,8 @@
" tensor1D([0]),\n",
" tensor1D([1]),\n",
" tensor1D([1]),\n",
" tensor1D([0])\n",
"])"
" tensor1D([0]),\n",
"]);"
]
}
],
