129 changes: 129 additions & 0 deletions perf/bulkInsert.js
@@ -0,0 +1,129 @@
const {
DocumentStore,
DeleteDatabasesOperation,
CreateDatabaseOperation,
RequestExecutor
} = require("../dist");
const {
bench,
settings
} = require("./common");

const nodbStore = new DocumentStore(settings.urls);
nodbStore.initialize();

// RequestExecutor.requestPostProcessor = (req) => {
// req.proxy = "http://127.0.0.1:8888";
// };

const store = new DocumentStore(settings.urls, settings.database);
store.initialize();

class Order {
constructor(opts) {
if (opts) {
Object.assign(this, opts);
}
}
}

let data;

(async function main() {

const getBenchOpts = (n) => ({
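// before each bench run: generate `n` fresh sample orders and recreate the target database;
// after: drop the database so successive benchmarks stay isolated.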
async before() {
const dataGen = getData();
data = Array.from({ length: n }, () => new Order(dataGen.next().value));
try {
await nodbStore.maintenance.server.send(new DeleteDatabasesOperation({
databaseNames: [settings.database],
hardDelete: true
}));
} finally {
await nodbStore.maintenance.server.send(new CreateDatabaseOperation({
databaseName: settings.database
}));
}
},
async after() {
await nodbStore.maintenance.server.send(new DeleteDatabasesOperation({
databaseNames: [settings.database],
hardDelete: true
}));
}
});

try {
const name = "bulk-insert-2018-10-18-pipeline";
await bench(name, 10, bulkInsertPipeline, getBenchOpts(1000));
await bench(name, 50, bulkInsertPipeline, getBenchOpts(1000));
await bench(name, 100, bulkInsertPipeline, getBenchOpts(1000));
} finally {
store.dispose();
nodbStore.dispose();
}

}());

function randomDate() {
return new Date(2018, Math.floor(Math.random() * 11), Math.floor(Math.random() * 25));
}

function randomInt(max = 100) {
return Math.floor(Math.random() * max);
}

function* getData() {
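// Endlessly yields Northwind-style sample orders with randomized dates, freight and line items.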
let i = 1;
while (true) {
i++;
yield new Order({
"Id": "orders/" + i,
"Name": "Order #" + i,
"Company": "companies/58-A",
"Employee": "employees/2-A",
"Freight": randomInt(),
"Lines": [{
"Discount": 0,
"PricePerUnit": randomInt(),
"Product": "products/11-A",
"ProductName": "Queso Cabrales",
"Quantity": 10
},
{
"Discount": 0,
"PricePerUnit": 4.5,
"Product": "products/24-A",
"ProductName": "Guaraná Fantástica",
"Quantity": randomInt()
}
],
"OrderedAt": randomDate(),
"RequireAt": randomDate(),
"ShipTo": {
"City": "México D.F.",
"Country": "Mexico",
"Line1": "Calle Dr. Jorge Cash 321",
"Line2": null,
"Location": {
"Latitude": Math.random() * 100,
"Longitude": Math.random() * 100
},
"PostalCode": "05033",
"Region": null
},
"ShipVia": "shippers/2-A",
"ShippedAt": null
});
}
}

async function bulkInsertPipeline() {
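// Benchmark body: push every generated order through a single bulk-insert batch.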
const bulk = store.bulkInsert();
for (const item of data) {
await bulk.store(item);
}

await bulk.finish();
}
38 changes: 33 additions & 5 deletions perf/common.js
@@ -1,8 +1,36 @@
import { DocumentStore } from "../src";

// tslint:disable-next-line:no-var-requires
const settings = require("./settings.json");

export function getStore() {
return new DocumentStore(settings.urls, settings.database);
}
async function bench(name, attempts, run, opts) {
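// Times `attempts` sequential executions of `run`, with optional before/after hooks for setup and teardown.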
const benchName = `${name} x${attempts}`;

if (opts && opts.before) {
try {
await opts.before();
} catch (err) {
console.log("BENCH BEFORE", err);
}
}

console.time(benchName);
try {
console.profile(benchName);
for (let n = 0; n < attempts; n++) {
await run();
}
console.profileEnd(benchName);
} catch (err) {
console.log("BENCH ERROR", err);
}
console.timeEnd(benchName);

if (opts && opts.after) {
try {
await opts.after();
} catch (err) {
console.log("BENCH AFTER", err);
}
}
}

module.exports = { settings, bench };
23 changes: 2 additions & 21 deletions perf/loadPipeline.js
@@ -12,35 +12,19 @@ const streamValues = require("stream-json/streamers/StreamValues")
const StreamUtil = require("../dist/Utility/StreamUtil");
const stream = require("readable-stream");
const Asm = require('stream-json/Assembler');
const { bench } = require("./common");

const store = new DocumentStore("http://localhost:8080", "Perf");
store.initialize();

async function bench(name, attempts, run) {
const benchName = `${name} x${ attempts }`;
console.time(benchName);
for (let n = 0; n < attempts; n++) {
await run();
}
console.timeEnd(benchName);
}

(async function main() {
{
const name = "4.0.4-load-full-pipeline";
const name = "load-full-pipeline";
await bench(name, 10, loadPipeline);
await bench(name, 50, loadPipeline);
await bench(name, 100, loadPipeline);
}

// {
// const name = "stream-json-with-proper-casing";
// // enhancedStreamJson();
// await bench(name, 10, enhancedStreamJson);
// await bench(name, 50, enhancedStreamJson);
// await bench(name, 100, enhancedStreamJson);
// }

store.dispose();
}());

@@ -68,8 +52,6 @@ async function rawStreamJson() {
await donePromise;
}



async function enhancedStreamJson() {
const dataStream = fs.createReadStream("./data/load_data.json");
const streams = [
@@ -96,5 +78,4 @@ async function enhancedStreamJson() {
});
await StreamUtil.pipelineAsync(streams);
const result = await donePromise;
// console.log(JSON.stringify(result, null, 2));
}
17 changes: 17 additions & 0 deletions perf/results/bulk-insert.md
@@ -0,0 +1,17 @@
# BULK INSERT

## Current 4.0.3 - 2018-10-16

```
bulk-insert-2018-16-10-pipeline x10: 21635.223ms
```

## 4.0.4 optimizations - 2018-10-18

Switched to Buffer.concat() and removed redundant buffering logic.

```
bulk-insert-2018-16-10-pipeline x10: 2490.231ms
bulk-insert-2018-16-10-pipeline x50: 8280.333ms
bulk-insert-2018-16-10-pipeline x100: 15802.916ms
```
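
The note above is terse, so here is a rough sketch of the kind of change it describes. The helper names are made up for illustration and this is not the actual client code; the point is that collecting chunks and joining them once with `Buffer.concat()` replaces repeated re-allocation and copying on every append:

```js
// Hypothetical illustration only; not the actual RavenDB client internals.

// Before: grow the accumulator by allocating a new buffer and copying on every chunk.
function appendByCopy(acc, chunk) {
    const next = Buffer.alloc(acc.length + chunk.length);
    acc.copy(next, 0);             // copy everything accumulated so far
    chunk.copy(next, acc.length);  // then the new chunk
    return next;                   // repeated per chunk: quadratic copying overall
}

// After: keep chunks in an array and join them once at the end.
function joinOnce(chunks) {
    return Buffer.concat(chunks);  // single allocation and copy
}
```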