fix: D1 execute and backup commands improvements (#2107)
* fix: D1 execute and backup commands improvements

- Better, faster handling when importing big SQL files with `execute --file`
- Increased visibility during imports: output is now logged with each batched API call
- Backups are now downloaded to the directory from which wrangler was invoked

* Fixes lint errors

* Fixes formatting issues

* Fixes linter warning

Co-authored-by: Celso Martinho <celso@cloudflare.com>
celso and Celso Martinho committed Nov 2, 2022
1 parent 9f509bc commit 511943e
Showing 3 changed files with 72 additions and 52 deletions.
9 changes: 9 additions & 0 deletions .changeset/d1-execute.perf.md
@@ -0,0 +1,9 @@
---
"wrangler": minor
---

fix: D1 execute and backup commands improvements

- Better, faster handling when importing big SQL files with `execute --file`
- Increased visibility during imports: output is now logged with each batched API call
- Backups are now downloaded to the directory from which wrangler was invoked
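
For context on the first bullet: imports are now split into batches by statement count rather than by byte size, as the QUERY_LIMIT change in execute.tsx below shows. A minimal standalone sketch of that strategy, assuming statements arrive as plain strings (the names here are hypothetical, not from the commit):

function chunkStatements(statements: string[], limit: number): string[] {
  // Group consecutive statements into batches of at most `limit`,
  // then join each group back into a single SQL string.
  const batches: string[] = [];
  for (let i = 0; i < statements.length; i += limit) {
    batches.push(statements.slice(i, i + limit).join("; "));
  }
  return batches;
}

// 25_000 statements with a limit of 10_000 yields 3 batches: 10_000, 10_000 and 5_000.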
16 changes: 11 additions & 5 deletions packages/wrangler/src/d1/backups.tsx
@@ -1,4 +1,5 @@
import fs from "node:fs/promises";
import * as path from "path";
import { render } from "ink";
import Table from "ink-table";
import React from "react";
@@ -189,15 +190,20 @@ export const DownloadHandler = withConfig<BackupDownloadArgs>(
accountId,
name
);
const filename = output || `./${name}.${backupId.slice(0, 8)}.sqlite3`;

console.log(`Downloading backup ${backupId} of ${name} to: ${filename}`);
const filename =
output ||
path.join(
process.env.INIT_CWD as string,
`${name}.${backupId.slice(0, 8)}.sqlite3`
);

logger.log(`🌀 Downloading backup ${backupId} from '${name}'`);
const response = await getBackupResponse(accountId, db.uuid, backupId);
console.log(`Got file. Saving...`);
logger.log(`🌀 Saving to ${filename}`);
// TODO: stream this once we upgrade to Node18 and can use Writable.fromWeb
const buffer = await response.arrayBuffer();
await fs.writeFile(filename, new Buffer(buffer));
console.log(`Done! Wrote ${filename} (${formatBytes(buffer.byteLength)})`);
logger.log(`🌀 Done!`);
}
);
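
The path resolution above keys off INIT_CWD, which npm and yarn set to the directory the user launched the command from, so backups land where wrangler was invoked rather than wherever the process cwd happens to point. A minimal sketch of the same logic; the process.cwd() fallback is an assumption added here, since the commit itself casts INIT_CWD as string:

import * as path from "node:path";

function resolveBackupPath(
  name: string,
  backupId: string,
  output?: string
): string {
  // Prefer the directory wrangler was launched from; fall back to the
  // process working directory (the fallback is not part of the commit).
  const baseDir = process.env.INIT_CWD ?? process.cwd();
  return output ?? path.join(baseDir, `${name}.${backupId.slice(0, 8)}.sqlite3`);
}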

99 changes: 52 additions & 47 deletions packages/wrangler/src/d1/execute.tsx
@@ -1,7 +1,6 @@
import { existsSync } from "node:fs";
import { mkdir } from "node:fs/promises";
import path from "node:path";
import chalk from "chalk";
import { render, Static, Text } from "ink";
import Table from "ink-table";
import { npxImport } from "npx-import";
@@ -52,7 +51,7 @@ type QueryResult = {
query?: string;
};
// Max number of bytes to send in a single /execute call
const QUERY_LIMIT = 1_000_000; // 1MB
const QUERY_LIMIT = 10_000;

export function Options(yargs: Argv): Argv<ExecuteArgs> {
return Name(yargs)
@@ -119,28 +118,23 @@ export const Handler = withConfig<ExecuteArgs>(
if (!response) return;

if (isInteractive) {
// Render table if single result
render(
<Static items={response}>
{(result) => {
const { results, duration, query } = result;
// batch results
if (!Array.isArray(result)) {
const { results, query } = result;

if (Array.isArray(results) && results.length > 0) {
const shortQuery = shorten(query, 48);
return (
<>
{shortQuery ? <Text dimColor>{shortQuery}</Text> : null}
<Table data={results}></Table>
</>
);
} else {
const shortQuery = shorten(query, 24);
return (
<Text>
Executed{" "}
{shortQuery ? <Text dimColor>{shortQuery}</Text> : "command"}{" "}
in {duration}ms.
</Text>
);
if (Array.isArray(results) && results.length > 0) {
const shortQuery = shorten(query, 48);
return (
<>
{shortQuery ? <Text dimColor>{shortQuery}</Text> : null}
<Table data={results}></Table>
</>
);
}
}
}}
</Static>
@@ -187,7 +181,7 @@ async function executeLocally(
await mkdir(dbDir, { recursive: true });
}

console.log(`Loading DB at ${readableRelative(dbPath)}`);
logger.log(`🌀 Loading DB at ${readableRelative(dbPath)}`);
const db = await createSQLiteDB(dbPath);

const results: QueryResult[] = [];
@@ -206,17 +200,14 @@ async function executeRemotely(
batches: string[]
) {
if (batches.length > 1) {
const warning =
chalk.red(`WARNING! `) +
`Too much SQL to send at once, this execution will be sent as ${batches.length} batches.`;
const warning = `⚠️ Too much SQL to send at once, this execution will be sent as ${batches.length} batches.`;

if (isInteractive) {
const ok = await confirm(
`${warning}\nNOTE: each batch is sent individually and may leave your DB in an unexpected state if a later batch fails.\n${chalk.green(
`Make sure you have a recent backup.`
)}\nOk to proceed?`
`${warning}\nℹ️ Each batch is sent individually and may leave your DB in an unexpected state if a later batch fails.\n⚠️ Make sure you have a recent backup. Ok to proceed?`
);
if (!ok) return null;
logger.log(`🌀 Let's go`);
} else {
console.error(warning);
}
@@ -230,32 +221,45 @@
);

if (isInteractive) {
console.log(`Executing on ${name} (${db.uuid}):`);
logger.log(`🌀 Executing on ${name} (${db.uuid}):`);
} else {
// Pipe to error so we don't break jq
console.error(`Executing on ${name} (${db.uuid}):`);
}

const results: QueryResult[] = [];
for (const sql of batches) {
results.push(
...(await fetchResult<QueryResult[]>(
`/accounts/${accountId}/d1/database/${db.uuid}/query`,
{
method: "POST",
headers: {
"Content-Type": "application/json",
},
body: JSON.stringify({ sql }),
}
))
const result = await fetchResult<QueryResult[]>(
`/accounts/${accountId}/d1/database/${db.uuid}/query`,
{
method: "POST",
headers: {
"Content-Type": "application/json",
},
body: JSON.stringify({ sql }),
}
);
result.map(logResult);
results.push(...result);
}
return results;
}
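
Each loop iteration above issues one POST to the D1 query endpoint through wrangler's fetchResult helper and logs the batch result as it lands. A rough plain-fetch equivalent of a single call; the v4 response envelope and the token handling are assumptions here, not taken from the commit:

async function queryD1(
  accountId: string,
  databaseId: string,
  sql: string
): Promise<QueryResult[]> {
  const res = await fetch(
    `https://api.cloudflare.com/client/v4/accounts/${accountId}/d1/database/${databaseId}/query`,
    {
      method: "POST",
      headers: {
        "Content-Type": "application/json",
        Authorization: `Bearer ${process.env.CLOUDFLARE_API_TOKEN}`,
      },
      body: JSON.stringify({ sql }),
    }
  );
  // Cloudflare's v4 API wraps payloads in { result, success, errors };
  // only `result` is picked out here.
  const json = (await res.json()) as { result: QueryResult[] };
  return json.result;
}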

function logResult(r: QueryResult | QueryResult[]) {
logger.log(
`🚣 Executed ${Array.isArray(r) ? r.length : "1"} command(s) in ${
Array.isArray(r)
? r
.map((d: QueryResult) => d.duration)
.reduce((a: number, b: number) => a + b, 0)
: r.duration
}ms`
);
}
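
To make logResult concrete, a hypothetical invocation, assuming the remaining QueryResult fields match how they are used above:

const batch = [
  { results: [], duration: 12.5, query: "SELECT 1" },
  { results: [], duration: 7.5, query: "SELECT 2" },
] as QueryResult[];

logResult(batch);
// -> 🚣 Executed 2 command(s) in 20ms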

function splitSql(splitter: (query: SQLQuery) => SQLQuery[], sql: SQLQuery) {
// We have no interpolations, so convert everything to text
logger.log(`🌀 Mapping SQL input into an array of statements`);
return splitter(sql).map(
(q) =>
q.format({
@@ -267,16 +271,17 @@ function splitSql(splitter: (query: SQLQuery) => SQLQuery[], sql: SQLQuery) {

function batchSplit(splitter: typeof splitSqlQuery, sql: SQLQuery) {
const queries = splitSql(splitter, sql);

logger.log(`🌀 Parsing ${queries.length} statements`);
const batches: string[] = [];
for (const query of queries) {
const last = batches.at(-1);
if (!last || last.length + query.length > QUERY_LIMIT) {
batches.push(query);
} else {
batches.splice(-1, 1, [last, query].join("; "));
}
const nbatches = Math.floor(queries.length / QUERY_LIMIT);
for (let i = 0; i <= nbatches; i++) {
batches.push(
queries.slice(i * QUERY_LIMIT, (i + 1) * QUERY_LIMIT).join("; ")
);
}
logger.log(
`🌀 We are sending ${batches.length} batch(es) to D1 (limited to ${QUERY_LIMIT} statements per batch)`
);
return batches;
}
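
One property of the chunk arithmetic above worth noting: when queries.length is an exact multiple of QUERY_LIMIT, the floor-based count plus the `i <= nbatches` loop produces one final empty slice, so an empty-string batch is appended. A standalone illustration with hypothetical numbers:

const queries = new Array(20_000).fill("SELECT 1");
const limit = 10_000;
const nbatches = Math.floor(queries.length / limit); // 2
const batches: string[] = [];
for (let i = 0; i <= nbatches; i++) {
  batches.push(queries.slice(i * limit, (i + 1) * limit).join("; "));
}
// batches.length === 3, and batches[2] === ""

Guarding with `i * limit < queries.length`, or stepping the index by the limit as in the sketch after the changeset above, would skip the empty batch.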

