Skip to content

Commit

Permalink
D1 export api change (#6203)
Browse files Browse the repository at this point in the history
* Adding staging header for export

* Switched to snake_case for all D1 import/export actions

* Update .changeset/lovely-teachers-breathe.md

Co-authored-by: Carmen Popoviciu <cpopoviciu@cloudflare.com>

* fix: mock export API should expect current_bookmark not currentBookmark

* add small delay in recursive poll to avoid OOM within reasonable time periods

---------

Co-authored-by: Rahul Sethi <5822355+RamIdeas@users.noreply.github.com>
Co-authored-by: Carmen Popoviciu <cpopoviciu@cloudflare.com>
  • Loading branch information
3 people authored Jul 19, 2024
1 parent fa42d7c commit 5462ead
Show file tree
Hide file tree
Showing 5 changed files with 44 additions and 30 deletions.
5 changes: 5 additions & 0 deletions .changeset/lovely-teachers-breathe.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
---
"wrangler": patch
---

fix: Updating to match new D1 import/export API format
13 changes: 10 additions & 3 deletions packages/wrangler/src/__tests__/d1/export.test.ts
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
import fs from "fs";
import { setTimeout } from "node:timers/promises";
import { http, HttpResponse } from "msw";
import { mockAccountId, mockApiToken } from "../helpers/mock-account-id";
import { mockConsoleMethods } from "../helpers/mock-console";
Expand All @@ -9,7 +10,7 @@ import { runInTempDir } from "../helpers/run-in-tmp";
import { runWrangler } from "../helpers/run-wrangler";
import writeWranglerToml from "../helpers/write-wrangler-toml";

describe("execute", () => {
describe("export", () => {
mockAccountId({ accountId: null });
mockApiToken();
mockConsoleMethods();
Expand Down Expand Up @@ -117,10 +118,16 @@ describe("execute", () => {
http.post(
"*/accounts/:accountId/d1/database/:databaseId/export",
async ({ request }) => {
// This endpoint is polled recursively. If we respond immediately,
// the callstack builds up quickly leading to a hard-to-debug OOM error.
// This timeout ensures that if the endpoint is accidentally polled infinitely
// the test will time out before exhausting available memory
await setTimeout(10);

const body = (await request.json()) as Record<string, unknown>;

// First request, initiates a new task
if (!body.currentBookmark) {
if (!body.current_bookmark) {
return HttpResponse.json(
{
success: true,
Expand Down Expand Up @@ -151,7 +158,7 @@ describe("execute", () => {
status: "complete",
result: {
filename: "xxxx-yyyy.sql",
signedUrl: "https://example.com/xxxx-yyyy.sql",
signed_url: "https://example.com/xxxx-yyyy.sql",
},
messages: [
"Uploaded part 3",
Expand Down
20 changes: 10 additions & 10 deletions packages/wrangler/src/d1/execute.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -400,10 +400,10 @@ async function executeRemotely({
startMessage: "Checking if file needs uploading",
});

// An init response usually returns a {filename, uploadUrl} pair, except if we've detected that file
// An init response usually returns a {filename, upload_url} pair, except if we've detected that file
// already exists and is valid, to save people reuploading. In which case `initResponse` has already
// kicked the import process off.
const uploadRequired = "uploadUrl" in initResponse;
const uploadRequired = "upload_url" in initResponse;
if (!uploadRequired) {
logger.log(`🌀 File already uploaded. Processing.`);
}
Expand Down Expand Up @@ -432,29 +432,29 @@ async function executeRemotely({
}

const {
result: { numQueries, finalBookmark, meta },
result: { num_queries, final_bookmark, meta },
} = finalResponse;
logger.log(
`🚣 Executed ${numQueries} queries in ${(meta.duration / 1000).toFixed(
`🚣 Executed ${num_queries} queries in ${(meta.duration / 1000).toFixed(
2
)} seconds (${meta.rows_read} rows read, ${
meta.rows_written
} rows written)\n` +
chalk.gray(` Database is currently at bookmark ${finalBookmark}.`)
chalk.gray(` Database is currently at bookmark ${final_bookmark}.`)
);

return [
{
results: [
{
"Total queries executed": numQueries,
"Total queries executed": num_queries,
"Rows read": meta.rows_read,
"Rows written": meta.rows_written,
"Database size (MB)": (meta.size_after / 1_000_000).toFixed(2),
},
],
success: true,
finalBookmark,
finalBookmark: final_bookmark,
meta,
},
];
Expand All @@ -474,12 +474,12 @@ async function uploadAndBeginIngestion(
etag: string,
initResponse: ImportInitResponse
) {
const { uploadUrl, filename } = initResponse;
const { upload_url, filename } = initResponse;

const { size } = await fs.stat(file);

const uploadResponse = await spinnerWhile({
promise: fetch(uploadUrl, {
promise: fetch(upload_url, {
method: "PUT",
headers: {
"Content-length": `${size}`,
Expand Down Expand Up @@ -542,7 +542,7 @@ async function pollUntilComplete(
"import",
{
action: "poll",
currentBookmark: response.at_bookmark,
current_bookmark: response.at_bookmark,
}
);
return await pollUntilComplete(newResponse, accountId, db);
Expand Down
24 changes: 14 additions & 10 deletions packages/wrangler/src/d1/export.ts
Original file line number Diff line number Diff line change
Expand Up @@ -171,8 +171,8 @@ async function exportRemotely(

logger.log(`🌀 Executing on remote database ${name} (${db.uuid}):`);
const dumpOptions = {
noSchema,
noData,
no_schema: noSchema,
no_data: noData,
tables,
};

Expand All @@ -189,14 +189,14 @@ async function exportRemotely(

logger.log(
chalk.gray(
`You can also download your export from the following URL manually. This link will be valid for one hour: ${finalResponse.result.signedUrl}`
`You can also download your export from the following URL manually. This link will be valid for one hour: ${finalResponse.result.signed_url}`
)
);

await spinnerWhile({
startMessage: `Downloading SQL to ${output}`,
async promise() {
const contents = await fetch(finalResponse.result.signedUrl);
const contents = await fetch(finalResponse.result.signed_url);
await fs.writeFile(output, contents.body || "");
},
});
Expand All @@ -209,8 +209,8 @@ async function pollExport(
db: Database,
dumpOptions: {
tables: string[];
noSchema?: boolean;
noData?: boolean;
no_schema?: boolean;
no_data?: boolean;
},
currentBookmark: string | undefined,
num_parts_uploaded = 0
Expand All @@ -219,10 +219,14 @@ async function pollExport(
`/accounts/${accountId}/d1/database/${db.uuid}/export`,
{
method: "POST",
headers: {
...(db.internal_env ? { "x-d1-internal-env": db.internal_env } : {}),
"content-type": "application/json",
},
body: JSON.stringify({
outputFormat: "polling",
dumpOptions,
currentBookmark,
output_format: "polling",
dump_options: dumpOptions,
current_bookmark: currentBookmark,
}),
}
);
Expand All @@ -245,7 +249,7 @@ async function pollExport(
return response;
} else if (response.status === "error") {
throw new APIError({
text: response.errors.join("\n"),
text: response.error,
notes: response.messages.map((text) => ({ text })),
});
} else {
Expand Down
12 changes: 5 additions & 7 deletions packages/wrangler/src/d1/types.ts
Original file line number Diff line number Diff line change
Expand Up @@ -106,9 +106,8 @@ export interface D1QueriesGraphQLResponse {
}

export type ImportInitResponse = {
success: true;
filename: string;
uploadUrl: string;
upload_url: string;
};
export type ImportPollingResponse = {
success: true;
Expand All @@ -123,9 +122,8 @@ export type ImportPollingResponse = {
| {
status: "complete";
result: {
success: boolean;
finalBookmark: string;
numQueries: number;
final_bookmark: string;
num_queries: number;
meta: {
served_by: string;
duration: number;
Expand All @@ -145,14 +143,14 @@ export type ExportPollingResponse = {
type: "export";
at_bookmark: string;
messages: string[];
errors: string[];
error: string;
} & (
| {
status: "active" | "error";
}
| {
status: "complete";
result: { filename: string; signedUrl: string };
result: { filename: string; signed_url: string };
}
);

Expand Down

0 comments on commit 5462ead

Please sign in to comment.