
Commit

Address issues from code review
zner0L committed Feb 2, 2024
1 parent fcfc1df commit efc69ae
Showing 4 changed files with 28 additions and 14 deletions.
.gitignore (2 changes: 1 addition & 1 deletion)
@@ -1,6 +1,6 @@
 archive-config.json
 research-docs/archive-errors.json
-research-docs/adapterExamples.json
+research-docs/adapter-examples.json
 
 
 tmp/
scripts/debug-adapter.ts (6 changes: 3 additions & 3 deletions)
@@ -2,7 +2,7 @@
 import deepmerge from 'deepmerge';
 import { writeFile } from 'fs/promises';
 import { allAdapters } from '../src/common/adapters';
-import { debugAdapter, mergeAdapterResults } from './lib/debug';
+import { loadTestDataFromDb, mergeTestDataResults } from './lib/test-data';
 
 const adapterArgument = process.argv[2];
 if (!adapterArgument) throw new Error('You need to specify the adapter as the first argument.');
@@ -14,11 +14,11 @@ const mergeResult = process.argv.includes('--merge-result');
 const adapter = allAdapters.find((a) => a.tracker.slug === trackerSlug && a.slug === adapterSlug);
 if (!adapter) throw new Error(`Adapter ${adapterSlug} not found.`);
 
-const { adapterResults, decodingResults } = await debugAdapter(adapter);
+const { adapterResults, decodingResults } = await loadTestDataFromDb(adapter);
 
 // We print the adapter results to the console and save the deepmerged decoding results to a file.
 if (mergeResult) {
-    console.dir(mergeAdapterResults(adapterResults), { depth: null });
+    console.dir(mergeTestDataResults(adapterResults), { depth: null });
 } else {
     for (const r of adapterResults) {
         console.dir(r, { depth: null });
scripts/generate-example-data.ts (9 changes: 4 additions & 5 deletions)
@@ -1,14 +1,13 @@
 import { writeFile } from 'fs/promises';
 import { allAdapters } from '../src/common/adapters';
-import { debugAdapter, mergeAdapterResults } from './lib/debug';
+import { loadTestDataFromDb, mergeTestDataResults } from './lib/test-data';
 
 (async () => {
     const result = await Promise.all(
         allAdapters.map(async (adapter) => {
-            // Without any limit, this might run for very long.
-            const { adapterResults } = await debugAdapter(adapter, { accumulateToPath: true, rowLimit: 200 });
-            return { [`${adapter.tracker.slug}/${adapter.slug}`]: mergeAdapterResults(adapterResults) };
+            const { adapterResults } = await loadTestDataFromDb(adapter, { accumulateToPath: true });
+            return { [`${adapter.tracker.slug}/${adapter.slug}`.toLowerCase()]: mergeTestDataResults(adapterResults) };
         })
     ).then((results) => Object.assign({}, ...results));
-    await writeFile('./research-docs/adapterExamples.json', JSON.stringify(result));
+    await writeFile('./research-docs/adapter-examples.json', JSON.stringify(result));
 })();
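
The `Promise.all(...).then((results) => Object.assign({}, ...results))` step above folds the array of single-key objects (one per adapter) into one lookup object keyed by the lowercased `tracker/adapter` slug. A minimal sketch of that reduction, with hypothetical slugs and properties for illustration:

// Hypothetical per-adapter entries, mirroring the single-key objects returned from the map() above.
const results = [
    { 'exampletracker/batch': { 'query.userId': ['1234'] } },
    { 'exampletracker/pixel': { 'body.deviceModel': ['Pixel 7'] } },
];

// Spreading the array into Object.assign merges the entries into one keyed record:
// { 'exampletracker/batch': { ... }, 'exampletracker/pixel': { ... } }
const examples = Object.assign({}, ...results);
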
scripts/lib/debug.ts → scripts/lib/test-data.ts (25 changes: 20 additions & 5 deletions)
@@ -2,7 +2,22 @@ import { fetch } from 'cross-fetch';
 import deepmerge from 'deepmerge';
 import { adapterForRequest, decodeRequest, processRequest, type Adapter, type Request } from '../../src';
 
-export const debugAdapter = async (adapter: Adapter, options?: { accumulateToPath?: boolean; rowLimit?: number }) => {
+type LoadTestDataOptions = {
+    /** Whether to return an object where the results are grouped by the path or as single results. */
+    accumulateToPath?: boolean;
+    /** The maximum number of requests to load. Defaults to `max` which loads all entries in the database. */
+    rowLimit?: number;
+};
+
+/**
+ * Loads test requests concerning the given adapter from the online database and processes them in it.
+ *
+ * @param adapter The adapter to load and process requests for.
+ * @param options Options for loading the adapters.
+ *
+ * @returns An object containing the processed `adapterResults` and the only decoded request in `decodingResults`.
+ */
+export const loadTestDataFromDb = async (adapter: Adapter, options?: LoadTestDataOptions) => {
     // Fetch requests from Datasette.
     const adapterClauses = adapter.endpointUrls.map((u) =>
         u instanceof RegExp
@@ -20,14 +35,14 @@ export const debugAdapter = async (adapter: Adapter, options?: { accumulateToPath?: boolean; rowLimit?: number }) => {
     )}&_json=headers&_json=cookies&_nocol=initiator&_nocol=platform&_nocol=runType&_nofacet=1&_nosuggest=1&_nocount=1&_size=${
         options?.rowLimit || 'max'
     }`;
-    while (nextUrl && options?.rowLimit && requests.length < options?.rowLimit) {
+    while (nextUrl && (!options?.rowLimit || requests.length < options.rowLimit)) {
         const res = await fetch(nextUrl).then((r) => r.json());
         requests.push(...res.rows);
         nextUrl = res.next_url;
     }
 
     const requestsForAdapter = requests
-        // And if the content is binary, Datasette encodes it as base64
+        // If the content is binary, Datasette encodes it as base64
         // (https://docs.datasette.io/en/stable/binary_data.html).
         .map((r) => {
             const content = r.content as string | { $base64: true; encoded: string } | undefined;
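
The reworked `while` condition above is a behavioral fix: the old condition required `options?.rowLimit` to be truthy, so a call without a row limit never entered the loop and fetched nothing. The new condition keeps paging until Datasette returns no `next_url`, or until the optional limit is reached. Below is a minimal sketch of the corrected loop shape, with a stubbed pager standing in for the real Datasette endpoint (all data hypothetical):

// Stubbed pages standing in for Datasette's paginated JSON responses.
const pages = [
    { rows: [1, 2, 3], next_url: 'page-2' },
    { rows: [4, 5], next_url: undefined },
];

const fetchAll = (rowLimit?: number) => {
    const requests: number[] = [];
    let pageIndex: number | undefined = 0;
    // Same shape as the fixed condition: keep going while there is a next page
    // and either no limit is set or fewer rows than the limit have been loaded.
    while (pageIndex !== undefined && (!rowLimit || requests.length < rowLimit)) {
        const res = pages[pageIndex];
        requests.push(...res.rows);
        pageIndex = res.next_url ? pageIndex + 1 : undefined;
    }
    return requests;
};

fetchAll();  // [1, 2, 3, 4, 5]: with no limit, all pages are loaded.
fetchAll(2); // [1, 2, 3]: stops once the loaded rows reach the limit.
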
@@ -62,8 +77,8 @@ export const debugAdapter = async (adapter: Adapter, options?: { accumulateToPath?: boolean; rowLimit?: number }) => {
     return { adapterResults, decodingResults };
 };
 
-export const mergeAdapterResults = (adapterResults: (Record<string, unknown[]> | undefined)[]) => {
-    const mergedResult = deepmerge.all(adapterResults.filter(Boolean) as Record<string, unknown[]>[]);
+export const mergeTestDataResults = (testDataResults: (Record<string, unknown[]> | undefined)[]) => {
+    const mergedResult = deepmerge.all(testDataResults.filter(Boolean) as Record<string, unknown[]>[]);
     for (const key of Object.keys(mergedResult)) mergedResult[key] = [...new Set(mergedResult[key] as unknown[])];
     return mergedResult;
 };
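
For context on the renamed `mergeTestDataResults` helper: `deepmerge.all` concatenates the per-request result arrays key by key, and the follow-up `Set` pass drops exact duplicates within each key (identical primitives or identical object references). A small usage sketch with made-up result keys and values:

import deepmerge from 'deepmerge';

// Two hypothetical per-request results for the same adapter.
const first = { 'query.osVersion': ['17.2'], 'query.language': ['en-US'] };
const second = { 'query.osVersion': ['17.2', '16.5'] };

// deepmerge's default array merge concatenates:
// { 'query.osVersion': ['17.2', '17.2', '16.5'], 'query.language': ['en-US'] }
const merged = deepmerge.all([first, second]) as Record<string, unknown[]>;

// The Set pass then removes the duplicate '17.2':
// { 'query.osVersion': ['17.2', '16.5'], 'query.language': ['en-US'] }
for (const key of Object.keys(merged)) merged[key] = [...new Set(merged[key])];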
