diff --git a/Dockerfile b/Dockerfile index dba36b22f..e8f4e9cbe 100644 --- a/Dockerfile +++ b/Dockerfile @@ -14,6 +14,7 @@ ENV CYPRESS_CACHE_FOLDER=/app/.cache ENV npm_config_cache=/app/.npm RUN mkdir /app/upload && \ + mkdir /app/webservice_temp && \ cp -n ./config/production-dist.js ./config/production.js && \ npm run build && \ npm cache clean --force && \ diff --git a/Dockerfile.spec b/Dockerfile.spec index dec53d507..f0bb65826 100644 --- a/Dockerfile.spec +++ b/Dockerfile.spec @@ -15,6 +15,7 @@ ENV CYPRESS_CACHE_FOLDER=/app/.cache ENV npm_config_cache=/app/.npm RUN mkdir /app/upload && \ + mkdir /app/webservice_temp && \ cp -n ./config/production-dist.js ./config/production.js && \ npm run build diff --git a/config.json b/config.json index 0164242b7..315af64b2 100644 --- a/config.json +++ b/config.json @@ -191,5 +191,7 @@ "multilingual": false }, "webServiceBaseURL": "https://data-computer.services.istex.fr/v1/", - "ISOLATED_MODE": true + "webhookBaseUrlForDevelopment": "https://5863-81-250-164-94.ngrok-free.app", + "isolatedMode": false, + "webserviceAnswerMode": "retrieve" } diff --git a/docker-compose.yml b/docker-compose.yml index c663bbca3..29bf08aa8 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -18,6 +18,7 @@ services: WORKERS_URL: 'http://localhost:31976' volumes: - ./upload:/app/upload + - ./webservice_temp:/app/webservice_temp links: - mongo ports: diff --git a/package-lock.json b/package-lock.json index 6dae846d4..477a2089c 100644 --- a/package-lock.json +++ b/package-lock.json @@ -159,6 +159,7 @@ "socket.io-client": "4.5.4", "stream-to-string": "^1.2.0", "style-loader": "^2.0.0", + "tar-fs": "3.0.4", "tar-stream": "2.2.0", "through": "^2.3.8", "triple-beam": "1.3.0", @@ -26535,6 +26536,11 @@ "mkdirp": "bin/cmd.js" } }, + "node_modules/mkdirp-classic": { + "version": "0.5.3", + "resolved": "https://registry.npmjs.org/mkdirp-classic/-/mkdirp-classic-0.5.3.tgz", + "integrity": 
"sha512-gKLcREMhtuZRwRAfqP3RFW+TK4JqApVBtOIftVgjuABpAtpxhPGaDcfvbhNvD0B8iD1oUr/txX35NjcaY6Ns/A==" + }, "node_modules/mocha": { "version": "10.2.0", "resolved": "https://registry.npmjs.org/mocha/-/mocha-10.2.0.tgz", @@ -33008,6 +33014,26 @@ "node": ">=10" } }, + "node_modules/tar-fs": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/tar-fs/-/tar-fs-3.0.4.tgz", + "integrity": "sha512-5AFQU8b9qLfZCX9zp2duONhPmZv0hGYiBPJsyUdqMjzq/mqVpy/rEUSeHk1+YitmxugaptgBh5oDGU3VsAJq4w==", + "dependencies": { + "mkdirp-classic": "^0.5.2", + "pump": "^3.0.0", + "tar-stream": "^3.1.5" + } + }, + "node_modules/tar-fs/node_modules/tar-stream": { + "version": "3.1.6", + "resolved": "https://registry.npmjs.org/tar-stream/-/tar-stream-3.1.6.tgz", + "integrity": "sha512-B/UyjYwPpMBv+PaFSWAmtYjwdrlEaZQEhMIBFNC5oEG8lpiW8XjcSdmEaClj28ArfKScKHs2nshz3k2le6crsg==", + "dependencies": { + "b4a": "^1.6.4", + "fast-fifo": "^1.2.0", + "streamx": "^2.15.0" + } + }, "node_modules/tar-stream": { "version": "2.2.0", "resolved": "https://registry.npmjs.org/tar-stream/-/tar-stream-2.2.0.tgz", @@ -62742,6 +62768,11 @@ "minimist": "^1.2.5" } }, + "mkdirp-classic": { + "version": "0.5.3", + "resolved": "https://registry.npmjs.org/mkdirp-classic/-/mkdirp-classic-0.5.3.tgz", + "integrity": "sha512-gKLcREMhtuZRwRAfqP3RFW+TK4JqApVBtOIftVgjuABpAtpxhPGaDcfvbhNvD0B8iD1oUr/txX35NjcaY6Ns/A==" + }, "mocha": { "version": "10.2.0", "resolved": "https://registry.npmjs.org/mocha/-/mocha-10.2.0.tgz", @@ -67937,6 +67968,28 @@ } } }, + "tar-fs": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/tar-fs/-/tar-fs-3.0.4.tgz", + "integrity": "sha512-5AFQU8b9qLfZCX9zp2duONhPmZv0hGYiBPJsyUdqMjzq/mqVpy/rEUSeHk1+YitmxugaptgBh5oDGU3VsAJq4w==", + "requires": { + "mkdirp-classic": "^0.5.2", + "pump": "^3.0.0", + "tar-stream": "^3.1.5" + }, + "dependencies": { + "tar-stream": { + "version": "3.1.6", + "resolved": "https://registry.npmjs.org/tar-stream/-/tar-stream-3.1.6.tgz", + "integrity": 
"sha512-B/UyjYwPpMBv+PaFSWAmtYjwdrlEaZQEhMIBFNC5oEG8lpiW8XjcSdmEaClj28ArfKScKHs2nshz3k2le6crsg==", + "requires": { + "b4a": "^1.6.4", + "fast-fifo": "^1.2.0", + "streamx": "^2.15.0" + } + } + } + }, "tar-stream": { "version": "2.2.0", "resolved": "https://registry.npmjs.org/tar-stream/-/tar-stream-2.2.0.tgz", diff --git a/package.json b/package.json index c1ceccc34..bb982f6ca 100644 --- a/package.json +++ b/package.json @@ -204,6 +204,7 @@ "socket.io-client": "4.5.4", "stream-to-string": "^1.2.0", "style-loader": "^2.0.0", + "tar-fs": "3.0.4", "tar-stream": "2.2.0", "through": "^2.3.8", "triple-beam": "1.3.0", diff --git a/src/api/controller/webhook.js b/src/api/controller/webhook.js index cfb282922..83e8b842f 100644 --- a/src/api/controller/webhook.js +++ b/src/api/controller/webhook.js @@ -3,16 +3,36 @@ import route from 'koa-route'; import bodyParser from 'koa-bodyparser'; import getLogger from '../services/logger'; -import { getComputedFromWebservice } from '../services/precomputed/precomputed'; +import { + getComputedFromWebservice, + getFailureFromWebservice, +} from '../services/precomputed/precomputed'; export const getComputedWebserviceData = async ctx => { - const { precomputedId, tenant, jobId } = ctx.request.query; + const { precomputedId, tenant, jobId, failure } = ctx.request.query; const { identifier, generator, state } = ctx.request.body; const logger = getLogger(ctx.tenant); logger.info(`Precompute webhook call for ${tenant}`); - logger.info('Body', ctx.request.body); + logger.info('Query', ctx.request.query); + + if (failure !== undefined) { + const { type, message } = ctx.request.body.error; + logger.info('Precompute webservice call with failure'); + await getFailureFromWebservice( + ctx, + tenant, + precomputedId, + identifier, + jobId, + type, + message, + ); + ctx.body = 'webhook failure'; + ctx.status = 200; + return; + } - if (!state == 'ready') { + if (state !== 'ready') { return; } diff --git a/src/api/services/precomputed/precomputed.js
b/src/api/services/precomputed/precomputed.js index 4267f2f0d..73d5d8a2b 100644 --- a/src/api/services/precomputed/precomputed.js +++ b/src/api/services/precomputed/precomputed.js @@ -2,6 +2,8 @@ import progress from '../progress'; import localConfig from '../../../../config.json'; import { getHost } from '../../../common/uris'; import tar from 'tar-stream'; +import { createGzip, createGunzip } from 'zlib'; +import tarFS from 'tar-fs'; import fs from 'fs'; import { pipeline } from 'stream'; import { promisify } from 'util'; @@ -19,14 +21,17 @@ import { PRECOMPUTER } from '../../workers/precomputer'; import getLogger from '../logger'; const baseUrl = getHost(); -//Warning : This have to be done better for dev env const webhookBaseUrl = process.env.NODE_ENV === 'development' - ? ' https://1d97-81-250-164-94.ngrok-free.app' + ? localConfig.webhookBaseUrlForDevelopment : baseUrl; const { precomputedBatchSize: BATCH_SIZE = 10 } = localConfig; -const { ISOLATED_MODE = true } = localConfig; +const { isolatedMode: ISOLATED_MODE = true } = localConfig; +const ANSWER_ROUTES = { RETRIEVE: 'retrieve', COLLECT: 'collect' }; +const { + webserviceAnswerMode: ANSWER_ROUTE = ANSWER_ROUTES.RETRIEVE, +} = localConfig; export const getPrecomputedDataPreview = async ctx => { const { sourceColumns } = ctx.request.body; @@ -64,6 +69,9 @@ const processZippedData = async (precomputed, ctx) => { const initDate = new Date(); const pack = tar.pack(); const dataSetSize = await ctx.dataset.count(); + const fileNameSize = dataSetSize.toString().length > 10 + ? dataSetSize.toString().length + : 10; for ( let indexDataset = 0; indexDataset < dataSetSize; @@ -88,11 +96,11 @@ const processZippedData = async (precomputed, ctx) => { name: `data/${'f' + (indexDataset + indexBatch + 1) .toString() - .padStart(10, 0)}.json`, + .padStart(fileNameSize, 0)}.json`, }, JSON.stringify({ id: entry.uri, - value: colums.length > 1 ? colums : colums[0], + value: JSON.parse(colums.length > 1 ?
colums : colums[0]), }), ); } @@ -117,7 +125,7 @@ const processZippedData = async (precomputed, ctx) => { const fileName = `./webservice_temp/__entry_${ ctx.tenant }_${Date.now().toString()}.tar.gz`; - await pipe(pack, fs.createWriteStream(fileName)); + await pipe(pack, createGzip(), fs.createWriteStream(fileName)); return fileName; }; @@ -141,8 +149,10 @@ export const getTokenFromWebservice = async ( body: fs.createReadStream(fileName), headers: { 'Content-Type': 'application/gzip', - 'X-Hook': `${webhookBaseUrl}/webhook/compute_webservice/?precomputedId=${precomputedId}&tenant=${tenant}&jobId=${jobId}`, + 'X-Webhook-Success': `${webhookBaseUrl}/webhook/compute_webservice/?precomputedId=${precomputedId}&tenant=${tenant}&jobId=${jobId}`, + 'X-Webhook-Failure': `${webhookBaseUrl}/webhook/compute_webservice/?precomputedId=${precomputedId}&tenant=${tenant}&jobId=${jobId}&failure`, }, + compress: false, }); if (response.status != 200) { throw new Error('Calling token webservice error'); @@ -159,6 +169,122 @@ export const getTokenFromWebservice = async ( return callId; }; +const extractResultFromZip = async (tenant, job, room, data) => { + let logData = JSON.stringify({ + level: 'ok', + message: `[Instance: ${tenant}] Saving result zip file`, + timestamp: new Date(), + status: IN_PROGRESS, + }); + jobLogger.info(job, logData); + notifyListeners(room, logData); + + const pipe = promisify(pipeline); + const fileName = `./webservice_temp/__result_${tenant}_${Date.now().toString()}.tar.gz`; + + await pipe(data, fs.createWriteStream(fileName)); + + logData = JSON.stringify({ + level: 'ok', + message: `[Instance: ${tenant}] Save result zip file OK`, + timestamp: new Date(), + status: IN_PROGRESS, + }); + jobLogger.info(job, logData); + notifyListeners(room, logData); + + logData = JSON.stringify({ + level: 'ok', + message: `[Instance: ${tenant}] Extracting result zip file`, + timestamp: new Date(), + status: IN_PROGRESS, + }); + jobLogger.info(job, logData); + 
notifyListeners(room, logData); + + const folderName = fileName.replace('.tar.gz', ''); + await pipe( + fs.createReadStream(fileName), + createGunzip(), + tarFS.extract(folderName), + ); + + fs.unlink(fileName, error => { + if (error) { + throw error; + } + }); + + logData = JSON.stringify({ + level: 'ok', + message: `[Instance: ${tenant}] Extract result zip file OK`, + timestamp: new Date(), + status: IN_PROGRESS, + }); + jobLogger.info(job, logData); + notifyListeners(room, logData); + + logData = JSON.stringify({ + level: 'ok', + message: `[Instance: ${tenant}] Compiling json result`, + timestamp: new Date(), + status: IN_PROGRESS, + }); + jobLogger.info(job, logData); + notifyListeners(room, logData); + + const files = await fs.promises.readdir(`${folderName}/data`); + logData = JSON.stringify({ + level: 'ok', + message: `[Instance: ${tenant}] ${files.length} files found`, + timestamp: new Date(), + status: IN_PROGRESS, + }); + jobLogger.info(job, logData); + notifyListeners(room, logData); + let result = []; + for (const file of files) { + const JsonName = `${folderName}/data/${file}`; + const json = await fs.promises.readFile(JsonName, { encoding: 'utf8' }); + result.push(JSON.parse(json)); + + fs.unlink(JsonName, error => { + if (error) { + throw error; + } + }); + } + + logData = JSON.stringify({ + level: 'ok', + message: `[Instance: ${tenant}] Compile json result OK`, + timestamp: new Date(), + status: IN_PROGRESS, + }); + jobLogger.info(job, logData); + notifyListeners(room, logData); + + fs.unlink(`${folderName}/manifest.json`, error => { + if (error) { + throw error; + } + }); + + fs.rmdir(`${folderName}/data`, error => { + if (error) { + throw error; + } + }); + + fs.rmdir(folderName, error => { + if (error) { + throw error; + } + }); + + return result; +}; + export const getComputedFromWebservice = async ( ctx, tenant, @@ -181,11 +307,13 @@ const activeJobs = await workerQueue.getActive(); const job = 
activeJobs.filter(job => { const { id, jobType, tenant: jobTenant } = job.data; + return ( - id == precomputedId && - jobType == PRECOMPUTER && - jobTenant == tenant && - (ISOLATED_MODE || job.opts.jobId == jobId) + id === precomputedId && + jobType === PRECOMPUTER && + jobTenant === tenant && + (ISOLATED_MODE || + `${tenant}-precomputed-job-${job.opts.jobId}` === jobId) ); })?.[0]; @@ -193,13 +321,25 @@ export const getComputedFromWebservice = async ( throw new CancelWorkerError('Job has been canceled'); } progress.incrementProgress(tenant, 70); + const room = `${tenant}-precomputed-job-${jobId}`; + const logData = JSON.stringify({ + level: 'ok', + message: `[Instance: ${tenant}] Webservice response ok`, + timestamp: new Date(), + status: IN_PROGRESS, + }); + jobLogger.info(job, logData); + notifyListeners(room, logData); + + //WS doc here: //openapi.services.istex.fr/?urls.primaryName=data-computer%20-%20Calculs%20sur%20fichier%20coprus%20compress%C3%A9#/data-computer/post-v1-collect - const room = `${tenant}-precomputed-job-${jobId}`; try { + const ROUTE = { RETRIEVE: 'retrieve', COLLECT: 'collect' }; + const callRoute = ANSWER_ROUTE; const response = await fetch( - `${localConfig.webServiceBaseURL}collect`, + `${localConfig.webServiceBaseURL}${callRoute}`, { method: 'POST', body: callId, @@ -210,11 +350,30 @@ export const getComputedFromWebservice = async ( }, ); if (response.status === 200 || ISOLATED_MODE) { - const data = ISOLATED_MODE ? { what: 'it worked' } : response.body; + let data = ISOLATED_MODE ? 
{ what: 'it worked' } : response.body; + if (callRoute === ROUTE.RETRIEVE) { + const logData = JSON.stringify({ + level: 'ok', + message: `[Instance: ${tenant}] Using tar.gz mode webservice`, + timestamp: new Date(), + status: IN_PROGRESS, + }); + jobLogger.info(job, logData); + notifyListeners(room, logData); + data = await extractResultFromZip(tenant, job, room, data); + } await ctx.precomputed.updateStatus(precomputedId, FINISHED, { data, }); + + job.progress(100); + job.moveToCompleted(); + const isFailed = await job.isFailed(); + notifyListeners(`${job.data.tenant}-precomputer`, { + isPrecomputing: false, + success: !isFailed, + }); progress.finish(tenant); const logData = JSON.stringify({ level: 'ok', @@ -236,6 +395,61 @@ export const getComputedFromWebservice = async ( } }; +export const getFailureFromWebservice = async ( + ctx, + tenant, + precomputedId, + callId, + jobId, + errorType, + errorMessage, +) => { + if (!tenant || !precomputedId || !callId) { + throw new Error( + `Precompute webhook failure error: missing data ${JSON.stringify({ + tenant: !tenant ? 'missing' : tenant, + precomputedId: !precomputedId ? 'missing' : precomputedId, + callId: !callId ? 
'missing' : callId, + })}`, + ); + } + const workerQueue = workerQueues[tenant]; + const activeJobs = await workerQueue.getActive(); + const job = activeJobs.filter(job => { + const { id, jobType, tenant: jobTenant } = job.data; + + return ( + id === precomputedId && + jobType === PRECOMPUTER && + jobTenant === tenant && + (ISOLATED_MODE || + `${tenant}-precomputed-job-${job.opts.jobId}` === jobId) + ); + })?.[0]; + + if (!job) { + return; + } + + const room = `${tenant}-precomputed-job-${jobId}`; + + await ctx.precomputed.updateStatus(precomputedId, ERROR, { + message: errorMessage, + }); + + job.progress(100); + job.moveToFailed(new Error(errorMessage)); + progress.finish(tenant); + const logData = JSON.stringify({ + level: 'error', + message: `[Instance: ${tenant}] Precomputing data failed ${errorType} ${errorMessage}`, + timestamp: new Date(), + status: ERROR, + }); + jobLogger.info(job, logData); + notifyListeners(room, logData); +}; + export const processPrecomputed = async (precomputed, ctx) => { let logData = {}; await ctx.precomputed.updateStatus(precomputed._id, IN_PROGRESS); @@ -298,9 +512,6 @@ export const processPrecomputed = async (precomputed, ctx) => { }); jobLogger.info(ctx.job, logData); notifyListeners(room, logData); - if (ISOLATED_MODE) { - await new Promise(resolve => setTimeout(resolve, 20000)); - } }; export const setPrecomputedJobId = async (ctx, precomputedID, job) => { diff --git a/src/api/workers/precomputer.js b/src/api/workers/precomputer.js index ee41fd0a0..6d53efe82 100644 --- a/src/api/workers/precomputer.js +++ b/src/api/workers/precomputer.js @@ -9,15 +9,13 @@ import repositoryMiddleware from '../services/repositoryMiddleware'; export const PRECOMPUTER = 'precomputer'; export const processPrecomputed = (job, done) => { - startJobPrecomputed(job) + startJobPrecomputed(job, done) .then(async () => { - job.progress(100); const isFailed = await job.isFailed(); notifyListeners(`${job.data.tenant}-precomputer`, { - isPrecomputing: 
false, + isPrecomputing: isFailed ? false : true, success: !isFailed, }); - done(); }) .catch(err => { handlePrecomputedError(job, err); @@ -36,10 +34,6 @@ const startJobPrecomputed = async job => { const handlePrecomputedError = async (job, err) => { const ctx = await prepareContext({ job }); - if (err instanceof CancelWorkerError) { - const precomputed = await ctx.precomputed.findOneById(ctx.job.data.id); - ctx.dataset.removeAttribute(precomputed.name); - } await setPrecomputedError(ctx, err); }; diff --git a/src/app/custom/translations.tsv b/src/app/custom/translations.tsv index 1edef6b28..ce6e3682b 100644 --- a/src/app/custom/translations.tsv +++ b/src/app/custom/translations.tsv @@ -692,6 +692,8 @@ "rows_per_page" "Rows per page" "Lignes par page" "cancelEnrichmentTitle" "Cancel running enrichment ?" "Annuler l'enrichissement en cours ?" "cancelEnrichmentContent" "By cancelling running enrichment, you also clear already enriched data." "En annulant l'enrichissement en cours, vous supprimerez également les données déjà enrichies." +"cancelPrecomputerTitle" "Cancel running precomputing ?" "Annuler le précalcul en cours ?" +"cancelPrecomputerContent" "Confirm cancelling of running precomputing." "Confirmer l'annulation du précalcul en cours." "advancedModePreviewInfo" "Source preview replaces [URLConnect] with [transit] and runs your ini file, as if all instructions were executed, except the call to web services." "L'aperçu de la source remplace [URLConnect] par [transit] et exécute votre fichier ini, comme si toutes les instructions étaient exécutées, sauf l'appel aux services web." 
"enrichment_logs" "Enrichment logs" "Logs de l'enrichissement" "precomputed_logs" "Precomputed data logs" "Logs des données précalculées" diff --git a/src/app/js/admin/Appbar/CancelPublicationDialog.js b/src/app/js/admin/Appbar/CancelProcessDialog.js similarity index 90% rename from src/app/js/admin/Appbar/CancelPublicationDialog.js rename to src/app/js/admin/Appbar/CancelProcessDialog.js index 473aafa78..b24095c7b 100644 --- a/src/app/js/admin/Appbar/CancelPublicationDialog.js +++ b/src/app/js/admin/Appbar/CancelProcessDialog.js @@ -12,7 +12,7 @@ import { polyglot as polyglotPropTypes } from '../../propTypes'; import translate from 'redux-polyglot/translate'; import CancelButton from '../../lib/components/CancelButton'; -const CancelPublicationDialog = props => { +const CancelProcessDialog = props => { const { p: polyglot, isOpen, title, content, onConfirm, onCancel } = props; return ( @@ -30,7 +30,7 @@ const CancelPublicationDialog = props => { ); }; -CancelPublicationDialog.propTypes = { +CancelProcessDialog.propTypes = { p: polyglotPropTypes.isRequired, title: PropTypes.string.isRequired, content: PropTypes.string.isRequired, @@ -38,4 +38,4 @@ CancelPublicationDialog.propTypes = { onCancel: PropTypes.func.isRequired, isOpen: PropTypes.bool.isRequired, }; -export default translate(CancelPublicationDialog); +export default translate(CancelProcessDialog); diff --git a/src/app/js/admin/Appbar/JobProgress.js b/src/app/js/admin/Appbar/JobProgress.js index e7b01cab8..6b97f4859 100644 --- a/src/app/js/admin/Appbar/JobProgress.js +++ b/src/app/js/admin/Appbar/JobProgress.js @@ -21,7 +21,7 @@ import { } from '../../../../common/progressStatus'; import { Cancel } from '@mui/icons-material'; import jobsApi from '../api/job'; -import CancelPublicationDialog from './CancelPublicationDialog'; +import CancelProcessDialog from './CancelProcessDialog'; import { publicationCleared } from '../publication'; import Warning from '@mui/icons-material/Warning'; import { 
loadParsingResult } from '../parsing'; @@ -285,22 +285,10 @@ const JobProgressComponent = props => { )} - { setIsCancelDialogOpen(false); }} @@ -331,6 +319,33 @@ JobProgressComponent.propTypes = { loadEnrichments: PropTypes.func.isRequired, loadPrecomputed: PropTypes.func.isRequired, }; + +const getTitle = type => { + switch (type) { + case 'publisher': + return 'cancelPublicationTitle'; + case 'precomputer': + return 'cancelPrecomputerTitle'; + case 'enricher': + return 'cancelEnrichmentTitle'; + default: + return 'cancelImportTitle'; + } +}; + +const getContent = type => { + switch (type) { + case 'publisher': + return 'cancelPublicationContent'; + case 'precomputer': + return 'cancelPrecomputerContent'; + case 'enricher': + return 'cancelEnrichmentContent'; + default: + return 'cancelImportContent'; + } +}; + const mapStateToProps = state => ({ hasPublishedDataset: fromPublication.hasPublishedDataset(state), }); diff --git a/src/app/js/admin/api/job.js b/src/app/js/admin/api/job.js index d803cd1dd..8f31c4fa2 100644 --- a/src/app/js/admin/api/job.js +++ b/src/app/js/admin/api/job.js @@ -15,7 +15,6 @@ export const getJobLogs = jobId => { const cancelJob = type => { const state = getUserSessionStorageInfo(); - const request = getCancelJobRequest(state, type); return fetch(request); };