diff --git a/.circleci/config.yml b/.circleci/config.yml
index 986e966a16a2d..d12ffd7e09754 100644
--- a/.circleci/config.yml
+++ b/.circleci/config.yml
@@ -580,6 +580,43 @@ jobs:
- store_test_results:
path: ./test-results/jest-node/
+ windows_adapters_smoke:
+ executor:
+ name: win/default
+ shell: bash.exe
+ steps:
+ - checkout
+ - run:
+ command: ./scripts/assert-changed-files.sh "packages/*|(e2e|integration)-tests/*|.circleci/*|scripts/e2e-test.sh|yarn.lock"
+ - <<: *attach_to_bootstrap
+ - run:
+ name: Install node 18.19.0, yarn and netlify-cli
+ command: |
+ nvm install 18.19.0
+ nvm alias default 18.19.0
+ nvm use 18.19.0
+ npm install -g yarn netlify-cli
+ - run:
+ name: Clear out sharp
+ command: |
+ Remove-Item -Recurse -Force -Path "node_modules/sharp/"
+ shell: powershell.exe
+ - run:
+ command: yarn
+ - run:
+ command: mkdir -p /tmp/e2e-tests/
+ - run:
+ command: cp -r ./e2e-tests/adapters /tmp/e2e-tests/adapters
+ - run:
+ command: pwd && ls
+ working_directory: /tmp/e2e-tests/adapters
+ - run: # Set project dir
+ command: node ./packages/gatsby-dev-cli/dist/index.js --set-path-to-repo .
+ - run: # Copy over packages
+ command: cd /tmp/e2e-tests/adapters && node ~/project/packages/gatsby-dev-cli/dist/index.js --force-install --scan-once
+ - run: # run smoke test
+ command: cd /tmp/e2e-tests/adapters && node scripts/deploy-and-run/netlify.mjs test:smoke
+
workflows:
version: 2
@@ -611,6 +648,14 @@ workflows:
requires:
- lint
- bootstrap
+ - windows_adapters_smoke:
+ requires:
+ # ideally we wait for windows unit tests here, but because those are flaky
+ # feedback loop would be not practical, so at least wait for linux unit tests
+ # to resemble setup for more robust E2E tests
+ - lint
+ - bootstrap
+ - unit_tests_node18
- unit_tests_node18:
<<: *ignore_docs
requires:
diff --git a/e2e-tests/adapters/package.json b/e2e-tests/adapters/package.json
index bec04e60328e9..7ddcfda4f06f3 100644
--- a/e2e-tests/adapters/package.json
+++ b/e2e-tests/adapters/package.json
@@ -17,6 +17,7 @@
"test:template:debug": "cross-env-shell CYPRESS_GROUP_NAME=\"adapter:$ADAPTER / trailingSlash:${TRAILING_SLASH:-always} / pathPrefix:${PATH_PREFIX:--}\" TRAILING_SLASH=$TRAILING_SLASH PATH_PREFIX=$PATH_PREFIX npm run cy:open -- --config-file \"cypress/configs/$ADAPTER.ts\" --env TRAILING_SLASH=$TRAILING_SLASH,PATH_PREFIX=$PATH_PREFIX",
"test:debug": "npm-run-all -s build:debug ssat:debug",
"test:netlify": "cross-env TRAILING_SLASH=always node scripts/deploy-and-run/netlify.mjs test:template",
+ "test:smoke": "node smoke-test.mjs",
"test:netlify:debug": "cross-env TRAILING_SLASH=always node scripts/deploy-and-run/netlify.mjs test:template:debug",
"test:netlify:prefix-never": "cross-env TRAILING_SLASH=never PATH_PREFIX=/prefix node scripts/deploy-and-run/netlify.mjs test:template",
"test:netlify:prefix-never:debug": "cross-env TRAILING_SLASH=never PATH_PREFIX=/prefix node scripts/deploy-and-run/netlify.mjs test:template:debug",
diff --git a/e2e-tests/adapters/smoke-test.mjs b/e2e-tests/adapters/smoke-test.mjs
new file mode 100644
index 0000000000000..901a8f6f35505
--- /dev/null
+++ b/e2e-tests/adapters/smoke-test.mjs
@@ -0,0 +1,24 @@
+import assert from "node:assert"
+
+{
+ // check index page (SSG)
+ const response = await fetch(process.env.DEPLOY_URL)
+ assert.equal(response.status, 200)
+
+ const body = await response.text()
+  assert.match(body, /<h1>Adapters<\/h1>/)
+  assert.match(body, /<title[^>]*>Adapters E2E<\/title>/)
+}
+
+{
+ // check SSR page
+ const response = await fetch(
+ process.env.DEPLOY_URL + `/routes/ssr/remote-file/`
+ )
+ assert.equal(response.status, 200)
+
+ const body = await response.text()
+ // inline css for placeholder - this tests both LMDB and SHARP
+ // (LMDB because of page query and sharp because page query will use sharp to generate placeholder values)
+ assert.match(body, /background-color:rgb\(232,184,8\)/)
+}
diff --git a/integration-tests/lmdb-regeneration/__tests__/index.js b/integration-tests/lmdb-regeneration/__tests__/index.js
index 3da14d6ce0d54..08bab95b7d587 100644
--- a/integration-tests/lmdb-regeneration/__tests__/index.js
+++ b/integration-tests/lmdb-regeneration/__tests__/index.js
@@ -38,7 +38,13 @@ describe(`Lmdb regeneration`, () => {
// If the fix worked correctly we should have installed the prebuilt binary for our platform under our `.cache` directory
const lmdbRequire = mod.createRequire(
- path.resolve(rootPath, ".cache", "internal-packages", "package.json")
+ path.resolve(
+ rootPath,
+ ".cache",
+ "internal-packages",
+ `${process.platform}-${process.arch}`,
+ "package.json"
+ )
)
expect(() => {
lmdbRequire.resolve(lmdbPackage)
diff --git a/packages/gatsby-adapter-netlify/package.json b/packages/gatsby-adapter-netlify/package.json
index ed4e0a62cf50f..c20920a1d7af4 100644
--- a/packages/gatsby-adapter-netlify/package.json
+++ b/packages/gatsby-adapter-netlify/package.json
@@ -37,7 +37,8 @@
"@netlify/functions": "^1.6.0",
"cookie": "^0.5.0",
"fastq": "^1.15.0",
- "fs-extra": "^11.1.1"
+ "fs-extra": "^11.1.1",
+ "gatsby-core-utils": "^4.13.1"
},
"devDependencies": {
"@babel/cli": "^7.20.7",
diff --git a/packages/gatsby-adapter-netlify/src/__tests__/fixtures/lambda-handler/entry.js b/packages/gatsby-adapter-netlify/src/__tests__/fixtures/lambda-handler/entry.js
new file mode 100644
index 0000000000000..e69de29bb2d1d
diff --git a/packages/gatsby-adapter-netlify/src/__tests__/fixtures/lambda-handler/included.js b/packages/gatsby-adapter-netlify/src/__tests__/fixtures/lambda-handler/included.js
new file mode 100644
index 0000000000000..e69de29bb2d1d
diff --git a/packages/gatsby-adapter-netlify/src/__tests__/lambda-handler.ts b/packages/gatsby-adapter-netlify/src/__tests__/lambda-handler.ts
new file mode 100644
index 0000000000000..c101f616fcf9f
--- /dev/null
+++ b/packages/gatsby-adapter-netlify/src/__tests__/lambda-handler.ts
@@ -0,0 +1,46 @@
+import fs from "fs-extra"
+import { prepareFunction } from "../lambda-handler"
+import { join, relative } from "path"
+import { slash } from "gatsby-core-utils/path"
+
+const writeFileSpy = jest
+ .spyOn(fs, `writeFile`)
+ .mockImplementation(async () => {})
+const writeJsonSpy = jest
+ .spyOn(fs, `writeJSON`)
+ .mockImplementation(async () => {})
+
+const fixturePath = join(
+ relative(process.cwd(), __dirname),
+ `fixtures`,
+ `lambda-handler`
+)
+const pathToEntryPoint = join(fixturePath, `entry.js`)
+const requiredFile = join(fixturePath, `included.js`)
+
+test(`produced handler is correct`, async () => {
+ await prepareFunction({
+ functionId: `test`,
+ name: `test`,
+ pathToEntryPoint,
+ requiredFiles: [requiredFile],
+ })
+ const handlerCode = writeFileSpy.mock.calls[0][1]
+ // expect require in produced code (this is to mostly to make sure handlerCode is actual handler code)
+ expect(handlerCode).toMatch(/require\(["'][^"']*["']\)/)
+ // require paths should not have backward slashes (win paths)
+ expect(handlerCode).not.toMatch(/require\(["'][^"']*\\[^"']*["']\)/)
+
+ expect(writeJsonSpy).toBeCalledWith(
+ expect.any(String),
+ expect.objectContaining({
+ config: expect.objectContaining({
+ name: `test`,
+ generator: expect.stringContaining(`gatsby-adapter-netlify`),
+ includedFiles: [slash(requiredFile)],
+ externalNodeModules: [`msgpackr-extract`],
+ }),
+ version: 1,
+ })
+ )
+})
diff --git a/packages/gatsby-adapter-netlify/src/index.ts b/packages/gatsby-adapter-netlify/src/index.ts
index b34fafbb8718a..57769b7f8c671 100644
--- a/packages/gatsby-adapter-netlify/src/index.ts
+++ b/packages/gatsby-adapter-netlify/src/index.ts
@@ -162,6 +162,8 @@ const createNetlifyAdapter: AdapterInit = options => {
fileCDNUrlGeneratorModulePath: useNetlifyImageCDN
? require.resolve(`./file-cdn-url-generator`)
: undefined,
+ functionsPlatform: `linux`,
+ functionsArch: `x64`,
}
},
}
diff --git a/packages/gatsby-adapter-netlify/src/lambda-handler.ts b/packages/gatsby-adapter-netlify/src/lambda-handler.ts
index ab617a0b7edc7..ef70257ba2c2e 100644
--- a/packages/gatsby-adapter-netlify/src/lambda-handler.ts
+++ b/packages/gatsby-adapter-netlify/src/lambda-handler.ts
@@ -2,6 +2,7 @@ import type { IFunctionDefinition } from "gatsby"
import packageJson from "gatsby-adapter-netlify/package.json"
import fs from "fs-extra"
import * as path from "path"
+import { slash } from "gatsby-core-utils/path"
interface INetlifyFunctionConfig {
  externalNodeModules?: Array<string>
@@ -25,7 +26,7 @@ interface INetlifyFunctionManifest {
version: number
}
-async function prepareFunction(
+export async function prepareFunction(
fun: IFunctionDefinition,
odbfunctionName?: string
): Promise<void> {
@@ -58,7 +59,7 @@ async function prepareFunction(
name: displayName,
generator: `gatsby-adapter-netlify@${packageJson?.version ?? `unknown`}`,
includedFiles: fun.requiredFiles.map(file =>
- file.replace(/\[/g, `*`).replace(/]/g, `*`)
+ slash(file).replace(/\[/g, `*`).replace(/]/g, `*`)
),
externalNodeModules: [`msgpackr-extract`],
},
@@ -73,7 +74,10 @@ async function prepareFunction(
function getRelativePathToModule(modulePath: string): string {
const absolutePath = require.resolve(modulePath)
- return `./` + path.relative(internalFunctionsDir, absolutePath)
+ return (
+ `./` +
+ path.posix.relative(slash(internalFunctionsDir), slash(absolutePath))
+ )
}
const handlerSource = /* javascript */ `
diff --git a/packages/gatsby-cli/src/create-cli.ts b/packages/gatsby-cli/src/create-cli.ts
index dc25aa3afb9f7..2c3d605b2a18d 100644
--- a/packages/gatsby-cli/src/create-cli.ts
+++ b/packages/gatsby-cli/src/create-cli.ts
@@ -273,6 +273,14 @@ function buildLocalCommands(cli: yargs.Argv, isLocalSite: boolean): void {
default: false,
describe: `Save the log of changed pages for future comparison.`,
hidden: true,
+ })
+ .option(`functions-platform`, {
+ type: `string`,
+ describe: `The platform bundled functions will execute on. Defaults to current platform or settings provided by used adapter.`,
+ })
+ .option(`functions-arch`, {
+ type: `string`,
+ describe: `The architecture bundled functions will execute on. Defaults to current architecture or settings provided by used adapter.`,
}),
handler: handlerP(
getCommandHandler(
diff --git a/packages/gatsby-cli/src/structured-errors/error-map.ts b/packages/gatsby-cli/src/structured-errors/error-map.ts
index 1ec3757c1cf8a..c0aea02ca5318 100644
--- a/packages/gatsby-cli/src/structured-errors/error-map.ts
+++ b/packages/gatsby-cli/src/structured-errors/error-map.ts
@@ -83,6 +83,12 @@ const errors: Record<string, IErrorMapEntry> = {
level: Level.ERROR,
category: ErrorCategory.USER,
},
+ "98051": {
+ text: (): string => `Built Rendering Engines failed to load.`,
+ type: Type.ENGINE_EXECUTION,
+ level: Level.ERROR,
+ category: ErrorCategory.UNKNOWN,
+ },
"98123": {
text: (context): string =>
`${context.stageLabel} failed\n\n${
diff --git a/packages/gatsby-legacy-polyfills/package.json b/packages/gatsby-legacy-polyfills/package.json
index 8e6c4bbe494f7..ad84d1fdeb9d8 100644
--- a/packages/gatsby-legacy-polyfills/package.json
+++ b/packages/gatsby-legacy-polyfills/package.json
@@ -16,7 +16,7 @@
"license": "MIT",
"scripts": {
"build": "npm-run-all --npm-path npm -p build:*",
- "build:exclude": "cpy 'exclude.js' '../dist' --cwd=./src",
+ "build:exclude": "cpy \"exclude.js\" \"../dist\" --cwd=./src",
"build:polyfills": "microbundle -f iife -i src/polyfills.js --no-sourcemap --external=none",
"prepare": "cross-env NODE_ENV=production npm run build",
"watch": "npm-run-all --npm-path npm -p watch:*",
diff --git a/packages/gatsby-plugin-offline/package.json b/packages/gatsby-plugin-offline/package.json
index d24753444ad85..71efcaec7ae8c 100644
--- a/packages/gatsby-plugin-offline/package.json
+++ b/packages/gatsby-plugin-offline/package.json
@@ -47,7 +47,7 @@
"scripts": {
"build": "npm run build:src && npm run build:sw-append",
"build:src": "babel src --out-dir . --ignore \"**/__tests__,src/sw-append.js\"",
- "build:sw-append": "cpy 'sw-append.js' '../' --cwd=./src",
+ "build:sw-append": "cpy \"sw-append.js\" \"../\" --cwd=./src",
"prepare": "cross-env NODE_ENV=production npm run build",
"watch": "npm run build:sw-append -- --watch & npm run build:src -- --watch"
},
diff --git a/packages/gatsby/src/commands/build-html.ts b/packages/gatsby/src/commands/build-html.ts
index 02a8c5494ee25..701fdda2c5d4b 100644
--- a/packages/gatsby/src/commands/build-html.ts
+++ b/packages/gatsby/src/commands/build-html.ts
@@ -42,6 +42,8 @@ export interface IBuildArgs extends IProgram {
profile: boolean
graphqlTracing: boolean
openTracingConfigFile: string
+ functionsPlatform?: string
+ functionsArch?: string
// TODO remove in v4
keepPageRenderer: boolean
}
diff --git a/packages/gatsby/src/commands/build.ts b/packages/gatsby/src/commands/build.ts
index 364329e7b26cc..489d99ada5c6f 100644
--- a/packages/gatsby/src/commands/build.ts
+++ b/packages/gatsby/src/commands/build.ts
@@ -66,7 +66,7 @@ import {
getPageMode,
preparePageTemplateConfigs,
} from "../utils/page-mode"
-import { validateEngines } from "../utils/validate-engines"
+import { validateEnginesWithActivity } from "../utils/validate-engines"
import { constructConfigObject } from "../utils/gatsby-cloud-config"
import { waitUntilWorkerJobsAreComplete } from "../utils/jobs/worker-messaging"
import { getSSRChunkHashes } from "../utils/webpack/get-ssr-chunk-hashes"
@@ -295,20 +295,7 @@ module.exports = async function build(
}
if (shouldGenerateEngines()) {
- const validateEnginesActivity = report.activityTimer(
- `Validating Rendering Engines`,
- {
- parentSpan: buildSpan,
- }
- )
- validateEnginesActivity.start()
- try {
- await validateEngines(store.getState().program.directory)
- } catch (error) {
- validateEnginesActivity.panic({ id: `98001`, context: {}, error })
- } finally {
- validateEnginesActivity.end()
- }
+ await validateEnginesWithActivity(program.directory, buildSpan)
}
const cacheActivity = report.activityTimer(`Caching Webpack compilations`, {
diff --git a/packages/gatsby/src/commands/serve.ts b/packages/gatsby/src/commands/serve.ts
index 3ce95bb8fa96c..1727e2c0c420e 100644
--- a/packages/gatsby/src/commands/serve.ts
+++ b/packages/gatsby/src/commands/serve.ts
@@ -29,6 +29,7 @@ import {
thirdPartyProxyPath,
partytownProxy,
} from "../internal-plugins/partytown/proxy"
+import { slash } from "gatsby-core-utils/path"
interface IMatchPath {
path: string
@@ -184,125 +185,146 @@ module.exports = async (program: IServeProgram): Promise<void> => {
}
// Handle SSR & DSG Pages
+ let graphqlEnginePath: string | undefined
+ let pageSSRModule: string | undefined
try {
- const { GraphQLEngine } = require(path.join(
- program.directory,
- `.cache`,
- `query-engine`
- )) as typeof import("../schema/graphql-engine/entry")
- const { getData, renderPageData, renderHTML } = require(path.join(
- program.directory,
- `.cache`,
- `page-ssr`
- )) as typeof import("../utils/page-ssr-module/entry")
- const graphqlEngine = new GraphQLEngine({
- dbPath: path.join(program.directory, `.cache`, `data`, `datastore`),
- })
+ graphqlEnginePath = require.resolve(
+ path.posix.join(slash(program.directory), `.cache`, `query-engine`)
+ )
+ pageSSRModule = require.resolve(
+ path.posix.join(slash(program.directory), `.cache`, `page-ssr`)
+ )
+ } catch (error) {
+ // TODO: Handle case of engine not being generated
+ }
- router.get(
- `/page-data/:pagePath(*)/page-data.json`,
- async (req, res, next) => {
- const requestedPagePath = req.params.pagePath
- if (!requestedPagePath) {
- return void next()
- }
+ if (graphqlEnginePath && pageSSRModule) {
+ try {
+ const { GraphQLEngine } =
+ require(graphqlEnginePath) as typeof import("../schema/graphql-engine/entry")
+ const { getData, renderPageData, renderHTML } =
+ require(pageSSRModule) as typeof import("../utils/page-ssr-module/entry")
+ const graphqlEngine = new GraphQLEngine({
+ dbPath: path.posix.join(
+ slash(program.directory),
+ `.cache`,
+ `data`,
+ `datastore`
+ ),
+ })
+
+ router.get(
+ `/page-data/:pagePath(*)/page-data.json`,
+ async (req, res, next) => {
+ const requestedPagePath = req.params.pagePath
+ if (!requestedPagePath) {
+ return void next()
+ }
+
+ const potentialPagePath = reverseFixedPagePath(requestedPagePath)
+ const page = graphqlEngine.findPageByPath(potentialPagePath)
- const potentialPagePath = reverseFixedPagePath(requestedPagePath)
- const page = graphqlEngine.findPageByPath(potentialPagePath)
-
- if (page && (page.mode === `DSG` || page.mode === `SSR`)) {
- const requestActivity = report.phantomActivity(
- `request for "${req.path}"`
- )
- requestActivity.start()
- try {
- const spanContext = requestActivity.span.context()
- const data = await getData({
- pathName: req.path,
- graphqlEngine,
- req,
- spanContext,
- })
- const results = await renderPageData({ data, spanContext })
- if (data.serverDataHeaders) {
- for (const [name, value] of Object.entries(
- data.serverDataHeaders
- )) {
- res.setHeader(name, value)
+ if (page && (page.mode === `DSG` || page.mode === `SSR`)) {
+ const requestActivity = report.phantomActivity(
+ `request for "${req.path}"`
+ )
+ requestActivity.start()
+ try {
+ const spanContext = requestActivity.span.context()
+ const data = await getData({
+ pathName: req.path,
+ graphqlEngine,
+ req,
+ spanContext,
+ })
+ const results = await renderPageData({ data, spanContext })
+ if (data.serverDataHeaders) {
+ for (const [name, value] of Object.entries(
+ data.serverDataHeaders
+ )) {
+ res.setHeader(name, value)
+ }
}
- }
- if (page.mode === `SSR` && data.serverDataStatus) {
- return void res.status(data.serverDataStatus).send(results)
- } else {
- return void res.send(results)
+ if (page.mode === `SSR` && data.serverDataStatus) {
+ return void res.status(data.serverDataStatus).send(results)
+ } else {
+ return void res.send(results)
+ }
+ } catch (e) {
+ report.error(
+ `Generating page-data for "${requestedPagePath}" / "${potentialPagePath}" failed.`,
+ e
+ )
+ return res
+ .status(500)
+ .contentType(`text/plain`)
+ .send(`Internal server error.`)
+ } finally {
+ requestActivity.end()
}
- } catch (e) {
- report.error(
- `Generating page-data for "${requestedPagePath}" / "${potentialPagePath}" failed.`,
- e
- )
- return res
- .status(500)
- .contentType(`text/plain`)
- .send(`Internal server error.`)
- } finally {
- requestActivity.end()
}
- }
- return void next()
- }
- )
+ return void next()
+ }
+ )
- router.use(async (req, res, next) => {
- if (req.accepts(`html`)) {
- const potentialPagePath = req.path
- const page = graphqlEngine.findPageByPath(potentialPagePath)
- if (page && (page.mode === `DSG` || page.mode === `SSR`)) {
- const requestActivity = report.phantomActivity(
- `request for "${req.path}"`
- )
- requestActivity.start()
-
- try {
- const spanContext = requestActivity.span.context()
- const data = await getData({
- pathName: potentialPagePath,
- graphqlEngine,
- req,
- spanContext,
- })
- const results = await renderHTML({ data, spanContext })
- if (data.serverDataHeaders) {
- for (const [name, value] of Object.entries(
- data.serverDataHeaders
- )) {
- res.setHeader(name, value)
+ router.use(async (req, res, next) => {
+ if (req.accepts(`html`)) {
+ const potentialPagePath = req.path
+ const page = graphqlEngine.findPageByPath(potentialPagePath)
+ if (page && (page.mode === `DSG` || page.mode === `SSR`)) {
+ const requestActivity = report.phantomActivity(
+ `request for "${req.path}"`
+ )
+ requestActivity.start()
+
+ try {
+ const spanContext = requestActivity.span.context()
+ const data = await getData({
+ pathName: potentialPagePath,
+ graphqlEngine,
+ req,
+ spanContext,
+ })
+ const results = await renderHTML({ data, spanContext })
+ if (data.serverDataHeaders) {
+ for (const [name, value] of Object.entries(
+ data.serverDataHeaders
+ )) {
+ res.setHeader(name, value)
+ }
}
- }
- if (page.mode === `SSR` && data.serverDataStatus) {
- return void res.status(data.serverDataStatus).send(results)
- } else {
- return void res.send(results)
- }
- } catch (e) {
- report.error(`Rendering html for "${potentialPagePath}" failed.`, e)
- return res.status(500).sendFile(`500.html`, { root }, err => {
- if (err) {
- res.contentType(`text/plain`).send(`Internal server error.`)
+ if (page.mode === `SSR` && data.serverDataStatus) {
+ return void res.status(data.serverDataStatus).send(results)
+ } else {
+ return void res.send(results)
}
- })
- } finally {
- requestActivity.end()
+ } catch (e) {
+ report.error(
+ `Rendering html for "${potentialPagePath}" failed.`,
+ e
+ )
+ return res.status(500).sendFile(`500.html`, { root }, err => {
+ if (err) {
+ res.contentType(`text/plain`).send(`Internal server error.`)
+ }
+ })
+ } finally {
+ requestActivity.end()
+ }
}
}
- }
- return next()
- })
- } catch (error) {
- // TODO: Handle case of engine not being generated
+ return next()
+ })
+ } catch (error) {
+ report.panic({
+ id: `98051`,
+ error,
+ context: {},
+ })
+ }
}
const matchPaths = await readMatchPaths(program)
diff --git a/packages/gatsby/src/commands/types.ts b/packages/gatsby/src/commands/types.ts
index 9f481af90fd90..45e9c62ad0b02 100644
--- a/packages/gatsby/src/commands/types.ts
+++ b/packages/gatsby/src/commands/types.ts
@@ -34,6 +34,8 @@ export interface IProgram {
graphqlTracing?: boolean
verbose?: boolean
prefixPaths?: boolean
+ functionsPlatform?: string
+ functionsArch?: string
setStore?: (store: Store) => void
disablePlugins?: Array<{
name: string
diff --git a/packages/gatsby/src/internal-plugins/functions/api-function-webpack-loader.ts b/packages/gatsby/src/internal-plugins/functions/api-function-webpack-loader.ts
index 2642f691431c8..2b6905b2c55eb 100644
--- a/packages/gatsby/src/internal-plugins/functions/api-function-webpack-loader.ts
+++ b/packages/gatsby/src/internal-plugins/functions/api-function-webpack-loader.ts
@@ -10,17 +10,17 @@ const APIFunctionLoader: LoaderDefinition = async function () {
return /* javascript */ `
const preferDefault = m => (m && m.default) || m
- const functionModule = require('${modulePath}');
+ const functionModule = require('${slash(modulePath)}');
const functionToExecute = preferDefault(functionModule);
const matchPath = '${matchPath}';
- const { match: reachMatch } = require('${require.resolve(
- `@gatsbyjs/reach-router`
+ const { match: reachMatch } = require('${slash(
+ require.resolve(`@gatsbyjs/reach-router`)
)}');
- const { urlencoded, text, json, raw } = require('${require.resolve(
- `body-parser`
+ const { urlencoded, text, json, raw } = require('${slash(
+ require.resolve(`body-parser`)
)}')
- const multer = require('${require.resolve(`multer`)}')
- const { createConfig } = require('${require.resolve(`./config`)}')
+ const multer = require('${slash(require.resolve(`multer`))}')
+ const { createConfig } = require('${slash(require.resolve(`./config`))}')
function functionWrapper(req, res) {
if (matchPath) {
diff --git a/packages/gatsby/src/schema/graphql-engine/bootstrap.ts b/packages/gatsby/src/schema/graphql-engine/bootstrap.ts
index 8b7c0c1033572..e4e4b9cf81e72 100644
--- a/packages/gatsby/src/schema/graphql-engine/bootstrap.ts
+++ b/packages/gatsby/src/schema/graphql-engine/bootstrap.ts
@@ -1,6 +1,7 @@
// "engines-fs-provider" must be first import, as it sets up global
// fs and this need to happen before anything else tries to import fs
import "../../utils/engines-fs-provider"
+import "./platform-and-arch-check"
import { getCache as getGatsbyCache } from "../../utils/get-cache"
diff --git a/packages/gatsby/src/schema/graphql-engine/bundle-webpack.ts b/packages/gatsby/src/schema/graphql-engine/bundle-webpack.ts
index bf73d6021cdd3..0a559b951ea97 100644
--- a/packages/gatsby/src/schema/graphql-engine/bundle-webpack.ts
+++ b/packages/gatsby/src/schema/graphql-engine/bundle-webpack.ts
@@ -9,20 +9,32 @@ import { dependencies } from "gatsby/package.json"
import { printQueryEnginePlugins } from "./print-plugins"
import mod from "module"
import { WebpackLoggingPlugin } from "../../utils/webpack/plugins/webpack-logging"
+import { getAssetMeta } from "@vercel/webpack-asset-relocator-loader"
import reporter from "gatsby-cli/lib/reporter"
import { schemaCustomizationAPIs } from "./print-plugins"
import type { GatsbyNodeAPI } from "../../redux/types"
import * as nodeApis from "../../utils/api-node-docs"
import { store } from "../../redux"
import { PackageJson } from "../../.."
+import { slash } from "gatsby-core-utils/path"
+import { isEqual } from "lodash"
+import {
+ IPlatformAndArch,
+ getCurrentPlatformAndTarget,
+ getFunctionsTargetPlatformAndTarget,
+} from "../../utils/engines-helpers"
type Reporter = typeof reporter
const extensions = [`.mjs`, `.js`, `.json`, `.node`, `.ts`, `.tsx`]
-const outputDir = path.join(process.cwd(), `.cache`, `query-engine`)
-const cacheLocation = path.join(
- process.cwd(),
+const outputDir = path.posix.join(
+ slash(process.cwd()),
+ `.cache`,
+ `query-engine`
+)
+const cacheLocation = path.posix.join(
+ slash(process.cwd()),
`.cache`,
`webpack`,
`query-engine`
@@ -38,69 +50,236 @@ function getApisToRemoveForQueryEngine(): Array<string> {
return apisToRemove
}
-const getInternalPackagesCacheDir = (): string =>
- path.join(process.cwd(), `.cache/internal-packages`)
+const getInternalPackagesCacheDir = (
+ functionsTarget: IPlatformAndArch
+): string =>
+ path.posix.join(
+ slash(process.cwd()),
+ `.cache`,
+ `internal-packages`,
+ `${functionsTarget.platform}-${functionsTarget.arch}`
+ )
// Create a directory and JS module where we install internally used packages
-const createInternalPackagesCacheDir = async (): Promise<void> => {
- const cacheDir = getInternalPackagesCacheDir()
+const createInternalPackagesCacheDir = async (
+ functionsTarget: IPlatformAndArch
+): Promise<void> => {
+ const cacheDir = getInternalPackagesCacheDir(functionsTarget)
await fs.ensureDir(cacheDir)
- await fs.emptyDir(cacheDir)
const packageJsonPath = path.join(cacheDir, `package.json`)
- await fs.outputJson(packageJsonPath, {
- name: `gatsby-internal-packages`,
- description: `This directory contains internal packages installed by Gatsby used to comply with the current platform requirements`,
- version: `1.0.0`,
- private: true,
- author: `Gatsby`,
- license: `MIT`,
- })
+ if (!fs.existsSync(packageJsonPath)) {
+ await fs.emptyDir(cacheDir)
+
+ await fs.outputJson(packageJsonPath, {
+ name: `gatsby-internal-packages`,
+ description: `This directory contains internal packages installed by Gatsby used to comply with the current platform requirements`,
+ version: `1.0.0`,
+ private: true,
+ author: `Gatsby`,
+ license: `MIT`,
+ functionsTarget,
+ })
+ }
}
-// lmdb module with prebuilt binaries for our platform
-const lmdbPackage = `@lmdb/lmdb-${process.platform}-${process.arch}`
-
-// Detect if the prebuilt binaries for lmdb have been installed. These are installed under @lmdb and are tied to each platform/arch. We've seen instances where regular installations lack these modules because of a broken lockfile or skipping optional dependencies installs
-function installPrebuiltLmdb(): boolean {
+function getLMDBBinaryFromSiteLocation(
+ lmdbPackageName: string,
+ version: string,
+ functionsTarget: IPlatformAndArch
+): string | undefined {
// Read lmdb's package.json, go through its optional depedencies and validate if there's a prebuilt lmdb module with a compatible binary to our platform and arch
let packageJson: PackageJson
try {
const modulePath = path
- .dirname(require.resolve(`lmdb`))
+ .dirname(slash(require.resolve(`lmdb`)))
.replace(`/dist`, ``)
const packageJsonPath = path.join(modulePath, `package.json`)
packageJson = JSON.parse(fs.readFileSync(packageJsonPath, `utf-8`))
} catch (e) {
// If we fail to read lmdb's package.json there's bigger problems here so just skip installation
- return false
+ return undefined
}
// If there's no lmdb prebuilt package for our arch/platform listed as optional dep no point in trying to install it
- const { optionalDependencies } = packageJson
- if (!optionalDependencies) return false
- if (!Object.keys(optionalDependencies).find(p => p === lmdbPackage))
- return false
+ const { optionalDependencies = {} } = packageJson
+ if (!Object.keys(optionalDependencies).find(p => p === lmdbPackageName)) {
+ throw new Error(
+ `Target platform/arch for functions execution (${functionsTarget.platform}/${functionsTarget.arch}) is not supported.`
+ )
+ }
+ return getPackageLocationFromRequireContext(
+ slash(require.resolve(`lmdb`)),
+ lmdbPackageName,
+ version
+ )
+}
+
+function getPackageLocationFromRequireContext(
+ location: string,
+ packageName: string,
+ packageVersion?: string
+): string | undefined {
try {
- const lmdbRequire = mod.createRequire(require.resolve(`lmdb`))
- lmdbRequire.resolve(lmdbPackage)
- return false
+ const requireId = `${packageName}/package.json`
+ const locationRequire = mod.createRequire(location)
+ const packageJsonLocation = slash(locationRequire.resolve(requireId))
+
+ if (packageVersion) {
+ // delete locationRequire.cache[requireId]
+ const { version } = JSON.parse(
+ fs.readFileSync(packageJsonLocation, `utf-8`)
+ )
+ if (packageVersion !== version) {
+ return undefined
+ }
+ }
+
+ return path.dirname(packageJsonLocation)
} catch (e) {
- return true
+ return undefined
+ }
+}
+
+interface ILMDBBinaryPackageStatusBase {
+ packageName: string
+ needToInstall: boolean
+ packageVersion: string
+}
+
+interface ILMDBBinaryPackageStatusInstalled
+ extends ILMDBBinaryPackageStatusBase {
+ needToInstall: false
+ packageLocation: string
+}
+
+interface ILMDBBinaryPackageStatusNeedAlternative
+ extends ILMDBBinaryPackageStatusBase {
+ needToInstall: true
+}
+
+type IBinaryPackageStatus =
+ | ILMDBBinaryPackageStatusInstalled
+ | ILMDBBinaryPackageStatusNeedAlternative
+
+function checkIfInstalledInInternalPackagesCache(
+ packageStatus: IBinaryPackageStatus,
+ functionsTarget: IPlatformAndArch
+): IBinaryPackageStatus {
+ const cacheDir = getInternalPackagesCacheDir(functionsTarget)
+
+ const packageLocationFromInternalPackageCache =
+ getPackageLocationFromRequireContext(
+ path.posix.join(cacheDir, `:internal:`),
+ packageStatus.packageName,
+ packageStatus.packageVersion
+ )
+
+ if (
+ packageLocationFromInternalPackageCache &&
+ !path.posix
+ .relative(cacheDir, packageLocationFromInternalPackageCache)
+ .startsWith(`..`)
+ ) {
+ return {
+ ...packageStatus,
+ needToInstall: false,
+ packageLocation: packageLocationFromInternalPackageCache,
+ }
+ }
+
+ return {
+ ...packageStatus,
+ needToInstall: true,
}
}
// Install lmdb's native system module under our internal cache if we detect the current installation
// isn't using the pre-build binaries
-async function installIfMissingLmdb(): Promise<string | undefined> {
- if (!installPrebuiltLmdb()) return undefined
+function checkIfNeedToInstallMissingLmdb(
+ functionsTarget: IPlatformAndArch
+): IBinaryPackageStatus {
+ // lmdb module with prebuilt binaries for target platform
+ const lmdbPackageName = `@lmdb/lmdb-${functionsTarget.platform}-${functionsTarget.arch}`
+
+ const lmdbBinaryFromSiteLocation = getLMDBBinaryFromSiteLocation(
+ lmdbPackageName,
+ dependencies.lmdb,
+ functionsTarget
+ )
+
+ const sharedPackageStatus: ILMDBBinaryPackageStatusNeedAlternative = {
+ needToInstall: true,
+ packageName: lmdbPackageName,
+ packageVersion: dependencies.lmdb,
+ }
+
+ if (lmdbBinaryFromSiteLocation) {
+ return {
+ ...sharedPackageStatus,
+ needToInstall: false,
+ packageLocation: lmdbBinaryFromSiteLocation,
+ }
+ }
+
+ return checkIfInstalledInInternalPackagesCache(
+ sharedPackageStatus,
+ functionsTarget
+ )
+}
+
+function checkIfNeedToInstallMissingSharp(
+ functionsTarget: IPlatformAndArch,
+ currentTarget: IPlatformAndArch
+): IBinaryPackageStatus | undefined {
+ try {
+ // check if shapr is resolvable
+ const { version: sharpVersion } = require(`sharp/package.json`)
+
+ if (isEqual(functionsTarget, currentTarget)) {
+ return undefined
+ }
+
+ return checkIfInstalledInInternalPackagesCache(
+ {
+ needToInstall: true,
+ packageName: `sharp`,
+ packageVersion: sharpVersion,
+ },
+ functionsTarget
+ )
+ } catch (e) {
+ return undefined
+ }
+}
- await createInternalPackagesCacheDir()
+async function installMissing(
+  packages: Array<IBinaryPackageStatus | undefined>,
+ functionsTarget: IPlatformAndArch
+): Promise<Array<IBinaryPackageStatus | undefined>> {
+ function shouldInstall(
+ p: IBinaryPackageStatus | undefined
+ ): p is IBinaryPackageStatus {
+ return Boolean(p?.needToInstall)
+ }
+
+ const packagesToInstall = packages.filter(shouldInstall)
+
+ if (packagesToInstall.length === 0) {
+ return packages
+ }
+
+ await createInternalPackagesCacheDir(functionsTarget)
+
+ const cacheDir = getInternalPackagesCacheDir(functionsTarget)
- const cacheDir = getInternalPackagesCacheDir()
const options: ExecaOptions = {
stderr: `inherit`,
cwd: cacheDir,
+ env: {
+ npm_config_arch: functionsTarget.arch,
+ npm_config_platform: functionsTarget.platform,
+ },
}
const npmAdditionalCliArgs = [
@@ -113,15 +292,35 @@ async function installIfMissingLmdb(): Promise {
`always`,
`--legacy-peer-deps`,
`--save-exact`,
+ // target platform might be different than current and force allows us to install it
+ `--force`,
]
await execa(
`npm`,
- [`install`, ...npmAdditionalCliArgs, `${lmdbPackage}@${dependencies.lmdb}`],
+ [
+ `install`,
+ ...npmAdditionalCliArgs,
+ ...packagesToInstall.map(p => `${p.packageName}@${p.packageVersion}`),
+ ],
options
)
- return path.join(cacheDir, `node_modules`, lmdbPackage)
+ return packages.map(info =>
+ info
+ ? info.needToInstall
+ ? {
+ ...info,
+ needToInstall: false,
+ packageLocation: path.posix.join(
+ cacheDir,
+ `node_modules`,
+ info.packageName
+ ),
+ }
+ : info
+ : undefined
+ )
}
export async function createGraphqlEngineBundle(
@@ -151,17 +350,45 @@ export async function createGraphqlEngineBundle(
require.resolve(`gatsby-plugin-typescript`)
)
- // Alternative lmdb path we've created to self heal from a "broken" lmdb installation
- const alternativeLmdbPath = await installIfMissingLmdb()
+ const currentTarget = getCurrentPlatformAndTarget()
+ const functionsTarget = getFunctionsTargetPlatformAndTarget()
- // We force a specific lmdb binary module if we detected a broken lmdb installation or if we detect the presence of an adapter
+ const dynamicAliases: Record = {}
let forcedLmdbBinaryModule: string | undefined = undefined
- if (state.adapter.instance) {
- forcedLmdbBinaryModule = `${lmdbPackage}/node.abi83.glibc.node`
+
+ // we need to make sure we have internal packages cache directory setup for current lambda target
+ // before we attempt to check if we can reuse those packages
+ await createInternalPackagesCacheDir(functionsTarget)
+
+ const [lmdbPackageInfo, sharpPackageInfo] = await installMissing(
+ [
+ checkIfNeedToInstallMissingLmdb(functionsTarget),
+ checkIfNeedToInstallMissingSharp(functionsTarget, currentTarget),
+ ],
+ functionsTarget
+ )
+
+ if (!lmdbPackageInfo) {
+ throw new Error(`Failed to find required LMDB binary`)
+ } else if (functionsTarget.platform === `linux`) {
+ // function execution platform is primarily linux, which is tested the most, so we only force that specific binary
+ // to not cause untested code paths
+ if (lmdbPackageInfo.needToInstall) {
+ throw new Error(
+ `Failed to locate or install LMDB binary for functions execution platform/arch (${functionsTarget.platform}/${functionsTarget.arch})`
+ )
+ }
+
+ forcedLmdbBinaryModule = `${lmdbPackageInfo.packageLocation}/node.abi83.glibc.node`
}
- // We always force the binary if we've installed an alternative path
- if (alternativeLmdbPath) {
- forcedLmdbBinaryModule = `${alternativeLmdbPath}/node.abi83.glibc.node`
+
+ if (sharpPackageInfo) {
+ if (sharpPackageInfo.needToInstall) {
+ throw new Error(
+ `Failed to locate or install Sharp binary for functions execution platform/arch (${functionsTarget.platform}/${functionsTarget.arch})`
+ )
+ }
+ dynamicAliases[`sharp$`] = sharpPackageInfo.packageLocation
}
const compiler = webpack({
@@ -185,6 +412,7 @@ export async function createGraphqlEngineBundle(
buildDependencies: {
config: [__filename],
},
+ version: JSON.stringify(functionsTarget),
},
// those are required in some runtime paths, but we don't need them
externals: [
@@ -299,6 +527,7 @@ export async function createGraphqlEngineBundle(
resolve: {
extensions,
alias: {
+ ...dynamicAliases,
".cache": process.cwd() + `/.cache/`,
[require.resolve(`gatsby-cli/lib/reporter/loggers/ink/index.js`)]:
@@ -318,7 +547,6 @@ export async function createGraphqlEngineBundle(
plugins: [
new webpack.EnvironmentPlugin([`GATSBY_CLOUD_IMAGE_CDN`]),
new webpack.DefinePlugin({
- // "process.env.GATSBY_LOGGER": JSON.stringify(`yurnalist`),
"process.env.GATSBY_SKIP_WRITING_SCHEMA_TO_FILE": `true`,
"process.env.NODE_ENV": JSON.stringify(`production`),
SCHEMA_SNAPSHOT: JSON.stringify(schemaSnapshotString),
@@ -327,6 +555,12 @@ export async function createGraphqlEngineBundle(
"process.env.GATSBY_SLICES": JSON.stringify(
!!process.env.GATSBY_SLICES
),
+ "process.env.GATSBY_FUNCTIONS_PLATFORM": JSON.stringify(
+ functionsTarget.platform
+ ),
+ "process.env.GATSBY_FUNCTIONS_ARCH": JSON.stringify(
+ functionsTarget.arch
+ ),
}),
process.env.GATSBY_WEBPACK_LOGGING?.includes(`query-engine`) &&
new WebpackLoggingPlugin(rootDir, reporter, isVerbose),
@@ -334,7 +568,7 @@ export async function createGraphqlEngineBundle(
})
return new Promise((resolve, reject) => {
- compiler.run((err, stats): void => {
+ compiler.run(async (err, stats): Promise => {
function getResourcePath(
webpackModule?: Module | NormalModule | ConcatenatedModule | null
): string | undefined {
@@ -387,6 +621,36 @@ export async function createGraphqlEngineBundle(
iterateModules(stats.compilation.modules, stats.compilation)
}
+ if (!isEqual(functionsTarget, currentTarget)) {
+ const binaryFixingPromises: Array> = []
+ // sigh - emitAsset used by relocator seems to corrupt binaries
+ // resulting in "ELF file's phentsize not the expected size" errors
+ // - see size diff
+ // > find . -name node.abi83.glibc.node
+ // ./.cache/internal-packages/node_modules/@lmdb/lmdb-linux-x64/node.abi83.glibc.node
+ // ./.cache/query-engine/assets/node.abi83.glibc.node
+ // > ls -al ./.cache/query-engine/assets/node.abi83.glibc.node
+ // -rw-r--r-- 1 misiek 197121 1285429 Mar 14 11:36 ./.cache/query-engine/assets/node.abi83.glibc.node
+ // > ls -al ./.cache/internal-packages/node_modules/@lmdb/lmdb-linux-x64/node.abi83.glibc.node
+ // -rw-r--r-- 1 misiek 197121 693544 Mar 14 11:35 ./.cache/internal-packages/node_modules/@lmdb/lmdb-linux-x64/node.abi83.glibc.node
+ // so this tries to fix it by straight copying it over
+ for (const asset of (
+ stats?.compilation?.assetsInfo ?? new Map()
+ ).keys()) {
+ if (asset?.endsWith(`.node`)) {
+ const targetRelPath = path.posix.relative(`assets`, asset)
+ const assetMeta = getAssetMeta(targetRelPath, stats?.compilation)
+ const sourcePath = assetMeta?.path
+ if (sourcePath) {
+ const dist = path.join(outputDir, asset)
+ binaryFixingPromises.push(fs.copyFile(sourcePath, dist))
+ }
+ }
+ }
+
+ await Promise.all(binaryFixingPromises)
+ }
+
compiler.close(closeErr => {
if (err) {
return reject(err)
diff --git a/packages/gatsby/src/schema/graphql-engine/platform-and-arch-check.ts b/packages/gatsby/src/schema/graphql-engine/platform-and-arch-check.ts
new file mode 100644
index 0000000000000..d608d6094f25c
--- /dev/null
+++ b/packages/gatsby/src/schema/graphql-engine/platform-and-arch-check.ts
@@ -0,0 +1,12 @@
+if (
+ process.env.GATSBY_FUNCTIONS_PLATFORM !== process.platform ||
+ process.env.GATSBY_FUNCTIONS_ARCH !== process.arch
+) {
+ throw new Error(
+ `Incompatible DSG/SSR executing environment. Function was built for "${process.env.GATSBY_FUNCTIONS_PLATFORM}/${process.env.GATSBY_FUNCTIONS_ARCH}" but is executing on "${process.platform}/${process.arch}".` +
+ (process.env.gatsby_executing_command === `serve`
+ ? `\n\nIf you are trying to run DSG/SSR engine locally, consider using experimental utility to rebuild functions for your local platform:\n\nnode node_modules/gatsby/dist/schema/graphql-engine/standalone-regenerate.js`
+ : ``) +
+ `\n\nTo generate engines for "${process.platform}/${process.arch}" run 'gatsby build --functions-platform=${process.platform} --functions-arch=${process.arch}' or run 'gatsby build' with following environment variables:\n\nGATSBY_FUNCTIONS_PLATFORM=${process.platform}\nGATSBY_FUNCTIONS_ARCH=${process.arch}`
+ )
+}
diff --git a/packages/gatsby/src/schema/graphql-engine/standalone-regenerate.ts b/packages/gatsby/src/schema/graphql-engine/standalone-regenerate.ts
index a76ef7bc14a96..8bbb8ae92a3e0 100644
--- a/packages/gatsby/src/schema/graphql-engine/standalone-regenerate.ts
+++ b/packages/gatsby/src/schema/graphql-engine/standalone-regenerate.ts
@@ -1,16 +1,19 @@
#!/usr/bin/env node
/*
-this is used for development purposes only
-to be able to run `gatsby build` once to source data
-and print schema and then just rebundle graphql-engine
+This is used mostly for development purposes, but it can also be used to
+regenerate just the engines for the local platform/arch if the previous full
+build was done to deploy on a platform with a different arch/platform.
+
+For development purposes this is used to be able to run `gatsby build` once to
+source data and print schema and then just rebundle graphql-engine
with source file changes and test re-built engine quickly
Usage:
There need to be at least one successful `gatsby build`
before starting to use this script (warm up datastore,
generate "page-ssr" bundle). Once that's done you can
-run following command in test site directory:
+run following command in site directory:
```shell
node node_modules/gatsby/dist/schema/graphql-engine/standalone-regenerate.js
@@ -23,18 +26,18 @@ import reporter from "gatsby-cli/lib/reporter"
import { loadConfigAndPlugins } from "../../utils/worker/child/load-config-and-plugins"
import * as fs from "fs-extra"
import { store } from "../../redux"
-import { validateEngines } from "../../utils/validate-engines"
+import { validateEnginesWithActivity } from "../../utils/validate-engines"
async function run(): Promise {
process.env.GATSBY_SLICES = `1`
// load config
- console.log(`loading config and plugins`)
+ reporter.verbose(`loading config and plugins`)
await loadConfigAndPlugins({
siteDirectory: process.cwd(),
})
try {
- console.log(`clearing webpack cache\n\n`)
+ reporter.verbose(`clearing webpack cache`)
// get rid of cache if it exist
await fs.remove(process.cwd() + `/.cache/webpack/query-engine`)
await fs.remove(process.cwd() + `/.cache/webpack/page-ssr`)
@@ -46,7 +49,7 @@ async function run(): Promise {
// recompile
const buildActivityTimer = reporter.activityTimer(
- `Building Rendering Engines`
+ `(Re)Building Rendering Engines`
)
try {
buildActivityTimer.start()
@@ -67,20 +70,9 @@ async function run(): Promise {
buildActivityTimer.end()
}
- // validate
- const validateEnginesActivity = reporter.activityTimer(
- `Validating Rendering Engines`
- )
- validateEnginesActivity.start()
- try {
- await validateEngines(process.cwd())
- } catch (error) {
- validateEnginesActivity.panic({ id: `98001`, context: {}, error })
- } finally {
- validateEnginesActivity.end()
- }
+ await validateEnginesWithActivity(process.cwd())
- console.log(`DONE`)
+ reporter.info(`Rebuilding Rendering Engines finished`)
}
run()
diff --git a/packages/gatsby/src/utils/adapter/init.ts b/packages/gatsby/src/utils/adapter/init.ts
index 5d7aecd50edc9..0afab63c2f5e2 100644
--- a/packages/gatsby/src/utils/adapter/init.ts
+++ b/packages/gatsby/src/utils/adapter/init.ts
@@ -9,6 +9,7 @@ import { satisfies } from "semver"
import type { AdapterInit } from "./types"
import { preferDefault } from "../../bootstrap/prefer-default"
import { getLatestAdapters } from "../get-latest-gatsby-files"
+import { maybeAddFileProtocol } from "../../bootstrap/resolve-js-file-path"
export const getAdaptersCacheDir = (): string =>
join(process.cwd(), `.cache/adapters`)
@@ -85,7 +86,9 @@ const tryLoadingAlreadyInstalledAdapter = async ({
}
}
- const required = locationRequire.resolve(adapterToUse.module)
+ const required = maybeAddFileProtocol(
+ locationRequire.resolve(adapterToUse.module)
+ )
if (required) {
return {
found: true,
diff --git a/packages/gatsby/src/utils/adapter/manager.ts b/packages/gatsby/src/utils/adapter/manager.ts
index 0178da120c4f9..7879565715030 100644
--- a/packages/gatsby/src/utils/adapter/manager.ts
+++ b/packages/gatsby/src/utils/adapter/manager.ts
@@ -286,6 +286,8 @@ export async function initAdapterManager(): Promise {
deployURL: configFromAdapter?.deployURL,
supports: configFromAdapter?.supports,
pluginsToDisable: configFromAdapter?.pluginsToDisable ?? [],
+ functionsArch: configFromAdapter?.functionsArch,
+ functionsPlatform: configFromAdapter?.functionsPlatform,
}
},
}
diff --git a/packages/gatsby/src/utils/adapter/types.ts b/packages/gatsby/src/utils/adapter/types.ts
index c23dee9667294..3536ae0b2b8bf 100644
--- a/packages/gatsby/src/utils/adapter/types.ts
+++ b/packages/gatsby/src/utils/adapter/types.ts
@@ -201,6 +201,18 @@ export interface IAdapterConfig {
* example for the Netlify adapter.
*/
fileCDNUrlGeneratorModulePath?: string
+ /**
+ * The platform bundled functions will execute on. Usually should be `linux`.
+ * This will be used if user didn't specify `GATSBY_FUNCTIONS_PLATFORM` environment variable
+ * or used `--functions-platform` CLI toggle. If none is defined current platform (process.platform) will be used.
+ */
+ functionsPlatform?: string
+ /**
+ * The architecture bundled functions will execute on. Usually should be `x64`.
+ * This will be used if user didn't specify `GATSBY_FUNCTIONS_ARCH` environment variable
+ * or used `--functions-arch` CLI toggle. If none is defined current arch (process.arch) will be used.
+ */
+ functionsArch?: string
}
type WithRequired = T & { [P in K]-?: T[P] }
diff --git a/packages/gatsby/src/utils/engines-helpers.ts b/packages/gatsby/src/utils/engines-helpers.ts
index 20c7814986339..0e01e5e6eb7de 100644
--- a/packages/gatsby/src/utils/engines-helpers.ts
+++ b/packages/gatsby/src/utils/engines-helpers.ts
@@ -33,3 +33,34 @@ function getCDNObfuscatedPath(path: string): string {
}
export const LmdbOnCdnPath = getCDNObfuscatedPath(`data.mdb`)
+
+export interface IPlatformAndArch {
+ platform: string
+ arch: string
+}
+
+const currentTarget: IPlatformAndArch = {
+ platform: process.platform,
+ arch: process.arch,
+}
+
+export function getCurrentPlatformAndTarget(): IPlatformAndArch {
+ return currentTarget
+}
+
+export function getFunctionsTargetPlatformAndTarget(): IPlatformAndArch {
+ const state = store.getState()
+
+ return {
+ platform:
+ process.env.GATSBY_FUNCTIONS_PLATFORM ??
+ state.program.functionsPlatform ??
+ state.adapter.config.functionsPlatform ??
+ currentTarget.platform,
+ arch:
+ process.env.GATSBY_FUNCTIONS_ARCH ??
+ state.program.functionsArch ??
+ state.adapter.config.functionsArch ??
+ currentTarget.arch,
+ }
+}
diff --git a/packages/gatsby/src/utils/validate-engines/index.ts b/packages/gatsby/src/utils/validate-engines/index.ts
index 276e7c780c8f5..50f3a8b3f8b43 100644
--- a/packages/gatsby/src/utils/validate-engines/index.ts
+++ b/packages/gatsby/src/utils/validate-engines/index.ts
@@ -1,6 +1,45 @@
+import reporter from "gatsby-cli/lib/reporter"
import { WorkerPool } from "gatsby-worker"
+import { isEqual } from "lodash"
+import type { Span } from "opentracing"
+import {
+ getCurrentPlatformAndTarget,
+ getFunctionsTargetPlatformAndTarget,
+} from "../engines-helpers"
-export async function validateEngines(directory: string): Promise {
+export async function validateEnginesWithActivity(
+ directory: string,
+ buildSpan?: Span
+): Promise {
+ if (
+ !isEqual(
+ getCurrentPlatformAndTarget(),
+ getFunctionsTargetPlatformAndTarget()
+ )
+ ) {
+ reporter.info(
+ `Skipping Rendering Engines validation as they are built for a different platform and/or architecture`
+ )
+ return
+ }
+
+ const validateEnginesActivity = reporter.activityTimer(
+ `Validating Rendering Engines`,
+ {
+ parentSpan: buildSpan,
+ }
+ )
+ validateEnginesActivity.start()
+ try {
+ await validateEngines(directory)
+ } catch (error) {
+ validateEnginesActivity.panic({ id: `98001`, context: {}, error })
+ } finally {
+ validateEnginesActivity.end()
+ }
+}
+
+async function validateEngines(directory: string): Promise {
const worker = new WorkerPool(
require.resolve(`./child`),
{