diff --git a/.changesets/11154.md b/.changesets/11154.md
new file mode 100644
index 000000000000..920a5b7ae784
--- /dev/null
+++ b/.changesets/11154.md
@@ -0,0 +1,124 @@
+- feat(rw-uploads): Create uploads package with prisma extension and upload processor (#11154) by @dac09
+
+Introduces the `@redwoodjs/uploads` package, which houses
+
+- A Prisma extension for handling uploads. Currently:
+  a) Query extension: saves, deletes and replaces files on disk during CRUD operations
+  b) Result extension: gives you functions like `.withSignedUrl` on configured Prisma results - these take the stored paths and convert them to signed URLs
+- Storage adapters, e.g. file system and memory, to use with the Prisma extension
+- Processors - i.e. utility functions that take [`File`s](https://developer.mozilla.org/en-US/docs/Web/API/File) and save them to storage
+
+## Usage
+
+In `api/src/lib/uploads.ts`, set up uploads - processors, storage and the Prisma extension.
+
+```ts
+// api/src/lib/uploads.ts
+
+import { UploadsConfig } from '@redwoodjs/uploads'
+import { setupUploads } from '@redwoodjs/uploads'
+import { FileSystemStorage } from '@redwoodjs/uploads/FileSystemStorage'
+import { UrlSigner } from '@redwoodjs/uploads/signedUrl'
+
+const uploadConfig: UploadsConfig = {
+  // 👇 prisma model
+  profile: {
+    // 👇 pass in fields that are going to be File uploads
+    // these fields should be of type String in your Prisma schema
+    fields: ['avatar', 'coverPhoto'],
+  },
+}
+
+// 👇 exporting these allows you access elsewhere on the api side
+export const storage = new FileSystemStorage({
+  baseDir: './uploads',
+})
+
+// Optional
+export const urlSigner = new UrlSigner({
+  secret: process.env.UPLOADS_SECRET,
+  endpoint: '/signedUrl',
+})
+
+const { uploadsProcessors, prismaExtension } = setupUploads(
+  uploadConfig,
+  storage,
+  urlSigner,
+)
+
+export { uploadsProcessors, prismaExtension }
+```
+
+### Configuring db to use the prisma extension
+
+```ts
+// api/src/lib/db.ts
+
+import { PrismaClient } from '@prisma/client'
+
+import { emitLogLevels, handlePrismaLogging } from '@redwoodjs/api/logger'
+
+import { logger } from './logger'
+import { prismaExtension } from './uploads'
+
+// 👇 Notice here we create the prisma client, and don't export it yet
+export const prismaClient = new PrismaClient({
+  log: emitLogLevels(['info', 'warn', 'error']),
+})
+
+handlePrismaLogging({
+  db: prismaClient,
+  logger,
+  logLevels: ['info', 'warn', 'error'],
+})
+
+// 👇 Export db after adding the uploads extension
+export const db = prismaClient.$extends(prismaExtension)
+```
+
+## Using Prisma extension
+
+### A) CRUD operations
+
+No need to do anything extra here, but you do have to use the processors to supply Prisma with data in the correct format (string paths rather than `File` objects).
+
+### B) Result extensions
+
+```ts
+// api/src/services/profiles/profiles.ts
+
+export const profile: QueryResolvers['profile'] = async ({ id }) => {
+  // 👇 await the result from your prisma query
+  const profile = await db.profile.findUnique({
+    where: { id },
+  })
+
+  // Convert the avatar and coverPhoto fields to signed URLs
+  // Note that you still need to add an api endpoint to handle these signed URLs
+  return profile?.withSignedUrl()
+}
+```
+
+## Using processors
+
+In your services, you can use the preconfigured "processors" to convert `File`s to string paths for Prisma to save to the database. The processors and storage adapter determine where the file is saved.
+
+```ts
+// api/src/services/profiles/profiles.ts
+
+export const updateProfile: MutationResolvers['updateProfile'] = async ({
+  id,
+  input,
+}) => {
+  const processedInput = await uploadsProcessors.processProfileUploads(input)
+
+  // This becomes a string 👇
+  // Where it gets saved is configured when we set up uploads in src/lib/uploads.ts
+  // processedInput.avatar = '/mySavePath/profile/avatar/generatedId.jpg'
+
+  return db.profile.update({
+    data: processedInput,
+    where: { id },
+  })
+}
+```
diff --git a/.eslintrc.js b/.eslintrc.js
index a7094e6c548f..3ee6aec61361 100644
--- a/.eslintrc.js
+++ b/.eslintrc.js
@@ -59,6 +59,7 @@ module.exports = {
     'packages/babel-config/src/__tests__/__fixtures__/**/*',
     'packages/codemods/**/__testfixtures__/**/*',
     'packages/cli/**/__testfixtures__/**/*',
+    'packages/uploads/src/__tests__/prisma-client/*',
   ],
   rules: {
     curly: 'error',
diff --git a/packages/uploads/.gitignore b/packages/uploads/.gitignore
new file mode 100644
index 000000000000..9b5d07c455ec
--- /dev/null
+++ b/packages/uploads/.gitignore
@@ -0,0 +1,4 @@
+src/__tests__/migrations/*
+src/__tests__/for_unit_test.db*
+.attw.json
+src/__tests__/prisma-client/*
diff --git a/packages/uploads/README.md b/packages/uploads/README.md
new file mode 100644
index 000000000000..764a58aec2cd
--- /dev/null
+++ b/packages/uploads/README.md
@@ -0,0 +1,124 @@
+# `@redwoodjs/uploads`
+
+This package houses
+
+- A Prisma extension for handling uploads. Currently:
+  a) Query extension: saves, deletes and replaces files on disk during CRUD operations
+  b) Result extension: gives you functions like `.withSignedUrl` on configured Prisma results - these take the stored paths and convert them to signed URLs
+- Storage adapters, e.g. file system and memory, to use with the Prisma extension
+- Processors - i.e. utility functions that take [`File`s](https://developer.mozilla.org/en-US/docs/Web/API/File) and save them to storage
+
+## Usage
+
+In `api/src/lib/uploads.ts`, set up uploads - processors, storage and the Prisma extension.
+
+```ts
+// api/src/lib/uploads.ts
+
+import { UploadsConfig } from '@redwoodjs/uploads'
+import { setupUploads } from '@redwoodjs/uploads'
+import { FileSystemStorage } from '@redwoodjs/uploads/FileSystemStorage'
+import { UrlSigner } from '@redwoodjs/uploads/signedUrl'
+
+const uploadConfig: UploadsConfig = {
+  // 👇 prisma model
+  profile: {
+    // 👇 pass in fields that are going to be File uploads
+    // these fields should be of type String in your Prisma schema
+    fields: ['avatar', 'coverPhoto'],
+  },
+}
+
+// 👇 exporting these allows you access elsewhere on the api side
+export const storage = new FileSystemStorage({
+  baseDir: './uploads',
+})
+
+// Optional
+export const urlSigner = new UrlSigner({
+  secret: process.env.UPLOADS_SECRET,
+  endpoint: '/signedUrl',
+})
+
+const { uploadsProcessors, prismaExtension } = setupUploads(
+  uploadConfig,
+  storage,
+  urlSigner,
+)
+
+export { uploadsProcessors, prismaExtension }
+```
+
+### Configuring db to use the prisma extension
+
+```ts
+// api/src/lib/db.ts
+
+import { PrismaClient } from '@prisma/client'
+
+import { emitLogLevels, handlePrismaLogging } from '@redwoodjs/api/logger'
+
+import { logger } from './logger'
+import { prismaExtension } from './uploads'
+
+// 👇 Notice here we create the prisma client, and don't export it yet
+export const prismaClient = new PrismaClient({
+  log: emitLogLevels(['info', 'warn', 'error']),
+})
+
+handlePrismaLogging({
+  db: prismaClient,
+  logger,
+  logLevels: ['info', 'warn', 'error'],
+})
+
+// 👇 Export db after adding the uploads extension
+export const db = prismaClient.$extends(prismaExtension)
+```
+
+## Using Prisma extension
+
+### A) CRUD operations
+
+No need to do anything extra here, but you do have to use the processors to supply Prisma with data in the correct format (string paths rather than `File` objects).
+
+### B) Result extensions
+
+```ts
+// api/src/services/profiles/profiles.ts
+
+export const profile: QueryResolvers['profile'] = async ({ id }) => {
+  // 👇 await the result from your prisma query
+  const profile = await db.profile.findUnique({
+    where: { id },
+  })
+
+  // Convert the avatar and coverPhoto fields to signed URLs
+  // Note that you still need an api endpoint to serve these signed URLs - see the example below
+  return profile?.withSignedUrl()
+}
+```
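+
+To serve those signed URLs, add an api function that validates the signature and returns the file. Here's a minimal sketch, assuming a function named `signedUrl` to match the `endpoint` configured above - the response handling is illustrative, not part of this package:
+
+```ts
+// api/src/functions/signedUrl/signedUrl.ts
+
+import type { APIGatewayEvent } from 'aws-lambda'
+
+import { storage, urlSigner } from 'src/lib/uploads'
+
+export const handler = async (event: APIGatewayEvent) => {
+  const { s, expiry, path } = event.queryStringParameters || {}
+
+  try {
+    // Throws if the signature is invalid or has expired,
+    // and returns the decoded file path on success
+    const filePath = urlSigner.validateSignature({
+      s: s as string,
+      path: path as string,
+      expiry,
+    })
+
+    const { contents, type } = await storage.read(filePath)
+
+    return {
+      statusCode: 200,
+      headers: { 'Content-Type': type || 'application/octet-stream' },
+      body: contents,
+    }
+  } catch {
+    return { statusCode: 403 }
+  }
+}
+```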
+
+## Using processors
+
+In your services, you can use the preconfigured "processors" to convert `File`s to string paths for Prisma to save to the database. The processors and storage adapter determine where the file is saved.
+
+```ts
+// api/src/services/profiles/profiles.ts
+
+export const updateProfile: MutationResolvers['updateProfile'] = async ({
+  id,
+  input,
+}) => {
+  const processedInput = await uploadsProcessors.processProfileUploads(input)
+
+  // This becomes a string 👇
+  // Where it gets saved is configured when we set up uploads in src/lib/uploads.ts
+  // processedInput.avatar = '/mySavePath/profile/avatar/generatedId.jpg'
+
+  return db.profile.update({
+    data: processedInput,
+    where: { id },
+  })
+}
+```
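+
+### Processing file lists
+
+`uploadsProcessors` also exposes a generic `processFileList` helper that takes an array of `File`s and returns the saved paths. Databases can't store `string[]` directly, so how you persist the returned paths is up to you - the sketch below, with a hypothetical `gallery` model and a JSON string column, is just one option:
+
+```ts
+// api/src/services/galleries/galleries.ts
+
+export const updateGallery = async ({ id, input }) => {
+  // input.images is an array of Files
+  const imagePaths = await uploadsProcessors.processFileList(input.images)
+
+  return db.gallery.update({
+    data: { ...input, images: JSON.stringify(imagePaths) },
+    where: { id },
+  })
+}
+```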
diff --git a/packages/uploads/attw.ts b/packages/uploads/attw.ts
new file mode 100644
index 000000000000..a377f7b5f320
--- /dev/null
+++ b/packages/uploads/attw.ts
@@ -0,0 +1,31 @@
+import { $ } from 'zx'
+
+interface Problem {
+  kind: string
+  entrypoint?: string
+  resolutionKind?: string
+}
+
+await $({ nothrow: true })`yarn attw -P -f json > .attw.json`
+const output = await $`cat .attw.json`
+await $`rm .attw.json`
+
+const json = JSON.parse(output.stdout)
+
+if (!json.analysis.problems || json.analysis.problems.length === 0) {
+  console.log('No errors found')
+  process.exit(0)
+}
+
+if (
+  json.analysis.problems.every(
+    (problem: Problem) => problem.resolutionKind === 'node10',
+  )
+) {
+  console.log("Only found node10 problems, which we don't care about")
+  process.exit(0)
+}
+
+console.log('Errors found')
+console.log(json.analysis.problems)
+process.exit(1)
diff --git a/packages/uploads/build.mts b/packages/uploads/build.mts
new file mode 100644
index 000000000000..da389f21e834
--- /dev/null
+++ b/packages/uploads/build.mts
@@ -0,0 +1,33 @@
+import { build, defaultBuildOptions } from '@redwoodjs/framework-tools'
+import {
+  generateTypesCjs,
+  generateTypesEsm,
+  insertCommonJsPackageJson,
+} from '@redwoodjs/framework-tools/generateTypes'
+
+// ESM build
+await build({
+  buildOptions: {
+    ...defaultBuildOptions,
+    format: 'esm',
+    packages: 'external',
+  },
+})
+
+await generateTypesEsm()
+
+// CJS build
+await build({
+  buildOptions: {
+    ...defaultBuildOptions,
+    outdir: 'dist/cjs',
+    packages: 'external',
+  },
+})
+
+await generateTypesCjs()
+
+await insertCommonJsPackageJson({
+  buildFileUrl: import.meta.url,
+  cjsDir: 'dist/cjs',
+})
diff --git a/packages/uploads/package.json b/packages/uploads/package.json
new file mode 100644
index 000000000000..2f58a831df53
--- /dev/null
+++ b/packages/uploads/package.json
@@ -0,0 +1,78 @@
+{
+  "name": "@redwoodjs/uploads",
+  "version": "7.0.0",
+  "repository": {
+    "type": "git",
+    "url": "git+https://github.com/redwoodjs/redwood.git",
+    "directory": "packages/uploads"
+  },
+  "license": "MIT",
+  "type": "module",
+  "exports": {
+    ".": {
+      "require": {
+        "types": "./dist/cjs/index.d.ts",
+        "default": "./dist/cjs/index.js"
+      },
+      "import": {
+        "types": "./dist/index.d.ts",
+        "default": "./dist/index.js"
+      }
+    },
+    "./FileSystemStorage": {
+      "require": "./dist/cjs/FileSystemStorage.js",
+      "import": "./dist/FileSystemStorage.js"
+    },
+    "./MemoryStorage": {
+      "require": "./dist/cjs/MemoryStorage.js",
+      "import": "./dist/MemoryStorage.js"
+    },
+    "./signedUrl": {
+      "require": "./dist/cjs/signedUrls.js",
+      "import": "./dist/signedUrls.js"
+    },
+    "./prisma": {
+      "require": {
+        "types": "./dist/cjs/prismaExtension.d.ts",
+        "default": "./dist/cjs/prismaExtension.js"
+      },
+      "import": {
+        "types": "./dist/prismaExtension.d.ts",
+        "default": "./dist/prismaExtension.js"
+      }
+    }
+  },
+  "files": [
+    "dist",
+    "!dist/**/*.test.d.*"
+  ],
+  "scripts": {
+    "build": "yarn setup:test && tsx ./build.mts",
+    "build:pack": "yarn pack -o redwoodjs-uploads.tgz",
+    "build:types": "tsc --build --verbose",
+    "build:types-cjs": "tsc --build --verbose tsconfig.types-cjs.json",
+    "check:attw": "tsx attw.ts",
+    "check:package": "concurrently npm:check:attw yarn publint",
+    "setup:test": "npx prisma db push --accept-data-loss --schema ./src/__tests__/unit-test-schema.prisma",
+    "test": "vitest run",
+    "test:watch": "vitest watch"
+  },
+  "dependencies": {
+    "@redwoodjs/project-config": "workspace:*",
+    "mime-types": "2.1.35",
+    "ulid": "2.3.0"
+  },
+  "devDependencies": {
+    "@arethetypeswrong/cli": "0.15.4",
+    "@prisma/client": "5.18.0",
+    "@redwoodjs/framework-tools": "workspace:*",
+    "@types/mime-types": "2.1.4",
+    "concurrently": "8.2.2",
+    "esbuild": "0.23.0",
+    "publint": "0.2.10",
+    "tsx": "4.17.0",
+    "typescript": "5.5.4",
+    "vitest": "2.0.5"
+  },
+  "gitHead": "3905ed045508b861b495f8d5630d76c7a157d8f1"
+}
diff --git a/packages/uploads/prisma-override.d.ts b/packages/uploads/prisma-override.d.ts
new file mode 100644
index 000000000000..f976517c35be
--- /dev/null
+++ b/packages/uploads/prisma-override.d.ts
@@ -0,0 +1,9 @@
+// Within this package we override the type of @prisma/client with the one we generate locally,
+// so that we get accurate types here (rather than the default anys). When the prismaExtension
+// runs in a user's project it will still use the types from '@prisma/client', which points to
+// the user's prisma client and not ours.
+
+import type { PrismaClient as LocalPrismaClient } from './src/__tests__/prisma-client/index.d.ts'
+
+declare module '@prisma/client' {
+  export class PrismaClient extends LocalPrismaClient {}
+}
diff --git a/packages/uploads/src/FileSystemStorage.ts b/packages/uploads/src/FileSystemStorage.ts
new file mode 100644
index 000000000000..5f03c96a2b53
--- /dev/null
+++ b/packages/uploads/src/FileSystemStorage.ts
@@ -0,0 +1,47 @@
+import { existsSync, mkdirSync } from 'node:fs'
+import fs from 'node:fs/promises'
+import path from 'node:path'
+
+import mime from 'mime-types'
+
+import { ensurePosixPath } from '@redwoodjs/project-config'
+
+import type { SaveOptionsOverride } from './StorageAdapter.js'
+import { StorageAdapter } from './StorageAdapter.js'
+
+export class FileSystemStorage
+  extends StorageAdapter
+  implements StorageAdapter
+{
+  constructor(opts: { baseDir: string }) {
+    super(opts)
+    if (!existsSync(opts.baseDir)) {
+      const posixBaseDir = ensurePosixPath(opts.baseDir)
+      console.log('Creating baseDir >', posixBaseDir)
+      mkdirSync(posixBaseDir, { recursive: true })
+    }
+  }
+
+  async save(file: File, saveOverride?: SaveOptionsOverride) {
+    const fileName = this.generateFileNameWithExtension(saveOverride, file)
+
+    const location = path.join(
+      ensurePosixPath(saveOverride?.path || this.adapterOpts.baseDir),
+      fileName,
+    )
+    const arrayBuffer = await file.arrayBuffer()
+
+    await fs.writeFile(location, Buffer.from(arrayBuffer))
+    return { location }
+  }
+
+  async read(filePath: string) {
+    return {
+      contents: await fs.readFile(filePath),
+      type: mime.lookup(filePath),
+    }
+  }
+
+  async remove(filePath: string) {
+    await fs.unlink(filePath)
+  }
+}
diff --git a/packages/uploads/src/MemoryStorage.ts b/packages/uploads/src/MemoryStorage.ts
new file mode 100644
index 000000000000..905ee7b8e2ff
--- /dev/null
+++ b/packages/uploads/src/MemoryStorage.ts
@@ -0,0 +1,41 @@
+import path from 'node:path'
+
+import mime from 'mime-types'
+
+import { StorageAdapter } from './StorageAdapter.js'
+import type { SaveOptionsOverride } from './StorageAdapter.js'
+
+export class MemoryStorage extends StorageAdapter implements StorageAdapter {
+  store: Record<string, Buffer> = {}
+
+  async save(file: File, saveOpts?: SaveOptionsOverride) {
+    const fileName = this.generateFileNameWithExtension(saveOpts, file)
+
+    const location = path.join(
+      saveOpts?.path || this.adapterOpts.baseDir,
+      fileName,
+    )
+    const arrayBuffer = await file.arrayBuffer()
+
+    this.store[location] = Buffer.from(arrayBuffer)
+
+    return {
+      location,
+    }
+  }
+
+  async remove(filePath: string) {
+    delete this.store[filePath]
+  }
+
+  async read(filePath: string) {
+    return {
+      contents: this.store[filePath],
+      type: mime.lookup(filePath),
+    }
+  }
+
+  async clear() {
+    this.store = {}
+  }
+}
diff --git a/packages/uploads/src/StorageAdapter.ts b/packages/uploads/src/StorageAdapter.ts
new file mode 100644
index 000000000000..c2df085752bc
--- /dev/null
+++ b/packages/uploads/src/StorageAdapter.ts
@@ -0,0 +1,55 @@
+/**
+ * A storage adapter saves the file and returns
+ * {
+ *   location: string, // depending on the storage, this could be a file path
+ * }
+ */
+
+import mime from 'mime-types'
+import { ulid } from 'ulid'
+
+export type AdapterResult = {
+  location: string
+}
+
+export type SaveOptionsOverride = {
+  fileName?: string
+  path?: string
+}
+
+export type AdapterOptions = {
+  baseDir: string
+}
+
+export abstract class StorageAdapter {
+  adapterOpts: AdapterOptions
+  constructor(adapterOpts: AdapterOptions) {
+    this.adapterOpts = adapterOpts
+  }
+
+  getAdapterOptions() {
+    return this.adapterOpts
+  }
+
+  generateFileNameWithExtension(
+    saveOpts: SaveOptionsOverride | undefined,
+    file: File,
+  ) {
+    const fileName = saveOpts?.fileName || ulid()
+    const extension = mime.extension(file.type)
+      ? `.${mime.extension(file.type)}`
+      : ''
+    return `${fileName}${extension}`
+  }
+
+  abstract save(
+    file: File,
+    saveOpts?: SaveOptionsOverride,
+  ): Promise<AdapterResult>
+  abstract remove(fileLocation: AdapterResult['location']): Promise<void>
+  abstract read(fileLocation: AdapterResult['location']): Promise<{
+    contents: Buffer | string
+    type: ReturnType<typeof mime.lookup>
+  }>
+}
diff --git a/packages/uploads/src/__tests__/FileSystemsStorage.test.ts b/packages/uploads/src/__tests__/FileSystemsStorage.test.ts
new file mode 100644
index 000000000000..7da26ef9f7df
--- /dev/null
+++ b/packages/uploads/src/__tests__/FileSystemsStorage.test.ts
@@ -0,0 +1,87 @@
+import { vol } from 'memfs'
+import { beforeEach, describe, expect, test, vi } from 'vitest'
+
+import { ensurePosixPath } from '@redwoodjs/project-config'
+
+import { FileSystemStorage } from '../FileSystemStorage.js'
+
+// Mock the entire fs module
+vi.mock('node:fs', async () => {
+  const memfs = await import('memfs')
+  return {
+    ...memfs.fs,
+    default: memfs.fs,
+  }
+})
+
+// Mock the fs/promises module
+vi.mock('node:fs/promises', async () => {
+  const memfs = await import('memfs')
+  return {
+    ...memfs.fs.promises,
+    default: memfs.fs.promises,
+  }
+})
+
+describe('FileSystemStorage', () => {
+  let storage: FileSystemStorage
+  const baseDir = '/tmp/test_uploads'
+
+  beforeEach(() => {
+    vol.reset()
+    storage = new FileSystemStorage({ baseDir })
+  })
+
+  const plainFile = new File(['test content'], 'test.txt', {
+    type: 'text/plain',
+  })
+
+  test('save should store a file on the file system', async () => {
+    const result = await storage.save(plainFile)
+
+    expect(result).toHaveProperty('location')
+    const posixLocation = ensurePosixPath(result.location)
+    expect(posixLocation).toMatch(/\/tmp\/test_uploads\/.*\.txt$/)
+    expect(vol.existsSync(result.location)).toBe(true)
+  })
+
+  test('remove should delete a file from the file system', async () => {
+    const { location } = await storage.save(plainFile)
+
+    await storage.remove(location)
+    expect(vol.existsSync(location)).toBe(false)
+  })
+
+  test('read should return file contents and type', async () => {
+    const { location: plainFileLocation } = await storage.save(plainFile)
+
+    const plainFileReadResult = await storage.read(plainFileLocation)
+    expect(plainFileReadResult.contents).toBeInstanceOf(Buffer)
+    expect(plainFileReadResult.contents.toString()).toBe('test content')
+    expect(plainFileReadResult.type).toBe('text/plain')
+
+    const imageFile = new File(['ABCDEF'], 'test.png', { type: 'image/png' })
+    const { location } = await storage.save(imageFile)
+
+    const result = await storage.read(location)
+    expect(result.contents).toBeInstanceOf(Buffer)
+    expect(result.contents.toString()).toBe('ABCDEF')
+    expect(result.type).toBe('image/png')
+  })
+
+  test('save should use custom path, with no baseDir, when provided', async () => {
+    // Note that using a custom path means you need to create the directory yourself!
+    vol.mkdirSync('/my_custom/path', { recursive: true })
+
+    const result = await storage.save(plainFile, {
+      path: '/my_custom/path',
+      fileName: 'bazinga',
+    })
+
+    // Note that it doesn't have the baseDir!
+    expect(ensurePosixPath(result.location)).toEqual(
+      '/my_custom/path/bazinga.txt',
+    )
+    expect(vol.existsSync(result.location)).toBe(true)
+  })
+})
diff --git a/packages/uploads/src/__tests__/MemoryStorage.test.ts b/packages/uploads/src/__tests__/MemoryStorage.test.ts
new file mode 100644
index 000000000000..2f5f427ca998
--- /dev/null
+++ b/packages/uploads/src/__tests__/MemoryStorage.test.ts
@@ -0,0 +1,54 @@
+import { describe, expect, test } from 'vitest'
+
+import { ensurePosixPath } from '@redwoodjs/project-config'
+
+import { MemoryStorage } from '../MemoryStorage.js'
+
+describe('MemoryStorage', () => {
+  const storage = new MemoryStorage({ baseDir: 'uploads' })
+
+  test('save should store a file in memory', async () => {
+    const file = new File(['test content'], 'test.txt', { type: 'text/plain' })
+    const result = await storage.save(file)
+
+    expect(result).toHaveProperty('location')
+    expect(ensurePosixPath(result.location)).toMatch(/uploads\/.*\.txt$/)
+    expect(storage.store[result.location]).toBeDefined()
+  })
+
+  test('remove should delete a file from memory', async () => {
+    const file = new File(['test content'], 'test.txt', { type: 'text/plain' })
+    const { location } = await storage.save(file)
+
+    await storage.remove(location)
+    expect(storage.store[location]).toBeUndefined()
+  })
+
+  test('read should return file contents and type', async () => {
+    const file = new File(['ABCDEF'], 'test.txt', { type: 'image/png' })
+    const { location } = await storage.save(file)
+
+    const result = await storage.read(location)
+    expect(result.contents).toBeInstanceOf(Buffer)
+    expect(result.contents.toString()).toBe('ABCDEF')
+    expect(result.type).toBe('image/png')
+  })
+
+  test('clear should remove all stored files', async () => {
+    const file1 = new File(['content 1'], 'file1.txt', { type: 'text/plain' })
+    const file2 = new File(['content 2'], 'file2.txt', { type: 'text/plain' })
+
+    await storage.save(file1)
+    await storage.save(file2)
+
+    await storage.clear()
+    expect(Object.keys(storage.store).length).toBe(0)
+  })
+
+  test('save should use custom path when provided', async () => {
+    const file = new File(['test content'], 'test.txt', { type: 'text/plain' })
+    const result = await storage.save(file, { path: 'custom/path' })
+
+    expect(ensurePosixPath(result.location)).toContain('custom/path')
+  })
+})
diff --git a/packages/uploads/src/__tests__/createProcessors.test.ts b/packages/uploads/src/__tests__/createProcessors.test.ts
new file mode 100644
index 000000000000..632b9d6ac22e
--- /dev/null
+++ b/packages/uploads/src/__tests__/createProcessors.test.ts
@@ -0,0 +1,165 @@
+import { describe, it, expect } from 'vitest'
+
+import { ensurePosixPath } from '@redwoodjs/project-config'
+
+import { createUploadProcessors } from '../createProcessors.js'
+import { MemoryStorage } from '../MemoryStorage.js'
+import type { UploadsConfig } from '../prismaExtension.js'
+
+const memStore = new MemoryStorage({
+  baseDir: '/memory_store_basedir',
+})
+
+const uploadsConfig: UploadsConfig = {
+  dumbo: {
+    fields: ['firstUpload', 'secondUpload'],
+  },
+  dummy: {
+    fields: 'uploadField',
+  },
+}
+
+describe('Create processors', () => {
+  const processors = createUploadProcessors(uploadsConfig, memStore)
+
+  it('should create processors with capitalized model names', () => {
+    expect(processors.processDumboUploads).toBeDefined()
+    expect(processors.processDummyUploads).toBeDefined()
+  })
+
+  it('Should replace file types with location strings', async () => {
+    const data = {
+      firstUpload: new File(['Meaow'], 'kitten.txt', {
+        type: 'text/plain',
+      }),
+      secondUpload: new File(['Woof'], 'puppy.txt', {
+        type: 'text/plain',
+      }),
+    }
+
+    const result = await processors.processDumboUploads(data)
+
+    // Location strings are in this format: {baseDir}/{model}-{field}-{ulid}.{ext}
+    expect(ensurePosixPath(result.firstUpload)).toMatch(
+      /\/memory_store_basedir\/dumbo-*.*\.txt/,
+    )
+    expect(ensurePosixPath(result.secondUpload)).toMatch(
+      /\/memory_store_basedir\/dumbo-*.*\.txt/,
+    )
+
+    const { contents: firstContents } = await memStore.read(result.firstUpload)
+    expect(firstContents.toString()).toBe('Meaow')
+
+    const { contents: secondContents } = await memStore.read(
+      result.secondUpload,
+    )
+    expect(secondContents.toString()).toBe('Woof')
+  })
+
+  it('Should be able to override save options', async () => {
+    const data = {
+      uploadField: new File(['Hello'], 'hello.png', {
+        type: 'image/png',
+      }),
+    }
+
+    const fileNameOverrideOnly = await processors.processDummyUploads(data, {
+      fileName: 'overridden',
+    })
+
+    const pathOverrideOnly = await processors.processDummyUploads(data, {
+      path: '/bazinga',
+    })
+
+    const bothOverride = await processors.processDummyUploads(data, {
+      path: '/bazinga',
+      fileName: 'overridden',
+    })
+
+    expect(ensurePosixPath(fileNameOverrideOnly.uploadField)).toBe(
+      '/memory_store_basedir/overridden.png',
+    )
+
+    expect(ensurePosixPath(pathOverrideOnly.uploadField)).toMatch(
+      /\/bazinga\/.*\.png/,
+    )
+    // Overriding the path ignores the baseDir
+    expect(pathOverrideOnly.uploadField).not.toContain('memory_store_basedir')
+
+    expect(ensurePosixPath(bothOverride.uploadField)).toBe(
+      '/bazinga/overridden.png',
+    )
+  })
+
+  it('Should not add extension for unknown file type', async () => {
+    const data = {
+      uploadField: new File(['Hello'], 'hello', {
+        type: 'bazinga/unknown', // we don't use this anyway
+      }),
+    }
+
+    const noOverride = await processors.processDummyUploads(data)
+
+    // No extension
+    expect(ensurePosixPath(noOverride.uploadField)).toMatch(
+      /\/memory_store_basedir\/.*[^.]+$/,
+    )
+
+    const withOverride = await processors.processDummyUploads(data, {
+      fileName: 'hello',
+    })
+
+    expect(withOverride.uploadField).toMatch(/[^.]+$/)
+    expect(ensurePosixPath(withOverride.uploadField)).toBe(
+      '/memory_store_basedir/hello',
+    )
+  })
+})
+
+// FileLists
+// In the database world a string[] is not a thing,
+// so we need a generic way of handling lists of files
+describe('FileList processing', () => {
+  const processors = createUploadProcessors(uploadsConfig, memStore)
+
+  const notPrismaData = [
+    new File(['Hello'], 'hello.png', {
+      type: 'image/png',
+    }),
+    new File(['World'], 'world.jpeg', {
+      type: 'image/jpeg',
+    }),
+  ]
+
+  it('Should handle FileLists', async () => {
+    const result = await processors.processFileList(notPrismaData)
+
+    expect(result).toHaveLength(2)
+
+    expect(ensurePosixPath(result[0])).toMatch(
+      /\/memory_store_basedir\/.*\.png/,
+    )
+    expect(ensurePosixPath(result[1])).toMatch(
+      /\/memory_store_basedir\/.*\.jpeg/,
+    )
+  })
+
+  it('Should handle FileLists with SaveOptions', async () => {
+    const result = await processors.processFileList(notPrismaData, {
+      path: '/bazinga_not_mem_store',
+    })
+
+    expect(result).toHaveLength(2)
+    expect(ensurePosixPath(result[0])).toMatch(
+      /\/bazinga_not_mem_store\/.*\.png/,
+    )
+    expect(ensurePosixPath(result[1])).toMatch(
+      /\/bazinga_not_mem_store\/.*\.jpeg/,
+    )
+  })
+
+  it('Should handle empty FileLists', async () => {
+    const promise = processors.processFileList()
+
+    await expect(promise).resolves.not.toThrow()
+  })
+})
diff --git a/packages/uploads/src/__tests__/queryExtensions.test.ts b/packages/uploads/src/__tests__/queryExtensions.test.ts
new file mode 100644
index 000000000000..70cfad682547
--- /dev/null
+++ b/packages/uploads/src/__tests__/queryExtensions.test.ts
@@ -0,0 +1,253 @@
+import fs from 'node:fs/promises'
+
+import type { MockedFunction } from 'vitest'
+import { describe, it, vi, expect, beforeEach, beforeAll } from 'vitest'
+
+import { ensurePosixPath } from '@redwoodjs/project-config'
+
+import { FileSystemStorage } from '../FileSystemStorage.js'
+import { setupUploads } from '../index.js'
+import type { UploadsConfig } from '../prismaExtension.js'
+
+// @MARK: use the local prisma client in the test
+import type { Dumbo, Dummy } from './prisma-client/index.js'
+import { PrismaClient } from './prisma-client/index.js'
+
+vi.mock('node:fs/promises', () => ({
+  default: {
+    writeFile: vi.fn(),
+    unlink: vi.fn(),
+    readFile: vi.fn(() => {
+      return 'MOCKED_FILE_CONTENT'
+    }),
+    copyFile: vi.fn(),
+  },
+}))
+
+// For creation of the FS adapter
+vi.mock('node:fs', () => ({
+  existsSync: vi.fn(() => true),
+  mkdirSync: vi.fn(),
+}))
+
+describe('Query extensions', () => {
+  const uploadConfig: UploadsConfig = {
+    dummy: {
+      fields: 'uploadField',
+    },
+    dumbo: {
+      fields: ['firstUpload', 'secondUpload'],
+    },
+  }
+
+  const { prismaExtension, uploadsProcessors } = setupUploads(
+    uploadConfig,
+    new FileSystemStorage({
+      baseDir: '/tmp',
+    }),
+  )
+
+  const prismaClient = new PrismaClient().$extends(prismaExtension)
+
+  beforeEach(() => {
+    vi.resetAllMocks()
+  })
+
+  const sampleFile = new File(['heres-some-content'], 'dummy.txt', {
+    type: 'text/plain',
+  })
+
+  describe('create', () => {
+    it('create will save files', async () => {
+      const processedData = await uploadsProcessors.processDummyUploads({
+        uploadField: sampleFile,
+      })
+
+      expect(fs.writeFile).toHaveBeenCalled()
+      const dummy = await prismaClient.dummy.create({
+        data: processedData,
+      })
+
+      // On Windows the slashes are different
+      const uploadFieldPath = ensurePosixPath(dummy.uploadField)
+
+      expect(uploadFieldPath).toMatch(/\/tmp\/.*\.txt$/)
+    })
+
+    it('will remove the file if the create fails', async () => {
+      try {
+        await prismaClient.dumbo.create({
+          data: {
+            firstUpload: '/tmp/first.txt',
+            secondUpload: '/bazinga/second.txt',
+            // @ts-expect-error Checking the error here
+            id: 'this-is-the-incorrect-type',
+          },
+        })
+      } catch {
+        expect(fs.unlink).toHaveBeenNthCalledWith(1, '/tmp/first.txt')
+        expect(fs.unlink).toHaveBeenNthCalledWith(2, '/bazinga/second.txt')
+      }
+
+      expect.assertions(2)
+    })
+  })
+
+  describe('update', () => {
+    let ogDummy: Dummy
+    let ogDumbo: Dumbo
+
+    beforeAll(async () => {
+      ogDummy = await prismaClient.dummy.create({
+        data: {
+          uploadField: '/tmp/old.txt',
+        },
+      })
+
+      ogDumbo = await prismaClient.dumbo.create({
+        data: {
+          firstUpload: '/tmp/oldFirst.txt',
+          secondUpload: '/tmp/oldSecond.txt',
+        },
+      })
+    })
+
+    beforeEach(() => {
+      vi.resetAllMocks()
+    })
+
+    it('update will remove the old file, save new one', async () => {
+      const updatedDummy = await prismaClient.dummy.update({
+        data: {
+          uploadField: '/tmp/new.txt',
+        },
+        where: {
+          id: ogDummy.id,
+        },
+      })
+
+      expect(fs.unlink).toHaveBeenCalledWith('/tmp/old.txt')
+      expect(updatedDummy.uploadField).toBe('/tmp/new.txt')
+    })
+
+    it('should not delete the file if the update fails', async () => {
+      const failedUpdatePromise = prismaClient.dummy.update({
+        data: {
+          // @ts-expect-error Intentional
+          id: 'this-is-the-incorrect-type',
+        },
+        where: {
+          id: ogDummy.id,
+        },
+      })
+
+      // The id is invalid, so the update should fail
+      await expect(failedUpdatePromise).rejects.toThrowError()
+
+      // The old file should NOT be deleted
+      expect(fs.unlink).not.toHaveBeenCalled()
+    })
+
+    it('should only delete old files from the fields that are being updated', async () => {
+      const updatedDumbo = await prismaClient.dumbo.update({
+        data: {
+          firstUpload: '/tmp/newFirst.txt',
+        },
+        where: {
+          id: ogDumbo.id,
+        },
+      })
+
+      expect(updatedDumbo.firstUpload).toBe('/tmp/newFirst.txt')
+      expect(updatedDumbo.secondUpload).toBe('/tmp/oldSecond.txt')
+      expect(fs.unlink).toHaveBeenCalledOnce()
+      expect(fs.unlink).toHaveBeenCalledWith('/tmp/oldFirst.txt')
+    })
+
+    it('should not delete files on update of non-upload fields', async () => {
+      // In this case, we're only updating the message field
+      await prismaClient.dumbo.update({
+        data: {
+          message: 'Hello world',
+        },
+        where: {
+          id: ogDumbo.id,
+        },
+      })
+
+      expect(fs.unlink).not.toHaveBeenCalled()
+    })
+  })
+
+  describe('delete', () => {
+    it('delete will remove all uploads', async () => {
+      const dumbo = await prismaClient.dumbo.create({
+        data: {
+          firstUpload: '/tmp/first.txt',
+          secondUpload: '/tmp/second.txt',
+        },
+      })
+
+      await prismaClient.dumbo.delete({
+        where: {
+          id: dumbo.id,
+        },
+      })
+
+      expect(fs.unlink).toHaveBeenCalledTimes(2)
+      expect(fs.unlink).toHaveBeenCalledWith('/tmp/first.txt')
+      expect(fs.unlink).toHaveBeenCalledWith('/tmp/second.txt')
+    })
+
+    it('delete will not remove any uploads if the delete fails', async () => {
+      const bookWithCover = await prismaClient.book.create({
+        data: {
+          name: 'Prisma extensions for dummies',
+          cover: {
+            create: {
+              photo: '/tmp/book-covers/prisma-for-dummies.jpg',
+            },
+          },
+        },
+      })
+
+      // This delete will fail because the book is associated with a cover, but the
+      // test serves more as documentation (and to prevent regression if Prisma changes behavior),
+      // because Prisma will throw the validation __before__ the delete in the extension is called
+
+      try {
+        await prismaClient.bookCover.delete({
+          where: {
+            id: bookWithCover.coverId,
+          },
+        })
+        // eslint-disable-next-line no-empty
+      } catch {}
+
+      expect(fs.unlink).not.toHaveBeenCalled()
+    })
+
+    it('Should handle if a bad path is provided', async () => {
+      ;(fs.unlink as MockedFunction<typeof fs.unlink>).mockRejectedValueOnce(
+        new Error('unlink error'),
+      )
+
+      const invalidPathDumbo = await prismaClient.dumbo.create({
+        data: {
+          firstUpload: '',
+          secondUpload: 'im-a-invalid-path',
+        },
+      })
+
+      const deletePromise = prismaClient.dumbo.delete({
+        where: {
+          id: invalidPathDumbo.id,
+        },
+      })
+
+      await expect(deletePromise).resolves.not.toThrow()
+
+      expect(fs.unlink).toHaveBeenCalledOnce()
+      expect(fs.unlink).toHaveBeenCalledWith('im-a-invalid-path')
+    })
+  })
+})
diff --git a/packages/uploads/src/__tests__/resultExtensions.test.ts b/packages/uploads/src/__tests__/resultExtensions.test.ts
new file mode 100644
index 000000000000..2469f4901093
--- /dev/null
+++ b/packages/uploads/src/__tests__/resultExtensions.test.ts
@@ -0,0 +1,69 @@
+import { describe, it, expect, vi } from 'vitest'
+
+import { setupUploads } from '../index.js'
+import { MemoryStorage } from '../MemoryStorage.js'
+import type { UploadsConfig } from '../prismaExtension.js'
+import { UrlSigner } from '../signedUrls.js'
+
+// @MARK: use the local prisma client in the test
+import { PrismaClient } from './prisma-client/index.js'
+
+vi.mock('@redwoodjs/project-config', async (importOriginal) => {
+  const originalProjectConfig = (await importOriginal()) as any
+  return {
+    ...originalProjectConfig,
+    getConfig: () => {
+      return {
+        web: {
+          apiUrl: '/.redwood/functions',
+        },
+      }
+    },
+  }
+})
+
+describe('Result extensions', () => {
+  const uploadConfig: UploadsConfig = {
+    dummy: {
+      fields: 'uploadField',
+    },
+    dumbo: {
+      fields: ['firstUpload', 'secondUpload'],
+    },
+  }
+
+  const { prismaExtension } = setupUploads(
+    uploadConfig,
+    new MemoryStorage({
+      baseDir: '/tmp',
+    }),
+    new UrlSigner({
+      endpoint: '/signed-url',
+      secret: 'my-sekret',
+    }),
+  )
+
+  const prismaClient = new PrismaClient().$extends(prismaExtension)
+
+  describe('withSignedUrl', () => {
+    it('Generates signed urls for each upload field', async () => {
+      const dumbo = await prismaClient.dumbo.create({
+        data: {
+          firstUpload: '/dumbo/first.txt',
+          secondUpload: '/dumbo/second.txt',
+        },
+      })
+
+      const signedUrlDumbo = await dumbo.withSignedUrl({
+        expiresIn: 254,
+      })
+
+      expect(signedUrlDumbo.firstUpload).toContain(
+        '/.redwood/functions/signed-url',
+      )
+      expect(signedUrlDumbo.firstUpload).toContain('path=%2Fdumbo%2Ffirst.txt')
+      expect(signedUrlDumbo.secondUpload).toContain(
+        'path=%2Fdumbo%2Fsecond.txt',
+      )
+    })
+  })
+})
diff --git a/packages/uploads/src/__tests__/signedUrls.test.ts b/packages/uploads/src/__tests__/signedUrls.test.ts
new file mode 100644
index 000000000000..10d9747de74d
--- /dev/null
+++ b/packages/uploads/src/__tests__/signedUrls.test.ts
@@ -0,0 +1,221 @@
+import { describe, expect, beforeEach, afterEach, vi, it, test } from 'vitest'
+
+import { EXPIRES_IN, UrlSigner } from '../signedUrls.js'
+
+const signer = new UrlSigner({
+  // Doing this means we don't need to mock getConfig
+  endpoint: 'https://myapiside.com/access-signed-file',
+  secret: 'bazinga-3-32-151',
+})
+
+describe('UrlSigner', () => {
+  it('Can create a signature', () => {
+    const { signature, expiry: expires } = signer.generateSignature({
+      filePath: '/tmp/myfile.txt',
+      expiresInMs: EXPIRES_IN.days(5),
+    })
+
+    expect(signature).toBeDefined()
+
+    expect(diffInDaysFromNow(expires as number)).toBeCloseTo(5)
+  })
+
+  it('throws the correct error when the wrong expiry is passed', () => {
+    const { signature, expiry: expires } = signer.generateSignature({
+      filePath: '/tmp/myfile.txt',
+      expiresInMs: EXPIRES_IN.days(1),
+    })
+
+    expect(() =>
+      signer.validateSignature({
+        path: '/tmp/myfile.txt',
+        s: signature,
+        expiry: expires,
+      }),
+    ).not.toThrow()
+
+    expect(() =>
+      signer.validateSignature({
+        path: '/tmp/myfile.txt',
+        s: signature,
+        expiry: 12512351,
+      }),
+    ).toThrowError('Signature has expired')
+  })
+
+  it('Handles url encoded filePaths', () => {
+    const { signature, expiry: expires } = signer.generateSignature({
+      filePath: '/tmp/myfile.txt',
+      expiresInMs: EXPIRES_IN.days(1),
+    })
+
+    expect(() =>
+      signer.validateSignature({
+        path: encodeURIComponent('/tmp/myfile.txt'),
+        s: signature,
+        expiry: expires,
+      }),
+    ).not.toThrow()
+  })
+
+  it('Throws an invalid signature when signature is wrong', () => {
+    const { signature, expiry } = signer.generateSignature({
+      filePath: '/tmp/myfile.txt',
+      expiresInMs: EXPIRES_IN.days(1),
+    })
+
+    expect(() =>
+      signer.validateSignature({
+        path: '/tmp/myfile.txt',
+        s: signature,
+        expiry,
+      }),
+    ).not.toThrow()
+
+    expect(() =>
+      signer.validateSignature({
+        path: '/tmp/myfile.txt',
+        s: 'im-the-wrong-signature',
+        expiry,
+      }),
+    ).toThrowError('Invalid signature')
+  })
+
+  it('Throws an invalid signature when file path is wrong', () => {
+    const { signature, expiry } = signer.generateSignature({
+      filePath: '/tmp/myfile.txt',
+      expiresInMs: EXPIRES_IN.days(20),
+    })
+    expect(() =>
+      signer.validateSignature({
+        path: '/tmp/some-other-file.txt',
+        s: signature,
+        expiry,
+      }),
+    ).toThrowError('Invalid signature')
+  })
+})
+
+describe('Expired signature', () => {
+  // Separate describe, so we can mock the timers
+  beforeEach(() => {
+    vi.useFakeTimers()
+  })
+
+  afterEach(() => {
+    vi.useRealTimers()
+  })
+
+  it('throws an error when the signature has expired', () => {
+    const filePath = '/bazinga/kittens.png'
+    const { signature, expiry } = signer.generateSignature({
+      filePath,
+      expiresInMs: EXPIRES_IN.minutes(15),
+    })
+
+    const validation = () =>
+      signer.validateSignature({
+        path: filePath,
+        s: signature,
+        expiry,
+      })
+
+    expect(validation).not.toThrow()
+
+    // Time travel to the future
+    vi.advanceTimersByTime(EXPIRES_IN.days(1))
+
+    expect(validation).toThrowError('Signature has expired')
+  })
+})
+
+test('Generates a signed url', () => {
+  const signedUrl = signer.generateSignedUrl(
+    '/files/bazinga',
+    EXPIRES_IN.days(1),
+  )
+
+  expect(signedUrl).toContain('https://myapiside.com/access-signed-file?s=')
+  expect(signedUrl).toMatch(/s=.*/)
+  expect(signedUrl).toMatch(/expiry=[0-9]+/)
+  expect(signedUrl).toContain(`path=${encodeURIComponent('/files/bazinga')}`) // The actual file path
+})
+
+describe('validatePath', () => {
+  beforeEach(() => {
+    vi.useFakeTimers()
+  })
+
+  afterEach(() => {
+    vi.useRealTimers()
+  })
+
+  it('validates a path or url with a valid signature and expiry', () => {
+    const filePath = '/tmp/myfile.txt'
+    const expiresInMs = EXPIRES_IN.days(1)
+    const { signature, expiry } = signer.generateSignature({
+      filePath,
+      expiresInMs,
+    })
+
+    const signedPath = `/bazinga?s=${signature}&expiry=${expiry}&path=${encodeURIComponent(
+      filePath,
+    )}`
+
+    // When it's just a path
+    expect(() => signer.validateSignedUrl(signedPath)).not.toThrow()
+    expect(signer.validateSignedUrl(signedPath)).toBe(filePath)
+
+    // When it's a full URL
+    const signedUrl = `https://myredwoodapp.com/bazinga?s=${signature}&expiry=${expiry}&path=${encodeURIComponent(
+      filePath,
+    )}`
+
+    expect(() => signer.validateSignedUrl(signedUrl)).not.toThrow()
+    expect(signer.validateSignedUrl(signedUrl)).toBe(filePath)
+  })
+
+  it('throws an error when the signature has expired', () => {
+    const filePath = '/tmp/myfile.txt'
+    const expiresInMs = EXPIRES_IN.minutes(15)
+    const { signature, expiry } = signer.generateSignature({
+      filePath,
+      expiresInMs,
+    })
+
+    const url = `/bazinga?s=${signature}&expiry=${expiry}&path=${encodeURIComponent(
+      filePath,
+    )}`
+
+    // Time travel to the future
+    vi.advanceTimersByTime(EXPIRES_IN.days(1))
+
+    expect(() => signer.validateSignedUrl(url)).toThrowError(
+      'Signature has expired',
+    )
+  })
+
+  it('throws an error when the signature is invalid', () => {
+    const filePath = '/tmp/myfile.txt'
+    const expiresInMs = EXPIRES_IN.days(1)
+    const { signature, expiry } = signer.generateSignature({
+      filePath,
+      expiresInMs,
+    })
+
+    const url = `/bazinga?s=${signature}&expiry=${expiry}&path=${encodeURIComponent(
+      filePath,
+    )}`
+
+    const invalidSignatureUrl = url.replace(signature, 'invalid-signature')
+
+    expect(() => signer.validateSignedUrl(invalidSignatureUrl)).toThrowError(
+      'Invalid signature',
+    )
+  })
+})
+
+// Util functions to make the tests more readable
+function diffInDaysFromNow(time: number) {
+  return Math.abs(time - Date.now()) / 86400000
+}
diff --git a/packages/uploads/src/__tests__/unit-test-schema.prisma b/packages/uploads/src/__tests__/unit-test-schema.prisma
new file mode 100644
index 000000000000..21f28a1f1ccb
--- /dev/null
+++ b/packages/uploads/src/__tests__/unit-test-schema.prisma
@@ -0,0 +1,40 @@
+datasource db {
+  provider = "sqlite"
+  url      = "file:for_unit_test.db"
+}
+
+generator client {
+  provider = "prisma-client-js"
+  output   = "./prisma-client" // <-- we generate a local prisma client so it doesn't interfere with the monorepo
+}
+
+model Dummy {
+  id          Int    @id @default(autoincrement())
+  uploadField String
+}
+
+model Dumbo {
+  id           Int     @id @default(autoincrement())
+  firstUpload  String
+  secondUpload String
+  message      String?
+}
+
+model NoUploadFields {
+  id   Int    @id @default(autoincrement())
+  name String
+}
+
+model Book {
+  id      Int       @id @default(autoincrement())
+  coverId Int       @unique
+  cover   BookCover @relation(fields: [coverId], references: [id])
+  name    String
+}
+
+model BookCover {
+  id    Int    @id @default(autoincrement())
+  // This is the upload field
+  photo String
+  book  Book?
+}
diff --git a/packages/uploads/src/createProcessors.ts b/packages/uploads/src/createProcessors.ts
new file mode 100644
index 000000000000..08a06c7e2028
--- /dev/null
+++ b/packages/uploads/src/createProcessors.ts
@@ -0,0 +1,89 @@
+import { ulid } from 'ulid'
+
+import type { SaveOptionsOverride, StorageAdapter } from './StorageAdapter.js'
+
+// Assumes you pass in the graphql type
+type MakeFilesString<T> = {
+  [K in keyof T]: T[K] extends File ? string : T[K]
+}
+
+export const createFileListProcessor = (storage: StorageAdapter) => {
+  return async (files: File[] = [], pathOverrideOnly?: { path?: string }) => {
+    const locations = await Promise.all(
+      files.map(async (file) => {
+        const { location } = await storage.save(file, pathOverrideOnly)
+        return location
+      }),
+    )
+
+    return locations
+  }
+}
+
+/*
+This creates a processor for each model in the uploads config (i.e. tied to a model in the Prisma schema).
+The processors only handle single file uploads, not file lists.
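+For example, the config key `profile` produces a `processProfileUploads` function.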
+*/ +export const createUploadProcessors = < + TUploadConfig extends Record, +>( + uploadConfig: TUploadConfig, + storage: StorageAdapter, +) => { + type modelNamesInUploadConfig = keyof TUploadConfig + + type uploadProcessorNames = + `process${Capitalize}Uploads` + + // @TODO(TS): Is there a way to make the type of data more specific? + type Processors = { + [K in uploadProcessorNames]: >( + data: T, + overrideSaveOptions?: SaveOptionsOverride, + ) => Promise> + } + + const processors = {} as Processors + + Object.keys(uploadConfig).forEach((model) => { + const modelKey = model as keyof typeof uploadConfig + + const currentModelConfig = uploadConfig[modelKey] + + if (!currentModelConfig) { + return + } + + const currentModelUploadFields = Array.isArray(currentModelConfig.fields) + ? currentModelConfig.fields + : [currentModelConfig.fields] + + const capitalCaseModel = `${model.charAt(0).toUpperCase() + model.slice(1)}` + const processorKey = `process${capitalCaseModel}Uploads` as keyof Processors + + processors[processorKey] = async (data, overrideSaveOptions) => { + const updatedFields = {} as Record + for await (const field of currentModelUploadFields) { + if (data[field]) { + const file = data[field] + + const saveOptions = overrideSaveOptions || { + fileName: `${model}-${field}-${ulid()}`, + } + const { location } = await storage.save(file, saveOptions) + + updatedFields[field] = location + } + } + return { + ...data, + ...updatedFields, + } + } + }) + + return { + ...processors, + processFileList: createFileListProcessor(storage), + } +} diff --git a/packages/uploads/src/fileHandling.ts b/packages/uploads/src/fileHandling.ts new file mode 100644 index 000000000000..aebd14bb302c --- /dev/null +++ b/packages/uploads/src/fileHandling.ts @@ -0,0 +1,9 @@ +import type { StorageAdapter } from './StorageAdapter.js' + +export async function fileToDataUri(filePath: string, storage: StorageAdapter) { + const { contents, type: mimeType } = await storage.read(filePath) + + const base64Data = Buffer.from(contents).toString('base64') + + return `data:${mimeType};base64,${base64Data}` +} diff --git a/packages/uploads/src/index.ts b/packages/uploads/src/index.ts new file mode 100644 index 000000000000..5ebda08c89c2 --- /dev/null +++ b/packages/uploads/src/index.ts @@ -0,0 +1,29 @@ +import { createUploadProcessors } from './createProcessors.js' +import type { ModelNames, UploadsConfig } from './prismaExtension.js' +import { createUploadsExtension } from './prismaExtension.js' +import type { UrlSigner } from './signedUrls.js' +import type { StorageAdapter } from './StorageAdapter.js' + +export const setupUploads = ( + uploadsConfig: UploadsConfig, + storageAdapter: StorageAdapter, + urlSigner?: UrlSigner, +) => { + const prismaExtension = createUploadsExtension( + uploadsConfig, + storageAdapter, + urlSigner, + ) + + const uploadsProcessors = createUploadProcessors( + uploadsConfig, + storageAdapter, + ) + + return { + prismaExtension, + uploadsProcessors, + } +} + +export type { ModelNames, UploadsConfig } from './prismaExtension.js' diff --git a/packages/uploads/src/prismaExtension.ts b/packages/uploads/src/prismaExtension.ts new file mode 100644 index 000000000000..8d532382a98e --- /dev/null +++ b/packages/uploads/src/prismaExtension.ts @@ -0,0 +1,238 @@ +import { PrismaClient } from '@prisma/client' +import type { Prisma } from '@prisma/client' +import { Prisma as PrismaExtension } from '@prisma/client/extension' +import type * as runtime from '@prisma/client/runtime/library' + +import { 
+export const setupUploads = <MNames extends ModelNames = ModelNames>(
+  uploadsConfig: UploadsConfig<MNames>,
+  storageAdapter: StorageAdapter,
+  urlSigner?: UrlSigner,
+) => {
+  const prismaExtension = createUploadsExtension(
+    uploadsConfig,
+    storageAdapter,
+    urlSigner,
+  )
+
+  const uploadsProcessors = createUploadProcessors(
+    uploadsConfig,
+    storageAdapter,
+  )
+
+  return {
+    prismaExtension,
+    uploadsProcessors,
+  }
+}
+
+export type { ModelNames, UploadsConfig } from './prismaExtension.js'
diff --git a/packages/uploads/src/prismaExtension.ts b/packages/uploads/src/prismaExtension.ts
new file mode 100644
index 000000000000..8d532382a98e
--- /dev/null
+++ b/packages/uploads/src/prismaExtension.ts
@@ -0,0 +1,238 @@
+import { PrismaClient } from '@prisma/client'
+import type { Prisma } from '@prisma/client'
+import { Prisma as PrismaExtension } from '@prisma/client/extension'
+import type * as runtime from '@prisma/client/runtime/library'
+
+import { fileToDataUri } from './fileHandling.js'
+import type { UrlSigner } from './signedUrls.js'
+import type { StorageAdapter } from './StorageAdapter.js'
+
+type FilterOutDollarPrefixed<T> = T extends `$${string}`
+  ? never
+  : T extends symbol // Remove symbol here, because it doesn't help users
+    ? never
+    : T
+
+// Filter out $on, $connect, etc.
+export type ModelNames = FilterOutDollarPrefixed<keyof PrismaClient>
+
+type PrismaModelFields<MName extends ModelNames> = keyof Prisma.Result<
+  PrismaClient[MName],
+  any,
+  'findFirstOrThrow'
+>
+
+export type UploadConfigForModel<TPrismaModel extends ModelNames> = {
+  fields:
+    | PrismaModelFields<TPrismaModel>
+    | PrismaModelFields<TPrismaModel>[]
+}
+
+export type UploadsConfig<MNames extends ModelNames = ModelNames> = {
+  [K in MNames]?: UploadConfigForModel<K>
+}
+
+type WithSignedUrlArgs = {
+  expiresIn?: number
+}
+
+export const createUploadsExtension = <MNames extends ModelNames = ModelNames>(
+  config: UploadsConfig<MNames>,
+  storageAdapter: StorageAdapter,
+  urlSigner?: UrlSigner,
+) => {
+  // @TODO I think we can use Prisma.getExtensionContext(this)
+  // instead of creating a new PrismaClient instance
+  const prismaInstance = new PrismaClient()
+
+  type ResultExtends = {
+    [K in MNames]: {
+      withDataUri: {
+        needs: Record<string, boolean>
+        compute: (
+          modelData: Record<string, unknown>,
+        ) => <T>(this: T) => Promise<T>
+      }
+      withSignedUrl: {
+        needs: Record<string, boolean>
+        compute: (
+          modelData: Record<string, unknown>,
+        ) => <T>(this: T, signArgs?: WithSignedUrlArgs) => Promise<T>
+      }
+    }
+  }
+
+  const queryExtends: runtime.ExtensionArgs['query'] = {}
+
+  const resultExtends = {} as ResultExtends
+  for (const modelName in config) {
+    // Guaranteed to have modelConfig, we're looping over config 🙄
+    const modelConfig = config[modelName]
+
+    if (!modelConfig) {
+      continue
+    }
+
+    const uploadFields = (
+      Array.isArray(modelConfig.fields)
+        ? modelConfig.fields
+        : [modelConfig.fields]
+    ) as string[]
+
+    queryExtends[modelName] = {
+      async create({ query, args }) {
+        try {
+          const result = await query(args)
+          return result
+        } catch (e) {
+          // If the create fails, we need to delete the uploaded files
+          await removeUploadedFiles(
+            uploadFields,
+            args.data as Record<string, string>,
+          )
+          throw e
+        }
+      },
+      async update({ query, model, args }) {
+        // Check if any of the uploadFields are present in args.data
+        // We only want to process fields that are being updated
+        const uploadFieldsToUpdate = uploadFields.filter(
+          (field) =>
+            // All of this nonsense is to make TypeScript happy. It's unclear how data could be anything but an object
+            typeof args.data === 'object' &&
+            args.data !== null &&
+            field in args.data,
+        )
+
+        // If no upload fields are present, proceed with the original query to
+        // avoid the overhead of extra lookups
+        if (uploadFieldsToUpdate.length === 0) {
+          return query(args)
+        } else {
+          const originalRecord = await prismaInstance[
+            model as ModelNames
+            // @ts-expect-error TS in strict mode will error due to union type. We cannot narrow it down here.
+          ].findFirstOrThrow({
+            where: args.where,
+            // @TODO: should we select here to reduce the amount of data we're handling?
+          })
+
+          // Similar, but not the same as create
+          try {
+            const result = await query(args)
+
+            // **After** we've updated the record, we need to delete the old file.
+            await removeUploadedFiles(uploadFieldsToUpdate, originalRecord)
+
+            return result
+          } catch (e) {
+            // If the update fails, we need to delete the newly uploaded files,
+            // but not the ones that already exist!
+            await removeUploadedFiles(
+              uploadFieldsToUpdate,
+              args.data as Record<string, string>,
+            )
+            throw e
+          }
+        }
+      },
+
+      async delete({ query, args }) {
+        const deleteResult = await query(args)
+        await removeUploadedFiles(
+          uploadFields,
+          // We don't know the exact type here
+          deleteResult as Record<string, string>,
+        )
+
+        return deleteResult
+      },
+    }
+
+    // This makes the result extension only available for models with uploadFields
+    const needs = Object.fromEntries(uploadFields.map((field) => [field, true]))
+
+    resultExtends[modelName] = {
+      withDataUri: {
+        needs,
+        compute(modelData) {
+          return async () => {
+            const base64UploadFields: Record<string, string> = {}
+
+            for (const field of uploadFields) {
+              base64UploadFields[field] = await fileToDataUri(
+                modelData[field] as string,
+                storageAdapter,
+              )
+            }
+
+            return {
+              // modelData is of type unknown at this point
+              ...(modelData as any),
+              ...base64UploadFields,
+            }
+          }
+        },
+      },
+      withSignedUrl: {
+        needs,
+        compute(modelData) {
+          return ({ expiresIn }: WithSignedUrlArgs = {}) => {
+            if (!urlSigner) {
+              throw new Error(
+                'Please supply signed url settings in setupUploads()',
+              )
+            }
+            const signedUrlFields: Record<string, string> = {}
+
+            for (const field of uploadFields) {
+              if (!modelData[field]) {
+                continue
+              }
+
+              signedUrlFields[field] = urlSigner.generateSignedUrl(
+                modelData[field] as string,
+                expiresIn,
+              )
+            }
+
+            return {
+              // modelData is of type unknown at this point
+              ...(modelData as any),
+              ...signedUrlFields,
+            }
+          }
+        },
+      },
+    }
+  }
+
+  return PrismaExtension.defineExtension((client) => {
+    return client.$extends({
+      name: 'redwood-upload-prisma-plugin',
+      query: queryExtends,
+      result: resultExtends,
+    })
+  })
+
+  async function removeUploadedFiles(
+    fieldsToDelete: string[],
+    data: Record<string, string>,
+  ) {
+    if (!data) {
+      console.warn('Empty data object passed to removeUploadedFiles')
+      return
+    }
+
+    for (const field of fieldsToDelete) {
+      const uploadLocation = data?.[field]
+      if (uploadLocation) {
+        try {
+          await storageAdapter.remove(uploadLocation)
+        } catch {
+          // Swallow the error; we don't want to stop the delete operation
+        }
+      }
+    }
+  }
+}
diff --git a/packages/uploads/src/signedUrls.ts b/packages/uploads/src/signedUrls.ts
new file mode 100644
index 000000000000..1edb2d64816f
--- /dev/null
+++ b/packages/uploads/src/signedUrls.ts
@@ -0,0 +1,161 @@
+import crypto from 'node:crypto'
+
+import { getConfig } from '@redwoodjs/project-config'
+
+export type SignedUrlSettings = {
+  endpoint: string // The path to the signed url endpoint, or a full url (include http(s)://)
+  secret: string // The secret to sign the urls with
+}
+
+export type SignatureValidationArgs = {
+  path: string
+  s: string
+  expiry?: number | string
+}
+export class UrlSigner {
+  private secret: string
+  private endpoint: string
+
+  constructor({ secret, endpoint }: SignedUrlSettings) {
+    this.secret = secret
+    this.endpoint = endpoint.startsWith('http')
+      ? endpoint
+      : `${getConfig().web.apiUrl}${endpoint}`
+  }
+
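+  /**
+   * Signs the file path (and the expiry timestamp, when `expiresInMs` is given)
+   * with an HMAC-SHA256 keyed by the configured secret. Omitting `expiresInMs`
+   * produces a signature that never expires.
+   */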
+  generateSignature({
+    filePath,
+    expiresInMs,
+  }: {
+    filePath: string
+    expiresInMs?: number
+  }) {
+    if (!this.secret) {
+      throw new Error('Please configure the secret')
+    }
+
+    if (expiresInMs) {
+      const expiry = Date.now() + expiresInMs
+      const signature = crypto
+        .createHmac('sha256', this.secret)
+        .update(`${filePath}:${expiry}`)
+        .digest('hex')
+
+      return { expiry, signature }
+    } else {
+      // Does not expire
+      const signature = crypto
+        .createHmac('sha256', this.secret)
+        .update(filePath)
+        .digest('hex')
+
+      return {
+        signature,
+        expiry: undefined,
+      }
+    }
+  }
+
+  /**
+   * The signature and expiry have to be extracted from the URL's query parameters
+   */
+  validateSignature({
+    s: signature,
+    path: filePath, // In the URL we call it path
+    expiry,
+  }: SignatureValidationArgs) {
+    if (!this.secret) {
+      throw new Error('Please configure the secret')
+    }
+
+    if (expiry) {
+      // If the expiry has passed, there's no need to validate the signature,
+      // but make sure it's a number!
+      if (Date.now() > +expiry) {
+        throw new Error('Signature has expired')
+      }
+    }
+
+    // Decoded filePath
+    const decodedFilePath = decodeURIComponent(filePath)
+
+    const validSignature = expiry
+      ? crypto
+          .createHmac('sha256', this.secret)
+          .update(`${decodedFilePath}:${expiry}`)
+          .digest('hex')
+      : crypto
+          .createHmac('sha256', this.secret)
+          .update(`${decodedFilePath}`)
+          .digest('hex')
+
+    if (validSignature !== signature) {
+      throw new Error('Invalid signature')
+    }
+
+    return decodedFilePath
+  }
+
+  validateSignedUrl(fullPathWithQueryParametersOrUrl: string) {
+    const url = new URL(
+      fullPathWithQueryParametersOrUrl,
+      // We don't care about the host, but we need a base to create a URL object
+      // that can parse search params
+      fullPathWithQueryParametersOrUrl.startsWith('http')
+        ? undefined
+        : 'http://localhost',
+    )
+
+    const path = url.searchParams.get('path') as string
+
+    this.validateSignature({
+      // Note the signature is called 's' in the URL
+      s: url.searchParams.get('s') as string,
+      expiry: url.searchParams.get('expiry') as string,
+      path,
+    })
+
+    // Return the decoded path
+    return decodeURIComponent(path)
+  }
+
+  generateSignedUrl(filePath: string, expiresIn?: number) {
+    const { signature, expiry } = this.generateSignature({
+      filePath,
+      expiresInMs: expiresIn,
+    })
+
+    // This way you can pass in a path with params already
+    const params = new URLSearchParams()
+    params.set('s', signature)
+    if (expiry) {
+      params.set('expiry', expiry.toString())
+    }
+
+    params.set('path', filePath)
+
+    return `${this.endpoint}?${params.toString()}`
+  }
+}
+
+export const getSignedDetailsFromUrl = (url: string) => {
+  const urlObj = new URL(url)
+  // Note: these param names match what generateSignedUrl produces
+  const expires = urlObj.searchParams.get('expiry')
+  return {
+    expires: expires ? parseInt(expires, 10) : undefined,
+    file: urlObj.searchParams.get('path'),
+    signature: urlObj.searchParams.get('s'),
+  }
+}
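+
+// Millisecond helpers for `expiresInMs`, e.g. EXPIRES_IN.days(2) === 172_800_000.
+// Months and years use 30- and 365-day approximations.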
+export const EXPIRES_IN = {
+  seconds: (s: number) => s * 1000,
+  minutes: (m: number) => m * 60 * 1000,
+  hours: (h: number) => h * 60 * 60 * 1000,
+  days: (d: number) => d * 24 * 60 * 60 * 1000,
+  weeks: (w: number) => w * 7 * 24 * 60 * 60 * 1000,
+  months: (m: number) => m * 30 * 24 * 60 * 60 * 1000,
+  years: (y: number) => y * 365 * 24 * 60 * 60 * 1000,
+}
diff --git a/packages/uploads/tsconfig.json b/packages/uploads/tsconfig.json
new file mode 100644
index 000000000000..d8a2b7c67426
--- /dev/null
+++ b/packages/uploads/tsconfig.json
@@ -0,0 +1,22 @@
+{
+  "extends": "../../tsconfig.compilerOption.json",
+  "compilerOptions": {
+    "moduleResolution": "NodeNext",
+    "module": "NodeNext",
+    "baseUrl": ".",
+    "rootDir": "src",
+    "outDir": "dist"
+  },
+  "include": ["src", "prisma-override.d.ts"],
+  // Excluding tests (as in the root compilerOption) causes types to be inaccurate in tests.
+  // This overrides the exclude in the root compilerOption.
+  "exclude": ["dist", "node_modules", "**/__mocks__"],
+  "references": [
+    {
+      "path": "../project-config"
+    },
+    {
+      "path": "../framework-tools"
+    }
+  ]
+}
diff --git a/packages/uploads/tsconfig.types-cjs.json b/packages/uploads/tsconfig.types-cjs.json
new file mode 100644
index 000000000000..6bbdc61737c4
--- /dev/null
+++ b/packages/uploads/tsconfig.types-cjs.json
@@ -0,0 +1,7 @@
+{
+  "extends": "./tsconfig.json",
+  "compilerOptions": {
+    "outDir": "dist/cjs",
+    "tsBuildInfoFile": "./tsconfig.types-cjs.tsbuildinfo"
+  }
+}
diff --git a/packages/uploads/vitest.config.mts b/packages/uploads/vitest.config.mts
new file mode 100644
index 000000000000..ee7fa5cb1f93
--- /dev/null
+++ b/packages/uploads/vitest.config.mts
@@ -0,0 +1,26 @@
+import path from 'path'
+import { fileURLToPath } from 'url'
+
+import { defineConfig, configDefaults } from 'vitest/config'
+
+const __filename = fileURLToPath(import.meta.url)
+const __dirname = path.dirname(__filename)
+
+export default defineConfig({
+  test: {
+    exclude: [...configDefaults.exclude, '**/fixtures'],
+    deps: {
+      interopDefault: false,
+    },
+    globalSetup: ['vitest.setup.mts'],
+    alias: {
+      // We alias the prisma client, otherwise you'll get "prisma client not initialized".
+      // It's important to have the subpath first here
+      '@prisma/client/extension': path.resolve(
+        __dirname,
+        '../../node_modules/@prisma/client/extension.js',
+      ),
+      '@prisma/client': path.resolve(__dirname, 'src/__tests__/prisma-client'),
+    },
+  },
+})
diff --git a/packages/uploads/vitest.setup.mts b/packages/uploads/vitest.setup.mts
new file mode 100644
index 000000000000..702c707e71da
--- /dev/null
+++ b/packages/uploads/vitest.setup.mts
@@ -0,0 +1,8 @@
+import { $ } from 'zx'
+
+export default async function setup() {
+  $.verbose = true
+  console.log('[setup] Setting up unit test prisma db....')
+  await $`npx prisma db push --accept-data-loss --schema ./src/__tests__/unit-test-schema.prisma`
+  console.log('[setup] Done! 
\n') +} diff --git a/yarn.lock b/yarn.lock index fed3431ed22c..6555a867f99c 100644 --- a/yarn.lock +++ b/yarn.lock @@ -8712,6 +8712,26 @@ __metadata: languageName: unknown linkType: soft +"@redwoodjs/uploads@workspace:packages/uploads": + version: 0.0.0-use.local + resolution: "@redwoodjs/uploads@workspace:packages/uploads" + dependencies: + "@arethetypeswrong/cli": "npm:0.15.4" + "@prisma/client": "npm:5.18.0" + "@redwoodjs/framework-tools": "workspace:*" + "@redwoodjs/project-config": "workspace:*" + "@types/mime-types": "npm:2.1.4" + concurrently: "npm:8.2.2" + esbuild: "npm:0.23.0" + mime-types: "npm:2.1.35" + publint: "npm:0.2.10" + tsx: "npm:4.17.0" + typescript: "npm:5.5.4" + ulid: "npm:2.3.0" + vitest: "npm:2.0.5" + languageName: unknown + linkType: soft + "@redwoodjs/vite@workspace:packages/vite": version: 0.0.0-use.local resolution: "@redwoodjs/vite@workspace:packages/vite" @@ -29017,6 +29037,15 @@ __metadata: languageName: node linkType: hard +"ulid@npm:2.3.0": + version: 2.3.0 + resolution: "ulid@npm:2.3.0" + bin: + ulid: ./bin/cli.js + checksum: 10c0/070d237502781085e59cf3d8ece752ff96cd3a0990cf1c1be57273f4550597daeb72e9a7db8e5a320de31102509bb3321d280b54bfc44e98025e4628a9629773 + languageName: node + linkType: hard + "unbox-primitive@npm:^1.0.2": version: 1.0.2 resolution: "unbox-primitive@npm:1.0.2"