Skip to content

Commit

Permalink
Turso (#71)
Browse files Browse the repository at this point in the history
* Update sqlite

* Work

* Deploy info

* Cleanup

* Format

* Fix DB

* Fix seed

* corepack

* Docker

* Sync

* Finish syncing

* Sync

* Sync

* Sync

* Sync
  • Loading branch information
mskelton authored Aug 9, 2024
1 parent ac5ef11 commit e27ad06
Show file tree
Hide file tree
Showing 37 changed files with 6,472 additions and 4,090 deletions.
6 changes: 5 additions & 1 deletion .env.example
Original file line number Diff line number Diff line change
@@ -1,3 +1,7 @@
DATABASE_URL=
# shellcheck disable=SC2034

BYTES_DIR=
GITHUB_TOKEN=
WEBHOOK_SECRET=
TURSO_DATABASE_URL=
TURSO_AUTH_TOKEN=
2 changes: 1 addition & 1 deletion .eslintrc
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
{
"extends": ["@mskelton", "@mskelton/eslint-config/react", "next"],
"extends": ["@mskelton", "@mskelton/eslint-config/react"],
"plugins": ["mskelton"],
"rules": {
"react/no-unescaped-entities": "off",
Expand Down
12 changes: 4 additions & 8 deletions .github/workflows/build.yml
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@ jobs:
name: Lint
runs-on: ubuntu-latest
steps:
- uses: mskelton/setup-pnpm@v2
- uses: mskelton/setup-pnpm@v3
- name: Lint
run: pnpm lint
- name: Check formatting
Expand All @@ -15,21 +15,17 @@ jobs:
name: Type Check
runs-on: ubuntu-latest
steps:
- uses: mskelton/setup-pnpm@v2
- name: Build
run: pnpm db:generate
- uses: mskelton/setup-pnpm@v3
- name: Type check
run: pnpm ts

test:
name: Test
runs-on: ubuntu-latest
env:
DATABASE_URL: file:./test.db
steps:
- uses: mskelton/setup-pnpm@v2
- uses: mskelton/setup-pnpm@v3
- run: pnpm playwright install chromium --with-deps
- run: pnpm prisma migrate dev
- run: pnpm drizzle-kit push
- run: pnpm db:seed
- run: pnpm test
- name: Upload test results
Expand Down
3 changes: 2 additions & 1 deletion .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -17,4 +17,5 @@ out/
!.env.example

# DB
prisma/*.db*
data/*
!data/.gitkeep
32 changes: 7 additions & 25 deletions Dockerfile
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@ FROM node:20-alpine AS base

# Install dependencies only when needed
FROM base AS deps

# Check https://github.com/nodejs/docker-node/tree/b4117f9333da4138b03a546ec926ef50a31506c3#nodealpine to understand why libc6-compat might be needed.
RUN apk add --no-cache libc6-compat
WORKDIR /app
Expand All @@ -27,8 +28,7 @@ RUN npm run build
FROM base AS runner
WORKDIR /app

# Install LiteFS and SQLite3
RUN apk add ca-certificates fuse3 sqlite
ENV NODE_ENV production

RUN addgroup --system --gid 1001 nodejs
RUN adduser --system --uid 1001 nextjs
Expand All @@ -44,29 +44,11 @@ RUN chown nextjs:nodejs .next
COPY --from=builder --chown=nextjs:nodejs /app/.next/standalone ./
COPY --from=builder --chown=nextjs:nodejs /app/.next/static ./.next/static

# Set environment variables for Next.js
ENV PORT 3000
ENV HOSTNAME "127.0.0.1"
ENV NODE_ENV production
ENV DATABASE_URL "file:/litefs/mskelton.dev.db"

# Copy LiteFS binary
COPY --from=flyio/litefs:0.5 /usr/local/bin/litefs /usr/local/bin/litefs

# Move the appropriate LiteFS config files to /etc
COPY etc/fuse.conf /etc/fuse.conf
COPY etc/litefs.yml /etc/litefs.yml
USER nextjs

# Copy Prisma migrations
COPY prisma /app/prisma
EXPOSE 3000

# Run as a non-root user
# TODO
# USER nextjs

# Expose port 8080 that LiteFS will listen on
EXPOSE 8080
ENV PORT 3000
ENV HOSTNAME "0.0.0.0"

# Run LiteFS as the entrypoint. After it has connected and sync'd with the
# cluster, it will run the commands listed in the "exec" field of the config.
ENTRYPOINT litefs mount
CMD ["node", "server.js"]
8 changes: 4 additions & 4 deletions app/(main)/blog/posts/introducing-bytes/content.mdx
Original file line number Diff line number Diff line change
Expand Up @@ -80,10 +80,10 @@ goals.
With these goals in mind, I decided that it would make the most sense to
host the content for bytes in a separate repo. These files get indexed into
a [SQLite](https://www.sqlite.org) database (backed by
[LiteFS](https://fly.io/docs/litefs)) which makes it very simple to search
and reduces the dependency on using the GitHub API for requests. Not to
mention that searching through raw files on GitHub is kind of tricky if you
want to build any kind of search experience.
[Turso](https://turso.tech)) which makes it very simple to search and
reduces the dependency on using the GitHub API for requests. Not to mention
that searching through raw files on GitHub is kind of tricky if you want to
build any kind of search experience.

Now with the contents of the [bytes](https://github.com/mskelton/bytes)
repo being indexed to the website, I then set up a webhook so that new
Expand Down
116 changes: 63 additions & 53 deletions app/(main)/bytes/api.ts
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
import rehypeShiki from "@mskelton/rehype-shiki"
import { and, asc, desc, eq, gt, like, lt, or, sql } from "drizzle-orm"
import matter from "gray-matter"
import { notFound } from "next/navigation"
import { compileMDX } from "next-mdx-remote/rsc"
Expand All @@ -9,7 +10,7 @@ import rehypeSlug from "rehype-slug"
import remarkGfm from "remark-gfm"
import remarkSmartypants from "remark-smartypants"
import { getHighlighter, Highlighter } from "shiki"
import prisma from "lib/prisma"
import { db, schema } from "lib/db"
import MarkdownImage from "../../../components/markdown/MarkdownImage"
import MarkdownLink from "../../../components/markdown/MarkdownLink"
import MarkdownPre from "../../../components/markdown/MarkdownPre"
Expand Down Expand Up @@ -48,10 +49,8 @@ const loadLocalByteContent = async (id: string) => {
}

export const getByte = cache(async (slug: string) => {
const byte = await prisma.byte.findFirst({
where: {
OR: [{ id: slug }, { slug }],
},
const byte = await db.query.bytes.findFirst({
where: or(eq(schema.bytes.id, slug), eq(schema.bytes.slug, slug)),
})

if (!byte) {
Expand Down Expand Up @@ -96,7 +95,7 @@ export const getByte = cache(async (slug: string) => {
remarkPlugins: [remarkGfm, remarkSmartypants as any],
},
},
source: byte.content,
source: byte.content as string,
})

return { ...byte, content }
Expand All @@ -119,16 +118,16 @@ function getPrefix({ query, tag }: Pick<SearchBytesRequest, "query" | "tag">) {
}

export function getAllBytes() {
return prisma.byte.findMany({
orderBy: { createdAt: "desc" },
select: {
createdAt: true,
description: true,
id: true,
slug: true,
title: true,
},
})
return db
.select({
createdAt: schema.bytes.createdAt,
description: schema.bytes.description,
id: schema.bytes.id,
slug: schema.bytes.slug,
title: schema.bytes.title,
})
.from(schema.bytes)
.orderBy(desc(schema.bytes.createdAt))
}

export interface SearchBytesRequest {
Expand All @@ -142,50 +141,61 @@ export const PAGE_SIZE = 10

export const searchBytes = cache(
async ({ cursor, direction, query, tag }: SearchBytesRequest) => {
const res = await prisma.byte.findMany({
cursor: cursor ? { id: cursor } : undefined,
orderBy: { createdAt: "desc" },
select: {
createdAt: true,
description: true,
id: true,
slug: true,
tags: {
select: {
id: true,
name: true,
},
},
title: true,
},
// When using a cursor, we want to skip the current record since we don't
// want it on multiple pages. This doesn't apply when we are on the root
// page without a cursor.
skip: direction === "none" ? undefined : 1,
const res = await db
.select({
createdAt: schema.bytes.createdAt,
description: schema.bytes.description,
id: schema.bytes.id,
slug: schema.bytes.slug,
tags: sql`json_group_array(json_object('id', ${schema.tags.id}, 'name', ${schema.tags.name}))`.mapWith(
(val) => JSON.parse(val) as { id: string; name: string }[],
),
title: schema.bytes.title,
})
.from(schema.bytes)
.innerJoin(
schema.bytesToTags,
eq(schema.bytesToTags.byteId, schema.bytes.id),
)
.innerJoin(schema.tags, eq(schema.bytesToTags.tagId, schema.tags.id))
.where(
and(
cursor
? direction === "left"
? gt(schema.bytes.id, cursor)
: lt(schema.bytes.id, cursor)
: undefined,
query
? or(
like(schema.bytes.title, `%${query}%`),
like(schema.bytes.description, `%${query}%`),
)
: undefined,
tag ? eq(schema.tags.name, tag) : undefined,
),
)
.groupBy(schema.bytes.id)
// To know if there are more pages, we fetch one more record than we need
// and use the total count to determine if there are more pages. This has
// to account for the cursor direction as well.
take: (direction === "left" ? -1 : 1) * (PAGE_SIZE + 1),
// Search by tag, or by title/description
where: {
OR: query
? [
{ title: { contains: query } },
{ description: { contains: query } },
]
: undefined,
tags: tag ? { some: { name: { equals: tag } } } : undefined,
},
})
.limit(PAGE_SIZE + 1)
// When paging backwards, we have to sort backwards to allow our limit
// to work correctly.
.orderBy((direction === "left" ? asc : desc)(schema.bytes.id))
.execute()

const prefix = getPrefix({ query, tag })
const hasMore = res.length > PAGE_SIZE

// Since we fetch extra records, we need to slice the result to the correct
// size. Again, we have to account for cursor direction and trim the first
// or last item accordingly.
const bytes =
direction === "left" ? res.slice(-PAGE_SIZE) : res.slice(0, PAGE_SIZE)
// Since we fetch extra records, we need to slice the result to the correct size.
const bytes = res.slice(0, PAGE_SIZE)

// If paging backwards, we need to reverse the result set to maintain the
// correct order. Mutating arrays isn't that cool, but what do I care about
// being cool, I know what I'm doing.
if (direction === "left") {
bytes.reverse()
}

return {
bytes,
Expand Down
10 changes: 5 additions & 5 deletions app/(main)/resume/Skills.meta.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -125,14 +125,14 @@ export const skills: Skill[] = [
fill="#CE422B"
/>
<path
clip-rule="evenodd"
clipRule="evenodd"
d="M97.68 52.95C97.68 64.7866 92.9779 76.1384 84.6082 84.5082C76.2384 92.8779 64.8866 97.58 53.05 97.58C41.2134 97.58 29.8616 92.8779 21.4918 84.5082C13.1221 76.1384 8.42 64.7866 8.42 52.95C8.42 41.1134 13.1221 29.7616 21.4918 21.3918C29.8616 13.0221 41.2134 8.32 53.05 8.32C64.8866 8.32 76.2384 13.0221 84.6082 21.3918C92.9779 29.7616 97.68 41.1134 97.68 52.95ZM96.84 48.64L103.8 52.95L96.84 57.26L102.82 62.85L95.16 65.72L99.94 72.37L91.85 73.69L95.25 81.15L87.06 80.86L88.94 88.84L80.96 86.96L81.25 95.15L73.79 91.75L72.47 99.84L65.82 95.06L62.95 102.72L57.36 96.74L53.05 103.7L48.74 96.74L43.15 102.72L40.28 95.06L33.63 99.84L32.31 91.75L24.85 95.15L25.14 86.96L17.16 88.84L19.04 80.86L10.85 81.15L14.25 73.69L6.16 72.37L10.94 65.72L3.28 62.85L9.26 57.26L2.3 52.95L9.26 48.64L3.28 43.05L10.94 40.18L6.16 33.53L14.25 32.21L10.85 24.75L19.04 25.04L17.16 17.06L25.14 18.94L24.85 10.75L32.31 14.15L33.63 6.06L40.28 10.84L43.15 3.18L48.74 9.16L53.05 2.2L57.36 9.16L62.95 3.18L65.82 10.84L72.47 6.06L73.79 14.15L81.25 10.75L80.96 18.94L88.94 17.06L87.06 25.04L95.25 24.75L91.85 32.21L99.94 33.53L95.16 40.18L102.82 43.05L96.84 48.64Z"
fill="#CE422B"
fill-rule="evenodd"
fillRule="evenodd"
stroke="#CE422B"
stroke-linecap="round"
stroke-linejoin="round"
stroke-width="3"
strokeLinecap="round"
strokeLinejoin="round"
strokeWidth="3"
/>
</svg>
),
Expand Down
9 changes: 3 additions & 6 deletions app/api/bytes/route.tsx
Original file line number Diff line number Diff line change
@@ -1,22 +1,19 @@
import { inArray } from "drizzle-orm"
import { NextResponse } from "next/server"
import { PushEvent } from "@octokit/webhooks-types"
import { upsertByte } from "lib/api/bytes"
import { getByteSource } from "lib/api/github"
import { verifySignature } from "lib/api/signature"
import { db, schema } from "lib/db"
import { toId } from "lib/parser"
import prisma from "lib/prisma"

async function upsert(file: string) {
const id = toId(file)
return upsertByte(id, await getByteSource(id))
}

async function remove(files: string[]) {
const ids = files.map(toId)

await prisma.byte.deleteMany({
where: { id: { in: ids } },
})
await db.delete(schema.bytes).where(inArray(schema.bytes.id, files.map(toId)))
}

export async function POST(req: Request) {
Expand Down
6 changes: 4 additions & 2 deletions app/api/reindex/route.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -2,8 +2,8 @@ import { NextResponse } from "next/server"
import { requireToken } from "api/utils/auth"
import { upsertByte } from "lib/api/bytes"
import { getByteSource, octokit } from "lib/api/github"
import { db, schema } from "lib/db"
import { toId } from "lib/parser"
import prisma from "lib/prisma"

async function getAllByteIds() {
const path = "bytes"
Expand Down Expand Up @@ -31,7 +31,9 @@ export async function POST(request: Request) {
const sources = await Promise.all(ids.map(getByteSource))

// Clear all bytes from the database
await prisma.byte.deleteMany()
await db.delete(schema.bytesToTags)
await db.delete(schema.bytes)
await db.delete(schema.tags)

// Add all bytes to the database
for (let i = 0; i < ids.length; i++) {
Expand Down
Loading

0 comments on commit e27ad06

Please sign in to comment.