Commit

chore(examples): Added Next.js 14 OpenAI rate limit example (#88)
davidmytton authored Jan 2, 2024
1 parent 90e1965 commit 482a472
Showing 17 changed files with 5,849 additions and 1 deletion.
18 changes: 18 additions & 0 deletions .github/dependabot.yml
@@ -76,6 +76,24 @@ updates:
      - dependency-name: "@types/node"
        versions: [">18.18"]

  - package-ecosystem: "npm"
    directory: "/examples/nextjs-14-openai"
    schedule:
      # Our dependencies should be checked daily
      interval: "daily"
    assignees:
      - blaine-arcjet
    reviewers:
      - blaine-arcjet
    commit-message:
      prefix: "deps"
      prefix-development: "deps(dev)"
    ignore:
      # Ignore updates to the @types/node package due to conflict between
      # Headers in DOM.
      - dependency-name: "@types/node"
        versions: [">18.18"]

  - package-ecosystem: "npm"
    directory: "/examples/nextjs-14-pages-wrap"
    schedule:
34 changes: 34 additions & 0 deletions .github/workflows/examples.yml
@@ -109,6 +109,40 @@ jobs:
        working-directory: examples/nextjs-14-app-dir-validate-email
        run: npm run build

  nextjs-14-openai:
    name: "Next.js 14 + OpenAI"
    runs-on: ubuntu-latest
    steps:
      # Environment security
      - name: Step Security
        uses: step-security/harden-runner@eb238b55efaa70779f274895e782ed17c84f2895 # v2.6.1
        with:
          egress-policy: audit

      # Checkout
      # Most toolchains require checkout first
      - name: Checkout
        uses: actions/checkout@v4

      # Language toolchains
      - name: Install Node
        uses: actions/setup-node@v4.0.0
        with:
          node-version: 20

      # Workflow

      - name: Install dependencies
        run: npm ci

      - name: Install example dependencies
        working-directory: examples/nextjs-14-openai
        run: npm ci

      - name: Build
        working-directory: examples/nextjs-14-openai
        run: npm run build

  nextjs-14-pages-wrap:
    name: "Next.js 14 + Page Router + withArcjet"
    runs-on: ubuntu-latest
3 changes: 3 additions & 0 deletions examples/nextjs-14-openai/.eslintrc.json
@@ -0,0 +1,3 @@
{
  "extends": "next/core-web-vitals"
}
36 changes: 36 additions & 0 deletions examples/nextjs-14-openai/.gitignore
@@ -0,0 +1,36 @@
# See https://help.github.com/articles/ignoring-files/ for more about ignoring files.

# dependencies
/node_modules
/.pnp
.pnp.js
.yarn/install-state.gz

# testing
/coverage

# next.js
/.next/
/out/

# production
/build

# misc
.DS_Store
*.pem

# debug
npm-debug.log*
yarn-debug.log*
yarn-error.log*

# local env files
.env*.local

# vercel
.vercel

# typescript
*.tsbuildinfo
next-env.d.ts
41 changes: 41 additions & 0 deletions examples/nextjs-14-openai/README.md
@@ -0,0 +1,41 @@
<a href="https://arcjet.com" target="_arcjet-home">
<picture>
<source media="(prefers-color-scheme: dark)" srcset="https://arcjet.com/arcjet-logo-minimal-dark-mark-all.svg">
<img src="https://arcjet.com/arcjet-logo-minimal-light-mark-all.svg" alt="Arcjet Logo" height="128" width="auto">
</picture>
</a>

# Arcjet rate limit OpenAI chat route with Next.js 14

This example shows how to implement a rate limit on a Next.js 14 API route
that calls the OpenAI Chat API.

## How to use

1. From the root of the project, install the SDK dependencies.

```bash
npm ci
```

2. Enter this directory and install the example's dependencies.

```bash
cd examples/nextjs-14-openai
npm ci
```

3. Add your OpenAI API key to `.env.local`.

```env
OPENAI_API_KEY=
```

4. Start the dev server.

```bash
npm run dev
```

5. Visit `http://localhost:3000` and send some chat messages.
6. Trigger the rate limit by sending more than 60 requests to the `/api/chat`
   route within a minute (the limit is tracked per IP address); a quick way to
   do that is sketched below.
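
Exceeding the limit by hand through the UI is tedious, since each submitted message sends one request. Below is a minimal sketch of a throwaway script that does it instead; it is not part of this example, and it assumes the dev server is running at `http://localhost:3000` and that the route accepts the `{ messages: [...] }` JSON body used by `app/api/chat/route.ts`. Note that every allowed request is forwarded to OpenAI, so this will consume API credits.

```ts
// trigger-rate-limit.ts (hypothetical helper, not part of this example)
// Repeatedly POSTs to the chat route until the 429 rate limit response appears.
async function main() {
  const url = "http://localhost:3000/api/chat"; // assumes the local dev server
  const body = JSON.stringify({
    messages: [{ role: "user", content: "Hello" }],
  });

  for (let i = 1; i <= 70; i++) {
    const res = await fetch(url, {
      method: "POST",
      headers: { "Content-Type": "application/json" },
      body,
    });
    console.log(`Request ${i}: ${res.status}`);
    if (res.status === 429) {
      console.log("Rate limit hit");
      break;
    }
  }
}

main().catch(console.error);
```

Run it with something like `npx tsx trigger-rate-limit.ts` while the dev server is up.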
71 changes: 71 additions & 0 deletions examples/nextjs-14-openai/app/api/chat/route.ts
@@ -0,0 +1,71 @@
// This example is adapted from https://sdk.vercel.ai/docs/guides/frameworks/nextjs-app
import arcjet, { rateLimit } from "@arcjet/next";
import { OpenAIStream, StreamingTextResponse } from "ai";
import OpenAI from "openai";
import { promptTokensEstimate } from "openai-chat-tokens";

// Arcjet rate limit rule: allow a maximum of 60 requests per IP address in a
// 1 minute window
const aj = arcjet({
  key: "ajkey_yourkey", // Replace with your Arcjet key
  rules: [
    rateLimit({
      mode: "LIVE", // "LIVE" blocks requests; "DRY_RUN" only logs the decision
      characteristics: ["ip.src"], // Track the limit by client IP address
      window: "1m",
      max: 60,
      timeout: "10m",
    }),
  ],
});

// OpenAI client
const openai = new OpenAI({
  apiKey: process.env["OPENAI_API_KEY"] ?? "OPENAI_KEY_MISSING",
});

export const runtime = "edge";

export async function POST(req: Request) {
  // Protect the route with Arcjet
  const decision = await aj.protect(req);
  console.log("Arcjet decision", decision.conclusion);

  if (decision.reason.isRateLimit()) {
    console.log("Request count", decision.reason.count);
    console.log("Requests remaining", decision.reason.remaining);
  }

  // If the request is denied, return a 429
  if (decision.isDenied()) {
    if (decision.reason.isRateLimit()) {
      return new Response("Too Many Requests", {
        status: 429,
      });
    } else {
      return new Response("Forbidden", {
        status: 403,
      });
    }
  }

  // If the request is allowed, continue to use OpenAI
  const { messages } = await req.json();

  // Estimate the number of tokens the prompt will use before sending it
  const estimate = promptTokensEstimate({
    messages,
  });

  console.log("Token estimate", estimate);

  // Ask OpenAI for a streaming chat completion given the prompt
  const response = await openai.chat.completions.create({
    model: "gpt-3.5-turbo",
    stream: true,
    messages,
  });

  // Convert the response into a friendly text-stream
  const stream = OpenAIStream(response);
  // Respond with the stream
  return new StreamingTextResponse(stream);
}
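
The Arcjet key above is a hardcoded placeholder. A minimal sketch of loading it from the environment instead, mirroring how the OpenAI key is read just below it; the `ARCJET_KEY` variable name is an assumption, not something defined by this example.

```ts
import arcjet, { rateLimit } from "@arcjet/next";

// Hypothetical variant: read the Arcjet key from an environment variable so
// it is not committed to the repository (ARCJET_KEY is an assumed name).
const aj = arcjet({
  key: process.env["ARCJET_KEY"] ?? "ARCJET_KEY_MISSING",
  rules: [
    rateLimit({
      mode: "LIVE",
      characteristics: ["ip.src"],
      window: "1m",
      max: 60,
      timeout: "10m",
    }),
  ],
});
```

The key would then go in `.env.local` alongside `OPENAI_API_KEY`.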
Binary file added examples/nextjs-14-openai/app/favicon.ico
Binary file not shown.
20 changes: 20 additions & 0 deletions examples/nextjs-14-openai/app/globals.css
@@ -0,0 +1,20 @@
@tailwind base;
@tailwind components;
@tailwind utilities;

:root {
  --foreground-rgb: 0, 0, 0;
  --background-rgb: 255, 255, 255;
}

@media (prefers-color-scheme: dark) {
  :root {
    --foreground-rgb: 255, 255, 255;
    --background-rgb: 0, 0, 0;
  }
}

body {
  color: rgb(var(--foreground-rgb));
  background: rgb(var(--background-rgb));
}
22 changes: 22 additions & 0 deletions examples/nextjs-14-openai/app/layout.tsx
@@ -0,0 +1,22 @@
import type { Metadata } from "next";
import { Inter } from "next/font/google";
import "./globals.css";

const inter = Inter({ subsets: ["latin"] });

export const metadata: Metadata = {
  title: "Create Next App",
  description: "Generated by create next app",
};

export default function RootLayout({
  children,
}: {
  children: React.ReactNode;
}) {
  return (
    <html lang="en">
      <body className={inter.className}>{children}</body>
    </html>
  );
}
34 changes: 34 additions & 0 deletions examples/nextjs-14-openai/app/page.tsx
@@ -0,0 +1,34 @@
"use client";

import { useChat } from "ai/react";

export default function Chat() {
const { messages, input, handleInputChange, handleSubmit } = useChat();
return (
<div className="flex flex-col w-full max-w-md py-24 mx-auto stretch">
{messages.map((m) => (
<div key={m.id} className="whitespace-pre-wrap">
{m.role === "user" ? (
<>
👤 <strong>User:</strong>{" "}
</>
) : (
<>
🤖 <strong>AI:</strong>{" "}
</>
)}
{m.content}
</div>
))}

<form onSubmit={handleSubmit}>
<input
className="fixed bottom-0 w-full max-w-md p-2 mb-8 border border-gray-300 rounded shadow-xl dark: border-gray-700 dark:bg-gray-800 dark:text-gray-100"
value={input}
placeholder="Say something..."
onChange={handleInputChange}
/>
</form>
</div>
);
}
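
The page above does not tell the user anything when the route returns a 429; the request simply fails. A minimal sketch of surfacing that is below, assuming the installed `ai/react` version exposes `error` on `useChat` and accepts an `onError` callback (treat both as assumptions and check the version in use).

```tsx
"use client";

import { useChat } from "ai/react";

// Sketch: the same chat UI, but showing a message when a request fails,
// e.g. when the rate limit returns a 429. Assumes `error` and `onError`
// are available on this version of useChat.
export default function Chat() {
  const { messages, input, handleInputChange, handleSubmit, error } = useChat({
    onError: (err) => console.error("Chat request failed", err),
  });

  return (
    <div className="flex flex-col w-full max-w-md py-24 mx-auto stretch">
      {error && (
        <div className="text-red-500 whitespace-pre-wrap">
          The request was blocked, possibly by the rate limit. Try again in a
          minute.
        </div>
      )}

      {messages.map((m) => (
        <div key={m.id} className="whitespace-pre-wrap">
          {m.role === "user" ? "👤 User: " : "🤖 AI: "}
          {m.content}
        </div>
      ))}

      <form onSubmit={handleSubmit}>
        <input
          className="fixed bottom-0 w-full max-w-md p-2 mb-8 border rounded"
          value={input}
          placeholder="Say something..."
          onChange={handleInputChange}
        />
      </form>
    </div>
  );
}
```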
4 changes: 4 additions & 0 deletions examples/nextjs-14-openai/next.config.js
@@ -0,0 +1,4 @@
/** @type {import('next').NextConfig} */
const nextConfig = {};

module.exports = nextConfig;