diff --git a/README.md b/README.md
index 8357f05b..3b5786b2 100644
--- a/README.md
+++ b/README.md
@@ -6,9 +6,9 @@ This project generates Twitter bios for you using AI.
## How it works
-This project uses the [ChatGPT API](https://openai.com/api/) and [Vercel Edge functions](https://vercel.com/features/edge-functions) with streaming. It constructs a prompt based on the form and user input, sends it to the chatGPT API via a Vercel Edge function, then streams the response back to the application.
+This project uses the [ChatGPT API](https://openai.com/api/) and the [Vercel AI SDK](https://sdk.vercel.ai/docs) with streaming. It constructs a prompt based on the form and user input, sends it to the ChatGPT API with a Vercel Edge Function, then streams the response back to the application UI.
-If you'd like to see how I built this, check out the [video](https://youtu.be/JcE-1xzQTE0) or [blog post](https://vercel.com/blog/gpt-3-app-next-js-vercel-edge-functions).
+> This template has recently been updated for the AI SDK, reducing the amount of code needed. I previously published a [video](https://youtu.be/JcE-1xzQTE0) and [blog post](https://vercel.com/blog/gpt-3-app-next-js-vercel-edge-functions) showing the older approach.
## Running Locally
@@ -17,7 +17,7 @@ After cloning the repo, go to [OpenAI](https://beta.openai.com/account/api-keys)
Then, run the application in the command line and it will be available at `http://localhost:3000`.
```bash
-npm run dev
+pnpm run dev
```
## One-Click Deploy
diff --git a/app/api/chat/route.ts b/app/api/chat/route.ts
new file mode 100644
index 00000000..748cf01a
--- /dev/null
+++ b/app/api/chat/route.ts
@@ -0,0 +1,39 @@
+import { Configuration, OpenAIApi } from 'openai-edge';
+import { OpenAIStream, StreamingTextResponse } from 'ai';
+
+// Create an OpenAI API client (that's edge friendly!)
+const config = new Configuration({
+ apiKey: process.env.OPENAI_API_KEY,
+});
+const openai = new OpenAIApi(config);
+
+// Set the runtime to edge for best performance
+export const runtime = 'edge';
+
+export async function POST(req: Request) {
+  const { vibe, bio } = await req.json();
+
+  // Ask OpenAI for a streaming completion given the prompt
+  const response = await openai.createChatCompletion({
+    model: 'gpt-3.5-turbo',
+    stream: true,
+    messages: [
+      {
+        role: 'user',
+        content: `Generate 2 ${vibe} twitter biographies with no hashtags and clearly labeled "1." and "2.". ${
+          vibe === 'Funny'
+            ? "Make sure there is a joke in there and it's a little ridiculous."
+            : ''
+        }
+        Make sure each generated biography is less than 160 characters, has short sentences that are found in Twitter bios, and base them on this context: ${bio}${
+          bio.slice(-1) === '.' ? '' : '.'
+        }`,
+      },
+    ],
+  });
+
+  // Convert the response into a friendly text-stream
+  const stream = OpenAIStream(response);
+  // Respond with the stream
+  return new StreamingTextResponse(stream);
+}
diff --git a/public/favicon.ico b/app/favicon.ico
similarity index 100%
rename from public/favicon.ico
rename to app/favicon.ico
diff --git a/app/layout.tsx b/app/layout.tsx
new file mode 100644
index 00000000..36367e53
--- /dev/null
+++ b/app/layout.tsx
@@ -0,0 +1,38 @@
+import { Analytics } from '@vercel/analytics/react';
+import { Metadata } from 'next';
+import '../styles/globals.css';
+
+const title = 'Twitter Bio Generator';
+const description = 'Generate your next Twitter bio in seconds.';
+
+export const metadata: Metadata = {
+ metadataBase: new URL('https://twitterbio.io'),
+ title,
+ description,
+ openGraph: {
+ title,
+ description,
+ locale: 'en_US',
+ type: 'website',
+ },
+ twitter: {
+ card: 'summary_large_image',
+ title,
+ description,
+ },
+};
+
+export default function RootLayout({
+ children,
+}: {
+ children: React.ReactNode;
+}) {
+ return (
+
+
+ {children}
+
+
+
+ );
+}
diff --git a/public/og-image.png b/app/opengraph-image.png
similarity index 100%
rename from public/og-image.png
rename to app/opengraph-image.png
diff --git a/pages/index.tsx b/app/page.tsx
similarity index 68%
rename from pages/index.tsx
rename to app/page.tsx
index ce54c69e..98dde219 100644
--- a/pages/index.tsx
+++ b/app/page.tsx
@@ -1,53 +1,39 @@
-import type { NextPage } from "next";
-import Head from "next/head";
-import Image from "next/image";
-import { useRef, useState } from "react";
-import { Toaster, toast } from "react-hot-toast";
-import DropDown, { VibeType } from "../components/DropDown";
-import Footer from "../components/Footer";
-import Github from "../components/GitHub";
-import Header from "../components/Header";
-import LoadingDots from "../components/LoadingDots";
-import {
- createParser,
- ParsedEvent,
- ReconnectInterval,
-} from "eventsource-parser";
+'use client';
-const Home: NextPage = () => {
- const [loading, setLoading] = useState(false);
- const [bio, setBio] = useState("");
- const [vibe, setVibe] = useState("Professional");
- const [generatedBios, setGeneratedBios] = useState("");
+import Image from 'next/image';
+import { useRef, useState } from 'react';
+import { Toaster, toast } from 'react-hot-toast';
+import DropDown, { VibeType } from '../components/DropDown';
+import Footer from '../components/Footer';
+import Github from '../components/GitHub';
+import Header from '../components/Header';
+export default function Page() {
+ const [loading, setLoading] = useState(false);
+ const [bio, setBio] = useState('');
+ const [vibe, setVibe] = useState('Professional');
+ const [generatedBios, setGeneratedBios] = useState('');
const bioRef = useRef(null);
const scrollToBios = () => {
if (bioRef.current !== null) {
- bioRef.current.scrollIntoView({ behavior: "smooth" });
+ bioRef.current.scrollIntoView({ behavior: 'smooth' });
}
};
- const prompt = `Generate 2 ${vibe} twitter biographies with no hashtags and clearly labeled "1." and "2.". ${
- vibe === "Funny"
- ? "Make sure there is a joke in there and it's a little ridiculous."
- : null
- }
- Make sure each generated biography is less than 160 characters, has short sentences that are found in Twitter bios, and base them on this context: ${bio}${
- bio.slice(-1) === "." ? "" : "."
- }`;
-
const generateBio = async (e: any) => {
e.preventDefault();
- setGeneratedBios("");
+ setGeneratedBios('');
setLoading(true);
- const response = await fetch("/api/generate", {
- method: "POST",
+
+ const response = await fetch('/api/chat', {
+ method: 'POST',
headers: {
- "Content-Type": "application/json",
+ 'Content-Type': 'application/json',
},
body: JSON.stringify({
- prompt,
+ vibe,
+ bio,
}),
});
@@ -61,40 +47,24 @@ const Home: NextPage = () => {
return;
}
- const onParse = (event: ParsedEvent | ReconnectInterval) => {
- if (event.type === "event") {
- const data = event.data;
- try {
- const text = JSON.parse(data).text ?? ""
- setGeneratedBios((prev) => prev + text);
- } catch (e) {
- console.error(e);
- }
- }
- }
-
// https://web.dev/streams/#the-getreader-and-read-methods
const reader = data.getReader();
const decoder = new TextDecoder();
- const parser = createParser(onParse);
let done = false;
+
while (!done) {
const { value, done: doneReading } = await reader.read();
done = doneReading;
const chunkValue = decoder.decode(value);
- parser.feed(chunkValue);
+ setGeneratedBios((prev) => prev + chunkValue);
}
+
scrollToBios();
setLoading(false);
};
return (