diff --git a/app/(chat)/opengraph-image.png b/app/(chat)/opengraph-image.png
deleted file mode 100644
index 79fbc0f..0000000
Binary files a/app/(chat)/opengraph-image.png and /dev/null differ
diff --git a/app/(chat)/twitter-image.png b/app/(chat)/twitter-image.png
deleted file mode 100644
index 79fbc0f..0000000
Binary files a/app/(chat)/twitter-image.png and /dev/null differ
diff --git a/app/config/index.tsx b/app/config/index.tsx
index c01a021..404c78b 100644
--- a/app/config/index.tsx
+++ b/app/config/index.tsx
@@ -95,14 +95,13 @@ export const siweConfig = createSIWEConfig({
   },
   verifyMessage: async ({ message, signature }: SIWEVerifyMessageArgs) => {
     try {
-      const success = await signIn('credentials', {
+      await signIn('credentials', {
         message,
-        redirect: false,
+        redirectTo: '/',
         signature,
-        callbackUrl: '/protected',
       });
 
-      return Boolean(success?.ok);
+      return true;
     } catch (error) {
       return false;
     }
@@ -110,7 +109,8 @@ export const siweConfig = createSIWEConfig({
   signOut: async () => {
     try {
       await signOut({
-        redirect: false,
+        redirect: true,
+        redirectTo: '/',
       });
 
       return true;
diff --git a/app/layout.tsx b/app/layout.tsx
index 55e3bdd..a59e9f8 100644
--- a/app/layout.tsx
+++ b/app/layout.tsx
@@ -1,27 +1,25 @@
-import type { Metadata } from "next";
-import { headers } from "next/headers";
-import { Toaster } from "sonner";
-import { cookieToInitialState } from "wagmi";
+import type { Metadata } from 'next';
+import { headers } from 'next/headers';
+import { Toaster } from 'sonner';
+import { cookieToInitialState } from 'wagmi';
 
-import { ThemeProvider } from "@/components/theme-provider";
+import { ThemeProvider } from '@/components/theme-provider';
 
-import { wagmiAdapter } from "./config";
-import ContextProvider from "./context";
-
-import "./globals.css";
+import { wagmiAdapter } from './config';
+import ContextProvider from './context';
+import './globals.css';
 
 export const metadata: Metadata = {
-  metadataBase: new URL("https://chat.vercel.ai"),
-  title: "Pattern",
-  description: "Pattern is a decentralized agentic RAG network",
+  title: 'Pattern',
+  description: 'Pattern is a decentralized agentic RAG network',
 };
 
 export const viewport = {
   maximumScale: 1,
 };
 
-const LIGHT_THEME_COLOR = "hsl(0 0% 100%)";
-const DARK_THEME_COLOR = "hsl(240deg 10% 3.92%)";
+const LIGHT_THEME_COLOR = 'hsl(0 0% 100%)';
+const DARK_THEME_COLOR = 'hsl(240deg 10% 3.92%)';
 const THEME_COLOR_SCRIPT = `\
 (function() {
   var html = document.documentElement;
@@ -46,7 +44,7 @@ export default async function RootLayout({
   children: React.ReactNode;
 }>) {
   const headersObj = await headers();
-  const cookies = headersObj.get("cookie");
+  const cookies = headersObj.get('cookie');
   const initialState = cookieToInitialState(wagmiAdapter.wagmiConfig, cookies);
 
   return (
diff --git a/components/chat.tsx b/components/chat.tsx
index 95017ea..6b1367d 100644
--- a/components/chat.tsx
+++ b/components/chat.tsx
@@ -4,12 +4,11 @@ import type { Attachment, Message } from 'ai';
 import { useChat } from 'ai/react';
 import { useState } from 'react';
 import { toast } from 'sonner';
-import useSWR, { useSWRConfig } from 'swr';
+import { useSWRConfig } from 'swr';
 
 import { ChatHeader } from '@/components/chat-header';
 import { useArtifactSelector } from '@/hooks/use-artifact';
-import type { Vote } from '@/lib/db/schema';
-import { fetcher, generateUUID } from '@/lib/utils';
+import { generateUUID } from '@/lib/utils';
 
 import { Artifact } from './artifact';
 import { Messages } from './messages';
@@ -53,11 +52,6 @@ export function Chat({
     },
   });
 
-  const { data: votes } = useSWR<Array<Vote>>(
-    `/api/vote?chatId=${id}`,
-    fetcher,
-  );
-
   const [attachments, setAttachments] = useState<Array<Attachment>>([]);
   const isArtifactVisible = useArtifactSelector((state) => state.isVisible);
 
@@ -69,7 +63,7 @@ export function Chat({
diff --git a/docs/01-quick-start.md b/docs/01-quick-start.md
deleted file mode 100644
index ead8251..0000000
--- a/docs/01-quick-start.md
+++ /dev/null
@@ -1,40 +0,0 @@
-# Quick Start
-
-The chatbot template is a web application built using [Next.js](https://nextjs.org) and the [AI SDK](https://sdk.vercel.ai) that can be used as a starting point for building your own AI applications. The template is designed to be easily customizable and extendable, allowing you to add new features and integrations as needed.
-
-Deploying to [Vercel](https://vercel.com) is the quickest way to get started with the chatbot template, as it automatically sets up the project by connecting to integrations and deploys it to the cloud. You can then later develop the project locally and push changes to the Vercel project.
-
-### Pre-requisites:
-
-- Vercel account and [Vercel CLI](https://vercel.com/docs/cli)
-- GitHub/GitLab/Bitbucket account
-- API Key from [OpenAI](https://platform.openai.com)
-
-### Deploy to Vercel
-
-To deploy the chatbot template to Vercel, click this [link](https://vercel.com/new/clone?repository-url=https%3A%2F%2Fgithub.com%2Fvercel%2Fai-chatbot&env=AUTH_SECRET,OPENAI_API_KEY&envDescription=Learn%20more%20about%20how%20to%20get%20the%20API%20Keys%20for%20the%20application&envLink=https%3A%2F%2Fgithub.com%2Fvercel%2Fai-chatbot%2Fblob%2Fmain%2F.env.example&demo-title=AI%20Chatbot&demo-description=An%20Open-Source%20AI%20Chatbot%20Template%20Built%20With%20Next.js%20and%20the%20AI%20SDK%20by%20Vercel.&demo-url=https%3A%2F%2Fchat.vercel.ai&stores=%5B%7B%22type%22:%22postgres%22%7D,%7B%22type%22:%22blob%22%7D%5D) to enter the 1-click deploy flow.
-
-During the flow, you will be prompted to create and connect to a postgres database and blob store. You will also need to provide environment variables for the application.
-
-After deploying the project, you can access the chatbot template by visiting the URL provided by Vercel.
-
-### Local Development
-
-To develop the chatbot template locally, you can clone the repository and link it to your Vercel project. This will allow you to pull the environment variables from the Vercel project and use them locally.
-
-```bash
-git clone https://github.com/<username>/<repository>
-cd <repository>
-pnpm install
-
-vercel link
-vercel env pull
-```
-
-After linking the project, you can start the development server by running:
-
-```bash
-pnpm dev
-```
-
-The chatbot template will be available at `http://localhost:3000`.
diff --git a/docs/02-update-models.md b/docs/02-update-models.md
deleted file mode 100644
index d61d313..0000000
--- a/docs/02-update-models.md
+++ /dev/null
@@ -1,53 +0,0 @@
-# Update Models
-
-The chatbot template ships with [OpenAI](https://sdk.vercel.ai/providers/ai-sdk-providers/openai) as the default model provider. Since the template is powered by the [AI SDK](https://sdk.vercel.ai), which supports [multiple providers](https://sdk.vercel.ai/providers/ai-sdk-providers) out of the box, you can easily switch to another provider of your choice.
-
-To update the models, you will need to update the custom provider called `myProvider` at `/lib/ai/models.ts` shown below.
-
-```ts
-import { customProvider } from "ai";
-import { openai } from "@ai-sdk/openai";
-
-export const myProvider = customProvider({
-  languageModels: {
-    "chat-model-small": openai("gpt-4o-mini"),
-    "chat-model-large": openai("gpt-4o"),
-    "chat-model-reasoning": wrapLanguageModel({
-      model: fireworks("accounts/fireworks/models/deepseek-r1"),
-      middleware: extractReasoningMiddleware({ tagName: "think" }),
-    }),
-    "title-model": openai("gpt-4-turbo"),
-    "artifact-model": openai("gpt-4o-mini"),
-  },
-  imageModels: {
-    "small-model": openai.image("dall-e-3"),
-  },
-});
-```
-
-You can replace the `openai` models with any other provider of your choice. You will need to install the provider library and switch the models accordingly.
-
-For example, if you want to use Anthropic's `claude-3-5-sonnet` model for `chat-model-large`, you can replace the `openai` model with the `anthropic` model as shown below.
-
-```ts
-import { customProvider } from "ai";
-import { anthropic } from "@ai-sdk/anthropic";
-
-export const myProvider = customProvider({
-  languageModels: {
-    "chat-model-small": openai("gpt-4o-mini"),
-    "chat-model-large": anthropic("claude-3-5-sonnet"), // Replace openai with anthropic
-    "chat-model-reasoning": wrapLanguageModel({
-      model: fireworks("accounts/fireworks/models/deepseek-r1"),
-      middleware: extractReasoningMiddleware({ tagName: "think" }),
-    }),
-    "title-model": openai("gpt-4-turbo"),
-    "artifact-model": openai("gpt-4o-mini"),
-  },
-  imageModels: {
-    "small-model": openai.image("dall-e-3"),
-  },
-});
-```
-
-You can find the provider library and model names in the [provider](https://sdk.vercel.ai/providers/ai-sdk-providers)'s documentation. Once you have updated the models, you should be able to use the new models in your chatbot.
diff --git a/docs/03-artifacts.md b/docs/03-artifacts.md
deleted file mode 100644
index 67b54b9..0000000
--- a/docs/03-artifacts.md
+++ /dev/null
@@ -1,258 +0,0 @@
-# Artifacts
-
-Artifacts is a special user interface mode that allows you to have a workspace-like interface along with the chat interface. This is similar to [ChatGPT's Canvas](https://openai.com/index/introducing-canvas) and [Claude's Artifacts](https://www.anthropic.com/news/artifacts).
-
-The template already ships with the following artifacts:
-
-- **Text Artifact**: An artifact that allows you to work with text content like drafting essays and emails.
-- **Code Artifact**: An artifact that allows you to write and execute code (Python).
-- **Image Artifact**: An artifact that allows you to work with images like editing, annotating, and processing images.
-- **Sheet Artifact**: An artifact that allows you to work with tabular data like creating, editing, and analyzing data.
-
-## Adding a Custom Artifact
-
-To add a custom artifact, you will need to create a folder in the `artifacts` directory with the artifact name. The folder should contain the following files:
-
-- `client.tsx`: The client-side code for the artifact.
-- `server.ts`: The server-side code for the artifact.
-
-Here is an example of a custom artifact called `CustomArtifact`:
-
-```bash
-artifacts/
-  custom/
-    client.tsx
-    server.ts
-```
-
-### Client-Side Example (client.tsx)
-
-This file is responsible for rendering your custom artifact. You might replace the inner UI with your own components, but the overall pattern (initialization, handling streamed data, and rendering content) remains the same. For instance:
-
-```tsx
-import { Artifact } from "@/components/create-artifact";
-import { ExampleComponent } from "@/components/example-component";
-import { toast } from "sonner";
-
-interface CustomArtifactMetadata {
-  // Define metadata your custom artifact might need—the example below is minimal.
-  info: string;
-}
-
-export const customArtifact = new Artifact<"custom", CustomArtifactMetadata>({
-  kind: "custom",
-  description: "A custom artifact for demonstrating custom functionality.",
-  // Initialization can fetch any extra data or perform side effects
-  initialize: async ({ documentId, setMetadata }) => {
-    // For example, initialize the artifact with default metadata.
-    setMetadata({
-      info: `Document ${documentId} initialized.`,
-    });
-  },
-  // Handle streamed parts from the server (if your artifact supports streaming updates)
-  onStreamPart: ({ streamPart, setMetadata, setArtifact }) => {
-    if (streamPart.type === "info-update") {
-      setMetadata((metadata) => ({
-        ...metadata,
-        info: streamPart.content as string,
-      }));
-    }
-    if (streamPart.type === "content-update") {
-      setArtifact((draftArtifact) => ({
-        ...draftArtifact,
-        content: draftArtifact.content + (streamPart.content as string),
-        status: "streaming",
-      }));
-    }
-  },
-  // Defines how the artifact content is rendered
-  content: ({
-    mode,
-    status,
-    content,
-    isCurrentVersion,
-    currentVersionIndex,
-    onSaveContent,
-    getDocumentContentById,
-    isLoading,
-    metadata,
-  }) => {
-    if (isLoading) {
-      return <div>Loading custom artifact...</div>;
-    }
-
-    if (mode === "diff") {
-      const oldContent = getDocumentContentById(currentVersionIndex - 1);
-      const newContent = getDocumentContentById(currentVersionIndex);
-      return (
-        <div>
-          <h3>Diff View</h3>
-          <pre>{oldContent}</pre>
-          <pre>{newContent}</pre>
-        </div>
-      );
-    }
-
-    return (
-      <div className="custom-artifact">
-        <ExampleComponent content={content} metadata={metadata} />
-        <button onClick={() => toast.success(metadata?.info)}>Show info</button>
-      </div>
-    );
-  },
-  // An optional set of actions exposed in the artifact toolbar.
-  actions: [
-    {
-      icon: <RefreshIcon />,
-      description: "Refresh artifact info",
-      onClick: ({ appendMessage }) => {
-        appendMessage({
-          role: "user",
-          content: "Please refresh the info for my custom artifact.",
-        });
-      },
-    },
-  ],
-  // Additional toolbar actions for more control
-  toolbar: [
-    {
-      icon: <PenIcon />,
-      description: "Edit custom artifact",
-      onClick: ({ appendMessage }) => {
-        appendMessage({
-          role: "user",
-          content: "Edit the custom artifact content.",
-        });
-      },
-    },
-  ],
-});
-```
-
-### Server-Side Example (server.ts)
-
-The server file processes the document for the artifact. It streams updates (if applicable) and returns the final content. For example:
-
-```ts
-import { smoothStream, streamText } from "ai";
-import { myProvider } from "@/lib/ai/models";
-import { createDocumentHandler } from "@/lib/artifacts/server";
-import { updateDocumentPrompt } from "@/lib/ai/prompts";
-
-export const customDocumentHandler = createDocumentHandler<"custom">({
-  kind: "custom",
-  // Called when the document is first created.
-  onCreateDocument: async ({ title, dataStream }) => {
-    let draftContent = "";
-    // For demonstration, use streamText to generate content.
-    const { fullStream } = streamText({
-      model: myProvider.languageModel("artifact-model"),
-      system:
-        "Generate a creative piece based on the title. Markdown is supported.",
-      experimental_transform: smoothStream({ chunking: "word" }),
-      prompt: title,
-    });
-
-    // Stream the content back to the client.
-    for await (const delta of fullStream) {
-      if (delta.type === "text-delta") {
-        draftContent += delta.textDelta;
-        dataStream.writeData({
-          type: "content-update",
-          content: delta.textDelta,
-        });
-      }
-    }
-
-    return draftContent;
-  },
-  // Called when updating the document based on user modifications.
-  onUpdateDocument: async ({ document, description, dataStream }) => {
-    let draftContent = "";
-    const { fullStream } = streamText({
-      model: myProvider.languageModel("artifact-model"),
-      system: updateDocumentPrompt(document.content, "custom"),
-      experimental_transform: smoothStream({ chunking: "word" }),
-      prompt: description,
-      experimental_providerMetadata: {
-        openai: {
-          prediction: {
-            type: "content",
-            content: document.content,
-          },
-        },
-      },
-    });
-
-    for await (const delta of fullStream) {
-      if (delta.type === "text-delta") {
-        draftContent += delta.textDelta;
-        dataStream.writeData({
-          type: "content-update",
-          content: delta.textDelta,
-        });
-      }
-    }
-
-    return draftContent;
-  },
-});
-```
-
-Once you have created the client and server files, you can import the artifact in the `lib/artifacts/server.ts` file and add it to the `documentHandlersByArtifactKind` array.
-
-```ts
-export const documentHandlersByArtifactKind: Array<DocumentHandler> = [
-  ...,
-  customDocumentHandler,
-];
-
-export const artifactKinds = [..., "custom"] as const;
-```
-
-Specify it in the document schema at `lib/db/schema.ts`.
-
-```ts
-export const document = pgTable(
-  "Document",
-  {
-    id: uuid("id").notNull().defaultRandom(),
-    createdAt: timestamp("createdAt").notNull(),
-    title: text("title").notNull(),
-    content: text("content"),
-    kind: varchar("text", { enum: [..., "custom"] }) // Add the custom artifact kind here
-      .notNull()
-      .default("text"),
-    userId: uuid("userId")
-      .notNull()
-      .references(() => user.id),
-  },
-  (table) => {
-    return {
-      pk: primaryKey({ columns: [table.id, table.createdAt] }),
-    };
-  },
-);
-```
-
-And also add the client-side artifact to the `artifactDefinitions` array in the `components/artifact.tsx` file.
-
-```ts
-import { customArtifact } from "@/artifacts/custom/client";
-
-export const artifactDefinitions = [..., customArtifact];
-```
-
-You should now be able to see the custom artifact in the workspace!
diff --git a/lib/ai/tools/create-document.ts b/lib/ai/tools/create-document.ts
deleted file mode 100644
index 40c9ddd..0000000
--- a/lib/ai/tools/create-document.ts
+++ /dev/null
@@ -1,71 +0,0 @@
-import { generateUUID } from '@/lib/utils';
-import { DataStreamWriter, tool } from 'ai';
-import { z } from 'zod';
-import { Session } from 'next-auth';
-import {
-  artifactKinds,
-  documentHandlersByArtifactKind,
-} from '@/lib/artifacts/server';
-
-interface CreateDocumentProps {
-  session: Session;
-  dataStream: DataStreamWriter;
-}
-
-export const createDocument = ({ session, dataStream }: CreateDocumentProps) =>
-  tool({
-    description:
-      'Create a document for a writing or content creation activities. This tool will call other functions that will generate the contents of the document based on the title and kind.',
-    parameters: z.object({
-      title: z.string(),
-      kind: z.enum(artifactKinds),
-    }),
-    execute: async ({ title, kind }) => {
-      const id = generateUUID();
-
-      dataStream.writeData({
-        type: 'kind',
-        content: kind,
-      });
-
-      dataStream.writeData({
-        type: 'id',
-        content: id,
-      });
-
-      dataStream.writeData({
-        type: 'title',
-        content: title,
-      });
-
-      dataStream.writeData({
-        type: 'clear',
-        content: '',
-      });
-
-      const documentHandler = documentHandlersByArtifactKind.find(
-        (documentHandlerByArtifactKind) =>
-          documentHandlerByArtifactKind.kind === kind,
-      );
-
-      if (!documentHandler) {
-        throw new Error(`No document handler found for kind: ${kind}`);
-      }
-
-      await documentHandler.onCreateDocument({
-        id,
-        title,
-        dataStream,
-        session,
-      });
-
-      dataStream.writeData({ type: 'finish', content: '' });
-
-      return {
-        id,
-        title,
-        kind,
-        content: 'A document was created and is now visible to the user.',
-      };
-    },
-  });
diff --git a/lib/ai/tools/get-weather.ts b/lib/ai/tools/get-weather.ts
deleted file mode 100644
index 74ab5d8..0000000
--- a/lib/ai/tools/get-weather.ts
+++ /dev/null
@@ -1,18 +0,0 @@
-import { tool } from 'ai';
-import { z } from 'zod';
-
-export const getWeather = tool({
-  description: 'Get the current weather at a location',
-  parameters: z.object({
-    latitude: z.number(),
-    longitude: z.number(),
-  }),
-  execute: async ({ latitude, longitude }) => {
-    const response = await fetch(
-      `https://api.open-meteo.com/v1/forecast?latitude=${latitude}&longitude=${longitude}&current=temperature_2m&hourly=temperature_2m&daily=sunrise,sunset&timezone=auto`,
-    );
-
-    const weatherData = await response.json();
-    return weatherData;
-  },
-});
diff --git a/lib/ai/tools/request-suggestions.ts b/lib/ai/tools/request-suggestions.ts
deleted file mode 100644
index cbfa386..0000000
--- a/lib/ai/tools/request-suggestions.ts
+++ /dev/null
@@ -1,89 +0,0 @@
-import { z } from 'zod';
-import { Session } from 'next-auth';
-import { DataStreamWriter, streamObject, tool } from 'ai';
-import { getDocumentById, saveSuggestions } from '@/lib/db/queries';
-import { Suggestion } from '@/lib/db/schema';
-import { generateUUID } from '@/lib/utils';
-import { myProvider } from '../models';
-
-interface RequestSuggestionsProps {
-  session: Session;
-  dataStream: DataStreamWriter;
-}
-
-export const requestSuggestions = ({
-  session,
-  dataStream,
-}: RequestSuggestionsProps) =>
-  tool({
-    description: 'Request suggestions for a document',
-    parameters: z.object({
-      documentId: z
-        .string()
-        .describe('The ID of the document to request edits'),
-    }),
-    execute: async ({ documentId }) => {
-      const document = await getDocumentById({ id: documentId });
-
-      if (!document || !document.content) {
-        return {
-          error: 'Document not found',
-        };
-      }
-
-      const suggestions: Array<
-        Omit<Suggestion, 'userId' | 'createdAt' | 'documentCreatedAt'>
-      > = [];
-
-      const { elementStream } = streamObject({
-        model: myProvider.languageModel('artifact-model'),
-        system:
-          'You are a help writing assistant. Given a piece of writing, please offer suggestions to improve the piece of writing and describe the change. It is very important for the edits to contain full sentences instead of just words. Max 5 suggestions.',
-        prompt: document.content,
-        output: 'array',
-        schema: z.object({
-          originalSentence: z.string().describe('The original sentence'),
-          suggestedSentence: z.string().describe('The suggested sentence'),
-          description: z.string().describe('The description of the suggestion'),
-        }),
-      });
-
-      for await (const element of elementStream) {
-        const suggestion = {
-          originalText: element.originalSentence,
-          suggestedText: element.suggestedSentence,
-          description: element.description,
-          id: generateUUID(),
-          documentId: documentId,
-          isResolved: false,
-        };
-
-        dataStream.writeData({
-          type: 'suggestion',
-          content: suggestion,
-        });
-
-        suggestions.push(suggestion);
-      }
-
-      if (session.user?.id) {
-        const userId = session.user.id;
-
-        await saveSuggestions({
-          suggestions: suggestions.map((suggestion) => ({
-            ...suggestion,
-            userId,
-            createdAt: new Date(),
-            documentCreatedAt: document.createdAt,
-          })),
-        });
-      }
-
-      return {
-        id: documentId,
-        title: document.title,
-        kind: document.kind,
-        message: 'Suggestions have been added to the document',
-      };
-    },
-  });
diff --git a/lib/ai/tools/update-document.ts b/lib/ai/tools/update-document.ts
deleted file mode 100644
index 1f858fe..0000000
--- a/lib/ai/tools/update-document.ts
+++ /dev/null
@@ -1,60 +0,0 @@
-import { DataStreamWriter, tool } from 'ai';
-import { Session } from 'next-auth';
-import { z } from 'zod';
-import { getDocumentById, saveDocument } from '@/lib/db/queries';
-import { documentHandlersByArtifactKind } from '@/lib/artifacts/server';
-
-interface UpdateDocumentProps {
-  session: Session;
-  dataStream: DataStreamWriter;
-}
-
-export const updateDocument = ({ session, dataStream }: UpdateDocumentProps) =>
-  tool({
-    description: 'Update a document with the given description.',
-    parameters: z.object({
-      id: z.string().describe('The ID of the document to update'),
-      description: z
-        .string()
-        .describe('The description of changes that need to be made'),
-    }),
-    execute: async ({ id, description }) => {
-      const document = await getDocumentById({ id });
-
-      if (!document) {
-        return {
-          error: 'Document not found',
-        };
-      }
-
-      dataStream.writeData({
-        type: 'clear',
-        content: document.title,
-      });
-
-      const documentHandler = documentHandlersByArtifactKind.find(
-        (documentHandlerByArtifactKind) =>
-          documentHandlerByArtifactKind.kind === document.kind,
-      );
-
-      if (!documentHandler) {
-        throw new Error(`No document handler found for kind: ${document.kind}`);
-      }
-
-      await documentHandler.onUpdateDocument({
-        document,
-        description,
-        dataStream,
-        session,
-      });
-
-      dataStream.writeData({ type: 'finish', content: '' });
-
-      return {
-        id,
-        title: document.title,
-        kind: document.kind,
-        content: 'The document has been updated successfully.',
-      };
-    },
-  });
diff --git a/next.config.ts b/next.config.ts
index 48e8e15..1d31b4c 100644
--- a/next.config.ts
+++ b/next.config.ts
@@ -1,18 +1,11 @@
-import type { NextConfig } from "next";
+import type { NextConfig } from 'next';
 
 const nextConfig: NextConfig = {
   experimental: {
     ppr: true,
   },
-  images: {
-    remotePatterns: [
-      {
-        hostname: "avatar.vercel.sh",
-      },
-    ],
-  },
   webpack: (config) => {
-    config.externals.push("pino-pretty", "lokijs", "encoding");
+    config.externals.push('pino-pretty', 'lokijs', 'encoding');
     return config;
   },
 };
diff --git a/package.json b/package.json
index 53b82b8..7376444 100644
--- a/package.json
+++ b/package.json
@@ -9,12 +9,6 @@
     "lint": "next lint && biome lint --write --unsafe",
     "lint:fix": "next lint --fix && biome lint --write --unsafe",
     "format": "biome format --write",
-    "db:generate": "drizzle-kit generate",
-    "db:studio": "drizzle-kit studio",
-    "db:push": "drizzle-kit push",
-    "db:pull": "drizzle-kit pull",
-    "db:check": "drizzle-kit check",
-    "db:up": "drizzle-kit up",
     "prepare": "husky && husky install || true",
     "type-check": "tsc --noEmit"
   },
diff --git a/public/images/demo-thumbnail.png b/public/images/demo-thumbnail.png
deleted file mode 100644
index 8c6f98a..0000000
Binary files a/public/images/demo-thumbnail.png and /dev/null differ