Commit
openai chat traceable
GwonHyeok committed Feb 19, 2025
1 parent 3e5de36 commit 29001b2
Showing 2 changed files with 15 additions and 8 deletions.
8 changes: 7 additions & 1 deletion .env.example
@@ -30,4 +30,10 @@ BLOB_READ_WRITE_TOKEN=
 
 # Trigger.dev for cloud deployment
 TRIGGER_PROJECT_ID=
-TRIGGER_SECRET_KEY=
+TRIGGER_SECRET_KEY=
+
+# Langsmith
+LANGSMITH_TRACING=false
+LANGSMITH_ENDPOINT=https://api.smith.langchain.com
+LANGSMITH_API_KEY=
+LANGSMITH_PROJECT=
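
These variables configure LangSmith tracing for the LangChain client introduced in the route change below: when LANGSMITH_TRACING=true and a valid LANGSMITH_API_KEY are present, recent LangChain/LangSmith versions pick them up from the environment and record chat model calls as runs in the project named by LANGSMITH_PROJECT. A minimal sketch of that behavior, assuming the variables above are set (the model name and prompt are illustrative, not part of this commit):

    // Illustrative only: assumes OPENAI_API_KEY is set, plus LANGSMITH_TRACING=true,
    // LANGSMITH_API_KEY, and LANGSMITH_PROJECT; the streamed call below is then traced
    // to the configured LangSmith project without further code changes.
    import {ChatOpenAI} from "@langchain/openai";

    const model = new ChatOpenAI({model: "gpt-4o-mini", streaming: true});

    for await (const chunk of await model.stream("Say hello")) {
      process.stdout.write(chunk.content.toString()); // chunks stream while the run is recorded
    }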
15 changes: 8 additions & 7 deletions src/app/api/chat-rooms/[id]/messages/route.ts
@@ -1,11 +1,11 @@
 import {NextRequest, NextResponse} from "next/server";
 import prisma, {Prisma} from "@/lib/prisma";
-import OpenAI from "openai";
 import Anthropic from "@anthropic-ai/sdk";
 import {GoogleGenerativeAI} from "@google/generative-ai";
 import {auth} from "@/auth";
 import {decrypt} from "@/lib/encryption";
 import {currentDeploymentEnv} from "@/lib/current-deployment-env";
+import {ChatOpenAI} from "@langchain/openai";
 
 export interface ChatMessage extends Prisma.ChatMessageGetPayload<{
   select: {
@@ -161,17 +161,18 @@ export async function POST(
       }
     } else if (llmProvider.providerId === 'openai') {
       // OpenAI API call
-      const openai = new OpenAI({apiKey, baseURL: llmProvider.apiURL});
       const llmProviderModelId = chatRoom.llmProviderModelId;
       if (!llmProviderModelId) throw new Error('No LLM model ID provided');
-      const chatStream = await openai.chat.completions.create({
+      const openai = new ChatOpenAI({
+        apiKey,
         model: llmProviderModelId,
-        messages: messages,
-        stream: true
-      });
+        configuration: {baseURL: llmProvider.apiURL},
+        streaming: true,
+      })
 
+      const chatStream = await openai.stream(messages)
       for await (const part of chatStream) {
-        const deltaContent = part.choices[0]?.delta.content
+        const deltaContent = part.content.toString()
         if (deltaContent !== undefined) messageContent += deltaContent;
         controller.enqueue(`${JSON.stringify({content: messageContent})}\n`);
       }
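The route's response format is unchanged: each enqueued line is a JSON object whose content field carries the full accumulated assistant message so far, delimited by newlines. A hypothetical client-side consumer of that stream might look like the sketch below (the request payload shape and chat-room id are placeholders, not shown in this commit):

    // Hypothetical consumer of the newline-delimited JSON stream emitted by
    // POST /api/chat-rooms/[id]/messages; each parsed line replaces the previous content.
    async function readAssistantStream(chatRoomId: string, body: unknown): Promise<string> {
      const res = await fetch(`/api/chat-rooms/${chatRoomId}/messages`, {
        method: "POST",
        headers: {"Content-Type": "application/json"},
        body: JSON.stringify(body), // placeholder payload; the expected shape is not part of this diff
      });
      const reader = res.body!.pipeThrough(new TextDecoderStream()).getReader();
      let latest = "";
      let buffered = "";
      while (true) {
        const {value, done} = await reader.read();
        if (done) break;
        buffered += value;
        const lines = buffered.split("\n");
        buffered = lines.pop() ?? ""; // keep any partial trailing line for the next chunk
        for (const line of lines) {
          if (line.trim()) latest = JSON.parse(line).content; // each line holds the full message so far
        }
      }
      return latest;
    }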
