Merge branch 'master' into deployment
Gabau authored Nov 11, 2023
2 parents bda8396 + ddc9e82 commit 867c049
Showing 9 changed files with 120 additions and 85 deletions.
3 changes: 2 additions & 1 deletion .env.example
@@ -36,4 +36,5 @@ NEXT_AUTH_SECRET=sample
# Github OAuth Provider
GITHUB_ID=sample
GITHUB_SECRET=sample
OPENAI_API_KEY=sample


2 changes: 1 addition & 1 deletion package.json
@@ -52,7 +52,7 @@
"next": "^14.0.1",
"next-auth": "^4.22.4",
"npm-run-all": "^4.1.5",
"openai": "^4.14.1",
"openai": "^4.16.1",
"react": "18.2.0",
"react-codemirror-merge": "^4.21.15",
"react-dom": "18.2.0",
10 changes: 5 additions & 5 deletions prisma/postgres/schema.prisma
@@ -117,7 +117,7 @@ model User {
matchRequest MatchRequest?
joinRequest JoinRequest[]
sessionUserAndUserMessages SessionUserAndUserMessage[]
sessionUserAndAIMessages SessionUserAndAIMessage[]
sessionAIThreads SessionAIThread[]
Submission Submission[]
}

@@ -195,15 +195,15 @@ model SessionUserAndUserMessage {
@@index([sessionId])
}

model SessionUserAndAIMessage {
id String @id @default(cuid())
model SessionAIThread {
id String @id @default(cuid())
sessionId String
userId String
message String
role String
threadId String
createdAt DateTime @default(now())
user User @relation(fields: [userId], references: [id], onDelete: Cascade)
@@unique([sessionId, userId, threadId])
@@index([sessionId])
}

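Note: the renamed SessionAIThread model stores one OpenAI thread id per session/user pair instead of persisting individual messages. A minimal sketch of how a caller might look up or create such a thread against this schema, assuming a Prisma client generated from it and the openai v4 SDK (the helper name getOrCreateThread is illustrative, not part of the commit):

import OpenAI from "openai";
import { PrismaClient } from "@prisma/client";

const prismaPostgres = new PrismaClient();
const openai = new OpenAI(); // reads OPENAI_API_KEY from the environment by default

// Illustrative helper: reuse the thread recorded for this session/user pair,
// or create one via the Assistants API and persist its id.
async function getOrCreateThread(sessionId: string, userId: string) {
  let thread = await prismaPostgres.sessionAIThread.findFirst({
    where: { sessionId, userId },
  });
  if (!thread) {
    const openaiThread = await openai.beta.threads.create({});
    thread = await prismaPostgres.sessionAIThread.create({
      data: { sessionId, userId, threadId: openaiThread.id },
    });
  }
  return thread;
}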
4 changes: 2 additions & 2 deletions src/components/AIBox.tsx
@@ -33,7 +33,7 @@ const AIBox = ({
} text-white p-2 my-2`}
>
<div className="flex justify-between">
<span>{message.role === "user" ? userName : "GPT-3.5"}</span>
<span>{message.role === "user" ? userName : "Code Assistant"}</span>
</div>
<p>{message.message}</p>
</div>
@@ -54,7 +54,7 @@ const AIBox = ({
{isAIResponding && (
<input
className="w-full rounded-md p-2"
value="GPT-3.5 is responding..."
value="Code Assistant is responding..."
disabled
/>
)}
2 changes: 0 additions & 2 deletions src/env.mjs
@@ -25,7 +25,6 @@ export const env = createEnv({
// Add `.min(1) on ID and SECRET if you want to make sure they're not empty
GITHUB_ID: z.string().min(1),
GITHUB_SECRET: z.string().min(1),
OPENAI_API_KEY: z.string().min(1),
S3_REGION: z.string().optional(),
S3_ACCESS_KEY_ID: z.string(),
S3_SECRET_ACCESS_KEY: z.string()
@@ -62,7 +61,6 @@ export const env = createEnv({
J0_URL: process.env.J0_URL,
GITHUB_ID: process.env.GITHUB_ID,
GITHUB_SECRET: process.env.GITHUB_SECRET,
OPENAI_API_KEY: process.env.OPENAI_API_KEY,
NEXT_PUBLIC_WS_URL: process.env.NODE_ENV === "production" ? process.env.NEXT_PUBLIC_WS_URL : `ws://localhost:${process.env.NEXT_PUBLIC_WS_PORT}`,
S3_REGION: process.env.S3_REGION ?? "us-east-1",
S3_ACCESS_KEY_ID: process.env.S3_ACCESS_KEY_ID ?? "S3RVER",
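Note: OPENAI_API_KEY is dropped from the env.mjs validation schema, presumably because the openai v4 client in userAndAIComm.ts is now constructed with no options and falls back to reading the key from the environment itself. A minimal sketch of the two equivalent constructions (variable names are illustrative):

import OpenAI from "openai";

// Previous approach: pass the key explicitly from the validated env.
const explicitClient = new OpenAI({ apiKey: process.env.OPENAI_API_KEY });

// New approach: with no apiKey option, the v4 SDK falls back to
// process.env.OPENAI_API_KEY on its own.
const implicitClient = new OpenAI();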
7 changes: 1 addition & 6 deletions src/hooks/useAIComm.ts
@@ -51,9 +51,8 @@ export default function useAIComm(
if (allSessionMessages)
allSessionMessages.push({
...data,
id: data.id!,
message: data.message,
createdAt: data.createdAt!,
role: data.role,
});

setChatState((state) => ({
@@ -73,12 +72,8 @@
if (chatState.currentMessage.trim().length === 0) return;

allSessionMessages?.push({
id: "",
sessionId,
userId,
message: chatState.currentMessage,
role: "user",
createdAt: new Date(),
});

addMessageMutation.mutate({
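Note: with the id and createdAt fields gone, the hook's optimistic update only pushes the fields the slimmed-down UserAndAIMessage type still carries. A small sketch of that shape, assuming the type export and the ~ path alias used elsewhere in the repo (the helper name is illustrative):

import type { UserAndAIMessage } from "~/server/api/routers/userAndAIComm";

// Illustrative optimistic update: push only sessionId, userId, message and role.
function pushOptimisticMessage(
  allSessionMessages: UserAndAIMessage[],
  sessionId: string,
  userId: string,
  message: string,
) {
  allSessionMessages.push({ sessionId, userId, message, role: "user" });
}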
2 changes: 1 addition & 1 deletion src/pages/collab/rooms/[id].tsx
@@ -255,7 +255,7 @@ const Room = () => {
<TabList>
<Tab>Output</Tab>
<Tab>Chat</Tab>
<Tab>GPT-3.5</Tab>
<Tab>Code Assistant</Tab>
{useQuestionObject.submissionStatus && <Tab>Submission</Tab>}
</TabList>
<TabPanel>
167 changes: 104 additions & 63 deletions src/server/api/routers/userAndAIComm.ts
@@ -7,21 +7,32 @@ import { EventEmitter } from "events";
import { z } from "zod";
import { createTRPCRouter, protectedProcedure } from "~/server/api/trpc";
import OpenAI from "openai";
import type { ChatCompletionRole } from "openai/resources";
import type { MessageContentText } from "openai/resources/beta/threads/messages/messages";
import { TRPCError } from "@trpc/server";

const ee = new EventEmitter();
const openai = new OpenAI({
apiKey: process.env.OPENAI_API_KEY,
});

const OPENAI_ASSISTANT_ID = "asst_1o7xscIo6R2jPBxVU5Boqmf2";
const openai = new OpenAI();

// Initialization for assistant
/*
async function main() {
const assistant = await openai.beta.assistants.create({
instructions: "You are an assistant that helps to explain code.",
name: "Code Assistant",
tools: [{ type: "code_interpreter" }],
model: "gpt-3.5-turbo-1106",
});
console.log(assistant);
} */

export type UserAndAIMessage = {
id?: string;
sessionId: string;
userId: string;
message: string;
role: ChatCompletionRole;
createdAt?: Date;
role: "user" | "assistant";
};

export const userAndAIMessagesRouter = createTRPCRouter({
@@ -35,18 +46,38 @@ export const userAndAIMessagesRouter = createTRPCRouter({
.query(async ({ ctx, input }) => {
const { sessionId, userId } = input;

const messages =
await ctx.prismaPostgres.sessionUserAndAIMessage.findMany({
where: {
let sessionThread = await ctx.prismaPostgres.sessionAIThread.findFirst({
where: {
sessionId,
userId,
},
});

if (!sessionThread) {
const newThread = await openai.beta.threads.create({});
sessionThread = await ctx.prismaPostgres.sessionAIThread.create({
data: {
sessionId,
userId,
},
orderBy: {
createdAt: "asc",
threadId: newThread.id,
},
});
}

const messages = await openai.beta.threads.messages.list(
sessionThread.threadId,
);

return messages;
return messages.data
.map((message) => {
const role = message.role;
const text = (message.content[0] as MessageContentText).text.value;
return {
message: text,
role,
};
})
.reverse(); // Messages from OpenAI Assistant are in reverse chronological order
}),

addUserAndAIMessage: protectedProcedure
@@ -60,63 +91,73 @@
.mutation(async ({ ctx, input }) => {
const { sessionId, userId, message } = input;

const messageObject =
await ctx.prismaPostgres.sessionUserAndAIMessage.create({
data: {
sessionId,
userId,
message,
role: "user",
},
});
const sessionThread = await ctx.prismaPostgres.sessionAIThread.findFirst({
where: {
sessionId,
userId,
},
});

const currentSessionMessages =
await ctx.prismaPostgres.sessionUserAndAIMessage.findMany({
where: {
sessionId,
userId,
},
orderBy: {
createdAt: "asc",
},
});
// Add Message to the session Thread
await openai.beta.threads.messages.create(sessionThread?.threadId ?? "", {
role: "user",
content: message,
});

const response = await openai.chat.completions
.create({
messages: currentSessionMessages.map((message) => {
return {
role: message.role as ChatCompletionRole,
content: message.message,
};
}),
model: "gpt-3.5-turbo",
})
.catch((errorJsonObj) => {
// Retrieve the assistant
const openaiAssistant =
await openai.beta.assistants.retrieve(OPENAI_ASSISTANT_ID);

// Create the run for the assistant
const run = await openai.beta.threads.runs.create(
sessionThread?.threadId ?? "",
{
assistant_id: openaiAssistant.id,
instructions: "Please answer clearly and concisely",
},
);

// Wait for the run to complete
while (true) {
const response = await openai.beta.threads.runs.retrieve(
sessionThread?.threadId ?? "",
run.id,
);

if (response.status === "completed") break;
else if (response.status === "failed")
throw new TRPCError({
code: "TOO_MANY_REQUESTS",
message: errorJsonObj.error.message,
message: response.last_error?.message,
});
else if (response.status === "expired")
throw new TRPCError({
code: "TIMEOUT",
message: "Request timed out",
});
});

if (response) {
const aiMessage = response.choices[0]?.message;

if (aiMessage) {
const aiMessageObject =
await ctx.prismaPostgres.sessionUserAndAIMessage.create({
data: {
sessionId,
userId,
message: aiMessage.content!,
role: aiMessage.role,
},
});

ee.emit("aiMessage", aiMessageObject);
}
}

return messageObject;
const messages = await openai.beta.threads.messages.list(
sessionThread?.threadId ?? "",
);

const aiResponse = messages.data[0];
const id = aiResponse?.id;
const role = aiResponse?.role;
const text = (aiResponse?.content[0] as MessageContentText).text.value;

ee.emit("aiMessage", {
id,
sessionId,
userId,
message: text,
role,
});

return {
message: text,
role,
};
}),

subscribeToSessionUserAndAIMessages: protectedProcedure
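Note: the rewritten router replaces chat.completions with the beta Assistants API: each user message is appended to the per-user thread, a run is started against a pre-created assistant, and the handler polls the run until it completes before reading the newest thread message. A condensed sketch of that flow with the same openai v4 client (the short delay between polls is an illustrative addition, not something the commit does):

import OpenAI from "openai";
import type { MessageContentText } from "openai/resources/beta/threads/messages/messages";

const openai = new OpenAI();
const OPENAI_ASSISTANT_ID = "asst_1o7xscIo6R2jPBxVU5Boqmf2";

// Illustrative helper mirroring the router's add-message / run / poll / read flow.
async function askAssistant(threadId: string, userMessage: string): Promise<string> {
  // Append the user's message to the thread.
  await openai.beta.threads.messages.create(threadId, {
    role: "user",
    content: userMessage,
  });

  // Start a run for the assistant on this thread.
  const run = await openai.beta.threads.runs.create(threadId, {
    assistant_id: OPENAI_ASSISTANT_ID,
    instructions: "Please answer clearly and concisely",
  });

  // Poll until the run finishes; the delay is an assumption to avoid a tight loop.
  for (;;) {
    const current = await openai.beta.threads.runs.retrieve(threadId, run.id);
    if (current.status === "completed") break;
    if (current.status === "failed" || current.status === "expired") {
      throw new Error(current.last_error?.message ?? `run ${current.status}`);
    }
    await new Promise((resolve) => setTimeout(resolve, 500));
  }

  // The newest message in the thread is the assistant's reply.
  const messages = await openai.beta.threads.messages.list(threadId);
  const content = messages.data[0]?.content[0];
  return content ? (content as MessageContentText).text.value : "";
}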
8 changes: 4 additions & 4 deletions yarn.lock
@@ -5736,10 +5736,10 @@ only@~0.0.2:
resolved "https://registry.npmjs.org/only/-/only-0.0.2.tgz"
integrity sha512-Fvw+Jemq5fjjyWz6CpKx6w9s7xxqo3+JCyM0WXWeCSOboZ8ABkyvP8ID4CZuChA/wxSx+XSJmdOm8rGVyJ1hdQ==

openai@^4.14.1:
version "4.14.1"
resolved "https://registry.yarnpkg.com/openai/-/openai-4.14.1.tgz#26ffa8e86a57da6595b46350355a754d934e61a1"
integrity sha512-aBb7DVdzSnEUBFHTbnVoitauefvjRuUHS5pa7lm1m5JmHifD+1Hff1RzxYC12ogugVcCmWT99NZNfzyD6n/0IQ==
openai@^4.16.1:
version "4.16.1"
resolved "https://registry.yarnpkg.com/openai/-/openai-4.16.1.tgz#6377682ad2af805affd1b401958fb6eb92a87d61"
integrity sha512-Gr+uqUN1ICSk6VhrX64E+zL7skjI1TgPr/XUN+ZQuNLLOvx15+XZulx/lSW4wFEAQzgjBDlMBbBeikguGIjiMg==
dependencies:
"@types/node" "^18.11.18"
"@types/node-fetch" "^2.6.4"
