Separate Keys (#718)
rob-gordon authored Aug 21, 2024
1 parent eba6d1b commit 7a23760
Showing 3 changed files with 34 additions and 123 deletions.
74 changes: 0 additions & 74 deletions api/_lib/_llm.ts

This file was deleted.

42 changes: 0 additions & 42 deletions api/prompt/_edit.ts

This file was deleted.

41 changes: 34 additions & 7 deletions api/prompt/_shared.ts
@@ -3,17 +3,16 @@ import { streamText } from "ai";
 import { stripe } from "../_lib/_stripe";
 import { kv } from "@vercel/kv";
 import { Ratelimit } from "@upstash/ratelimit";
-import { openai } from "@ai-sdk/openai";
+import { createOpenAI, type openai as OpenAI } from "@ai-sdk/openai";
 
 export const reqSchema = z.object({
   prompt: z.string().min(1),
   document: z.string(),
 });
 
-export async function handleRateLimit(req: Request) {
-  const ip = getIp(req);
-  let isPro = false,
-    customerId: null | string = null;
+async function checkUserStatus(req: Request) {
+  let isPro = false;
+  let customerId: null | string = null;
 
   const token = req.headers.get("Authorization");
 
@@ -26,6 +25,16 @@ export async function handleRateLimit(req: Request) {
     }
   }
 
+  return { isPro, customerId };
+}
+
+export async function handleRateLimit(
+  req: Request,
+  isPro: boolean,
+  customerId: string | null
+) {
+  const ip = getIp(req);
+
   const ratelimit = new Ratelimit({
     redis: kv,
     limiter: isPro
@@ -85,11 +94,16 @@ export async function processRequest(
   req: Request,
   systemMessage: string,
   content: string,
-  model: Parameters<typeof openai.chat>[0] = "gpt-4-turbo"
+  model: Parameters<typeof OpenAI.chat>[0] = "gpt-4-turbo"
 ) {
-  const rateLimitResponse = await handleRateLimit(req);
+  const { isPro, customerId } = await checkUserStatus(req);
+  const rateLimitResponse = await handleRateLimit(req, isPro, customerId);
   if (rateLimitResponse) return rateLimitResponse;
 
+  const openai = createOpenAI({
+    apiKey: getOpenAiApiKey(isPro),
+  });
+
   const result = await streamText({
     model: openai.chat(model),
     system: systemMessage,
@@ -105,6 +119,19 @@ export async function processRequest(
   return result.toTextStreamResponse();
 }
 
+/**
+ * Returns the right api key depending on the user's subscription
+ * so we can track usage. Bear in mind a development key is used for
+ * anything that's not production.
+ */
+function getOpenAiApiKey(isPro: boolean) {
+  if (isPro) {
+    return process.env.OPENAI_API_KEY_PRO;
+  }
+
+  return process.env.OPENAI_API_KEY_FREE;
+}
+
 function getIp(req: Request) {
   return (
     req.headers.get("x-real-ip") ||
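
For orientation, here is a minimal sketch of how a sibling route under api/prompt/ might use these helpers after this commit. The file name, edge-runtime config, system message, and content formatting are illustrative assumptions, not part of the change; only reqSchema, processRequest, and the key-selection behavior come from the diff above.

// api/prompt/_example.ts (hypothetical) - a route that delegates to the shared helpers
import { processRequest, reqSchema } from "./_shared";

// Assumption: the route runs on the edge runtime, which suits streamed responses.
export const config = { runtime: "edge" };

export default async function handler(req: Request) {
  // Validate the body against the shared schema before doing any work.
  const parsed = reqSchema.safeParse(await req.json());
  if (!parsed.success) {
    return new Response("Invalid request", { status: 400 });
  }

  const { prompt, document } = parsed.data;

  // processRequest resolves the user's tier once (checkUserStatus), rate-limits
  // with it, and picks OPENAI_API_KEY_PRO or OPENAI_API_KEY_FREE via getOpenAiApiKey.
  return processRequest(
    req,
    "You edit the user's document according to their prompt.", // placeholder system message
    `Document:\n${document}\n\nPrompt:\n${prompt}`,            // placeholder content format
    "gpt-4-turbo"
  );
}

The caller never touches Stripe or API keys: the tier lookup, rate limiting, and per-plan key selection all stay inside _shared.ts, which is what lets free and pro usage be tracked on separate OpenAI keys.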
