From ae8dd968e47d084d475a27a8262a0313f3505549 Mon Sep 17 00:00:00 2001
From: Arslan Saleem
Date: Wed, 30 Oct 2024 18:14:33 +0100
Subject: [PATCH] feat(chatDraft): rewrite chat draft with AI (#49)

* feat[ChatDraft]: adding chat draft UI
* feat(ChatDraft): change to make chat ui better
* feat(ChatDraft): refactor unnecessary code
* feat(ChatDraft): rename quill modules and formats
* fix: prettier issue
* feat(ChatDraft): position add to draft button
* feat(chatDraft): rewrite with AI in chat draft
* fix: update successful message text
* refactor: improve error handling and message
* refactor: display throw error on toast message
* refactor: display throw error on toast message
---
 backend/app/api/v1/chat.py                  |  46 +++++-
 backend/app/requests/__init__.py            |  30 ++++
 frontend/src/components/ChatDraftDrawer.tsx | 150 +++++++++++++++++---
 frontend/src/interfaces/chat.ts             |   5 +
 frontend/src/services/chat.tsx              |  23 +++
 5 files changed, 232 insertions(+), 22 deletions(-)

diff --git a/backend/app/api/v1/chat.py b/backend/app/api/v1/chat.py
index ced0456..376d92e 100644
--- a/backend/app/api/v1/chat.py
+++ b/backend/app/api/v1/chat.py
@@ -3,6 +3,7 @@
 from app.config import settings
 from app.database import get_db
+from app.exceptions import CreditLimitExceededException
 from app.logger import Logger
 from app.models.asset_content import AssetProcessingStatus
 from app.repositories import (
@@ -10,11 +11,11 @@
     project_repository,
     user_repository,
 )
-from app.requests import chat_query
+from app.requests import chat_query, request_draft_with_ai
 from app.utils import clean_text, find_following_sentence_ending, find_sentence_endings
 from app.vectorstore.chroma import ChromaDB
 from fastapi import APIRouter, Depends, HTTPException
-from pydantic import BaseModel
+from pydantic import BaseModel, Field
 from sqlalchemy.orm import Session

 chat_router = APIRouter()
@@ -24,6 +25,10 @@ class ChatRequest(BaseModel):
     conversation_id: Optional[str] = None
     query: str

+class DraftRequest(BaseModel):
+    content: str = Field(..., min_length=1, description="Content cannot be empty")
+    prompt: str = Field(..., min_length=1, description="Prompt cannot be empty")
+

 logger = Logger()

@@ -237,3 +242,40 @@ def chat_status(project_id: int, db: Session = Depends(get_db)):
             status_code=400,
             detail="Unable to process the chat query. Please try again.",
         )
+
+@chat_router.post("/draft", status_code=200)
+def draft_with_ai(draft_request: DraftRequest, db: Session = Depends(get_db)):
+    try:
+
+        users = user_repository.get_users(db, n=1)
+
+        if not users:
+            raise HTTPException(status_code=404, detail="No User Exists!")
+
+        api_key = user_repository.get_user_api_key(db, users[0].id)
+
+        if not api_key:
+            raise HTTPException(status_code=404, detail="API Key not found!")
+
+        response = request_draft_with_ai(api_key.key, draft_request.model_dump_json())
+
+        return {
+            "status": "success",
+            "message": "Draft successfully generated!",
+            "data": {"response": response["response"]},
+        }
+
+    except HTTPException:
+        raise
+
+    except CreditLimitExceededException:
+        raise HTTPException(
+            status_code=402, detail="Credit limit Reached, Wait next month or upgrade your Plan!"
+        )
+
+    except Exception:
+        logger.error(traceback.format_exc())
+        raise HTTPException(
+            status_code=400,
+            detail="Unable to generate draft. Please try again.",
+        )
diff --git a/backend/app/requests/__init__.py b/backend/app/requests/__init__.py
index 12b917a..4e46d67 100644
--- a/backend/app/requests/__init__.py
+++ b/backend/app/requests/__init__.py
@@ -220,3 +220,33 @@ def get_user_usage_data(api_token: str):
     except requests.exceptions.JSONDecodeError:
         logger.error(f"Invalid JSON response from API server: {response.text}")
         raise Exception("Invalid JSON response")
+
+
+def request_draft_with_ai(api_token: str, draft_request: dict) -> dict:
+    # Prepare the headers with the Bearer token
+    headers = {"x-authorization": f"Bearer {api_token}"}
+    # Send the request
+    response = requests.post(
+        f"{settings.pandaetl_server_url}/v1/draft",
+        data=draft_request,
+        headers=headers,
+        timeout=360,
+    )
+
+    try:
+        if response.status_code not in [200, 201]:
+
+            if response.status_code == 402:
+                raise CreditLimitExceededException(
+                    response.json().get("detail", "Credit limit exceeded!")
+                )
+
+            logger.error(
+                f"Failed to draft with AI. It returned {response.status_code} code: {response.text}"
+            )
+            raise Exception(response.text)
+
+        return response.json()
+    except requests.exceptions.JSONDecodeError:
+        logger.error(f"Invalid JSON response from API server: {response.text}")
+        raise Exception("Invalid JSON response")
diff --git a/frontend/src/components/ChatDraftDrawer.tsx b/frontend/src/components/ChatDraftDrawer.tsx
index 5ae33be..90102eb 100644
--- a/frontend/src/components/ChatDraftDrawer.tsx
+++ b/frontend/src/components/ChatDraftDrawer.tsx
@@ -1,9 +1,12 @@
 "use client";
-import React, { useEffect, useRef } from "react";
+import React, { useEffect, useRef, useState } from "react";
 import Drawer from "./ui/Drawer";
 import { Button } from "./ui/Button";
 import ReactQuill from "react-quill";
-import { BookTextIcon } from "lucide-react";
+import { BookTextIcon, Check, Loader2, X } from "lucide-react";
+import { Textarea } from "./ui/Textarea";
+import { draft_with_ai } from "@/services/chat";
+import toast from "react-hot-toast";

 interface IProps {
   draft: string;
@@ -48,6 +51,10 @@ const ChatDraftDrawer = ({
   onCancel,
 }: IProps) => {
   const quillRef = useRef(null);
+  const [step, setStep] = useState(0);
+  const [userInput, setUserInput] = useState("");
+  const [aiDraft, setAIDraft] = useState("");
+  const [loadingAIDraft, setLoadingAIDraft] = useState(false);

   useEffect(() => {
     if (quillRef.current) {
@@ -59,28 +66,131 @@
     }
   }, [draft]);

+  const handleUserInputChange = (
+    event: React.ChangeEvent
+  ) => {
+    setUserInput(event.target.value);
+  };
+
+  const handleUserInputKeyPress = async (
+    event: React.KeyboardEvent
+  ) => {
+    if (event.key === "Enter" && userInput.trim() !== "") {
+      event.preventDefault();
+      try {
+        if (userInput.length === 0) {
+          toast.error("Please provide the prompt and try again!");
+          return;
+        }
+        setLoadingAIDraft(true);
+        const data = await draft_with_ai({ content: draft, prompt: userInput });
+        setAIDraft(data.response);
+        setUserInput("");
+        setStep(2);
+        setLoadingAIDraft(false);
+      } catch (error) {
+        console.error(error);
+        toast.error(error instanceof Error ? error.message : String(error));
+        setLoadingAIDraft(false);
+      }
+    }
+  };
+
   return (
 [The rest of this ChatDraftDrawer.tsx hunk is garbled in this copy of the patch: the JSX markup (and the angle-bracketed type parameters above) was stripped during extraction, leaving only loose fragments. What survives shows the drawer body is now rendered by step: steps 0 and 1 keep the existing ReactQuill draft editor and its actions, step 2 appears to present the AI-generated draft (aiDraft) with confirm/dismiss actions (the newly imported Check and X icons), and step 1 overlays a centered Textarea prompt input that shows a Loader2 spinner while loadingAIDraft is true. The hunks for frontend/src/interfaces/chat.ts and frontend/src/services/chat.tsx listed in the diffstat are also missing from this copy.]
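Below is a minimal sketch (not part of the patch) of exercising the new draft endpoint from a script. It assumes the backend is running on a local dev server and that chat_router is mounted under a /v1/chat prefix; both the base URL and the prefix are assumptions, so adjust them to your deployment. The request body mirrors DraftRequest, and the response envelope matches what the draft_with_ai route above returns.

import requests

BASE_URL = "http://localhost:8000"       # assumption: local dev server address
DRAFT_URL = f"{BASE_URL}/v1/chat/draft"  # assumption: chat_router mounted at /v1/chat

payload = {
    "content": "Draft text previously assembled from chat references.",
    "prompt": "Rewrite this in a more formal tone.",
}

resp = requests.post(DRAFT_URL, json=payload, timeout=360)

if resp.status_code == 402:
    # The endpoint maps CreditLimitExceededException to HTTP 402.
    print("Credit limit reached:", resp.json()["detail"])
elif resp.ok:
    body = resp.json()
    print(body["message"])           # "Draft successfully generated!"
    print(body["data"]["response"])  # the rewritten draft text
else:
    print("Draft request failed:", resp.status_code, resp.json().get("detail"))

On the frontend, the Enter-key handler above presumably issues the same request through the draft_with_ai helper in services/chat.tsx (its hunk is missing from this copy) and stores data.response in aiDraft for the step-2 preview.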