Merge pull request #4 from Group-Project-II/dev
feat:add prompt api
nsavinda authored Jun 8, 2024
2 parents ecfcb09 + 1845a28 commit a980699
Showing 4 changed files with 86 additions and 4 deletions.
2 changes: 2 additions & 0 deletions app/__init__.py
@@ -2,9 +2,11 @@
 from .main import app
 
 from .routers.upload import router as upload_router
+from .routers.questionGenerate import router as questionGenerate_router
 
 
 app.include_router(upload_router, prefix="/api/v1")
+app.include_router(questionGenerate_router, prefix="/api/v1")
 
 
 # app/routers/upload.py
14 changes: 14 additions & 0 deletions app/routers/questionGenerate.py
@@ -0,0 +1,14 @@
from fastapi import APIRouter, Query, HTTPException
from typing import List

from ..services.prompt import prompt


router = APIRouter()

@router.get("/generate-question/", response_model=str)
async def generate_question(text: str = Query(..., description="The text to generate a question for"),
                            examid: str = Query(..., description="The ID of the exam related to the text")) -> str:
    """Endpoint to generate a question for a given text using OpenAI's model."""

    return prompt(text, examid)
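
Both routers are mounted under the /api/v1 prefix in app/__init__.py, so this route is served at GET /api/v1/generate-question/. A minimal client sketch for exercising it; the localhost base URL, port, and example query values are illustrative assumptions, not part of this commit:

# Hypothetical client call (assumes the API is running locally on port 8000)
import requests

resp = requests.get(
    "http://localhost:8000/api/v1/generate-question/",
    params={"text": "Photosynthesis converts light energy into chemical energy.", "examid": "exam-123"},
)
resp.raise_for_status()
print(resp.json())  # response_model=str, so the body is a JSON-encoded string
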
10 changes: 6 additions & 4 deletions app/routers/upload.py
@@ -1,18 +1,20 @@
-from fastapi import APIRouter, File, UploadFile, HTTPException, Response
+from fastapi import APIRouter, File, UploadFile, HTTPException, Response, Query
 import os
 
 from ..services.pinecone_upsert import upsert
 
 router = APIRouter()
 
 @router.post("/upload-pdf/", status_code=201)
-async def upload_pdf(file: UploadFile = File(...)) -> dict:
+async def upload_pdf(file: UploadFile = File(...), examid: str = Query(..., description="The ID of the exam related to the uploaded PDF")) -> dict:
     """Endpoint to upload a PDF and upsert its contents into a Pinecone vector store."""
 
     if file.content_type != 'application/pdf':
         raise HTTPException(status_code=415, detail="Unsupported file type. Please upload a PDF.")
 
     # Call the upsert function from the imported service
-    upsert(file, examid='examid')
+    upsert(file, examid)
 
-    return {"filename": file.filename}
+    # return {"filename": file.filename}
+    Response(status_code=201)
+    return {"message": "PDF uploaded successfully."}
64 changes: 64 additions & 0 deletions app/services/prompt.py
@@ -0,0 +1,64 @@
from typing import Any, BinaryIO
import os
import dotenv
import pdfplumber
from langchain_openai import OpenAIEmbeddings
from langchain_pinecone import PineconeVectorStore
from pinecone import Pinecone, ServerlessSpec

from langchain.chat_models import ChatOpenAI
from langchain.chains import RetrievalQA



dotenv.load_dotenv()

pinecone = Pinecone(api_key=os.getenv('PINECONE_API_KEY'))




def prompt(text: str, examid: str) -> str:
    """Runs a retrieval-augmented query against the exam's Pinecone namespace and returns a status message."""

    embed = OpenAIEmbeddings(
        model="text-embedding-3-large",
        api_key=os.getenv('OPENAI_API_KEY'),
        dimensions=3072
    )

    vectorstore = PineconeVectorStore(
        namespace=examid,
        index_name="abc",
        embedding=embed

    )

    vectorstore.similarity_search(
        text,
        # top_k=5
    )

    llm = ChatOpenAI(
        model="gpt-3.5-turbo",
        api_key=os.getenv('OPENAI_API_KEY')
    )

    qa = RetrievalQA.from_chain_type(
        llm=llm,
        chain_type="stuff",
        retriever=vectorstore.as_retriever()
    )

    print(qa.invoke(text))
    return "Question generated successfully."










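As committed, prompt() prints the RetrievalQA output and returns a fixed status string, even though the /generate-question/ endpoint declares response_model=str as if it returned the generated question, and the standalone vectorstore.similarity_search(text) call discards its result (retrieval happens again through the retriever handed to RetrievalQA). A quick way to exercise the service layer directly; the exam ID and query text are placeholders, and this assumes PINECONE_API_KEY and OPENAI_API_KEY are set and that the "abc" index already holds vectors under that exam's namespace:

# Hypothetical local check of the service function (placeholder exam ID and text)
from app.services.prompt import prompt

print(prompt("Summarise the key points about photosynthesis.", examid="exam-123"))
# Prints the RetrievalQA result as a side effect, then "Question generated successfully."

If the intent is to return the generated question itself, qa.invoke(text) returns a dict whose default output key is "result", so the function could return that value instead of the fixed string.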
