diff --git a/frontend/components/__init__.py b/frontend/components/__init__.py
index eaa8b7a..42c14d6 100644
--- a/frontend/components/__init__.py
+++ b/frontend/components/__init__.py
@@ -1,4 +1,5 @@
 from .authors import *
 from .user_greetings import *
 from .logo import add_logo
-from .file_streaming import *
\ No newline at end of file
+from .file_streaming import *
+from .display import *
\ No newline at end of file
diff --git a/frontend/components/display.py b/frontend/components/display.py
new file mode 100644
index 0000000..80dcab2
--- /dev/null
+++ b/frontend/components/display.py
@@ -0,0 +1,11 @@
+import streamlit as st
+
+
+def display_source_document(source_document: list):
+    for i,source in enumerate(source_document):
+        st.markdown(f"""{i+1}. ##### Source content
+            - {source["page_content"]}
+
+            - Page number: {source["metadata"]["page"]}
+            """
+        )
diff --git "a/frontend/pages/2_\360\237\244\226_bot.py" "b/frontend/pages/2_\360\237\244\226_bot.py"
index f1bfc3e..446ae76 100644
--- "a/frontend/pages/2_\360\237\244\226_bot.py"
+++ "b/frontend/pages/2_\360\237\244\226_bot.py"
@@ -6,6 +6,7 @@ from langchain.memory.chat_message_histories import StreamlitChatMessageHistory
 
 
 from components.file_streaming import *
+from components.display import *
 
 
 @mainlayout
@@ -31,6 +32,7 @@ def display():
 
 display()
 
+BASE_URL = "http://127.0.0.1:8000"
 uploaded_files = st.sidebar.file_uploader(label="Upload PDF files", type=["pdf"])
 
 if not uploaded_files:
@@ -38,23 +40,86 @@ def display():
     st.stop()
 
 upload_data(uploaded_files)
 
 
-msgs = StreamlitChatMessageHistory()
-if len(msgs.messages) == 0 or st.sidebar.button("Clear message history"):
-    msgs.clear()
-    msgs.add_ai_message("How can I help you?")
+if "messages" not in st.session_state.keys():
+    st.session_state.messages = [
+        {
+            "role": "assistant",
+            "content": "What's troubling you? Ask me a question right away!",
+        }
+    ]
 
-avatars = {"human": "user", "ai": "assistant"}
-for msg in msgs.messages:
-    st.chat_message(avatars[msg.type]).write(msg.content)
+# Display or clear chat messages
+for message in st.session_state.messages:
+    with st.chat_message(message["role"]):
+        st.write(message["content"])
 
-if user_query := st.chat_input(placeholder="Ask me anything!"):
-    st.chat_message("user").write(user_query)
-    with st.chat_message("assistant"):
-        retrieval_handler = PrintRetrievalHandler(st.container())
-        stream_handler = StreamHandler(st.empty())
+def clear_chat_history():
+    st.session_state.messages = [
+        {
+            "role": "assistant",
+            "content": "What's troubling you? Ask me a question right away!",
+        }
+    ]
+
+
+st.sidebar.button("Clear Chat History", on_click=clear_chat_history)
+
+
+def generate_mistral_response(question: str):
+    for dict_message in st.session_state.messages:
+        if dict_message["role"] == "user":
+            question = dict_message["content"]
+
+    try:
         response = requests.post(
-            "http://127.0.0.1:8000/api/inference",
-            json={"promptMessage": user_query},
-        ).json()
+            f"{BASE_URL}/api/inference",
+            json={"promptMessage": question}).json()
+
+        if response["status"]=="error":
+            st.error("Please refresh the page and try uploading the file again.")
+            st.stop()
+
+        answer = response["result"]["answer"]
+
+    except Exception as e:
+        if response.json()=='exception.ModelDeployingException()':
+            st.error("Model is deploying in the backend servers. Please try again after some time")
+            st.stop()
+
+
+    with st.expander("Source documents 🧐", expanded=True):
+        source_documents = requests.post(
+            f"{BASE_URL}/api/inference",
+            json={"promptMessage": question}).json()["result"]["source_documents"]
+        display_source_document(source_documents)
+
+
+    return answer
+
+
+# User-provided prompt
+if prompt := st.chat_input(
+    disabled=not st.session_state.messages[-1]["role"] == "assistant",
+    placeholder="Hello, please ask me a question! 🤖"):
+    st.session_state.messages.append({"role": "user", "content": prompt})
+    with st.chat_message("user"):
+        st.write(prompt)
+
+# ask question
+st.write(st.session_state)
+
+# Generate a new response if last message is not from assistant
+if st.session_state.messages[-1]["role"] != "assistant":
+    with st.chat_message("assistant"):
+        with st.spinner("Thinking..."):
+            response = generate_mistral_response(prompt)
+            placeholder = st.empty()
+            full_response = ""
+            for item in response:
+                full_response += item
+                placeholder.markdown(full_response)
+            placeholder.markdown(full_response)
+    message = {"role": "assistant", "content": full_response}
+    st.session_state.messages.append(message)
diff --git a/test.py b/test.py
index da1a8ab..6bbd773 100644
--- a/test.py
+++ b/test.py
@@ -20,68 +20,4 @@
 # # print(b.bill)
 # a=A()
 # a.bill=3
-# print(a.bill)
-
-# if "uploaded_pdf" in st.session_state.keys():
-#     # chatbot
-#     st.subheader("Ask Studybot a question! 🤖")
-
-#     if "messages" not in st.session_state.keys():
-#         st.session_state.messages = [
-#             {
-#                 "role": "assistant",
-#                 "content": "What's troubling you? Ask me a question right away!",
-#             }
-#         ]
-
-#     # Display or clear chat messages
-#     for message in st.session_state.messages:
-#         with st.chat_message(message["role"]):
-#             st.write(message["content"])
-
-#     def clear_chat_history():
-#         st.session_state.messages = [
-#             {
-#                 "role": "assistant",
-#                 "content": "What's troubling you? Ask me a question right away!",
-#             }
-#         ]
-
-#     st.sidebar.button("Clear Chat History", on_click=clear_chat_history)
-
-#     def generate_mistral_response(question: str):
-#         for dict_message in st.session_state.messages:
-#             if dict_message["role"] == "user":
-#                 question = dict_message["content"]
-
-#         answer = requests.post(
-#             "https://hemanthsai7-studybotapi.hf.space/api/inference",
-#             json={"promptMessage": question},
-#         ).json()
-
-#         return answer
-
-# User-provided prompt
-# if prompt := st.chat_input(
-#     disabled=not st.session_state.messages[-1]["role"] == "assistant",
-#     placeholder="Hello, please ask me a question! 🤖"):
-#     st.session_state.messages.append({"role": "user", "content": prompt})
-#     with st.chat_message("user"):
-#         st.write(prompt)
-
-#     # ask question
-#     st.write(st.session_state)
-
-#     # Generate a new response if last message is not from assistant
-#     if st.session_state.messages[-1]["role"] != "assistant":
-#         with st.chat_message("assistant"):
-#             with st.spinner("Thinking..."):
-#                 response = generate_mistral_response(prompt)
-#                 placeholder = st.empty()
-#                 full_response = ""
-#                 for item in response:
-#                     full_response += item
-#                     placeholder.markdown(full_response)
-#                 placeholder.markdown(full_response)
-#             message = {"role": "assistant", "content": full_response}
-#             st.session_state.messages.append(message)
\ No newline at end of file
+# print(a.bill)
\ No newline at end of file
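The new `generate_mistral_response` helper added to the bot page above posts the same `promptMessage` to `/api/inference` twice: once to read `result["answer"]` and a second time only to read `result["source_documents"]`. Below is a minimal single-request sketch, reusing the `BASE_URL` constant and the `display_source_document` component introduced in this diff; it assumes the backend returns both keys in one payload, which the duplicated call above already relies on.

```python
import requests
import streamlit as st

from components.display import display_source_document

BASE_URL = "http://127.0.0.1:8000"  # same local backend address as in the diff


def generate_mistral_response(question: str) -> str:
    # Single POST; the one payload is assumed to carry
    # {"status": ..., "result": {"answer": ..., "source_documents": [...]}}.
    response = requests.post(
        f"{BASE_URL}/api/inference",
        json={"promptMessage": question},
    ).json()

    if response.get("status") == "error":
        st.error("Please refresh the page and try uploading the file again.")
        st.stop()

    result = response["result"]

    # Reuse the already-fetched payload instead of issuing a second request.
    with st.expander("Source documents 🧐", expanded=True):
        display_source_document(result["source_documents"])

    return result["answer"]
```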