-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy path: main.py
68 lines (51 loc) · 1.78 KB
/
main.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
import streamlit as st
import pandas as pd
import numpy as np
import json
from PyPDF2 import PdfReader
from langchain.text_splitter import CharacterTextSplitter
from langchain_community.embeddings import HuggingFaceEmbeddings
from langchain_community.vectorstores import FAISS
# from llama_cpp import Llama
import requests
st.title("DoChat 💬 *Chat with your Document*")
pdf = st.file_uploader("Upload your PDF Document", type="pdf")
def document_processor(doc):
text_splitter = CharacterTextSplitter(
separator="\n",
chunk_size = 1000,
chunk_overlap = 200,
length_function = len
)
chunks = text_splitter.split_text(doc)
emb = HuggingFaceEmbeddings()
knowledgeBase = FAISS.from_texts(chunks, emb)
return knowledgeBase
if pdf is not None:
pdf_reader = PdfReader(pdf)
text = ""
for page in pdf_reader.pages:
text += page.extract_text()
knowledgeBase = document_processor(text)
query = st.text_input('Ask a question to the PDF')
cancel_button = st.button('Cancel')
if cancel_button:
st.stop()
if query:
docs = knowledgeBase.similarity_search(query)
import json
system_message = "You are a helpful assistant"
user_message = query
context = docs
max_tokens = 3000
prompt = f"""<s>[INST] <<SYS>>
{system_message}
<</SYS>>
Context: {context}
Question: {user_message}
Answer:
[/INST]"""
model_path = './llama-2-7b-chat.Q2_K.gguf'
model = Llama(model_path=model_path)
output = model(prompt, max_tokens=max_tokens, echo=True)
st.write(output)