Merge pull request #71 from amosproj/llm_function
Added Gemini and Llama 3 support via their APIs
Showing 7 changed files with 263 additions and 1 deletion.
@@ -82,3 +82,4 @@ tenacity==8.3.0
tomli==2.0.1
typing-inspect==0.9.0
urllib3==2.2.1
google-generativeai==0.5.4
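The only dependency change in this hunk is the newly pinned Gemini SDK, google-generativeai==0.5.4. A minimal post-install sanity check, assuming the file above is the project's pip requirements file (the filename is not shown in this view):

```python
# Confirm the newly pinned Gemini SDK is installed at the expected version.
from importlib.metadata import version

print(version("google-generativeai"))  # expected: 0.5.4
```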
@@ -0,0 +1,148 @@
import os
import json
import google.generativeai as genai
from datetime import datetime
from dotenv import load_dotenv


def configure_genai():
    """
    Configure the Gemini client with the API key from the environment.

    Set GOOGLE_API_KEY in the .env file, or export it directly
    (Linux/Mac: export GOOGLE_API_KEY="Your_API_KEY").

    Raises:
        ValueError: If the API key is not found in the environment variables.
    """
    # Load the API key from the environment variables (could be removed if the
    # .env file is already loaded in the main entry point).
    load_dotenv("../../.env", override=True)

    api_key = os.getenv("GOOGLE_API_KEY")
    if not api_key:
        raise ValueError("API key not found in environment variables")

    genai.configure(
        api_key=api_key,
    )


def create_model():
    """
    Create and configure a generative model with the specified generation and safety settings.

    Returns:
        genai.GenerativeModel: A configured generative model instance.
    """
    # Sampling settings: temperature, top_p and top_k control randomness;
    # max_output_tokens caps the length of the reply.
    generation_config = {
        "temperature": 1,
        "top_p": 0.95,
        "top_k": 64,
        "max_output_tokens": 8192,
        "response_mime_type": "text/plain",
    }
    # Block harmful content at medium probability and above for all four categories.
    safety_settings = [
        {
            "category": "HARM_CATEGORY_HARASSMENT",
            "threshold": "BLOCK_MEDIUM_AND_ABOVE",
        },
        {
            "category": "HARM_CATEGORY_HATE_SPEECH",
            "threshold": "BLOCK_MEDIUM_AND_ABOVE",
        },
        {
            "category": "HARM_CATEGORY_SEXUALLY_EXPLICIT",
            "threshold": "BLOCK_MEDIUM_AND_ABOVE",
        },
        {
            "category": "HARM_CATEGORY_DANGEROUS_CONTENT",
            "threshold": "BLOCK_MEDIUM_AND_ABOVE",
        },
    ]

    return genai.GenerativeModel(
        model_name="gemini-1.5-pro-latest",
        safety_settings=safety_settings,
        generation_config=generation_config,
    )


def serialize_chat_history(history):
    """
    Convert the chat history to a serializable format.

    Args:
        history (list): A list of chat entries where each entry is an object containing message details.

    Returns:
        list: A list of dictionaries, each containing the serialized chat entry with message, timestamp, and role.
    """
    serialized_history = []
    for entry in history:
        # Extract the relevant information from the entry object
        serialized_entry = {
            "message": str(entry),
            "timestamp": datetime.now().isoformat(),
            "role": entry.role if hasattr(entry, "role") else None,
        }
        serialized_history.append(serialized_entry)
    return serialized_history


def generate_response(text_content, prompt_template):
    """
    Generate a response from a generative model based on the provided text content and a prompt template.

    Args:
        text_content (str): The main text content to be used within the prompt template.
        prompt_template (str): A template string for the prompt, containing a placeholder for the text content.

    Returns:
        str: A JSON-formatted string containing the response text and the serialized chat history.

    Raises:
        ValueError: If the API key is not found in the environment variables.
        Exception: If there are issues configuring the model or generating the response.
    """
    configure_genai()
    model = create_model()

    chat_session = model.start_chat(history=[])

    # Combine the text content and the prompt template to form the message
    message = prompt_template.format(text_content=text_content)

    # Send the message to the chat session
    response = chat_session.send_message(message)

    # Extract the response text and the chat history
    response_data = {
        "response_text": response.text,
        "chat_history": serialize_chat_history(chat_session.history),
    }

    # Return the response data in JSON format
    return json.dumps(response_data, indent=2)


if __name__ == "__main__":
    text_content = (
        "When one thinks about what a holiday means for students, "
        "we notice how important it is for the kids. It is a time "
        "when they finally get the chance to take a break from studies "
        "and pursue their hobbies. They can join courses which give them "
        "special training to specialize in it. They can get expert in arts, "
        "craft, pottery, candle making and more. Furthermore, they also make "
        "new friends there who have the same interests. In addition, students "
        "get to visit new places on holiday. Like during summer or winter holidays, "
        "they go with their families to different cities and countries. Through holidays, "
        "they get new experiences and memories which they remember for a lifetime. "
        "Furthermore, it also gives them time to relax with their families. Other cousins "
        "also visit each other’s places and spend time there. They play games and go out "
        "with each other. Moreover, students also get plenty of time to complete their homework "
        "and revise the syllabus."
    )
    prompt_template = "Give the 3 most important words in this paragraph: {text_content}"

    # text_content = input("Input your paragraph: ")
    # prompt_template = input("Enter your prompt: ")

    response_json = generate_response(text_content, prompt_template)
    print(response_json)
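For reference, a minimal usage sketch of the Gemini helper above from elsewhere in the project. The new module's filename is not shown in this view, so gemini_client below is a hypothetical import name; the snippet only parses the JSON string that generate_response returns and requires a valid GOOGLE_API_KEY as described above.

```python
import json

# Hypothetical module name; the diff view does not show the new file's actual name.
from gemini_client import generate_response

result = json.loads(
    generate_response(
        text_content="Holidays give students time to rest, travel, and pursue hobbies.",
        prompt_template="Give the 3 most important words in this paragraph: {text_content}",
    )
)
print(result["response_text"])      # the model's reply
print(len(result["chat_history"]))  # number of serialized user/model turns
```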
@@ -0,0 +1,109 @@
# 1. Visit [console.groq.com](https://console.groq.com).
# 2. Navigate to the API Keys section and create a new key.
# 3. Important: Copy the key immediately as it will only be visible once.

# !pip install groq

import os
import json
from datetime import datetime
from dotenv import load_dotenv

# Assuming 'groq' is the correct library for the API you're using
from groq import Groq


def get_groq_client():
    """
    Create a Groq client using the API key from the environment.

    Set GROQ_API_KEY in the .env file, or export it directly
    (Linux/Mac: export GROQ_API_KEY="Your_API_KEY").

    Raises:
        ValueError: If the API key is not found in the environment variables.
    """
    # Load the API key from the environment variables (could be removed if the
    # .env file is already loaded in the main entry point).
    load_dotenv("../../.env", override=True)

    api_key = os.getenv("GROQ_API_KEY")
    if not api_key:
        raise ValueError("API key not found in environment variables")
    return Groq(api_key=api_key)


def generate_response(text_content, prompt_template):
    """
    Generate a response from a Groq AI client based on the provided text content and a prompt template.

    This function initializes the Groq client, creates a chat completion request with the given text content
    and prompt template, and retrieves the response along with the chat history. The response data is
    returned in JSON format.

    Args:
        text_content (str): The main text content to be used within the prompt template.
        prompt_template (str): A template string for the prompt, containing a placeholder for the text content.

    Returns:
        str: A JSON-formatted string containing the response text and the serialized chat history.

    Raises:
        ValueError: If the API key is not found in the environment variables.
    """
    client = get_groq_client()

    # Combine the text content and the prompt template to form the message
    message = prompt_template.format(text_content=text_content)

    chat_completion = client.chat.completions.create(
        messages=[
            {
                "role": "user",
                "content": message,
            }
        ],
        model="llama3-8b-8192",
    )

    response_data = {
        "response_text": chat_completion.choices[0].message.content,
        "chat_history": [
            {
                "message": message,
                "role": "user",
                "timestamp": datetime.now().isoformat(),  # Serialization time; replace with the actual request timestamp if available
            },
            {
                "message": chat_completion.choices[0].message.content,
                "role": "model",
                "timestamp": datetime.now().isoformat(),  # Serialization time; replace with the actual response timestamp if available
            },
        ],
    }

    # Return the response data in JSON format
    return json.dumps(response_data, indent=2)


if __name__ == "__main__":
    # Example inputs; see the commented input() calls below to take them from the user instead
    text_content = (
        "When one thinks about what a holiday means for students, "
        "we notice how important it is for the kids. It is a time "
        "when they finally get the chance to take a break from studies "
        "and pursue their hobbies. They can join courses which give them "
        "special training to specialize in it. They can get expert in arts, "
        "craft, pottery, candle making and more. Furthermore, they also make "
        "new friends there who have the same interests. In addition, students "
        "get to visit new places on holiday. Like during summer or winter holidays, "
        "they go with their families to different cities and countries. Through holidays, "
        "they get new experiences and memories which they remember for a lifetime. "
        "Furthermore, it also gives them time to relax with their families. Other cousins "
        "also visit each other’s places and spend time there. They play games and go out "
        "with each other. Moreover, students also get plenty of time to complete their homework "
        "and revise the syllabus."
    )
    prompt_template = "Give me the 3 most important words for this paragraph: {text_content}"

    # To get the inputs interactively:
    # text_content = input("Enter the paragraph: ")
    # prompt_template = input("Enter the prompt: ")

    response_json = generate_response(text_content, prompt_template)
    print(response_json)
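Both helpers load their keys from a .env file two directories above the scripts (load_dotenv("../../.env", override=True)). A small pre-flight sketch, under the same path assumption, to confirm both keys are present before calling either model:

```python
import os

from dotenv import load_dotenv

# Same relative path the helpers use; adjust if running from a different directory.
load_dotenv("../../.env", override=True)

for key in ("GOOGLE_API_KEY", "GROQ_API_KEY"):
    print(f"{key}: {'set' if os.getenv(key) else 'MISSING'}")
```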
File renamed without changes.
File renamed without changes.
File renamed without changes.