Merged gitignore
howethomas committed Feb 8, 2024
2 parents 304cc43 + 7458b8f commit 6d20ba5
Showing 12 changed files with 718 additions and 34 deletions.
2 changes: 1 addition & 1 deletion .gitignore
@@ -23,9 +23,9 @@ dump.rdb
.data
tmp
docker-compose.override.yml

admin/.streamlit/secrets.toml
admin/.streamlit/config.toml
admin/.streamlit/*.pem
admin/.streamlit/*.pkl
admin/.streamlit/*.csv

23 changes: 23 additions & 0 deletions admin/Dockerfile
@@ -0,0 +1,23 @@
# Use an official Python runtime as a parent image
FROM python:3.8-slim

# Set the working directory in the container
WORKDIR /app

# Copy the current directory contents into the container at /app
COPY . /app

# Install any needed packages specified in requirements.txt
COPY requirements.txt /app/
COPY custom_info.md /app/
RUN pip install --no-cache-dir -r requirements.txt

# Make port 8501 available to the world outside this container
EXPOSE 8501

# Define environment variable
ENV NAME=World

# Run admin.py when the container launches
CMD ["streamlit", "run", "admin.py"]

55 changes: 55 additions & 0 deletions admin/admin.py
@@ -0,0 +1,55 @@
"""
## Admin Portal
This is the admin portal for the system. It allows you to view the current configuration and make changes to it.
"""

import streamlit as st
import redis
import pandas as pd
import pymongo
import json
import os

from common import manage_session_state
# Check to make sure the user is logged in
manage_session_state()

# Initialize connection.
# Uses st.cache_resource to only run once.
@st.cache_resource
def init_connection():
    url = st.secrets["mongo_db"]["url"]
    return pymongo.MongoClient(url)

client = init_connection()

# Current directory in the container is /app
# Check if the file exists
if os.path.isfile("custom_info.md"):
    # Open the file and read its contents
    with open("custom_info.md", "r") as file:
        contents = file.read()
    st.markdown(contents)

# Show the most recent vCons
st.header("Recent vCons")

# Limit to vCons that have a summary
if st.checkbox("Show only vCons with summaries"):
    only_summary = {
        'analysis.type': 'summary'
    }
else:
    only_summary = {}

num_vcons = st.number_input("Number of vCons to display", min_value=1, max_value=1000, value=10)
db = client[str(st.secrets["mongo_db"]["db"])]
vcons = db[st.secrets["mongo_db"]["collection"]].find(only_summary).sort("created_at", -1).limit(num_vcons)
for v in vcons:
    # For each vCon, display the created_at timestamp, and a link to the vCon in the inspect page.
    # First, make a human readable timestamp
    created_at = pd.to_datetime(v['created_at'])
    st.write(f"{created_at} - [Inspect vCon](/inspect?uuid={v['uuid']})")
43 changes: 43 additions & 0 deletions admin/common.py
@@ -0,0 +1,43 @@
# common.py
import streamlit as st

def redirect(page_name):
    st.session_state.current_page = page_name
    st.experimental_rerun()

def logged_in():
    if 'logged_in' not in st.session_state:
        st.session_state.logged_in = False
    return st.session_state.logged_in

def login():
    st.session_state.logged_in = True
    st.experimental_rerun()

def logout():
    st.session_state.logged_in = False
    st.experimental_rerun()

def manage_session_state():
    # Check to make sure the user is logged in
    if 'admin_password' not in st.secrets:
        st.write("No admin password set. Please set the admin password in the secrets.")
        st.stop()
    else:
        admin_password = st.secrets["admin_password"]['password']

    if logged_in() is False:
        # Log in the user by setting the session state variable
        # Get the password from the secrets and compare it to the user input
        # Make a login form
        password = st.text_input("Enter password", type="password")
        if st.button("Log in"):
            if password == admin_password:
                login()
            else:
                # Do not echo the entered or expected password back to the page.
                st.write("Incorrect password.")
        st.stop()

    # Log out the user by setting the session state variable
    if st.button("Log out"):
        logout()
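
common.py and the pages read credentials through st.secrets, which Streamlit loads from the git-ignored admin/.streamlit/secrets.toml. A minimal sketch of the sections those lookups expect, with the expected file layout shown as comments (the key names come from the code in this commit; every value is a placeholder assumption):

# Expected admin/.streamlit/secrets.toml layout (placeholder values):
#
#   [admin_password]
#   password = "change-me"
#
#   [mongo_db]
#   url = "mongodb://mongo:27017"
#   db = "example_db"
#   collection = "example_collection"
#
#   [aws]                        # only needed by the S3 import tab
#   AWS_ACCESS_KEY_ID = "placeholder"
#   AWS_SECRET_ACCESS_KEY = "placeholder"
#   AWS_DEFAULT_REGION = "us-east-1"

import streamlit as st

# These are the same lookups the admin pages perform.
admin_password = st.secrets["admin_password"]["password"]
mongo_url = st.secrets["mongo_db"]["url"]
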
3 changes: 3 additions & 0 deletions admin/custom_info.md
@@ -0,0 +1,3 @@
## Hello

There
77 changes: 77 additions & 0 deletions admin/pages/export.py
@@ -0,0 +1,77 @@
import streamlit as st
import pymongo
import json
import redis
import sys
import os

# Add the parent directory to the system path
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
from common import manage_session_state
# Check to make sure the user is logged in
manage_session_state()

# Initialize connection.
# Uses st.cache_resource to only run once.
@st.cache_resource
def init_connection():
    url = st.secrets["mongo_db"]["url"]
    return pymongo.MongoClient(url)

client = init_connection()
# Title of the app
st.title('EXPORT VCONS')
"""
Exports vCons from the database to either a
single JSONL file or individual JSON files.
"""
output_format = st.radio("EXPORT FORMAT", ("JSONL", "JSON"))
DEFAULT_PATH = ""
path = st.text_input("ENTER THE DIRECTORY PATH", value=DEFAULT_PATH)
exporting = st.button("EXPORT VCONS", key="export")

if exporting:
    with st.spinner("EXPORTING VCONS"):
        db = client[str(st.secrets["mongo_db"]["db"])]
        vcons = db[st.secrets["mongo_db"]["collection"]].find()
        if output_format == "JSONL":
            # Open a file for writing in JSONL format
            # (os.path.join copes with a directory path that lacks a trailing slash)
            with open(os.path.join(path, "output.jsonl"), "w") as file:
                # Iterate through each JSON object in the array
                for vcon in vcons:
                    # Convert the JSON object to a string and write it to the file
                    json_line = json.dumps(vcon)
                    file.write(json_line + "\n")
        else:
            for vcon in vcons:
                uuid = vcon['uuid']
                filename = os.path.join(path, uuid + ".vcon.json")
                with open(filename, "w") as f:
                    f.write(json.dumps(vcon))
    st.success("COMPLETE")

st.divider()
"***EXPORT TO REDIS***"

# Get the URL for the Redis instance
redis_url = st.text_input("ENTER THE REDIS URL", value="redis://redis:6379", key="redis_url_export")
if redis_url:
    if st.button("EXPORT VCONS", key="export_redis"):
        # Connect to Redis
        redis_client = redis.Redis.from_url(redis_url)
        db = client[str(st.secrets["mongo_db"]["db"])]
        vcons = db[st.secrets["mongo_db"]["collection"]].find()

        # So we can show progress, count the number of vCons
        count = db[st.secrets["mongo_db"]["collection"]].count_documents({})
        st.write(f"EXPORTING {count} VCONS")
        # Show progress
        progress_bar = st.progress(0)
        for index, vcon in enumerate(vcons):
            uuid = vcon['uuid']
            # Store the document itself so RedisJSON holds an object rather than a JSON-encoded string.
            redis_client.json().set(f"vcon:{uuid}", "$", vcon)
            progress_bar.progress((index + 1) / count)

        st.success("COMPLETE")
164 changes: 164 additions & 0 deletions admin/pages/import.py
@@ -0,0 +1,164 @@
import streamlit as st
import pymongo
import json
import redis
import boto3
import sys
import os
import urllib.request


# Add the parent directory to the system path
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
from common import manage_session_state
# Check to make sure the user is logged in
manage_session_state()


# Initialize connection.
# Uses st.cache_resource to only run once.
@st.cache_resource
def init_connection():
    url = st.secrets["mongo_db"]["url"]
    return pymongo.MongoClient(url)

client = init_connection()

st.title("IMPORT VCONS")

tab_names = ["UPLOAD ONE", "UPLOAD JSONL", "URL", "TEXT", "REDIS", "S3"]
upload_tab, jsonl_tab, url_tab, text_tab, redis_tab, s3_tab = st.tabs(tab_names)

with upload_tab:
    "**UPLOAD A SINGLE VCON FILE**"

    # Allow the user to upload a single JSON file
    uploaded_file = st.file_uploader("UPLOAD", type=["json", "vcon"])
    if uploaded_file is not None:
        # Unique keys keep the repeated button labels on this page from colliding.
        if st.button("UPLOAD AND INSERT", key="upload_single"):
            db = client[st.secrets["mongo_db"]['db']]
            collection = db[st.secrets["mongo_db"]['collection']]
            try:
                document = json.load(uploaded_file)
                collection.replace_one({'_id': document['uuid']}, document, upsert=True)
                st.success("INSERTED SUCCESSFULLY!")
            except json.JSONDecodeError as e:
                st.warning("INVALID JSON")
                st.error(e)

with jsonl_tab:
    "**UPLOAD BULK VCON**"

    uploaded_file = st.file_uploader("UPLOAD JSONL", type=["jsonl", "vconl"])

    if uploaded_file is not None:
        if st.button("UPLOAD AND INSERT", key="upload_jsonl"):
            db = client[st.secrets["mongo_db"]['db']]
            collection = db[st.secrets["mongo_db"]['collection']]
            for i, line in enumerate(uploaded_file):
                try:
                    document = json.loads(line)
                    collection.replace_one({'_id': document['uuid']}, document, upsert=True)
                except json.JSONDecodeError as e:
                    st.warning(f"SKIPPED INVALID JSON, INDEX {i}")
                    continue
            st.success("INSERTED SUCCESSFULLY!")

with url_tab:
    # Import from a URL
    "**IMPORT FROM URL**"
    url = st.text_input("ENTER URL")
    if url:
        if st.button("IMPORT", key="import_url"):
            db = client[st.secrets["mongo_db"]['db']]
            collection = db[st.secrets["mongo_db"]['collection']]
            try:
                # Fetch the URL and parse the response body as JSON.
                with urllib.request.urlopen(url) as response:
                    document = json.load(response)
                collection.replace_one({'_id': document['uuid']}, document, upsert=True)
                st.success("INSERTED SUCCESSFULLY!")
            except json.JSONDecodeError as e:
                st.warning("INVALID JSON")
                st.error(e)

with text_tab:
    # Import from pasted text
    "**IMPORT FROM TEXT**"
    text = st.text_area("ENTER TEXT")
    if text:
        if st.button("IMPORT", key="import_text"):
            db = client[st.secrets["mongo_db"]['db']]
            collection = db[st.secrets["mongo_db"]['collection']]
            try:
                document = json.loads(text)
                collection.replace_one({'_id': document['uuid']}, document, upsert=True)
                st.success("INSERTED SUCCESSFULLY!")
            except json.JSONDecodeError as e:
                st.warning("INVALID JSON")
                st.error(e)

with redis_tab:
    # Import from Redis
    "**IMPORT FROM REDIS**"
    redis_url = st.text_input("ENTER REDIS URL")
    if redis_url:
        if st.button("IMPORT", key="import_redis"):
            db = client[st.secrets["mongo_db"]['db']]
            collection = db[st.secrets["mongo_db"]['collection']]

            # Connect to the Redis server, and find all the keys with the pattern "vcon:*"
            redis_client = redis.Redis.from_url(redis_url)
            keys = redis_client.keys("vcon:*")
            for key in keys:
                vcon = redis_client.json().get(key)
                try:
                    collection.replace_one({'_id': vcon['uuid']}, vcon, upsert=True)
                except json.JSONDecodeError as e:
                    st.warning("INVALID JSON")
                    st.error(e)

with s3_tab:
    "**IMPORT S3 BUCKET**"
    # AWS credentials are read from the secrets file
    AWS_ACCESS_KEY_ID = st.secrets['aws']["AWS_ACCESS_KEY_ID"]
    AWS_SECRET_ACCESS_KEY = st.secrets['aws']["AWS_SECRET_ACCESS_KEY"]
    AWS_DEFAULT_REGION = st.secrets['aws']["AWS_DEFAULT_REGION"]
    s3_bucket = st.text_input("ENTER S3 BUCKET")
    s3_path = st.text_input("ENTER S3 PATH")
    if s3_bucket:
        if st.button("IMPORT", key="import_s3"):
            db = client[st.secrets["mongo_db"]['db']]
            collection = db[st.secrets["mongo_db"]['collection']]
            s3_client = boto3.client('s3', aws_access_key_id=AWS_ACCESS_KEY_ID, aws_secret_access_key=AWS_SECRET_ACCESS_KEY, region_name=AWS_DEFAULT_REGION)

            # List the bucket contents and find all the keys that end in ".vcon"
            paginator = s3_client.get_paginator('list_objects_v2')
            pages = paginator.paginate(Bucket=s3_bucket, Prefix=s3_path)

            # Count the number of vCons we're importing overall
            count = 0
            for page in pages:
                for obj in page.get('Contents', []):
                    key = obj['Key']
                    if key.endswith(".vcon"):
                        count += 1
            st.write(f"IMPORTING {count} VCONS")
            # Show progress
            progress_bar = st.progress(0)
            upload_count = 0
            # Paginate again so the second pass re-issues the listing requests
            for page in paginator.paginate(Bucket=s3_bucket, Prefix=s3_path):
                for obj in page.get('Contents', []):
                    key = obj['Key']
                    if key.endswith(".vcon"):
                        try:
                            vcon = s3_client.get_object(Bucket=s3_bucket, Key=key)
                            vcon = json.loads(vcon['Body'].read())
                            result = collection.replace_one({'_id': vcon['uuid']}, vcon, upsert=True)
                            upload_count += 1
                            progress_bar.progress(upload_count / count)
                        except json.JSONDecodeError as e:
                            st.warning("INVALID JSON")
                            st.error(e)
                    else:
                        st.warning(f"SKIPPING {key}")
            st.success("COMPLETE")