Update db filename (#13)
* Rename bedrock.db to springfield.db

* Update sqlite DB export script: fixup overlooked deletion of security app

* Ensure that the git SHA is a string, not bytes, if it has to be obtained from a local checkout

* Don't set an ACL for the uploaded DB files

The new-era S3 bucket doesn't allow this, and instead we have a public-read policy on the bucket

* If in force mode, don't check for previous DB details

This avoids a chicken-and-egg problem when uploading a DB for the first time
stevejalim authored Feb 12, 2025
1 parent cec9f69 commit f6e942b
Showing 12 changed files with 24 additions and 31 deletions.
4 changes: 2 additions & 2 deletions .gitignore
@@ -1,6 +1,6 @@
 __pycache__/
 !.vscode/extensions.json
-!root_files/bedrock_db_info.json
+!root_files/*_db_info.json
 .#*
 .cache
 .coverage
@@ -28,7 +28,7 @@ __pycache__/
 /locale
 /results
 assets
-bedrock_db_info.json
+*_db_info.json
 build
 build.py
 db.sql
6 changes: 3 additions & 3 deletions bin/db_s3_utils.py
@@ -9,10 +9,10 @@
 from os import getenv
 from subprocess import CalledProcessError, check_output
 
-JSON_DATA_FILE_NAME = "bedrock_db_info.json"
+JSON_DATA_FILE_NAME = "springfield_db_info.json"
 DATA_PATH = getenv("DATA_PATH", "data")
 JSON_DATA_FILE = getenv("AWS_DB_JSON_DATA_FILE", f"{DATA_PATH}/{JSON_DATA_FILE_NAME}")
-DB_FILE = f"{DATA_PATH}/bedrock.db"
+DB_FILE = f"{DATA_PATH}/springfield.db"
 CACHE = {}
 BLOCKSIZE = 65536
@@ -43,7 +43,7 @@ def get_git_sha():
     git_sha = getenv("GIT_SHA")
     if not git_sha:
         try:
-            git_sha = check_output("git rev-parse HEAD", shell=True).strip()
+            git_sha = check_output("git rev-parse HEAD", shell=True).decode("ascii").strip()
         except CalledProcessError:
             git_sha = "testing"
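
Worth noting: subprocess.check_output returns bytes on Python 3, so without the decode the fallback path would hand back a bytes object while the GIT_SHA env-var path returns str. A minimal sketch of the difference (illustrative only):

    from subprocess import check_output

    raw = check_output("git rev-parse HEAD", shell=True)  # bytes, e.g. b"f6e942b...\n"
    sha = raw.decode("ascii").strip()                     # str, e.g. "f6e942b..."
    assert isinstance(sha, str)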

4 changes: 0 additions & 4 deletions bin/export-db-to-sqlite.sh
@@ -159,10 +159,6 @@ python manage.py dumpdata \
     cms.SimpleRichTextPage \
     cms.SpringfieldImage \
     newsletter.Newsletter \
-    security.Product \
-    security.SecurityAdvisory \
-    security.HallOfFamer \
-    security.MitreCVE \
     releasenotes.ProductRelease \
     utils.GitRepoState \
     --indent 2 \
2 changes: 1 addition & 1 deletion bin/run-db-download.py
@@ -18,7 +18,7 @@
     set_db_data,
 )
 
-BUCKET_NAME = os.getenv("AWS_DB_S3_BUCKET", "bedrock-db-dev")
+BUCKET_NAME = os.getenv("AWS_DB_S3_BUCKET", "springfield-db-dev")
 REGION_NAME = os.getenv("AWS_DB_REGION", "us-west-2")
 S3_BASE_URL = f"https://s3-{REGION_NAME}.amazonaws.com/{BUCKET_NAME}"
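
With those defaults, downloads resolve to a path-style URL such as https://s3-us-west-2.amazonaws.com/springfield-db-dev/springfield_db_info.json (derived from the f-string above; the file name comes from JSON_DATA_FILE_NAME in bin/db_s3_utils.py).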

23 changes: 10 additions & 13 deletions bin/run-db-upload.py
@@ -21,7 +21,7 @@
 )
 
 CACHE = {}
-BUCKET_NAME = os.getenv("AWS_DB_S3_BUCKET", "bedrock-db-dev")
+BUCKET_NAME = os.getenv("AWS_DB_S3_BUCKET", "springfield-db-dev")
 REGION_NAME = os.getenv("AWS_DB_S3_REGION", "us-west-2")
@@ -53,15 +53,15 @@ def upload_db_data(db_data):
 
     try:
         # upload the new db
-        s3.upload_file(DB_FILE, BUCKET_NAME, db_data["file_name"], ExtraArgs={"ACL": "public-read"})
-    except Boto3Error:
-        return f"ERROR: Failed to upload the new database: {db_data}"
+        s3.upload_file(DB_FILE, BUCKET_NAME, db_data["file_name"])
+    except Boto3Error as ex:
+        return f"ERROR: Failed to upload the new database: {db_data} -- {ex}"
 
     try:
         # after successful file upload, upload json metadata
-        s3.upload_file(JSON_DATA_FILE, BUCKET_NAME, JSON_DATA_FILE_NAME, ExtraArgs={"ACL": "public-read"})
-    except Boto3Error:
-        return f"ERROR: Failed to upload the new database info file: {db_data}"
+        s3.upload_file(JSON_DATA_FILE, BUCKET_NAME, JSON_DATA_FILE_NAME)
+    except Boto3Error as ex:
+        return f"ERROR: Failed to upload the new database info file: {db_data} -- {ex}"
 
     return 0
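
With the per-object ACLs gone, public reads now have to come from the bucket policy mentioned in the commit message. That policy is not part of this diff; a minimal sketch of what a public-read bucket policy could look like when applied with boto3 (bucket name and policy body are illustrative assumptions, not the actual infra config):

    import json

    import boto3

    # Sketch only: grants anonymous read on every object in the bucket,
    # replacing the per-object "public-read" ACLs the new bucket rejects.
    PUBLIC_READ_POLICY = {
        "Version": "2012-10-17",
        "Statement": [
            {
                "Sid": "PublicReadGetObject",
                "Effect": "Allow",
                "Principal": "*",
                "Action": "s3:GetObject",
                "Resource": "arn:aws:s3:::springfield-db-dev/*",  # illustrative bucket name
            }
        ],
    }

    s3 = boto3.client("s3")
    s3.put_bucket_policy(Bucket="springfield-db-dev", Policy=json.dumps(PUBLIC_READ_POLICY))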

@@ -83,7 +83,9 @@ def get_db_data():
 
 def main(args):
     force = "--force" in args
-    prev_data = get_prev_db_data()
+    if not force:
+        prev_data = get_prev_db_data()
+
     new_data = get_db_data()
     if not force and prev_data and prev_data["checksum"] == new_data["checksum"]:
         print("No update necessary")
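
One subtlety here: prev_data is never assigned when force is True, and that is safe only because Python's "and" short-circuits, so the "not force and prev_data ..." test never evaluates the name. A standalone sketch of the same pattern (values illustrative):

    force = True
    # prev_data deliberately unassigned; "not force" is False, so the rest
    # of the condition (including prev_data) is never evaluated:
    if not force and prev_data and prev_data["checksum"] == "abc123":
        print("unreachable when force is True")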
@@ -96,11 +98,6 @@ def main(args):
         return 0
 
     res = upload_db_data(new_data)
-    # TODO decide if we should do this here or as a separate process
-    # keeping some number of these around could be good for research
-    # if res == 0 and prev_data:
-        # remove old db file
-        # delete_s3_obj(prev_data['file_name'])
 
     return res

2 changes: 1 addition & 1 deletion bin/run-prod.sh
@@ -13,7 +13,7 @@ STARTUP_FILES=(
 # However, if DATABASE_URL is NOT defined, we need to be sure the sqlite DB file
 # is already present at startup
 if [[ -z "$DATABASE_URL" ]]; then
-    STARTUP_FILES+=("data/bedrock.db")
+    STARTUP_FILES+=("data/springfield.db")
 fi
 
 for fname in "${STARTUP_FILES[@]}"; do
2 changes: 1 addition & 1 deletion docker/envfiles/demo.env
@@ -2,4 +2,4 @@ DEBUG=False
 DEV=True
 ALLOWED_HOSTS=*
 PROD_DETAILS_STORAGE=product_details.storage.PDDatabaseStorage
-AWS_DB_S3_BUCKET=bedrock-db-dev
+AWS_DB_S3_BUCKET=springfield-db-dev
2 changes: 1 addition & 1 deletion docker/envfiles/master.env
@@ -3,5 +3,5 @@
 DEBUG=True
 DEV=True
 ALLOWED_HOSTS=*
-AWS_DB_S3_BUCKET=bedrock-db-dev
+AWS_DB_S3_BUCKET=springfield-db-dev
 PROD_DETAILS_STORAGE=product_details.storage.PDDatabaseStorage
2 changes: 1 addition & 1 deletion docker/envfiles/prod.env
@@ -4,4 +4,4 @@ DEBUG=False
 DEV=False
 ALLOWED_HOSTS=*
 PROD_DETAILS_STORAGE=product_details.storage.PDDatabaseStorage
-AWS_DB_S3_BUCKET=bedrock-db-prod
+AWS_DB_S3_BUCKET=springfield-db-prod
2 changes: 1 addition & 1 deletion docker/envfiles/stage.env
@@ -4,4 +4,4 @@ DEBUG=False
 DEV=False
 ALLOWED_HOSTS=*
 PROD_DETAILS_STORAGE=product_details.storage.PDDatabaseStorage
-AWS_DB_S3_BUCKET=bedrock-db-stage
+AWS_DB_S3_BUCKET=springfield-db-stage
4 changes: 2 additions & 2 deletions springfield/base/views.py
@@ -58,9 +58,9 @@ def get_template_names(self):
     ("download_database", 600),
 )
 
-DB_INFO_FILE = getenv("AWS_DB_JSON_DATA_FILE", f"{settings.DATA_PATH}/bedrock_db_info.json")
+DB_INFO_FILE = getenv("AWS_DB_JSON_DATA_FILE", f"{settings.DATA_PATH}/springfield_db_info.json")
 GIT_SHA = getenv("GIT_SHA")
-BUCKET_NAME = getenv("AWS_DB_S3_BUCKET", "bedrock-db-dev")
+BUCKET_NAME = getenv("AWS_DB_S3_BUCKET", "springfield-db-dev")
 REGION_NAME = os.getenv("AWS_DB_REGION", "us-west-2")
 S3_BASE_URL = f"https://s3-{REGION_NAME}.amazonaws.com/{BUCKET_NAME}"
2 changes: 1 addition & 1 deletion springfield/settings/base.py
@@ -58,7 +58,7 @@ def data_path(*args):
 db_conn_health_checks = config("DB_CONN_HEALTH_CHECKS", default="false", parser=bool)
 db_default_url = config(
     "DATABASE_URL",
-    default=f"sqlite:////{data_path('bedrock.db')}",
+    default=f"sqlite:////{data_path('springfield.db')}",
 )
 
 DATABASES = {
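
The four slashes in sqlite://// encode an absolute filesystem path. Assuming dj-database-url-style parsing (an assumption; the parser actually wired up to this setting is not shown in the diff, and the path below is illustrative), the default expands roughly like this:

    import dj_database_url  # assumed parser, not confirmed by this diff

    cfg = dj_database_url.parse("sqlite:////app/data/springfield.db")
    assert cfg["ENGINE"] == "django.db.backends.sqlite3"
    assert cfg["NAME"] == "/app/data/springfield.db"  # absolute path preserved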
