Merge pull request #72 from DataDog/dash-workshop-updates
Adds updates for Dash workshops
arosenkranz authored May 3, 2024
2 parents 83da12c + f2f6ceb commit 518fa7d
Showing 73 changed files with 285,067 additions and 24,353 deletions.
18 changes: 10 additions & 8 deletions docker-compose.pinned.yml
@@ -23,7 +23,6 @@ services:
- /var/run/docker.sock:/var/run/docker.sock:ro
- /proc/:/host/proc/:ro
- /sys/fs/cgroup/:/host/sys/fs/cgroup:ro
- ./services/datadog-agent/postgres/dd-agent-conf.yaml:/conf.d/postgres.d/conf.yaml
frontend:
image: public.ecr.aws/x2b9z2t7/storedog/frontend:1.2.0
command: npm run dev
@@ -88,7 +87,6 @@ services:
- DD_RUNTIME_METRICS_ENABLED=true
- DD_PROFILING_ENABLED=true
volumes:
- 'postgres:/var/lib/postgresql/data'
- ./services/backend/db/restore:/docker-entrypoint-initdb.d
- ./services/backend/db/postgresql.conf:/postgresql.conf
labels:
@@ -117,7 +115,10 @@ services:
},
"max_relations": 400,
"collect_function_metrics": true,
"collection_interval": 1
"collection_interval": 1,
"collect_schemas": {
"enabled": true
}
},
{
"dbm":true,
@@ -135,7 +136,10 @@ services:
},
"max_relations": 400,
"collect_function_metrics": true,
"collection_interval": 1
"collection_interval": 1,
"collect_schemas": {
"enabled": true
}
}]'
com.datadoghq.ad.logs: '[{"source": "postgresql", "service": "postgres", "auto_multi_line_detection":true }]'
my.custom.label.team: 'database'
@@ -221,7 +225,7 @@ services:
my.custom.label.team: 'backend'
discounts:
image: public.ecr.aws/x2b9z2t7/storedog/discounts:1.2.0
command: ./my-wrapper-script.sh ${DISCOUNTS_PORT}
command: wait-for-it postgres:5432 -- ./my-wrapper-script.sh ${DISCOUNTS_PORT}
depends_on:
- postgres
- dd-agent
@@ -243,7 +247,6 @@ services:
- ./services/discounts:/app
ports:
- '${DISCOUNTS_PORT}:${DISCOUNTS_PORT}'
- '22:22'
labels:
com.datadoghq.ad.logs: '[{"source": "python", "service": "store-discounts"}]'
com.datadoghq.tags.env: '${DD_ENV-dev}'
@@ -264,7 +267,7 @@ services:
- DD_VERSION=${DD_VERSION-7}
- DD_ENV=${DD_ENV-dev}
ports:
- '3030:8080'
- '${ADS_PORT}:8080'
labels:
com.datadoghq.ad.logs: '[{"source": "java", "service": "ads-java"}]'
com.datadoghq.tags.env: '${DD_ENV-dev}'
@@ -274,4 +277,3 @@ services:

volumes:
redis:
postgres:
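One way to confirm the agent resolves the updated postgres check labels (a sketch; it assumes the compose project is up and the agent service is named dd-agent, as above):

docker compose exec dd-agent agent configcheck

The resolved postgres instance configuration should now include the collect_schemas block shown in the labels above.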
22 changes: 12 additions & 10 deletions docker-compose.yml
@@ -22,7 +22,6 @@ services:
- /var/run/docker.sock:/var/run/docker.sock:ro
- /proc/:/host/proc/:ro
- /sys/fs/cgroup/:/host/sys/fs/cgroup:ro
- ./services/datadog-agent/postgres/dd-agent-conf.yaml:/conf.d/postgres.d/conf.yaml
frontend:
build:
context: ./services/frontend
@@ -114,7 +113,6 @@ services:
- DD_RUNTIME_METRICS_ENABLED=true
- DD_PROFILING_ENABLED=true
volumes:
- 'postgres:/var/lib/postgresql/data'
- ./services/backend/db/restore:/docker-entrypoint-initdb.d
- ./services/backend/db/postgresql.conf:/postgresql.conf
labels:
@@ -143,7 +141,10 @@ services:
},
"max_relations": 400,
"collect_function_metrics": true,
"collection_interval": 1
"collection_interval": 1,
"collect_schemas": {
"enabled": true
}
},
{
"dbm":true,
@@ -161,7 +162,10 @@ services:
},
"max_relations": 400,
"collect_function_metrics": true,
"collection_interval": 1
"collection_interval": 1,
"collect_schemas": {
"enabled": true
}
}]'
com.datadoghq.ad.logs: '[{"source": "postgresql", "service": "postgres", "auto_multi_line_detection":true }]'
my.custom.label.team: 'database'
@@ -232,6 +236,8 @@ services:
- REDIS_URL=redis://redis:6379/0
- DB_HOST=postgres
- DB_PORT=5432
- POSTGRES_USER
- POSTGRES_PASSWORD
- DISABLE_SPRING=1
- DD_AGENT_HOST=dd-agent
- DD_ENV=${DD_ENV-dev}
@@ -250,7 +256,7 @@ services:
ads:
build:
context: ./services/ads/python
command: flask run --port=${ADS_PORT} --host=0.0.0.0 # If using any other port besides the default 9292, overriding the CMD is required
command: wait-for-it postgres:5432 -- flask run --port=${ADS_PORT} --host=0.0.0.0 # If using any other port besides the default 9292, overriding the CMD is required
depends_on:
- postgres
- dd-agent
@@ -283,7 +289,7 @@ services:
discounts:
build:
context: ./services/discounts
command: ./my-wrapper-script.sh ${DISCOUNTS_PORT}
command: wait-for-it postgres:5432 -- ./my-wrapper-script.sh ${DISCOUNTS_PORT}
depends_on:
- postgres
- dd-agent
@@ -305,7 +311,6 @@ services:
- ./services/discounts:/app
ports:
- '${DISCOUNTS_PORT}:${DISCOUNTS_PORT}'
- '22:22'
labels:
com.datadoghq.ad.logs: '[{"source": "python", "service": "store-discounts"}]'
com.datadoghq.tags.env: '${DD_ENV-dev}'
@@ -363,8 +368,6 @@ services:
- STOREDOG_URL=${STOREDOG_URL-http://localhost}
- PUPPETEER_TIMEOUT
- SKIP_SESSION_CLOSE
profiles:
- puppeteer
depends_on:
- frontend
command: bash puppeteer.sh
@@ -374,4 +377,3 @@ services:

volumes:
redis:
postgres:
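With the named postgres volume dropped from both compose files, the database is re-seeded from the SQL files mounted at /docker-entrypoint-initdb.d whenever the postgres container starts with a fresh data directory. A minimal sketch of forcing that (service name assumed to be postgres, as above):

docker compose rm -sf postgres && docker compose up -d postgres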
20 changes: 20 additions & 0 deletions scripts/backup-db.sh
@@ -0,0 +1,20 @@
#! /bin/bash

# exec dump on postgres container

# get root of repository
root=$(git rev-parse --show-toplevel)
prependstatements=$root/scripts/prepend_db_statements.sql
destination=$root/services/backend/db/restore/restore-$(date +%Y-%m-%d-%H-%M-%S).sql

# remove old backups in the restore folder
rm -f $root/services/backend/db/restore/*.sql

# exec dump
docker compose exec postgres pg_dump -U postgres spree_starter_development > $destination

echo "Backup created at $destination"

# prepend statements to the dump
cat $prependstatements $destination > $destination.tmp
mv $destination.tmp $destination
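A usage sketch for the new script, run from the repository root with the compose stack up (it locates paths via git rev-parse):

bash scripts/backup-db.sh

The dump is written to services/backend/db/restore/ with the Datadog statements prepended, so it is replayed the next time the postgres container initializes an empty data directory.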
60 changes: 60 additions & 0 deletions scripts/prepend_db_statements.sql
@@ -0,0 +1,60 @@
CREATE user datadog WITH password 'datadog';

--
-- PostgreSQL database dump
--

-- Dumped from database version 13.8
-- Dumped by pg_dump version 13.8

SET statement_timeout = 0;
SET lock_timeout = 0;
SET idle_in_transaction_session_timeout = 0;
SET client_encoding = 'UTF8';
SET standard_conforming_strings = on;
SELECT pg_catalog.set_config('search_path', '', false);
SET check_function_bodies = false;
SET xmloption = content;
SET client_min_messages = warning;
SET row_security = off;

\connect postgres

CREATE SCHEMA IF NOT EXISTS datadog;
GRANT USAGE ON SCHEMA datadog TO datadog;
GRANT USAGE ON SCHEMA public TO datadog;
GRANT pg_monitor TO datadog;
CREATE EXTENSION IF NOT EXISTS pg_stat_statements;

CREATE OR REPLACE FUNCTION datadog.explain_statement(
l_query TEXT,
OUT explain JSON
)
RETURNS SETOF JSON AS
$$
DECLARE
curs REFCURSOR;
plan JSON;

BEGIN
OPEN curs FOR EXECUTE pg_catalog.concat('EXPLAIN (FORMAT JSON) ', l_query);
FETCH curs INTO plan;
CLOSE curs;
RETURN QUERY SELECT plan;
END;
$$
LANGUAGE 'plpgsql'
RETURNS NULL ON NULL INPUT
SECURITY DEFINER;

--
-- Name: spree_starter_development; Type: DATABASE; Schema: -; Owner: postgres
--

CREATE DATABASE spree_starter_development WITH TEMPLATE = template0 ENCODING = 'UTF8' LOCALE = 'en_US.utf8';


ALTER DATABASE spree_starter_development OWNER TO postgres;

\connect spree_starter_development
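A quick spot-check that the prepended statements took effect (a sketch; it assumes local socket connections inside the container use trust auth, the postgres image's default):

docker compose exec postgres psql -U datadog -d postgres -c "SELECT * FROM pg_stat_database LIMIT 1;"

A row coming back confirms the datadog role can connect and read the statistics views the Agent relies on.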

12 changes: 9 additions & 3 deletions services/ads/java/src/main/java/adsjava/AdsJavaApplication.java
@@ -47,11 +47,17 @@ public String home() {
public HashMap[] ads(@RequestHeader HashMap<String, String> headers) {

boolean errorFlag = false;
if(headers.get("x-throw-error") != null) {
errorFlag = Boolean.parseBoolean(headers.get("x-throw-error"));
if(headers.get("X-Throw-Error") != null) {
errorFlag = Boolean.parseBoolean(headers.get("X-Throw-Error"));
}

if(errorFlag) {
// if x-error-rate is present, set to variable errorRate (if missing, set to 1)
double errorRate = 1;
if(headers.get("X-Error-Rate") != null) {
errorRate = Double.parseDouble(headers.get("X-Error-Rate"));
}

if(errorFlag && Math.random() < errorRate) {
// Intentionally throw error here to demonstrate Logs Error Tracking behavior
try {
throw new TimeoutException("took too long to get a response");
3 changes: 1 addition & 2 deletions services/ads/python/Dockerfile
@@ -3,12 +3,11 @@
# run with the DOCKER_BUILDKIT=1 environment variable in your
# docker build command (see build.sh)
FROM python:3.9.6-slim-buster

# Update, upgrade, and cleanup debian packages
RUN export DEBIAN_FRONTEND=noninteractive && \
apt-get update && \
apt-get upgrade --yes && \
apt-get install --yes build-essential libpq-dev && \
apt-get install --yes build-essential libpq-dev wait-for-it && \
apt-get clean && \
rm -rf /var/lib/apt/lists/*

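For reference, wait-for-it (added above from the Debian package) blocks until a TCP endpoint accepts connections and then runs the wrapped command; the general shape, with an optional timeout in seconds, is roughly:

wait-for-it postgres:5432 -t 30 -- flask run --port=${ADS_PORT} --host=0.0.0.0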
9 changes: 9 additions & 0 deletions services/ads/python/ads.py
@@ -71,8 +71,17 @@ def weighted_image(weight):
def status():
if flask_request.method == 'GET':

# determine if should throw error and save to variable
throw_error = False
if 'X-Throw-Error' in flask_request.headers and flask_request.headers['X-Throw-Error'] == 'true':
throw_error = True

# fetch error rate from header if present (0 - 1)
error_rate = 1
if 'X-Error-Rate' in flask_request.headers:
error_rate = float(flask_request.headers['X-Error-Rate'])

if throw_error and random.random() < error_rate:
try:
raise ValueError('something went wrong')
except ValueError:
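To exercise the new error headers by hand, something like the following works against either ads implementation (a sketch; the host, default port, and path are assumptions, so adjust them to however the ads service is exposed in your compose setup):

curl -s -H "X-Throw-Error: true" -H "X-Error-Rate: 0.25" "http://localhost:${ADS_PORT:-3030}/"

With these values, roughly a quarter of requests should take the intentional error path shown above.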
4 changes: 3 additions & 1 deletion services/backend/db/postgresql.conf
@@ -704,7 +704,9 @@ default_text_search_config = 'pg_catalog.english'
# - Shared Library Preloading -

shared_preload_libraries = 'pg_stat_statements' # (change requires restart)
pg_stat_statements.track = all
pg_stat_statements.track = ALL
pg_stat_statements.max = 10000
pg_stat_statements.track_utility = off
#local_preload_libraries = ''
#session_preload_libraries = ''
#jit_provider = 'llvmjit' # JIT library to use
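To confirm pg_stat_statements is loaded with these settings (a sketch; it assumes the extension was created in the postgres database by the prepend script above):

docker compose exec postgres psql -U postgres -d postgres -c "SELECT count(*) FROM pg_stat_statements;"

If this errors, the library may not be preloaded yet and the container needs a restart with the updated postgresql.conf.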