From 926cd1ba7bffbc716791b7a3e224f868a7a331f2 Mon Sep 17 00:00:00 2001
From: lonnen
Date: Fri, 9 Sep 2016 12:21:44 -0700
Subject: [PATCH 01/13] nobug - make breakpad build idempotent

This enhances the breakpad script with some prechecks so that the
breakpad make target will run successfully and idempotently.

Additionally it fixes some shellcheck warnings.
---
 scripts/build-breakpad.sh | 47 ++++++++++++++++++++++++++++-----------
 1 file changed, 34 insertions(+), 13 deletions(-)

diff --git a/scripts/build-breakpad.sh b/scripts/build-breakpad.sh
index b9bb542ec3..3050d89d66 100755
--- a/scripts/build-breakpad.sh
+++ b/scripts/build-breakpad.sh
@@ -12,32 +12,53 @@
 # any failures in this script should cause the build to fail
 set -v -e -x
 
-export MAKEFLAGS=-j$(getconf _NPROCESSORS_ONLN)
+export MAKEFLAGS
+MAKEFLAGS=-j$(getconf _NPROCESSORS_ONLN)
 
-git clone https://chromium.googlesource.com/chromium/tools/depot_tools.git
-export PATH=`pwd`/depot_tools:$PATH
+if [ ! -d "depot_tools" ]; then
+    git clone https://chromium.googlesource.com/chromium/tools/depot_tools.git
+fi
+
+cd depot_tools || exit
+git pull origin master
+echo "using depot_tools version: $(git rev-parse HEAD)"
+cd ..
+
+# Breakpad will rely on a bunch of stuff from depot_tools, like fetch,
+# so we just put it on the path
+# see https://chromium.googlesource.com/breakpad/breakpad/+/master/#Getting-started-from-master
+export PATH
+PATH=$(pwd)/depot_tools:$PATH
 
 # Checkout and build Breakpad
-echo "PREFIX: ${PREFIX:=`pwd`/build/breakpad}"
-mkdir breakpad
-cd breakpad
-fetch breakpad
+echo "PREFIX: ${PREFIX:=$(pwd)/build/breakpad}"
+if [ ! -d "breakpad" ]; then
+    mkdir breakpad
+    cd breakpad
+    fetch breakpad
+else
+    cd breakpad
+    gclient sync
+fi
+
 cd src
-mkdir -p ${PREFIX}
-rsync -a --exclude="*.git" ./src ${PREFIX}/
-./configure --prefix=${PREFIX}
+echo "using breakpad version: $(git rev-parse HEAD)"
+
+mkdir -p "${PREFIX}"
+rsync -a --exclude="*.git" ./src "${PREFIX}"/
+./configure --prefix="${PREFIX}"
 make install
 if test -z "${SKIP_CHECK}"; then
     #FIXME: get this working again
     #make check
     true
 fi
-git rev-parse master > ${PREFIX}/revision.txt
+git rev-parse master > "${PREFIX}"/revision.txt
 cd ../..
-cp breakpad/src/src/third_party/libdisasm/libdisasm.a ${PREFIX}/lib/
+cp breakpad/src/src/third_party/libdisasm/libdisasm.a "${PREFIX}"/lib/
 
 # Optionally package everything up
 if test -z "${SKIP_TAR}"; then
-    tar -C ${PREFIX}/.. --mode 755 --owner 0 --group 0 -zcf breakpad.tar.gz `basename ${PREFIX}`
+    tar -C "${PREFIX}"/.. --mode 755 --owner 0 --group 0 -zcf breakpad.tar.gz "$(basename "${PREFIX}")"
 fi

From 74467295af6fb206315ae56e99a134411294290a Mon Sep 17 00:00:00 2001
From: Peter Bengtsson
Date: Tue, 13 Sep 2016 19:33:13 -0400
Subject: [PATCH 02/13] fixes bug 1283296 - Redirect back after sign-in (#3446)

* fixes bug 1283296 - Redirect back after sign-in

* remove early hack attempt

* fixins

* nits addressed

* adding missing file

* using the Qs module
---
 .../crashstats/jinja2/crashstats/login.html   | 23 ++++++++--------
 .../static/crashstats/js/socorro/oauth2.js    | 16 ++++++++++--
 webapp-django/crashstats/manage/decorators.py | 26 +++++++++++++++++++
 .../crashstats/manage/tests/test_views.py     |  5 +++-
 webapp-django/crashstats/manage/views.py      | 17 +----------
 5 files changed, 57 insertions(+), 30 deletions(-)
 create mode 100644 webapp-django/crashstats/manage/decorators.py

diff --git a/webapp-django/crashstats/crashstats/jinja2/crashstats/login.html b/webapp-django/crashstats/crashstats/jinja2/crashstats/login.html
index 627b61eead..58dcc6c11b 100644
--- a/webapp-django/crashstats/crashstats/jinja2/crashstats/login.html
+++ b/webapp-django/crashstats/crashstats/jinja2/crashstats/login.html
@@ -1,13 +1,14 @@
 {% extends "crashstats_base.html" %}
 
 {% block page_title %}
-{% if request.user.is_authenticated() %}Insufficient Privileges{% else %}Login Required{% endif %}
+{% if request.user.is_active %}Insufficient Privileges{% else %}Login Required{% endif %}
 {% endblock %}
 
 {% block content %}
+
- {% if request.user.is_authenticated() %} + {% if request.user.is_active %}

Insufficient Privileges

{% else %}

Login Required

@@ -16,15 +17,15 @@

Login Required

- {% if request.user.is_authenticated() %} -

- You are signed in but you do not have sufficient permissions to reach the resource you requested. -

- {% else %} -

- The page you requested requires authentication. Use the login button at the lower right to log in. -

- {% endif %} + {% if request.user.is_authenticated() %} +

+ You are signed in but you do not have sufficient permissions to reach the resource you requested. +

+ {% else %} +

+ The page you requested requires authentication. Use the login button at the lower right to log in. +

+ {% endif %}
diff --git a/webapp-django/crashstats/crashstats/static/crashstats/js/socorro/oauth2.js b/webapp-django/crashstats/crashstats/static/crashstats/js/socorro/oauth2.js
index c0277eab9d..64ce8f078d 100644
--- a/webapp-django/crashstats/crashstats/static/crashstats/js/socorro/oauth2.js
+++ b/webapp-django/crashstats/crashstats/static/crashstats/js/socorro/oauth2.js
@@ -110,8 +110,20 @@ var OAuth2 = (function() {
         $.post(url, data)
         .done(function(response) {
             // It worked!
-            // TODO: https://bugzilla.mozilla.org/show_bug.cgi?id=1283296
-            document.location.reload();
+            var next = Qs.parse(
+                document.location.search.slice(1)
+            ).next;
+            // only if ?next=/... exists on the current URL
+            if (next) {
+                // A specific URL exists.
+                // This is most likely the case when you tried
+                // to access a privileged URL whilst being
+                // anonymous and being redirected.
+                // Make sure it's on this server
+                document.location.pathname = next;
+            } else {
+                document.location.reload();
+            }
         })
         .fail(function(xhr) {
             console.error(xhr);
diff --git a/webapp-django/crashstats/manage/decorators.py b/webapp-django/crashstats/manage/decorators.py
new file mode 100644
index 0000000000..7329e34a6b
--- /dev/null
+++ b/webapp-django/crashstats/manage/decorators.py
@@ -0,0 +1,26 @@
+from django.contrib.auth.decorators import (
+    REDIRECT_FIELD_NAME,
+    user_passes_test,
+)
+
+
+def superuser_required(
+    function=None,
+    redirect_field_name=REDIRECT_FIELD_NAME,
+    login_url=None
+):
+    """Same logic as in Django's login_required() but with the
+    additional check that we require you to be a superuser also.
+    """
+
+    def check_user(user):
+        return user.is_active and user.is_superuser
+
+    actual_decorator = user_passes_test(
+        check_user,
+        login_url=login_url,
+        redirect_field_name=redirect_field_name
+    )
+    if function:
+        return actual_decorator(function)
+    return actual_decorator
diff --git a/webapp-django/crashstats/manage/tests/test_views.py b/webapp-django/crashstats/manage/tests/test_views.py
index e1bec5a8c4..66205583d7 100644
--- a/webapp-django/crashstats/manage/tests/test_views.py
+++ b/webapp-django/crashstats/manage/tests/test_views.py
@@ -93,7 +93,10 @@ def test_home_page_not_signed_in(self):
         self._login(is_superuser=False)
         response = self.client.get(home_url, follow=True)
         assert response.status_code == 200
-        ok_('You need to be a superuser to access this' in response.content)
+        ok_(
+            'You are signed in but you do not have sufficient permissions '
+            'to reach the resource you requested.' in response.content
+        )
 
     def test_home_page_signed_in(self):
         user = self._login()
diff --git a/webapp-django/crashstats/manage/views.py b/webapp-django/crashstats/manage/views.py
index 5502b0e108..3cb5e6bd67 100644
--- a/webapp-django/crashstats/manage/views.py
+++ b/webapp-django/crashstats/manage/views.py
@@ -1,6 +1,5 @@
 import collections
 import copy
-import functools
 import hashlib
 import math
 import urllib
@@ -38,6 +37,7 @@
 from crashstats.status.models import StatusMessage
 from crashstats.symbols.models import SymbolsUpload
 from crashstats.crashstats.utils import json_view
+from crashstats.manage.decorators import superuser_required
 
 from . 
import utils

@@ -86,21 +86,6 @@ def notice_change(before, after):
     raise NotImplementedError(before.__class__.__name__)
 
 
-def superuser_required(view_func):
-    @functools.wraps(view_func)
-    def inner(request, *args, **kwargs):
-        if not request.user.is_active:
-            return redirect(settings.LOGIN_URL)
-        elif not request.user.is_superuser:
-            messages.error(
-                request,
-                'You need to be a superuser to access this.'
-            )
-            return redirect('home:home', settings.DEFAULT_PRODUCT)
-        return view_func(request, *args, **kwargs)
-    return inner
-
-
 @superuser_required
 def home(request, default_context=None):
     context = default_context or {}

From d1702fc6e38439d2cc53c296e26f3353fef06b89 Mon Sep 17 00:00:00 2001
From: Chris Lonnen
Date: Tue, 13 Sep 2016 16:34:08 -0700
Subject: [PATCH 03/13] fixes bug 1300381 - remove scripts/staging (#3464)

The scripts/staging directory contains helper methods for extracting
and loading a minidb. It has been unused in dev since the introduction
of setupdb in socorro v8. This removes it from master.
---
 scripts/staging/README.rst                    |   8 -
 scripts/staging/afterload.sh                  |  19 --
 scripts/staging/backupdatadir.sh              |  17 --
 scripts/staging/dumpschema.sh                 |  29 ----
 scripts/staging/extractMiniDB.py              | 127 --------------
 scripts/staging/fake_raw_adu.py               |  56 ------
 scripts/staging/loadMiniDBonDev.py            | 162 ------------------
 scripts/staging/loadprep.sh                   |  17 --
 scripts/staging/postsql/README.rst            |  15 --
 .../staging/postsql/crash_by_user_views.sql   |  47 -----
 .../staging/postsql/current_server_status.sql |  14 --
 scripts/staging/postsql/default_versions.sql  |   8 -
 .../postsql/default_versions_builds.sql       |   8 -
 scripts/staging/postsql/hang_report.sql       |  16 --
 .../staging/postsql/home_page_graph_views.sql |  33 ----
 .../staging/postsql/performance_check_1.sql   |  21 ---
 scripts/staging/postsql/postsql.sh            |  15 --
 .../staging/postsql/product_crash_ratio.sql   |  57 ------
 scripts/staging/postsql/product_info.sql      |  11 --
 scripts/staging/postsql/product_selector.sql  |   5 -
 scripts/staging/restoredatadir.sh             |  17 --
 scripts/staging/sync_raw_adu.py               |  48 ------
 22 files changed, 750 deletions(-)
 delete mode 100644 scripts/staging/README.rst
 delete mode 100755 scripts/staging/afterload.sh
 delete mode 100755 scripts/staging/backupdatadir.sh
 delete mode 100755 scripts/staging/dumpschema.sh
 delete mode 100755 scripts/staging/extractMiniDB.py
 delete mode 100644 scripts/staging/fake_raw_adu.py
 delete mode 100644 scripts/staging/loadMiniDBonDev.py
 delete mode 100755 scripts/staging/loadprep.sh
 delete mode 100644 scripts/staging/postsql/README.rst
 delete mode 100644 scripts/staging/postsql/crash_by_user_views.sql
 delete mode 100644 scripts/staging/postsql/current_server_status.sql
 delete mode 100644 scripts/staging/postsql/default_versions.sql
 delete mode 100644 scripts/staging/postsql/default_versions_builds.sql
 delete mode 100644 scripts/staging/postsql/hang_report.sql
 delete mode 100644 scripts/staging/postsql/home_page_graph_views.sql
 delete mode 100644 scripts/staging/postsql/performance_check_1.sql
 delete mode 100755 scripts/staging/postsql/postsql.sh
 delete mode 100644 scripts/staging/postsql/product_crash_ratio.sql
 delete mode 100644 scripts/staging/postsql/product_info.sql
 delete mode 100644 scripts/staging/postsql/product_selector.sql
 delete mode 100755 scripts/staging/restoredatadir.sh
 delete mode 100755 scripts/staging/sync_raw_adu.py

diff --git a/scripts/staging/README.rst b/scripts/staging/README.rst
deleted file mode 100644
index f39930ef31..0000000000
--- a/scripts/staging/README.rst
+++ /dev/null
@@ -1,8 
+0,0 @@ -.. This Source Code Form is subject to the terms of the Mozilla Public -.. License, v. 2.0. If a copy of the MPL was not distributed with this -.. file, You can obtain one at http://mozilla.org/MPL/2.0/. - -MiniDB Scripts -============== - -docs moved to docs/databasescripts.rst diff --git a/scripts/staging/afterload.sh b/scripts/staging/afterload.sh deleted file mode 100755 index 84adcadd4e..0000000000 --- a/scripts/staging/afterload.sh +++ /dev/null @@ -1,19 +0,0 @@ -#!/bin/bash -# This Source Code Form is subject to the terms of the Mozilla Public -# License, v. 2.0. If a copy of the MPL was not distributed with this -# file, You can obtain one at http://mozilla.org/MPL/2.0/. - - -set -e - -cp /pgdata/9.0/data/postgresql.conf.prod /pgdata/9.0/data/postgresql.conf - -/etc/init.d/postgresql-9.0 restart - -su -l -c "psql -f ~postgres/update_staging_passwords.sql" postgres - -/etc/init.d/pgbouncer-web start -/etc/init.d/pgbouncer-processor start - -exit 0 - diff --git a/scripts/staging/backupdatadir.sh b/scripts/staging/backupdatadir.sh deleted file mode 100755 index 16c6c4a667..0000000000 --- a/scripts/staging/backupdatadir.sh +++ /dev/null @@ -1,17 +0,0 @@ -#!/bin/bash -# This Source Code Form is subject to the terms of the Mozilla Public -# License, v. 2.0. If a copy of the MPL was not distributed with this -# file, You can obtain one at http://mozilla.org/MPL/2.0/. - - -set -e - -/etc/init.d/postgresql-9.0 stop - -rm -rf /pgdata/backupdata/* - -cp -r -p -v /pgdata/9.0/data/* /pgdata/backupdata/ - -/etc/init.d/postgresql-9.0 start - -exit 0 \ No newline at end of file diff --git a/scripts/staging/dumpschema.sh b/scripts/staging/dumpschema.sh deleted file mode 100755 index 9d21ab93db..0000000000 --- a/scripts/staging/dumpschema.sh +++ /dev/null @@ -1,29 +0,0 @@ -#!/bin/bash - -DB=$1 -USER=$2 -PORT=$4 -: ${USER:="postgres"} -: ${DB:="breakpad"} -if [ -z $3 ] -then - HOST='' -else - HOST=" -h $2" -fi -: ${PORT:="5432"} - -TODAY=`date +%Y%m%d` - -pg_dump $HOST -p $PORT -s -U $USER \ - -T high_load_temp \ - -T locks* \ - -T activity_snapshot \ - -T product_info_changelog \ - -T '*_201*' \ - -T 'priority_jobs_*' \ - $DB > schema-$DB-$TODAY.sql - -echo 'schema dumped' - -exit 0 diff --git a/scripts/staging/extractMiniDB.py b/scripts/staging/extractMiniDB.py deleted file mode 100755 index 3b843b031a..0000000000 --- a/scripts/staging/extractMiniDB.py +++ /dev/null @@ -1,127 +0,0 @@ -#!/usr/bin/python -# This Source Code Form is subject to the terms of the Mozilla Public -# License, v. 2.0. If a copy of the MPL was not distributed with this -# file, You can obtain one at http://mozilla.org/MPL/2.0/. 
- - -import sys -import os -import psycopg2 -import psycopg2.extensions -from optparse import OptionParser - -# extracts a database from a copy of production breakpad -# consisting of only the last # weeks of data, more or less -# the resulting tgz file needs to be loaded with loadMiniDBonDev.py -# does not currently dump users - -parser = OptionParser() -parser.add_option("-w", "--weeks", dest="num_weeks", type="int", - help="number of weeks to extract", metavar="#", - default=2) -parser.add_option("-d", "--database", dest="database_name", - help="database to be extracted", metavar="DBNAME", - default="breakpad") -parser.add_option("-f", "--file", dest="tar_file", - help="extractdb tarball to be created", metavar="FILE", - default="extractdb.tgz") - -(options, args) = parser.parse_args() - -# simple shell command runner -def rundump(dump_command): - dump_result = os.system(dump_command) - if dump_result != 0: - sys.exit(dump_result) - -print "Extracting %s weeks of data" % options.num_weeks - -#connect to postgresql -conn = psycopg2.connect("dbname=%s user=postgres" % options.database_name) - -cur = conn.cursor() - -# get the list of weekly partitions to NOT dump -cur.execute(""" -SELECT array_to_string( array_agg ( ' -T ' || relname ), ' ' ) - FROM pg_stat_user_tables - WHERE relname ~* $x$_20\d+$$x$ -AND substring(relname FROM $x$_(20\d+)$$x$) < - to_char( ( now() - ( ( %s + 1 ) * interval '1 week') ), 'YYYYMMDD'); - """, (options.num_weeks,)) - -no_dump = str(cur.fetchone()[0]) - -#get the date of truncation -cur.execute(""" - SELECT to_date(substring(relname FROM $x$_(20\d+)$$x$),'YYYYMMDD') - FROM pg_stat_user_tables - WHERE relname LIKE 'reports_20%%' - AND substring(relname FROM $x$_(20\d+)$$x$) >= -to_char( ( now() - ( ( %s + 1 ) * interval '1 week') ), 'YYYYMMDD') - ORDER BY relname LIMIT 1; - """, (options.num_weeks,)) - -cutoff_date = str(cur.fetchone()[0]) - -# dump the list of matviews one at a time. 
consult dictionary -# for the queries to retrieve each set of truncated data - -# cycle through the list of matviews -# and tables with data that needs to be cleaned -# dump those with no data - -matviews = {'raw_adi' - : """SELECT * FROM raw_adi WHERE raw_adi.date >= '%s'""" % cutoff_date, - 'releases_raw' - : """SELECT releases_raw.* FROM releases_raw WHERE build_date(build_id) - >= ( DATE '%s' - 180 ) """ % cutoff_date, - 'product_adu' : """SELECT product_adu.* FROM product_adu WHERE adu_date >= '%s'""" % cutoff_date, - 'tcbs' : """SELECT tcbs.* FROM tcbs WHERE report_date >= '%s'""" % cutoff_date, - 'tcbs_build' : """SELECT * FROM tcbs_build WHERE build_date >= '%s'""" % cutoff_date, - 'sessions' : """SELECT * FROM sessions WHERE false""", - 'server_status' : """SELECT * FROM server_status WHERE false""", - 'reports_bad' : """SELECT * FROM reports_bad WHERE false""", - 'reports_duplicates' - : """SELECT * FROM reports_duplicates WHERE date_processed >= '%s'""" % cutoff_date, - 'daily_hangs' - : """SELECT * FROM daily_hangs WHERE report_date >= '%s'""" % cutoff_date, - 'build_adu' : """SELECT * FROM build_adu WHERE build_date >= '%s'""" % cutoff_date, - 'crashes_by_user' : """SELECT * FROM crashes_by_user WHERE report_date >= '%s'""" % cutoff_date, - 'crashes_by_user_build' : """SELECT * FROM crashes_by_user_build WHERE build_date >= '%s'""" % cutoff_date, - 'home_page_graph' : """SELECT * FROM home_page_graph WHERE report_date >= '%s'""" % cutoff_date, - 'home_page_graph_build' : """SELECT * FROM home_page_graph_build WHERE build_date >= '%s'""" % cutoff_date, - 'nightly_builds' : """SELECT * FROM nightly_builds WHERE report_date >= '%s'""" % cutoff_date - } - -no_dump_all = no_dump + ' -T "priority_jobs_*" -T ' + ' -T '.join(matviews) -# don't dump priority jobs queues either - -print "truncating all data before %s" % cutoff_date - -#pg_dump most of the database -print 'dumping most of the database' -rundump('pg_dump -Fc -U postgres ' + no_dump_all + ' breakpad -f minidb.dump') - -# copy truncated data for each matview - -for matview in matviews: - print 'dumping %s' % matview - dumpstring = """psql -U postgres -c "\copy ( """ + matviews[matview] + """ ) to """ + matview + """.dump" breakpad""" - rundump(dumpstring) - -# dump the schema for the matviews: -rundump('pg_dump -Fc -s -t' + ' -t '.join(matviews) + ' -f matview_schemas.dump breakpad') - -#DUMP the users and logins - -rundump('pg_dumpall -U postgres -r -f users.dump') - -#remove password sets - -rundump('sed -i "s/PASSWORD \'.*\'//" users.dump') - -rundump("tar -cvzf %s *.dump" % options.tar_file) -rundump('rm *.dump') - -print 'done extracting database' diff --git a/scripts/staging/fake_raw_adu.py b/scripts/staging/fake_raw_adu.py deleted file mode 100644 index 26500f36e6..0000000000 --- a/scripts/staging/fake_raw_adu.py +++ /dev/null @@ -1,56 +0,0 @@ -#!/usr/bin/python -# This Source Code Form is subject to the terms of the Mozilla Public -# License, v. 2.0. If a copy of the MPL was not distributed with this -# file, You can obtain one at http://mozilla.org/MPL/2.0/. 
- -import sys -import psycopg2 -import psycopg2.extensions -import psycopg2.extras - -#connect to CSD database -csd = psycopg2.connect("dbname=breakpad user=postgres port=5432") -csd_cur = csd.cursor() - -# check if we already have ADU for the day -csd_cur.execute(""" - SELECT COUNT(*) FROM raw_adi WHERE "date" = 'yesterday'::date -""") - -if csd_cur.fetchone()[0] > 0: - sys.stderr.write('raw_adi has already been exported for yesterday\n') - sys.exit(-1) - -#dump raw_adi from previous day and reinsert faked data -csd_cur.execute(""" - INSERT into raw_adi ( - adi_count, - date, - product_name, - product_os_platform, - product_os_version, - product_version, - build, - product_guid, - update_channel - ) - ( - SELECT adi_count, - 'yesterday'::date as "date", - product_name, - product_os_platform, - product_os_version, - product_version, - build, - product_guid, - update_channel - FROM raw_adi - WHERE date in (select max(date) from raw_adi) - ) -""") -csd.commit() -csd.close() - -print 'raw_adi successfully updated' - -sys.exit(0) diff --git a/scripts/staging/loadMiniDBonDev.py b/scripts/staging/loadMiniDBonDev.py deleted file mode 100644 index 0edc52d95b..0000000000 --- a/scripts/staging/loadMiniDBonDev.py +++ /dev/null @@ -1,162 +0,0 @@ -#!/usr/bin/python -# This Source Code Form is subject to the terms of the Mozilla Public -# License, v. 2.0. If a copy of the MPL was not distributed with this -# file, You can obtain one at http://mozilla.org/MPL/2.0/. - - -import sys -import os -import psycopg2 -import psycopg2.extensions -from optparse import OptionParser - -# loads a file created with extractminidb.py - -# intended only for use on DevDB, as it uses an experimental -# version of PostgreSQL's pg_restore which is installed there -# if you need a minidb on another server, restore this on devdb -# and then dump from there - -# creates users without changing passwords -# takes two arguments, the archive name holding the data -# and optionally the database name to restore - -# note that this script will fail unless you first kick -# all users off the database system. 
on stagedb, try -# running beforeload.sh first - -parser = OptionParser() -parser.add_option("-f", "--file", dest="tar_file", - help="extractdb tarball to be loaded", metavar="FILE", - default="extractdb.tgz") -parser.add_option("-d", "--database", dest="database_name", - help="database to be loaded", metavar="DBNAME", - default="breakpad") -parser.add_option("-P", "--postscript", dest="postsql", - help="post-load shell script", - default="/data/socorro/application/scripts/staging/postsql/postsql.sh") -(options, args) = parser.parse_args() - -print "Loading data" - -def runload(load_command): - load_result = os.system(load_command) - if load_result != 0: - sys.exit(load_result) - -matviews = ['raw_adi', - 'releases_raw', - 'product_adu', - 'tcbs', - 'tcbs_build', - 'sessions', - 'server_status', - 'reports_bad', - 'reports_duplicates', - 'daily_hangs', - 'build_adu', - 'crashes_by_user', - 'crashes_by_user_build', - 'home_page_graph', - 'home_page_graph_build', - 'nightly_builds' - ] - -# untar the file -runload('tar -xzf %s' % options.tar_file) - -#connect to postgresql -conn = psycopg2.connect("dbname=postgres user=postgres") - -conn.set_isolation_level(psycopg2.extensions.ISOLATION_LEVEL_AUTOCOMMIT) - -cur = conn.cursor() - -print 'drop and recreate the database' - -# drop the database and recreate it -try: - cur.execute("""DROP DATABASE %s;""" % options.database_name) -except psycopg2.Error as exc: - code = exc.pgcode - if code == '3D000': - pass - else: - # if this failed, check why. - sys.exit('unable to drop database %s probably because connections to it are still open: %s' - % (options.database_name, code,)) - -cur.execute("""CREATE DATABASE %s""" % options.database_name) - -print 'load users. please ignore any errors you see here' - -os.system('psql -q -v verbosity=terse -U postgres -f users.dump %s' % options.database_name) - -print 'load most of the database' - -# dump a list of objects - -# load everything else but not indexes and constraints -# needs to ignore errors - -os.system('/usr/local/pgsql/bin/pg_restore -j 3 -Fc --no-post-data -U postgres minidb.dump -d %s' - % options.database_name) - -print 'load the truncated materialized views' - -# restore the matview schema -# needs to ignore errors - -os.system('/usr/local/pgsql/bin/pg_restore -Fc --no-post-data -U postgres matview_schemas.dump -d %s' - % options.database_name) - -# restore matview data, one matview at a time - -for matview in matviews: - print "loading %s" % matview - runload("""psql -c "\copy %s FROM %s.dump" -U postgres %s""" % (matview, matview, options.database_name,)) - -# restore indexes and constraints - -print 'restore indexes and constraints' - -runload('/usr/local/pgsql/bin/pg_restore -j 3 -Fc --post-data-only -U postgres minidb.dump -d %s' % options.database_name) -runload('/usr/local/pgsql/bin/pg_restore -j 3 -Fc --post-data-only -U postgres matview_schemas.dump -d %s' % options.database_name) - -# truncate soon-to-be-dropped tables -# conn.disconnect() - -conn = psycopg2.connect("dbname=%s user=postgres" % options.database_name) - -conn.set_isolation_level(psycopg2.extensions.ISOLATION_LEVEL_AUTOCOMMIT) - -cur = conn.cursor() - -cur.execute(""" - DO $f$ - DECLARE tab TEXT; - BEGIN - FOR tab IN SELECT relname - FROM pg_stat_user_tables - WHERE relname LIKE 'frames%' LOOP - - EXECUTE 'TRUNCATE ' || tab; - - END LOOP; - END; $f$; - """) - -# load views which break on pg_restore, such as hang_report - -runload(options.postsql) - -#delete all the dump files - -runload('rm *.dump') - -# analyze - 
-cur.execute("""SET maintenance_work_mem = '512MB'""") -cur.execute('ANALYZE') - -print 'done loading database.' diff --git a/scripts/staging/loadprep.sh b/scripts/staging/loadprep.sh deleted file mode 100755 index 7cb1c6b4e5..0000000000 --- a/scripts/staging/loadprep.sh +++ /dev/null @@ -1,17 +0,0 @@ -#!/bin/bash -# This Source Code Form is subject to the terms of the Mozilla Public -# License, v. 2.0. If a copy of the MPL was not distributed with this -# file, You can obtain one at http://mozilla.org/MPL/2.0/. - - -set -e - -/etc/init.d/pgbouncer-web stop -/etc/init.d/pgbouncer-processor stop - -cp /pgdata/9.0/data/postgresql.conf.localonly /pgdata/9.0/data/postgresql.conf - -/etc/init.d/postgresql-9.0 restart - -exit 0 - diff --git a/scripts/staging/postsql/README.rst b/scripts/staging/postsql/README.rst deleted file mode 100644 index a219481c3a..0000000000 --- a/scripts/staging/postsql/README.rst +++ /dev/null @@ -1,15 +0,0 @@ -.. This Source Code Form is subject to the terms of the Mozilla Public -.. License, v. 2.0. If a copy of the MPL was not distributed with this -.. file, You can obtain one at http://mozilla.org/MPL/2.0/. - -View Restore Scripts for Staging -================================ - -This directory contains SQL scripts for views which depend on matviews, and generally fail to load during backup/restore as part of the MiniDB database-shrinking process. The new LoadMiniDB.py script will load these views, one at a time, at the end of restoring the database. - -If loadMiniDB.py does not run these scripts because it cannot find the file location, then they can be run with the one-line shell script, loadviews.sh: - -loadviews.sh {databasename} - -Default databasename is "breakpad". This script must be run as the database superuser. - diff --git a/scripts/staging/postsql/crash_by_user_views.sql b/scripts/staging/postsql/crash_by_user_views.sql deleted file mode 100644 index bd59fd749d..0000000000 --- a/scripts/staging/postsql/crash_by_user_views.sql +++ /dev/null @@ -1,47 +0,0 @@ -\set ON_ERROR_STOP 1 - -BEGIN; - -DROP VIEW IF EXISTS crashes_by_user_build_view; - -CREATE OR REPLACE VIEW crashes_by_user_build_view AS -SELECT crashes_by_user_build.product_version_id, - product_versions.product_name, version_string, - os_short_name, os_name, crash_type, crash_type_short, - crashes_by_user_build.build_date, - sum(report_count) as report_count, - sum(report_count / throttle) as adjusted_report_count, - sum(adu) as adu, throttle -FROM crashes_by_user_build - JOIN product_versions USING (product_version_id) - JOIN product_release_channels ON - product_versions.product_name = product_release_channels.product_name - AND product_versions.build_type = product_release_channels.release_channel - JOIN os_names USING (os_short_name) - JOIN crash_types USING (crash_type_id) -WHERE crash_types.include_agg -GROUP BY crashes_by_user_build.product_version_id, - product_versions.product_name, version_string, - os_short_name, os_name, crash_type, crash_type_short, - crashes_by_user_build.build_date, throttle; - -ALTER VIEW crashes_by_user_build_view OWNER TO breakpad_rw; - -DROP VIEW IF EXISTS crashes_by_user_view; - -CREATE OR REPLACE VIEW crashes_by_user_view AS -SELECT crashes_by_user.product_version_id, - product_versions.product_name, version_string, - os_short_name, os_name, crash_type, crash_type_short, report_date, - report_count, (report_count / throttle) as adjusted_report_count, - adu, throttle -FROM crashes_by_user - JOIN product_versions USING (product_version_id) - JOIN 
product_release_channels ON - product_versions.product_name = product_release_channels.product_name - AND product_versions.build_type = product_release_channels.release_channel - JOIN os_names USING (os_short_name) - JOIN crash_types USING (crash_type_id) -WHERE crash_types.include_agg; - -ALTER VIEW crashes_by_user_view OWNER TO breakpad_rw; \ No newline at end of file diff --git a/scripts/staging/postsql/current_server_status.sql b/scripts/staging/postsql/current_server_status.sql deleted file mode 100644 index 2b221169c4..0000000000 --- a/scripts/staging/postsql/current_server_status.sql +++ /dev/null @@ -1,14 +0,0 @@ - - -CREATE OR REPLACE VIEW current_server_status AS - SELECT server_status.date_recently_completed, server_status.date_oldest_job_queued, date_part('epoch'::text, (server_status.date_created - server_status.date_oldest_job_queued)) AS oldest_job_age, server_status.avg_process_sec, server_status.avg_wait_sec, server_status.waiting_job_count, server_status.processors_count, server_status.date_created FROM server_status ORDER BY server_status.date_created DESC LIMIT 1; - - -ALTER TABLE public.current_server_status OWNER TO breakpad_rw; - -REVOKE ALL ON TABLE current_server_status FROM PUBLIC; -REVOKE ALL ON TABLE current_server_status FROM breakpad_rw; -GRANT ALL ON TABLE current_server_status TO breakpad_rw; -GRANT SELECT ON TABLE current_server_status TO monitoring; - - diff --git a/scripts/staging/postsql/default_versions.sql b/scripts/staging/postsql/default_versions.sql deleted file mode 100644 index 72fd908e02..0000000000 --- a/scripts/staging/postsql/default_versions.sql +++ /dev/null @@ -1,8 +0,0 @@ - -CREATE VIEW default_versions AS - SELECT count_versions.product_name, count_versions.version_string, count_versions.product_version_id FROM (SELECT product_info.product_name, product_info.version_string, product_info.product_version_id, row_number() OVER (PARTITION BY product_info.product_name ORDER BY ((('now'::text)::date >= product_info.start_date) AND (('now'::text)::date <= product_info.end_date)) DESC, product_info.is_featured DESC, product_info.channel_sort DESC) AS sort_count FROM product_info) count_versions WHERE (count_versions.sort_count = 1); - - -ALTER TABLE public.default_versions OWNER TO breakpad_rw; - - diff --git a/scripts/staging/postsql/default_versions_builds.sql b/scripts/staging/postsql/default_versions_builds.sql deleted file mode 100644 index 6a5f20dde4..0000000000 --- a/scripts/staging/postsql/default_versions_builds.sql +++ /dev/null @@ -1,8 +0,0 @@ - - -CREATE OR REPLACE VIEW default_versions_builds AS - SELECT count_versions.product_name, count_versions.version_string, count_versions.product_version_id FROM (SELECT product_info.product_name, product_info.version_string, product_info.product_version_id, row_number() OVER (PARTITION BY product_info.product_name ORDER BY ((('now'::text)::date >= product_info.start_date) AND (('now'::text)::date <= product_info.end_date)) DESC, product_info.is_featured DESC, product_info.channel_sort DESC) AS sort_count FROM product_info WHERE product_info.has_builds) count_versions WHERE (count_versions.sort_count = 1); - - -ALTER TABLE public.default_versions_builds OWNER TO breakpad_rw; - diff --git a/scripts/staging/postsql/hang_report.sql b/scripts/staging/postsql/hang_report.sql deleted file mode 100644 index d1b0368115..0000000000 --- a/scripts/staging/postsql/hang_report.sql +++ /dev/null @@ -1,16 +0,0 @@ - - -CREATE OR REPLACE VIEW hang_report AS - SELECT product_versions.product_name AS product, 
product_versions.version_string AS version, browser_signatures.signature AS browser_signature, plugin_signatures.signature AS plugin_signature, daily_hangs.hang_id AS browser_hangid, flash_versions.flash_version, daily_hangs.url, daily_hangs.uuid, daily_hangs.duplicates, daily_hangs.report_date AS report_day FROM ((((daily_hangs JOIN product_versions USING (product_version_id)) JOIN signatures browser_signatures ON ((daily_hangs.browser_signature_id = browser_signatures.signature_id))) JOIN signatures plugin_signatures ON ((daily_hangs.plugin_signature_id = plugin_signatures.signature_id))) LEFT JOIN flash_versions USING (flash_version_id)); - - -ALTER TABLE public.hang_report OWNER TO breakpad_rw; - -REVOKE ALL ON TABLE hang_report FROM PUBLIC; -REVOKE ALL ON TABLE hang_report FROM breakpad_rw; -GRANT ALL ON TABLE hang_report TO breakpad_rw; -GRANT SELECT ON TABLE hang_report TO breakpad; -GRANT SELECT ON TABLE hang_report TO breakpad_ro; -GRANT ALL ON TABLE hang_report TO monitor; - - diff --git a/scripts/staging/postsql/home_page_graph_views.sql b/scripts/staging/postsql/home_page_graph_views.sql deleted file mode 100644 index 5557dad7a6..0000000000 --- a/scripts/staging/postsql/home_page_graph_views.sql +++ /dev/null @@ -1,33 +0,0 @@ -CREATE OR REPLACE VIEW home_page_graph_view -AS -SELECT product_version_id, - product_name, - version_string, - report_date, - report_count, - adu, - crash_hadu -FROM home_page_graph - JOIN product_versions USING (product_version_id); - -ALTER VIEW home_page_graph_view OWNER TO breakpad_rw; - - -CREATE OR REPLACE VIEW home_page_graph_build_view -AS -SELECT product_version_id, - product_versions.product_name, - version_string, - home_page_graph_build.build_date, - sum(report_count) as report_count, - sum(adu) as adu, - crash_hadu(sum(report_count), sum(adu), throttle) as crash_hadu -FROM home_page_graph_build - JOIN product_versions USING (product_version_id) - JOIN product_release_channels ON - product_versions.product_name = product_release_channels.product_name - AND product_versions.build_type = product_release_channels.release_channel -GROUP BY product_version_id, product_versions.product_name, - version_string, home_page_graph_build.build_date, throttle; - -ALTER VIEW home_page_graph_build_view OWNER TO breakpad_rw; \ No newline at end of file diff --git a/scripts/staging/postsql/performance_check_1.sql b/scripts/staging/postsql/performance_check_1.sql deleted file mode 100644 index 0c2cbaba27..0000000000 --- a/scripts/staging/postsql/performance_check_1.sql +++ /dev/null @@ -1,21 +0,0 @@ - - -CREATE VIEW performance_check_1 AS - SELECT sum(report_count) FROM tcbs - WHERE report_date BETWEEN ( current_date - 7 ) and current_date; - - -ALTER TABLE public.performance_check_1 OWNER TO ganglia; - --- --- Name: performance_check_1; Type: ACL; Schema: public; Owner: ganglia --- - -REVOKE ALL ON TABLE performance_check_1 FROM PUBLIC; -REVOKE ALL ON TABLE performance_check_1 FROM ganglia; -GRANT ALL ON TABLE performance_check_1 TO ganglia; -GRANT SELECT ON TABLE performance_check_1 TO breakpad; -GRANT SELECT ON TABLE performance_check_1 TO breakpad_ro; -GRANT ALL ON TABLE performance_check_1 TO monitor; - - diff --git a/scripts/staging/postsql/postsql.sh b/scripts/staging/postsql/postsql.sh deleted file mode 100755 index b9e2b034dc..0000000000 --- a/scripts/staging/postsql/postsql.sh +++ /dev/null @@ -1,15 +0,0 @@ -#!/bin/bash -# This Source Code Form is subject to the terms of the Mozilla Public -# License, v. 2.0. 
If a copy of the MPL was not distributed with this -# file, You can obtain one at http://mozilla.org/MPL/2.0/. - - -set -e - -CURDIR=$(dirname $0) - -# load all views in this directory -psql -f $CURDIR/*.sql breakpad - -#done -exit 0 diff --git a/scripts/staging/postsql/product_crash_ratio.sql b/scripts/staging/postsql/product_crash_ratio.sql deleted file mode 100644 index 7813567cdc..0000000000 --- a/scripts/staging/postsql/product_crash_ratio.sql +++ /dev/null @@ -1,57 +0,0 @@ -\set ON_ERROR_STOP 1 - -BEGIN; - -DROP VIEW IF EXISTS crashes_by_user_rollup; - -CREATE VIEW crashes_by_user_rollup AS -SELECT product_version_id, report_date, - os_short_name, - sum(report_count) as report_count, - min(adu) as adu - FROM crashes_by_user - JOIN crash_types USING (crash_type_id) - WHERE crash_types.include_agg - GROUP BY product_version_id, report_date, os_short_name; - -DROP VIEW IF EXISTS product_crash_ratio; - -CREATE OR REPLACE VIEW product_crash_ratio AS -SELECT crcounts.product_version_id, product_versions.product_name, - version_string, report_date as adu_date, - sum(report_count)::bigint as crashes, - sum(adu) as adu_count, throttle::numeric(5,2), - sum(report_count/throttle)::int as adjusted_crashes, - crash_hadu(sum(report_count)::bigint, sum(adu), throttle) as crash_ratio -FROM crashes_by_user_rollup as crcounts - JOIN product_versions ON crcounts.product_version_id = product_versions.product_version_id - JOIN product_release_channels - ON product_versions.product_name = product_release_channels.product_name - AND product_versions.build_type = product_release_channels.release_channel -GROUP BY crcounts.product_version_id, product_versions.product_name, - version_string, report_date, throttle; - -ALTER VIEW product_crash_ratio OWNER TO breakpad_rw; -GRANT SELECT ON product_crash_ratio TO analyst; - -DROP VIEW IF EXISTS product_os_crash_ratio; - -CREATE OR REPLACE VIEW product_os_crash_ratio AS -SELECT crcounts.product_version_id, product_versions.product_name, - version_string, os_names.os_short_name, os_names.os_name, report_date as adu_date, - sum(report_count)::bigint as crashes, sum(adu) as adu_count, throttle::numeric(5,2), - sum(report_count/throttle)::int as adjusted_crashes, - crash_hadu(sum(report_count)::bigint, sum(adu), throttle) as crash_ratio -FROM crashes_by_user_rollup AS crcounts - JOIN product_versions ON crcounts.product_version_id = product_versions.product_version_id - JOIN os_names ON crcounts.os_short_name::citext = os_names.os_short_name - JOIN product_release_channels ON product_versions.product_name - = product_release_channels.product_name - AND product_versions.build_type = product_release_channels.release_channel -GROUP BY crcounts.product_version_id, product_versions.product_name, - version_string, os_name, os_names.os_short_name, report_date, throttle;; - -ALTER VIEW product_os_crash_ratio OWNER TO breakpad_rw; -GRANT SELECT ON product_os_crash_ratio TO analyst; - -COMMIT; \ No newline at end of file diff --git a/scripts/staging/postsql/product_info.sql b/scripts/staging/postsql/product_info.sql deleted file mode 100644 index 12ca4c1905..0000000000 --- a/scripts/staging/postsql/product_info.sql +++ /dev/null @@ -1,11 +0,0 @@ - -CREATE OR REPLACE VIEW product_info AS - SELECT product_versions.product_version_id, product_versions.product_name, product_versions.version_string, 'new'::text AS which_table, product_versions.build_date AS start_date, product_versions.sunset_date AS end_date, product_versions.featured_version AS is_featured, 
product_versions.build_type, ((product_release_channels.throttle * (100)::numeric))::numeric(5,2) AS throttle, product_versions.version_sort, products.sort AS product_sort, release_channels.sort AS channel_sort, ((product_versions.build_type = ANY (ARRAY['Aurora'::citext, 'Nightly'::citext])) OR ((product_versions.build_type = 'Beta'::citext) AND (major_version_sort((product_versions.major_version)::text) <= major_version_sort((products.rapid_beta_version)::text)))) AS has_builds FROM (((product_versions JOIN product_release_channels ON (((product_versions.product_name = product_release_channels.product_name) AND (product_versions.build_type = product_release_channels.release_channel)))) JOIN products ON ((product_versions.product_name = products.product_name))) JOIN release_channels ON ((product_versions.build_type = release_channels.release_channel))) ORDER BY product_versions.product_name, product_versions.version_string; - - -ALTER TABLE public.product_info OWNER TO breakpad_rw; - --- --- PostgreSQL database dump complete --- - diff --git a/scripts/staging/postsql/product_selector.sql b/scripts/staging/postsql/product_selector.sql deleted file mode 100644 index 08f8e4e665..0000000000 --- a/scripts/staging/postsql/product_selector.sql +++ /dev/null @@ -1,5 +0,0 @@ - -CREATE OR REPLACE VIEW product_selector AS - SELECT product_versions.product_name, product_versions.version_string, 'new'::text AS which_table, product_versions.version_sort FROM product_versions WHERE (now() <= product_versions.sunset_date) ORDER BY product_versions.product_name, product_versions.version_string; - -ALTER TABLE product_selector OWNER TO breakpad_rw; diff --git a/scripts/staging/restoredatadir.sh b/scripts/staging/restoredatadir.sh deleted file mode 100755 index ca26910053..0000000000 --- a/scripts/staging/restoredatadir.sh +++ /dev/null @@ -1,17 +0,0 @@ -#!/bin/bash -# This Source Code Form is subject to the terms of the Mozilla Public -# License, v. 2.0. If a copy of the MPL was not distributed with this -# file, You can obtain one at http://mozilla.org/MPL/2.0/. - - -set -e - -/etc/init.d/postgresql-9.0 stop - -rm -rf /pgdata/9.0/data/* - -cp -r -p -v /pgdata/backupdata/* /pgdata/9.0/data/ - -/etc/init.d/postgresql-9.0 start - -exit 0 \ No newline at end of file diff --git a/scripts/staging/sync_raw_adu.py b/scripts/staging/sync_raw_adu.py deleted file mode 100755 index 4ea046535a..0000000000 --- a/scripts/staging/sync_raw_adu.py +++ /dev/null @@ -1,48 +0,0 @@ -#!/usr/bin/python -# This Source Code Form is subject to the terms of the Mozilla Public -# License, v. 2.0. If a copy of the MPL was not distributed with this -# file, You can obtain one at http://mozilla.org/MPL/2.0/. 
-
-# In production on stage db
-
-import os
-from os.path import join, getsize
-import sys
-import psycopg2
-import psycopg2.extensions
-import psycopg2.extras
-
-#connect to CSD database
-csd = psycopg2.connect("dbname=breakpad user=postgres port=5432")
-csd_cur = csd.cursor()
-# check if we already have ADU for the day
-csd_cur.execute("""SELECT COUNT(*) FROM raw_adi WHERE "date" = 'yesterday'::date""")
-
-if (csd_cur.fetchone()[0]) > 0:
-    sys.stderr.write('raw_adi has already been exported for yesterday')
-    sys.exit(-1)
-
-#connect to replayDB
-replay = psycopg2.connect("dbname=breakpad user=postgres port=5499")
-rep_cur = replay.cursor()
-
-# check if we already have ADU for the day
-rep_cur.execute("""SELECT count(*) FROM raw_adi WHERE "date" = 'yesterday'::date""")
-
-if (rep_cur.fetchone()[0]) == 0:
-    sys.stderr.write('no raw_adi in replayDB for yesterday')
-    sys.exit(-2)
-
-#dump raw_adi to file
-rep_cur.execute("""COPY ( SELECT * FROM raw_adi WHERE "date" = 'yesterday'::date )
-TO '/tmp/raw_adi_update.csv' with csv;""")
-replay.close()
-
-#import raw_adi into CSD
-csd_cur.execute("""COPY raw_adi FROM '/tmp/raw_adi_update.csv' with csv;""")
-csd.commit()
-csd.close()
-
-print 'raw_adi successfully updated'
-
-sys.exit(0)

From 991171dcf54fbe40b247ede4a72e4b77b7c64a29 Mon Sep 17 00:00:00 2001
From: Chris Lonnen
Date: Tue, 13 Sep 2016 16:35:26 -0700
Subject: [PATCH 04/13] fixes bug 1301222 with a transform rule (#3462)

* fixes bug 1301222 with a transform rule

add a transform rule to mutate the default theme's extension id into a
more easily identifiable string

* (no bug) - moar docstring for ThemePrettyNameRule

Add an additional caveat to the docstring of the ThemePrettyNameRule

* bug 1301222 - fixup for reviewer comments

alters the prettified name of the default theme according to
nnethercote's PR comments over the suggestion in the original bug

* bug 1301222 - reviewers comments 1

rework the ThemePrettyNameRule to use a dictionary of conversions
rather than detecting one specific conversion.

* (nobug) - pep8 mozilla_transform_rules

pep8 fixes to make mozilla_transform_rules comply with the style guide

* Bug 1301222 - Added tests for predicate.

* bug 1301222 - reviewers comments 2

This alters the predicate behavior in the case that a processed crash
is missing an addons property to use an empty list rather than test
for the property. This preserves the structural similarity between the
predicate and action methods without sacrificing the safety of the
test method.
--- socorro/processor/mozilla_processor_2015.py | 1 + socorro/processor/mozilla_transform_rules.py | 52 +++++++++- .../processor/test_mozilla_transform_rules.py | 94 +++++++++++++++++++ 3 files changed, 143 insertions(+), 4 deletions(-) diff --git a/socorro/processor/mozilla_processor_2015.py b/socorro/processor/mozilla_processor_2015.py index ae05e2e803..161bb12cfc 100644 --- a/socorro/processor/mozilla_processor_2015.py +++ b/socorro/processor/mozilla_processor_2015.py @@ -54,6 +54,7 @@ "socorro.processor.mozilla_transform_rules.OSPrettyVersionRule, " "socorro.processor.mozilla_transform_rules.TopMostFilesRule, " "socorro.processor.mozilla_transform_rules.MissingSymbolsRule, " + "socorro.processor.mozilla_transform_rules.ThemePrettyNameRule, " "socorro.processor.signature_utilities.SignatureGenerationRule," "socorro.processor.signature_utilities.StackwalkerErrorSignatureRule, " "socorro.processor.signature_utilities.OOMSignature, " diff --git a/socorro/processor/mozilla_transform_rules.py b/socorro/processor/mozilla_transform_rules.py index 1db8112c63..b7b92fe043 100644 --- a/socorro/processor/mozilla_transform_rules.py +++ b/socorro/processor/mozilla_transform_rules.py @@ -1054,10 +1054,10 @@ def _get_pretty_os_version(self, processed_crash): elif processed_crash.os_name == 'Mac OS X': if ( - major_version >= 10 - and major_version < 11 - and minor_version >= 0 - and minor_version < 20 + major_version >= 10 and + major_version < 11 and + minor_version >= 0 and + minor_version < 20 ): pretty_name = 'OS X %s.%s' % (major_version, minor_version) else: @@ -1071,3 +1071,47 @@ def _action(self, raw_crash, raw_dumps, processed_crash, processor_meta): processed_crash ) return True + + +#============================================================================== +class ThemePrettyNameRule(Rule): + """The Firefox theme shows up commonly in crash reports referenced by its + internal ID. The ID is not easy to change, and is referenced by id in other + software. + + This rule attempts to modify it to have a more identifiable name, like + other built-in extensions. 
+ + Must be run after the Addons Rule.""" + + #-------------------------------------------------------------------------- + def __init__(self, config): + super(ThemePrettyNameRule, self).__init__(config) + self.conversions = { + "{972ce4c6-7e08-4474-a285-3208198ce6fd}": + "{972ce4c6-7e08-4474-a285-3208198ce6fd} " + "(default Firefox theme)", + } + + #-------------------------------------------------------------------------- + def version(self): + return '1.0' + + #-------------------------------------------------------------------------- + def _predicate(self, raw_crash, raw_dumps, processed_crash, proc_meta): + '''addons is a list of tuples containing (extension, version)''' + addons = processed_crash.get('addons', []) + + for extension, version in addons: + if extension in self.conversions: + return True + return False + + #-------------------------------------------------------------------------- + def _action(self, raw_crash, raw_dumps, processed_crash, processor_meta): + addons = processed_crash.addons + + for index, (extension, version) in enumerate(addons): + if extension in self.conversions: + addons[index] = (self.conversions[extension], version) + return True diff --git a/socorro/unittest/processor/test_mozilla_transform_rules.py b/socorro/unittest/processor/test_mozilla_transform_rules.py index b1d97138fc..0bb0a421ca 100644 --- a/socorro/unittest/processor/test_mozilla_transform_rules.py +++ b/socorro/unittest/processor/test_mozilla_transform_rules.py @@ -36,6 +36,7 @@ MissingSymbolsRule, BetaVersionRule, OSPrettyVersionRule, + ThemePrettyNameRule, ) canonical_standard_raw_crash = DotDict({ @@ -1020,6 +1021,7 @@ def test_extract_memory_info_too_big(self): opened.read.return_value = json.dumps({ 'some': 'notveryshortpieceofjson' }) + def gzip_open(filename, mode): assert mode == 'rb' return opened @@ -2119,3 +2121,95 @@ def test_everything_we_hoped_for(self): rule.act(raw_crash, raw_dumps, processed_crash, processor_meta) ok_('os_pretty_version' in processed_crash) eq_(processed_crash['os_pretty_version'], 'Windows NT') + + +#============================================================================== +class TestThemePrettyNameRule(TestCase): + + #-------------------------------------------------------------------------- + def get_basic_config(self): + config = CDotDict() + config.logger = Mock() + config.chatty = False + return config + + #-------------------------------------------------------------------------- + def get_basic_processor_meta(self): + processor_meta = DotDict() + processor_meta.processor_notes = [] + + return processor_meta + + #-------------------------------------------------------------------------- + def test_everything_we_hoped_for(self): + config = self.get_basic_config() + + raw_crash = copy.copy(canonical_standard_raw_crash) + raw_dumps = {} + processed_crash = DotDict() + processor_meta = self.get_basic_processor_meta() + + rule = ThemePrettyNameRule(config) + + processed_crash.addons = [ + ('adblockpopups@jessehakanen.net', '0.3'), + ('dmpluginff@westbyte.com', '1,4.8'), + ('firebug@software.joehewitt.com', '1.9.1'), + ('killjasmin@pierros14.com', '2.4'), + ('support@surfanonymous-free.com', '1.0'), + ('uploader@adblockfilters.mozdev.org', '2.1'), + ('{a0d7ccb3-214d-498b-b4aa-0e8fda9a7bf7}', '20111107'), + ('{d10d0bf8-f5b5-c8b4-a8b2-2b9879e08c5d}', '2.0.3'), + ('anttoolbar@ant.com', '2.4.6.4'), + ('{972ce4c6-7e08-4474-a285-3208198ce6fd}', '12.0'), + ('elemhidehelper@adblockplus.org', '1.2.1') + ] + + # the call to be tested + rule.act(raw_crash, 
raw_dumps, processed_crash, processor_meta)
+
+        # the raw crash & raw_dumps should not have changed
+        eq_(raw_crash, canonical_standard_raw_crash)
+        eq_(raw_dumps, {})
+
+        expected_addon_list = [
+            ('adblockpopups@jessehakanen.net', '0.3'),
+            ('dmpluginff@westbyte.com', '1,4.8'),
+            ('firebug@software.joehewitt.com', '1.9.1'),
+            ('killjasmin@pierros14.com', '2.4'),
+            ('support@surfanonymous-free.com', '1.0'),
+            ('uploader@adblockfilters.mozdev.org', '2.1'),
+            ('{a0d7ccb3-214d-498b-b4aa-0e8fda9a7bf7}', '20111107'),
+            ('{d10d0bf8-f5b5-c8b4-a8b2-2b9879e08c5d}', '2.0.3'),
+            ('anttoolbar@ant.com', '2.4.6.4'),
+            ('{972ce4c6-7e08-4474-a285-3208198ce6fd} (default Firefox theme)',
+             '12.0'),
+            ('elemhidehelper@adblockplus.org', '1.2.1')
+        ]
+        eq_(processed_crash.addons, expected_addon_list)
+
+    #--------------------------------------------------------------------------
+    def test_missing_key(self):
+        config = self.get_basic_config()
+
+        processed_crash = DotDict()
+        processor_meta = self.get_basic_processor_meta()
+
+        rule = ThemePrettyNameRule(config)
+
+        # Test with missing key.
+        res = rule._predicate({}, {}, processed_crash, processor_meta)
+        ok_(not res)
+
+        # Test with empty list.
+        processed_crash.addons = []
+        res = rule._predicate({}, {}, processed_crash, processor_meta)
+        ok_(not res)
+
+        # Test with key missing from list.
+        processed_crash.addons = [
+            ('adblockpopups@jessehakanen.net', '0.3'),
+            ('dmpluginff@westbyte.com', '1,4.8'),
+        ]
+        res = rule._predicate({}, {}, processed_crash, processor_meta)
+        ok_(not res)

From 3ebe97740425450b0387459a9ddf342c9b8f44f4 Mon Sep 17 00:00:00 2001
From: Chris Lonnen
Date: Wed, 14 Sep 2016 09:42:03 -0700
Subject: [PATCH 05/13] 1300367 remove server status (#3456)

* remove server status api docs

The server status api endpoint was removed in
e77f82a44ccb9af8ebe5903a142ebf5e60977cc5 and this removes the
documentation for the now non-existent service.

* remove the database schema docs

The database schema pdf was last generated nearly four years ago and
the graphml equivalent has gone more than 18 months without update.
They are growing increasingly incorrect and this patch removes them.
They can be recovered from history, if needed.

* remove server_status from development docs

the server_status table is deprecated and should be removed from the
developer documentation

* remove server_status crons

remove the server status crons and references to them in configuration.
Oddly, there were both crontabber and non-crontabber versions of the
cron lying around. The one without tests predates our adoption of
rabbit and has probably been unused for quite a while.

* remove the ServerStatus model from alembic

The class had a typo in the name, but I don't believe the class name
was important. It was introduced as part of the schema.sql replacement
and modified twice since then, once in 2013 and once in 2014. From the
other modifications I don't believe this change requires a migration.
Additionally, there is no equivalent test file to modify. The class in
tests that checks the output of this model is already modified to
remove processor status in e77f82a44ccb9af8ebe5903a142ebf5e60977cc5

* remove the current_server_status_view

Removes the server_status view from the raw_sql directory. This
probably needs a migration to accompany it. 
* migration to remove server_status table this removes the server_status table from models.py and provides a reversible alembic migration for our instance * remove server_status from the endpoint tests while the endpoint for server_status is still around, it has not relied on the table to operate since e77f82a. This removes the step from the server_status test teardown that touched the unused server_status table --- .../5bafdc19756c_drop_server_status_table.py | 40 + config/crontabber.ini-dist | 1 - docs/development/api/middleware.rst | 60 - docs/development/breakpad-schema.graphml | 2594 ----------------- docs/development/breakpad-schema.pdf | Bin 458275 -> 0 bytes docs/development/databasetabledesc.rst | 3 - docs/development/databasetablesbysource.rst | 1 - socorro/cron/crontabber_app.py | 1 - socorro/cron/jobs/serverstatus.py | 124 - socorro/cron/serverstatus.py | 152 - socorro/external/postgresql/models.py | 18 - .../views/current_server_status_view.sql | 3 - .../unittest/cron/jobs/test_serverstatus.py | 105 - .../external/postgresql/test_server_status.py | 2 +- 14 files changed, 41 insertions(+), 3063 deletions(-) create mode 100644 alembic/versions/5bafdc19756c_drop_server_status_table.py delete mode 100644 docs/development/breakpad-schema.graphml delete mode 100644 docs/development/breakpad-schema.pdf delete mode 100644 socorro/cron/jobs/serverstatus.py delete mode 100755 socorro/cron/serverstatus.py delete mode 100644 socorro/external/postgresql/raw_sql/views/current_server_status_view.sql delete mode 100644 socorro/unittest/cron/jobs/test_serverstatus.py diff --git a/alembic/versions/5bafdc19756c_drop_server_status_table.py b/alembic/versions/5bafdc19756c_drop_server_status_table.py new file mode 100644 index 0000000000..8d2a6f330f --- /dev/null +++ b/alembic/versions/5bafdc19756c_drop_server_status_table.py @@ -0,0 +1,40 @@ +"""drop server_status table + +Revision ID: 5bafdc19756c +Revises: 89ef86a3d57a +Create Date: 2016-09-13 15:56:53.898014 + +""" + +# revision identifiers, used by Alembic. 
+revision = '5bafdc19756c' +down_revision = '89ef86a3d57a' + +from alembic import op +from socorrolib.lib import citexttype, jsontype, buildtype +from socorrolib.lib.migrations import fix_permissions, load_stored_proc + +import sqlalchemy as sa +from sqlalchemy import types +from sqlalchemy.dialects import postgresql +from sqlalchemy.sql import table, column + +from sqlalchemy.dialects import postgresql + + +def upgrade(): + op.drop_table('server_status') + + +def downgrade(): + op.create_table('server_status', + sa.Column('avg_process_sec', sa.REAL(), autoincrement=False, nullable=True), + sa.Column('avg_wait_sec', sa.REAL(), autoincrement=False, nullable=True), + sa.Column('date_created', postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=False), + sa.Column('date_oldest_job_queued', postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=True), + sa.Column('date_recently_completed', postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=True), + sa.Column('id', sa.INTEGER(), nullable=False), + sa.Column('processors_count', sa.INTEGER(), autoincrement=False, nullable=True), + sa.Column('waiting_job_count', sa.INTEGER(), autoincrement=False, nullable=False), + sa.PrimaryKeyConstraint('id', name=u'server_status_pkey') + ) diff --git a/config/crontabber.ini-dist b/config/crontabber.ini-dist index a70d2ce758..1dc0cc28ca 100644 --- a/config/crontabber.ini-dist +++ b/config/crontabber.ini-dist @@ -245,7 +245,6 @@ socorro.cron.jobs.matviews.CrashAduByBuildSignatureCronApp|1d|07:30 #socorro.cron.jobs.ftpscraper.FTPScraperCronApp|1h #socorro.cron.jobs.automatic_emails.AutomaticEmailsCronApp|1h - socorro.cron.jobs.serverstatus.ServerStatusCronApp|5m socorro.cron.jobs.reprocessingjobs.ReprocessingJobsApp|5m socorro.cron.jobs.matviews.SignatureSummaryProductsCronApp|1d|05:00 socorro.cron.jobs.matviews.SignatureSummaryInstallationsCronApp|1d|05:00 diff --git a/docs/development/api/middleware.rst b/docs/development/api/middleware.rst index c164f6f3e5..aeb0fd603c 100644 --- a/docs/development/api/middleware.rst +++ b/docs/development/api/middleware.rst @@ -44,7 +44,6 @@ Documented services * `/releases/featured/ <#releases-featured-service>`_ * `/report/list/ <#report-list-service>`_ * `/search/ <#search-service>`_ -* `/server_status/ <#server-status-service>`_ * /signaturesummary/ * `/signaturesummary/report_type/architecture/ <#architecture-signature-summary-service>`_ * `/signaturesummary/report_type/exploitability/ <#exploitability-signature-summary-service>`_ @@ -2134,65 +2133,6 @@ If an error occured, the API will return something like this:: HTTP header... We will improve that soon! :) -.. ############################################################################ - Server Status API - ############################################################################ - -Server Status service ---------------------- - -Return the current state of the server and the revisions of Socorro and -Breakpad. 
diff --git a/config/crontabber.ini-dist b/config/crontabber.ini-dist
index a70d2ce758..1dc0cc28ca 100644
--- a/config/crontabber.ini-dist
+++ b/config/crontabber.ini-dist
@@ -245,7 +245,6 @@
 socorro.cron.jobs.matviews.CrashAduByBuildSignatureCronApp|1d|07:30
 #socorro.cron.jobs.ftpscraper.FTPScraperCronApp|1h
 #socorro.cron.jobs.automatic_emails.AutomaticEmailsCronApp|1h
-socorro.cron.jobs.serverstatus.ServerStatusCronApp|5m
 socorro.cron.jobs.reprocessingjobs.ReprocessingJobsApp|5m
 socorro.cron.jobs.matviews.SignatureSummaryProductsCronApp|1d|05:00
 socorro.cron.jobs.matviews.SignatureSummaryInstallationsCronApp|1d|05:00
diff --git a/docs/development/api/middleware.rst b/docs/development/api/middleware.rst
index c164f6f3e5..aeb0fd603c 100644
--- a/docs/development/api/middleware.rst
+++ b/docs/development/api/middleware.rst
@@ -44,7 +44,6 @@ Documented services
 * `/releases/featured/ <#releases-featured-service>`_
 * `/report/list/ <#report-list-service>`_
 * `/search/ <#search-service>`_
-* `/server_status/ <#server-status-service>`_
 * /signaturesummary/
 * `/signaturesummary/report_type/architecture/ <#architecture-signature-summary-service>`_
 * `/signaturesummary/report_type/exploitability/ <#exploitability-signature-summary-service>`_
@@ -2134,65 +2133,6 @@ If an error occured, the API will return something like this::
 
     HTTP header... We will improve that soon! :)
 
-.. ############################################################################
-   Server Status API
-   ############################################################################
-
-Server Status service
----------------------
-
-Return the current state of the server and the revisions of Socorro and
-Breakpad.
-
-API specifications
-^^^^^^^^^^^^^^^^^^
-
-+----------------+-----------------+
-| HTTP method    | GET             |
-+----------------+-----------------+
-| URL            | /server_status/ |
-+----------------+-----------------+
-
-Mandatory parameters
-^^^^^^^^^^^^^^^^^^^^
-
-None
-
-Optional parameters
-^^^^^^^^^^^^^^^^^^^
-
-+----------+---------------+----------------+--------------------------------+
-| Name     | Type of value | Default value  | Description                    |
-+==========+===============+================+================================+
-| duration | Integer       | 12             | Number of lines of data to get.|
-+----------+---------------+----------------+--------------------------------+
-
-Return value
-^^^^^^^^^^^^
-
-Return a list of data about the server status at different recent times
-(usually the status is updated every 15 minutes), and the current version of
-Socorro and Breakpad::
-
-    {
-        "hits": [
-            {
-                "id": 1,
-                "date_recently_completed": "2000-01-01T00:00:00+00:00",
-                "date_oldest_job_queued": "2000-01-01T00:00:00+00:00",
-                "avg_process_sec": 2,
-                "avg_wait_sec": 5,
-                "waiting_job_count": 3,
-                "processors_count": 2,
-                "date_created": "2000-01-01T00:00:00+00:00"
-            }
-        ],
-        "socorro_revision": 42,
-        "breakpad_revision": 43,
-        "total": 1
-    }
-
-
 .. ############################################################################
    Signature Summary API (8 of them)
    ############################################################################
diff --git a/docs/development/breakpad-schema.graphml b/docs/development/breakpad-schema.graphml
deleted file mode 100644
index d67bbafe46..0000000000
--- a/docs/development/breakpad-schema.graphml
+++ /dev/null
@@ -1,2594 +0,0 @@
[2,594 deleted lines of GraphML elided: the file was the Socorro/Breakpad
PostgreSQL schema diagram, generated with pgschemagraph.py and arranged
manually in yEd (last updated 2012-10-22). It grouped the tables, including
the server_status table dropped by this patch, into Core Socorro, Matviews,
Email, Admin, Monitoring, "To remove", Monitor/Processors/crontabber,
Crontabber, and UI management sections, with a color legend: yellow for
cron/stored-procedure-populated tables, pink for tables updated externally by
processors, FTP, or other, and green for tables updated manually by a Socorro
administrator.]
diff --git a/docs/development/breakpad-schema.pdf b/docs/development/breakpad-schema.pdf
deleted file mode 100644
index 98edde86d33e8af99c5c40526a033032adc24e28..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 458275
[458,275 bytes of base85-encoded binary patch data elided: the deleted PDF
was the rendered copy of the schema diagram above.]
z6jMSUpWzQXT7D23A)IsU#`Um`LYmth>;=-fQ9&28gC4bf?&|Tya@Yv+bqotz;JQ){ z=g#IB$TZav`=``+M)hZ_qpjHoXT!D*ajs$Cd8>dJV)DjSCz?9y&fHu^n&8p&Bp?@d zE=LHdAa`Ze$uPyz&A5i7TBDNVX+egRvM{)&bZgeIW1l{%UeNFrE0GfOUThidAN%7O z_6dl7L)gAPt3JzMMKd)292jN(Ad$b9V*hEl z15&{_KEtTwPd)9b=k%;-cDfRCH&-rHZ*I8RnkjULa2%Zj+TqS$#Av>almJNZMk3O~a z^R^4Imrac8G)AN96{zFZdCx2-3&?qpg|n6uChT!rOwM#?(YxOB32P`8_NHz({gPN* z-+4Z*X!(q>568pvpqn2lcdQc)QF6|Azu4+=z@~9}-tyB^{b^=8GPxQ#JGpHVlc>H2 zf;WTAMBdC1x4iY7474t`*AKVZUJ>9;pID<-raXv>O^4jzih%w}`MgZ=<%qnVEAm4PIQ6A$rM@9sUt#I9d=36b zRpOiUC;ui2{Q;K@*@M1leEvzHK`{S}KTQ`b7!TlIAV>G{^ui&l7#va1#k`uMXF8v@*dgWwXmhfTZo$mFm z2X)KEz98>+*+teukHDdrR!qeb>^e+05n>NNcjMw^xgc z`>uKj?TH;FGh0<-`B<*@9x@>Cv+QaPO5Z|v4jCQV%$L@~0 zmt81(bOOfccTgKe>&YCi4)#g!#2v;ZRno8#NE3bdvN#l_dta>F+$c&hPW#LAQsAR8 z6=5t}W=m>*E^Km!JEoY-mBr5*1F0ho z$F(RPjL*__@NTgUj%%-<@k+rt0!#0yr8A}2p#KlQ{+NI$0*1mM23AuU&1|L+C{As_ zesss^Ts&sDc(>EBvOhlXN;dS7eZ<4f7JyTr$n>JQjUI9~t)yv+<;@CjP;w@h&1JoF z>C!n5rK8=ZqcJ8kqQa_X7c_I&&B}}U=X5n;eW$)U8`lPIDzwQ0ExlKSGWi8EVynK@ zvmTc@VN7~xR(H|iP?#gQXdE7krm3>`xh+ z_cA{E%Zv^E4T1X;1nJK*Hh&Sl2=OY5f@UpqcH=VFAS(%q}sz zxcsA|EaBr)-L-__6<*fWz?;cnZ8U)d9f)g3KuN4yMhR#VhOg3aJ$!@M{!Pl zumL>d=|p4q>&0iE#-b(}ZLvkqkXKh4i${BBiWg%Q_{-ZdiZQnChZDn>_WQ-)R*thd z>%Cp&UH`tdh2)Q#eW7mj5@ChuU-WM}R*{W?d zhb8wlQiE#+I5w{ty%n6%TChw6qqYcKQeExF zKpk&NDd&leY%D2RjX~ve)c7FrX!N}po&Hke(cjQ`uPFRk7V581@?M_)-zSm(h~q|c z(L0JBK)ug%Q7=w;1K&~i5%)Q!;lP~H)q4{$muzh<}d?NFbl=~1y>+@fs;AC`rNc3>TsKXY*5z|)LA zs1Fa80AQ(cucoum)5uMN`Fxi$$QX9(nX3~bXaX6Zm`yduxhQx+-+OTYcmw`Fbw5Mq4{E6^BrFYr5GSUEWUfd86Ydhb@ZDT2V1_ zCe|C?```pva?k8oUo1X{$qz{{3i|+)Uy4ln8zS>3Oh)@kKjdwG9{6av>OTY$(Rbkd z8)0%d{HKcj56C={nJmIXyI&yYM+xxWU-06LUsBd+J?RHmeU;++(t*DhF(c)UTulhO z8;|aCy?&;`pi%?p0bX^mn@Jd4@=8W2uCQKgBZK+vGBer<5NZ~qCbNtXj59*iSeu!d z^#+u$0D7*;p7lcRT-tfwYE$V>XW4z_Htir!7P`6=y-9@bB{em{M_AvMwEfAh#GIEM*bS7=l9zpi#_Zk&|nNO#{@}b&=xl}VdU_Gb~=)~)P9hYA|=c}g3FaK zU55m{VYaaG0;8DSJlw$95BR-UsxX`T?p$BV?#aZOdjYV0?;%{Hm}SD$RqvTz(d?r5Q>KyPPn59sua4>G;vC8{;R*X!gVNbfVJ8Srwm zJX^IIg*Fj&6PQQT*3RA`@p|*&*%_cx-z|mmvApgWWh7-yX3NS^nna^;x}uB^&?rXx zhgIOk?0?WI@cv={Ggg835Btl{I!=>Rp=`#bv7L&-K`F4ucGBmKP+RWjUDA}#%?E6cd1)#fKt3MLVP@oT^RSterY>VQ zBDOm72gTgpN-D)rw^k}QOGSeKDuB^6na&kjt>|4mNG7IOGoPK>H+g-`Zd<;|U;Q)I zDdV$McIsfxFjnk@jnV^Mv__v|?90J^J@(Xx=56N7WDWBT1^bGzpCxPlQknSQu!Z73 zJ_q>4Nq@NZkNyDWOOnrggXF(b)33bvdx7?U=K%lj9N>R#aPj}n0sgz^0KWjuD>{9g z1I&D>oSAPZ=U3qWEFJb2$U^?E_N5Q$FxmYcxd!QHJr^%{9`pH=^n+R<->V*&FU1P; z4YBeoxc}h0|AqVi9sB-oPmg~ewElOEeb-$05LR={iDlq+r#RKJpwcU!la$)&4Kjiz zo(fXxCI*55|(5l_jmn+|>(6>gwIko1!G^&tEXx;DAO>|z2YL-fS=P};#dAsg2 zU7qDvnX$Uwd&F$3*G;n1(Y3ylYeqY(Hc^9BD|vt7YTnPrm7C2B%GOnN=TL#|O>0Jf z&tI~=On6``{VhAJ^w90NLC4gb2ElMCmRpn;{dvCck4S~S>{AH<*>qaDZg*+*YG-V} zx?O@ov}W#k{~SC%h;sTvUjp)DIr3XI&|fo2QD@3O*X2L)7%6?0n*0CdQsAFnd40D1 zX)W-Vq6R5_L)E;3@mGHPy{Jup|d5eW{L-(l_Ms8{U1E z1bc71daK7O{Soi|z0uu2l_GpPH||HbLbPgVX%QUzt76fg4taGX@!pGtl)j|#Na-6q z{uS!Ja@p^NI(>f_^Zlq`)SJ6wGwYOhy=JtooRJ%Gb+ZlhMl5l?!|T?_AT?~TMu8&)-+h}Xu=lANYu!47apxu;Woy4WQ&qk$$Y5?xeoYz>_=HT+tt|1L*$j)Uc z`Bcgbh79*av$QSds^!Jj6K+1bUqLDNo@(l*JShw}^9Kh3wdF8Gp}IUM45`s7F9rB? 
z6{M3<-4qzQ(>b@U!}S&7UsTF$9JO;tD=iL$0Zx>Y7R_@@FD7)mf=6NFsd@;;pq~Mx z#`2nwkI`0jnv0R=){{}5ZktFs(M%!(D>Q<87UPr zv<MqwY*CXRL4y30E>uNz6ni- z_Ph1mZZf;bGj7b1l}i`ZbNPH;lCIaLJDs)NJimn#aXsaT(kThBqswJYpYn0G`Hc2U z#;U^1hQWQbXmMV$DsbVVTXxJ(&_q1QWEN|`S6bc}gk3|Jl#tbf>2c2%sB{%<%uyTI z$^t~3;(Gi3mdA@#?bg#3PA>+@)!w?%{A>`_+#pzOdX9?YgAU#}8SA*-$qEa+-`J$B zXqZ#fdLqWGol&_e=Od4Fl0f^0ta0N`KjO2z+)Gw=?R%@O~?ZeCgjJFOvNaLVZ7|S8}+C#b$fs7h4{Y7YDsMHtLIg8~=p7c;gFP`v!x36PaJQ@b}`{ zV1J6s^;G{POWBMt9W(Cg6zk)nKtjf@*mD%t=r(X%j^E+=ep4+(1J(F0GHT$*LTQyu ztcn=4n(oWyGcau6eCp|fNo|(PDYfC8>y1UwOTt_7)MdaT*6tZo-~~QI&e2q~c){6T zq%*WH*5-pa6rK%)NaOaVi{_0H*YEec0Sk--Y7y@q2g?#Z#CM6h?eGGlQ{)(pN!WH0 zV`ZnY2aR(J+-AgDJatT7gt1YuV;eME27%s~g zKTOx$;R8ARFkNcVT`fosrqNoBX}NOi^P@=_7~MMSC@pF7&Hh882Z9UvbJ72ffMiFJjZv8s$Y-nS9D4(Py+#hL5v=H>P zUY|~S8))s<%6g+zG7wdpU!~b{$lvBjZ2(^zWzedvR&({H+c7ShuhnaBdsTNndPbAp zMD($iogXWGh1Ol#1vMrU?Sns@$3T8u%)s_DKlT<~!C}wM>{ByiRu$dgeQg+4gl%p= z*any+@nXHz+mX${cfAg-^)ol3E8df913itbR^^fynVUTDIg>OX};5Z zsLv$vUNm}mkU*q)r8N*`Y6dL$g4?g1)pT?!*3oFBXCuZ|w-eZkx{RaQn>MxYSxVkP zgAkuA#@0OAKYC;th2kgAT|b!%&1m)(`c%G?MR$UcJ+`>!Tx#3AQe~2nNqkRU{`PxuoDeL6aY`!mB<8JJ+D*)Ja zn2Re_Ypujnss11Y31-6+3&LFZDW)nOdwF>RZPx2!p%9C;3ADS=C@HTb)$Yk^=Jxw; zeL?o9V<98S(N$RkPAj2xbDK{uRi6%Td|U{OCHRooUQXR7;H6dD$B%{drG}A}0u1MF z<|g)h4E-2X)^}0k(0Dxz7O6dZDoX=6cpOVWDLZS&QV*_3V5>-?7w0wia&0lKwVIy7 zg4JwwIOmvz{i-d>$r8g%ye)s8mbW|HOu32VqX-W#VuqYv&Y&?HV=NGjOwaUg8c%n` zXK9DEOZJGz*d#l^bE7^-b>KQW89#R^Hp4yrGdqj=jcD99ji#-RTg7f3a-?ODXkQE2 z8mCjp6Cmh83R`cj0&C3edo#aJj)o750r|VuApVIQimA_wETbkM2hVRx*yoc#kr#Koy79yQ2IS?t|C$P--=Nq( z!Rbqf{bqEsqd-n*yA1!Bwrd?akR*MaZx6^zT+y?Tl!mp_%&N3% zgK);hytLRT^_r7qx>o4VmF|bh8`*H z)%0e$h0`cnVcMpMIyKO+4$o|A1Dm^atb}!xhA6ZKYwJ;cZC!^$qf$>7Qs{<0T!72W zyQUbII*+N8bgqG~9+{}_zEG~-wmEgK z4WgZASUHm~N+&f!!@t@G{^1vVZQmSaC(XStpT!nNP&LZ(FSfLGvL`F)Z!bc8r|p5$ zXw+Ca2pku#31!yx^Cc!*)|FY4&wAJMDL#z$%%%vtC9+8JkeT?n;2HdJhu2DLqR=R5 zkIt@<+u-Z~?M-H^&I0<7@!}jhXBW%Th-+HnBMzTOIlZ@DovrT5&rK5eVGbuzpG_`R zD|#+%Ou0J8LBMx4;Ea|`G< zl-VoeLO;tbMB(>;P`rOv?5^9%Eo6}JdvL|_?lG3d(!bd4y4>c;)-OYMFCKZrwfF8u zIHxlhRW9BT}`a>gxC$W!I8@kn$(!1XbH{-f{}-#F9t$ z+A}7!GBJ@j^t*fT@F0{9vYqxW4>hm3(7^`sNTZc1B=;`731x3{@f(8$J50vM@`M=; zoEtZe77UzsO=nl7EB+!!nIWCLYGF3Em(2Zj2G`e6w zm!aqNG+SaS!)UDTkW?x(I||O5d9w`Nr8Eno3cRkhlwmvX<#_Cxj z<#uhd$N+1zu)&GeF4n1!nC3Oi^v7cj+~S3jdG=bUhSni=X{(NRZZgaNm%T6TQWR^p z{?5N}1Vu##6%|1Q6%`dxzyW8#2~-^Z|9i^Htg^c^?d-nKTiy4rXRTgVredg(eZm-a z?7csQ)zRMDOsqXIA)^_=P~yi$YTUV!eZ-Dx*O3p2J+eR2+B1h}IOzZZE~Natx}1H# z5DkwqiXZTVRTsZ)^<`qtB;K!k80@V3cW=x5v=2RA01ZzU?%E(5*xRo^%mjjhUrE)0 z{LjNGeb}eNu}rg(a7R|#jL3S_UsnZ|&EBlW?GTBb|hPPWL)|KFTqu4ipwrBBcW*8YmC5cJ3 z+++WCGD=CkdR(lc@m_O2^w=Uje-sqxKH!|9 z#Qj|;jMRd!7^4$(*|@^duf&Qg+2+zTI|;2$A6J#cQ#q8){UndYiP!ZI@r8QmEyg{h z?0+=i-_9%fd!Y!N`f<;%?dpA{tbE&u`L@FyY7ga3{S#+CO(>qEUGawpJ`Zyb0r4LW zdO4wZ<+gt!%pr$+GZr~E*f|m(GFQ7B+{M-0bhYtX=wN^8%fd?1lu^7=K#9@}^A)@_ zyyrK*Lz(470H)y)scm3Lj8(3_Zka`PkM&D%si#fidRJzzVZFi5n7}!|LJLtsz$i9r;e(4AV3O?%E;NMbeX{gD=H=%YD>Y!vb(d< zSyA;aY^qmEo%X9zv$b(M^IB|OZ7NW9itbFBX-AM0?M;I@O3u1;wXOmdhpIaM{z@}t zwh(L1iHcAPu|_A}fz@nSi@-+9l@sx0yr%6GQi$*k-iJz23agxwiK>06GW&2k0(7^K z65$_st8mG+JHAt?8l+G+l*_b@MOGv)ddMo#I1ZaL<%$K%;IF^EOl)IyKN8MOf^vR# zJc%^4%?cFdxCur&o_^e~tO!h|{fJji_V4u~XE|iww)bN_qD!fK;ACN=GSeFQ?%Ii^ z$H80JP-uL4=+^tttTpj2LU4F%sNzY+rfUi@8fAZ%CfBE3HyVT&v5ybxLct0lJ>v}TrS)vuKebb_tfqu zS{Wx_=g8z2a^$y?^_A0pam*{3>n#qbD>@6dbe(huTV#-nsieuKU~_ab!a<3t4T`yx zX;_|eF}5`!b_W?ss%?(ft797I3N>a?swcK>1`^QJK7Bez$-eJSqLZRmC6&>@OQj=$ zf;Yf-R4!U=X=Wr!E$Y#gJM@AmkKLTq3Lrspqtu|G-7UU*&X?W#_==zds5;naeo>tt z$f_0B#Z$;WyV8kWaK9N9z73V4pg5Q&jT1aTm!|{<8|8ZXEK^>0A8V;n73Fa%O{o<(cU91Hw`8_mI@t1|`ZjzB#X@ND; 
zm1y=7zLkvaIIff`+Z#Y1UGB$QqrAtAN>;tL&Vanm3ly8geN|LOH|WxhG+Ji)8da9Z zRsg6yS`im70jLR?{N0Vfny5JF2E{xY zi&SPBzRBmFZpiK|rFLst4|?4G2XFp!1`U|l-v$c+y-3Z@J7>o_`gNNPB)U!8A7)1x z+U8X*5xnmP;k7Z>4RLD1;qfTCRIdqmA#Y{;C>?>!X%Y-5ySJ(oA0^68SO`J05bKWE zVyjmVg=<^Q|B9WiPpDVlOG4V$>dkfC`Pi4w0{1|A{>RMrwZc9Ab&30zUi-yy?i&s39yz#T zx0R<|MlX-1S$bR@=J(|Q$;8C@Jz1HpZf(P!WHu)q02WI7#?JJvt(aZ9$>$2)tfyl{ zbdah*O#MJI=22m^Nchm)ZL(F0vBy%O9x980259Kt47U)jS4dh7juDHC`0jbzAJ(Yw zv{op~8MmXhHi%_dn@nI?>F=RZiq5dxbN3R2N2?>>Zi2X%gBiKr@h16!9q;T{o;JKX zPUw=}*EYR|Y_Uar+`uHP*>b8;@wD%6=cjoqmY3QmM~5P}>bOMcb^II~)#NR5su#Gk zugoXW&~>VszBj@eE7cy@#OJdmii=cz;sPPeySHVo?*rD(I8k|_B+3%JYMR(>}Nx=Y84 z19kI#Z-ZR+cmts@FW4UbGOjiJri~|Iq*M0MbU91A% z-53mO4b*$ZJdDy4xVWC@QNYT6YBu=h0oF(GM7%;0dWzObD>t9W!HsqhgmU_vtF7f5 zXt>z}R)obl+>#?kG<7qm@Cq*UKf?+9MvOlF1j~A@Mi1tXUk2_zWZ8e@h3}1Q@^{UD zBKPB-Uz_ZGz4`xxFy@ax1OiBeO#z`GC-X^!d`hb6r_sP`&3gPVh}LI*`0q#2{o|fr z8zCQhs_Sp)H-kSK$dzxLR=LPxmri71N{R$zlcUdycs^L(Jthq4as3b`ut8y#&@M`} zS-ey8`3f=3GrUotL8im54*Buz!aAcF02|G3fl8JZZgfyWL4`rp46jC=S2#>FDnbE;JcXInNNo2$JRVtM0?6A=qs;!0<_o(q@FUhPN>`fXw5Z6OHx(F0*ZiS7rkAdDG z%ZfgMy)PhT_SPHaMR1^DG^yka=WJft3lqa)RNxwV5PJ0WV(*HQU>mIq*9$ zK3#fDBc#nowdpXrLNhbG&(xsvfdpUNVu)&TaJ;DZXStv{J@l{%7Sw1~n`QvN8{1xN zQJ^cp1_Bv?A?$n-Tb`;+I{Q=X2B%(^nGcEjAA$MfhWuN>{*bNzaq0VT;}3Y-e+4Q< zp1o-E0iZJ6mSivGe$wcBIOV-(`ZOqdc;NHl#zThuhl4)$VmQu1JLkLLh7J82&H z(rF8qdiLfV&k4EWR_?ZO@Nd0UJQlgg)*No=HXdz-640~`G#1hw zyAxLvc5#;PiOD5cW7DMB=7`kFVca{U$IE8z?a}&VpRApaXN8-~4Z2OtGKv0ut98%I zOK~|EhDOkyK7o}r8|{du+2lxeVx$Jx_7JxbwLdt9bt88P4yXDpFfwz^W+Ohy&Zg5n zS`NzBXnJ%Cc6!$}yvjFgJvS);lraPoWYw+&g#%s;rp}V6<&4S}sHspkAQ$eNvVADa z-(W`W&DHR;pfT)4cuuU22mx zRC9(z$y^ADY}Q_xdW6eWbi2%L>CUbiQgH4H_W&}l83gm`norz!ABVul@q0hDPZ}zC z=JllGA#(pG$M!u0elY1E(*K%C$6wKx{(b&>%thwePf;xoAHDfD_tXx~ydHgIzEB~& zMZ{NL{IwBLvFMGvf#qzBm1DxB5aqB|bk66e>t5)rnkqlfUHbLmv4-cmI*=6#DbTpFwu5nJRv1nf@nWEGI}QW{ zyG{}tH?CJJyKt(wC+GD>5nYi=$;y!XCDr0%=-oXncU6X}VpEZxRS=jfp68Vk1A(n8 zgHg&bCr{#HY8TDTOF?WOZqVh6DunP1bKa!%P&Gf;2j%5}aWPzL`Fy{F?}F3iC6cn* zQt!5(&25D+Ftr+oWTwE*`H;eay^|kLweS*63L@6Yq^8uPgN3l~GnzjE~#E{6W* z8~M~U^xZ*!?=xO($>aRKX?W5)egc}-giS!}7#)GnOv6;zXeg41#nmV zUvGC?E6CKF0}cvR1uP`YtNLE zs|B3gWJc3SCR$G=pwdhn)ffG7gE&e|+ljU2s}LYls`WX8TOQDB^sqk!ZH09M#b`9c zf+Jl_V2cT8r3TtWK`Mr3njh_}i8#XO{I2OkbH7%?1pbJ`x0Bi+$ZWEcJkAPpQy(S|Csa&@ddA?K8=f~8OtDGm9GXncY|*Uu zwZts8Yl(Kd0{4k~0+#Km7%r#|*RCO0zpXH*Y0ID9(m9?$3psk!*nfW-Mkqrqex6ka zmX<3(MI=?+{O(1Q`<=k0LNdF72beyE`v#rww9<(k=16LOi7iQPqu91(hcMcXj{a6Ei5y7ks8X*W&o>&kBENUP{E)j2z=p z)u+)@4iKvRn3rT+<5fO--UaYiuU;{abips_wdH0l>r8}F+6>Lko)06>>Jnv@?m z^40*Znj^Lk9it(nxVr3yYZ?>o3;lKoEf0;Pbw#n%d@buo&TTpQVP#MZ8O_hCm-p@bBTMH<)!5$F*>Jv_4cL^ z9X8FauTyEj&`>?8X}mhG7^R0@{ zez1vx|6!H&-+)*A8o%}~a^<$4mhKP7yhp~T#_+=nFCycY7gAoj?$<`f=5}?#j)>Jk zmgCvfOGb;kG=>qk^X`0XA>&je6T7#i>N0bUkNui>EUA|?%L`5)561P%b+>e=rXza! 
z7=S=Si99-)6n0wSasd4x~qME1Bl8>o)EqcWma&^)g2`= ztyyhJtn0&Q_Q)C0p^{rC?}2@(F5`akluSiqjYg?I7U!ih6b4UT4O+)&I;a8EaN(#S zvA_^J)sh!Z1;LKJ&)~+`?qufJjr+pI&|kXx3l~FQdm}LgrDoBj#0|9@5K%5Kwu9Lf zRyL{D8jxcZ(#EE0&TFIldJQ#+ET1xu=9o4(ymMN$@&WGD2oKc>bJ7n(kCs$_NVW@X zjoSw^=Co%vdctJc+D9aIEHm?Q!ERar4cTGCc_mCmp?zl|4}k1cMkJS^?tIt-JsmMb zpyys4ZX#|rco>&XAuWPdG7izM%$d&tu|LTv6 z5%zOJWK%Zu%7iry7mJA?LKa%8!BR&sa4?XR#rFE~o_iKaK zLo_w=jjAtth``9$&PKX*trWLA_P!c#wfoyeb_UAAbH#4Urlkda-9ZM)d^wnD*Qup; zi^_*n`M7Ej`4rU2HI0?uiz2HFhegq1#Au)Rz^n`wIC3YYm`+;-wQu43`e=|!oN2w= z@;d%qoh(UvR4LxrQJ3kmsjHSbrGaCkZ+jWj72D>#i zN9_8^#nn-r1CzqjoEEmPwMlN z6&0voJ7Z+DAI+)Cs9R9w^_pZ0$gvvCi{>xFJ|I>>cuZW}wJ^+<_SHBsR6(E0>(S9Vip2kS8}Ssh5rXe+7G9^XMs=s!rbeu`XK=R zC%*jeNc+Dd?f;In|G$j1AIIXUDhr_B7BYbN(PG0%M=G5;R?UOMd8W|8_jL}Wt%nl9wJv?H%* zeH*u`u$m}qg_JNxVkaUrPar+@N~76)Xk%38v0GfbcBr~A4bu3GsO3JGeKv=V)GK-miGA#*#HDE{jDd}q}yHW!*E7v z;v3jhJsC`QoY}ZOj?H(D8^`BH*cKv{SeolXLn*cQEq{MyW~;g5QUFghX(s4sMNCAd zB|t%wC63)s7F?xoHX!}I(>-)>SeQD<;Jp7S;=yxyhC-x_l zlJ54{`CUn|$<=9^=fWt;y4{XM%<=g>NH(DzQWP5MiqVMnxnLGj$x^wD-NpNHujEb} zzATU_!62{Ui@U3!qm+X3n{EYOVFudoDAO9=o23$&-5TepF3Qwc9x7QvZ;`JI*CO1i zb!P`+pYx7OCg~(E&15KAXO{TxoEWk)m&+HL>Nvj;x}7z?xylrwhTFuToOj9@>gv0q zfabRXB8yv$FX`3YR?;S^y|sTLpT0wLG7U>|KrlP1mzC{&3}i2-k;5ZzgNl+ zT6;U81tOm|`X2sy&zGOt4-aR&-T%P|g#N;!_ZK|)Yona9Vq2MUl+xn+#jtg*fC3jW zm9SH7au-uN6LWiO^098^l4wJb?nsPU zM#H>2*<^KATFpk$$|gSzbpdy0txjHBs@|Dm-EbV2&c&YAYe;v=s*J`_6v@rYc{|JR z%6fZ^HSx)P2<+Ut_tN)JD&X#kP67 zIuds5P25?t&!3|W9iWRB^R=Gx!_)hUaiRQhkQJ%9v)GaM?1d9}cUSj@9aUeCNA}gs z7^ts}Z)e7rZb9@hqkSIH&fFza%tY8hG@F~{N|h?}K5)HU$gJSCPzTR0$Tv6MCzLAOn0yJ*8v**vrOWDoCgp5Mbj?rE@0 zzb^G2s_;Kz=Evyex15pwXn^uZe!}z*Z1_LehWvn^kR#1W?ej4Dw53BmwHzNG`b8N1 z`n=?or+#f1)mHqqq<$kNY|84y$+k1=Y$^u_?fAH?m?)q}Md}utYaeIL8kVpUV(K(! z^Q-BI+c3f2ye3UpfK-d=DpF`e#h%yj<|=YnAOXH?HO$KH*qXSc-UK|G%yONZ^qJl1 zdh~`lSdA~y60%c*y=8ULjvb@%*p_M(sM-{ae^Z&9lucMrFXK38q)WSdycP7h1d?{n z20!VbZnTyjjqTy>8^?BKSySX|S_1qp$bj5$Zjocf&)fY4?1aJx6y-xjRy+x!Ktla~ zjnDWRQ97{*VRomyYhET`eUkyrv1?^M(o9evix*wtgGn6 zX`Z+s5zkqpRzN9?N&_Vha?gnmYa+fJ*Zlp8Q+rfrvq|nd;*NzV1nDkn!MwuH% zPxoqJttM*G@dhivr_7*{_ilK{E9)p8m8vIkPQmwRv)>9k%|sKsO)K|FvZE@Y5EVm7 zgM|n+pV`%uxfmIUZ~>MXQ{=~*={Wsdbv?->+53l_(y8=#2-8Q{|lC@zh!w~ zfzV$sPwl4gaT)t?%v%I{Uh#P?Lyv!5hyJDOer;s>Ni9f4#+Tq2w%8Cmy4`^`dwtPn z-fOS}K3bpH0t>$8vK)ZH0xh_QPj9{j6UUPuSZjsFO zxJ|z9#^UWlS)_%upEvi~DYgqn#^4JaW!9xLJzYnPY`u0n!kcKK>R896OQafIt+7JE zY6qm_C$Lb)AsI$kXUYf19dOjFx7)>g%(?3d%RVvRdtq=o9B!w8r0ymt8*f+bFEDK| zC6uubvOH}#g)h1}q)vVl%5b_%^Z;^vDOsN9@rQOaJeuk|*XH3K5NRLeQ%~6F%0;B-o;(Z?oqSDmU`e4I*b#=dc!t zFP7&_FOb+H8P4{7sJnK7;=zfi&9G8<*r_k4som)aXN;oZajha!ok~fA#q>_1*$L*; zq0vV3WIk-w8h0!nNi9QX&50C>23jW0lVYF`IqAY_Cy=GMW6KS(u$_yoaAk8n)x2>3 z;wv*Jn8=hU8Z|xB>d+n*UZE&}PTPxPriEU6hyA5^7!~8CPl%*{BO?!JPk#(IU+c)@ zUl)?cAG+y3a=Q0M{SWrRf7N{BbshYue;38WG5BfX_N*PBeRk*Ty!S7t$N%x0l5D#hFhCokOIX9BxbF`LGO-T@_2aj|C z^&T%1#Q`t~!`?AHxg6AfelWrp*-hs-+ioyxDUzY@l!PXsEJ(BD_gk64B48^HFFhy; zSKT9fx9>eG2`CBuo7>LMRfyYEmpQk80NhuL=vJRAq%wu6!tIfjkjOP%j3%v>IylI%T~^lnj6x2J zgX1xkyE3Dmcg|DwYlv&!?drCf^Sn;qm{?#`E|0>UC8&SF@VwF^1`&nGz#XoNUT%Qg zzg_6FQ?4u)Rot6p%bE6ScMtCSJz_eiOZ+-7fWOPRgV*BCbv$d?oQRM;z|g}Cq05mN z4D#c%v|bhY7=T@EmzWEnM)T;pd7K5qZA&#SA7?VpLh9LPDumZc>iE~C)bWRg`0qj= z4)_m$JJQXkAjy+PwvfEpZcdF2}2H?J)vHQXp{DG=sPPCpi{73J1?l*bc8 z6ZZ3LYCJppdlz+6ekkoUR>*hp&&G|cGR>F1r#t_NfuVqvUcwADSa`ZvSxnE zOua<8=bsS3Wt``);TCado-twxmhad*UgC$vb9^!2xqf21#SON$Z1#6}PTyrAkr)@6 zh#c7^?MlBRBKF-i_+59no_kI?96M_i#_#%(zPe7v%a!8+sp;k=v+n1Ht9TX9EGl!m zcIZc+<8`?!H4)xlbJnx zPf?CvT<30;p4sZy#|AtUqRDBbQm`{Lo4)4HP_K}*>&;MNG}z&e$R*QrBNY%kcdfPf 
zB$PfO+?~p1bx;~Yjc~EIoh1_kbze1%S+=sRUvp@%+Wzn+7(PXHuts-)+k<@mH#bGT zRIY-K0*qHx_&<2_*9{uxA;lnEpmz1@*C;oy96W8~wzCfSLk;0BlmR1j+QwnI6>QML zW*oMMwfMTAFkYvi2UtZ1TWg8+tg>9NF;~pgQpHTy=-QL!bcyVfi*B?!waf_`YckgX zx$;OLVr8dKu8C`nYmPdv^WlWT6R46K0jZz@V;S}Ef(w02sh+ez#Mv+u$)VXUVe7prShE_KW2i2W%L&$9?2}QXbO{RF4Kc6_jE-tE`a~ML zR-*4BOaZsDmvujXZKXrur59;82y@owZ8cF(Rsv)@UpzPN{-72(K_qJP%M7;*V#GaJ zN~ifuH6|Q{LS^LD4!uQKE99c3p&iPGiWsQ#vD^vw%i&ZoC5wmm`{)YH-ZN@`Rb~^= zAD{B$Va6&DH3B1?Dqmlh+uS4}+x>bQW$&FxD+16T;b2L%lKhBIs=K{5c)Snj=Nnm{ zhPR{}A%<$QxUR~?vM`2$&yaEFrm>b*RPx~&a7s#Q!nT|bb z__f;>8iX2$iMBDd2D!Nqr(Q9qHu$B#jwZA*TFJs&1n_m~j>N7`0n1|14pu0tH^7eL zT*u>PGU5UPSWpMIM?1fI5lIwE+(*MA0pCiw@kyRrdV!uXmo>jCPS-6peKPvQji&U> z$GBu7{WRCeK5gz2uXWCeuj`x>AL^WwKk}=$5cPBh|G^mbnl$#gc76I8M6j&s+v^IH z`!KEj?yR>!MgQDd`|g>~1K3|sK!4@VUmL-c72gZvh#fhY!Gk@wQ#t$sHor(Q9FF1( zaz`Z9bF^@s8r~^&NkzolCYlMCae&v-v~D5K-PIg+I@QIbQIR3o4Xwz7JeuXtt?u|8+1;mK}v?n>imP?6-l zNRvk4G&~?eJFD2{s@3n6ZW!#wf`RE~u^(b1 zo4Aj5RDNAL87a+SvdOXM*1C1589wyYn9pvS>`j$>gL0%V@>J<#K|28L`>EAIgQNaF zTp?adxD#KOa3?;LaQ`E=(*OMAklb_4B(b=F0UHLLu9?)M)uJ< zEO|2wbN_ZEw0N8VZ+G7NyNX#~UrY;ZwTYR0=b>F}`2Xme6V1{Ypkc8~nXfaiaN@ zTh;uvTp=q77ORzqFyxM7X(B|0<*_JqvLV94kO;qv$czcb+Zufj9yk<_J z`(V1*cw*g})D{JS4QD!8#3YY#ok5ccz=OJ5CSAcALfdSJUet=U@Rc^=WYps$!3`(3 zNM#1L8-~ZeF`f0RsT&|jEpNc0g*)Pd&2{j+K21*xV};UE(vlj%{)8qclT)bLfMg8! zP%jsyqx7iT@yz_f2*=2sk~jn1VNIEKl)?fio{WfT@?I#>t{UlW@AsA|-2>CktlWzV zS=pmoig5!=us~K5S~7r_}hbidkef>{u zJxoF0KjVMI*25H(d}#{}scUvxo`hI+HI&->JGCs8Xq4x;f?7;?^=jcRnwB`vUa3iR zGhPIa5SfbMgKBieS4i}R>BR(Dn&G_ABrb(1%BBc2uGecxKGyc((QJ9_lM!qXGlKo> zsU6>EG|)b0HXx{9j=e%wUAigADFx=Xe<+Tx?gpQt#2kw_`EjLo$d{N?ODB8188#zU zo6^4AQYEl-U>gh4N)!Mg8(=kRTo`3?SWennx3Z&4f?If7O$lPPKOVQIW z*ed!-JbIcG!;j+m9p>46LpU z7o46hAL^`aV>;Gyr@^&1q=}i?B~Ilk6t-P_QeDr6 zw~QECP~nr?zcj2BS5^tnp{CdZb@__4oS#bP-YkQpqGD7I7+4TOH*vB1vQ0hI9y?^mAd<(OTWhj32Mp z{m}@q!WNN_YvpmJp`R`JsBd(&1@&|Dg_7JBYq^ajA9Cko!3Rv~i>*KcxLL7$X-tku zJG?3tX*G%Ow76`uS!~-`_s>?%>#EzV+%6Joz?dxQb9AMiAbR&PUH11Q`gXE^Q<}e) zt0%rLS5JH>SO15i63D;)Ba`s2H85xX;D{E-(&po&_u-BA`1sV{c&$@U`~{u*7ykOS z;Zfgoka;P9<%LtG&mnN~%DdSxJ&_Tm8rlekxNI>&x5cFrhhfWsU4sy=8C2FUq#Pzk zxS)VmMtH=PHh?n`O6ECr$=dF7H?YjO7=anJ*B%ucLWwZEc|oiV&16Zo^~06~%u`1qznT^EAKgIU&QjOt3GMLE zVZ*}=pS5C7Dy7T+nTaN{IjnL`rO#}TEoh8XdHjG zzaN$7{|#uKPrHlX@H~w`rCEYM!}B~G@*c*Xu!*mw=ZUXN&p%-a-p~ardkS;$K7yCyD7V4UUiolo+m6RpW26XQJ9Vd<<3V(=&`Xo5i z3{M@pA*USI`sT8oVN?kgX8;VSLX=C73oD5RYq1MAygThTd)sLTQ<=*xA!`-2#nfYsJfnGLnDW;N^G-MTCjdF zK`0_~y-$&9qqrh@K}93!I!AZ3=9L&(gT|l2Z-D&PxK_X3!>7p^Y-|4!#nTQPrRDEn zJ3mo7zkv_Y73d8s^hYya00X^Q9h$z9FC4EkS~ddEG=RkfKXX-f?%;|TL!-wu`Eco= zs&a%Mo+PglotE}|r;lp6`taVliZ}o}r2)|n)tp{NSyV~48_u$GW!pmFZfqc0+wJ#_ zLCJPjk~Z7b3UMBQw1-+nnGRb)*rVzhb$B%I!2?gv*=xPVyE)7ODiMppm6?>9qfG|v zA}xbkjXXCA70@6u!N8o)S-psdZ2XVV_HifbZIb*@lK*k(TdJ4-(ZYM*X8u<+^**4Td5n$& zw9k-+L4Go$d3fVJ{ylw_JU;s8@$aGF{lhaKJMjzt`nB=zJ5wQ09u1G;m-W`I2OMhO zOkzSOI$7M~m0?V&!nOG==^vYG&b;o4VKKp8>FE*gbStsmJ`>Wg&5=*;fiy3(#Y+O~ zMan66e3+c6ZoU{!Rmot>UBoKX>{Kbc>>aTe?N$&kU|^v?JM2h>8GE8HB!;a;z*N{H z)}s*`ThXmls}Jp^$UUS61FeLy+s6*zrpq__3AdcFN(U?>H`VLdZg0Y7ESMT<3zIs+ zMlL^XT;(%Iss3#1DLcls#`78<9i;mTG|Z-nG>3UrOM3%#$~96EcZ7y>?O8Z5T#Rx}@_F?FeC{q%^sCy22X@-7uJHKcs31g%1wHk+93fp+P@ zHs)SBBL~V8Ekx5EYM z<(~;0^^!S&puRqjcu2heIG~?ZsrXM!@;^9_c&(ibqF5jh!fi7Sh|NCn=M$*tiHL!C zt#eQO1)cjp^xU32_iKaLgQq5g*eU-S#UeD>2DDA1s(9R>!|0d_lSfa?dFdrr&4$#W z=5g5Aa#Q)_eOF7?HV&!8b%}Zp+YxK+CM$(8YYEGoJlGsm(=&Vr^T4zZ;Fd@h) z%r|l7ERu}_x6v|soI;dJ+~<=M*ds=GF&dvn(26ABVJQp*J`)u~bk1~e#P>5zbX(`c zB%LyfNH-oDRIBD81U0hxrW~p1>D;Af9FCb(GG!;RKzwRW2Kkj9lCWZV+E%C}jZt-b zV$PG9paQSfDTONK%PphEI&LqPo~|V{->sjrP-+k%P+Gg^;d-Wr*z|0H&fz$5a`&I~ 
z`5s62J^6cHLVm42Pkdc{p7=<89{&Su81Nnb`(_o#oUM|d!UUhi=BcL%#cQ#7;_G7b zue|hYgXLhmIzUh#DUjyN;es z``g#U;G=C%s&quQzg+rEm}NIM>@!OQl&e9@0X$rvspMg{Dr&NhN4_c&LOq{jqC(-F-Qd~qQYxz(PD(m{Tu}NTv)(a5h@GL&> z=Q%ui1--PYmXKnTww^zt=eW0-%DQac&RwW6FD?jm~_-MmvK2Lydl6{gKDI3V{_!-tk)oO?}f=6{?|JPW&LGC!G( ziPysJ#Mg!0iHDB)pXX;Z@y~>prj!`B1DQiBUbM{mhG9q~_>M$=@$WZ~vJ?ai6S>dk z3ct~He-1vcGvB|U>;A`7?WYreZS(>9O4!K%ma5g|98fmwN`YoK5v9^MctE<-+AL1B zzeOLcvOQaebAA#Sf>bTrItKXay?d<=137}68$czO=|F8#yw&$JiJ4SgxCr+%SK-Fr zG+0Po$6{y|#zFJFaq&g5%MRjH!WUr0n)~sszGli8Wtqu6-JsmQt&Z4Uuq7{`gUO0Z zJ&?V~bqkRjVQsoTVlh5E@P%VV6e4TuUfX8bZo6LW`c7!wm;f=jz;fKjV$sz722$Dsub8g9zwRNbcUs}9G6f1jWC94NnL>h~byze0fRm*z<)zybFd?hgo!s?^?BL)w;E(Zum8i8*W#=*Wfe% zu;#&u`f))S9JBf1E`rEUD~!@Qc@;QUN0W+!tH9>9qqtpPzuD`*JZY)cv#;VNK`Xg@ zCR^LucLlxmeylU7l$o3Q&F)2b%-0XSCw%>MbXn+?2gm+uqVih4j^CHml;>H#JLW-B zsAxFdf4;cw2m8VF>vnw4eOvqlmZ!FP3jdL8iW9#+_xN=_cTSu^PQvllFg{LPg$R{z zB=IsJd+q4*{Pk#D{%qNszCSW`7NQTz=f1#iP`rONlD{3Bb43xpK-X4d=#_E-;eiQe z3YEiZ;$`zimT{=z*ONf>`mq(rj^(q8I7rpDd!4mnI_Tk}2{tv{aV6I> z{l{Q-Pz!Tfa;u-HJ}YcH-#n~kJFP*neR0|=#LwIVdAHCC2OOHqcit|Adg_%N32rl) z%!8P(T=4EvuE8{F-LtiL&om|vwXH~B( z)I62!mSd~kVv`5a;DN8UzkB@>J#($qqSf<4O^Z~D?9IIpifugjMMUltwg%q&bT-L7RUEHWq#B1gD)QI`9?6w+trb8=m@-?ByNyj zHuvz7Do4wO!Z@^bcNIJB7oglAtt;F-b?|k^2TaWjnV&y*0F}RL@DnzbR z<)9$cuJRYLr@fQAX2-FcP5EkPl}+!!_w-nfY!5tff2nsA-@4nYY;`|o{eb#ws(J3$sph#^RP$f$JAbcx^UFxWuXpFu zyz(b6x34;cqdRNm^3 zq58cLld4^&)9XlE9gVH0G}l>pJuX)Lz3%?i?PD~{#BVR3+JlS0=6y}-c)93vHdWN> z`NuG?R3f-U;m!ou%e{8+s$P2K9r**fafn0QOgVA38P$%j9;NV3wYS|nbZ^E^LW0-5 zrsbKGc=yz~OeBtdM{+Y$*V>h*QgiDeROIez1Rglgj$tv8b57 zPiC@euJWoej#g>x_xhXH_x_UWCE@NzZr!JWW~+6+g-|n(A~gMX%8}>#M)8d|Rm)%Z z#Z+fg;W~$f?MC#xxEVT(Z#wYz?f%VS@v6B#jCi-J#mC_=9?q`a&Hn3I^QDuaZu`1( zqVS!o?gdlI)?yxTt`u5*CVA|3^y+(6)ws~JIu{RTPW>`?zMcfbXp!f z$F-I#+(?ETYr=lan?CyF)0x^LiSRUid^=xKjBvkBxz5d`T&J8`@HPH3p24;M0zO}O z2S0vA{Dk>+$M`U^^O>ViAF($ZatfrrCSvElLB#&mnEqarx~=bP&9d$&Tp!$@?zu|t zfF$p4<+qNcm%HInEa(XgdUvIU+;|CYs_o2HKQzqvx`Ugit6e2M+~GVM_gi2R-*`B6 zc*YN>N6Fs9)4l7oxKn&e5MR*cvAB~f2JQpLueY^vS6N^0dk}x$s9zu62?hQlch%ZR z?^X|MyLU&)!-u{8#aSv>yg2Ig(~ck?aNi32-Xq`3_|CiG@{ZUJrd-eWTHF;xTeY_O@NxXyIS<1c-*L;$4U>%=_j7RUYmV6=`|hCPfEva@wP>q zs=EKHjy#JW&q4Vl|GV?;%iIPIJ?X;CoC~uYd0*D`MgE5Q>iX02Hz+xY=s*?j2_WKa6(GOdcS%~~$oP`B@!O`kiwkt;uhP70;){^jdU zq#tnPYR$n@d-pnfQGYt$*sk4n^9QBi;5sd&qgwH%S3PV#?KWPIYEsnIRWb+ROk(S? 
z)>P6NcjPQL^4_06cyc$|a6MPsZHl?F!=Jn+A?H&ougT!mPF0SqeUbuenv9-hIOSuh`I?QM`*k*YPUm8`9zr40fcet$ z)J=_la2Nf3Z>PT85E_^9X4~O*6+T;OrkU!M>2d!0Na`C*^P8_kW= zDtv^A%h)IsyiL})oY*aENXe}p7LQZ@N8e#Kb*>E(?xeSKULSkPPcOBcH*@5^R9OC^ z=0D3m$&T0U?b4|anBV8ip}`=FblO+jzUS;2AK$Fx@?}-KtessIUDa(6$Gy}~*KUK} zW2Ujy-H+`Z3*63`rbHgyr{|~J-pY-X>pSwzP4|eayfhQ?%Q@@Hc{byFkA-;TBH)YG zk0Kr<+wHfs(%PLTz;z>D*ULt3c)rD?#?N^n&Zd&ulh5yyOV@`kEeNWCS4Y@YE!+f^ zCVLy}y{^}**=#hor8YC?*?bJATCL7Y6y8%gc2%rPp=$b3Ti*~%ne@hOe>c3v@4l3> z`P<0GX7{0=xPLA_RUM~C@t%5qbmppXm$h{Gxqhp1fzj>o`fA@>@%YzHpWrKwNA9XA ze}?;b)9y>~CVjpeec9ag-VYg$ZJnm`sd4ev@!a1K6R3e}U+;L%0(u+P2i53ddF?SA z%se!Q?(9Py+|!-y^XKzmExXHI);Hy6wh`QBv%cP0z+ZY+1H&`l-nGh|GLC9GFP2H$ ztCqIU*>uOJZU1jg?_J)9x0QnDkz@10a4T|z)OA^<;0WI3uIg#gyZ4Z3k9Q+QIkc_d zDrqOC=Gv*toBMDk7uiO9TBKe&8Hnu7h8W+zt7KE9YC_Azi;sI}?(ANsyn7qGdWk3a zq~khQxID{63UcmB^1W_7<@lXg;b9oM==s<{tW`fPb=cxK?PRzO~tj@-n!U+essNVt?vqX-{3I1 z&!!?;_b6OGt!szRfx%6;Rt_I951H5EUgKeZ`zYqm#nQ=&tI_LvcU#9|-+y^`P;h7$ zPXn)(Y+TChapCO7^C*nGxwXS^p}Xtbxk<^fn7taMq zMP9bHqREobj;MFy2~uA1X~&-Q9F?U5?s)A|3T0lB4@cow&kg5!OkR~98ksvb(92&% zj{^SdE%$zSEgm}PbN<&>zkOWuxd*;u-&(fiX&(+>E4}T~E^?YxPL5P@O9*doT)H>I zy;)_xRm8EJYCdXlSX%t8)_hd+QHw+G_}tZu_79SELCqC*)dRP-yA>ZeiprjpU03#8 zOg(zjxkTFE);V{nW&2zE*Swl23S)OGGCnU91NmV$$@^WQaO8^PEA59!x+v`Q z3wyzS?QwH(*gZ=)Y9TI?Z6CITbc#s@b6l?3OS(=Z_9^pn6;u*WuVvvi5YHw~ByGf0 zy8F$Jx{=;03z_`0D->!zNe97kH}cfDe{ppW1FU~ zL^l7QBu#&+1({Og6Y>mwsNUDW4|LYY<#Tk_8N=U)?Mrl~oirbV`Mc5iYKy<#sl?*j zmyZaOtdB3;YsI+lnmIqr95AVtvY%~xPwS_T?k?MSX$ird?a)>;7D#3qSL=@#P32%W z>pMN%X16yS?Z|2RB|c_-_eb!RZm8<|P5KF}aP?)DGH%7#0}r7nFL!F}mn z7jiqne7|t%j~%)%PA~Riug^PAC)su=dN96r+}3jAz1thrJuJ41QFc$;e+q}6;)N!E zkUzWkKgF2J=`OqBF1{8Tf>^un;Vw-mpWs7Jh3$0j-V-f08u?9mBh_|yUaqeEj zf0x;l_{!m(E5Y3-(t%MgF$Qz}`R@6}&afPd3eDDQCK=fG1uwFLlg_g?y!PIn4zG)| zGA%kjQb2k$hDJN31~zu+%%%$Zp7Q=iaO3zripW|siA8`oMo zl3FX5!CDf`$FJ)`T?~kOr+bmDLHN$2rpo2*d|~Hcr@imU@OJU0(p#a%g5$sRq3{aP|`>FI=$jiGRhaP6z} zQ}xx69(xAE7=J8}Yg^T%H`2V>7oKsys+-8&bO*l7w$iP|_V?Bc$KJzQFLre@^7Qy? 
zjeF{Qe;1OE z+1pqa5*B<$E8;RT?MVLXz2A@B zNaNrrp851fn?63qDj`p;&-!3|zIv#HvgLiOzS@R-IfB1;^@!uF7;GKa{p|zwR*nw_ z&*?%#TSsn}c)ELyyy4j$u$FTbX{dPm+pmiDbeyY?Zvv00-rf<{FGlYVTG%>lCp2Z` zNR0DsB>-{6e zEY>aGjYo?)?k+F(cTS31V4B}5h2{IcaJvd>oQVy%vk#xAE;hh+$6)|wg~SL-{Qh3C>$zB25$68kTr zAa-4KPdVCUYxi7hEm6$3xI#P7bmaG4f&H!VK9kNyc2gTCqsL*rUF&Wpv$62yX+6O9 z!}*~nxmVbAx3;;5Q&;BUWb>qY7B7@>qqG%4D?OMtRCzuYSE#Qh#Bm^b6zpdyyE|``F&2iyuC2M-Q2SXYKCP9SXFA z>$U7K){9D^M6q6E>&Y8#?OJ6Dh(SzMrL-%=9$##)9Sr321hemI4tH4h#;YTAm3s)D zmkZ(TvqC;|dfY#$Xji;gyYsWpMd7}6Ea&R3mqsLf(CG(bp|eIIx>J6=;5Ya4kGuY> zo|cb|u6=8a;7>MVsqm2_5fS-H;v{)g2%Ht!r|ZzDavt{vZ>r}nFTuS*YGci>ZM&Fs zeqT~sO?R{9Z^wdStk8(pUaQ5ky4JhNDBWIDL*xXm2g{Co?zpg4PpZk(ddXXedQXnh z-e>7#D|Wq?&qbenf!#!HP;>__tD_5bv)`@>C!w`m+Eu>_Dd)SYE4KMq9Wu=2&Q|N3 zX+LHh>hW`R|FW{Zwd?g}kR)JISD~Ns`f|OxWkZ_T-d1@?W;K;pU_v7riGy*+r{7 zt0AAZ!!c(=K5d8N>d8*pvqE|i4&6kvkkL$>#@tJabiJc40TYniK~gg1~G z-yJGx@i5QaSI>f%$Bsh#X}yq5?Pfj!4AaJc+IIW; z6jl9&o&Osw&$`n``IT9bdjsm<>!knkuE>W4{P90Rv$fHBNvZWuhIDG1Ax(v-s$Xw1 z)L+J`P>&O7&oCY2@51pwDv_uTE^mgTa-=E`9ktxx z&QovZ((!hPga7$hNM1Yiot(wLjE3IAIDxu6cabeQF*kTQT=>veDV>uBG~-FjjB* zR>^Vi&9*noNfF$5m#zEENtX}T!b7C_uSB1b%i}?G^;7QR@t!s;uWeSw+~z=DFI}?X z!0y#3Q||6@M=4iO7=&YIFT!mpt2oNxruQMXsWF3t$70~fA8LljT28K{;^Wb1lixaY z`*6$PDAsE?axW)Z9-%cSZFiXj0#`40JG+HV{u5-FIuVuC4;z?gzC01-nP1o_Pr#OE zh&64sfvK!H2vz=Pz}A%4;_~HJ^IE|0UE7cY*HZW7lR%Zp7#WuD6#3p{KtA1G@XRl4 zFWyks>cyEF#ovp%H>J}nDYcEgR|^OEj6daljkAh>>*gexszzFhuM>Dq`A!=3*g#$ncd(R=mQ_UO(1& z@?RZn#SWVr`P~{XiGddvHngJb^ME+@q;v_e-%BJ%a_B6Fv^#WNdR2}k)}>eFSWde> zn&nuyj~{jERXJvl*Y%!UiM`Y|dk^PnF}|0H$;C>t8_AV+&V5SOv*CXjbdFf4$+L@l zsWdlUyZge1`0_mF9mU4OeQ-M_KO8*|`}J=NvPtP~ema8uGxV=vP5}Y=L$3Rk(>%E9$d*kuSS04sL>5a>7CK47#SH9hQ zuJ)v^<-OthS$j`S?_cb6tXQ6$(a6e8&Zjr zAa`<#B6yg>TYU*!$7*zH#*ZJ#v*CjF8ox8Wi^#t+xhKEx4k|-uo@Y4^LjPDnl{}nN z6eJJsnzN$JeY~F(4|dzdM#q^qnmuXmTLVdfi*ctsYIHkJo|$$%kY9!V8sUJ~nI8?t znv?wX_T1?y=@R~aJO7=-XE92HDhE4(HnMYBP8TP%vjZ=3~nL0Lm^YyR#KmqV)T79PXq2yQJQJ<-e+Q(<| zUilxaKC^e4f4usryx_q$ndLXCLVN@#^D7!fFqaFt8HL9nR?%U<%<> z3qsbN(@kqL*>QTr)Z^JQ{^~2iKc>WOuTEKi}r+okRQXrd} z&k773+-@^=Uz{Z4V_R(Z1vBcyUHD!6v8(ya+SGjh0rgqHd}i%xK65rTpWVdG1NQ~X ziVU%Tcuw{ZG~!vo!@e!}1Vg^D|Kb)fp!H{0z<{1uJWj*=vjqExv#SC96AH9|0nM$5 z0lj<$vZ(F}A@&ZE&lB&nqOimm;>88L&)fp$v;OQVSU)PGVi)RBdHlgY5*X;;jKR6Q zKfF!N=jE#pzkueB*Q{&lKyq2jx3)s%n^z;AZx%=|b$ELP33r{mr>IsTWJU_e)2atl`XLzIMjXM+; zFrD>hR{?$WNS`I1dkHo*oqs@m7BHQssoVci^jW(4*wl1(i<0V5NZcXJ9dhojh#ur~ zU=;-c9dquzbPoErYl7d5`}PKKN2j5mSAFanwjtWoeEtFTS-^Z2SH<(ZZ2RXE7WO~? 
z-o;zk7!UlBKH?4qPW8-t^kt%?|EB$;KWoSR*C#yl|0)m^ME^5&^Gm1Lo_7IT74P%1 z5$F?o@jqXI!1n3E>TF!RaO1+4itw$4?!BA1)}dYBxjHz_QEn>(`V*4%e-+38py|7( zCdT>iD$t6U&`aOKe?me2=PQsAy&*E9heQ62N%W5_afrK{(*7I(g!BBua%@RKxu^pA;={k!U8 zR|EPJa`itRewMF3CZ73ks?RjF^FL`mFa74lC(P7;zWRtB5eHyK?BUzJFGf)3jy*Jy zbbnWUR?B!cZ0JdTyk4?ij-tWCn zk{-G_Rq@~mla+8PHt+eniG=#Q__HF$v!OZ5%lq>Qr}Q7ceEo<5EfD>zlXKHJ|CZKF zA8+n0o9LIx9F~$cb9oB%39;~>uRw&jRt!a3s7_q}dC7Z0dV{GoMu!;m_oWb&meCgnSU0F zf5z$A+_z8lXIEhRzVzdZPe@AtRUhCBftWW3jgvR=wUS?bWA~*$YfJU9ssCA2S>*V6 z3qDjh(ZeGGXc5jG6M5%%)yJ;p^C$G2e?0p9fch-pd)8S_R{(vMy?GZtp>X`;)d#mu zk#J1E^aaKeUS>VWy(1Ck9rf+7nIOoQ#{{OuOE9rneh+;n&#r*_EZg=Gbly(7WB;lT z6olx=yX_;G7(2SI&W`3KZze$TUj0NR!CJTD)8?CN>uRM;L9Q*3nVB=^A<`wzzxP@dw!x+Ez3j?1_!A1W0N>S0J0_e1@T;Fi3ew4%;~= zk0>CJCN1RqS%7X(SQWa#<*ScP&F3Fbp9MV6tHL(8eD$%Z`OL8hdS;vw@*Lv8isQk6 z!8m1*4l?t4o^{5+Id@-nWg8R_NN!8>`3KZz0pr=OY=eSb*#=pO3|a z%@?>%@z1>IvjE$mU{|(5!LDqBKcGGfc%JRbHYf--HJw=r9LtJR;)w4D=p!KIvs2~} z^E@~DEMPjX4BOzcV-D@hHn?>4S-|tWGHioOS0B5Y&Il6quuj-I#PQ5YB-OK%hb1&W z`Yd2NuL{@T(gRlQ$~DL>U40f{8WdKAX>j@KV^gm)hd>FB`0n>1DOSbB{5}gX4GQ|R zt8nVR?8VEjOoKn5J`0%6E5kInboH^R*O^s$8GfHPuQMx<2%v?$&I|Ai3M<1ixODZg zspa&39tTPnaaq+S%%OJ-R2eXr~TEOMY@d9Ffkm?D(=Xo=2EWj}+tP02A(oJW( zat!`}`Yhme*4YVdi9YuBI^#OPl-F6rjrY0zJ_`^G3i`9QR3E#V&XYrnWvb5tUT2+~ z(3a|BTho~#XI?ndBk04i$Suz~MeM)kN1p{uXPukSj_PAqenD1Uw)!lYr z?snxDL_Q7Tb;g!!V*BV`XC5J}bED4!{DOl1tS!;Uwx;va)n@^(^Q!O*E`8$;>KS_m zt1`O!C_K*HVf%3K0*OwQB#xRl-e&=JK|z1kmgr+s)0tbo`Yd2NuL`%|($UAR+=7Pf z!wQm&upd^@Z68+TaQ$M=96t+C3koYnEx5cSEq0|A`~me@!0WtX%z{f)AG?~)3hucB zc9_XMFGi6NZO9=8Xl~nQ0cJr#f7Xt+k6oDs+2yOx0$%4;VHRAz?PFWhS=7;oh4&CA z?j*?u?znT{_nO=GS-^DGISK8EK6d35 z;}6hBkv-y^JFf*;1%;Ji6#I`bZxl@S{0krf4bcRdQIsNZ+0-Wwq@8Aicn+dj5s6l9gjO$+R~fIh_iL+)KD^ZR`kU=$Qrg;8+n zJ1^TZ3NBxL7BHPxg;8+n>SI?%LGZZi?q)_m^J2kgOrj0h`Omy?TsFV$qd#j$+sCeq zf3z*KU!YH_W^s%Yw4E0fnwZqEb(jf=$0?ywN>kNLKIsHBhFbazLv$jMZn|hsp zKz$bQIsj*)8AZD=6BPSMWy^XaOVIuFQg>VAHr~ z+%V-3ZEW1S5E$8mWN|qEnwwQw)Y$>&#UEmR+tC8DE3@G8)n@@_LD8|{+OIVDazEHPj8}L ztTZ|`qo(6yr}01XF5`EmcM*A@+v*Myy73u^yql(qY?f9knFw>zi|oq0D;GOLn9 zugU%kEbo&a7ubI-z%D4-m0eI2ZE8OMhypF(gSIQbplDZqL0y4}1tiM42Z$tH#pPj6 zV&?Y%Ex<4++Ld8Yv@65lk0{UrK4`mg41#sfrZ&*qDHvhenK}gnW76EGU<4Tb@B28z9Rn#~_*>H^3BlqDkH{kSx!818f1FLD8-}gQ9Fx^O;?~ z`Yhmqwkyw|Xjh&=Rz<=w+&C3*l1ez16`aSz`vXg8ZuD8ebhazcpeWnbbY8ytEZ}{% zE6<>4SDrzRGog>LIQlHWGbpYK&*0K85ZaYz@CVdq0nhWQ@C+_ree7yFtE^6QG1+-> zEP@VU0xh!hT7YLzv@6e`Xj`7a<*UyErn6mn21UiDrZe_hqVD%WvQ@y2fJ>%ul$;U!+T%s)HG+_2<g<$}{)_ z>a&2?*{(c;B670Nx{vh7$31f#&e-8+mcg-1Y@dnWN5Ki$ytdB*JcFWLc?Lz*uBP+y z)n@^(vt4-xC2~Vz#%;(6`doB7qqo}?iE(M9U&SE5()s4dC5W418`mE$-hcKNhZ=dx z>5XWu|7fE|x%gH=s8jBeOBPP)gRyUlvTh=!?`2L}^7imsWx+2jbmdhF$>iW1-D-Oo(R&k7MPw=t2l6ad)i=9 zaj}j64vU?N@x%~SwoqHIh@o$3RlH6A@1RkuwJhW}T)V>F@KAl)9~I*)gV5x~>h>iT7O(br)mSOh^CNMYeQmOqHSoO&*}B!-VUzoIK3fgtfc5o!%I9+UbqsijmWM=k(s={*Tj}bb3=x?}O8uc6u{TZ`SF}IlZMp zvE0-~lVK>~|3<$I`TLlZ8;!}lIlbdf1>;%n4$y_#ph&ui@hPD{7<{xI{l+gaVoq3wy2F_h>ICJa5m5cSX_Oe{K%;X-vz?|+=$|8aUTN`GTOCvDQ+T7+_>Ef{rUcF+<- z9o|2e(zw;qP*tT^tC0bl{Htq*)@n6+!v-1Ec152tuuJOyR&nvEffiL!`($)$<3>wY zj>)^7g*Zghw~KmkVe?s!U}@S+kB<3k}Q&%dds@woou)NiK6`dcS;j0VMu)-Db( zXjKef6(e_T%t<_ic{DzvY^4F@&^wkhMEI77{@|gO@jw@C8XCX-U*q5RK?{wE=1#t) zO80*-5|ht`sDDfU>dNB*0Vhn#-$ZvygKksnl*qR=)QlnfXhpf(``hReZj4o-(-S2p zquA2gq;2F&YjtZt(-wek3gre`twCO^L4DfAvW^lJ4f>DvlaJ6Cd3<2QvHty`3Hpcj z@nf+(9-(v_+Juj}2)*n<|0W+R>DQ$4vQ|NBpsn~|Kb}Q<+r?pdj9#OP_XfZ9rbE8R zf&L}pv3juu_152Rh@lg2U+82Uea2VeGaBWaMm(RPyuOd2^G!g`7s6-#-olVkNyw-q zG+jyP*lnae89yi9zLI;(>0_%p3EA*Pu9Bw=ny{7Hjuc~!l02!(!s+hQk<@$Xfy&h8sGH1I2ooQ<_JrM zF_F*`y%No+M4PSzh29=+808v9IRnimt5D;koJNMJW4yd;Jk&KF>P}bQeJk|waHa8| 
z23e=N#vqN2=f}qL#JlZZ+(qCDQSI`;?z^x zO}|FbOX`Wrrp2mLPd#N4O-fo{g{jbU#7q}uC$pp{|1il}r=HQlEYs`NZ|a-moKwGj zZI(^STKfg!HvOKYsqAE~^y_aXx!}}qG?`_3z4}EJlU#J_cMr@my{sNoZjwvj?zJcZ zJDD*(1k_YkcIt6`X4#}(s&9%@kHRsPoy?jZUSX1}PQ|Jntg6?mABIksg>LI7TxOYG zRzJEh$yulFk2cFDWv%bS$Dw_G8p3`+g7J-UlZj)^NQI9N-x#Z7i>d0# zhsH8iy#%EMtCO~xN>4sEmckEY-(V>O;U?3^vQ!^sl%`%d`RG`k?#lF11>j^w5^geu zEQKXVM$b}c!c8WTMKjIN1=wIIMBxpPq%Z}U%2^6kcvhbrR!`SF-ENdbXoB^Hlc{9& z8^&rvdULv0&OM#AH%q4|7m`9d^9l@AO3U`odkEMV|Fy@j%9;gCKL62a} zrGCg^YB%PR0w2McOWi4HDorjwST$Cd%q2&m57K+#s)TiRCUc2Is+M1n+!_@6h?BYG zDD*-8CJ=vFtC-9sN1+dN8>w2Yr6+TV!+q;7V8&4BBTnWL?r>{qm@*Xlh=wskp%3!D za1{E8ley$5^ubpKej)1%Cv%BAxYl3bVjP7&;$$wt(P%A=xunoXH0F{*A7qQ*DD)AH zxunns-#bnUEH9Ldxunnsi3C7kYb|ZeC51kK430t{$(TzDeQ?Jd2@Ebu%q4|BXaz^1k7Udxg+92p z$WiDc8FNXY4?2Q0JC>Hn##~b9gF9^;g+8(|mlXP7JU9w{WMeKV^udKEolp#ld%}8a)(O)xA8FI3rGR?dpM}N&wWysNA zGg=vP^w$hl6FI(15XjQ~1s?EbxH8J4zh=ZTH3_1F1#w|mR{+eNHBFC4+n~}?qqrYb0GUVv58M_QQ`fG+RLyrEM(aVsdzh>|< za z3_1F1hA~5q{+f}@kfXn5AT#9XuNlh>Ir?jcvWXmZ@MbhK{H!VJ6; zS@d@XUWph>VFq4_@Vy8#@JhrOlQn5-yQ=F0VM!Ne;FXB67iQoUp~V(v;FX9u5oX|( z2p(i%240DnD`5s+5z<`bU!dEGITU8#l?b1OFaxhd%&jm3uL!lJFaxhd%(*ZFuS9Se z3N!FZgf8H&k(lq6K3F*2;C88;1!{#LYNHwUg(rC1FuBz5eYN! zN`$TnGw@2p{#%%VSA@Mqn1NR!_SC`*ydq>B!VJ73GBffGxQ3X!pXNsmscR4^?<~AR?pJXZ zUf~-eI!)Ju{?5WHd|O0E>GJ6BEWE-uM|6%ZkN(cWD}0MYhp6(fuO>`C?_2u`&fpuJ z5V%<7=~58#zq9ZPm^8VbYAuic&cZ8Tl{gEpaQ$g=Q^;Bm`a27+aNlJT+ixw8{?5WH zTyPL);T14%5@u*Xq@aZ^2IHG_mEPa}s)N92@E>VtusC zPa;gZgLd)l$dMHEMH9|yu{G3;htU*>O${QSFWsQqLARVh0LSFW@V$F_2-f6q@LiUP zmtGNkfTGh;xNoU+AvS&Rp}$7q#0XI|_{IB;rKtw}HPEUK2ESAsB+c&Rk5fW=YtdQ# zfA2Dan&gkO^Dg7aCAeuBOdw*Zm}K}tw-Bex5g7?0x5#u2oR$%8TXIv%B4c4aS!Dt& z9!f?yw#?7MsH2{RK}S6+NW==TJS&mdL9-0`=P;dC8U3uNk~AMs(5_M64t8v)BMr?E@X5 zb$%s*AuE*mSr|Q59g?uwu+F6fgN>>WMj2I~glV$AL*fXMo8K#mB-?^TriV3{WwJn6 z5v($jC&nU^>2Z-2>>@1X6qQ(u=4Ta^(s4;ejIgzws(XY@&&mukE?LUqlcDrLX2=fI z{9eeY_kz8a(t8;Pa@MxUtW2y=b6Z%MkkFcCgdLZXNtE8poJxJakWt^Sj7=-`UTj#c z^C4q%MVFH(tdm83cVlid3@_?gWLKCWqx%b(q{dm2=`ocV61H!CudJW%nq>-0=^9Zh zYCaTw_h5Neq;yDD^<9s-oT^*;78yX%+CEhw`LRvq6f8&9ZxAxFQ!2kCTZP!Tb*3749tG&URm`3 zG78%PVbr$(8A-@*Y75YhvR_n2kD0KPqp@9ObdQp`90C!nI)tl%)cmR-rKHQ@!?d;! 
zO}6$IoGKJ{svNG`QO~N>7^u)Ssy<#JSq{zpfVxm)1;-#=j$SLsCrz~l4kN4Ht8fZY zvaaJ3S$gJNWWH89BZNVC9!3`c3oE5K#jP4uOOAD@{9JQw4NKql) z8}nGf^tOBpJR{=+ww9AwdYly)(AIK_9#Ld|hoXC%tum`#@vua#`xzp$eyfmCb}?j> zU5q19P^DPf2O0Gagq9I5KvVl5v*HGit$}qtf(!^~eHJkq)^Wj-OzAz~)N1?ikkQX# zyJh(XQBL<9ncFM~R{I6Vyp(+fnPRoqh|Fpq@K~jZ7ts6;4thmBt3vTD-Nb7l(mX?1bq^2w2VSgQ`(|!{M40bKqF9uHD#F@DvUKO{wn}GV1$93QBTHWNr)6 zQd09F>!&5=axyv1vpfrd4;s1}(@VI3AnaBea6Dsfmml1D=y9oJ1T4HKyN{Afxb^SM{7L<~FMmwRYnSpPXWv%0WglCYWW2^S0s; zJYUpzDR8XSj>WX;@yX`;X6O*6*J{VYc`93dJVZv}5YD`*br3Sj<`SSUR6l@2R{X;m zG&S~+QF;ouErn~4Q8p&diYbf$g9$w!iXMq<9xH{m!vt)Ut=~Ch6m|+Ky;eYG-IrlQ zhh3s|&Q&OuMTRp`aw=zPA7oY^EswL%8P7`evsgr}`iFB)%Ff1ii<)1u-J;q83x=u> z0bkZ{5a-#{SiuEK*+U}yVOHG$4LOB5IAgT#^LV(qtUgP~^r#2(_(9jKV-HKu>H`3T z(f9`lv--?Yj@tWSp{4wokm->?=C;VR?!uExk3IbSR{bMpA~p8#Z&<%KAQP<{D&=qI zk$8jBNnA~%a2H->Y7NDaly&b6g9{&sMR$S3R=*=JL3geCf^EFj$B07>dY#6Oj&2_z zPqV%k+e_=WB|&$o_Tfvijwx0$x_t<`qTVq>cd?XF`)LUQA6EUtDHF90!ox$^TrjyP z+mX1^sQO@T&@x(gC2aGo`hu^69(y>JsqvGk^AuRS`qdqij$=zq_f3%~Uj{5?3ZHRq zVD*FG#73a|rc%BZgf}vj-GmG`l%0bulhr4KXH{w~gFD-LMvB!^KeadY7iS4n`(%!_ zp1VM+^vF!}vv8kU=L6>k)YxMcq;OKk*4{ejGPZeEeFh9s>#8gWluZNI7iBxbl&AKq z*zHq$dl`Ug{XT$n^w`hX!^${Vqxyk0-fFMOFb}MI5*Y(yT|WS^^w`UkzZ2oA`b9GH zd|;bL)rWIas(mU$w-35YwNI7k_Te1DTAu=Enzg?Qaei5FO<`yo7$-Ya`xKaj)H|TN zR$aqU42g0yje!E)wSN0p@u>X~aSK>=4c-lEp8`0b_Ji2@QF|erYf!qY2>K-#bNgoK zE~brc-wfSV5F|zQ7rIOJ7p_w4`l~>9ss6%;Ow|Y7rRtkuiz?7v>w1LkFJ*JV@}YG@ z;V7F6yvdaA!n;P-6d{n>-P&cHKn@>VA9%Vb%yTZ6;%7w8M=!tJJmjT|ETt< zIIp#~58MY-`^ffl{(+&aauvQPleH7ZVL=_{g%lzzCb2g+Xopm(E2QnO{w}=JzCdX z9~`ol{=)l5?dc%XZ*ZBP#nm-Se^HJ)$Ht*EwWouOij#qi+S9@7h~8QIM#oSQ?<75b zIJ=|ThmbLUoX`@oLE!s z0~Ax^hw~X~{1D|qw-0ez*8buZbo)4J{Se5R4b@-lvZ?xDv03MX#WA~eep!K9KTwKV zKOm#l4@6N=>jz}i`az;C==Nc|MYT_&)(-?vQtJl>k6J$ zgG|3cWzsbk;V%{&6Xht|N5E2R^?O66N0yuGgP+S{BatWoN;hyqZ{4$_9Cmt^?+xAq ztM3r*PNYqy%28)eC`ZLE!0S!L1`toUbsq&82G#lw=&n^K5jA4nvt!lQY5Yv%g5AE= zpM#AlBTI-(v5=nZsCp zEs&wZ*71X1o*Gkx9a8n-6&Cv$C;sphTI++cOw|X2lBy3wPt`}COA44?s(lDvpxTE~ zrrJkLM{E0VSWdMMhr(3*$XSuKeLOe02MuyHtI6F;yS@wp4xaa9GC= z-WnMAGF!(XGK63j0WpF zurZ_FtH@UWGk_hdise}WpZ?Ud4kSKow6uZ4xRBsYH2zl)+b4Gjy!Txs48-a+$-?E0 jcB)+`fDJz)n%^-TAwQoU%a4nKuooSUt*ub}!twtBXt_#* diff --git a/docs/development/databasetabledesc.rst b/docs/development/databasetabledesc.rst index 6b16809371..de7317595a 100644 --- a/docs/development/databasetabledesc.rst +++ b/docs/development/databasetabledesc.rst @@ -428,9 +428,6 @@ priorityjobs processors The registration list for currently active processors. -server_status - Contains summary statistics on the various processor servers. 
- *UI management tables* diff --git a/docs/development/databasetablesbysource.rst b/docs/development/databasetablesbysource.rst index a6fd595dd3..bd342a41b7 100644 --- a/docs/development/databasetablesbysource.rst +++ b/docs/development/databasetablesbysource.rst @@ -117,7 +117,6 @@ These tables are used by various parts of the application to do other things tha * processor management tables * processors - * server_status * transform_rules * UI management tables diff --git a/socorro/cron/crontabber_app.py b/socorro/cron/crontabber_app.py index 44c885dc35..2a6900967d 100755 --- a/socorro/cron/crontabber_app.py +++ b/socorro/cron/crontabber_app.py @@ -28,7 +28,6 @@ socorro.cron.jobs.matviews.ExploitabilityCronApp|1d|05:00 socorro.cron.jobs.matviews.CrashAduByBuildSignatureCronApp|1d|07:30 socorro.cron.jobs.ftpscraper.FTPScraperCronApp|1h - socorro.cron.jobs.serverstatus.ServerStatusCronApp|5m socorro.cron.jobs.reprocessingjobs.ReprocessingJobsApp|5m socorro.cron.jobs.matviews.SignatureSummaryProductsCronApp|1d|05:00 socorro.cron.jobs.matviews.SignatureSummaryInstallationsCronApp|1d|05:00 diff --git a/socorro/cron/jobs/serverstatus.py b/socorro/cron/jobs/serverstatus.py deleted file mode 100644 index 2a0a92c08a..0000000000 --- a/socorro/cron/jobs/serverstatus.py +++ /dev/null @@ -1,124 +0,0 @@ -#!/usr/bin/python -# This Source Code Form is subject to the terms of the Mozilla Public -# License, v. 2.0. If a copy of the MPL was not distributed with this -# file, You can obtain one at http://mozilla.org/MPL/2.0/. - -""" -This job populates the server_status table for RabbitMQ and processors. - -The following fields are updated in server_status table: - id - primary key - date_recently_completed - timestamp for job most recently processed in jobs - table - date_oldest_job_queued - (INACCURATE until we upgrade RabbitMQ) timestamp - for the oldest job which is incomplete - avg_process_sec - Average number of seconds (float) for jobs completed - since last run or 0.0 in edge case where no jobs have been processed - avg_wait_sec- Average number of seconds (float) for jobs completed since - last run - or 0.0 in edge case where no jobs have been processed - waiting_job_count - Number of jobs in queue, not assigned to a processor - date_created - timestamp for this record being udpated -""" - -import datetime - -from configman import Namespace - -from socorrolib.lib.datetimeutil import utc_now -from crontabber.base import BaseCronApp -from crontabber.mixins import ( - with_postgres_transactions, - with_single_postgres_transaction -) -from socorro.cron.mixins import ( - with_rabbitmq_transactions -) - -_server_stats_sql = """ - INSERT INTO server_status ( - date_recently_completed, - date_oldest_job_queued, - avg_process_sec, - avg_wait_sec, - waiting_job_count, - date_created - ) - SELECT - ( SELECT MAX(r.completed_datetime) FROM %(table)s r ) - AS date_recently_completed, - - Null - AS date_oldest_job_queued, -- Need RabbitMQ upgrade to get this info - - ( - SELECT COALESCE ( - EXTRACT ( - EPOCH FROM avg(r.completed_datetime - r.started_datetime) - ), - 0 - ) - FROM %(table)s r - WHERE r.completed_datetime > %%(start_time)s - ) - AS avg_process_sec, - - ( - SELECT COALESCE ( - EXTRACT ( - EPOCH FROM avg(r.completed_datetime - r.date_processed) - ), - 0 - ) - FROM %(table)s r - WHERE r.completed_datetime > %%(start_time)s - ) - AS avg_wait_sec, - - %(count)s - AS waiting_job_count, -- From RabbitMQ - - CURRENT_TIMESTAMP AS date_created - """ - - -@with_postgres_transactions() 
-@with_single_postgres_transaction() -@with_rabbitmq_transactions() -class ServerStatusCronApp(BaseCronApp): - app_name = 'server-status' - app_description = ( - "Connects to the message queue and investigates " - "the recent reports and processor activity in the database" - ) - app_version = '0.1' - - required_config = Namespace() - required_config.add_option( - 'processing_interval_seconds', - default=5 * 60, - doc='How often we process reports (in seconds)' - ) - - def _report_partition(self): - now = utc_now() - previous_monday = now - datetime.timedelta(now.weekday()) - reports_partition = 'reports_' + previous_monday.strftime('%Y%m%d') - return reports_partition - - def run(self, connection): - message_count = self.queuing_transaction_executor( - lambda conn: int(conn.queue_status_standard.method.message_count) - ) - - start_time = datetime.datetime.utcnow() - start_time -= datetime.timedelta( - seconds=self.config.processing_interval_seconds - ) - - query = _server_stats_sql % { - 'table': self._report_partition(), - 'count': message_count - } - cursor = connection.cursor() - cursor.execute(query, {'start_time': start_time}) diff --git a/socorro/cron/serverstatus.py b/socorro/cron/serverstatus.py deleted file mode 100755 index 3201b56d0e..0000000000 --- a/socorro/cron/serverstatus.py +++ /dev/null @@ -1,152 +0,0 @@ -#!/usr/bin/python -# This Source Code Form is subject to the terms of the Mozilla Public -# License, v. 2.0. If a copy of the MPL was not distributed with this -# file, You can obtain one at http://mozilla.org/MPL/2.0/. - - -""" -This script is what populates the aggregate server_status table for jobs and processors. - -It provides up to date reports on the status of Socorro servers - -The following fields are updated in server_status table: - id - primary key - date_recently_completed - timestamp for job most recently processed in jobs table - date_oldest_job_queued - timestamp for the oldest job which is incomplete - avg_process_sec - Average number of seconds (float) for jobs completed since last run - or 0.0 in edge case where no jobs have been processed - avg_wait_sec- Average number of seconds (float) for jobs completed since last run - or 0.0 in edge case where no jobs have been processed - waiting_job_count - Number of jobs incomplete in queue - processors_count - Number of processors running to process jobs - date_created - timestamp for this record being udpated -""" -import time -import datetime - -import psycopg2 -import psycopg2.extras - -import socorrolib.lib.util -from socorrolib.lib.datetimeutil import utc_now - -def update(configContext, logger): - now = utc_now() - previous_monday = now - datetime.timedelta(now.weekday()) - reports_partition = 'reports_%4d%02d%02d' % ( - previous_monday.year, - previous_monday.month, - previous_monday.day, - ) - serverStatsSql = """ /* serverstatus.serverStatsSql */ - INSERT INTO server_status ( - date_recently_completed, - date_oldest_job_queued, - avg_process_sec, - avg_wait_sec, - waiting_job_count, - processors_count, - date_created - ) - SELECT - - ( - SELECT - MAX(r.completed_datetime) - FROM %s r - ) - AS date_recently_completed, - - ( - SELECT - jobs.queueddatetime - FROM jobs - WHERE jobs.completeddatetime IS NULL - ORDER BY jobs.queueddatetime LIMIT 1 - ) - AS date_oldest_job_queued, - - ( - SELECT COALESCE ( - EXTRACT ( - EPOCH FROM avg(r.completed_datetime - r.started_datetime) - ), - 0 - ) - FROM %s r - WHERE r.completed_datetime > %%s - ) - AS avg_process_sec , - - ( - SELECT COALESCE ( - EXTRACT ( - EPOCH 
FROM avg(r.completed_datetime - r.date_processed) - ), - 0 - ) - FROM %s r - WHERE r.completed_datetime > %%s - ) - AS avg_wait_sec, - - ( - SELECT - COUNT(jobs.id) - FROM jobs WHERE jobs.completeddatetime IS NULL - ) - AS waiting_job_count, - - ( - SELECT - count(processors.id) - FROM processors - ) - AS processors_count, - - CURRENT_TIMESTAMP AS date_created; - """ % (reports_partition, reports_partition, reports_partition) - - serverStatsLastUpdSql = """ /* serverstatus.serverStatsLastUpdSql */ - SELECT - id, - date_recently_completed, - date_oldest_job_queued, - avg_process_sec, - avg_wait_sec, - waiting_job_count, - processors_count, - date_created - FROM server_status - ORDER BY date_created DESC - LIMIT 1; -""" - - try: - databaseDSN = "host=%(databaseHost)s dbname=%(databaseName)s user=%(databaseUserName)s password=%(databasePassword)s" % configContext - conn = psycopg2.connect(databaseDSN) - cur = conn.cursor(cursor_factory=psycopg2.extras.DictCursor) - except: - socorrolib.lib.util.reportExceptionAndAbort(logger) - - startTime = datetime.datetime.now() - startTime -= configContext.processingInterval - timeInserting = 0 - if configContext.debug: - logger.debug("Creating stats from now back until %s" % startTime) - try: - before = time.time() - cur.execute(serverStatsSql, (startTime, startTime)) - timeInserting = time.time() - before; - cur.execute(serverStatsLastUpdSql) - row = cur.fetchone() - conn.commit() - except: - socorrolib.lib.util.reportExceptionAndAbort(logger) - - if row: - logger.info("Server Status id=%d was updated at %s -- recent=%s, oldest=%s, avg_proc=%s, avg_wait=%s, waiting=%s, procs=%s -- in %s seconds" % (row['id'], row['date_created'], row['date_recently_completed'], row['date_oldest_job_queued'], row['avg_process_sec'], row['avg_wait_sec'], row['waiting_job_count'], row['processors_count'], timeInserting)) - else: - msg = "Unable to read from server_status table after attempting to insert a new record" - logger.warn(msg) - raise Exception(msg) diff --git a/socorro/external/postgresql/models.py b/socorro/external/postgresql/models.py index 6388b90dca..c1df73bbc2 100644 --- a/socorro/external/postgresql/models.py +++ b/socorro/external/postgresql/models.py @@ -1057,24 +1057,6 @@ class ReprocessingJob(DeclarativeBase): __mapper_args__ = {"primary_key": (uuid)} -class ServerStatu(DeclarativeBase): - __tablename__ = 'server_status' - - #column definitions - avg_process_sec = Column(u'avg_process_sec', REAL()) - avg_wait_sec = Column(u'avg_wait_sec', REAL()) - date_created = Column(u'date_created', TIMESTAMP(timezone=True), nullable=False) - date_oldest_job_queued = Column(u'date_oldest_job_queued', TIMESTAMP(timezone=True)) - date_recently_completed = Column(u'date_recently_completed', TIMESTAMP(timezone=True)) - id = Column(u'id', INTEGER(), primary_key=True, nullable=False) - processors_count = Column(u'processors_count', INTEGER(), nullable=True) - waiting_job_count = Column(u'waiting_job_count', INTEGER(), nullable=False) - - __table_args__ = ( - Index('idx_server_status_date', date_created, id), - ) - - class Session(DeclarativeBase): __tablename__ = 'sessions' diff --git a/socorro/external/postgresql/raw_sql/views/current_server_status_view.sql b/socorro/external/postgresql/raw_sql/views/current_server_status_view.sql deleted file mode 100644 index 30433c5943..0000000000 --- a/socorro/external/postgresql/raw_sql/views/current_server_status_view.sql +++ /dev/null @@ -1,3 +0,0 @@ -CREATE VIEW current_server_status AS - SELECT 
server_status.date_recently_completed, server_status.date_oldest_job_queued, date_part('epoch'::text, (server_status.date_created - server_status.date_oldest_job_queued)) AS oldest_job_age, server_status.avg_process_sec, server_status.avg_wait_sec, server_status.waiting_job_count, server_status.processors_count, server_status.date_created FROM server_status ORDER BY server_status.date_created DESC LIMIT 1 -; diff --git a/socorro/unittest/cron/jobs/test_serverstatus.py b/socorro/unittest/cron/jobs/test_serverstatus.py deleted file mode 100644 index 07b6de748a..0000000000 --- a/socorro/unittest/cron/jobs/test_serverstatus.py +++ /dev/null @@ -1,105 +0,0 @@ -# This Source Code Form is subject to the terms of the Mozilla Public -# License, v. 2.0. If a copy of the MPL was not distributed with this -# file, You can obtain one at http://mozilla.org/MPL/2.0/. - -from mock import Mock, MagicMock -from nose.tools import eq_ - -from crontabber.app import CronTabber - -from socorro.unittest.cron.jobs.base import IntegrationTestBase - -from socorro.unittest.cron.setup_configman import ( - get_config_manager_for_crontabber, -) - - -#============================================================================== -class IntegrationTestServerStatus(IntegrationTestBase): - - def _clear_tables(self): - self.conn.cursor().execute(""" - TRUNCATE - server_status, - report_partition_info, - server_status, - release_channels, - reports - CASCADE - """) - - def setUp(self): - super(IntegrationTestServerStatus, self).setUp() - self._clear_tables() - - def tearDown(self): - """ - The reason why this is all necessary, including the commit, is that - we're testing a multi-process tool, crontabber. - The changes made to the database happen in a transaction - that crontabber doesn't have visibility into. 
- - TODO drop reports partitions, not just the data - - """ - self._clear_tables() - self.conn.commit() - super(IntegrationTestServerStatus, self).tearDown() - - def _setup_config_manager(self): - queue_mock = Mock() - queue_mock.return_value.return_value = MagicMock() - queue_mock.return_value.return_value.queue_status_standard \ - .method.message_count = 1 - - return get_config_manager_for_crontabber( - jobs='socorro.cron.jobs.serverstatus.ServerStatusCronApp|5m', - overrides={ - 'crontabber.class-ServerStatusCronApp.queuing.queuing_class': - queue_mock - } - ) - - def test_server_status(self): - """ Simple test of status monitor """ - config_manager = self._setup_config_manager() - - cursor = self.conn.cursor() - - # Create partitions to support the status query - # Load report_partition_info data - cursor.execute(""" - INSERT into report_partition_info - (table_name, build_order, keys, indexes, - fkeys, partition_column, timetype) - VALUES - ('reports', '1', '{id,uuid}', - '{date_processed,hangid,"product,version",reason,signature,url}', - '{}', 'date_processed', 'TIMESTAMPTZ') - """) - cursor.execute('SELECT weekly_report_partitions()') - - # We have to do this here to accommodate separate crontabber processes - self.conn.commit() - - with config_manager.context() as config: - tab = CronTabber(config) - tab.run_all() - cursor.execute('select count(*) from server_status') - - res_expected = 1 - res, = cursor.fetchone() - eq_(res, res_expected) - - cursor.execute("""select - date_recently_completed - , date_oldest_job_queued -- is NULL until we upgrade Rabbit - , avg_process_sec - , waiting_job_count -- should be 1 - -- , date_created -- leaving timestamp verification out - from server_status - """) - - res_expected = (None, None, 0.0, 1) - res = cursor.fetchone() - eq_(res, res_expected) diff --git a/socorro/unittest/external/postgresql/test_server_status.py b/socorro/unittest/external/postgresql/test_server_status.py index 15a4175a09..9fd62ee82c 100644 --- a/socorro/unittest/external/postgresql/test_server_status.py +++ b/socorro/unittest/external/postgresql/test_server_status.py @@ -52,7 +52,7 @@ def tearDown(self): os.remove(os.path.join(self.basedir, 'breakpad_revision.txt')) cursor = self.connection.cursor() - cursor.execute("TRUNCATE server_status, alembic_version CASCADE;") + cursor.execute("TRUNCATE alembic_version CASCADE;") self.connection.commit() super(IntegrationTestServerStatus, self).tearDown() From 38f3d7d03be2cd7efe522e03ce65de1535295080 Mon Sep 17 00:00:00 2001 From: Peter Bengtsson Date: Wed, 14 Sep 2016 22:21:35 -0400 Subject: [PATCH 06/13] fixes bug 1262252 - Crash data pure implementation (#3452) * fixes bug 1262252 - Crash data pure implementation * mocking on old tests * test that was not important * leftover comment * leftover comment * import cleanup * fixing test as per new wording * dont test crash_data through middleware in integration tests * remove all use of middleware in integration-test.sh * review nits * remove unncessary rabbitmq connection * nit fixes * unset bad reference_value_from --- scripts/integration-test.sh | 78 +- socorro/external/boto/crash_data.py | 58 +- socorro/external/boto/crashstorage.py | 30 +- socorro/external/crash_data_base.py | 3 + socorro/external/rabbitmq/crashstorage.py | 38 +- socorro/external/rabbitmq/priorityjobs.py | 2 + socorro/middleware/middleware_app.py | 4 +- .../unittest/external/boto/test_crash_data.py | 156 +++ .../external/boto/test_crashstorage.py | 8 +- .../middleware/test_middleware_app.py | 9 - 
.../crashstats/api/tests/test_views.py | 60 +- webapp-django/crashstats/api/views.py | 10 + .../crashstats/report_index_pending.html | 47 +- .../crashstats/report_index_too_old.html | 10 - webapp-django/crashstats/crashstats/models.py | 59 +- .../static/crashstats/css/report_pending.less | 18 + .../static/crashstats/css/screen.less | 23 +- .../static/crashstats/js/socorro/pending.js | 81 +- .../crashstats/tests/test_models.py | 86 +- .../crashstats/crashstats/tests/test_views.py | 1207 +++++++---------- webapp-django/crashstats/crashstats/urls.py | 8 +- webapp-django/crashstats/crashstats/views.py | 84 +- webapp-django/crashstats/settings/base.py | 13 + webapp-django/crashstats/settings/bundles.py | 12 + 24 files changed, 996 insertions(+), 1108 deletions(-) create mode 100644 socorro/unittest/external/boto/test_crash_data.py delete mode 100644 webapp-django/crashstats/crashstats/jinja2/crashstats/report_index_too_old.html create mode 100644 webapp-django/crashstats/crashstats/static/crashstats/css/report_pending.less diff --git a/scripts/integration-test.sh b/scripts/integration-test.sh index 7301982419..f740ccc322 100755 --- a/scripts/integration-test.sh +++ b/scripts/integration-test.sh @@ -131,7 +131,7 @@ function cleanup() { echo "INFO: Terminating background jobs" echo " any kill usage errors below may be ignored" - for p in collector processor middleware + for p in collector processor do # destroy any running processes started by this shell kill $(jobs -p) > /dev/null 2>&1 @@ -206,7 +206,7 @@ function retry() { } #------------------------------------------------------------------------------ -# setup and run the collector, processor and middleware +# setup and run the collector and processor # The collector should be configured using the 2015 method of having the # ability to collect multiple crash types using different end points. # breakpad crashes on /submit @@ -255,30 +255,13 @@ function start_2015_socorro_apps() { > processor.log 2>&1 & echo ' processor started' - sleep 1 - socorro middleware \ - --admin.conf=./config/middleware.ini \ - --database.database_hostname=$database_hostname \ - --database.database_username=$database_username \ - --database.database_password=$database_password \ - --rabbitmq.host=$rmq_host \ - --rabbitmq.rabbitmq_user=$rmq_user \ - --rabbitmq.rabbitmq_password=$rmq_password \ - --rabbitmq.virtual_host=$rmq_virtual_host \ - --rabbitmq.standard_queue_name=$rmq_normal_queue_name \ - --rabbitmq.priority_queue_name=$rmq_priority_queue_name \ - --rabbitmq.reprocessing_queue_name=$rmq_reprocessing_queue_name \ - --web_server.wsgi_server_class=socorro.webapi.servers.CherryPy \ - > middleware.log 2>&1 & - echo ' middleware started' - # tell the test routine to use the extra submission test extra_submission_test=1 echo " Done." 
} #------------------------------------------------------------------------------ -# setup and run the collector, processor and middleware +# setup and run the collector and processor # The collector will use the traditional wsgi function that can only receive # breakpad crashes on the endpoint /submit #------------------------------------------------------------------------------ @@ -320,20 +303,6 @@ function start_standard_socorro_apps() { --processor.processor_class=socorro.processor.mozilla_processor_2015.MozillaProcessorAlgorithm2015 \ > processor.log 2>&1 & sleep 1 - socorro middleware \ - --admin.conf=./config/middleware.ini \ - --database.database_hostname=$database_hostname \ - --database.database_username=$database_username \ - --database.database_password=$database_password \ - --rabbitmq.host=$rmq_host \ - --rabbitmq.rabbitmq_user=$rmq_user \ - --rabbitmq.rabbitmq_password=$rmq_password \ - --rabbitmq.virtual_host=$rmq_virtual_host \ - --rabbitmq.standard_queue_name=$rmq_normal_queue_name \ - --rabbitmq.priority_queue_name=$rmq_priority_queue_name \ - --rabbitmq.reprocessing_queue_name=$rmq_reprocessing_queue_name \ - --web_server.wsgi_server_class=socorro.webapi.servers.CherryPy \ - > middleware.log 2>&1 & # tell the test routine NOT to use the extra submission test extra_submission_test=0 @@ -342,7 +311,7 @@ function start_standard_socorro_apps() { } #------------------------------------------------------------------------------ -# setup and run the collector, processor and middleware WITHOUT RabbitMQ +# setup and run the collector and processor WITHOUT RabbitMQ # The collector will use the traditional wsgi function that can only receive # breakpad crashes on the endpoint /submit # The collector saves in @@ -368,21 +337,6 @@ function start_minimal_socorro_apps() { --destination.fs_root=./processedCrashStore \ > processor.log 2>&1 & sleep 1 - socorro middleware \ - --admin.conf=./config/middleware.ini \ - --database.database_hostname=$database_hostname \ - --database.database_username=$database_username \ - --database.database_password=$database_password \ - --filesystem.fs_root=./processedCrashStore \ - --rabbitmq.host=$rmq_host \ - --rabbitmq.rabbitmq_user=$rmq_user \ - --rabbitmq.rabbitmq_password=$rmq_password \ - --rabbitmq.virtual_host=$rmq_virtual_host \ - --rabbitmq.standard_queue_name=$rmq_normal_queue_name \ - --rabbitmq.priority_queue_name=$rmq_priority_queue_name \ - --rabbitmq.reprocessing_queue_name=$rmq_reprocessing_queue_name \ - --web_server.wsgi_server_class=socorro.webapi.servers.CherryPy \ - > middleware.log 2>&1 & # tell the test routine NOT to use the extra submission test extra_submission_test=0 @@ -423,7 +377,7 @@ echo " Done." #****************************************************************************** # Here's where we actually start testing -# Iterate through some combinations of collector/crashmover/processor/middleware/setups +# Iterate through some combinations of collector/crashmover/processor/setups # These setups are defined in functions with their names list in the for loop: for an_app_set in start_2015_socorro_apps start_standard_socorro_apps start_minimal_socorro_apps do @@ -466,28 +420,6 @@ do retry 'collector' "$CRASHID" retry 'processor' "saved - $CRASHID" - #---------------------------------------------------------------------------- - # check that mware has raw crash using curl to hit the HTTP endpoint - curl -s -D middleware_headers.log "http://localhost:8883/crash_data/?datatype=meta&uuid=$CRASHID" > /dev/null - err=$? 
- echo " looking for errors in hitting the middleware for $CRASHID" - check_for_logged_fatal_errors $err middleware - - echo " looking for "200 OK" in hitting the middleware for $CRASHID" - grep '200 OK' middleware_headers.log > /dev/null - fatal $? "middleware test failed, no raw data for crash ID $CRASHID" - - echo " looking for processed crash through middleware for $CRASHID" - function find_crash_in_middleware() { - curl -s "http://localhost:8883/crash_data/?datatype=processed&uuid=$CRASHID" | grep date_processed - echo "http://localhost:8883/crash_data/?datatype=processed&uuid=$CRASHID" - return $? - } - retry_command middleware find_crash_in_middleware - - # check that mware logs the request for the crash, and logs no errors - retry 'middleware' "/crash_data" - #---------------------------------------------------------------------------- # EXTRA submission test if [ $extra_submission_test = 1 ] diff --git a/socorro/external/boto/crash_data.py b/socorro/external/boto/crash_data.py index 3cacb4c4aa..7ee1e3ca09 100644 --- a/socorro/external/boto/crash_data.py +++ b/socorro/external/boto/crash_data.py @@ -2,9 +2,11 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -# this is a temporary hack to coerse the middleware to talk to boto S3 -# instead of HBase. - +from socorrolib.lib import external_common, MissingArgumentError +from socorro.external.boto.crashstorage import ( + BotoS3CrashStorage, + CrashIDNotFound, +) from socorro.external.crash_data_base import CrashDataBase @@ -24,3 +26,53 @@ def get_storage(self): # implementation details with boto S3. return self.config.hbase.hbase_class(self.config.hbase) + +class SimplifiedCrashData(BotoS3CrashStorage): + """The difference between this and the base CrashData class is that + this one only makes the get() and if it fails it does NOT + try to put the crash ID back into the priority jobs queue. + Also, it returns a python dict instead of a DotDict which + makes this easier to work with from the webapp's model bridge. + """ + + def __init__(self, *args, **kwargs): + super(SimplifiedCrashData, self).__init__(*args, **kwargs) + # Forcibly set this to override the default in the base + # crash storage class for boto. We're confident that at this + # leaf point we want to NOT return a DotDict but just a plain + # python dict. + self.config.json_object_hook = dict + + def get(self, **kwargs): + """Return JSON data of a crash report, given its uuid. """ + filters = [ + ('uuid', None, str), + ('datatype', None, str), + ('name', None, str) # only applicable if datatype == 'raw' + ] + params = external_common.parse_arguments(filters, kwargs, modern=True) + + if not params.uuid: + raise MissingArgumentError('uuid') + + if not params.datatype: + raise MissingArgumentError('datatype') + + datatype_method_mapping = { + 'raw': 'get_raw_dump', + 'meta': 'get_raw_crash', + 'processed': 'get_processed', + 'unredacted': 'get_unredacted_processed', + } + get = self.__getattribute__(datatype_method_mapping[params.datatype]) + try: + if params.datatype == 'raw': + return get(params.uuid, name=params.name) + else: + return get(params.uuid) + except CrashIDNotFound: + # The CrashIDNotFound exception that happens inside the + # crashstorage is too revealing as exception message + # contains information about buckets and prefix keys. + # Re-wrap it here so the message is just the crash ID. 
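+            # (the webapp serializes str(exception) into the 404 response
+            # body, so the message must stay safe to expose)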
+ raise CrashIDNotFound(params.uuid) diff --git a/socorro/external/boto/crashstorage.py b/socorro/external/boto/crashstorage.py index 500d7db0a9..3d8442aaee 100644 --- a/socorro/external/boto/crashstorage.py +++ b/socorro/external/boto/crashstorage.py @@ -7,7 +7,6 @@ import json_schema_reducer from socorrolib.lib.converters import change_default -from socorrolib.lib.util import DotDict from configman import Namespace from configman.converters import class_converter, py_obj_to_str @@ -70,6 +69,11 @@ class BotoCrashStorage(CrashStorageBase): default='.dump', reference_value_from='resource.boto', ) + required_config.add_option( + 'json_object_hook', + default='configman.dotdict.DotDict', + from_string_converter=class_converter, + ) def is_operational_exception(self, x): if "not found, no value returned" in str(x): @@ -174,20 +178,27 @@ def save_raw_and_processed( self.save_processed(processed_crash) @staticmethod - def do_get_raw_crash(boto_connection, crash_id): + def do_get_raw_crash(boto_connection, crash_id, json_object_hook): try: raw_crash_as_string = boto_connection.fetch( crash_id, "raw_crash" ) - return json.loads(raw_crash_as_string, object_hook=DotDict) + return json.loads( + raw_crash_as_string, + object_hook=json_object_hook + ) except boto_connection.ResponseError, x: raise CrashIDNotFound( '%s not found: %s' % (crash_id, x) ) def get_raw_crash(self, crash_id): - return self.transaction_for_get(self.do_get_raw_crash, crash_id) + return self.transaction_for_get( + self.do_get_raw_crash, + crash_id, + self.config.json_object_hook + ) @staticmethod def do_get_raw_dump(boto_connection, crash_id, name=None): @@ -244,7 +255,11 @@ def get_raw_dumps_as_files(self, crash_id): ) @staticmethod - def _do_get_unredacted_processed(boto_connection, crash_id): + def _do_get_unredacted_processed( + boto_connection, + crash_id, + json_object_hook, + ): try: processed_crash_as_string = boto_connection.fetch( crash_id, @@ -252,7 +267,7 @@ def _do_get_unredacted_processed(boto_connection, crash_id): ) return json.loads( processed_crash_as_string, - object_hook=DotDict + object_hook=json_object_hook, ) except boto_connection.ResponseError, x: raise CrashIDNotFound( @@ -262,7 +277,8 @@ def _do_get_unredacted_processed(boto_connection, crash_id): def get_unredacted_processed(self, crash_id): return self.transaction_for_get( self._do_get_unredacted_processed, - crash_id + crash_id, + self.config.json_object_hook, ) diff --git a/socorro/external/crash_data_base.py b/socorro/external/crash_data_base.py index d9a57bf6cd..63029c1f8d 100644 --- a/socorro/external/crash_data_base.py +++ b/socorro/external/crash_data_base.py @@ -2,6 +2,9 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. +# XXX this is now deprecated and can be deleted. 
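+# (the webapp now uses socorro.external.boto.crash_data.SimplifiedCrashData
+# directly, bypassing the middleware)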
+# See https://bugzilla.mozilla.org/show_bug.cgi?id=1299465 + from socorrolib.lib import ( MissingArgumentError, ResourceNotFound, diff --git a/socorro/external/rabbitmq/crashstorage.py b/socorro/external/rabbitmq/crashstorage.py index 135959247e..c54490779d 100644 --- a/socorro/external/rabbitmq/crashstorage.py +++ b/socorro/external/rabbitmq/crashstorage.py @@ -105,11 +105,13 @@ def __init__(self, config, quit_check_callback=None): if config.throttle == 100: self.dont_queue_this_crash = lambda: False else: - self.dont_queue_this_crash = lambda: randint(1, 100) > config.throttle + self.dont_queue_this_crash = ( + lambda: randint(1, 100) > config.throttle + ) #-------------------------------------------------------------------------- def save_raw_crash(self, raw_crash, dumps, crash_id): - if self.dont_queue_this_crash(): + if self.dont_queue_this_crash(): self.config.logger.info( 'Crash %s filtered out of RabbitMQ queue %s', crash_id, @@ -118,8 +120,7 @@ def save_raw_crash(self, raw_crash, dumps, crash_id): return try: this_crash_should_be_queued = ( - (not self.config.filter_on_legacy_processing) - or + not self.config.filter_on_legacy_processing or raw_crash.legacy_processing == 0 ) except KeyError: @@ -170,7 +171,6 @@ def new_crashes(self): # queues the crash_id. The '_consume_acknowledgement_queue' function # is run to send acknowledgments back to RabbitMQ self._consume_acknowledgement_queue() - conn = self.rabbitmq.connection() queues = [ self.rabbitmq.config.priority_queue_name, self.rabbitmq.config.standard_queue_name, @@ -326,3 +326,31 @@ def reprocess(self, crash_ids): ): success = False return success + + +#============================================================================== +class PriorityjobRabbitMQCrashStore(RabbitMQCrashStorage): + required_config = Namespace() + required_config.rabbitmq_class = change_default( + RabbitMQCrashStorage, + 'rabbitmq_class', + ConnectionContext, + ) + required_config.add_option( + 'routing_key', + default='socorro.priority', + doc='the name of the queue to receive crashes', + ) + + def process(self, crash_ids): + if not isinstance(crash_ids, (list, tuple)): + crash_ids = [crash_ids] + success = bool(crash_ids) + for crash_id in crash_ids: + if not self.save_raw_crash( + DotDict({'legacy_processing': 0}), + [], + crash_id + ): + success = False + return success diff --git a/socorro/external/rabbitmq/priorityjobs.py b/socorro/external/rabbitmq/priorityjobs.py index a2e406988c..fdd85953a6 100644 --- a/socorro/external/rabbitmq/priorityjobs.py +++ b/socorro/external/rabbitmq/priorityjobs.py @@ -2,6 +2,8 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. +# XXX This file is probably not used anywhere and can be deleted. 
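+# (priority jobs are queued via PriorityjobRabbitMQCrashStore in
+# socorro/external/rabbitmq/crashstorage.py instead)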
+ import pika from pika.exceptions import ChannelClosed diff --git a/socorro/middleware/middleware_app.py b/socorro/middleware/middleware_app.py index 5d5640e31f..d3e2ec63f6 100755 --- a/socorro/middleware/middleware_app.py +++ b/socorro/middleware/middleware_app.py @@ -41,7 +41,6 @@ (r'/backfill/(.*)', 'backfill.Backfill'), (r'/correlations/signatures/(.*)', 'correlations.CorrelationsSignatures'), (r'/correlations/(.*)', 'correlations.Correlations'), - (r'/crash_data/(.*)', 'crash_data.CrashData'), ( r'/crashes/' r'(comments|count_by_day|daily|frequency|signatures|' @@ -136,8 +135,7 @@ class MiddlewareApp(App): required_config.implementations.add_option( 'service_overrides', doc='comma separated list of class overrides, e.g `Crashes: hbase`', - default='CrashData: fs, ' - 'Correlations: http, ' + default='Correlations: http, ' 'CorrelationsSignatures: http, ' 'SuperSearch: es, ' 'Priorityjobs: rabbitmq, ' diff --git a/socorro/unittest/external/boto/test_crash_data.py b/socorro/unittest/external/boto/test_crash_data.py new file mode 100644 index 0000000000..cb61f6fee1 --- /dev/null +++ b/socorro/unittest/external/boto/test_crash_data.py @@ -0,0 +1,156 @@ +import json + +import mock +from nose.tools import eq_, assert_raises +from boto.exception import StorageResponseError + +from configman import ConfigurationManager + +from socorrolib.lib import MissingArgumentError +from socorro.external.boto.crash_data import SimplifiedCrashData +from socorro.external.crashstorage_base import CrashIDNotFound +from socorro.unittest.testbase import TestCase + + +class TestSimplifiedCrashData(TestCase): + + def _get_config(self, sources, extra_values=None): + self.mock_logging = mock.Mock() + + config_definitions = [] + for source in sources: + conf = source.get_required_config() + conf.add_option('logger', default=self.mock_logging) + config_definitions.append(conf) + + values_source = {'logger': self.mock_logging} + + config_manager = ConfigurationManager( + config_definitions, + app_name='testapp', + app_version='1.0', + app_description='', + values_source_list=[values_source], + argv_source=[], + ) + + return config_manager.get_config() + + def get_s3_store(self): + s3 = SimplifiedCrashData( + config=self._get_config([SimplifiedCrashData]) + ) + s3_conn = s3.connection_source + s3_conn._connect_to_endpoint = mock.Mock() + return s3 + + def test_get_basic_processed(self): + boto_s3_store = self.get_s3_store() + mocked_connection = ( + boto_s3_store.connection_source._connect_to_endpoint() + ) + + def mocked_get_contents_as_string(): + return json.dumps({'foo': 'bar'}) + + mocked_connection.get_bucket().get_key().get_contents_as_string = ( + mocked_get_contents_as_string + ) + result = boto_s3_store.get( + uuid='0bba929f-8721-460c-dead-a43c20071027', + datatype='processed' + ) + eq_(result, {'foo': 'bar'}) + + def test_get_not_found_processed(self): + boto_s3_store = self.get_s3_store() + mocked_connection = ( + boto_s3_store.connection_source._connect_to_endpoint() + ) + + def mocked_get_key(key): + assert '/processed_crash/' in key + assert '0bba929f-8721-460c-dead-a43c20071027' in key + raise StorageResponseError(404, 'not found') + + mocked_connection.get_bucket().get_key = ( + mocked_get_key + ) + assert_raises( + CrashIDNotFound, + boto_s3_store.get, + uuid='0bba929f-8721-460c-dead-a43c20071027', + datatype='processed' + ) + + def test_get_basic_raw_dump(self): + boto_s3_store = self.get_s3_store() + mocked_connection = ( + boto_s3_store.connection_source._connect_to_endpoint() + ) + + def 
mocked_get_contents_as_string(): + return '\xa0' + + mocked_connection.get_bucket().get_key().get_contents_as_string = ( + mocked_get_contents_as_string + ) + result = boto_s3_store.get( + uuid='0bba929f-8721-460c-dead-a43c20071027', + datatype='raw', + ) + eq_(result, '\xa0') + + def test_get_not_found_raw_dump(self): + boto_s3_store = self.get_s3_store() + mocked_connection = ( + boto_s3_store.connection_source._connect_to_endpoint() + ) + + def mocked_get_key(key): + assert '/dump/' in key + assert '0bba929f-8721-460c-dead-a43c20071027' in key + raise StorageResponseError(404, 'not found') + + mocked_connection.get_bucket().get_key = ( + mocked_get_key + ) + assert_raises( + CrashIDNotFound, + boto_s3_store.get, + uuid='0bba929f-8721-460c-dead-a43c20071027', + datatype='raw' + ) + + def test_get_not_found_raw_crash(self): + boto_s3_store = self.get_s3_store() + mocked_connection = ( + boto_s3_store.connection_source._connect_to_endpoint() + ) + + def mocked_get_key(key): + assert '/raw_crash/' in key + assert '0bba929f-8721-460c-dead-a43c20071027' in key + raise StorageResponseError(404, 'not found') + + mocked_connection.get_bucket().get_key = ( + mocked_get_key + ) + assert_raises( + CrashIDNotFound, + boto_s3_store.get, + uuid='0bba929f-8721-460c-dead-a43c20071027', + datatype='meta' + ) + + def test_bad_arguments(self): + boto_s3_store = self.get_s3_store() + assert_raises( + MissingArgumentError, + boto_s3_store.get + ) + assert_raises( + MissingArgumentError, + boto_s3_store.get, + uuid='0bba929f-8721-460c-dead-a43c20071027', + ) diff --git a/socorro/unittest/external/boto/test_crashstorage.py b/socorro/unittest/external/boto/test_crashstorage.py index 0f25323f44..cde76cfa15 100644 --- a/socorro/unittest/external/boto/test_crashstorage.py +++ b/socorro/unittest/external/boto/test_crashstorage.py @@ -32,7 +32,8 @@ ) from socorro.unittest.external.es.base import ElasticsearchTestCase -import socorro.unittest.testbase +from socorro.unittest.testbase import TestCase + from socorrolib.lib.util import DotDict @@ -60,7 +61,7 @@ class ConditionallyABadDeal(Exception): S3ConnectionContext.conditional_exceptions = (ConditionallyABadDeal, ) -class BaseTestCase(socorro.unittest.testbase.TestCase): +class BaseTestCase(TestCase): @classmethod def setUpClass(cls): @@ -102,7 +103,8 @@ def setup_mocked_s3_storage( 'dump_file_suffix': '.dump', 'bucket_name': bucket_name, 'prefix': 'dev', - 'calling_format': mock.Mock() + 'calling_format': mock.Mock(), + 'json_object_hook': DotDict, }) if isinstance(storage_class, basestring): diff --git a/socorro/unittest/middleware/test_middleware_app.py b/socorro/unittest/middleware/test_middleware_app.py index 6f282e7cce..d66f45dd8b 100644 --- a/socorro/unittest/middleware/test_middleware_app.py +++ b/socorro/unittest/middleware/test_middleware_app.py @@ -999,15 +999,6 @@ def test_missing_argument_yield_bad_request(self): app.main() server = middleware_app.application - response = self.get( - server, - '/crash_data/', - {'xx': 'yy'}, - expect_errors=True - ) - eq_(response.status, 400) - ok_('uuid' in response.body) - response = self.get( server, '/crashes/comments/', diff --git a/webapp-django/crashstats/api/tests/test_views.py b/webapp-django/crashstats/api/tests/test_views.py index 9d1399463d..e070667a29 100644 --- a/webapp-django/crashstats/api/tests/test_views.py +++ b/webapp-django/crashstats/api/tests/test_views.py @@ -28,6 +28,9 @@ Reprocessing, ProductBuildTypes, Status, + ProcessedCrash, + RawCrash, + UnredactedCrash, ) from crashstats.tokens.models 
import Token @@ -767,8 +770,7 @@ def mocked_get(url, params, **options): ok_(dump['hits']) ok_(dump['total']) - @mock.patch('requests.get') - def test_ProcessedCrash(self, rget): + def test_ProcessedCrash(self): url = reverse('api:model_wrapper', args=('ProcessedCrash',)) response = self.client.get(url) eq_(response.status_code, 400) @@ -776,11 +778,9 @@ def test_ProcessedCrash(self, rget): dump = json.loads(response.content) ok_(dump['errors']['crash_id']) - def mocked_get(url, params, **options): - assert '/crash_data' in url, url - + def mocked_get(**params): if 'datatype' in params and params['datatype'] == 'processed': - return Response({ + return { "client_crash_date": "2012-06-11T06:08:45", "dump": dump, "signature": "FakeSignature1", @@ -812,10 +812,10 @@ def mocked_get(url, params, **options): "upload_file_minidump_flash1": "a crash", "upload_file_minidump_flash2": "a crash", "upload_file_minidump_plugin": "a crash" - }) - raise NotImplementedError(url) + } + raise NotImplementedError - rget.side_effect = mocked_get + ProcessedCrash.implementation().get.side_effect = mocked_get response = self.client.get(url, { 'crash_id': '123', @@ -826,8 +826,7 @@ def mocked_get(url, params, **options): ok_('upload_file_minidump_flash2' in dump) ok_('url' not in dump) - @mock.patch('requests.get') - def test_UnredactedCrash(self, rget): + def test_UnredactedCrash(self): url = reverse('api:model_wrapper', args=('UnredactedCrash',)) response = self.client.get(url) # because we don't have the sufficient permissions yet to use it @@ -855,11 +854,9 @@ def test_UnredactedCrash(self, rget): dump = json.loads(response.content) ok_(dump['errors']['crash_id']) - def mocked_get(url, params, **options): - assert '/crash_data/' in url - + def mocked_get(**params): if 'datatype' in params and params['datatype'] == 'unredacted': - return Response({ + return { "client_crash_date": "2012-06-11T06:08:45", "dump": dump, "signature": "FakeSignature1", @@ -892,10 +889,10 @@ def mocked_get(url, params, **options): "upload_file_minidump_flash2": "a crash", "upload_file_minidump_plugin": "a crash", "exploitability": "Unknown Exploitability", - }) - raise NotImplementedError(url) + } + raise NotImplementedError - rget.side_effect = mocked_get + UnredactedCrash.implementation().get.side_effect = mocked_get response = self.client.get(url, { 'crash_id': '123', @@ -906,13 +903,11 @@ def mocked_get(url, params, **options): ok_('upload_file_minidump_flash2' in dump) ok_('exploitability' in dump) - @mock.patch('requests.get') - def test_RawCrash(self, rget): + def test_RawCrash(self): - def mocked_get(url, params, **options): - assert '/crash_data' in url + def mocked_get(**params): if 'uuid' in params and params['uuid'] == 'abc123': - return Response({ + return { "InstallTime": "1366691881", "AdapterVendorID": "0x8086", "Theme": "classic/1.0", @@ -946,10 +941,10 @@ def mocked_get(url, params, **options): "upload_file_minidump_flash1": "a crash", "upload_file_minidump_flash2": "a crash", "upload_file_minidump_plugin": "a crash" - }) - raise NotImplementedError(url) + } + raise NotImplementedError - rget.side_effect = mocked_get + RawCrash.implementation().get.side_effect = mocked_get url = reverse('api:model_wrapper', args=('RawCrash',)) response = self.client.get(url) @@ -977,16 +972,14 @@ def mocked_get(url, params, **options): ok_('http://p0rn.com' not in dump['Comments']) ok_('mail@email.com' not in dump['Comments']) - @mock.patch('requests.get') - def test_RawCrash_binary_blob(self, rget): + def 
test_RawCrash_binary_blob(self): - def mocked_get(url, params, **options): - assert '/crash_data' in url + def mocked_get(**params): if 'uuid' in params and params['uuid'] == 'abc': - return Response('\xe0') - raise NotImplementedError(url) + return '\xe0' + raise NotImplementedError - rget.side_effect = mocked_get + RawCrash.implementation().get.side_effect = mocked_get url = reverse('api:model_wrapper', args=('RawCrash',)) response = self.client.get(url, { @@ -996,7 +989,6 @@ def mocked_get(url, params, **options): # because we don't have permission eq_(response.status_code, 403) - url = reverse('api:model_wrapper', args=('RawCrash',)) response = self.client.get(url, { 'crash_id': 'abc', 'format': 'wrong' # note diff --git a/webapp-django/crashstats/api/views.py b/webapp-django/crashstats/api/views.py index 5918575217..898ce7dd2a 100644 --- a/webapp-django/crashstats/api/views.py +++ b/webapp-django/crashstats/api/views.py @@ -17,6 +17,7 @@ from waffle.decorators import waffle_switch from socorrolib.lib import BadArgumentError, MissingArgumentError +from socorro.external.crashstorage_base import CrashIDNotFound import crashstats from crashstats.crashstats.decorators import track_api_pageview @@ -41,6 +42,10 @@ models.RequiredParameterError, ) +NOT_FOUND_EXCEPTIONS = ( + CrashIDNotFound, +) + # See http://www.iana.org/assignments/http-status-codes REASON_PHRASES = { @@ -337,6 +342,11 @@ def model_wrapper(request, model_name): content_type='application/json; charset=UTF-8' ) raise + except NOT_FOUND_EXCEPTIONS as exception: + return http.HttpResponseNotFound( + json.dumps({'error': str(exception)}), + content_type='application/json; charset=UTF-8' + ) except BAD_REQUEST_EXCEPTIONS as exception: return http.HttpResponseBadRequest( json.dumps({'error': str(exception)}), diff --git a/webapp-django/crashstats/crashstats/jinja2/crashstats/report_index_pending.html b/webapp-django/crashstats/crashstats/jinja2/crashstats/report_index_pending.html index 0e06726166..9363e2ac77 100644 --- a/webapp-django/crashstats/crashstats/jinja2/crashstats/report_index_pending.html +++ b/webapp-django/crashstats/crashstats/jinja2/crashstats/report_index_pending.html @@ -1,32 +1,33 @@ {% extends "crashstats_base.html" %} +{% block site_css %} +{{ super() }} +{% stylesheet 'report_pending' %} +{% endblock %} + +{% block site_js %} +{{ super() }} +{% javascript 'report_pending' %} +{% endblock %} + {% block content %}
 [template body garbled in extraction; the visible strings in this hunk were
 "Please Wait...", "Fetching this archived report will take 30 seconds to
 5 minutes" and "Next attempt in 30 seconds..."]
 {% endblock %}
-
-{% block site_js %}
-{{ super() }}
-[script markup garbled in extraction]
-{% endblock %}
diff --git a/webapp-django/crashstats/crashstats/jinja2/crashstats/report_index_too_old.html b/webapp-django/crashstats/crashstats/jinja2/crashstats/report_index_too_old.html
deleted file mode 100644
index 3305844bc0..0000000000
--- a/webapp-django/crashstats/crashstats/jinja2/crashstats/report_index_too_old.html
+++ /dev/null
@@ -1,10 +0,0 @@
-{% extends "crashstats_base.html" %}
-
-{% block content %}
-[deleted markup garbled in extraction; it rendered the strings "Oh Noes!"
- and "This archived report has expired because it is greater than 3 years
- of age."]
-{% endblock %} diff --git a/webapp-django/crashstats/crashstats/models.py b/webapp-django/crashstats/crashstats/models.py index f9d2b4f1d5..2860ce083c 100644 --- a/webapp-django/crashstats/crashstats/models.py +++ b/webapp-django/crashstats/crashstats/models.py @@ -12,10 +12,12 @@ import ujson from configman import configuration, Namespace +from socorrolib.lib import BadArgumentError from socorro.external.es.base import ElasticsearchConfig from socorro.external.postgresql.crashstorage import PostgreSQLCrashStorage from socorro.external.rabbitmq.crashstorage import ( ReprocessingOneRabbitMQCrashStore, + PriorityjobRabbitMQCrashStore, ) import socorro.external.postgresql.platforms import socorro.external.postgresql.bugs @@ -28,6 +30,7 @@ import socorro.external.postgresql.product_build_types import socorro.external.postgresql.signature_first_date import socorro.external.postgresql.server_status +import socorro.external.boto.crash_data from socorrolib.app import socorro_app @@ -67,6 +70,16 @@ def config_from_configman(): 'rabbitmq_reprocessing_class', default=ReprocessingOneRabbitMQCrashStore, ) + definition_source.namespace('priority') + definition_source.priority.add_option( + 'rabbitmq_priority_class', + default=PriorityjobRabbitMQCrashStore, + ) + definition_source.namespace('data') + definition_source.data.add_option( + 'crash_data_class', + default=socorro.external.boto.crash_data.SimplifiedCrashData, + ) config = configuration( definition_source=definition_source, values_source_list=[ @@ -78,6 +91,8 @@ def config_from_configman(): # logger set up by configman as an aggregate, we just use the # same logger as we have here in the webapp. config.queuing.logger = logger + config.priority.logger = logger + config.data.logger = logger return config @@ -395,7 +410,6 @@ def get_implementation(self): config = config_from_configman() if self.implementation_config_namespace: config = config[self.implementation_config_namespace] - _implementations[key] = self.implementation( config=config ) @@ -1010,7 +1024,9 @@ class ReportList(SocorroMiddleware): class ProcessedCrash(SocorroMiddleware): - URL_PREFIX = '/crash_data/' + + implementation = socorro.external.boto.crash_data.SimplifiedCrashData + implementation_config_namespace = 'data' required_params = ( 'crash_id', @@ -1092,7 +1108,6 @@ class ProcessedCrash(SocorroMiddleware): class UnredactedCrash(ProcessedCrash): - URL_PREFIX = '/crash_data/' defaults = { 'datatype': 'unredacted', @@ -1123,7 +1138,8 @@ class RawCrash(SocorroMiddleware): token that carries the "View Raw Dumps" permission. """ - URL_PREFIX = '/crash_data/' + implementation = socorro.external.boto.crash_data.SimplifiedCrashData + implementation_config_namespace = 'data' required_params = ( 'crash_id', @@ -1239,11 +1255,17 @@ class RawCrash(SocorroMiddleware): ) def get(self, **kwargs): - format = kwargs.get('format', 'meta') - if format == 'raw_crash': - format = kwargs['format'] = 'raw' - kwargs['expect_json'] = format != 'raw' - return super(RawCrash, self).get(**kwargs) + format_ = kwargs.get('format', 'meta') + if format_ == 'raw_crash': + # legacy + format_ = kwargs['format'] = 'raw' + expect_dict = format_ != 'raw' + result = super(RawCrash, self).get(**kwargs) + # This 'result', will either be a binary blob or a python dict. + # Unless kwargs['format']==raw, this has to be a python dict. 
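+        # (a binary blob when a dict was expected means the requested
+        # 'format' does not match the stored datatype)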
+ if expect_dict and not isinstance(result, dict): + raise BadArgumentError('format') + return result class CommentsBySignature(SocorroMiddleware): @@ -1881,6 +1903,25 @@ def post(self, **data): return self.get_implementation().reprocess(**data) +class Priorityjob(SocorroMiddleware): + """Return true if all supplied crash IDs + were sucessfully submitted onto the priority queue. + """ + + implementation = PriorityjobRabbitMQCrashStore + + implementation_config_namespace = 'priority' + + required_params = ( + ('crash_ids', list), + ) + + get = None + + def post(self, **kwargs): + return self.get_implementation().process(**kwargs) + + class Healthcheck(SocorroMiddleware): """Return a sign of life from the middleware. diff --git a/webapp-django/crashstats/crashstats/static/crashstats/css/report_pending.less b/webapp-django/crashstats/crashstats/static/crashstats/css/report_pending.less new file mode 100644 index 0000000000..b24c0d0e0d --- /dev/null +++ b/webapp-django/crashstats/crashstats/static/crashstats/css/report_pending.less @@ -0,0 +1,18 @@ +@import "mixins.less"; + +div.pending { + background-color: #fff; + color: #333; + margin: 20px auto; + padding: 10px; + border: 4px solid #999; + width: 400px; + text-align: center; + .rounded-corners(25px); + p { + font-size: 12px; + } + img { + margin: 10px 0; + } +} diff --git a/webapp-django/crashstats/crashstats/static/crashstats/css/screen.less b/webapp-django/crashstats/crashstats/static/crashstats/css/screen.less index 4bfdf4f7b1..895dde7908 100644 --- a/webapp-django/crashstats/crashstats/static/crashstats/css/screen.less +++ b/webapp-django/crashstats/crashstats/static/crashstats/css/screen.less @@ -863,28 +863,7 @@ div.code { #allthreads { display: none; } -/* Report Pending Status page */ -div.pendingStatus { - background-color: #fff; - color: #333; - margin: 20px auto; - padding: 10px; - border: 4px solid #999; - width: 400px; - font: 12px "Lucida Grande", "Lucida Sans Unicode", verdana, lucida, arial, helvetica, sans-serif; - text-align: center; - .rounded-corners(25px); - p { - font-size: 12px; - } - p.pendingProcessing { - color: green; - font-weight: bold; - } - img { - margin: 10px 0; - } -} + /* Top Crashers Index Page */ #topcrashers { h1 { diff --git a/webapp-django/crashstats/crashstats/static/crashstats/js/socorro/pending.js b/webapp-django/crashstats/crashstats/static/crashstats/js/socorro/pending.js index ad56f18646..5e37e21a21 100644 --- a/webapp-django/crashstats/crashstats/static/crashstats/js/socorro/pending.js +++ b/webapp-django/crashstats/crashstats/static/crashstats/js/socorro/pending.js @@ -1,47 +1,38 @@ +$(function() { + var Checker = (function() { + var intervalTime = 5 * 1000; + var checkInterval; + var totalTime = 0; -/* Javascript for the Pending Reports page */ - -// Begin the timer and Ajax calls for reports -var original_seconds = 30; -var seconds = original_seconds; -var number_calls = 1; - -// Maintain the time in seconds, and make an ajax call every 30 seconds -function pendingReportTimer(url){ - if (seconds == 0){ - $('#next_attempt').hide(); - $('#processing').show(); - - // Upon the third attempt, state that this failed - if (number_calls == 10) { - $('#checking').hide(); - $('#fail').show(); - } else { - pendingReportCheck(url); - number_calls += 1; - seconds = original_seconds; - $('#counter').html(original_seconds); - setTimeout("pendingReportTimer(\""+url+"\")",1000); - } - } - // Decrement the seconds count - else { - $('#processing').hide(); - $('#next_attempt').show(); - seconds -= 1; - 
$('#counter').html(seconds); - setTimeout("pendingReportTimer(\""+url+"\")",1000); - } -} - -// Perform the ajax call to check for this report -function pendingReportCheck (url) -{ - $.get(url, {}, - function(responseJSON){ - if (responseJSON.status == 'ready') { - top.location = responseJSON.url_redirect; + return { + startChecking: function(crashID) { + checkInterval = setInterval(function() { + $.get('/api/ProcessedCrash/', {crash_id: crashID}) + .then(function() { + clearInterval(checkInterval); + // If it exists, we can reload the page we're on. + $('.pending .searching').hide(); + $('.pending .found').fadeIn(300, function() { + document.location.reload(); + }); + }) + .fail(function(err) { + // Perfectly expected. + // We kind of expect the processed crash to not + // exist for a while. Once it's been processed, + // it should exist and yield a 200 error. + if (err.status !== 404) { + // But it's not a 404 error it's something unexpected. + clearInterval(checkInterval); + throw new Error(err); + } + }); + }, intervalTime); } - },"json" - ); -} + }; + })(); + + var pathname = document.location.pathname.split('/'); + var crashID = pathname[pathname.length - 1]; + Checker.startChecking(crashID); +}); diff --git a/webapp-django/crashstats/crashstats/tests/test_models.py b/webapp-django/crashstats/crashstats/tests/test_models.py index 60c1b8cad7..37dda7714f 100644 --- a/webapp-django/crashstats/crashstats/tests/test_models.py +++ b/webapp-django/crashstats/crashstats/tests/test_models.py @@ -735,17 +735,15 @@ def mocked_get(url, params, **options): ok_(r['hits']) ok_(r['total']) - @mock.patch('requests.get') - def test_processed_crash(self, rget): + def test_processed_crash(self): model = models.ProcessedCrash api = model() - def mocked_get(url, params, **options): - assert '/crash_data' in url + def mocked_get(**params): ok_('datatype' in params) eq_(params['datatype'], 'processed') - return Response({ + return { 'product': 'WaterWolf', 'uuid': '7c44ade2-fdeb-4d6c-830a-07d302120525', 'version': '13.0', @@ -765,23 +763,21 @@ def mocked_get(url, params, **options): '13.0' ] ] - }) + } - rget.side_effect = mocked_get + model.implementation().get.side_effect = mocked_get r = api.get(crash_id='7c44ade2-fdeb-4d6c-830a-07d302120525') ok_(r['product']) - @mock.patch('requests.get') - def test_unredacted_crash(self, rget): + def test_unredacted_crash(self): model = models.UnredactedCrash api = model() - def mocked_get(url, params, **options): - assert '/crash_data' in url + def mocked_get(**params): ok_('datatype' in params) eq_(params['datatype'], 'unredacted') - return Response({ + return { 'product': 'WaterWolf', 'uuid': '7c44ade2-fdeb-4d6c-830a-07d302120525', 'version': '13.0', @@ -802,9 +798,10 @@ def mocked_get(url, params, **options): '13.0', ] ], - }) + } + + model.implementation().get.side_effect = mocked_get - rget.side_effect = mocked_get r = api.get(crash_id='7c44ade2-fdeb-4d6c-830a-07d302120525') ok_(r['product']) ok_(r['exploitability']) @@ -1081,59 +1078,48 @@ def mocked_get(**options): # but this should work api.get(batch='250', page='1') - @mock.patch('requests.get') - def test_raw_crash(self, rget): + def test_raw_crash(self): model = models.RawCrash api = model() - def mocked_get(url, params, **options): - assert '/crash_data/' in url - return Response({ + def mocked_get(**params): + return { 'InstallTime': '1339289895', 'FramePoisonSize': '4096', 'Theme': 'classic/1.0', 'Version': '5.0a1', 'Email': 'socorro-123@restmail.net', 'Vendor': 'Mozilla', - }) + } - rget.side_effect = 
mocked_get + model.implementation().get.side_effect = mocked_get r = api.get(crash_id='some-crash-id') eq_(r['Vendor'], 'Mozilla') ok_('Email' in r) # no filtering at this level - @mock.patch('requests.get') - def test_raw_crash_raw_data(self, rget): + def test_raw_crash_raw_data(self): model = models.RawCrash api = model() mocked_calls = [] - def mocked_get(url, params, **options): - assert '/crash_data/' in url + def mocked_get(**params): mocked_calls.append(params) assert params['datatype'] == 'raw' if params.get('name') == 'other': - return Response('\xe0\xe0') - elif params.get('name') == 'unknown': - return Response('not found', 404) + return '\xe0\xe0' else: - return Response('\xe0') + return '\xe0' + + model.implementation().get.side_effect = mocked_get - rget.side_effect = mocked_get r = api.get(crash_id='some-crash-id', format='raw') eq_(r, '\xe0') r = api.get(crash_id='some-crash-id', format='raw', name='other') eq_(r, '\xe0\xe0') - assert_raises( - models.BadStatusCodeError, - api.get, - crash_id='some-crash-id', format='raw', name='unknown' - ) - @mock.patch('requests.put') def test_put_featured_versions(self, rput): model = models.ReleasesFeatured @@ -1630,15 +1616,31 @@ def mocked_get(url, params, **options): def test_Reprocessing(self): api = models.Reprocessing() - def mocked_reprocess(crash_id): - if crash_id == 'some-crash-id': + def mocked_reprocess(crash_ids): + if crash_ids == 'some-crash-id': return True - elif crash_id == 'bad-crash-id': + elif crash_ids == 'bad-crash-id': return - raise NotImplementedError(crash_id) + raise NotImplementedError(crash_ids) models.Reprocessing.implementation().reprocess = mocked_reprocess - ok_(api.post(crash_id='some-crash-id')) + ok_(api.post(crash_ids='some-crash-id')) # Note that it doesn't raise an error if # the ReprocessingOneRabbitMQCrashStore choses NOT to queue it. - ok_(not api.post(crash_id='bad-crash-id')) + ok_(not api.post(crash_ids='bad-crash-id')) + + def test_Priorityjob(self): + api = models.Priorityjob() + + def mocked_process(crash_ids): + if crash_ids == 'some-crash-id': + return True + elif crash_ids == 'bad-crash-id': + return + raise NotImplementedError(crash_ids) + + models.Priorityjob.implementation().process = mocked_process + ok_(api.post(crash_ids='some-crash-id')) + # Note that it doesn't raise an error if + # the PriorityjobRabbitMQCrashStore choses NOT to queue it. 
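+        # (process() returns a falsy value instead of raising)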
+ ok_(not api.post(crash_ids='bad-crash-id')) diff --git a/webapp-django/crashstats/crashstats/tests/test_views.py b/webapp-django/crashstats/crashstats/tests/test_views.py index 113b569505..9b9ff6a2b0 100644 --- a/webapp-django/crashstats/crashstats/tests/test_views.py +++ b/webapp-django/crashstats/crashstats/tests/test_views.py @@ -11,7 +11,7 @@ import pyquery import mock -from nose.tools import eq_, ok_, assert_raises +from nose.tools import eq_, ok_ from nose.plugins.skip import SkipTest from django.test.client import RequestFactory @@ -28,6 +28,8 @@ from django.core.urlresolvers import reverse from django.contrib.contenttypes.models import ContentType +from socorro.external.crashstorage_base import CrashIDNotFound + from crashstats.base.tests.testbase import DjangoTestCase from crashstats.crashstats import models, views from crashstats.crashstats.management import PERMISSIONS @@ -418,7 +420,7 @@ def mocked_product_versions(**params): ) def mocked_supersearchfields(**params): - results = copy.copy(SUPERSEARCH_FIELDS_MOCKED_RESULTS) + results = copy.deepcopy(SUPERSEARCH_FIELDS_MOCKED_RESULTS) # to be realistic we want to introduce some dupes # that have a different key but its `in_database_name` # is one that is already in the hardcoded list (the @@ -2359,18 +2361,6 @@ def test_report_index(self, rget, rpost): rpost.side_effect = mocked_post_threeothersigs def mocked_get(url, params, **options): - if '/crash_data' in url: - assert 'datatype' in params - - if params['datatype'] == 'meta': - return Response(_SAMPLE_META) - if params['datatype'] == 'unredacted': - return Response(dict( - _SAMPLE_UNREDACTED, - dump=dump, - user_comments=comment0 - )) - if 'correlations/signatures' in url: return Response({ 'hits': [ @@ -2379,10 +2369,34 @@ def mocked_get(url, params, **options): ], 'total': 2 }) - raise NotImplementedError(url) + rget.side_effect = mocked_get + def mocked_raw_crash_get(**params): + assert 'datatype' in params + if params['datatype'] == 'meta': + return copy.deepcopy(_SAMPLE_META) + raise NotImplementedError + + models.RawCrash.implementation().get.side_effect = ( + mocked_raw_crash_get + ) + + def mocked_processed_crash_get(**params): + assert 'datatype' in params + if params['datatype'] == 'unredacted': + crash = copy.deepcopy(_SAMPLE_UNREDACTED) + crash['dump'] = dump + crash['user_comments'] = comment0 + return crash + + raise NotImplementedError(params) + + models.UnredactedCrash.implementation().get.side_effect = ( + mocked_processed_crash_get + ) + url = reverse('crashstats:report_index', args=['11cb72f5-eb28-41e1-a8e4-849982120611']) response = self.client.get(url) @@ -2463,52 +2477,6 @@ def test_report_index_with_additional_raw_dump_links(self, rget, rpost): rpost.side_effect = mocked_post_threeothersigs def mocked_get(url, params, **options): - if '/crash_data' in url: - assert 'datatype' in params - - if params['datatype'] == 'meta': - return Response({ - 'InstallTime': '1339289895', - 'FramePoisonSize': '4096', - 'Theme': 'classic/1.0', - 'Version': '5.0a1', - 'Email': 'secret@email.com', - 'Vendor': 'Mozilla', - 'URL': 'farmville.com', - 'additional_minidumps': 'foo, bar,', - }) - if params['datatype'] == 'unredacted': - return Response({ - 'client_crash_date': '2012-06-11T06:08:45', - 'dump': dump, - 'signature': 'FakeSignature1', - 'user_comments': None, - 'uptime': 14693, - 'release_channel': 'nightly', - 'uuid': '11cb72f5-eb28-41e1-a8e4-849982120611', - 'flash_version': '[blank]', - 'hangid': None, - 'distributor_version': None, - 'truncated': True, - 
'process_type': None, - 'id': 383569625, - 'os_version': '10.6.8 10K549', - 'version': '5.0a1', - 'build': '20120609030536', - 'ReleaseChannel': 'nightly', - 'addons_checked': None, - 'product': 'WaterWolf', - 'os_name': 'Mac OS X', - 'last_crash': 371342, - 'date_processed': '2012-06-11T06:08:44', - 'cpu_name': 'amd64', - 'reason': 'EXC_BAD_ACCESS / KERN_INVALID_ADDRESS', - 'address': '0x8', - 'completeddatetime': '2012-06-11T06:08:57', - 'success': True, - 'exploitability': 'Unknown Exploitability' - }) - if 'correlations/signatures' in url: return Response({ 'hits': [ @@ -2522,6 +2490,42 @@ def mocked_get(url, params, **options): rget.side_effect = mocked_get + def mocked_processed_crash_get(**params): + assert 'datatype' in params + + if params['datatype'] == 'unredacted': + crash = copy.deepcopy(_SAMPLE_UNREDACTED) + del crash['json_dump'] + crash['dump'] = dump + return crash + + raise NotImplementedError(params) + + models.UnredactedCrash.implementation().get.side_effect = ( + mocked_processed_crash_get + ) + + def mocked_raw_crash_get(**params): + assert 'datatype' in params + + if params['datatype'] == 'meta': + return { + 'InstallTime': '1339289895', + 'FramePoisonSize': '4096', + 'Theme': 'classic/1.0', + 'Version': '5.0a1', + 'Email': 'secret@email.com', + 'Vendor': 'Mozilla', + 'URL': 'farmville.com', + 'additional_minidumps': 'foo, bar,', + } + + raise NotImplementedError + + models.RawCrash.implementation().get.side_effect = ( + mocked_raw_crash_get + ) + crash_id = '11cb72f5-eb28-41e1-a8e4-849982120611' url = reverse('crashstats:report_index', args=(crash_id,)) response = self.client.get(url) @@ -2565,7 +2569,6 @@ def mocked_get(url, params, **options): @mock.patch('requests.get') def test_report_index_with_symbol_url_in_modules(self, rget, rpost): rpost.side_effect = mocked_post_threeothersigs - dump = 'OS|Mac OS X|10.6.8 10K549\\nCPU|amd64|family 6 mod|1' json_dump = { 'status': 'OK', 'sensitive': { @@ -2598,54 +2601,6 @@ def test_report_index_with_symbol_url_in_modules(self, rget, rpost): } def mocked_get(url, params, **options): - if '/crash_data' in url: - assert 'datatype' in params - - if params['datatype'] == 'meta': - return Response({ - 'InstallTime': '1339289895', - 'FramePoisonSize': '4096', - 'Theme': 'classic/1.0', - 'Version': '5.0a1', - 'Email': 'secret@email.com', - 'Vendor': 'Mozilla', - 'URL': 'farmville.com', - 'additional_minidumps': 'foo, bar,', - }) - if params['datatype'] == 'unredacted': - return Response({ - 'client_crash_date': '2012-06-11T06:08:45', - # 'dump': 'OS|Mac OS X|10.6.8 10K549\nCPU|amd64', - 'dump': dump, - 'signature': 'FakeSignature1', - 'user_comments': None, - 'uptime': 14693, - 'release_channel': 'nightly', - 'uuid': '11cb72f5-eb28-41e1-a8e4-849982120611', - 'flash_version': '[blank]', - 'hangid': None, - 'distributor_version': None, - 'truncated': True, - 'process_type': None, - 'id': 383569625, - 'os_version': '10.6.8 10K549', - 'version': '5.0a1', - 'build': '20120609030536', - 'ReleaseChannel': 'nightly', - 'addons_checked': None, - 'product': 'WaterWolf', - 'os_name': 'Mac OS X', - 'last_crash': 371342, - 'date_processed': '2012-06-11T06:08:44', - 'cpu_name': 'amd64', - 'reason': 'EXC_BAD_ACCESS / KERN_INVALID_ADDRESS', - 'address': '0x8', - 'completeddatetime': '2012-06-11T06:08:57', - 'success': True, - 'exploitability': 'Unknown Exploitability', - 'json_dump': json_dump, - }) - if 'correlations/signatures' in url: return Response({ 'hits': [ @@ -2659,6 +2614,31 @@ def mocked_get(url, params, **options): rget.side_effect = 
mocked_get + def mocked_raw_crash_get(**params): + assert 'datatype' in params + if params['datatype'] == 'meta': + crash = copy.deepcopy(_SAMPLE_META) + crash['additional_minidumps'] = 'foo, bar,' + return crash + raise NotImplementedError + + models.RawCrash.implementation().get.side_effect = ( + mocked_raw_crash_get + ) + + def mocked_processed_crash_get(**params): + assert 'datatype' in params + if params['datatype'] == 'unredacted': + crash = copy.deepcopy(_SAMPLE_UNREDACTED) + crash['json_dump'] = json_dump + return crash + + raise NotImplementedError(params) + + models.UnredactedCrash.implementation().get.side_effect = ( + mocked_processed_crash_get + ) + crash_id = '11cb72f5-eb28-41e1-a8e4-849982120611' url = reverse('crashstats:report_index', args=(crash_id,)) response = self.client.get(url) @@ -2673,7 +2653,6 @@ def mocked_get(url, params, **options): @mock.patch('crashstats.crashstats.models.Bugs.get') @mock.patch('requests.get') def test_report_index_fennecandroid_report(self, rget, rpost): - dump = 'OS|Mac OS X|10.6.8 10K549\nCPU|amd64|family 6 mod|1' comment0 = 'This is a comment\nOn multiple lines' comment0 += '\npeterbe@mozilla.com' comment0 += '\nwww.p0rn.com' @@ -2681,21 +2660,6 @@ def test_report_index_fennecandroid_report(self, rget, rpost): rpost.side_effect = mocked_post_threeothersigs def mocked_get(url, params, **options): - if '/crash_data' in url: - assert 'datatype' in params - - if params['datatype'] == 'meta': - return Response(_SAMPLE_META) - if params['datatype'] == 'unredacted': - raw_crash = dict( - _SAMPLE_UNREDACTED, - dump=dump, - user_comments=comment0, - ) - raw_crash['product'] = 'WinterSun' - - return Response(raw_crash) - if 'correlations/signatures' in url: return Response({ 'hits': [ @@ -2706,8 +2670,33 @@ def mocked_get(url, params, **options): }) raise NotImplementedError(url) + rget.side_effect = mocked_get + def mocked_raw_crash_get(**params): + assert 'datatype' in params + if params['datatype'] == 'meta': + return copy.deepcopy(_SAMPLE_META) + + raise NotImplementedError + + models.RawCrash.implementation().get.side_effect = ( + mocked_raw_crash_get + ) + + def mocked_processed_crash_get(**params): + assert 'datatype' in params + if params['datatype'] == 'unredacted': + crash = copy.deepcopy(_SAMPLE_UNREDACTED) + crash['product'] = 'WinterSun' + return crash + + raise NotImplementedError + + models.UnredactedCrash.implementation().get.side_effect = ( + mocked_processed_crash_get + ) + url = reverse('crashstats:report_index', args=['11cb72f5-eb28-41e1-a8e4-849982120611']) @@ -2733,7 +2722,6 @@ def test_report_index_odd_product_and_version(self, rget, rpost): """If the processed JSON references an unfamiliar product and version it should not use that to make links in the nav to reports for that unfamiliar product and version.""" - dump = 'OS|Mac OS X|10.6.8 10K549\nCPU|amd64|family 6 mod|1' comment0 = 'This is a comment\nOn multiple lines' comment0 += '\npeterbe@mozilla.com' comment0 += '\nwww.p0rn.com' @@ -2741,21 +2729,6 @@ def test_report_index_odd_product_and_version(self, rget, rpost): rpost.side_effect = mocked_post_threeothersigs def mocked_get(url, params, **options): - if '/crash_data' in url: - assert 'datatype' in params - - if params['datatype'] == 'meta': - return Response(_SAMPLE_META) - if params['datatype'] == 'unredacted': - processed = dict( - _SAMPLE_UNREDACTED, - dump=dump, - user_comments=comment0, - ) - processed['product'] = 'SummerWolf' - processed['version'] = '99.9' - return Response(processed) - if 
'correlations/signatures' in url: return Response({ 'hits': [ @@ -2766,8 +2739,34 @@ def mocked_get(url, params, **options): }) raise NotImplementedError(url) + rget.side_effect = mocked_get + def mocked_raw_crash_get(**params): + assert 'datatype' in params + if params['datatype'] == 'meta': + return copy.deepcopy(_SAMPLE_META) + + raise NotImplementedError(params) + + models.RawCrash.implementation().get.side_effect = ( + mocked_raw_crash_get + ) + + def mocked_processed_crash_get(**params): + assert 'datatype' in params + if params['datatype'] == 'unredacted': + crash = copy.deepcopy(_SAMPLE_UNREDACTED) + crash['product'] = 'SummerWolf' + crash['version'] = '99.9' + return crash + + raise NotImplementedError(params) + + models.UnredactedCrash.implementation().get.side_effect = ( + mocked_processed_crash_get + ) + url = reverse('crashstats:report_index', args=['11cb72f5-eb28-41e1-a8e4-849982120611']) response = self.client.get(url) @@ -2792,24 +2791,39 @@ def test_report_index_correlations_failed(self, rget, rpost): rpost.side_effect = mocked_post_threeothersigs def mocked_get(url, params, **options): - if '/crash_data' in url: - assert 'datatype' in params - - if params['datatype'] == 'meta': - return Response(_SAMPLE_META) - if params['datatype'] == 'unredacted': - return Response(dict( - _SAMPLE_UNREDACTED, - dump=dump, - user_comments=comment0, - )) - if 'correlations/signatures' in url: raise models.BadStatusCodeError(500) raise NotImplementedError(url) + rget.side_effect = mocked_get + def mocked_raw_crash_get(**params): + assert 'datatype' in params + if params['datatype'] == 'meta': + return copy.deepcopy(_SAMPLE_META) + + raise NotImplementedError(params) + + models.RawCrash.implementation().get.side_effect = ( + mocked_raw_crash_get + ) + + def mocked_processed_crash_get(**params): + assert 'datatype' in params + if params['datatype'] == 'unredacted': + return copy.deepcopy(dict( + _SAMPLE_UNREDACTED, + dump=dump, + user_comments=comment0, + )) + + raise NotImplementedError(params) + + models.UnredactedCrash.implementation().get.side_effect = ( + mocked_processed_crash_get + ) + url = reverse('crashstats:report_index', args=['11cb72f5-eb28-41e1-a8e4-849982120611']) response = self.client.get(url) @@ -2818,33 +2832,40 @@ def mocked_get(url, params, **options): @mock.patch('crashstats.crashstats.models.Bugs.get') @mock.patch('requests.get') def test_report_index_no_dump(self, rget, rpost): - dump = '' - comment0 = 'This is a comment' - rpost.side_effect = mocked_post_threesigs def mocked_get(url, params, **options): - if '/crash_data' in url: - assert 'datatype' in params - - if params['datatype'] == 'meta': - return Response(_SAMPLE_META) - if params['datatype'] == 'unredacted': - data = dict( - _SAMPLE_UNREDACTED, - dump=dump, - user_comments=comment0, - ) - del data['dump'] - del data['json_dump'] - return Response(data) - if 'correlations/signatures' in url: raise models.BadStatusCodeError(500) raise NotImplementedError(url) + rget.side_effect = mocked_get + def mocked_raw_crash_get(**params): + assert 'datatype' in params + if params['datatype'] == 'meta': + return copy.deepcopy(_SAMPLE_META) + + raise NotImplementedError + + models.RawCrash.implementation().get.side_effect = ( + mocked_raw_crash_get + ) + + def mocked_processed_crash_get(**params): + assert 'datatype' in params + if params['datatype'] == 'unredacted': + crash = copy.deepcopy(_SAMPLE_UNREDACTED) + del crash['json_dump'] + return crash + + raise NotImplementedError(url) + + 
models.UnredactedCrash.implementation().get.side_effect = ( + mocked_processed_crash_get + ) + url = reverse('crashstats:report_index', args=['11cb72f5-eb28-41e1-a8e4-849982120611']) response = self.client.get(url) @@ -2860,53 +2881,12 @@ def test_report_index_invalid_crash_id(self): ok_('Invalid crash ID' in response.content) eq_(response['Content-Type'], 'text/html; charset=utf-8') - @mock.patch('requests.get') - def test_report_pending_today(self, rget): - def mocked_get(url, params, **options): - if ( - '/crash_data' in url and - 'datatype' in params and - params['datatype'] == 'unredacted' - ): - raise models.BadStatusCodeError(404) - - rget.side_effect = mocked_get - - today = datetime.datetime.utcnow().strftime('%y%m%d') - url = reverse('crashstats:report_index', - args=['11cb72f5-eb28-41e1-a8e4-849982%s' % today]) - response = self.client.get(url) - ok_('pendingStatus' in response.content) - eq_(response.status_code, 200) - - yesterday = datetime.datetime.utcnow() - datetime.timedelta(days=1) - yesterday = yesterday.strftime('%y%m%d') - url = reverse('crashstats:report_index', - args=['11cb72f5-eb28-41e1-a8e4-849982%s' % yesterday]) - response = self.client.get(url) - ok_('Crash Not Found' in response.content) - eq_(response.status_code, 200) - - url = reverse('crashstats:report_index', - args=['blablabla']) - response = self.client.get(url) - eq_(response.status_code, 400) - @mock.patch('crashstats.crashstats.models.Bugs.get') @mock.patch('requests.get') def test_report_index_with_valid_install_time(self, rget, rpost): rpost.side_effect = mocked_post_123 def mocked_get(url, params, **options): - if ( - '/crash_data' in url and - 'datatype' in params and - params['datatype'] == 'meta' - ): - return Response({ - 'InstallTime': '1461170304', - 'Version': '5.0a1', - }) if 'crashes/comments' in url: return Response({ 'hits': [], @@ -2918,26 +2898,35 @@ def mocked_get(url, params, **options): 'total': 0, }) - if ( - '/crash_data' in url and - 'datatype' in params and - params['datatype'] == 'unredacted' - ): - return Response({ - 'dump': 'some dump', - 'signature': 'FakeSignature1', - 'uuid': '11cb72f5-eb28-41e1-a8e4-849982120611', - 'process_type': None, - 'os_name': 'Windows NT', - 'product': 'WaterWolf', - 'version': '1.0', - 'cpu_name': 'amd64', - }) - raise NotImplementedError(url) rget.side_effect = mocked_get + def mocked_raw_crash_get(**params): + assert 'datatype' in params + if params['datatype'] == 'meta': + return { + 'InstallTime': '1461170304', + 'Version': '5.0a1', + } + + raise NotImplementedError + + models.RawCrash.implementation().get.side_effect = ( + mocked_raw_crash_get + ) + + def mocked_processed_crash_get(**params): + assert 'datatype' in params + if params['datatype'] == 'unredacted': + return copy.deepcopy(_SAMPLE_UNREDACTED) + + raise NotImplementedError + + models.UnredactedCrash.implementation().get.side_effect = ( + mocked_processed_crash_get + ) + url = reverse( 'crashstats:report_index', args=['11cb72f5-eb28-41e1-a8e4-849982120611'] @@ -2954,17 +2943,6 @@ def test_report_index_with_invalid_install_time(self, rget, rpost): rpost.side_effect = mocked_post_123 def mocked_get(url, params, **options): - if ( - '/crash_data' in url and - 'datatype' in params and - params['datatype'] == 'meta' - ): - return Response({ - 'InstallTime': 'Not a number', - 'Version': '5.0a1', - 'Email': '', - 'URL': None, - }) if 'crashes/comments' in url: return Response({ 'hits': [], @@ -2976,25 +2954,34 @@ def mocked_get(url, params, **options): 'total': 0 }) - if ( - 
'/crash_data' in url and - 'datatype' in params and - params['datatype'] == 'unredacted' - ): - return Response({ - 'dump': 'some dump', - 'signature': 'FakeSignature1', - 'uuid': '11cb72f5-eb28-41e1-a8e4-849982120611', - 'process_type': None, - 'os_name': 'Windows NT', - 'product': 'WaterWolf', - 'version': '1.0', - 'cpu_name': 'amd64', - }) raise NotImplementedError(url) rget.side_effect = mocked_get + def mocked_raw_crash_get(**params): + assert 'datatype' in params + if params['datatype'] == 'meta': + crash = copy.deepcopy(_SAMPLE_META) + crash['InstallTime'] = 'Not a number' + return crash + + raise NotImplementedError(params) + + models.RawCrash.implementation().get.side_effect = ( + mocked_raw_crash_get + ) + + def mocked_processed_crash_get(**params): + assert 'datatype' in params + if params['datatype'] == 'unredacted': + return copy.deepcopy(_SAMPLE_UNREDACTED) + + raise NotImplementedError(params) + + models.UnredactedCrash.implementation().get.side_effect = ( + mocked_processed_crash_get + ) + url = reverse( 'crashstats:report_index', args=['11cb72f5-eb28-41e1-a8e4-849982120611'] @@ -3015,17 +3002,6 @@ def test_report_index_known_total_correlations(self, rget, rpost): rpost.side_effect = mocked_post_123 def mocked_get(url, params, **options): - if ( - '/crash_data' in url and - 'datatype' in params and - params['datatype'] == 'meta' - ): - return Response({ - 'InstallTime': 'Not a number', - 'Version': '5.0a1', - 'Email': '', - 'URL': None, - }) if 'crashes/comments' in url: return Response({ 'hits': [], @@ -3037,12 +3013,25 @@ def mocked_get(url, params, **options): 'total': 0 }) - if ( - '/crash_data' in url and - 'datatype' in params and - params['datatype'] == 'unredacted' - ): - return Response({ + raise NotImplementedError(url) + + rget.side_effect = mocked_get + + def mocked_raw_crash_get(**params): + assert 'datatype' in params + if params['datatype'] == 'meta': + return copy.deepcopy(_SAMPLE_META) + + raise NotImplementedError(params) + + models.RawCrash.implementation().get.side_effect = ( + mocked_raw_crash_get + ) + + def mocked_processed_crash_get(**params): + assert 'datatype' in params + if params['datatype'] == 'unredacted': + return { 'dump': 'some dump', 'signature': 'FakeSignature1', 'uuid': '11cb72f5-eb28-41e1-a8e4-849982120611', @@ -3051,10 +3040,13 @@ def mocked_get(url, params, **options): 'product': 'WaterWolf', 'version': '1.0', 'cpu_name': 'amd64', - }) - raise NotImplementedError(url) + } - rget.side_effect = mocked_get + raise NotImplementedError(params) + + models.UnredactedCrash.implementation().get.side_effect = ( + mocked_processed_crash_get + ) url = reverse( 'crashstats:report_index', @@ -3087,17 +3079,6 @@ def test_report_index_empty_os_name(self, rget, rpost): rpost.side_effect = mocked_post_123 def mocked_get(url, params, **options): - if ( - '/crash_data' in url and - 'datatype' in params and - params['datatype'] == 'meta' - ): - return Response({ - 'InstallTime': 'Not a number', - 'Version': '5.0a1', - 'Email': '', - 'URL': None, - }) if 'crashes/comments' in url: return Response({ 'hits': [], @@ -3109,25 +3090,34 @@ def mocked_get(url, params, **options): 'total': 0 }) - if ( - '/crash_data' in url and - 'datatype' in params and - params['datatype'] == 'unredacted' - ): - return Response({ - 'dump': 'some dump', - 'signature': 'FakeSignature1', - 'uuid': '11cb72f5-eb28-41e1-a8e4-849982120611', - 'process_type': None, - 'os_name': None, - 'product': 'WaterWolf', - 'version': '1.0', - 'cpu_name': 'amd64', - }) raise NotImplementedError(url) 
rget.side_effect = mocked_get + def mocked_raw_crash_get(**params): + assert 'datatype' in params + if params['datatype'] == 'meta': + return copy.deepcopy(_SAMPLE_META) + + raise NotImplementedError + + models.RawCrash.implementation().get.side_effect = ( + mocked_raw_crash_get + ) + + def mocked_processed_crash_get(**params): + assert 'datatype' in params + if params['datatype'] == 'unredacted': + crash = copy.deepcopy(_SAMPLE_UNREDACTED) + crash['os_name'] = None + return crash + + raise NotImplementedError + + models.UnredactedCrash.implementation().get.side_effect = ( + mocked_processed_crash_get + ) + url = reverse( 'crashstats:report_index', args=['11cb72f5-eb28-41e1-a8e4-849982120611'] @@ -3172,28 +3162,11 @@ def test_report_index_with_invalid_parsed_dump(self, rget, rpost): } comment0 = "This is a comment" - email0 = "some@emailaddress.com" - url0 = "someaddress.com" email1 = "some@otheremailaddress.com" rpost.side_effect = mocked_post_123 def mocked_get(url, params, **options): - if ( - '/crash_data' in url and - 'datatype' in params and - params['datatype'] == 'meta' - ): - return Response({ - "InstallTime": "Not a number", - "FramePoisonSize": "4096", - "Theme": "classic/1.0", - "Version": "5.0a1", - "Email": email0, - "Vendor": "Mozilla", - "URL": url0, - "HangID": "123456789" - }) if 'crashes/comments' in url: return Response({ "hits": [ @@ -3215,45 +3188,34 @@ def mocked_get(url, params, **options): "total": 2 }) - if ( - '/crash_data' in url and - 'datatype' in params and - params['datatype'] == 'unredacted' - ): - return Response({ - "client_crash_date": "2012-06-11T06:08:45", - "json_dump": json_dump, - "signature": "FakeSignature1", - "user_comments": None, - "uptime": 14693, - "release_channel": "nightly", - "uuid": "11cb72f5-eb28-41e1-a8e4-849982120611", - "flash_version": "[blank]", - "hangid": None, - "distributor_version": None, - "truncated": True, - "process_type": None, - "id": 383569625, - "os_version": "10.6.8 10K549", - "version": "5.0a1", - "build": "20120609030536", - "ReleaseChannel": "nightly", - "addons_checked": None, - "product": "WaterWolf", - "os_name": "Mac OS X", - "last_crash": 371342, - "date_processed": "2012-06-11T06:08:44", - "cpu_name": "amd64", - "reason": "EXC_BAD_ACCESS / KERN_INVALID_ADDRESS", - "address": "0x8", - "completeddatetime": "2012-06-11T06:08:57", - "success": True, - "exploitability": "Unknown Exploitability" - }) raise NotImplementedError(url) rget.side_effect = mocked_get + def mocked_raw_crash_get(**params): + assert 'datatype' in params + if params['datatype'] == 'meta': + return copy.deepcopy(_SAMPLE_META) + + raise NotImplementedError + + models.RawCrash.implementation().get.side_effect = ( + mocked_raw_crash_get + ) + + def mocked_processed_crash_get(**params): + assert 'datatype' in params + if params['datatype'] == 'unredacted': + crash = copy.deepcopy(_SAMPLE_UNREDACTED) + crash['json_dump'] = json_dump + return crash + + raise NotImplementedError(params) + + models.UnredactedCrash.implementation().get.side_effect = ( + mocked_processed_crash_get + ) + url = reverse('crashstats:report_index', args=['11cb72f5-eb28-41e1-a8e4-849982120611']) response = self.client.get(url) @@ -3265,28 +3227,11 @@ def test_report_index_with_sparse_json_dump(self, rget, rpost): json_dump = {'status': 'ERROR_NO_MINIDUMP_HEADER', 'sensitive': {}} comment0 = 'This is a comment' - email0 = 'some@emailaddress.com' - url0 = 'someaddress.com' email1 = 'some@otheremailaddress.com' rpost.side_effect = mocked_post_123 def mocked_get(url, params, 
**options): - if ( - '/crash_data' in url and - 'datatype' in params and - params['datatype'] == 'meta' - ): - return Response({ - 'InstallTime': 'Not a number', - 'FramePoisonSize': '4096', - 'Theme': 'classic/1.0', - 'Version': '5.0a1', - 'Email': email0, - 'Vendor': 'Mozilla', - 'URL': url0, - 'HangID': '123456789', - }) if 'crashes/comments' in url: return Response({ 'hits': [ @@ -3308,45 +3253,34 @@ def mocked_get(url, params, **options): 'total': 2 }) - if ( - '/crash_data' in url and - 'datatype' in params and - params['datatype'] == 'unredacted' - ): - return Response({ - 'client_crash_date': '2012-06-11T06:08:45', - 'json_dump': json_dump, - 'signature': 'FakeSignature1', - 'user_comments': None, - 'uptime': 14693, - 'release_channel': 'nightly', - 'uuid': '11cb72f5-eb28-41e1-a8e4-849982120611', - 'flash_version': '[blank]', - 'hangid': None, - 'distributor_version': None, - 'truncated': True, - 'process_type': None, - 'id': 383569625, - 'os_version': '10.6.8 10K549', - 'version': '5.0a1', - 'build': '20120609030536', - 'ReleaseChannel': 'nightly', - 'addons_checked': None, - 'product': 'WaterWolf', - 'os_name': 'Mac OS X', - 'last_crash': 371342, - 'date_processed': '2012-06-11T06:08:44', - 'cpu_name': 'amd64', - 'reason': 'EXC_BAD_ACCESS / KERN_INVALID_ADDRESS', - 'address': '0x8', - 'completeddatetime': '2012-06-11T06:08:57', - 'success': True, - 'exploitability': 'Unknown Exploitability' - }) - raise NotImplementedError(url) + rget.side_effect = mocked_get + def mocked_raw_crash_get(**params): + assert 'datatype' in params + if params['datatype'] == 'meta': + return copy.deepcopy(_SAMPLE_META) + + raise NotImplementedError + + models.RawCrash.implementation().get.side_effect = ( + mocked_raw_crash_get + ) + + def mocked_processed_crash_get(**params): + assert 'datatype' in params + if params['datatype'] == 'unredacted': + crash = copy.deepcopy(_SAMPLE_UNREDACTED) + crash['json_dump'] = json_dump + return crash + + raise NotImplementedError + + models.UnredactedCrash.implementation().get.side_effect = ( + mocked_processed_crash_get + ) + url = reverse('crashstats:report_index', args=['11cb72f5-eb28-41e1-a8e4-849982120611']) response = self.client.get(url) @@ -3355,10 +3289,7 @@ def mocked_get(url, params, **options): @mock.patch('crashstats.crashstats.models.Bugs.get') @mock.patch('requests.get') def test_report_index_with_crash_exploitability(self, rget, rpost): - dump = 'OS|Mac OS X|10.6.8 10K549\\nCPU|amd64|family 6 mod|1' comment0 = 'This is a comment' - email0 = 'some@emailaddress.com' - url0 = 'someaddress.com' email1 = 'some@otheremailaddress.com' crash_id = '11cb72f5-eb28-41e1-a8e4-849982120611' @@ -3366,21 +3297,6 @@ def test_report_index_with_crash_exploitability(self, rget, rpost): rpost.side_effect = mocked_post_123 def mocked_get(url, params, **options): - if ( - '/crash_data' in url and - 'datatype' in params and - params['datatype'] == 'meta' - ): - return Response({ - 'InstallTime': 'Not a number', - 'FramePoisonSize': '4096', - 'Theme': 'classic/1.0', - 'Version': '5.0a1', - 'Email': email0, - 'Vendor': 'Mozilla', - 'URL': url0, - 'HangID': '123456789', - }) if '/crashes/comments' in url: return Response({ 'hits': [ @@ -3402,45 +3318,34 @@ def mocked_get(url, params, **options): 'total': 2 }) - if ( - '/crash_data' in url and - 'datatype' in params and - params['datatype'] == 'unredacted' - ): - return Response({ - 'client_crash_date': '2012-06-11T06:08:45', - 'dump': dump, - 'signature': 'FakeSignature1', - 'user_comments': None, - 'uptime': 14693, - 
'release_channel': 'nightly', - 'uuid': '11cb72f5-eb28-41e1-a8e4-849982120611', - 'flash_version': '[blank]', - 'hangid': None, - 'distributor_version': None, - 'truncated': True, - 'process_type': None, - 'id': 383569625, - 'os_version': '10.6.8 10K549', - 'version': '5.0a1', - 'build': '20120609030536', - 'ReleaseChannel': 'nightly', - 'addons_checked': None, - 'product': 'WaterWolf', - 'os_name': 'Mac OS X', - 'last_crash': 371342, - 'date_processed': '2012-06-11T06:08:44', - 'cpu_name': 'amd64', - 'reason': 'EXC_BAD_ACCESS / KERN_INVALID_ADDRESS', - 'address': '0x8', - 'completeddatetime': '2012-06-11T06:08:57', - 'success': True, - 'exploitability': 'Unknown Exploitability', - }) raise NotImplementedError(url) rget.side_effect = mocked_get + def mocked_raw_crash_get(**params): + assert 'datatype' in params + if params['datatype'] == 'meta': + return copy.deepcopy(_SAMPLE_META) + + raise NotImplementedError(params) + + models.RawCrash.implementation().get.side_effect = ( + mocked_raw_crash_get + ) + + def mocked_processed_crash_get(**params): + assert 'datatype' in params + if params['datatype'] == 'unredacted': + crash = copy.deepcopy(_SAMPLE_UNREDACTED) + crash['exploitability'] = 'Unknown Exploitability' + return crash + + raise NotImplementedError + + models.UnredactedCrash.implementation().get.side_effect = ( + mocked_processed_crash_get + ) + url = reverse('crashstats:report_index', args=[crash_id]) response = self.client.get(url) @@ -3455,260 +3360,103 @@ def mocked_get(url, params, **options): ok_('Exploitability' in response.content) ok_('Unknown Exploitability' in response.content) - @mock.patch('requests.get') - def test_report_index_processed_crash_not_found(self, rget): - crash_id = '11cb72f5-eb28-41e1-a8e4-849982120611' - - def mocked_get(url, params, **options): - if ( - '/crash_data' in url and - 'datatype' in params and - params['datatype'] == 'unredacted' - ): - raise models.BadStatusCodeError(404) - - raise NotImplementedError(url) - rget.side_effect = mocked_get - - url = reverse('crashstats:report_index', - args=[crash_id]) - response = self.client.get(url) - - eq_(response.status_code, 200) - ok_('Crash Not Found' in response.content) - @mock.patch('crashstats.crashstats.models.Bugs.get') - @mock.patch('requests.get') - def test_report_index_raw_crash_not_found(self, rget, rpost): + def test_report_index_raw_crash_not_found(self, rpost): crash_id = '11cb72f5-eb28-41e1-a8e4-849982120611' - dump = 'OS|Mac OS X|10.6.8 10K549\\nCPU|amd64|family 6 mod|1' rpost.side_effect = mocked_post_123 - def mocked_get(url, params, **options): - assert '/crash_data/' in url + def mocked_raw_crash_get(**params): assert 'datatype' in params - if params['datatype'] == 'unredacted': - return Response({ - 'client_crash_date': '2012-06-11T06:08:45', - 'dump': dump, - 'signature': 'FakeSignature1', - 'user_comments': None, - 'uptime': 14693, - 'release_channel': 'nightly', - 'uuid': '11cb72f5-eb28-41e1-a8e4-849982120611', - 'flash_version': '[blank]', - 'hangid': None, - 'distributor_version': None, - 'truncated': True, - 'process_type': None, - 'id': 383569625, - 'os_version': '10.6.8 10K549', - 'version': '5.0a1', - 'build': '20120609030536', - 'ReleaseChannel': 'nightly', - 'addons_checked': None, - 'product': 'WaterWolf', - 'os_name': 'Mac OS X', - 'last_crash': 371342, - 'date_processed': '2012-06-11T06:08:44', - 'cpu_name': 'amd64', - 'reason': 'EXC_BAD_ACCESS / KERN_INVALID_ADDRESS', - 'address': '0x8', - 'completeddatetime': '2012-06-11T06:08:57', - 'success': True, - 
'exploitability': 'Unknown Exploitability' - }) - elif params['datatype'] == 'meta': # raw crash json! - raise models.BadStatusCodeError(404) + if params['datatype'] == 'meta': + raise CrashIDNotFound(params['uuid']) - raise NotImplementedError(url) + raise NotImplementedError - rget.side_effect = mocked_get + models.RawCrash.implementation().get.side_effect = ( + mocked_raw_crash_get + ) url = reverse('crashstats:report_index', args=[crash_id]) response = self.client.get(url) - eq_(response.status_code, 200) + eq_(response.status_code, 404) ok_('Crash Not Found' in response.content) - @mock.patch('requests.get') - def test_report_index_pending(self, rget): - crash_id = '11cb72f5-eb28-41e1-a8e4-849982120611' - - def mocked_get(url, params, **options): - if ( - '/crash_data' in url and - 'datatype' in params and - params['datatype'] == 'unredacted' - ): - raise models.BadStatusCodeError(408) - - raise NotImplementedError(url) - rget.side_effect = mocked_get - - url = reverse('crashstats:report_index', - args=[crash_id]) - response = self.client.get(url) - - eq_(response.status_code, 200) - ok_('Fetching this archived report' in response.content) - - @mock.patch('requests.get') - def test_report_index_too_old(self, rget): + @mock.patch('crashstats.crashstats.models.Bugs.get') + def test_report_index_processed_crash_not_found(self, rpost): crash_id = '11cb72f5-eb28-41e1-a8e4-849982120611' - def mocked_get(url, params, **options): - if ( - '/crash_data' in url and - 'datatype' in params and - params['datatype'] == 'unredacted' - ): - raise models.BadStatusCodeError(410) - - raise NotImplementedError(url) - rget.side_effect = mocked_get + rpost.side_effect = mocked_post_123 - url = reverse('crashstats:report_index', - args=[crash_id]) - response = self.client.get(url) + def mocked_raw_crash_get(**params): + assert 'datatype' in params + if params['datatype'] == 'meta': + return copy.deepcopy(_SAMPLE_META) - eq_(response.status_code, 200) - ok_('This archived report has expired' in response.content) + raise NotImplementedError - @mock.patch('requests.get') - def test_report_index_other_error(self, rget): - crash_id = '11cb72f5-eb28-41e1-a8e4-849982120611' + models.RawCrash.implementation().get.side_effect = ( + mocked_raw_crash_get + ) - def mocked_get(url, params, **options): - if ( - '/crash_data' in url and - 'datatype' in params and - params['datatype'] == 'unredacted' - ): - return Response('Scary Error', status_code=500) + def mocked_processed_crash_get(**params): + assert 'datatype' in params + if params['datatype'] == 'unredacted': + raise CrashIDNotFound(params['uuid']) - raise NotImplementedError(url) - rget.side_effect = mocked_get + raise NotImplementedError - url = reverse('crashstats:report_index', - args=[crash_id]) - assert_raises( - models.BadStatusCodeError, - self.client.get, - url + models.UnredactedCrash.implementation().get.side_effect = ( + mocked_processed_crash_get ) - # Let's also check that we get the response in the exception - # message. 
- try: - self.client.get(url) - assert False # shouldn't get here - except models.BadStatusCodeError as exception: - ok_('Scary Error' in str(exception)) - # and it should include the URL it used - mware_url = models.UnredactedCrash.base_url + '/crash_data/' - ok_(mware_url in str(exception)) - @mock.patch('requests.get') - def test_report_pending_json(self, rget): - crash_id = '11cb72f5-eb28-41e1-a8e4-849982120611' - - def mocked_get(url, params, **options): - if ( - '/crash_data' in url and - 'datatype' in params and - params['datatype'] == 'unredacted' - ): - raise models.BadStatusCodeError(408) + def mocked_priority_job_process(**params): + assert params['crash_ids'] == [crash_id] + return True - raise NotImplementedError(url) - - rget.side_effect = mocked_get + models.Priorityjob.implementation().process.side_effect = ( + mocked_priority_job_process + ) - url = reverse('crashstats:report_pending', - args=[crash_id]) + url = reverse('crashstats:report_index', args=[crash_id]) response = self.client.get(url) - expected = { - 'status': 'error', - 'status_message': ('The report for %s' - ' is not available yet.' % crash_id), - 'url_redirect': '' - } - eq_(response.status_code, 200) - eq_(expected, json.loads(response.content)) - - def test_report_index_and_pending_missing_crash_id(self): - url = reverse('crashstats:report_index', args=['']) - response = self.client.get(url) - eq_(response.status_code, 404) - - url = reverse('crashstats:report_pending', args=['']) - response = self.client.get(url) - eq_(response.status_code, 404) + ok_('Please wait...' in response.content) + ok_( + 'Processing this crash report only takes a few seconds' in + response.content + ) @mock.patch('crashstats.crashstats.models.Bugs.get') - @mock.patch('requests.get') - def test_report_index_with_invalid_date_processed(self, rget, rpost): + def test_report_index_with_invalid_date_processed(self, rpost): crash_id = '11cb72f5-eb28-41e1-a8e4-849982120611' - def mocked_get(url, params, **options): - if ( - '/crash_data' in url and - 'datatype' in params and - params['datatype'] == 'meta' - ): - return Response({ - 'InstallTime': 'Not a number', - 'FramePoisonSize': '4096', - 'Theme': 'classic/1.0', - 'Version': '5.0a1', - 'Email': None, - 'Vendor': 'Mozilla', - 'URL': None, - 'HangID': '123456789', - }) - if ( - '/crash_data' in url and - 'datatype' in params and - params['datatype'] == 'unredacted' - ): - return Response({ - 'client_crash_date': '2012-06-11T06:08:45', - 'dump': 'anything', - 'signature': 'FakeSignature1', - 'user_comments': None, - 'uptime': 14693, - 'release_channel': 'nightly', - 'uuid': '11cb72f5-eb28-41e1-a8e4-849982120611', - 'flash_version': '[blank]', - 'hangid': None, - 'distributor_version': None, - 'truncated': True, - 'process_type': None, - 'id': 383569625, - 'os_version': '10.6.8 10K549', - 'version': '5.0a1', - 'build': '20120609030536', - 'ReleaseChannel': 'nightly', - 'addons_checked': None, - 'product': 'WaterWolf', - 'os_name': 'Mac OS X', - 'last_crash': 371342, - # NOTE! 
A wanna-be valid date that is not valid - 'date_processed': '2015-10-10 15:32:07.620535', - 'cpu_name': 'amd64', - 'reason': 'EXC_BAD_ACCESS / KERN_INVALID_ADDRESS', - 'address': '0x8', - 'completeddatetime': '2012-06-11T06:08:57', - 'success': True, - 'exploitability': 'Unknown Exploitability', - }) - raise NotImplementedError(url) + def mocked_raw_crash_get(**params): + assert 'datatype' in params + if params['datatype'] == 'meta': + return copy.deepcopy(_SAMPLE_META) - rget.side_effect = mocked_get + raise NotImplementedError(params) + + models.RawCrash.implementation().get.side_effect = ( + mocked_raw_crash_get + ) + + def mocked_processed_crash_get(**params): + assert 'datatype' in params + if params['datatype'] == 'unredacted': + crash = copy.deepcopy(_SAMPLE_UNREDACTED) + # NOTE! A wanna-be valid date that is not valid + crash['date_processed'] = '2015-10-10 15:32:07.620535' + return crash + raise NotImplementedError + + models.UnredactedCrash.implementation().get.side_effect = ( + mocked_processed_crash_get + ) url = reverse('crashstats:report_index', args=[crash_id]) @@ -4802,30 +4550,12 @@ def mocked_get(url, params, **options): @mock.patch('crashstats.crashstats.models.Bugs.get') @mock.patch('requests.get') def test_report_index_redirect_by_prefix(self, rget, rpost): - - dump = "OS|Mac OS X|10.6.8 10K549\\nCPU|amd64|family 6 mod|1" comment0 = "This is a comment" - email0 = "some@emailaddress.com" - url0 = "someaddress.com" email1 = "some@otheremailaddress.com" rpost.side_effect = mocked_post_123 def mocked_get(url, params, **options): - if ( - '/crash_data' in url and - 'datatype' in params and - params['datatype'] == 'meta' - ): - return Response({ - 'InstallTime': '1339289895', - 'FramePoisonSize': '4096', - 'Theme': 'classic/1.0', - 'Version': '5.0a1', - 'Email': email0, - 'Vendor': 'Mozilla', - 'URL': url0 - }) if 'crashes/comments' in url: return Response({ 'hits': [ @@ -4839,42 +4569,6 @@ def mocked_get(url, params, **options): 'total': 1 }) - if ( - '/crash_data' in url and - 'datatype' in params and - params['datatype'] == 'unredacted' - ): - return Response({ - 'client_crash_date': '2012-06-11T06:08:45', - 'dump': dump, - 'signature': 'FakeSignature1', - 'user_comments': None, - 'uptime': 14693, - 'release_channel': 'nightly', - 'uuid': '11cb72f5-eb28-41e1-a8e4-849982120611', - 'flash_version': '[blank]', - 'hangid': None, - 'distributor_version': None, - 'truncated': True, - 'process_type': None, - 'id': 383569625, - 'os_version': '10.6.8 10K549', - 'version': '5.0a1', - 'build': '20120609030536', - 'ReleaseChannel': 'nightly', - 'addons_checked': None, - 'product': 'WaterWolf', - 'os_name': 'Mac OS X', - 'last_crash': 371342, - 'date_processed': '2012-06-11T06:08:44', - 'cpu_name': 'amd64', - 'reason': 'EXC_BAD_ACCESS / KERN_INVALID_ADDRESS', - 'address': '0x8', - 'completeddatetime': '2012-06-11T06:08:57', - 'success': True, - 'exploitability': 'Unknown Exploitability' - }) - if 'correlations/signatures' in url: return Response({ 'hits': [ @@ -4888,6 +4582,28 @@ def mocked_get(url, params, **options): rget.side_effect = mocked_get + def mocked_raw_crash_get(**params): + assert 'datatype' in params + if params['datatype'] == 'meta': + return copy.deepcopy(_SAMPLE_META) + + raise NotImplementedError(params) + + models.RawCrash.implementation().get.side_effect = ( + mocked_raw_crash_get + ) + + def mocked_processed_crash_get(**params): + assert 'datatype' in params + if params['datatype'] == 'unredacted': + return copy.deepcopy(_SAMPLE_UNREDACTED) + + raise 
NotImplementedError(params) + + models.UnredactedCrash.implementation().get.side_effect = ( + mocked_processed_crash_get + ) + base_crash_id = '11cb72f5-eb28-41e1-a8e4-849982120611' crash_id = settings.CRASH_ID_PREFIX + base_crash_id assert len(crash_id) > 36 @@ -4917,22 +4633,19 @@ def mocked_get(url, params, **options): ok_('' not in response.content) # it's a partial ok_('no reports in the time period specified' in response.content) - @mock.patch('requests.get') - def test_raw_data(self, rget): - def mocked_get(url, params, **options): - assert '/crash_data' in url + def test_raw_data(self): + + def mocked_get(**params): if 'datatype' in params and params['datatype'] == 'raw': - return Response(""" - bla bla bla - """.strip()) + return "bla bla bla" else: # default is datatype/meta - return Response({ + return { 'foo': 'bar', 'stuff': 123, - }) + } - rget.side_effect = mocked_get + models.RawCrash.implementation().get.side_effect = mocked_get crash_id = '176bcd6c-c2ec-4b0c-9d5f-dadea2120531' json_url = reverse('crashstats:raw_data', args=(crash_id, 'json')) @@ -4962,14 +4675,10 @@ def mocked_get(url, params, **options): # dump files are cached. # check the mock function and expect no change - def different_mocked_get(url, **options): - if '/crash_data' in url and 'datatype=raw' in url: - return Response(""" - SOMETHING DIFFERENT - """.strip()) - raise NotImplementedError(url) + def different_mocked_get(**params): + raise AssertionError("shouldn't be used due to caching") - rget.side_effect = different_mocked_get + models.RawCrash.implementation().get.side_effect = different_mocked_get response = self.client.get(dump_url) eq_(response.status_code, 200) diff --git a/webapp-django/crashstats/crashstats/urls.py b/webapp-django/crashstats/crashstats/urls.py index 619bd863b1..9ab148bede 100644 --- a/webapp-django/crashstats/crashstats/urls.py +++ b/webapp-django/crashstats/crashstats/urls.py @@ -90,15 +90,9 @@ url(r'^exploitability/$', views.exploitability_report, name='exploitability_report'), - url(r'^report/index/(?P.*)$', + url(r'^report/index/(?P[\w-]+)$', views.report_index, name='report_index'), - # make the suffix `_ajax` optional there. - # we prefer report/pending/XXX but because of legacy we need to - # support report/pending_ajax/XXX too - url(r'^report/pending(_ajax)?/(?P.*)$', - views.report_pending, - name='report_pending'), url(r'^search/quick/$', views.quick_search, name='quick_search'), diff --git a/webapp-django/crashstats/crashstats/views.py b/webapp-django/crashstats/crashstats/views.py index cd1782479f..ea18991bd7 100644 --- a/webapp-django/crashstats/crashstats/views.py +++ b/webapp-django/crashstats/crashstats/views.py @@ -22,6 +22,7 @@ from session_csrf import anonymous_csrf +from socorro.external.crashstorage_base import CrashIDNotFound from . 
import forms, models, utils from .decorators import check_days_parameter, pass_default_context @@ -1334,8 +1335,6 @@ def exploitability_report(request, default_context=None): @pass_default_context def report_index(request, crash_id, default_context=None): - if not crash_id: - raise http.Http404('Crash id is missing') valid_crash_id = utils.find_crash_id(crash_id) if not valid_crash_id: return http.HttpResponseBadRequest('Invalid crash ID') @@ -1353,33 +1352,26 @@ def report_index(request, crash_id, default_context=None): context = default_context or {} context['crash_id'] = crash_id - api = models.UnredactedCrash() - - def handle_middleware_404(crash_id, error_code): - if error_code == 404: - # if crash was submitted today, send to pending screen - crash_date = datetime.datetime.strptime(crash_id[-6:], '%y%m%d') - crash_age = datetime.datetime.utcnow() - crash_date - if crash_age < datetime.timedelta(days=1): - tmpl = 'crashstats/report_index_pending.html' - else: - tmpl = 'crashstats/report_index_not_found.html' - return render(request, tmpl, context) - elif error_code == 408: - return render(request, - 'crashstats/report_index_pending.html', context) - elif error_code == 410: - return render(request, - 'crashstats/report_index_too_old.html', context) - - # this is OK because this function is expected to be called within - # an exception stack frame - raise + raw_api = models.RawCrash() + try: + context['raw'] = raw_api.get(crash_id=crash_id) + except CrashIDNotFound: + # If the raw crash can't be found, we can't do much. + tmpl = 'crashstats/report_index_not_found.html' + return render(request, tmpl, context, status=404) + api = models.UnredactedCrash() try: context['report'] = api.get(crash_id=crash_id) - except models.BadStatusCodeError as e: - return handle_middleware_404(crash_id, e.status) + except CrashIDNotFound: + # ...if we haven't already done so. + cache_key = 'priority_job:{}'.format(crash_id) + if not cache.get(cache_key): + priority_api = models.Priorityjob() + priority_api.post(crash_ids=[crash_id]) + cache.set(cache_key, True, 60) + tmpl = 'crashstats/report_index_pending.html' + return render(request, tmpl, context) if 'json_dump' in context['report']: json_dump = context['report']['json_dump'] @@ -1434,12 +1426,6 @@ def handle_middleware_404(crash_id, error_code): reverse=True ) - raw_api = models.RawCrash() - try: - context['raw'] = raw_api.get(crash_id=crash_id) - except models.BadStatusCodeError as e: - return handle_middleware_404(crash_id, e.status) - context['raw_keys'] = [] if request.user.has_perm('crashstats.view_pii'): # hold nothing back @@ -1449,7 +1435,8 @@ def handle_middleware_404(crash_id, error_code): x for x in context['raw'] if x in models.RawCrash.API_WHITELIST ] - context['raw_keys'].sort(key=unicode.lower) + # Sort keys case-insensitively + context['raw_keys'].sort(key=lambda s: s.lower()) if request.user.has_perm('crashstats.view_rawdump'): context['raw_dump_urls'] = [ @@ -1523,37 +1510,6 @@ def handle_middleware_404(crash_id, error_code): return render(request, 'crashstats/report_index.html', context) -@utils.json_view -def report_pending(request, crash_id): - if not crash_id: - raise http.Http404("Crash id is missing") - - data = {} - - url = reverse('crashstats:report_index', kwargs=dict(crash_id=crash_id)) - - api = models.UnredactedCrash() - - try: - data['report'] = api.get(crash_id=crash_id) - status = 'ready' - status_message = 'The report for %s is now available.' 
% crash_id - url_redirect = "%s" % url - except models.BadStatusCodeError as e: - if str(e).startswith('5'): - raise - status = 'error' - status_message = 'The report for %s is not available yet.' % crash_id - url_redirect = '' - - data = { - "status": status, - "status_message": status_message, - "url_redirect": url_redirect - } - return data - - @pass_default_context def report_list(request, partial=None, default_context=None): context = default_context or {} diff --git a/webapp-django/crashstats/settings/base.py b/webapp-django/crashstats/settings/base.py index a3f7fbccf5..2f171efa72 100644 --- a/webapp-django/crashstats/settings/base.py +++ b/webapp-django/crashstats/settings/base.py @@ -687,6 +687,9 @@ def path(*dirs): 'rabbitmq_user': config('RABBITMQ_USER', ''), 'rabbitmq_password': config('RABBITMQ_PASSWORD', ''), }, + 'boto': { + 'secret_access_key': config('resource.boto.secret_access_key', ''), + }, }, 'resource': { 'elasticsearch': { @@ -715,6 +718,16 @@ def path(*dirs): 'virtual_host': config('RABBITMQ_VIRTUAL_HOST', '/'), 'port': config('RABBITMQ_PORT', 5672), }, + 'boto': { + 'access_key': config('resource.boto.access_key', ''), + 'bucket_name': config( + 'resource.boto.bucket_name', 'crashstats'), + 'prefix': config('resource.boto.prefix', ''), + 'keybuilder_class': config( + 'resource.boto.keybuilder_class', + 'socorro.external.boto.connection_context.DatePrefixKeyBuilder' + ), + } } } diff --git a/webapp-django/crashstats/settings/bundles.py b/webapp-django/crashstats/settings/bundles.py index d267d3b22a..87f3290863 100644 --- a/webapp-django/crashstats/settings/bundles.py +++ b/webapp-django/crashstats/settings/bundles.py @@ -87,6 +87,12 @@ ), 'output_filename': 'css/report-index.min.css', }, + 'report_pending': { + 'source_filenames': ( + 'crashstats/css/report_pending.less', + ), + 'output_filename': 'css/report-pending.min.css', + }, 'report_list': { 'source_filenames': ( 'crashstats/css/report_list.less', @@ -333,6 +339,12 @@ ), 'output_filename': 'js/report-index.min.js', }, + 'report_pending': { + 'source_filenames': ( + 'crashstats/js/socorro/pending.js', + ), + 'output_filename': 'js/report-pending.min.js', + }, 'report_list': { 'source_filenames': ( 'crashstats/js/jquery/plugins/jquery.cookie.js', From 36a958c4d38d733556b61a3e2d006e2368e6804b Mon Sep 17 00:00:00 2001 From: Peter Bengtsson Date: Thu, 15 Sep 2016 09:03:30 -0400 Subject: [PATCH 07/13] fixes bug 1302997 - Crash Not Found on every crash in stage --- webapp-django/crashstats/settings/base.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/webapp-django/crashstats/settings/base.py b/webapp-django/crashstats/settings/base.py index 2f171efa72..460663d8ff 100644 --- a/webapp-django/crashstats/settings/base.py +++ b/webapp-django/crashstats/settings/base.py @@ -688,7 +688,7 @@ def path(*dirs): 'rabbitmq_password': config('RABBITMQ_PASSWORD', ''), }, 'boto': { - 'secret_access_key': config('resource.boto.secret_access_key', ''), + 'secret_access_key': config('secrets.boto.secret_access_key', ''), }, }, 'resource': { From e25f7ffb61368b259cd89ff15cc8e60fd8e3ff99 Mon Sep 17 00:00:00 2001 From: Peter Bengtsson Date: Thu, 15 Sep 2016 09:56:10 -0400 Subject: [PATCH 08/13] trigger a change --- tools/trigger | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tools/trigger b/tools/trigger index 91321c551d..c57ee649bc 100644 --- a/tools/trigger +++ b/tools/trigger @@ -1 +1 @@ -This will trigger a jenkins run. \ No newline at end of file +This will trigger a jenkins run. Hurray! 
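A note on the report_index rewrite earlier in this series: when the processed
crash is missing, the view queues a priority-processing job but uses the cache
as a short-lived lock, so repeated page reloads do not flood the queue. Below
is a minimal standalone sketch of that dedup pattern using the real Django
cache API; the function name and TTL constant are illustrative, not code from
the patch:

    from django.core.cache import cache

    PRIORITY_TTL = 60  # seconds; matches the one-minute window in the view

    def queue_priority_job(crash_id, priority_api):
        """Submit crash_id for priority processing at most once per window."""
        cache_key = 'priority_job:{}'.format(crash_id)
        if cache.get(cache_key):
            # A submission already happened within the last PRIORITY_TTL
            # seconds; skip instead of queueing a duplicate job.
            return False
        priority_api.post(crash_ids=[crash_id])
        # Record the submission so concurrent or repeated requests back off.
        cache.set(cache_key, True, PRIORITY_TTL)
        return True

The get-then-set pair leaves a small race window between concurrent requests;
cache.add() would make the check atomic, though an occasional duplicate
priority job is harmless here.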
From e106b7003c942d7e1fc23383d7e532dc5331e298 Mon Sep 17 00:00:00 2001 From: Nicholas Nethercote Date: Fri, 16 Sep 2016 13:02:16 +1000 Subject: [PATCH 09/13] Fixes bug 1290329 with a new link. (#3469) The commit adds a new "How to read this crash report" link. It also changes the existing link from "Search Mozilla Support for Help" to "Search Mozilla Support for this signature". --- .../crashstats/jinja2/crashstats/report_index.html | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/webapp-django/crashstats/crashstats/jinja2/crashstats/report_index.html b/webapp-django/crashstats/crashstats/jinja2/crashstats/report_index.html index 5321fdade8..984526cef8 100644 --- a/webapp-django/crashstats/crashstats/jinja2/crashstats/report_index.html +++ b/webapp-django/crashstats/crashstats/jinja2/crashstats/report_index.html @@ -39,7 +39,10 @@

       {{ report.product }} {{ report.version }} Crash Report [@ {{ report.signature }} ]
-      [markup lost in extraction: "Search Mozilla Support for Help" link]
+      [markup lost in extraction: "How to read this crash report" link]
+      [markup lost in extraction: "Search Mozilla Support for this signature" link]
       ID: {{ report.uuid }}
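Stepping back to the test rewrites earlier in the series: they all share one
shape. Rather than intercepting requests.get at the HTTP layer, each test
installs a side_effect on the mocked model implementation's get. Here is a
condensed, self-contained sketch of that pattern; _SAMPLE_META is an
illustrative stand-in for the tests' fixture, and RawCrash is a MagicMock
standing in for the real model class:

    import copy

    import mock  # unittest.mock in Python 3

    _SAMPLE_META = {'InstallTime': '1461170304', 'Version': '5.0a1'}

    def mocked_raw_crash_get(**params):
        # The model layer always passes a datatype; fail loudly otherwise.
        assert 'datatype' in params
        if params['datatype'] == 'meta':
            # deepcopy so a test that mutates the result can't poison others
            return copy.deepcopy(_SAMPLE_META)
        raise NotImplementedError(params)

    RawCrash = mock.MagicMock()  # stands in for models.RawCrash
    RawCrash.implementation().get.side_effect = mocked_raw_crash_get

    assert RawCrash.implementation().get(datatype='meta')['Version'] == '5.0a1'

Because implementation() on a MagicMock returns the same child mock on every
call, wiring the side_effect once is enough for code under test that later
calls models.RawCrash.implementation().get(...).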
From 08231f2342f1ea3ca9651c8424983339e05cd15b Mon Sep 17 00:00:00 2001 From: JP Date: Mon, 19 Sep 2016 09:59:09 -0500 Subject: [PATCH 10/13] Deploy 2.7.11 --- tools/trigger | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tools/trigger b/tools/trigger index c57ee649bc..609275a18e 100644 --- a/tools/trigger +++ b/tools/trigger @@ -1 +1 @@ -This will trigger a jenkins run. Hurray! +This will trigger a jenkins run. Please do not fail me now! From 26c22b06aa5cb4c94a77fc0d9e94ec711603580c Mon Sep 17 00:00:00 2001 From: JP Date: Mon, 19 Sep 2016 13:20:08 -0500 Subject: [PATCH 11/13] Trigger --- tools/trigger | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tools/trigger b/tools/trigger index 609275a18e..b68753b9dd 100644 --- a/tools/trigger +++ b/tools/trigger @@ -1 +1 @@ -This will trigger a jenkins run. Please do not fail me now! +This will trigger a jenkins run. Python is as great to work with as underpants made of fire. From 757ae037c3ab3bf61cd482955e920638d9f75505 Mon Sep 17 00:00:00 2001 From: Chris Lonnen Date: Mon, 19 Sep 2016 15:38:21 -0700 Subject: [PATCH 12/13] fixes bug 1301926 (#3463) Removes all direct references to hbase, hb and happybase modules. This deletes adapters, classes, removes references, supporting libraries, and simply alters docs to reference other implementations. remove some second order hbase code and documentation remove references to the top-level analysis folder, which was only supporting hbase, pig, and other unused tech. This is distinct from the socorro/analysis folder that is actively used still. This removes a curious namespace hack in the middleware_app.py that proxies the hbase namespace to boto. --- .gitignore | 2 +- analysis/hbase_schema | 12 - config/cron_submitter.ini-dist | 36 -- config/middleware.ini-dist | 91 +-- docs/configuring-crash-stats.rst | 2 +- docs/configuring-socorro.rst | 2 +- docs/development/addaservice.rst | 2 +- docs/development/api/crashstorage.rst | 61 +- docs/development/crontabber.rst | 4 +- docs/development/databasetabledesc.rst | 2 +- docs/development/generalarchitecture.rst | 14 - docs/development/generic_app.rst | 2 +- docs/development/glossary/collector.rst | 2 +- docs/development/processor.rst | 2 +- requirements.txt | 2 - scripts/crons/cron_correlations.sh | 38 -- scripts/crons/cron_fixbrokendumps.sh | 18 - scripts/install.sh | 1 - scripts/sync_fs.py | 238 -------- socorro/cron/fixBrokenDumps.py | 82 --- socorro/cron/mixins.py | 45 -- socorro/external/happybase/__init__.py | 0 .../external/happybase/connection_context.py | 154 ----- socorro/external/happybase/crash_data.py | 14 - socorro/external/happybase/crashstorage.py | 234 -------- socorro/external/happybase/hbase_client.py | 280 --------- socorro/external/hb/__init__.py | 0 socorro/external/hb/connection_context.py | 206 ------- socorro/external/hb/crash_data.py | 14 - socorro/external/hb/crashstorage.py | 549 ------------------ socorro/external/hb/hbase_client.py | 280 --------- socorro/middleware/middleware_app.py | 14 +- .../unittest/external/happybase/__init__.py | 0 .../happybase/test_connection_context.py | 269 --------- .../external/happybase/test_crash_data.py | 231 -------- .../external/happybase/test_crashstorage.py | 190 ------ socorro/unittest/external/hb/__init__.py | 0 .../external/hb/test_connection_context.py | 237 -------- .../unittest/external/hb/test_crash_data.py | 237 -------- .../unittest/external/hb/test_crashstorage.py | 187 ------ tools/loadjsonz.py | 42 -- 41 files changed, 19 insertions(+), 3777 
deletions(-) delete mode 100644 analysis/hbase_schema delete mode 100755 scripts/crons/cron_correlations.sh delete mode 100755 scripts/crons/cron_fixbrokendumps.sh delete mode 100644 scripts/sync_fs.py delete mode 100644 socorro/cron/fixBrokenDumps.py delete mode 100644 socorro/external/happybase/__init__.py delete mode 100644 socorro/external/happybase/connection_context.py delete mode 100644 socorro/external/happybase/crash_data.py delete mode 100644 socorro/external/happybase/crashstorage.py delete mode 100644 socorro/external/happybase/hbase_client.py delete mode 100644 socorro/external/hb/__init__.py delete mode 100644 socorro/external/hb/connection_context.py delete mode 100644 socorro/external/hb/crash_data.py delete mode 100644 socorro/external/hb/crashstorage.py delete mode 100644 socorro/external/hb/hbase_client.py delete mode 100644 socorro/unittest/external/happybase/__init__.py delete mode 100644 socorro/unittest/external/happybase/test_connection_context.py delete mode 100644 socorro/unittest/external/happybase/test_crash_data.py delete mode 100644 socorro/unittest/external/happybase/test_crashstorage.py delete mode 100644 socorro/unittest/external/hb/__init__.py delete mode 100644 socorro/unittest/external/hb/test_connection_context.py delete mode 100644 socorro/unittest/external/hb/test_crash_data.py delete mode 100644 socorro/unittest/external/hb/test_crashstorage.py delete mode 100644 tools/loadjsonz.py diff --git a/.gitignore b/.gitignore index b5a6eebbcd..f205bd8cdf 100644 --- a/.gitignore +++ b/.gitignore @@ -4,9 +4,9 @@ socorro/unittest/config/*.py *.sw[po] *.log distribute*.tar.gz -analysis/build/ breakpad/ breakpad.tar.gz +depot_tools/ nosetests.xml scripts/config/*.py socorro/unittest/config/*.py diff --git a/analysis/hbase_schema b/analysis/hbase_schema deleted file mode 100644 index 1068bc4bb1..0000000000 --- a/analysis/hbase_schema +++ /dev/null @@ -1,12 +0,0 @@ -create 'crash_report_signatures', {NAME => 'counters', COMPRESSION => 'LZO', VERSIONS => '1', TTL => '2147483647', BLOCKSIZE => '65536', IN_MEMORY => 'false', BLOCKCACHE => 'true'}, {NAME => 'json', COMPRESSION => 'LZO', VERSIONS => '3', TTL => '2147483647', BLOCKSIZE => '65536', IN_MEMORY => 'false', BLOCKCACHE => 'true'} -create 'crash_reports', {NAME => 'flags', VERSIONS => '1', COMPRESSION => 'LZO', TTL => '2147483647', BLOCKSIZE => '65536', IN_MEMORY => 'false', BLOCKCACHE => 'true'}, {NAME => 'ids', COMPRESSION => 'LZO', VERSIONS => '1', TTL => '2147483647', BLOCKSIZE => '65536', IN_MEMORY => 'false', BLOCKCACHE => 'true'}, {NAME => 'meta_data', COMPRESSION => 'LZO', VERSIONS => '3', TTL => '2147483647', BLOCKSIZE => '65536', IN_MEMORY => 'false', BLOCKCACHE => 'true'}, {NAME => 'processed_data', VERSIONS => '3', COMPRESSION => 'LZO', TTL => '2147483647', BLOCKSIZE => '65536', IN_MEMORY => 'false', BLOCKCACHE => 'true'}, {NAME => 'raw_data', COMPRESSION => 'LZO', VERSIONS => '3', TTL => '2147483647', BLOCKSIZE => '65536', IN_MEMORY => 'false', BLOCKCACHE => 'true'}, {NAME => 'timestamps', COMPRESSION => 'LZO', VERSIONS => '1', TTL => '2147483647', BLOCKSIZE => '65536', IN_MEMORY => 'false', BLOCKCACHE => 'true'} -create 'crash_reports_index_hang_id', {NAME => 'ids', COMPRESSION => 'LZO', VERSIONS => '1', TTL => '2147483647', BLOCKSIZE => '65536', IN_MEMORY => 'false', BLOCKCACHE => 'true'} -create 'crash_reports_index_hang_id_submitted_time', {NAME => 'ids', COMPRESSION => 'LZO', VERSIONS => '1', TTL => '2147483647', BLOCKSIZE => '65536', IN_MEMORY => 'false', BLOCKCACHE => 'true'} -create 
'crash_reports_index_legacy_processed', {NAME => 'ids', COMPRESSION => 'NONE', VERSIONS => '1', TTL => '2147483647', BLOCKSIZE => '65536', IN_MEMORY => 'false', BLOCKCACHE => 'true'} -create 'crash_reports_index_legacy_submitted_time', {NAME => 'ids', COMPRESSION => 'LZO', VERSIONS => '1', TTL => '2147483647', BLOCKSIZE => '65536', IN_MEMORY => 'false', BLOCKCACHE => 'true'} -create 'crash_reports_index_legacy_unprocessed_flag', {NAME => 'ids', COMPRESSION => 'NONE', VERSIONS => '1', TTL => '2147483647', BLOCKSIZE => '65536', IN_MEMORY => 'false', BLOCKCACHE => 'true'}, {NAME => 'processor_state', VERSIONS => '5', COMPRESSION => 'NONE', TTL => '2147483647', BLOCKSIZE => '65536', IN_MEMORY => 'false', BLOCKCACHE => 'true'} -create 'crash_reports_index_priority_processed', {NAME => 'ids', COMPRESSION => 'NONE', VERSIONS => '1', TTL => '2147483647', BLOCKSIZE => '65536', IN_MEMORY => 'false', BLOCKCACHE => 'true'} -create 'crash_reports_index_signature_ooid', {NAME => 'ids', COMPRESSION => 'LZO', VERSIONS => '1', TTL => '2147483647', BLOCKSIZE => '65536', IN_MEMORY => 'false', BLOCKCACHE => 'true'} -create 'crash_reports_index_submitted_time', {NAME => 'ids', COMPRESSION => 'LZO', VERSIONS => '1', TTL => '2147483647', BLOCKSIZE => '65536', IN_MEMORY => 'false', BLOCKCACHE => 'true'} -create 'crash_reports_index_unprocessed_flag', {NAME => 'ids', VERSIONS => '1', COMPRESSION => 'NONE', TTL => '2147483647', BLOCKSIZE => '65536', IN_MEMORY => 'false', BLOCKCACHE => 'true'}, {NAME => 'processor_state', COMPRESSION => 'NONE', VERSIONS => '5', TTL => '2147483647', BLOCKSIZE => '65536', IN_MEMORY => 'false', BLOCKCACHE => 'true'} -create 'metrics', {NAME => 'counters', COMPRESSION => 'LZO', VERSIONS => '1', TTL => '2147483647', BLOCKSIZE => '65536', IN_MEMORY => 'false', BLOCKCACHE => 'true'} diff --git a/config/cron_submitter.ini-dist b/config/cron_submitter.ini-dist index 1da48e378b..7659d4a29a 100644 --- a/config/cron_submitter.ini-dist +++ b/config/cron_submitter.ini-dist @@ -119,41 +119,6 @@ # converter: str dump_file_suffix='.dump' - # name: forbidden_keys - # doc: a comma delimited list of keys banned from the processed crash in HBase - # converter: socorro.external.hbase.connection_context. - forbidden_keys='email, url, user_id, exploitability' - - # name: hbase_connection_pool_class - # doc: the class responsible for pooling and giving out HBaseconnections - # converter: configman.converters.class_converter - hbase_connection_pool_class='socorro.external.hbase.connection_context.HBaseConnectionContextPooled' - - # name: hbase_host - # doc: Host to HBase server - # converter: str - hbase_host='localhost' - - # name: hbase_port - # doc: Port to HBase server - # converter: int - hbase_port='9090' - - # name: hbase_timeout - # doc: timeout in milliseconds for an HBase connection - # converter: int - hbase_timeout='5000' - - # name: number_of_retries - # doc: Max. number of retries when fetching from hbaseClient - # converter: int - number_of_retries='2' - - # name: source_implementation - # doc: a class for a source of raw crashes - # converter: configman.converters.class_converter - source_implementation='socorro.external.hbase.crashstorage.HBaseCrashStorage' - # name: sql # doc: an sql string that selects crash_ids # converter: str @@ -195,4 +160,3 @@ # doc: the number of crashes to submit (all, forever, 1...) 
# converter: str number_of_submissions='all' - diff --git a/config/middleware.ini-dist b/config/middleware.ini-dist index 7f88c47675..d5e6d46949 100644 --- a/config/middleware.ini-dist +++ b/config/middleware.ini-dist @@ -62,40 +62,6 @@ # umask to use for new files #umask=18 - [[hb]] - - #+include ./common_hb.ini - - # delays in seconds between retries - #backoff_delays=10, 30, 60, 120, 300 - - # the suffix used to identify a dump file (for use in temp files) - #dump_file_suffix=.dump - - # the class responsible for proving an hbase connection - #hbase_connection_context_class=socorro.external.hb.connection_context.HBaseConnectionContext - - # Host to HBase server - #hbase_host=localhost - - # Port to HBase server - #hbase_port=9090 - - # timeout in milliseconds for an HBase connection - #hbase_timeout=5000 - - # the maximum number of new crashes to yield at a time - #new_crash_limit=1000000 - - # a local filesystem path where dumps temporarily during processing - #temporary_file_system_storage_path=/tmp - - # a class that will execute transactions - #transaction_executor_class=socorro.database.transaction_executor.TransactionExecutorWithInfiniteBackoff - - # seconds between log during retries - #wait_log_interval=10 - [[logging]] #+include ./common_logging.ini @@ -263,59 +229,6 @@ # see "resource.fs.umask" for the default or override it here #umask=18 -[hbase] - - # delays in seconds between retries - # see "resource.hb.backoff_delays" for the default or override it here - #backoff_delays=10, 30, 60, 120, 300 - - # the suffix used to identify a dump file (for use in temp files) - # see "resource.hb.dump_file_suffix" for the default or override it here - #dump_file_suffix=.dump - - # a list of keys not allowed in a redacted processed crash - # see "resource.redactor.forbidden_keys" for the default or override it here - #forbidden_keys=url, email, user_id, exploitability,json_dump.sensitive,upload_file_minidump_flash1.json_dump.sensitive,upload_file_minidump_flash2.json_dump.sensitive,upload_file_minidump_browser.json_dump.sensitive,memory_info - - # None - #hbase_class=socorro.external.hb.crashstorage.HBaseCrashStorage - - # the class responsible for proving an hbase connection - # see "resource.hb.hbase_connection_context_class" for the default or override it here - #hbase_connection_context_class=socorro.external.hb.connection_context.HBaseConnectionContext - - # Host to HBase server - # see "resource.hb.hbase_host" for the default or override it here - #hbase_host=localhost - - # Port to HBase server - # see "resource.hb.hbase_port" for the default or override it here - #hbase_port=9090 - - # timeout in milliseconds for an HBase connection - # see "resource.hb.hbase_timeout" for the default or override it here - #hbase_timeout=5000 - - # the maximum number of new crashes to yield at a time - # see "resource.hb.new_crash_limit" for the default or override it here - #new_crash_limit=1000000 - - # the name of the class that implements a 'redact' method - # see "resource.redactor.redactor_class" for the default or override it here - #redactor_class=socorro.external.crashstorage_base.Redactor - - # a local filesystem path where dumps temporarily during processing - # see "resource.hb.temporary_file_system_storage_path" for the default or override it here - #temporary_file_system_storage_path=/tmp - - # a class that will execute transactions - # see "resource.hb.transaction_executor_class" for the default or override it here - 
#transaction_executor_class=socorro.database.transaction_executor.TransactionExecutorWithInfiniteBackoff - - # seconds between log during retries - # see "resource.hb.wait_log_interval" for the default or override it here - #wait_log_interval=10 - [http] [[correlations]] @@ -336,9 +249,9 @@ [implementations] # list of packages for service implementations - #implementation_list=psql: socorro.external.postgresql, hbase: socorro.external.hb, es: socorro.external.es, fs: socorro.external.fs, http: socorro.external.http, rabbitmq: socorro.external.rabbitmq + #implementation_list=psql: socorro.external.postgresql, boto: socorro.external.boto, es: socorro.external.es, fs: socorro.external.fs, http: socorro.external.http, rabbitmq: socorro.external.rabbitmq - # comma separated list of class overrides, e.g `Crashes: hbase` + # comma separated list of class overrides, e.g `Crashes: boto` #service_overrides=CrashData: fs, Correlations: http, CorrelationsSignatures: http, SuperSearch: es, Priorityjobs: rabbitmq, Query: es [introspection] diff --git a/docs/configuring-crash-stats.rst b/docs/configuring-crash-stats.rst index c980ae4ee3..3491ba34f7 100644 --- a/docs/configuring-crash-stats.rst +++ b/docs/configuring-crash-stats.rst @@ -142,7 +142,7 @@ underlying data stores: .. code-block:: bash - implementations__implementation_list='psql: socorro.external.postgresql, fs: socorro.external.filesystem, es: socorro.external.es, http: socorro.external.http, rabbitmq: socorro.external.rabbitmq, hb: socorro.external.fs' + implementations__implementation_list='psql: socorro.external.postgresql, fs: socorro.external.filesystem, es: socorro.external.es, http: socorro.external.http, rabbitmq: socorro.external.rabbitmq' implementations__service_overrides='Correlations: http, CorrelationsSignatures: http, SuperSearch: es, Priorityjobs: rabbitmq, Search: es, Query: es' # Pluggable Elasticsearch implementation elasticsearch__elasticsearch_class='socorro.external.es.connection_context.ConnectionContext' diff --git a/docs/configuring-socorro.rst b/docs/configuring-socorro.rst index 34bc10f3be..aa5fbe262a 100644 --- a/docs/configuring-socorro.rst +++ b/docs/configuring-socorro.rst @@ -188,7 +188,7 @@ in AWS using Consul at https://github.com/mozilla/socorro-infra/ Socorro has a very powerful and expressive configuration system, and can be configured to read from and write to a number of different data stores -(S3, Elasticsearch, HBase, PostgreSQL) and use queues (RabbitMQ) +(S3, Elasticsearch, PostgreSQL) and use queues (RabbitMQ) For instance, to have processor store crashes to both to the filesystem and to ElasticSearch: diff --git a/docs/development/addaservice.rst b/docs/development/addaservice.rst index f76e5c45c6..a69b9fab6d 100644 --- a/docs/development/addaservice.rst +++ b/docs/development/addaservice.rst @@ -16,7 +16,7 @@ URL with parameters. Documentation for each service is available in the Those services are not containing any code, but are only interfaces. They are using other resources from the external module. That external module is composed of one submodule for each external resource we are using. For example, -there is a PostgreSQL submodule, an elasticsearch submodule and an HBase +there is a PostgreSQL submodule, an elasticsearch submodule and a boto (AWS S3) submodule. 
You will also find some common code among external resources in diff --git a/docs/development/api/crashstorage.rst b/docs/development/api/crashstorage.rst index 0366a61f49..063e4cceef 100644 --- a/docs/development/api/crashstorage.rst +++ b/docs/development/api/crashstorage.rst @@ -32,16 +32,16 @@ Concrete implementation: * `NullCrashStorage`: Silently ignores everything it is told to do. Examples of other concrete implementations are: `PostgreSQLCrashStorage`, -`HBaseCrashStorage`. +`BotoCrashStorage`. CrashStorage containers for aggregating multiple crash storage implementations: * `PolyCrashStorage`: Container for other crash storage systems. * `FallbackCrashStorage`: Container for two other crash storage systems, a primary and a secondary. Attempts on the primary, if it fails it will - fallback to the secondary. In use when we had primary/secondary HBase. - Can be heterogeneous, example: Hbase + filesystem and use crashmovers to - move from filesystem into hbase when hbase comes back. + fallback to the secondary. In use when we have cutover between data stores. + Can be heterogeneous, example: S3 + filesystem and use crashmovers to + move from filesystem into S3 when S3 comes back. * `PrimaryDeferredStorage`: Container for two different storage systems and a predicate function. If predicate is false, store in primary, otherwise store in secondary. Usecase: situation where we want crashes to be put @@ -142,7 +142,7 @@ Use cases: * For Mozilla use by the collectors. * For other users, you can use this class as your primary storage instead of -HBase. Be sure to implement this in collectors, crashmovers, processors and +S3. Be sure to implement this in collectors, crashmovers, processors and middleware (depending on which components you use in your configuration). `Important ops note:` @@ -168,48 +168,6 @@ Classes: in-filesystem queueing techniques so that we know which crashes are new. Backwards compatible with `socorro.external.filesystem` (aka the 2009 system). -socorro.external.hb -------------------- - -socorro.external.hb.crashstorage -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -This is used by crashmovers, processors. In the future, our middleware will -also use this instead of socorro.external.hbase. Can store raw crashes and -dumps. It has no knowledge of aggregations or normalized data. - -*TODO: Needs crash_data to be implemented for middleware* - -Special functions: - -* `crash_id_to_timestamped_row_id`: HBase uses a different primary key than our - internal UUID. Taking the first character and last six, and copying them to the - front of the UUID. First character is the salt for the region, and the next - six provide the date, for ordering. Sometimes you'll see 'ooid' or 'uuid' in - the docs, but we really mean `crash_id`. - -Implementation: - -* `HBaseCrashStorage`: implements access to HBase. HBase schema is defined in - ``analysis/hbase_schema``. - -Exceptions: - -* `BadCrashIdException`: just passes - -socorro.external.hb.connection_context -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -* `HBaseConnection`: all of the code that implements the core connection. Loose - wrapper around a bare socket speaking Thrift protocol. Commit/rollback are - noops. - -* `HBaseConnectionContext`: In production use. A factory in the form of a - functor for creating the HBaseConnection instances. - -* `HBasePersistentConnectionContext`: These are "pooled" so you can use them - again without closing. We don't use it and appears to be broken. 
- socorro.external.postgresql --------------------------- @@ -303,11 +261,6 @@ socorro.external.filesystem * Preceded `socorro.external.fs`. -socorro.external.hbase -^^^^^^^^^^^^^^^^^^^^^^ - -* Still in use by the middleware for `crash_data`. - socorro.storage ^^^^^^^^^^^^^^^ @@ -331,7 +284,7 @@ Which classes are used with which _app using `PolyCrashStore`. In testing we use `socorro.external.fs`, `socorro.external.rabbitmq`, and `socorro.external.postgresql`. -* `socorro.middleware.middleware_app`: In production: `socorro.external.hbase`. +* `socorro.middleware.middleware_app`: In production: `socorro.external.boto`. In testing: we use `socorro.external.fs` and `socorro.external.postgresql`. * `socorro.collector.submitter_app`: Defines it's own storage classes: @@ -340,8 +293,6 @@ Which classes are used with which _app to get a list of crashstorage ids and uses any other crashstorage as a source for the raw crashes that it pulls. -*TODO: update submitter_app to use the new socorro.external.hb instead of hbase* - Which classes can be used together ---------------------------------- diff --git a/docs/development/crontabber.rst b/docs/development/crontabber.rst index ffc3947d57..0b24181289 100644 --- a/docs/development/crontabber.rst +++ b/docs/development/crontabber.rst @@ -342,8 +342,8 @@ Writing cron apps (aka. jobs) Because of the configurable nature of the ``crontabber`` the actual cron apps can be located anywhere. For example, if it's related to -``HBase`` it could for example be in -``socorro/external/hbase/mycronapp.py``. However, for the most part +``S3`` it could for example be in +``socorro/external/boto/mycronapp.py``. However, for the most part it's probably a good idea to write them in ``socorro/cron/jobs/`` and write one class per file to make it clear. There are already some "sample apps" in there that does nothing except serving as good diff --git a/docs/development/databasetabledesc.rst b/docs/development/databasetabledesc.rst index de7317595a..a6fc4c7a65 100644 --- a/docs/development/databasetabledesc.rst +++ b/docs/development/databasetabledesc.rst @@ -304,7 +304,7 @@ Contains crash-count summaries of crashes per architecture and number of cores. *correlation_modules* -Will contain crash-counts for modules per correlation. Will be populated daily by pull from Hbase. +Will contain crash-counts for modules per correlation. Will be populated daily by pull from S3. *crashes_by_user, crashes_by_user_view* diff --git a/docs/development/generalarchitecture.rst b/docs/development/generalarchitecture.rst index e5e8a79a98..14582350ca 100644 --- a/docs/development/generalarchitecture.rst +++ b/docs/development/generalarchitecture.rst @@ -19,8 +19,6 @@ will find the following folders. Here is what each of them contains: +-----------------+-------------------------------------------------------------+ | Folder | Description | +=================+=============================================================+ -| analysis/ | Contains metrics jobs such as mapreduce. Will be moved. | -+-----------------+-------------------------------------------------------------+ | config/ | Contains the Apache configuration for the different parts | | | of the Socorro application. | +-----------------+-------------------------------------------------------------+ @@ -60,20 +58,8 @@ Here are descriptions of every submodule in there: +-------------------+---------------------------------------------------------------+ | external | Here are APIs related to external resources like databases. 
| +-------------------+---------------------------------------------------------------+ -| integrationtest | Osolete. | -+-------------------+---------------------------------------------------------------+ -| lib | Different libraries used all over Socorro’s code. | -+-------------------+---------------------------------------------------------------+ | middleware | New-style middleware services place. | +-------------------+---------------------------------------------------------------+ -| monitor | All code related to monitors. | -+-------------------+---------------------------------------------------------------+ -| othertests | Some other tests? | -+-------------------+---------------------------------------------------------------+ -| services | Old-style middleware services place. | -+-------------------+---------------------------------------------------------------+ -| storage | HBase related code. | -+-------------------+---------------------------------------------------------------+ | unittest | All our unit tests are here. | +-------------------+---------------------------------------------------------------+ | webapi | Contains a few tools used by web-based services. | diff --git a/docs/development/generic_app.rst b/docs/development/generic_app.rst index d5483521a3..dbb8a23390 100644 --- a/docs/development/generic_app.rst +++ b/docs/development/generic_app.rst @@ -53,7 +53,7 @@ in the ``TransactionExecutor`` is you can see `here `_ -The idea is that any external module (e.g. HBase, PostgreSQL, etc) +The idea is that any external module (e.g. Boto, PostgreSQL, etc) can define a ``ConnectionContext`` class as per this model. What its job is is to create and close connections and it has to do so in a contextmanager. What that means is that you can do this:: diff --git a/docs/development/glossary/collector.rst b/docs/development/glossary/collector.rst index b52e6b71cc..6487bfe5c5 100644 --- a/docs/development/glossary/collector.rst +++ b/docs/development/glossary/collector.rst @@ -22,4 +22,4 @@ failed saves. This file system would likely be an NFS mounted file system. After a crash is saved, there is an app called :ref:`crashmover-chapter` that -will transfer the crashes to HBase. +will transfer the crashes to S3. diff --git a/docs/development/processor.rst b/docs/development/processor.rst index 337aaba10d..3fe51f3e82 100644 --- a/docs/development/processor.rst +++ b/docs/development/processor.rst @@ -10,7 +10,7 @@ Introduction Socorro Processor is a multithreaded application that applies JSON/dump pairs to the stackwalk_server application, parses the -output, and records the results in the hbase. The processor, coupled +output, and records the results in the S3. The processor, coupled with stackwalk_server, is computationally intensive. Multiple instances of the processor can be run simultaneously from different machines. 
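The ``generic_app.rst`` hunk above ends at ``you can do this::``, with the example itself falling outside the diff context; a minimal sketch of the pattern being described, using hypothetical names (``connection_context``, ``do_work``), would be::

    # a ConnectionContext instance is a factory/functor: calling it opens
    # a connection, yields it, and closes it when the block exits
    with connection_context() as connection:
        do_work(connection)  # hypothetical function that uses the connection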
diff --git a/requirements.txt b/requirements.txt index 0fd9a07472..9c16e384c9 100644 --- a/requirements.txt +++ b/requirements.txt @@ -36,8 +36,6 @@ configman==1.2.11 \ --hash=sha256:16798e8a67467f50f8f9b080c8e70a41f0ff7d2dd061e74423e53bc7ed92fb8d configobj==4.7.2 \ --hash=sha256:515ff923462592e8321df8b48c47e3428f8d406ee22b8de77bef969d1af11171 -hbase-thrift==0.20.4 \ - --hash=sha256:a33e36759cba1a8c31c3c01e943b4ee204604d6ff13dda281f8f3893b23910e6 isodate==0.5.4 \ --hash=sha256:42105c41d037246dc1987e36d96f3752ffd5c0c24834dd12e4fdbe1e79544e31 lxml==3.5.0 \ diff --git a/scripts/crons/cron_correlations.sh b/scripts/crons/cron_correlations.sh deleted file mode 100755 index 55f6a8b41d..0000000000 --- a/scripts/crons/cron_correlations.sh +++ /dev/null @@ -1,38 +0,0 @@ -#!/bin/bash -# This Source Code Form is subject to the terms of the Mozilla Public -# License, v. 2.0. If a copy of the MPL was not distributed with this -# file, You can obtain one at http://mozilla.org/MPL/2.0/. - - -. /etc/socorro/socorrorc - -NAME=`basename $0 .sh` - -# TODO this needs to stay in sync with the correlations.pig script -# FIXME move this bit to pig when we switch to 0.9 and use the new PigStorage -COLUMNS="filename,debug_file,debug_id,module_version,product,version,os_name,reason" -DATE=`date -d 'yesterday' +%y%m%d` -OUTPUT_DATE=`date -d $DATE +%Y%m%d` -OUTPUT_FILE="/mnt/crashanalysis/crash_analysis/correlations/correlations-${OUTPUT_DATE}.txt" -lock $NAME - -pig -param start_date=$DATE -param end_date=$DATE ${SOCORRO_DIR}/analysis/correlations.pig >> /var/log/socorro/cron_correlations.log 2>&1 -fatal $? "pig run failed" - -TMPFILE=`mktemp` -echo $COLUMNS > $TMPFILE -fatal $? "could not write header to tmpfile" - -hadoop fs -cat correlations-${DATE}-${DATE} >> $TMPFILE -fatal $? "hadoop cat failed writing to tmpfile" - -cat $OUTPUT_FILE | psql -U $databaseUserName -h $databaseHost $databaseName -c 'COPY correlations_raw FROM STDIN WITH CSV HEADER' -fatal $? "writing correlations to DB failed" - -mv $TMPFILE $OUTPUT_FILE -fatal $? "could not move tmpfile to output dir" - -hadoop fs -rmr correlations-${DATE}-${DATE} -fatal $? "hadoop cleanup failed" - -unlock $NAME diff --git a/scripts/crons/cron_fixbrokendumps.sh b/scripts/crons/cron_fixbrokendumps.sh deleted file mode 100755 index 6cacbb7622..0000000000 --- a/scripts/crons/cron_fixbrokendumps.sh +++ /dev/null @@ -1,18 +0,0 @@ -#!/bin/bash -# This Source Code Form is subject to the terms of the Mozilla Public -# License, v. 2.0. If a copy of the MPL was not distributed with this -# file, You can obtain one at http://mozilla.org/MPL/2.0/. - - -. /etc/socorro/socorrorc -# Mozilla PHX needs this because of the particular VLAN setup there -# TODO - give cron jobs their own config overrides -. /etc/socorro/socorro-monitor.conf - -NAME=`basename $0 .sh` -lock $NAME -pyjob $NAME startFixBrokenDumps -EXIT_CODE=$? 
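-# release the lock before exiting, then propagate pyjob's exit status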
-unlock $NAME - -exit $EXIT_CODE diff --git a/scripts/install.sh b/scripts/install.sh index 40256c70d3..874a462ca3 100755 --- a/scripts/install.sh +++ b/scripts/install.sh @@ -56,7 +56,6 @@ rsync -a sql $BUILD_DIR/application rsync -a wsgi $BUILD_DIR/application rsync -a stackwalk $BUILD_DIR/ rsync -a scripts/stackwalk.sh $BUILD_DIR/stackwalk/bin/ -rsync -a analysis $BUILD_DIR/ rsync -a alembic $BUILD_DIR/application rsync -a webapp-django $BUILD_DIR/ # because this file is served from the parent of the `webapp-django/` directory diff --git a/scripts/sync_fs.py b/scripts/sync_fs.py deleted file mode 100644 index 64f5d8826e..0000000000 --- a/scripts/sync_fs.py +++ /dev/null @@ -1,238 +0,0 @@ -#!/usr/bin/env python - -import os, sys -import happybase -import json -import logging - -from boto.sqs import connect_to_region as sqs_connect -from boto.sqs.jsonmessage import JSONMessage -from boto.s3 import connect_to_region as s3_connect -from boto.s3.key import Key - -from multiprocessing import Process as TaskClass -from multiprocessing import JoinableQueue as Queue - -import signal -import random - -from collections import deque - - -logger = logging.getLogger(__name__) - -# Following params need to be adjusted based on payload size, bandwidth etc -MAX_ROWS_IN_FLIGHT = 4096 -TASK_QUEUE_SIZE = MAX_ROWS_IN_FLIGHT * 4 - - -class HBaseSource: - def __init__(self, addr, row_range, max_rows = 2048, batch_size = 256): - self.thrift_addr = addr - self.start_row, self.end_row = row_range - self.max_rows = max_rows - self.batch_size = batch_size - - def items(self): - prev_last_read_key = None - curr_last_read_key = self.start_row - end_row = self.end_row - - while True: - src_tbl = happybase.Connection(random.choice(self.thrift_addr)).table('crash_reports') - - nrows = 0 - - try: - logger.debug('fetch %d rows of data via thrift', self.max_rows) - - # scan fetches rows with key in the range [row_start, row_stop) - # this necessitates the check for repeating keys as stopping condition - # - logger.info("scan start") - data = deque(src_tbl.scan(row_start = curr_last_read_key, - row_stop = end_row, - columns = ['raw_data', 'processed_data', 'meta_data'], - limit = self.max_rows, - batch_size = self.batch_size)) - logger.info("scan end %d rows starting at %s", len(data), data[0][0]) - while True: - if not data: - break - - key, val = data.popleft() - if (key == prev_last_read_key): - # last record from previous batch should be ignored - continue - - yield key, val - nrows += 1 - - prev_last_read_key = curr_last_read_key - curr_last_read_key = key - - logger.debug('read %d rows of data from hbase ending at %s', nrows, curr_last_read_key) - if nrows < self.max_rows: - print >> sys.stderr, "end of range. exiting" - break - - except happybase.hbase.ttypes.IOError: - logger.exception('caught exception. 
retrying.') - - except Exception: - logger.exception('unrecoverable exception.') - raise - -class SourceWorker(TaskClass): - def __init__(self, queue, source_config): - TaskClass.__init__(self) - - self.source = HBaseSource(*source_config) - self.queue = queue - - def run(self): - num_rows_written = 0 - total_size_written = 0 - s3_path_tmpl = '{env}/v1/{ftype}/{uuid}' - env = 'stage' - - for key, cols in self.source.items(): - dump_names = [] - for j in cols.keys(): - - suffix = get_suffix(j) - if not suffix: - #logger.info('column %s ignored for key %s', j, key) - continue - - if j.startswith('raw_data'): - dump_names.append(suffix) - - # crashstats/stage/v1/ - # format {{bucket}}/{{prefix}}/{{version}}/{{crash_type}}/{{crash_id}} - skey = s3_path_tmpl.format(env = env, - uuid = key[7:], - ftype = suffix) - - self.queue.put((skey, cols[j])) - - total_size_written += len(cols[j]) - - self.queue.put((s3_path_tmpl.format(env = env, - uuid = key[7:], - ftype = 'dump_names'), - json.dumps(dump_names))) - - num_rows_written += 1 - - if ((num_rows_written % 1000) == 0): - logger.info("wrote %d rows, at %s", num_rows_written, key) - logger.warn("qsize is %d", self.queue.qsize()) - - print >> sys.stderr, "SourceWorker DONE", num_rows_written, total_size_written - -class S3Worker(TaskClass): - def __init__(self, s3_region, s3_bucket, task_queue, result_queue): - signal.signal(signal.SIGINT, signal.SIG_IGN) - - TaskClass.__init__(self) - self.task_queue = task_queue - self.result_queue = result_queue - self.s3_region = s3_region - self.s3_bucket = s3_bucket - - def setup_s3(self): - self.s3 = s3_connect(self.s3_region) - self.bucket = self.s3.get_bucket(self.s3_bucket) - - def write_to_s3(self, key, payload): - k = Key(self.bucket) - k.key = key - k.set_contents_from_string(payload) - - def run(self): - self.setup_s3() - - while True: - kv = self.task_queue.get() - - if kv is None: - print >> sys.stderr, '%s: Exiting' % self.name - self.task_queue.task_done() - break - - k, v = kv - self.write_to_s3(k, v) - self.task_queue.task_done() - return - -def get_suffix(colname): - suffix_map = { - 'processed_data:json' : 'processed_crash', - 'raw_data:dump' : 'dump', - 'meta_data:json' : 'raw_crash', - 'raw_data:upload_file_minidump_browser' : 'upload_file_minidump_browser', - 'raw_data:upload_file_minidump_flash1' : 'upload_file_minidump_flash1', - 'raw_data:upload_file_minidump_flash2' : 'upload_file_minidump_flash2' - } - - if colname in suffix_map: - return suffix_map[colname] - elif colname.startswith('raw_data'): - return colname.split(':', 1)[1] - else: - return None - - -def main(num_workers = 64): - if len(sys.argv) != 3: - show_usage_and_quit() - - queue = Queue(TASK_QUEUE_SIZE) - - # start s3 workers - workers = [S3Worker('us-west-2', 'crashstats', queue, None) - for i in xrange(num_workers)] - - for i in workers: - i.start() - - thrift_hosts = sys.argv[1].split(',') - date = sys.argv[2] - - # start hbase workers - key_ranges = [] - for i in ['0', '1', '2', '3', '4', '5', '6', '7', '8', '9', 'a', 'b', 'c', 'd', 'e', 'f']: - key_ranges.append(('%s%s%s' % (i, date, i), '%s%s%sg' % (i, date, i))) - - num_hbase_workers = 1 - - for i in xrange(0, len(key_ranges), num_hbase_workers): - src_workers = [] - krng = key_ranges[i : (i + num_hbase_workers)] - - for j in range(len(krng)): - src_workers.append(SourceWorker(queue, (thrift_hosts, krng[j]))) - - for w in src_workers: - print "starting src worker", w - w.start() - - for w in src_workers: - w.join() - - for i in workers: - queue.put(None) - - 
queue.join() - -def show_usage_and_quit(): - print >> sys.stderr, "Usage: %s hosts('host1,host2,host3') date(YYMMDD)" % (sys.argv[0]) - sys.exit(2) - - -if __name__ == '__main__': - logging.basicConfig(format = '%(asctime)s %(name)s:%(levelname)s: %(message)s', - level = logging.INFO) - - main() diff --git a/socorro/cron/fixBrokenDumps.py b/socorro/cron/fixBrokenDumps.py deleted file mode 100644 index cc38f6fa8a..0000000000 --- a/socorro/cron/fixBrokenDumps.py +++ /dev/null @@ -1,82 +0,0 @@ -#!/usr/bin/python -# This Source Code Form is subject to the terms of the Mozilla Public -# License, v. 2.0. If a copy of the MPL was not distributed with this -# file, You can obtain one at http://mozilla.org/MPL/2.0/. - - -import time -import sys -import subprocess -import os -import cPickle - -import psycopg2 -import psycopg2.extras - -import socorrolib.lib.util -import socorro.external.hbase.hbase_client as hbaseClient - -from datetime import datetime, timedelta - -from socorrolib.lib.datetimeutil import utc_now -from socorrolib.lib.datetimeutil import date_to_string - -def fetchOoids(configContext, logger, query): - try: - databaseDSN = "host=%(databaseHost)s dbname=%(databaseName)s user=%(databaseUserName)s password=%(databasePassword)s" % configContext - conn = psycopg2.connect(databaseDSN) - cur = conn.cursor() - except: - socorrolib.lib.util.reportExceptionAndAbort(logger) - - last_date_processed = get_last_run_date(configContext) - - rows = [] - try: - before = time.time() - logger.debug('last_date_processed used for query: %s' % last_date_processed) - cur.execute(query % last_date_processed) - rows = cur.fetchall() - conn.commit() - except: - socorrolib.lib.util.reportExceptionAndAbort(logger) - - return rows, last_date_processed - -def fix(configContext, logger, query, fixer): - rows, last_date_processed = fetchOoids(configContext, logger, query) - hbc = hbaseClient.HBaseConnectionForCrashReports(configContext.hbaseHost, configContext.hbasePort, configContext.hbaseTimeout, logger=logger) - for row in rows: - try: - ooid, last_date_processed = row - logger.info('fixing ooid: %s' % ooid) - dump = hbc.get_dump(ooid) - fname = '/dev/shm/%s.dump' % ooid - with open(fname, 'wb') as orig_dump_file: - orig_dump_file.write(dump) - logger.debug('wrote dump file: %s' % fname) - logger.debug('fixed dump file: %s' % fname) - subprocess.check_call([fixer, fname]) - logger.debug('fixer: %s' % fixer) - with open(fname, 'rb') as fixed_dump_file: - fixed_dump = fixed_dump_file.read() - hbc.put_fixed_dump(ooid, fixed_dump, add_to_unprocessed_queue = True, submitted_timestamp = date_to_string(utc_now())) - logger.debug('put fixed dump file into hbase: %s' % fname) - os.unlink(fname) - logger.debug('removed dump file: %s' % fname) - except: - socorrolib.lib.util.reportExceptionAndContinue(logger) - - return last_date_processed - -def get_last_run_date(config): - try: - with open(config.persistentBrokenDumpPathname, 'r') as f: - return cPickle.load(f) - except IOError: - return utc_now() - timedelta(days=config.daysIntoPast) - -def save_last_run_date(config, date): - with open(config.persistentBrokenDumpPathname, 'w') as f: - return cPickle.dump(date, f) - diff --git a/socorro/cron/mixins.py b/socorro/cron/mixins.py index 67e060282c..031d7edf85 100644 --- a/socorro/cron/mixins.py +++ b/socorro/cron/mixins.py @@ -6,51 +6,6 @@ with_single_transaction ) -#============================================================================== -# dedicated hbase mixins 
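-# each definition below applies functools.partial to a generic decorator
-# factory, binding the connection-context class and the resource name
-# 'long_term_storage'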
-#------------------------------------------------------------------------------ -# this class decorator adds attributes to the class in the form: -# self.long_term_storage_connection -# self.long_term_storage_transaction -# when using this definition as a class decorator, it is necessary to use -# parenthesis as it is a function call: -# @with_postgres_transactions() -# class MyClass ... -with_hbase_transactions = partial( - with_transactional_resource, - 'socorro.external.hb.connection_context.ConnectionContext', - 'long_term_storage' -) -#------------------------------------------------------------------------------ -# this class decorator adds a _run_proxy method to the class that will -# acquire a database connection and then pass it to the invocation of the -# class' "run" method. Since the connection is in the form of a -# context manager, the connection will automatically be closed when "run" -# completes. -# when using this definition as a class decorator, it is necessary to use -# parenthesis as it is a function call: -# @with_postgres_transactions() -# class MyClass ... -with_hbase_connection_as_argument = partial( - with_resource_connection_as_argument, - 'long_term_storage' -) -#------------------------------------------------------------------------------ -# this class decorator adds a _run_proxy method to the class that will -# call the class' run method in the context of a database transaction. It -# passes the connection to the "run" function. When "run" completes without -# raising an exception, the transaction will be commited if the connection -# context class understands transactions. The default HBase connection does not -# do transactions -# when using this definition as a class decorator, it is necessary to use -# parenthesis as it is a function call: -# @with_postgres_transactions() -# class MyClass ... -with_single_hb_transaction = partial( - with_single_transaction, - 'long_term_storage' -) - #============================================================================== # dedicated rabbitmq mixins #------------------------------------------------------------------------------ diff --git a/socorro/external/happybase/__init__.py b/socorro/external/happybase/__init__.py deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/socorro/external/happybase/connection_context.py b/socorro/external/happybase/connection_context.py deleted file mode 100644 index 22169713fa..0000000000 --- a/socorro/external/happybase/connection_context.py +++ /dev/null @@ -1,154 +0,0 @@ -# This Source Code Form is subject to the terms of the Mozilla Public -# License, v. 2.0. If a copy of the MPL was not distributed with this -# file, You can obtain one at http://mozilla.org/MPL/2.0/. 
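-# happybase-based connection contexts: a plain context that opens one
-# connection per transaction, and a pooled variant built on
-# happybase.ConnectionPool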
- -import contextlib -import socket -import threading - -from configman.config_manager import RequiredConfig -from configman import Namespace - -import happybase - - -#============================================================================== -class HBaseConnection(object): - """An HBase connection class encapsulating various parts of the underlying - mechanism to connect to HBase.""" - #-------------------------------------------------------------------------- - def __init__(self, config, connection): - self.config = config - self._connection = connection - - #-------------------------------------------------------------------------- - def commit(self): - pass - - #-------------------------------------------------------------------------- - def rollback(self): - pass - - #-------------------------------------------------------------------------- - def close(self): - self._connection.close() - - #-------------------------------------------------------------------------- - def __getattr__(self, name): - return getattr(self._connection, name) - - -#============================================================================== -class HappyBaseConnectionContext(RequiredConfig): - """This class implements a connection to HBase for every transaction to be - executed. - """ - required_config = Namespace() - required_config.add_option( - 'hbase_host', - doc='Host to HBase server', - default='localhost', - reference_value_from='resource.hb', - ) - required_config.add_option( - 'hbase_port', - doc='Port to HBase server', - default=9090, - reference_value_from='resource.hb', - ) - required_config.add_option( - 'hbase_timeout', - doc='timeout in milliseconds for an HBase connection', - default=5000, - reference_value_from='resource.hb', - ) - required_config.add_option( - 'temporary_file_system_storage_path', - doc='a local filesystem path where dumps temporarily ' - 'during processing', - default='/tmp', - reference_value_from='resource.hb', - ) - required_config.add_option( - 'dump_file_suffix', - doc='the suffix used to identify a dump file (for use in temp files)', - default='.dump', - reference_value_from='resource.hb', - ) - - operational_exceptions = ( - happybase.NoConnectionsAvailable, - socket.timeout, - socket.error, - ) - - conditional_exceptions = () - - #-------------------------------------------------------------------------- - def __init__(self, config): - super(HappyBaseConnectionContext, self).__init__() - self.config = config - - #-------------------------------------------------------------------------- - def connection(self, name=None): - return HBaseConnection( - self.config, - happybase.Connection( - host=self.config.hbase_host, - port=self.config.hbase_port, - timeout=self.config.hbase_timeout - ) - ) - - #-------------------------------------------------------------------------- - @contextlib.contextmanager - def __call__(self, name=None): - conn = self.connection(name) - try: - yield conn - finally: - self.close_connection(conn) - - #-------------------------------------------------------------------------- - def force_reconnect(self): - pass - - #-------------------------------------------------------------------------- - def close(self): - pass - - #-------------------------------------------------------------------------- - def close_connection(self, connection, force=False): - connection.close() - - #-------------------------------------------------------------------------- - def is_operational_exception(self, msg): - return False - - 
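-# both contexts are used as functors:
-#
-#     with context() as connection:
-#         table = connection.table('crash_reports')
-#
-# the plain context closes the connection when the block exits; the pooled
-# variant below returns it to happybase's connection pool instead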
-#============================================================================== -class HappyBasePooledConnectionContext(HappyBaseConnectionContext): - """This class implements a connection to HBase for every transaction to be - executed. - """ - - #-------------------------------------------------------------------------- - def __init__(self, config): - super(HappyBasePooledConnectionContext, self).__init__(config) - self._connection_pool = happybase.ConnectionPool( - 20, # TODO: how to get this number imported from the taskmanager - host=self.config.hbase_host, - port=self.config.hbase_port, - timeout=self.config.hbase_timeout - ) - - #-------------------------------------------------------------------------- - def connection(self, name=None): - raise Exception('must use context manager') - - #-------------------------------------------------------------------------- - @contextlib.contextmanager - def __call__(self, name=None): - with self._connection_pool.connection() as connection: - yield HBaseConnection(self.config, connection) - diff --git a/socorro/external/happybase/crash_data.py b/socorro/external/happybase/crash_data.py deleted file mode 100644 index 1c7ecb9208..0000000000 --- a/socorro/external/happybase/crash_data.py +++ /dev/null @@ -1,14 +0,0 @@ -# This Source Code Form is subject to the terms of the Mozilla Public -# License, v. 2.0. If a copy of the MPL was not distributed with this -# file, You can obtain one at http://mozilla.org/MPL/2.0/. - -from socorro.external.crash_data_base import CrashDataBase - - -class CrashData(CrashDataBase): - """ - Implement the /crash_data service with HBase. - """ - def get_storage(self): - return self.config.hb.hbase_class(self.config.hb) - diff --git a/socorro/external/happybase/crashstorage.py b/socorro/external/happybase/crashstorage.py deleted file mode 100644 index 27be84a32d..0000000000 --- a/socorro/external/happybase/crashstorage.py +++ /dev/null @@ -1,234 +0,0 @@ -# This Source Code Form is subject to the terms of the Mozilla Public -# License, v. 2.0. If a copy of the MPL was not distributed with this -# file, You can obtain one at http://mozilla.org/MPL/2.0/. - -import datetime -import json -import os - -from socorro.external.happybase.connection_context import \ - HappyBaseConnectionContext -from socorro.external.crashstorage_base import ( - CrashStorageBase, - CrashIDNotFound, - MemoryDumpsMapping, - FileDumpsMapping -) -from socorrolib.lib.util import DotDict - -from configman import Namespace, class_converter - - -class BadCrashIDException(ValueError): - pass - - -def crash_id_to_row_id(crash_id): - """ - Returns a row_id suitable for the HBase crash_reports table. - The first hex character of the crash_id is used to "salt" the rowkey - so that there should always be 16 HBase RegionServers responsible - for dealing with the current stream of data. - Then, we put the last six digits of the crash_id which represent the - submission date. This lets us easily scan through the crash_reports - table by day. - Finally, we append the normal crash_id string. - """ - try: - return "%s%s%s" % (crash_id[0], crash_id[-6:], crash_id) - except Exception, x: - raise BadCrashIDException(x) - - -def row_id_to_crash_id(row_id): - """ - Returns the natural ooid given an HBase row key. - See ooid_to_row_id for structure of row_id. 
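-    A row_id is salt + YYMMDD + crash_id, so stripping the seven-character
-    prefix recovers the original crash_id.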
- """ - try: - return row_id[7:] - except Exception, x: - raise BadCrashIDException(x) - - -def crash_id_to_timestamped_row_id(crash_id, timestamp): - """ - Returns a row_id suitable for the HBase crash_reports index tables. - The first hex character of the ooid is used to "salt" the rowkey - so that there should always be 16 HBase RegionServers responsible - for dealing with the current stream of data. - Then, we put the crash_report submission timestamp. This lets us - easily scan through a time specific region of the index. - Finally, we append the normal ooid string for uniqueness. - """ - if timestamp[-6] in "-+": - return "%s%s%s" % (crash_id[0], timestamp[:-6], crash_id) - return "%s%s%s" % (crash_id[0], timestamp, crash_id) - - -class HBaseCrashStorage(CrashStorageBase): - required_config = Namespace() - required_config.add_option( - 'transaction_executor_class', - default="socorro.database.transaction_executor." - "TransactionExecutorWithInfiniteBackoff", - doc='a class that will execute transactions', - from_string_converter=class_converter, - reference_value_from='resource.hb', - ) - required_config.add_option( - 'hbase_connection_context_class', - default=HappyBaseConnectionContext, - doc='the class responsible for proving an hbase connection', - reference_value_from='resource.hb', - ) - - def __init__(self, config, quit_check_callback=None): - super(HBaseCrashStorage, self).__init__( - config, - quit_check_callback - ) - self.logger.info('connecting to hbase via happybase') - self.hbase = config.hbase_connection_context_class(config) - self.transaction = config.transaction_executor_class( - config, - self.hbase, - quit_check_callback=quit_check_callback - ) - - def save_raw_crash(self, raw_crash, dumps, crash_id): - row_id = crash_id_to_row_id(crash_id) - submitted_timestamp = raw_crash['submitted_timestamp'] - - columns_and_values = { - "flags:processed": "N", - "meta_data:json": json.dumps(raw_crash), - "timestamps:submitted": submitted_timestamp, - "ids:ooid": crash_id, - } - # we don't know where the dumps came from, they could be in - # in the form of names to binary blobs or names to pathnames. - # this call ensures that we've got the former. 
- in_memory_dumps = dumps.as_memory_dumps_mapping() - for key, dump in in_memory_dumps.iteritems(): - if key in (None, '', 'upload_file_minidump'): - key = 'dump' - columns_and_values['raw_data:%s' % key] = dump - - def do_save(connection, raw_crash, in_memory_dumps, crash_id): - crash_report_table = connection.table('crash_reports') - crash_report_table.put( - row_id, - columns_and_values - ) - self.transaction(do_save, raw_crash, in_memory_dumps, crash_id) - - def save_processed(self, processed_crash): - crash_id = processed_crash['uuid'] - row_id = crash_id_to_row_id(crash_id) - columns_and_values = { - "timestamps:processed": processed_crash['completeddatetime'], - "processed_data:signature": processed_crash['signature'], - "processed_data:json": json.dumps( - processed_crash - ), - "flags:processed": "" - } - - def do_save(connection, processed_crash): - crash_report_table = connection.table('crash_reports') - crash_report_table.put( - row_id, - columns_and_values - ) - - self.transaction(do_save, processed_crash) - - def get_raw_crash(self, crash_id): - row_id = crash_id_to_row_id(crash_id) - - def _do_get_raw_crash(connection, row_id): - crash_report_table = connection.table('crash_reports') - try: - return crash_report_table.row( - row_id, - columns=['meta_data:json'] - )['meta_data:json'] - except KeyError: - raise CrashIDNotFound(crash_id) - raw_crash_json_str = self.transaction(_do_get_raw_crash, row_id) - raw_crash = json.loads(raw_crash_json_str, object_hook=DotDict) - return raw_crash - - def get_raw_dump(self, crash_id, name=None): - row_id = crash_id_to_row_id(crash_id) - if name in (None, '', 'upload_file_minidump'): - name = 'dump' - column_name = 'raw_data:%s' % name - def do_get(connection, row_id, name): - crash_report_table = connection.table('crash_reports') - try: - return crash_report_table.row( - row_id, - columns=[column_name] - )[column_name] - except KeyError: - raise CrashIDNotFound(crash_id) - return self.transaction(do_get, row_id, name) - - @staticmethod - def _make_dump_name(family_qualifier): - name = family_qualifier.split(':')[1] - if name == 'dump': - name = 'upload_file_minidump' - return name - - def get_raw_dumps(self, crash_id): - row_id = crash_id_to_row_id(crash_id) - - def do_get(connection, row_id): - try: - crash_report_table = connection.table('crash_reports') - dumps = crash_report_table.row( - row_id, - columns=['raw_data'] - ) - # ensure that we return a proper mapping of names to - # binary blobs. 
- return MemoryDumpsMapping( - (self._make_dump_name(k), v) for k, v in dumps.iteritems() - ) - except KeyError: - raise CrashIDNotFound(crash_id) - - return self.transaction(do_get, row_id) - - def get_raw_dumps_as_files(self, crash_id): - in_memory_dumps = self.get_raw_dumps(crash_id) - # convert our in memory name/blob data into name/pathname data - return in_memory_dumps.as_file_dumps_mapping( - crash_id, - self.hbase.config.temporary_file_system_storage_path, - self.hbase.config.dump_file_suffix - ) - - - def get_unredacted_processed(self, crash_id): - row_id = crash_id_to_row_id(crash_id) - - def do_get(connection, row_id): - crash_report_table = connection.table('crash_reports') - try: - return crash_report_table.row( - row_id, - columns=['processed_data:json'] - )['processed_data:json'] - except KeyError: - raise CrashIDNotFound(crash_id) - processed_crash_json_str = self.transaction(do_get, row_id) - processed_crash = json.loads( - processed_crash_json_str, - object_hook=DotDict - ) - return processed_crash - diff --git a/socorro/external/happybase/hbase_client.py b/socorro/external/happybase/hbase_client.py deleted file mode 100644 index f1a9940d83..0000000000 --- a/socorro/external/happybase/hbase_client.py +++ /dev/null @@ -1,280 +0,0 @@ -from socorrolib.app import generic_app - -from configman import Namespace, RequiredConfig, ConfigurationManager -from configman.converters import class_converter - -from socorro.external.happybase.crashstorage import ( - HBaseCrashStorage, \ - crash_id_to_row_id, - row_id_to_crash_id -) - -import itertools -import pprint -import contextlib -import gzip -import sys -import json - - -_raises_exception = object() - - -class NotEnoughArguments(Exception): - def __init__(self, arg): - self.arg = arg - - -def expect_from_aggregation(required_config, name, i, - default=_raises_exception): - def _closure(g, l, a): - if len(a) < i + 1: - if default is _raises_exception: - raise NotEnoughArguments(name) - return default - return a[i] - required_config.add_aggregation(name, _closure) - - -class _Command(RequiredConfig): - required_config = Namespace() - - def __init__(self, app): - self.app = app - self.config = app.config - self.storage = app.storage - - -class _CommandRequiringCrashID(_Command): - required_config = Namespace() - expect_from_aggregation(required_config, 'crash_id', 0) - - -class _CommandRequiringTable(_Command): - required_config = Namespace() - expect_from_aggregation(required_config, 'table', 0) - - -class _CommandRequiringTableRow(_CommandRequiringTable): - required_config = Namespace() - expect_from_aggregation(required_config, 'row_id', 1) - - -class _CommandRequiringScanParameters(_CommandRequiringTable): - required_config = Namespace() - expect_from_aggregation(required_config, 'prefix', 1) - expect_from_aggregation(required_config, 'columns', 2) - expect_from_aggregation(required_config, 'limit', 3) - - -class help(_Command): - """Usage: help - Get help on commands.""" - def run(self): - self.app.config_manager.output_summary() - -class get_raw_crash(_CommandRequiringCrashID): - """Usage: get_raw_crash CRASH_ID - Get the raw crash JSON data.""" - def run(self): - pprint.pprint(self.storage.get_raw_crash(self.config.crash_id)) - - -class get_raw_dumps(_CommandRequiringCrashID): - """Usage: get_raw_dumps CRASH_ID - Get information on the raw dumps for a crash.""" - def run(self): - for name, dump in self.storage.get_raw_dumps( - self.config.crash_id - ).items(): - dump_name = "%s.%s.dump" % (self.config.crash_id, name) - with 
open(dump_name, "w") as f: - f.write(dump) - print("%s: dump length = %s" % (name, len(dump))) - - -class get_processed(_CommandRequiringCrashID): - """Usage: get_processed CRASH_ID - Get the redacted processed JSON for a crash""" - def run(self): - if self.config.json: - print json.dumps(self.storage.get_processed(self.config.crash_id)) - else: - pprint.pprint(self.storage.get_processed(self.config.crash_id)) - - -class get_unredacted_processed(_CommandRequiringCrashID): - """Usage: get_unredacted_processed CRASH_ID - Get the unredacted processed JSON for a crash""" - def run(self): - if self.config.json: - print json.dumps(self.storage.get_unredacted_processed( - self.config.crash_id - )) - else: - pprint.pprint(self.storage.get_unredacted_processed( - self.config.crash_id - )) - - -class get_report_processing_state(_CommandRequiringCrashID): - """Usage: get_report_processing_state CRASH_ID - Get the report processing state for a crash.""" - def run(self): - @self.storage._wrap_in_transaction - def transaction(conn): - pprint.pprint(self.storage._get_report_processing_state( - conn, - self.config.crash_id - )) - transaction() - - -class union_scan_with_prefix(_CommandRequiringScanParameters): - """Usage: union_scan_with_prefix TABLE PREFIX COLUMNS [LIMIT] - Do a union scan on a table using a given prefix.""" - def run(self): - @self.storage._wrap_in_transaction - def transaction(conn, limit=self.config.limit): - for row in itertools.islice( - self.storage._union_scan_with_prefix( - conn, - self.config.table, - self.config.prefix, - self.config.columns - ), - self.config.limit): - pprint.pprint(row) - transaction() - - -class merge_scan_with_prefix(_CommandRequiringScanParameters): - """Usage: merge_scan_with_prefix TABLE PREFIX COLUMNS [LIMIT] - Do a merge scan on a table using a given prefix.""" - def run(self): - @self.storage._wrap_in_transaction - def transaction(conn, limit=self.config.limit): - for row in itertools.islice( - self.storage._merge_scan_with_prefix( - conn, - self.config.table, - self.config.prefix, - self.config.columns - ), - self.config.limit): - pprint.pprint(row) - transaction() - - -class describe_table(_CommandRequiringTable): - """Usage: describe_table TABLE - Describe the details of a table in HBase.""" - def run(self): - @self.storage._wrap_in_transaction - def transaction(conn): - pprint.pprint(conn.getColumnDescriptors(self.config.table)) - transaction() - - -class get_full_row(_CommandRequiringTableRow): - """Usage: describe_table TABLE ROW_ID - Pretty-print a row in HBase.""" - def run(self): - @self.storage._wrap_in_transaction - def transaction(conn): - pprint.pprint(self.storage._make_row_nice(conn.getRow( - self.config.table, - self.config.row_id - )[0])) - transaction() - - -class export_processed_crashes_for_date(_Command): - """Usage: export_processed_crashes_for_date DATE PATH - Export all crashes for a given date to a path.""" - required_config = Namespace() - expect_from_aggregation(required_config, 'date', 0) - expect_from_aggregation(required_config, 'path', 1) - - def run(self): - @self.storage._wrap_in_transaction - def transaction(conn): - for row in itertools.islice( - self.storage._union_scan_with_prefix(conn, - 'crash_reports', - self.config.date, - ['processed_data:json']), - 10 - ): - crash_id = row_id_to_crash_id(row['_rowkey']) - - if row['processed_data:json']: - file_name = os.path.join(self.config.path, - crash_id + '.jsonz') - with contextlib.closing(gzip.GzipFile(file_name, - 'w', - 9)) as f: - 
json.dump(row['processed_data:json'], f) - transaction() - - -class HBaseClientConfigurationManager(ConfigurationManager): - def output_summary(self, output_stream=sys.stdout, block_password=True): - super(HBaseClientConfigurationManager, self).output_summary( - output_stream, - block_password - ) - - print >> output_stream, "Available commands:" - - for command in (var for var in globals().values() - if isinstance(var, type) and - issubclass(var, _Command) and - var.__name__[0] != '_'): - - print >> output_stream, ' ' + command.__name__ - print >> output_stream, ' ' + (command.__doc__ or - '(undocumented)') - print >> output_stream, '' - - -class HBaseClientApp(generic_app.App): - app_name = "hbase_client.py" - app_version = "0.1" - app_description = __doc__ - - required_config = Namespace() - required_config.add_option( - 'hbase_crash_storage_class', - default=HappyBaseCrashStorage, - - doc='the class responsible for proving an hbase connection', - from_string_converter=class_converter - ) - required_config.add_option( - 'command', - default=help, - doc='command to use', - is_argument=True, - from_string_converter=lambda s: class_converter(__name__ + '.' + s) - ) - required_config.add_option( - 'json', - default=False, - short_form='j', - doc='json output instead of a pretty printed mapping', - ) - - - def main(self): - self.storage = self.config.hbase_crash_storage_class(self.config) - self.config.command(self).run() - - -if __name__ == '__main__': - try: - generic_app.main(HBaseClientApp, - config_manager_cls=HBaseClientConfigurationManager) - except NotEnoughArguments as e: - print >> sys.stderr, "ERROR: was expecting another argument: " + e.arg - print >> sys.stderr, "Use the 'help' command to get help on commands." diff --git a/socorro/external/hb/__init__.py b/socorro/external/hb/__init__.py deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/socorro/external/hb/connection_context.py b/socorro/external/hb/connection_context.py deleted file mode 100644 index 4b15d6237c..0000000000 --- a/socorro/external/hb/connection_context.py +++ /dev/null @@ -1,206 +0,0 @@ -# This Source Code Form is subject to the terms of the Mozilla Public -# License, v. 2.0. If a copy of the MPL was not distributed with this -# file, You can obtain one at http://mozilla.org/MPL/2.0/. 
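-# Thrift-based connection contexts for HBase: a per-transaction context and
-# a pooled variant that caches connections per executor thread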
- -import contextlib -import socket - -from configman.config_manager import RequiredConfig -from configman import Namespace - -from thrift import Thrift -from thrift.transport import TSocket, TTransport -from thrift.protocol import TBinaryProtocol -from hbase.Hbase import Client -import hbase.ttypes - - -#============================================================================== -class HBaseConnection(object): - """An HBase connection class encapsulating various parts of the underlying - mechanism to connect to HBase.""" - #-------------------------------------------------------------------------- - def __init__(self, config): - self.config = config - self.make_connection() - - #-------------------------------------------------------------------------- - def commit(self): - pass - - #-------------------------------------------------------------------------- - def rollback(self): - pass - - #-------------------------------------------------------------------------- - def make_connection(self): - self.socket = TSocket.TSocket(self.config.hbase_host, - self.config.hbase_port) - self.socket.setTimeout(self.config.hbase_timeout) - self.transport = TTransport.TBufferedTransport(self.socket) - self.protocol = TBinaryProtocol.TBinaryProtocol(self.transport) - self.client = Client(self.protocol) - self.transport.open() - - #-------------------------------------------------------------------------- - def close(self): - self.transport.close() - - -#============================================================================== -class HBaseConnectionContext(RequiredConfig): - """This class implements a connection to HBase for every transaction to be - executed. - """ - required_config = Namespace() - required_config.add_option( - 'hbase_host', - doc='Host to HBase server', - default='localhost', - reference_value_from='resource.hb', - ) - required_config.add_option( - 'hbase_port', - doc='Port to HBase server', - default=9090, - reference_value_from='resource.hb', - ) - required_config.add_option( - 'hbase_timeout', - doc='timeout in milliseconds for an HBase connection', - default=5000, - reference_value_from='resource.hb', - ) - required_config.add_option( - 'temporary_file_system_storage_path', - doc='a local filesystem path where dumps temporarily ' - 'during processing', - default='/tmp', - reference_value_from='resource.hb', - ) - required_config.add_option( - 'dump_file_suffix', - doc='the suffix used to identify a dump file (for use in temp files)', - default='.dump', - reference_value_from='resource.hb', - ) - - operational_exceptions = ( - hbase.ttypes.IOError, - Thrift.TException, - socket.timeout, - socket.error, - ) - - conditional_exceptions = () - - #-------------------------------------------------------------------------- - def __init__(self, config): - super(HBaseConnectionContext, self).__init__() - self.config = config - - #-------------------------------------------------------------------------- - def connection(self, name=None): - return HBaseConnection(self.config) - - #-------------------------------------------------------------------------- - @contextlib.contextmanager - def __call__(self, name=None): - conn = self.connection(name) - try: - yield conn - finally: - self.close_connection(conn) - - #-------------------------------------------------------------------------- - def force_reconnect(self): - pass - - #-------------------------------------------------------------------------- - def close(self): - pass - - 
#-------------------------------------------------------------------------- - def close_connection(self, connection, force=False): - connection.close() - - #-------------------------------------------------------------------------- - def is_operational_exception(self, msg): - return False - - -#============================================================================== -class HBasePooledConnectionContext(HBaseConnectionContext): - """a configman compliant class that pools HBase database connections""" - #-------------------------------------------------------------------------- - def __init__(self, config): - super(HBasePooledConnectionContext, self).__init__(config) - #self.config.logger.debug("HBasePooledConnectionContext - " - # "setting up connection pool") - self.pool = {} - - #-------------------------------------------------------------------------- - def connection(self, name=None): - """return a named connection. - - This function will return a named connection by either finding one - in its pool by the name or creating a new one. If no name is given, - it will use the name of the current executing thread as the name of - the connection. - - parameters: - name - a name as a string - """ - if not name: - name = self.config.executor_identity() - if name in self.pool: - return self.pool[name] - self.pool[name] = \ - super(HBasePooledConnectionContext, self).connection(name) - return self.pool[name] - - #-------------------------------------------------------------------------- - def close_connection(self, connection, force=False): - """overriding the baseclass function, this routine will decline to - close a connection at the end of a transaction context. This allows - for reuse of connections.""" - if force: - try: - (super(HBasePooledConnectionContext, self) - .close_connection(connection, force)) - except self.operational_exceptions: - self.config.logger.error( - 'HBasePooledConnectionContext - failed closing' - ) - for name, conn in self.pool.iteritems(): - if conn is connection: - break - del self.pool[name] - - #-------------------------------------------------------------------------- - def close(self): - """close all pooled connections""" - self.config.logger.debug( - "HBasePooledConnectionContext - shutting down connection pool" - ) - # force a list, we're changing the pool as we iterate - for name, connection in list(self.pool.iteritems()): - self.close_connection(connection, force=True) - self.config.logger.debug( - "HBasePooledConnectionContext - connection %s closed", - name - ) - - #-------------------------------------------------------------------------- - def force_reconnect(self, name=None): - """tell this functor that the next time it gives out a connection - under the given name, it had better make sure it is brand new clean - connection. Use this when you discover that your connection has - gone bad and you want to report that fact to the appropriate - authority. You are responsible for actually closing the connection or - not, if it is really hosed.""" - if name is None: - name = self.config.executor_identity() - self.config.logger.debug('identity: %s', name) - if name in self.pool: - del self.pool[name] diff --git a/socorro/external/hb/crash_data.py b/socorro/external/hb/crash_data.py deleted file mode 100644 index 76e51dfc20..0000000000 --- a/socorro/external/hb/crash_data.py +++ /dev/null @@ -1,14 +0,0 @@ -# This Source Code Form is subject to the terms of the Mozilla Public -# License, v. 2.0. 
If a copy of the MPL was not distributed with this -# file, You can obtain one at http://mozilla.org/MPL/2.0/. - -from socorro.external.crash_data_base import CrashDataBase - - -class CrashData(CrashDataBase): - """ - Implement the /crash_data service with HBase. - """ - def get_storage(self): - return self.config.hbase.hbase_class(self.config.hbase) - diff --git a/socorro/external/hb/crashstorage.py b/socorro/external/hb/crashstorage.py deleted file mode 100644 index 2328bb95bb..0000000000 --- a/socorro/external/hb/crashstorage.py +++ /dev/null @@ -1,549 +0,0 @@ -# This Source Code Form is subject to the terms of the Mozilla Public -# License, v. 2.0. If a copy of the MPL was not distributed with this -# file, You can obtain one at http://mozilla.org/MPL/2.0/. - -import datetime -import heapq -import itertools -import json -import os - -from socorrolib.lib.datetimeutil import utc_now -from socorro.external.crashstorage_base import ( - CrashStorageBase, - CrashIDNotFound, - MemoryDumpsMapping, -) -from socorro.external.hb.connection_context import \ - HBaseConnectionContext -from socorrolib.lib.util import DotDict -from configman import Namespace, class_converter - -from hbase.Hbase import Mutation - - -class BadCrashIDException(ValueError): pass - - -def crash_id_to_row_id(crash_id): - """ - Returns a row_id suitable for the HBase crash_reports table. - The first hex character of the crash_id is used to "salt" the rowkey - so that there should always be 16 HBase RegionServers responsible - for dealing with the current stream of data. - Then, we put the last six digits of the crash_id which represent the - submission date. This lets us easily scan through the crash_reports - table by day. - Finally, we append the normal crash_id string. - """ - try: - return "%s%s%s" % (crash_id[0], crash_id[-6:], crash_id) - except Exception, x: - raise BadCrashIDException(x) - - -def row_id_to_crash_id(row_id): - """ - Returns the natural ooid given an HBase row key. - See ooid_to_row_id for structure of row_id. - """ - try: - return row_id[7:] - except Exception, x: - raise BadCrashIDException(x) - - -def crash_id_to_timestamped_row_id(crash_id, timestamp): - """ - Returns a row_id suitable for the HBase crash_reports index tables. - The first hex character of the ooid is used to "salt" the rowkey - so that there should always be 16 HBase RegionServers responsible - for dealing with the current stream of data. - Then, we put the crash_report submission timestamp. This lets us - easily scan through a time specific region of the index. - Finally, we append the normal ooid string for uniqueness. - """ - if timestamp[-6] in "-+": - return "%s%s%s" % (crash_id[0], timestamp[:-6], crash_id) - return "%s%s%s" % (crash_id[0], timestamp, crash_id) - - -class HBaseCrashStorage(CrashStorageBase): - required_config = Namespace() - required_config.add_option( - 'new_crash_limit', - default=10 ** 6, - doc='the maximum number of new crashes to yield at a time', - reference_value_from='resource.hb', - ) - required_config.add_option( - 'transaction_executor_class', - default="socorro.database.transaction_executor." 
- "TransactionExecutorWithInfiniteBackoff", - doc='a class that will execute transactions', - from_string_converter=class_converter, - reference_value_from='resource.hb', - ) - required_config.add_option( - 'hbase_connection_context_class', - default=HBaseConnectionContext, - doc='the class responsible for proving an hbase connection', - reference_value_from='resource.hb', - ) - - def __init__(self, config, quit_check_callback=None): - super(HBaseCrashStorage, self).__init__(config, quit_check_callback) - self.logger.info('connecting to hbase') - self.hbase = config.hbase_connection_context_class(config) - self.transaction = config.transaction_executor_class( - config, - self.hbase, - quit_check_callback=quit_check_callback - ) - - def _wrap_in_transaction(self, f): - """This decorator takes a function wraps it in a transaction context. - The function being wrapped will take the connection as an argument.""" - return lambda *args, **kwargs: self.transaction(lambda conn_ctx: f(conn_ctx.client, *args, **kwargs)) - - def close(self): - self.hbase.close() - - def _salted_scanner_iterable(self, client, salted_prefix, scanner): - """Generator based iterable that runs over an HBase scanner - yields a tuple of the un-salted rowkey and the nice format of the - row.""" - self.logger.debug('Scanner %s generated', salted_prefix) - raw_rows = client.scannerGet(scanner) - while raw_rows: - nice_row = self._make_row_nice(raw_rows[0]) - yield (nice_row['_rowkey'][1:], nice_row) - raw_rows = client.scannerGet(scanner) - self.logger.debug('Scanner %s exhausted' % salted_prefix) - client.scannerClose(scanner) - - @staticmethod - def _make_row_nice(client_row_object): - columns = dict( - ((x, y.value) for x, y in client_row_object.columns.items()) - ) - columns['_rowkey'] = client_row_object.row - return columns - - def _get_report_processing_state(self, client, crash_id): - """Return the current state of processing for this report and the - submitted_timestamp needed. For processing queue manipulation. - If the ooid doesn't exist, return an empty array""" - raw_rows = client.getRowWithColumns('crash_reports', - crash_id_to_row_id(crash_id), - ['flags:processed', - 'flags:legacy_processing', - 'timestamps:submitted', - 'timestamps:processed']) - - if raw_rows: - return self._make_row_nice(raw_rows[0]) - else: - raise CrashIDNotFound(crash_id) - - def _put_crash_report_indices(self, client, crash_id, timestamp, indices): - row_id = crash_id_to_timestamped_row_id(crash_id, timestamp) - for index_name in indices: - client.mutateRow(index_name, row_id, - [Mutation(column="ids:ooid", value=crash_id)]) - - def save_raw_crash(self, raw_crash, dumps, crash_id): - @self._wrap_in_transaction - def transaction(client): - row_id = crash_id_to_row_id(crash_id) - submitted_timestamp = raw_crash['submitted_timestamp'] - - legacy_processing = raw_crash.get('legacy_processing', False) - - columns = [("flags:processed", "N"), - ("meta_data:json", json.dumps(raw_crash)), - ("timestamps:submitted", submitted_timestamp), - ("ids:ooid", crash_id) - ] - - # we don't know where the dumps came from, they could be in - # in the form of names to binary blobs or names to pathnames. - # this call ensures that we've got the former. 
- memory_dumps_mapping = dumps.as_memory_dumps_mapping() - for key, dump in memory_dumps_mapping.iteritems(): - if key in (None, '', 'upload_file_minidump'): - key = 'dump' - columns.append(('raw_data:%s' % key, dump)) - - mutations = [Mutation(column=c, value=v) - for c, v in columns if v is not None] - - indices = [ - 'crash_reports_index_submitted_time', - 'crash_reports_index_unprocessed_flag' - ] - - if legacy_processing == 0: - mutations.append(Mutation(column="flags:legacy_processing", - value='Y')) - indices.append('crash_reports_index_legacy_unprocessed_flag') - indices.append('crash_reports_index_legacy_submitted_time') - - process_type = raw_crash.get('ProcessType', 'default') - - is_hang = 'HangID' in raw_crash - - if is_hang: - hang_id = raw_crash['HangID'] - mutations.append(Mutation(column="ids:hang", value=hang_id)) - - client.mutateRow('crash_reports', row_id, mutations) - self._put_crash_report_indices(client, crash_id, submitted_timestamp, - indices) - - if is_hang: - # Put the hang's indices. - ooid_column_name = "ids:ooid:" + process_type - client.mutateRow( - 'crash_reports_index_hang_id_submitted_time', - crash_id_to_timestamped_row_id(hang_id, submitted_timestamp), - [Mutation(column=ooid_column_name, value=crash_id)] - ) - client.mutateRow( - 'crash_reports_index_hang_id', - hang_id, - [Mutation(column=ooid_column_name, value=crash_id)] - ) - - # update the metrics - time_levels = [ - submitted_timestamp[:16], # minute yyyy-mm-ddTHH:MM - submitted_timestamp[:13], # hour yyyy-mm-ddTHH - submitted_timestamp[:10], # day yyyy-mm-dd - submitted_timestamp[: 7], # month yyyy-mm - submitted_timestamp[: 4] # year yyyy - ] - counter_increments = ['counters:submitted_crash_reports'] - counter_increments.append( - "counters:submitted_crash_reports_legacy_throttle_%d" - % legacy_processing - ) - if process_type != 'default': - if is_hang: - counter_increments.append( - "counters:submitted_crash_report_hang_pairs" - ) - else: - counter_increments.append( - "counters:submitted_oop_%s_crash_reports" % process_type - ) - - client.atomicIncrement( - 'metrics', - 'crash_report_queue', - 'counters:current_unprocessed_size', - 1 - ) - if legacy_processing == 0: - client.atomicIncrement( - 'metrics', - 'crash_report_queue', - 'counters:current_legacy_unprocessed_size', - 1 - ) - - for rowkey in time_levels: - for column in counter_increments: - client.atomicIncrement('metrics', rowkey, column, 1) - - self.logger.info('saved - %s', crash_id) - return transaction() - - def save_processed(self, processed_crash): - @self._wrap_in_transaction - def transaction(client, processed_crash=processed_crash): - processed_crash = processed_crash.copy() - self._stringify_dates_in_dict(processed_crash) - - crash_id = processed_crash['uuid'] - - row_id = crash_id_to_row_id(crash_id) - - processing_state = self._get_report_processing_state(client, crash_id) - submitted_timestamp = processing_state.get( - 'timestamps:submitted', - processed_crash.get('date_processed', 'unknown') - ) - - if processing_state.get('flags:processed', '?') == 'N': - index_row_key = crash_id_to_timestamped_row_id( - crash_id, - submitted_timestamp - ) - client.atomicIncrement('metrics', - 'crash_report_queue', - 'counters:current_unprocessed_size', - -1) - client.deleteAllRow('crash_reports_index_unprocessed_flag', - index_row_key) - - processed_timestamp = processed_crash['completeddatetime'] - - if 'signature' in processed_crash: - if len(processed_crash['signature']) > 0: - signature = processed_crash['signature'] - else: - 
signature = '##empty##' - else: - signature = '##null##' - - mutations = [] - mutations.append(Mutation(column="timestamps:processed", - value=processed_timestamp)) - mutations.append(Mutation(column="processed_data:signature", - value=signature)) - processed_crash_as_json_string = json.dumps(processed_crash) - mutations.append(Mutation(column="processed_data:json", - value=processed_crash_as_json_string)) - mutations.append(Mutation(column="flags:processed", - value="Y")) - - mutation_size = ( - len(processed_timestamp) - + len(signature) - + len(processed_crash_as_json_string) - + 1 - ) - start_timestamp = utc_now() - try: - client.mutateRow('crash_reports', row_id, mutations) - finally: - end_timestamp = utc_now() - self.config.logger.debug( - 'mutation size for row_id %s: %s, execution time: %s', - row_id, - mutation_size, - end_timestamp - start_timestamp - ) - - sig_ooid_idx_row_key = signature + crash_id - client.mutateRow( - 'crash_reports_index_signature_ooid', - sig_ooid_idx_row_key, - [Mutation(column="ids:ooid", value=crash_id)] - ) - return transaction() - - def save_raw_and_processed(self, raw_crash, dumps, processed_crash, crash_id): - """ bug 866973 - do not put raw_crash back into HBase again - We are doing this in lieu of a queuing solution that could allow - us to operate an independent crashmover. When the queuing system - is implemented, we could remove this, and have the raw crash - saved by a crashmover that's consuming crash_ids the same way - that the processor consumes them. - """ - self.save_processed(processed_crash) - - def get_raw_crash(self, crash_id): - @self._wrap_in_transaction - def transaction(client): - row_id = crash_id_to_row_id(crash_id) - raw_rows = client.getRowWithColumns('crash_reports', - row_id, - ['meta_data:json']) - try: - if raw_rows: - row_column = raw_rows[0].columns["meta_data:json"].value - else: - raise CrashIDNotFound(crash_id) - except KeyError: - self.logger.debug( - 'key error trying to get "meta_data:json" for %s', - crash_id - ) - raise - - return json.loads(row_column, object_hook=DotDict) - return transaction() - - def get_raw_dump(self, crash_id, name=None): - """Return the minidump for a given crash_id as a string of bytes - If the crash_id doesn't exist, raise not found""" - @self._wrap_in_transaction - def transaction(client, name): - if name in (None, '', 'upload_file_minidump'): - name = 'dump' - column_family_and_qualifier = 'raw_data:%s' % name - row_id = crash_id_to_row_id(crash_id) - raw_rows = client.getRowWithColumns('crash_reports', - row_id, - [column_family_and_qualifier]) - - try: - if raw_rows: - return raw_rows[0].columns[column_family_and_qualifier].value - else: - raise CrashIDNotFound(crash_id) - except KeyError: - self.logger.debug( - 'key error trying to get "%s" for %s', - (column_family_and_qualifier, crash_id) - ) - raise - return transaction(name) - - @staticmethod - def _make_dump_name(family_qualifier): - name = family_qualifier.split(':')[1] - if name == 'dump': - name = 'upload_file_minidump' - return name - - def get_raw_dumps(self, crash_id): - """Return the minidump for a given ooid as a string of bytes - If the ooid doesn't exist, raise not found""" - @self._wrap_in_transaction - def transaction(client): - row_id = crash_id_to_row_id(crash_id) - raw_rows = client.getRowWithColumns('crash_reports', - row_id, - ['raw_data']) - try: - if raw_rows: - column_mapping = raw_rows[0].columns - # ensure that we return a proper mapping of names to - # binary blobs. 
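-                    # _make_dump_name() drops the 'raw_data:' family prefix
-                    # and renames 'dump' back to 'upload_file_minidump'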
- d = MemoryDumpsMapping([ - (self._make_dump_name(k), v.value) - for k, v in column_mapping.iteritems()]) - return d - else: - raise CrashIDNotFound(crash_id) - except KeyError: - self.logger.debug( - 'key error trying to get "raw_data" from %s', - crash_id - ) - raise - return transaction() - - def get_raw_dumps_as_files(self, crash_id): - memory_dumps_mapping = self.get_raw_dumps(crash_id) - # convert our in memory name/blob data into name/pathname data - return memory_dumps_mapping.as_file_dumps_mapping( - crash_id, - self.hbase.config.temporary_file_system_storage_path, - self.hbase.config.dump_file_suffix - ) - - def get_unredacted_processed(self, crash_id): - """Return the unredacted processed json (jsonz) for a given ooid as a - Mapping. If not found, raise the NotFound exception.""" - @self._wrap_in_transaction - def transaction(client): - row_id = crash_id_to_row_id(crash_id) - raw_rows = client.getRowWithColumns('crash_reports', - row_id, - ['processed_data:json']) - - if raw_rows: - row_columns = raw_rows[0].columns["processed_data:json"].value - else: - raise CrashIDNotFound(crash_id) - - return json.loads(row_columns, object_hook=DotDict) - return transaction() - - def new_crashes(self): - try: - with self.hbase() as context: - for row in itertools.islice( - self._merge_scan_with_prefix( - context.client, - 'crash_reports_index_legacy_unprocessed_flag', - '', - ['ids:ooid'] - ), - self.config.new_crash_limit - ): - self._delete_from_legacy_processing_index(context.client, - row['_rowkey']) - yield row['ids:ooid'] - except self.hbase.operational_exceptions: - self.hbase.force_reconnect() - self.config.logger.critical( - 'hbase is in trouble, forcing reconnect', - exc_info=True - ) - - def _union_scan_with_prefix(self, client, table, prefix, columns): - # TODO: Need assertion for columns contains at least 1 element - """A lazy chain of iterators that yields unordered rows starting with - a given prefix. The implementation opens up 16 scanners (one for each - leading hex character of the salt) one at a time and returns all of - the rows matching""" - for salt in '0123456789abcdef': - salted_prefix = "%s%s" % (salt, prefix) - scanner = client.scannerOpenWithPrefix(table, - salted_prefix, - columns) - for rowkey, row in self._salted_scanner_iterable(client, - salted_prefix, - scanner): - yield row - - def _merge_scan_with_prefix(self, client, table, prefix, columns): - # TODO: Need assertion that columns is array containing at least - # one string - """A generator based iterator that yields totally ordered rows starting - with a given prefix. The implementation opens up 16 scanners (one for - each leading hex character of the salt) simultaneously and then yields - the next row in order from the pool on each iteration.""" - iterators = [] - next_items_queue = [] - for salt in '0123456789abcdef': - salted_prefix = "%s%s" % (salt, prefix) - scanner = client.scannerOpenWithPrefix(table, - salted_prefix, - columns) - iterators.append(self._salted_scanner_iterable(client, - salted_prefix, - scanner)) - # The i below is so we can advance whichever scanner delivers us the - # polled item. - for i, it in enumerate(iterators): - try: - next = it.next - next_items_queue.append([next(), i, next]) - except StopIteration: - pass - heapq.heapify(next_items_queue) - - while True: - try: - while True: - row_tuple, iter_index, next = s = next_items_queue[0] - # tuple[1] is the actual nice row. 
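`_merge_scan_with_prefix`, deleted above, is a textbook k-way merge: one iterator per salt prefix, a heap keyed on each iterator's current row, and a pop-and-refill per yielded row. The same technique over plain lists, as a minimal self-contained sketch:

```python
import heapq

def merge_sorted(iterables):
    # Seed the heap with each iterator's first item; skip empty inputs.
    heap = []
    for i, it in enumerate(map(iter, iterables)):
        try:
            heap.append([next(it), i, it])
        except StopIteration:
            pass
    heapq.heapify(heap)
    while heap:
        smallest = heap[0]
        yield smallest[0]
        try:
            # Advance the winning iterator and restore the heap property.
            smallest[0] = next(smallest[2])
            heapq.heapreplace(heap, smallest)
        except StopIteration:
            heapq.heappop(heap)

assert list(merge_sorted([[1, 4], [2, 3], [0, 5]])) == [0, 1, 2, 3, 4, 5]
```

Keeping mutable `[value, index, iterator]` lists in the heap lets the code advance an iterator in place with `heapreplace` instead of a pop-push pair, which is the same trick the deleted scanner code uses.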
- yield row_tuple[1] - s[0] = next() - heapq.heapreplace(next_items_queue, s) - except StopIteration: - heapq.heappop(next_items_queue) - except IndexError: - return - - def _delete_from_legacy_processing_index(self, client, index_row_key): - client.deleteAllRow('crash_reports_index_legacy_unprocessed_flag', - index_row_key) - - client.atomicIncrement('metrics', - 'crash_report_queue', - 'counters:current_legacy_unprocessed_size', - -1) - - @staticmethod - def _stringify_dates_in_dict(items): - for k, v in items.iteritems(): - if isinstance(v, datetime.datetime): - items[k] = v.strftime("%Y-%m-%d %H:%M:%S.%f") - return items diff --git a/socorro/external/hb/hbase_client.py b/socorro/external/hb/hbase_client.py deleted file mode 100644 index e22c631417..0000000000 --- a/socorro/external/hb/hbase_client.py +++ /dev/null @@ -1,280 +0,0 @@ -import os - -from socorrolib.app import generic_app - -from configman import Namespace, RequiredConfig, ConfigurationManager -from configman.converters import class_converter - -from socorro.external.hb.crashstorage import ( - HBaseCrashStorage, - row_id_to_crash_id -) - -import itertools -import pprint -import contextlib -import gzip -import sys -import json - - -_raises_exception = object() - - -class NotEnoughArguments(Exception): - def __init__(self, arg): - self.arg = arg - - -def expect_from_aggregation(required_config, name, i, - default=_raises_exception): - def _closure(g, l, a): - if len(a) < i + 1: - if default is _raises_exception: - raise NotEnoughArguments(name) - return default - return a[i] - required_config.add_aggregation(name, _closure) - - -class _Command(RequiredConfig): - required_config = Namespace() - - def __init__(self, app): - self.app = app - self.config = app.config - self.storage = app.storage - - -class _CommandRequiringCrashID(_Command): - required_config = Namespace() - expect_from_aggregation(required_config, 'crash_id', 0) - - -class _CommandRequiringTable(_Command): - required_config = Namespace() - expect_from_aggregation(required_config, 'table', 0) - - -class _CommandRequiringTableRow(_CommandRequiringTable): - required_config = Namespace() - expect_from_aggregation(required_config, 'row_id', 1) - - -class _CommandRequiringScanParameters(_CommandRequiringTable): - required_config = Namespace() - expect_from_aggregation(required_config, 'prefix', 1) - expect_from_aggregation(required_config, 'columns', 2) - expect_from_aggregation(required_config, 'limit', 3) - - -class help(_Command): - """Usage: help - Get help on commands.""" - def run(self): - self.app.config_manager.output_summary() - -class get_raw_crash(_CommandRequiringCrashID): - """Usage: get_raw_crash CRASH_ID - Get the raw crash JSON data.""" - def run(self): - pprint.pprint(self.storage.get_raw_crash(self.config.crash_id)) - - -class get_raw_dumps(_CommandRequiringCrashID): - """Usage: get_raw_dumps CRASH_ID - Get information on the raw dumps for a crash.""" - def run(self): - for name, dump in self.storage.get_raw_dumps( - self.config.crash_id - ).items(): - dump_name = "%s.%s.dump" % (self.config.crash_id, name) - with open(dump_name, "w") as f: - f.write(dump) - print("%s: dump length = %s" % (name, len(dump))) - - -class get_processed(_CommandRequiringCrashID): - """Usage: get_processed CRASH_ID - Get the redacted processed JSON for a crash""" - def run(self): - if self.config.json: - print json.dumps(self.storage.get_processed(self.config.crash_id)) - else: - pprint.pprint(self.storage.get_processed(self.config.crash_id)) - - -class 
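`expect_from_aggregation`, used throughout the deleted `hbase_client.py`, maps positional command-line slots onto named config values and raises only when a required slot has no default. A simplified standalone approximation (the `positional` helper is illustrative, not the configman API):

```python
_raises = object()  # sentinel: distinguishes "no default" from None

def positional(args, index, name, default=_raises):
    if len(args) < index + 1:
        if default is _raises:
            raise ValueError('missing argument: %s' % name)
        return default
    return args[index]

argv = ['crash_reports', '0bba929f']
assert positional(argv, 0, 'table') == 'crash_reports'
assert positional(argv, 2, 'limit', default=10) == 10
```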
get_unredacted_processed(_CommandRequiringCrashID): - """Usage: get_unredacted_processed CRASH_ID - Get the unredacted processed JSON for a crash""" - def run(self): - if self.config.json: - print json.dumps(self.storage.get_unredacted_processed( - self.config.crash_id - )) - else: - pprint.pprint(self.storage.get_unredacted_processed( - self.config.crash_id - )) - - -class get_report_processing_state(_CommandRequiringCrashID): - """Usage: get_report_processing_state CRASH_ID - Get the report processing state for a crash.""" - def run(self): - @self.storage._wrap_in_transaction - def transaction(conn): - pprint.pprint(self.storage._get_report_processing_state( - conn, - self.config.crash_id - )) - transaction() - - -class union_scan_with_prefix(_CommandRequiringScanParameters): - """Usage: union_scan_with_prefix TABLE PREFIX COLUMNS [LIMIT] - Do a union scan on a table using a given prefix.""" - def run(self): - @self.storage._wrap_in_transaction - def transaction(conn, limit=self.config.limit): - for row in itertools.islice( - self.storage._union_scan_with_prefix( - conn, - self.config.table, - self.config.prefix, - self.config.columns - ), - self.config.limit): - pprint.pprint(row) - transaction() - - -class merge_scan_with_prefix(_CommandRequiringScanParameters): - """Usage: merge_scan_with_prefix TABLE PREFIX COLUMNS [LIMIT] - Do a merge scan on a table using a given prefix.""" - def run(self): - @self.storage._wrap_in_transaction - def transaction(conn, limit=self.config.limit): - for row in itertools.islice( - self.storage._merge_scan_with_prefix( - conn, - self.config.table, - self.config.prefix, - self.config.columns - ), - self.config.limit): - pprint.pprint(row) - transaction() - - -class describe_table(_CommandRequiringTable): - """Usage: describe_table TABLE - Describe the details of a table in HBase.""" - def run(self): - @self.storage._wrap_in_transaction - def transaction(conn): - pprint.pprint(conn.getColumnDescriptors(self.config.table)) - transaction() - - -class get_full_row(_CommandRequiringTableRow): - """Usage: describe_table TABLE ROW_ID - Pretty-print a row in HBase.""" - def run(self): - @self.storage._wrap_in_transaction - def transaction(conn): - pprint.pprint(self.storage._make_row_nice(conn.getRow( - self.config.table, - self.config.row_id - )[0])) - transaction() - - -class export_processed_crashes_for_date(_Command): - """Usage: export_processed_crashes_for_date DATE PATH - Export all crashes for a given date to a path.""" - required_config = Namespace() - expect_from_aggregation(required_config, 'date', 0) - expect_from_aggregation(required_config, 'path', 1) - - def run(self): - @self.storage._wrap_in_transaction - def transaction(conn): - for row in itertools.islice( - self.storage._union_scan_with_prefix(conn, - 'crash_reports', - self.config.date, - ['processed_data:json']), - 10 - ): - crash_id = row_id_to_crash_id(row['_rowkey']) - - if row['processed_data:json']: - file_name = os.path.join(self.config.path, - crash_id + '.jsonz') - with contextlib.closing(gzip.GzipFile(file_name, - 'w', - 9)) as f: - json.dump(row['processed_data:json'], f) - transaction() - - -class HBaseClientConfigurationManager(ConfigurationManager): - def output_summary(self, output_stream=sys.stdout, block_password=True): - super(HBaseClientConfigurationManager, self).output_summary( - output_stream, - block_password - ) - - print >> output_stream, "Available commands:" - - for command in (var for var in globals().values() - if isinstance(var, type) and - issubclass(var, 
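`export_processed_crashes_for_date`, deleted above, writes each crash as a `.jsonz` file: gzip-compressed JSON at maximum compression (level 9). A sketch of the same round trip under Python 3 byte-handling assumptions (the temp path is only an example):

```python
import contextlib
import gzip
import json
import os
import tempfile

def write_jsonz(path, payload):
    # .jsonz == gzip-compressed JSON, level 9, as in the export command.
    with contextlib.closing(gzip.GzipFile(path, 'w', 9)) as f:
        f.write(json.dumps(payload).encode('utf-8'))

def read_jsonz(path):
    with contextlib.closing(gzip.GzipFile(path, 'r')) as f:
        return json.loads(f.read().decode('utf-8'))

path = os.path.join(tempfile.gettempdir(), 'example.jsonz')
write_jsonz(path, {'signature': 'now_this_is_a_signature'})
assert read_jsonz(path)['signature'] == 'now_this_is_a_signature'
```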
_Command) and - var.__name__[0] != '_'): - - print >> output_stream, ' ' + command.__name__ - print >> output_stream, ' ' + (command.__doc__ or - '(undocumented)') - print >> output_stream, '' - - -class HBaseClientApp(generic_app.App): - app_name = "hbase_client.py" - app_version = "0.1" - app_description = __doc__ - - required_config = Namespace() - required_config.add_option( - 'hbase_crash_storage_class', - default=HBaseCrashStorage, - - doc='the class responsible for proving an hbase connection', - from_string_converter=class_converter - ) - required_config.add_option( - 'command', - default=help, - doc='command to use', - from_string_converter=lambda s: class_converter(__name__ + '.' + s) - ) - required_config.add_option( - 'json', - default=False, - short_form='j', - doc='json output instead of a pretty printed mapping', - ) - - - def main(self): - self.storage = self.config.hbase_crash_storage_class(self.config) - self.config.command(self).run() - - -if __name__ == '__main__': - try: - generic_app.main(HBaseClientApp, - config_manager_cls=HBaseClientConfigurationManager) - except NotEnoughArguments as e: - print >> sys.stderr, "ERROR: was expecting another argument: " + e.arg - print >> sys.stderr, "Use the 'help' command to get help on commands." diff --git a/socorro/middleware/middleware_app.py b/socorro/middleware/middleware_app.py index d3e2ec63f6..2ec77c4adf 100755 --- a/socorro/middleware/middleware_app.py +++ b/socorro/middleware/middleware_app.py @@ -123,7 +123,6 @@ class MiddlewareApp(App): 'implementation_list', doc='list of packages for service implementations', default='psql:socorro.external.postgresql, ' - 'hbase:socorro.external.hb, ' 'es:socorro.external.es, ' 'fs:socorro.external.fs, ' 'http:socorro.external.http, ' @@ -134,7 +133,7 @@ class MiddlewareApp(App): required_config.implementations.add_option( 'service_overrides', - doc='comma separated list of class overrides, e.g `Crashes: hbase`', + doc='comma separated list of class overrides, e.g `Query: es`', default='Correlations: http, ' 'CorrelationsSignatures: http, ' 'SuperSearch: es, ' @@ -156,17 +155,6 @@ class MiddlewareApp(App): from_string_converter=class_converter ) - #-------------------------------------------------------------------------- - # hbase namespace - # the namespace is for external implementations of the services - #------------------------------------------------------------------------- - required_config.namespace('hbase') - required_config.hbase.add_option( - 'hbase_class', - default='socorro.external.boto.crashstorage.BotoS3CrashStorage', - from_string_converter=class_converter - ) - #-------------------------------------------------------------------------- # filesystem namespace # the namespace is for external implementations of the services diff --git a/socorro/unittest/external/happybase/__init__.py b/socorro/unittest/external/happybase/__init__.py deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/socorro/unittest/external/happybase/test_connection_context.py b/socorro/unittest/external/happybase/test_connection_context.py deleted file mode 100644 index ae6da2e70f..0000000000 --- a/socorro/unittest/external/happybase/test_connection_context.py +++ /dev/null @@ -1,269 +0,0 @@ -# This Source Code Form is subject to the terms of the Mozilla Public -# License, v. 2.0. If a copy of the MPL was not distributed with this -# file, You can obtain one at http://mozilla.org/MPL/2.0/. 
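The `output_summary` override deleted above discovers commands by scanning the module namespace for public `_Command` subclasses and printing their docstrings as usage text. A simplified sketch of that idiom, using a local namespace in place of the module's `globals()`:

```python
class _Command(object):
    """(base class, not listed)"""

class get_raw_crash(_Command):
    """Usage: get_raw_crash CRASH_ID"""

def iter_commands(namespace):
    # Yield (name, usage) for every public _Command subclass found.
    for var in namespace.values():
        if (isinstance(var, type) and issubclass(var, _Command)
                and not var.__name__.startswith('_')):
            yield var.__name__, var.__doc__ or '(undocumented)'

commands = dict(iter_commands(dict(globals())))
assert commands['get_raw_crash'].startswith('Usage:')
```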
- -import mock - -import happybase - -from socorro.external.happybase import connection_context -from socorro.external.happybase.connection_context import ( - HappyBaseConnectionContext, - HappyBasePooledConnectionContext, -) - -from socorrolib.lib.util import SilentFakeLogger, DotDict -from socorro.database.transaction_executor import TransactionExecutor -from socorro.unittest.testbase import TestCase -from configman import Namespace - -from socket import timeout, error - - -class FakeHB_Connection(object): - def __init__(self, config, *args, **kwargs): - self.close_counter = 0 - self.commit_counter = 0 - self.rollback_counter = 0 - - def close(self): - self.close_counter += 1 - - def commit(self): - self.commit_counter += 1 - - def rollback(self): - self.rollback_counter += 1 - - -class TestConnectionContext(TestCase): - def test_basic_hbase_usage(self): - local_config = DotDict({ - 'hbase_host': 'host', - 'database_name': 'name', - 'hbase_port': 9090, - 'hbase_timeout': 9000, - 'logger': SilentFakeLogger(), - }) - a_fake_hbase_connection = FakeHB_Connection(local_config) - with mock.patch.object(happybase, 'Connection', - mock.Mock(return_value=a_fake_hbase_connection)): - hb_context = connection_context.HappyBaseConnectionContext( - local_config - ) - # open a connection - with hb_context() as conn: - pass - self.assertEqual( - a_fake_hbase_connection.close_counter, - 1 - ) - # open another connection again - with hb_context() as conn: - pass - self.assertEqual( - a_fake_hbase_connection.close_counter, - 2 - ) - # get a named connection - with hb_context('fred') as conn: - pass - self.assertEqual( - a_fake_hbase_connection.close_counter, - 3 - ) - # close all connections - hb_context.close() - self.assertEqual( - a_fake_hbase_connection.close_counter, - 3 - ) - - def test_hbase_usage_with_transaction(self): - local_config = DotDict({ - 'hbase_host': 'host', - 'database_name': 'name', - 'hbase_port': 9090, - 'hbase_timeout': 9000, - 'number_of_retries': 2, - 'logger': SilentFakeLogger(), - }) - a_fake_hbase_connection = FakeHB_Connection(local_config) - with mock.patch.object(HappyBaseConnectionContext, 'connection', - mock.Mock(return_value=a_fake_hbase_connection)): - hb_context = connection_context.HappyBaseConnectionContext( - local_config - ) - def all_ok(connection, dummy): - self.assertEqual(dummy, 'hello') - return True - - transaction = TransactionExecutor(local_config, hb_context) - result = transaction(all_ok, 'hello') - self.assertTrue(result) - self.assertEqual( - a_fake_hbase_connection.close_counter, - 1 - ) - self.assertEqual( - a_fake_hbase_connection.rollback_counter, - 0 - ) - self.assertEqual( - a_fake_hbase_connection.commit_counter, - 1 - ) - - def bad_deal(connection, dummy): - raise KeyError('fred') - - self.assertRaises(KeyError, transaction, bad_deal, 'hello') - self.assertEqual( - a_fake_hbase_connection.close_counter, - 2 - ) - self.assertEqual( - a_fake_hbase_connection.commit_counter, - 1 - ) - - hb_context.close() - self.assertEqual( - a_fake_hbase_connection.close_counter, - 2 - ) - -from contextlib import contextmanager - -class FakeHB_Connection2(object): - def __init__(self, config, *args, **kwargs): - self.close_counter = 0 - self.commit_counter = 0 - self.rollback_counter = 0 - - def close(self): - self.close_counter += 1 - - def commit(self): - self.commit_counter += 1 - - def rollback(self): - self.rollback_counter += 1 - - @contextmanager - def __call__(self): - yield self - -class 
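The fakes above pin down the non-pooled contract: every `with context() as conn:` opens a fresh connection and closes it on exit, which is why `close_counter` rises once per block. A minimal sketch of a context class with that behavior (class names are illustrative, not the production classes):

```python
import contextlib

class ConnectionContext(object):
    def __init__(self, connection_factory):
        self._factory = connection_factory

    @contextlib.contextmanager
    def __call__(self, name=None):
        # Open per use; guarantee close even if the body raises.
        conn = self._factory()
        try:
            yield conn
        finally:
            conn.close()

class FakeConnection(object):
    closed = 0
    def close(self):
        FakeConnection.closed += 1

ctx = ConnectionContext(FakeConnection)
with ctx() as conn:
    pass
assert FakeConnection.closed == 1
```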
HappyBasePooledConnectionContextMock(HappyBasePooledConnectionContext): - @contextmanager - def __call__(self, name=None): - with self._connection_pool.connection() as connection: - yield connection - - -class TestPooledConnectionContext(TestCase): - def test_basic_hbase_usage(self): - local_config = DotDict({ - 'hbase_host': 'host', - 'database_name': 'name', - 'hbase_port': 9090, - 'hbase_timeout': 9000, - 'logger': SilentFakeLogger(), - }) - a_fake_hbase_connection = FakeHB_Connection(local_config) - a_fake_hbase_pool = mock.MagicMock() - a_fake_hbase_pool.return_value = a_fake_hbase_connection - with mock.patch.object( - happybase, - 'ConnectionPool', - mock.Mock(return_value=a_fake_hbase_pool) - ): - hb_context = connection_context.HappyBasePooledConnectionContext( - local_config - ) - # open a connection - with hb_context() as conn: - pass - self.assertEqual( - a_fake_hbase_connection.close_counter, - 0 - ) - # open another connection again - with hb_context() as conn: - pass - self.assertEqual( - a_fake_hbase_connection.close_counter, - 0 - ) - # get a named connection - with hb_context('fred') as conn: - pass - self.assertEqual( - a_fake_hbase_connection.close_counter, - 0 - ) - # close all connections - hb_context.close() - self.assertEqual( - a_fake_hbase_connection.close_counter, - 0 - ) - - def test_hbase_usage_with_transaction(self): - local_config = DotDict({ - 'hbase_host': 'host', - 'database_name': 'name', - 'hbase_port': 9090, - 'hbase_timeout': 9000, - 'number_of_retries': 2, - 'logger': SilentFakeLogger(), - }) - a_fake_hbase_connection = FakeHB_Connection2(local_config) - a_fake_hbase_pool = mock.MagicMock() - a_fake_hbase_pool.connection = a_fake_hbase_connection - with mock.patch.object( - happybase, - 'ConnectionPool', - mock.Mock(return_value=a_fake_hbase_pool) - ): - hb_context = HappyBasePooledConnectionContextMock( - local_config - ) - def all_ok(connection, dummy): - self.assertEqual(dummy, 'hello') - return True - - transaction = TransactionExecutor(local_config, hb_context) - result = transaction(all_ok, 'hello') - self.assertTrue(result) - self.assertEqual( - a_fake_hbase_connection.close_counter, - 0 - ) - self.assertEqual( - a_fake_hbase_connection.rollback_counter, - 0 - ) - self.assertEqual( - a_fake_hbase_connection.commit_counter, - 1 - ) - - def bad_deal(connection, dummy): - raise KeyError('fred') - - self.assertRaises(KeyError, transaction, bad_deal, 'hello') - self.assertEqual( - a_fake_hbase_connection.close_counter, - 0 - ) - self.assertEqual( - a_fake_hbase_connection.commit_counter, - 1 - ) - - hb_context.close() - self.assertEqual( - a_fake_hbase_connection.close_counter, - 0 - ) diff --git a/socorro/unittest/external/happybase/test_crash_data.py b/socorro/unittest/external/happybase/test_crash_data.py deleted file mode 100644 index fa566c7827..0000000000 --- a/socorro/unittest/external/happybase/test_crash_data.py +++ /dev/null @@ -1,231 +0,0 @@ -# This Source Code Form is subject to the terms of the Mozilla Public -# License, v. 2.0. If a copy of the MPL was not distributed with this -# file, You can obtain one at http://mozilla.org/MPL/2.0/. 
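The pooled tests above additionally assert the transaction contract: the callable runs with a live connection, a commit follows success, and an exception skips the commit while the pool, not the caller, deals with the dead connection. A much-simplified stand-in for `TransactionExecutor`, without its retry and connection-disposal logic:

```python
from contextlib import contextmanager

class Conn(object):
    commits = 0
    def commit(self):
        Conn.commits += 1

@contextmanager
def pooled_context():
    yield Conn()  # a pooled context hands out, but does not close

def run_in_transaction(context, fn, *args):
    with context() as connection:
        result = fn(connection, *args)  # an exception skips the commit
        connection.commit()
        return result

assert run_in_transaction(pooled_context, lambda c, x: x, 'hello') == 'hello'
assert Conn.commits == 1
```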
- -import os -from nose.plugins.skip import SkipTest -from nose.tools import eq_ -from configman import ConfigurationManager, Namespace -from mock import Mock, patch - -from socorrolib.lib import ( - MissingArgumentError, - ResourceNotFound, - ResourceUnavailable, -) -from socorro.external.happybase import crash_data, crashstorage -from socorro.external.happybase.connection_context import ( - HappyBaseConnectionContext -) -from socorro.unittest.testbase import TestCase - - -_run_integration_tests = os.environ.get('RUN_HBASE_INTEGRATION_TESTS', False) -if _run_integration_tests in ('false', 'False', 'no', '0'): - _run_integration_tests = False - - -class TestIntegrationHBaseCrashData(TestCase): - - def setUp(self): - if not _run_integration_tests: - raise SkipTest("Skipping HBase integration tests") - super(TestIntegrationHBaseCrashData, self).setUp() - self.config_manager = self._common_config_setup() - - with self.config_manager.context() as config: - store = crashstorage.HBaseCrashStorage(config.hbase) - - # A complete crash report (raw, dump and processed) - fake_raw_dump_1 = 'peter is a swede' - fake_raw_dump_2 = 'lars is a norseman' - fake_raw_dump_3 = 'adrian is a frenchman' - fake_dumps = {'upload_file_minidump': fake_raw_dump_1, - 'lars': fake_raw_dump_2, - 'adrian': fake_raw_dump_3} - fake_raw = { - 'name': 'Peter', - 'legacy_processing': 0, - 'submitted_timestamp': '2013-05-04' - } - fake_processed = { - 'name': 'Peter', - 'uuid': '114559a5-d8e6-428c-8b88-1c1f22120314', - 'completeddatetime': '2012-01-01T00:00:00', - 'email': 'peter@fake.org', - } - - store.save_raw_crash( - fake_raw, - fake_dumps, - '114559a5-d8e6-428c-8b88-1c1f22120314' - ) - store.save_processed(fake_processed) - - # A non-processed crash report - fake_raw = { - 'name': 'Adrian', - 'legacy_processing': 0, - 'submitted_timestamp': '2013-05-04' - } - - store.save_raw_crash( - fake_raw, - fake_dumps, - '58727744-12f5-454a-bcf5-f688a2120821' - ) - - def tearDown(self): - super(TestIntegrationHBaseCrashData, self).tearDown() - with self.config_manager.context() as config: - connection = hbase_client.HBaseConnectionForCrashReports( - config.hbase.hbase_host, - config.hbase.hbase_port, - config.hbase.hbase_timeout - ) - for row in connection.merge_scan_with_prefix( - 'crash_reports', '', ['ids:ooid']): - index_row_key = row['_rowkey'] - connection.client.deleteAllRow( - 'crash_reports', index_row_key) - # because of HBase's async nature, deleting can take time - list(connection.iterator_for_all_legacy_to_be_processed()) - - def _common_config_setup(self): - mock_logging = Mock() - required_config = Namespace() - required_config.namespace('hbase') - required_config.hbase.hbase_class = \ - crashstorage.HBaseCrashStorage - required_config.hbase.add_option('logger', default=mock_logging) - config_manager = ConfigurationManager( - [required_config], - app_name='testapp', - app_version='1.0', - app_description='app description', - values_source_list=[{'hbase': { - 'logger': mock_logging - }}] - ) - return config_manager - - def test_get(self): - with self.config_manager.context() as config: - - priorityjobs_mock = Mock() - service = crash_data.CrashData( - config=config, - all_services={'Priorityjobs': priorityjobs_mock} - ) - params = { - 'datatype': 'raw', - 'uuid': '114559a5-d8e6-428c-8b88-1c1f22120314' - } - - # Test 1: get a raw dump - res_expected = ('peter is a swede', - 'application/octet-stream') - res = service.get(**params) - - eq_(res, res_expected) - - # Test 2: get a raw crash - params['datatype'] = 'meta' - 
res_expected = { - 'name': 'Peter', - 'legacy_processing': 0, - 'submitted_timestamp': '2013-05-04' - } - res = service.get(**params) - - eq_(res, res_expected) - - # Test 3: get a processed crash - params['datatype'] = 'processed' - res_expected = { - 'name': 'Peter', - 'uuid': '114559a5-d8e6-428c-8b88-1c1f22120314', - 'completeddatetime': '2012-01-01T00:00:00' - } - res = service.get(**params) - - eq_(res, res_expected) - - # Test 3a: get a unredacted processed crash - params['datatype'] = 'unredacted' - res_expected = { - 'name': 'Peter', - 'uuid': '114559a5-d8e6-428c-8b88-1c1f22120314', - 'completeddatetime': '2012-01-01T00:00:00', - 'email': 'peter@fake.org', - } - res = service.get(**params) - - eq_(res, res_expected) - - # Test 4: missing parameters - self.assertRaises( - MissingArgumentError, - service.get - ) - self.assertRaises( - MissingArgumentError, - service.get, - **{'uuid': '114559a5-d8e6-428c-8b88-1c1f22120314'} - ) - - # Test 5: crash cannot be found - self.assertRaises( - ResourceNotFound, - service.get, - **{ - 'uuid': 'c44245f4-c93b-49b8-86a2-c15dc2130504', - 'datatype': 'processed' - } - ) - # Test 5a: crash cannot be found - self.assertRaises( - ResourceNotFound, - service.get, - **{ - 'uuid': 'c44245f4-c93b-49b8-86a2-c15dc2130504', - 'datatype': 'unredacted' - } - ) - - # Test 6: not yet available crash - self.assertRaises( - ResourceUnavailable, - service.get, - **{ - 'uuid': '58727744-12f5-454a-bcf5-f688a2120821', - 'datatype': 'processed' - } - ) - priorityjobs_mock.cls.return_value.create.assert_called_once_with( - uuid='58727744-12f5-454a-bcf5-f688a2120821' - ) - priorityjobs_mock.cls.return_value.create.reset_mock() - - # Test 6a: not yet available crash - self.assertRaises( - ResourceUnavailable, - service.get, - **{ - 'uuid': '58727744-12f5-454a-bcf5-f688a2120821', - 'datatype': 'unredacted' - } - ) - priorityjobs_mock.cls.return_value.create.assert_called_once_with( - uuid='58727744-12f5-454a-bcf5-f688a2120821' - ) - - # Test 7: raw crash cannot be found - self.assertRaises( - ResourceNotFound, - service.get, - **{ - 'uuid': 'c44245f4-c93b-49b8-86a2-c15dc2130505', - 'datatype': 'raw' - } - ) diff --git a/socorro/unittest/external/happybase/test_crashstorage.py b/socorro/unittest/external/happybase/test_crashstorage.py deleted file mode 100644 index 88de752fa2..0000000000 --- a/socorro/unittest/external/happybase/test_crashstorage.py +++ /dev/null @@ -1,190 +0,0 @@ -# This Source Code Form is subject to the terms of the Mozilla Public -# License, v. 2.0. If a copy of the MPL was not distributed with this -# file, You can obtain one at http://mozilla.org/MPL/2.0/. 
- -import mock -import json - -from socorrolib.lib.util import SilentFakeLogger, DotDict -from socorro.external.crashstorage_base import ( - Redactor, - MemoryDumpsMapping, -) -from socorro.external.happybase.crashstorage import HBaseCrashStorage, CrashIDNotFound -from socorro.database.transaction_executor import TransactionExecutor -from socorro.unittest.testbase import TestCase - - -class TestCrashStorage(TestCase): - def setUp(self): - super(TestCrashStorage, self).setUp() - self.context = mock.MagicMock() - self.context.__enter__.return_value = self.context - config = DotDict({ - 'hbase_host': 'host', - 'database_name': 'name', - 'hbase_port': 9090, - 'hbase_timeout': 9000, - 'number_of_retries': 2, - 'logger': SilentFakeLogger(), - 'hbase_connection_context_class': mock.Mock( - return_value=self.context - ), - 'transaction_executor_class': TransactionExecutor, - 'new_crash_limit': 10 ** 6, - 'redactor_class': Redactor, - 'forbidden_keys': Redactor.required_config.forbidden_keys.default, - }) - self.storage = HBaseCrashStorage(config) - - def _fake_processed_crash(self): - d = DotDict() - # these keys survive redaction - d.a = DotDict() - d.a.b = DotDict() - d.a.b.c = 11 - d.sensitive = DotDict() - d.sensitive.x = 2 - d.not_url = 'not a url' - - return d - - def _fake_redacted_processed_crash(self): - d = self._fake_unredacted_processed_crash() - del d.url - del d.email - del d.user_id - del d.exploitability - del d.json_dump.sensitive - del d.upload_file_minidump_flash1.json_dump.sensitive - del d.upload_file_minidump_flash2.json_dump.sensitive - del d.upload_file_minidump_browser.json_dump.sensitive - - return d - - def _fake_unredacted_processed_crash(self): - d = self._fake_processed_crash() - - # these keys do not survive redaction - d['url'] = 'http://very.embarassing.com' - d['email'] = 'lars@fake.com' - d['user_id'] = '3333' - d['exploitability'] = 'yep' - d.json_dump = DotDict() - d.json_dump.sensitive = 22 - d.upload_file_minidump_flash1 = DotDict() - d.upload_file_minidump_flash1.json_dump = DotDict() - d.upload_file_minidump_flash1.json_dump.sensitive = 33 - d.upload_file_minidump_flash2 = DotDict() - d.upload_file_minidump_flash2.json_dump = DotDict() - d.upload_file_minidump_flash2.json_dump.sensitive = 33 - d.upload_file_minidump_browser = DotDict() - d.upload_file_minidump_browser.json_dump = DotDict() - d.upload_file_minidump_browser.json_dump.sensitive = DotDict() - d.upload_file_minidump_browser.json_dump.sensitive.exploitable = 55 - d.upload_file_minidump_browser.json_dump.sensitive.secret = 66 - - return d - - def _fake_unredacted_processed_crash_as_string(self): - d = self._fake_unredacted_processed_crash() - s = json.dumps(d) - return s - - - def test_close(self): - self.storage.close() - self.assertEqual(self.storage.hbase.close.call_count, 0) - - def test_save_processed(self): - self.storage.save_processed({ - "uuid": "936ce666-ff3b-4c7a-9674-367fe2120408", - "completeddatetime": "2012-04-08 10:56:50.902884", - "signature": 'now_this_is_a_signature' - }) - with self.storage.hbase() as conn: - self.assertEqual(conn.table.call_count, 1) - #self.assertEqual(conn.client.mutateRow.call_count, 2) - - def test_save_raw_crash(self): - self.storage.save_raw_crash({ - "submitted_timestamp": "2013-01-09T22:21:18.646733+00:00" - }, MemoryDumpsMapping(), "0bba929f-8721-460c-dead-a43c20071027") - with self.storage.hbase() as conn: - self.assertEqual(conn.table.call_count, 1) - self.assertEqual(conn.table.return_value.put.call_count, 1) - - def test_save_raw_crash_hang(self): 
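The fixture pair above encodes what redaction does: forbidden keys are dotted paths deleted from the nested processed crash before it is served. A sketch of that traversal (the `redact` helper and path list are illustrative, not the `Redactor` class itself):

```python
def redact(mapping, forbidden_keys):
    for dotted in forbidden_keys:
        node = mapping
        parts = dotted.split('.')
        try:
            # Walk to the parent of the final segment, then delete it.
            for part in parts[:-1]:
                node = node[part]
            del node[parts[-1]]
        except KeyError:
            pass  # absent keys are simply skipped

crash = {'url': 'http://very.embarassing.com',
         'json_dump': {'sensitive': 22, 'stackwalker': 'ok'}}
redact(crash, ['url', 'email', 'json_dump.sensitive'])
assert crash == {'json_dump': {'stackwalker': 'ok'}}
```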
- self.storage.save_raw_crash({ - "submitted_timestamp": "2013-01-09T22:21:18.646733+00:00", - "HangID": "?" - }, MemoryDumpsMapping(), "0bba929f-8721-460c-dead-a43c20071027") - with self.storage.hbase() as conn: - self.assertEqual(conn.table.call_count, 1) - self.assertEqual(conn.table.return_value.put.call_count, 1) - - def test_get_raw_dumps(self): - self.storage.get_raw_dumps("936ce666-ff3b-4c7a-9674-367fe2120408") - with self.storage.hbase() as conn: - self.assertEqual(conn.table.return_value.row.call_count, 1) - - def test_get_raw_dumps_as_files(self): - self.storage.get_raw_dumps_as_files( - "936ce666-ff3b-4c7a-9674-367fe2120408") - with self.storage.hbase() as conn: - self.assertEqual(conn.table.return_value.row.call_count, 1) - - def test_get_unredacted_processed(self): - - processed_crash = DotDict() - with self.storage.hbase() as conn: - conn.table.return_value.row.return_value = { - 'processed_data:json': - self._fake_unredacted_processed_crash_as_string() - } - - processed_crash = self.storage.get_unredacted_processed( - "936ce666-ff3b-4c7a-9674-367fe2120408" - ) - self.assertEqual( - processed_crash, - self._fake_unredacted_processed_crash() - ) - - def test_get_processed(self): - faked_hb_row_object = DotDict() - faked_hb_row_object.columns = DotDict() - faked_hb_row_object.columns['processed_data:json'] = DotDict() - faked_hb_row_object.columns['processed_data:json'].value = \ - self._fake_unredacted_processed_crash_as_string() - - processed_crash = DotDict() - with self.storage.hbase() as conn: - conn.table.return_value.row.return_value = { - 'processed_data:json': - self._fake_unredacted_processed_crash_as_string() - } - - processed_crash = self.storage.get_processed( - "936ce666-ff3b-4c7a-9674-367fe2120408" - ) - self.assertEqual( - processed_crash, - self._fake_redacted_processed_crash() - ) - - - def test_get_processed_failure(self): - with self.storage.hbase() as conn: - conn.table.return_value.row.return_value = {} - self.assertRaises( - CrashIDNotFound, - self.storage.get_processed, - "936ce666-ff3b-4c7a-9674-367fe2120408" - ) - - def test_new_crashes(self): - self.storage._salted_scanner_iterable = mock.Mock( - return_value=iter([]) - ) - self.assertEqual(list(self.storage.new_crashes()), []) diff --git a/socorro/unittest/external/hb/__init__.py b/socorro/unittest/external/hb/__init__.py deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/socorro/unittest/external/hb/test_connection_context.py b/socorro/unittest/external/hb/test_connection_context.py deleted file mode 100644 index 0019f08164..0000000000 --- a/socorro/unittest/external/hb/test_connection_context.py +++ /dev/null @@ -1,237 +0,0 @@ -# This Source Code Form is subject to the terms of the Mozilla Public -# License, v. 2.0. If a copy of the MPL was not distributed with this -# file, You can obtain one at http://mozilla.org/MPL/2.0/. 
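A recurring idiom in these deleted tests is patching the connection class on the module under test, so the storage code exercises a counting fake rather than a live HBase. A self-contained sketch of the pattern (the module stand-in is hypothetical; the tests themselves use the standalone `mock` package):

```python
from unittest import mock

class FakeConnection(object):
    def __init__(self):
        self.close_counter = 0
    def close(self):
        self.close_counter += 1

class module_under_test(object):  # stand-in for a connection_context module
    HBaseConnection = None

fake = FakeConnection()
with mock.patch.object(module_under_test, 'HBaseConnection',
                       mock.Mock(return_value=fake)):
    conn = module_under_test.HBaseConnection('host', 9090)
    conn.close()

assert fake.close_counter == 1
assert module_under_test.HBaseConnection is None  # patch undone on exit
```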
- -import mock -from nose.tools import eq_, ok_, assert_raises - -from socorro.external.hb import connection_context -from socorrolib.lib.util import SilentFakeLogger, DotDict -from socorro.database.transaction_executor import TransactionExecutor -from socorro.unittest.testbase import TestCase - -from socket import error - - -class FakeHB_Connection(object): - def __init__(self, config): - self.hbaseThriftExceptions = (error,) - self.close_counter = 0 - self.commit_counter = 0 - self.rollback_counter = 0 - - def close(self): - self.close_counter += 1 - - def commit(self): - self.commit_counter += 1 - - def rollback(self): - self.rollback_counter += 1 - - -class TestConnectionContext(TestCase): - def test_basic_hbase_usage(self): - local_config = DotDict({ - 'hbase_host': 'host', - 'database_name': 'name', - 'hbase_port': 9090, - 'hbase_timeout': 9000, - 'number_of_retries': 2, - 'logger': SilentFakeLogger(), - 'executor_identity': lambda: 'dwight' # bogus thread id - }) - a_fake_hbase_connection = FakeHB_Connection(local_config) - with mock.patch.object(connection_context, 'HBaseConnection', - mock.Mock(return_value=a_fake_hbase_connection)): - hb_context = connection_context.HBaseConnectionContext( - local_config - ) - # open a connection - with hb_context() as conn: - pass - eq_( - a_fake_hbase_connection.close_counter, - 1 - ) - # open another connection again - with hb_context() as conn: - pass - eq_( - a_fake_hbase_connection.close_counter, - 2 - ) - # get a named connection - with hb_context('fred') as conn: - pass - eq_( - a_fake_hbase_connection.close_counter, - 3 - ) - # close all connections - hb_context.close() - eq_( - a_fake_hbase_connection.close_counter, - 3 - ) - - def test_hbase_usage_with_transaction(self): - local_config = DotDict({ - 'hbase_host': 'host', - 'database_name': 'name', - 'hbase_port': 9090, - 'hbase_timeout': 9000, - 'number_of_retries': 2, - 'logger': SilentFakeLogger(), - 'executor_identity': lambda: 'dwight' # bogus thread id - }) - a_fake_hbase_connection = FakeHB_Connection(local_config) - with mock.patch.object(connection_context, 'HBaseConnection', - mock.Mock(return_value=a_fake_hbase_connection)): - hb_context = connection_context.HBaseConnectionContext( - local_config - ) - def all_ok(connection, dummy): - eq_(dummy, 'hello') - return True - - transaction = TransactionExecutor(local_config, hb_context) - result = transaction(all_ok, 'hello') - ok_(result) - eq_( - a_fake_hbase_connection.close_counter, - 1 - ) - eq_( - a_fake_hbase_connection.rollback_counter, - 0 - ) - eq_( - a_fake_hbase_connection.commit_counter, - 1 - ) - - def bad_deal(connection, dummy): - raise KeyError('fred') - - assert_raises(KeyError, transaction, bad_deal, 'hello') - eq_( - a_fake_hbase_connection.close_counter, - 2 - ) - eq_( - a_fake_hbase_connection.commit_counter, - 1 - ) - - hb_context.close() - eq_( - a_fake_hbase_connection.close_counter, - 2 - ) - - -class TestHBasePooledConnectionContext(TestCase): - - def test_basic_hbase_usage(self): - local_config = DotDict({ - 'hbase_host': 'host', - 'database_name': 'name', - 'hbase_port': 9090, - 'hbase_timeout': 9000, - 'number_of_retries': 2, - 'logger': SilentFakeLogger(), - 'executor_identity': lambda: 'dwight' # bogus thread id - }) - a_fake_hbase_connection = FakeHB_Connection(local_config) - with mock.patch.object(connection_context, 'HBaseConnection', - mock.Mock(return_value=a_fake_hbase_connection)): - hb_context = connection_context.HBasePooledConnectionContext( - local_config - ) - # open a connection - with 
hb_context() as conn: - pass - eq_( - a_fake_hbase_connection.close_counter, - 0 - ) - # open another connection again - with hb_context() as conn: - pass - eq_( - a_fake_hbase_connection.close_counter, - 0 - ) - # get a named connection - with hb_context('fred') as conn: - pass - eq_( - a_fake_hbase_connection.close_counter, - 0 - ) - # close all connections - hb_context.close() - eq_( - a_fake_hbase_connection.close_counter, - 2 - ) - - def test_hbase_usage_with_transaction(self): - local_config = DotDict({ - 'hbase_host': 'host', - 'database_name': 'name', - 'hbase_port': 9090, - 'hbase_timeout': 9000, - 'number_of_retries': 2, - 'logger': SilentFakeLogger(), - 'executor_identity': lambda: 'dwight' # bogus thread id - }) - a_fake_hbase_connection = FakeHB_Connection(local_config) - with mock.patch.object(connection_context, 'HBaseConnection', - mock.Mock(return_value=a_fake_hbase_connection)): - hb_context = connection_context.HBasePooledConnectionContext( - local_config - ) - def all_ok(connection, dummy): - eq_(dummy, 'hello') - return True - - transaction = TransactionExecutor(local_config, hb_context) - result = transaction(all_ok, 'hello') - ok_(result) - eq_( - a_fake_hbase_connection.close_counter, - 0 - ) - eq_( - a_fake_hbase_connection.rollback_counter, - 0 - ) - eq_( - a_fake_hbase_connection.commit_counter, - 1 - ) - - def bad_deal(connection, dummy): - raise KeyError('fred') - - assert_raises(KeyError, transaction, bad_deal, 'hello') - # at this point, the underlying connection has been deleted from - # the pool, because it was considered to be a bad connection. - eq_( - a_fake_hbase_connection.close_counter, - 0 - ) - eq_( - a_fake_hbase_connection.commit_counter, - 1 - ) - - hb_context.close() - # because the connection was previously deleted from the pool, - # no connection gets closed at this point. - eq_( - a_fake_hbase_connection.close_counter, - 0 - ) diff --git a/socorro/unittest/external/hb/test_crash_data.py b/socorro/unittest/external/hb/test_crash_data.py deleted file mode 100644 index 1e110b8e59..0000000000 --- a/socorro/unittest/external/hb/test_crash_data.py +++ /dev/null @@ -1,237 +0,0 @@ -# This Source Code Form is subject to the terms of the Mozilla Public -# License, v. 2.0. If a copy of the MPL was not distributed with this -# file, You can obtain one at http://mozilla.org/MPL/2.0/. 
- -import os -from nose.plugins.skip import SkipTest -from nose.tools import eq_, assert_raises -from configman import ConfigurationManager, Namespace -from mock import Mock - -from socorrolib.lib import ( - MissingArgumentError, - ResourceNotFound, - ResourceUnavailable, -) -from socorro.external.hb import crash_data, crashstorage, hbase_client -from socorro.unittest.testbase import TestCase - - -_run_integration_tests = os.environ.get('RUN_HBASE_INTEGRATION_TESTS', False) -if _run_integration_tests in ('false', 'False', 'no', '0'): - _run_integration_tests = False - - -class TestIntegrationHBaseCrashData(TestCase): - - def setUp(self): - if not _run_integration_tests: - raise SkipTest("Skipping HBase integration tests") - super(TestIntegrationHBaseCrashData, self).setUp() - self.config_manager = self._common_config_setup() - - with self.config_manager.context() as config: - store = crashstorage.HBaseCrashStorage(config.hbase) - - # A complete crash report (raw, dump and processed) - fake_raw_dump_1 = 'peter is a swede' - fake_raw_dump_2 = 'lars is a norseman' - fake_raw_dump_3 = 'adrian is a frenchman' - fake_dumps = {'upload_file_minidump': fake_raw_dump_1, - 'lars': fake_raw_dump_2, - 'adrian': fake_raw_dump_3} - fake_raw = { - 'name': 'Peter', - 'legacy_processing': 0, - 'submitted_timestamp': '2013-05-04' - } - fake_processed = { - 'name': 'Peter', - 'uuid': '114559a5-d8e6-428c-8b88-1c1f22120314', - 'completeddatetime': '2012-01-01T00:00:00', - 'email': 'peter@fake.org', - } - - store.save_raw_crash( - fake_raw, - fake_dumps, - '114559a5-d8e6-428c-8b88-1c1f22120314' - ) - store.save_processed(fake_processed) - - # A non-processed crash report - fake_raw = { - 'name': 'Adrian', - 'legacy_processing': 0, - 'submitted_timestamp': '2013-05-04' - } - - store.save_raw_crash( - fake_raw, - fake_dumps, - '58727744-12f5-454a-bcf5-f688a2120821' - ) - - def tearDown(self): - super(TestIntegrationHBaseCrashData, self).tearDown() - with self.config_manager.context() as config: - connection = hbase_client.HBaseConnectionForCrashReports( - config.hbase.hbase_host, - config.hbase.hbase_port, - config.hbase.hbase_timeout - ) - for row in connection.merge_scan_with_prefix( - 'crash_reports', '', ['ids:ooid']): - index_row_key = row['_rowkey'] - connection.client.deleteAllRow( - 'crash_reports', index_row_key) - # because of HBase's async nature, deleting can take time - list(connection.iterator_for_all_legacy_to_be_processed()) - - def _common_config_setup(self): - mock_logging = Mock() - required_config = Namespace() - required_config.namespace('hbase') - required_config.hbase.hbase_class = \ - crashstorage.HBaseCrashStorage - required_config.hbase.add_option('logger', default=mock_logging) - config_manager = ConfigurationManager( - [required_config], - app_name='testapp', - app_version='1.0', - app_description='app description', - values_source_list=[{'hbase': { - 'logger': mock_logging - }}] - ) - return config_manager - - def test_get(self): - with self.config_manager.context() as config: - - priorityjobs_mock = Mock() - service = crash_data.CrashData( - config=config, - all_services={'Priorityjobs': priorityjobs_mock} - ) - params = { - 'datatype': 'raw', - 'uuid': '114559a5-d8e6-428c-8b88-1c1f22120314' - } - - # Test 1a: get a raw dump - res_expected = ('peter is a swede', - 'application/octet-stream') - res = service.get(**params) - eq_(res, res_expected) - - # Test 1b: get a raw dump with the default name - res = service.get(**dict(params, name='upload_file_minidump')) - eq_(res, res_expected) 
- - # Test 1c: get a raw dump with a different name - res_expected = ('lars is a norseman', - 'application/octet-stream') - res = service.get(**dict(params, name='lars')) - eq_(res, res_expected) - - # Test 2: get a raw crash - params['datatype'] = 'meta' - res_expected = { - 'name': 'Peter', - 'legacy_processing': 0, - 'submitted_timestamp': '2013-05-04' - } - res = service.get(**params) - - eq_(res, res_expected) - - # Test 3: get a processed crash - params['datatype'] = 'processed' - res_expected = { - 'name': 'Peter', - 'uuid': '114559a5-d8e6-428c-8b88-1c1f22120314', - 'completeddatetime': '2012-01-01T00:00:00' - } - res = service.get(**params) - - eq_(res, res_expected) - - # Test 3a: get a unredacted processed crash - params['datatype'] = 'unredacted' - res_expected = { - 'name': 'Peter', - 'uuid': '114559a5-d8e6-428c-8b88-1c1f22120314', - 'completeddatetime': '2012-01-01T00:00:00', - 'email': 'peter@fake.org', - } - res = service.get(**params) - - eq_(res, res_expected) - - # Test 4: missing parameters - assert_raises( - MissingArgumentError, - service.get - ) - assert_raises( - MissingArgumentError, - service.get, - **{'uuid': '114559a5-d8e6-428c-8b88-1c1f22120314'} - ) - - # Test 5: crash cannot be found - assert_raises( - ResourceNotFound, - service.get, - **{ - 'uuid': 'c44245f4-c93b-49b8-86a2-c15dc2130504', - 'datatype': 'processed' - } - ) - # Test 5a: crash cannot be found - assert_raises( - ResourceNotFound, - service.get, - **{ - 'uuid': 'c44245f4-c93b-49b8-86a2-c15dc2130504', - 'datatype': 'unredacted' - } - ) - - # Test 6: not yet available crash - assert_raises( - ResourceUnavailable, - service.get, - **{ - 'uuid': '58727744-12f5-454a-bcf5-f688a2120821', - 'datatype': 'processed' - } - ) - priorityjobs_mock.cls.return_value.create.assert_called_once_with( - uuid='58727744-12f5-454a-bcf5-f688a2120821' - ) - priorityjobs_mock.cls.return_value.create.reset_mock() - - # Test 6a: not yet available crash - assert_raises( - ResourceUnavailable, - service.get, - **{ - 'uuid': '58727744-12f5-454a-bcf5-f688a2120821', - 'datatype': 'unredacted' - } - ) - priorityjobs_mock.cls.return_value.create.assert_called_once_with( - uuid='58727744-12f5-454a-bcf5-f688a2120821' - ) - - # Test 7: raw crash cannot be found - assert_raises( - ResourceNotFound, - service.get, - **{ - 'uuid': 'c44245f4-c93b-49b8-86a2-c15dc2130505', - 'datatype': 'raw' - } - ) diff --git a/socorro/unittest/external/hb/test_crashstorage.py b/socorro/unittest/external/hb/test_crashstorage.py deleted file mode 100644 index e1a94bf341..0000000000 --- a/socorro/unittest/external/hb/test_crashstorage.py +++ /dev/null @@ -1,187 +0,0 @@ -# This Source Code Form is subject to the terms of the Mozilla Public -# License, v. 2.0. If a copy of the MPL was not distributed with this -# file, You can obtain one at http://mozilla.org/MPL/2.0/. 
- -import json - -import mock -from nose.tools import eq_, assert_raises - -from socorrolib.lib.util import SilentFakeLogger, DotDict -from socorro.external.crashstorage_base import ( - Redactor, - MemoryDumpsMapping -) -from socorro.external.hb.crashstorage import HBaseCrashStorage, CrashIDNotFound -from socorro.database.transaction_executor import TransactionExecutor -from socorro.unittest.testbase import TestCase - - -class TestCrashStorage(TestCase): - def setUp(self): - super(TestCrashStorage, self).setUp() - self.context = mock.MagicMock() - self.context.__enter__.return_value = self.context - config = DotDict({ - 'hbase_host': 'host', - 'database_name': 'name', - 'hbase_port': 9090, - 'hbase_timeout': 9000, - 'number_of_retries': 2, - 'logger': SilentFakeLogger(), - 'hbase_connection_context_class': mock.Mock( - return_value=self.context - ), - 'transaction_executor_class': TransactionExecutor, - 'new_crash_limit': 10 ** 6, - 'redactor_class': Redactor, - 'forbidden_keys': Redactor.required_config.forbidden_keys.default, - }) - self.storage = HBaseCrashStorage(config) - - def _fake_processed_crash(self): - d = DotDict() - # these keys survive redaction - d.a = DotDict() - d.a.b = DotDict() - d.a.b.c = 11 - d.sensitive = DotDict() - d.sensitive.x = 2 - d.not_url = 'not a url' - - return d - - def _fake_redacted_processed_crash(self): - d = self._fake_unredacted_processed_crash() - del d.url - del d.email - del d.user_id - del d.exploitability - del d.json_dump.sensitive - del d.upload_file_minidump_flash1.json_dump.sensitive - del d.upload_file_minidump_flash2.json_dump.sensitive - del d.upload_file_minidump_browser.json_dump.sensitive - - return d - - def _fake_unredacted_processed_crash(self): - d = self._fake_processed_crash() - - # these keys do not survive redaction - d['url'] = 'http://very.embarassing.com' - d['email'] = 'lars@fake.com' - d['user_id'] = '3333' - d['exploitability'] = 'yep' - d.json_dump = DotDict() - d.json_dump.sensitive = 22 - d.upload_file_minidump_flash1 = DotDict() - d.upload_file_minidump_flash1.json_dump = DotDict() - d.upload_file_minidump_flash1.json_dump.sensitive = 33 - d.upload_file_minidump_flash2 = DotDict() - d.upload_file_minidump_flash2.json_dump = DotDict() - d.upload_file_minidump_flash2.json_dump.sensitive = 33 - d.upload_file_minidump_browser = DotDict() - d.upload_file_minidump_browser.json_dump = DotDict() - d.upload_file_minidump_browser.json_dump.sensitive = DotDict() - d.upload_file_minidump_browser.json_dump.sensitive.exploitable = 55 - d.upload_file_minidump_browser.json_dump.sensitive.secret = 66 - - return d - - def _fake_unredacted_processed_crash_as_string(self): - d = self._fake_unredacted_processed_crash() - s = json.dumps(d) - return s - - - def test_close(self): - self.storage.close() - eq_(self.storage.hbase.close.call_count, 1) - - def test_save_processed(self): - self.storage.save_processed({ - "uuid": "936ce666-ff3b-4c7a-9674-367fe2120408", - "completeddatetime": "2012-04-08 10:56:50.902884" - }) - with self.storage.hbase() as conn: - eq_(conn.client.mutateRow.call_count, 2) - - def test_save_raw_crash(self): - self.storage.save_raw_crash({ - "submitted_timestamp": "2013-01-09T22:21:18.646733+00:00" - }, MemoryDumpsMapping(), "0bba929f-8721-460c-dead-a43c20071027") - with self.storage.hbase() as conn: - eq_(conn.client.mutateRow.call_count, 5) - - def test_save_raw_crash_hang(self): - self.storage.save_raw_crash({ - "submitted_timestamp": "2013-01-09T22:21:18.646733+00:00", - "HangID": "?" 
- }, MemoryDumpsMapping(), "0bba929f-8721-460c-dead-a43c20071027") - with self.storage.hbase() as conn: - eq_(conn.client.mutateRow.call_count, 7) - - def test_get_raw_dumps(self): - self.storage.get_raw_dumps("936ce666-ff3b-4c7a-9674-367fe2120408") - with self.storage.hbase() as conn: - eq_(conn.client.getRowWithColumns.call_count, 1) - - def test_get_raw_dumps_as_files(self): - self.storage.get_raw_dumps_as_files( - "936ce666-ff3b-4c7a-9674-367fe2120408") - with self.storage.hbase() as conn: - eq_(conn.client.getRowWithColumns.call_count, 1) - - def test_get_unredacted_processed(self): - faked_hb_row_object = DotDict() - faked_hb_row_object.columns = DotDict() - faked_hb_row_object.columns['processed_data:json'] = DotDict() - faked_hb_row_object.columns['processed_data:json'].value = \ - self._fake_unredacted_processed_crash_as_string() - - processed_crash = DotDict() - with self.storage.hbase() as conn: - conn.client.getRowWithColumns.return_value = [faked_hb_row_object] - - processed_crash = self.storage.get_unredacted_processed( - "936ce666-ff3b-4c7a-9674-367fe2120408" - ) - eq_( - processed_crash, - self._fake_unredacted_processed_crash() - ) - - def test_get_processed(self): - faked_hb_row_object = DotDict() - faked_hb_row_object.columns = DotDict() - faked_hb_row_object.columns['processed_data:json'] = DotDict() - faked_hb_row_object.columns['processed_data:json'].value = \ - self._fake_unredacted_processed_crash_as_string() - - processed_crash = DotDict() - with self.storage.hbase() as conn: - conn.client.getRowWithColumns.return_value = [faked_hb_row_object] - - processed_crash = self.storage.get_processed( - "936ce666-ff3b-4c7a-9674-367fe2120408" - ) - eq_( - processed_crash, - self._fake_redacted_processed_crash() - ) - - - def test_get_processed_failure(self): - with self.storage.hbase() as conn: - conn.client.getRowWithColumns.return_value = [] - assert_raises( - CrashIDNotFound, - self.storage.get_processed, - "936ce666-ff3b-4c7a-9674-367fe2120408" - ) - - def test_new_crashes(self): - self.storage._salted_scanner_iterable = mock.Mock( - return_value=iter([]) - ) - eq_(list(self.storage.new_crashes()), []) diff --git a/tools/loadjsonz.py b/tools/loadjsonz.py deleted file mode 100644 index 7cb219515d..0000000000 --- a/tools/loadjsonz.py +++ /dev/null @@ -1,42 +0,0 @@ -#! /usr/bin/env python -# This Source Code Form is subject to the terms of the Mozilla Public -# License, v. 2.0. If a copy of the MPL was not distributed with this -# file, You can obtain one at http://mozilla.org/MPL/2.0/. 
- - -import sys -import socorro.external.hbase.hbase_client as hbase -import gzip - -class JsonzLoader(object): - def __init__(self,host,port): - self.hbase_connection = hbase.HBaseConnectionForCrashReports(host,port) - - def close(self): - self.hbase_connection.close() - - def load_from_file(self, uuid, path): - jsonz_file = gzip.open(path, 'rb') - json_string = jsonz_file.read() - jsonz_file.close() - self.hbase_connection.create_ooid_from_jsonz(uuid,json_string) - -if __name__=="__main__": - if len(sys.argv) != 3: - print "Usage: loadjsonz.py \nText file should be uuid and file path seperated by a tab" - sys.exit(1) - input_file_path = sys.argv[1] - host, port = sys.argv[2].split(':') - loader = JsonzLoader(host,int(port)) - input_file = open(input_file_path,'rb') - i = 0 - for line in input_file: - uuid, path = line.strip().split('\t') - loader.load_from_file(uuid, path) - i += 1 - if i % 1000 == 0: - print i,'reports loaded' - loader.close() - input_file.close() - print "%s jsonz file(s) loaded" % i - From 8f64da46520f83dea660557a73ba7b52e772c86e Mon Sep 17 00:00:00 2001 From: Peter Bengtsson Date: Tue, 20 Sep 2016 10:40:12 -0400 Subject: [PATCH 13/13] fixes bug 1294088 - JSON Schema ambiguous types on certain keys (#3460) --- socorro/schemas/crash_report.json | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/socorro/schemas/crash_report.json b/socorro/schemas/crash_report.json index 9dbc2f7f74..24683cc6b7 100644 --- a/socorro/schemas/crash_report.json +++ b/socorro/schemas/crash_report.json @@ -63,7 +63,7 @@ "description": "Notes from the application that crashed. Mostly contains graphics-related annotations." }, "build_id": { - "type": ["integer", "string", "null"], + "type": ["string", "null"], "description": "The unique build identifier of this version, which is a timestamp of the form YYYYMMDDHHMMSS. " }, "classifications": { @@ -229,7 +229,7 @@ } }, "tiny_block_size": { - "type": ["integer", "string", "null"], + "type": ["string", "null"], "description": "If present, the total size of all memory regions in the crashing process that are smaller than 1 MB." }, "thread_count": { @@ -257,7 +257,7 @@ } }, "write_combine_size": { - "type": ["integer", "string", "null"], + "type": ["string", "null"], "description": "If present, the total size of all committed memory regions in the crashing process marked with PAGE_WRITECOMBINE." } }
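The final patch narrows three ambiguous union types (`build_id`, `tiny_block_size`, `write_combine_size`) from `["integer", "string", "null"]` to `["string", "null"]`, so schema consumers no longer have to branch on int-versus-string values. A quick check of the effect, assuming the `jsonschema` package is available (any draft-4 validator would do):

```python
import jsonschema

schema = {
    'type': 'object',
    'properties': {
        'build_id': {'type': ['string', 'null']},
    },
}

jsonschema.validate({'build_id': '20160920030424'}, schema)    # passes
try:
    jsonschema.validate({'build_id': 20160920030424}, schema)  # integer
except jsonschema.ValidationError:
    print('integer build_id now rejected, as intended')
```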