ROOT = $(shell echo "$$PWD")
COVERAGE_DIR = $(ROOT)/build/coverage
PACKAGES = analyticsdataserver analytics_data_api
DATABASES = default analytics
ELASTICSEARCH_VERSION = 1.5.2
ELASTICSEARCH_PORT = 9223
PYTHON_ENV=py38
DJANGO_VERSION=django22
.DEFAULT_GOAL := help
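
# Running a bare `make` invokes the `help` target below (via .DEFAULT_GOAL); the
# `## ...` suffix on each target line is what the help parser picks up and prints.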
help: ## display this help message
	@echo "Please use \`make <target>' where <target> is one of"
	@perl -nle'print $& if m{^[\.a-zA-Z_-]+:.*?## .*$$}' $(MAKEFILE_LIST) | sort | awk 'BEGIN {FS = ":.*?## "}; {printf "\033[36m %-25s\033[0m %s\n", $$1, $$2}'

.PHONY: requirements develop clean diff.report view.diff.report quality static

requirements: ## install base requirements
	pip3 install -q -r requirements/base.txt

production-requirements: ## install production requirements
	pip3 install -r requirements.txt

test.install_elasticsearch: ## install elasticsearch
	curl -L -O https://download.elastic.co/elasticsearch/elasticsearch/elasticsearch-$(ELASTICSEARCH_VERSION).zip
	unzip elasticsearch-$(ELASTICSEARCH_VERSION).zip
	echo "http.port: $(ELASTICSEARCH_PORT)" >> elasticsearch-$(ELASTICSEARCH_VERSION)/config/elasticsearch.yml

test.run_elasticsearch: ## run elasticsearch
	cd elasticsearch-$(ELASTICSEARCH_VERSION) && ./bin/elasticsearch -d --http.port=$(ELASTICSEARCH_PORT)
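
# A typical local test run (a sketch, assuming Elasticsearch is not already
# installed or listening on $(ELASTICSEARCH_PORT)):
#
#   make test.install_elasticsearch   # download and unpack ES $(ELASTICSEARCH_VERSION) next to this Makefile
#   make test                         # starts ES if needed, then runs the tox test and coverage envs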

test.requirements: requirements ## install base and test requirements
	pip3 install -q -r requirements/test.txt

tox.requirements: ## install tox requirements
	pip3 install -q -r requirements/tox.txt

develop: test.requirements ## install test and dev requirements
	pip3 install -q -r requirements/dev.txt

upgrade: export CUSTOM_COMPILE_COMMAND=make upgrade
upgrade: ## update the requirements/*.txt files with the latest packages satisfying requirements/*.in
	pip3 install -q -r requirements/pip_tools.txt
	pip-compile --upgrade -o requirements/pip_tools.txt requirements/pip_tools.in
	pip-compile --upgrade -o requirements/base.txt requirements/base.in
	pip-compile --upgrade -o requirements/doc.txt requirements/doc.in
	pip-compile --upgrade -o requirements/dev.txt requirements/dev.in
	pip-compile --upgrade -o requirements/production.txt requirements/production.in
	pip-compile --upgrade -o requirements/test.txt requirements/test.in
	pip-compile --upgrade -o requirements/tox.txt requirements/tox.in
	pip-compile --upgrade -o requirements/travis.txt requirements/travis.in
	scripts/post-pip-compile.sh \
	    requirements/pip_tools.txt \
	    requirements/base.txt \
	    requirements/doc.txt \
	    requirements/dev.txt \
	    requirements/production.txt \
	    requirements/test.txt \
	    requirements/tox.txt \
	    requirements/travis.txt
## Let tox control the Django version for tests
	grep -e "^django==" requirements/base.txt > requirements/django.txt
	sed '/^[dD]jango==/d' requirements/test.txt > requirements/test.tmp
	mv requirements/test.tmp requirements/test.txt
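
# The last three recipe lines above pin Django separately: grep copies the resolved
# pin (e.g. a line like "django==2.2.x", shown here only as an illustration) into
# requirements/django.txt, and the sed/mv pair strips any Django pin back out of
# requirements/test.txt so the tox environment ($(DJANGO_VERSION)) decides which
# Django is installed for tests.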

clean: tox.requirements ## install tox requirements and run tox clean. Delete *.pyc files
	tox -e $(PYTHON_ENV)-$(DJANGO_VERSION)-clean
	find . -name '*.pyc' -delete

test: tox.requirements clean ## install tox requirements, run tox clean, start elasticsearch if installed, run tests, and generate the coverage report
	if [ -e elasticsearch-$(ELASTICSEARCH_VERSION) ]; then curl --silent --head http://localhost:$(ELASTICSEARCH_PORT)/roster_test > /dev/null || make test.run_elasticsearch; fi ## Launch ES if installed and not running
	tox -e $(PYTHON_ENV)-$(DJANGO_VERSION)-tests
	export COVERAGE_DIR=$(COVERAGE_DIR) && \
	    tox -e $(PYTHON_ENV)-$(DJANGO_VERSION)-coverage

diff.report: test.requirements ## Show the diff in quality and coverage
	diff-cover $(COVERAGE_DIR)/coverage.xml --html-report $(COVERAGE_DIR)/diff_cover.html
	diff-quality --violations=pycodestyle --html-report $(COVERAGE_DIR)/diff_quality_pycodestyle.html
	diff-quality --violations=pylint --html-report $(COVERAGE_DIR)/diff_quality_pylint.html

view.diff.report: ## Show the diff in quality and coverage using xdg
	xdg-open file:///$(COVERAGE_DIR)/diff_cover.html
	xdg-open file:///$(COVERAGE_DIR)/diff_quality_pycodestyle.html
	xdg-open file:///$(COVERAGE_DIR)/diff_quality_pylint.html

run_check_isort: tox.requirements ## Run tox check_isort. (Installs tox requirements.)
	tox -e $(PYTHON_ENV)-$(DJANGO_VERSION)-check_isort

run_pycodestyle: tox.requirements ## Run tox pycodestyle. (Installs tox requirements.)
	tox -e $(PYTHON_ENV)-$(DJANGO_VERSION)-pycodestyle

run_pylint: tox.requirements ## Run tox pylint. (Installs tox requirements.)
	tox -e $(PYTHON_ENV)-$(DJANGO_VERSION)-pylint

run_isort: tox.requirements ## Run tox isort. (Installs tox requirements.)
	tox -e $(PYTHON_ENV)-$(DJANGO_VERSION)-isort

quality: tox.requirements run_pylint run_check_isort run_pycodestyle ## run_pylint, run_check_isort, run_pycodestyle (Installs tox requirements.)

validate: test.requirements test quality ## Runs make test and make quality. (Installs test requirements.)

static: ## Runs collectstatic
	python manage.py collectstatic --noinput

migrate: ## Runs django migrations with syncdb on the default database
	./manage.py migrate --noinput --run-syncdb --database=default

migrate-all: ## Runs migrations on all databases
	$(foreach db_name,$(DATABASES),./manage.py migrate --noinput --run-syncdb --database=$(db_name);)
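
# The $(foreach ...) above expands to one migrate call per entry in DATABASES,
# i.e. with DATABASES = default analytics it runs:
#   ./manage.py migrate --noinput --run-syncdb --database=default;
#   ./manage.py migrate --noinput --run-syncdb --database=analytics;
# (both emitted on a single shell line, separated by the trailing semicolons).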

loaddata: migrate ## Runs migrations and generates fake data
	python manage.py loaddata problem_response_answer_distribution --database=analytics
	python manage.py generate_fake_course_data

demo: clean requirements loaddata ## Runs make clean, requirements, and loaddata; sets the API key to edx
	python manage.py set_api_key edx edx

# Target used by edx-analytics-dashboard during its testing.
travis: clean test.requirements migrate-all ## Used by travis for testing
	python manage.py set_api_key edx edx
	python manage.py loaddata problem_response_answer_distribution --database=analytics
	python manage.py generate_fake_course_data --num-weeks=2 --no-videos --course-id "edX/DemoX/Demo_Course"

docker_build:
	docker build . -f Dockerfile -t openedx/analytics-data-api
	docker build . -f Dockerfile --target newrelic -t openedx/analytics-data-api:latest-newrelic
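
# docker_build produces two local images from the same Dockerfile: the default
# build tagged openedx/analytics-data-api (i.e. :latest) and the newrelic build
# stage tagged :latest-newrelic. The travis_docker_* targets below re-tag and
# push both.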

travis_docker_tag: docker_build
	docker tag openedx/analytics-data-api openedx/analytics-data-api:$$TRAVIS_COMMIT
	docker tag openedx/analytics-data-api:latest-newrelic openedx/analytics-data-api:$$TRAVIS_COMMIT-newrelic

travis_docker_auth:
	echo "$$DOCKER_PASSWORD" | docker login -u "$$DOCKER_USERNAME" --password-stdin
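
# TRAVIS_COMMIT, DOCKER_USERNAME, and DOCKER_PASSWORD are read from the shell
# environment (they are not defined in this Makefile) and are expected to be
# supplied by the CI environment.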

travis_docker_push: travis_docker_tag travis_docker_auth ## push to docker hub
	docker push 'openedx/analytics-data-api:latest'
	docker push "openedx/analytics-data-api:$$TRAVIS_COMMIT"
	docker push 'openedx/analytics-data-api:latest-newrelic'
	docker push "openedx/analytics-data-api:$$TRAVIS_COMMIT-newrelic"