diff --git a/.github/workflows/test.yaml b/.github/workflows/test.yaml
index a296c9f..bdc145b 100644
--- a/.github/workflows/test.yaml
+++ b/.github/workflows/test.yaml
@@ -17,6 +17,10 @@ jobs:
           POSTGRES_PASSWORD: postgres
         ports:
           - 5432:5432
+      redis:
+        image: redis:6.0
+        ports:
+          - 6379:6379
     env:
       RP_SIDEKICK_DATABASE: postgres://postgres:postgres@localhost/rp_sidekick
     steps:
diff --git a/config/settings/base.py b/config/settings/base.py
index c81eff7..e987a38 100644
--- a/config/settings/base.py
+++ b/config/settings/base.py
@@ -223,3 +223,5 @@
     # something more human-readable.
     # release="myapp@1.0.0",
 )
+
+REDIS_URL = os.environ.get("REDIS_URL", "redis://localhost:6379/0")
diff --git a/rp_yal/tests/test_utils.py b/rp_yal/tests/test_utils.py
index ec328f9..fb5a63a 100644
--- a/rp_yal/tests/test_utils.py
+++ b/rp_yal/tests/test_utils.py
@@ -5,6 +5,7 @@
 from freezegun import freeze_time
 
 from rp_yal import utils
+from rp_yal.utils import redis_conn
 from sidekick.tests.utils import create_org
 
 TEST_CONTENT_SETS = [
@@ -495,3 +496,38 @@ def test_get_first_matching_content_set_no_matches(self):
         contentset_id = utils.get_first_matching_content_set(TEST_CONTENT_SETS, fields)
 
         self.assertEqual(contentset_id, 3)
+
+
+class GetUniquePageSeenIds(TestCase):
+    def setUp(self):
+        self.org = create_org()
+
+    def tearDown(self):
+        redis_conn.delete("page_seen_ids_27831231234")
+
+    @responses.activate
+    def test_get_unique_page_seen_ids_cache(self):
+        msisdn = "27831231234"
+        responses.add(
+            method=responses.GET,
+            url="http://contentrepo/api/v2/custom/pageviews/",
+            json={
+                "results": [
+                    {"page": 164},
+                    {"page": 165},
+                    {"page": 166},
+                ],
+            },
+            status=200,
+            match=[
+                responses.matchers.query_param_matcher(
+                    {"data__user_addr": msisdn, "unique_pages": "true"}
+                )
+            ],
+        )
+
+        ids = utils.get_unique_page_seen_ids(self.org, msisdn)
+        self.assertEqual(ids, [164, 165, 166])
+        ids = utils.get_unique_page_seen_ids(self.org, msisdn)
+        self.assertEqual(ids, [164, 165, 166])
+        self.assertEqual(len(responses.calls), 1)
diff --git a/rp_yal/utils.py b/rp_yal/utils.py
index 2f6bba2..67c1a0a 100644
--- a/rp_yal/utils.py
+++ b/rp_yal/utils.py
@@ -1,8 +1,11 @@
 from datetime import datetime
 from urllib.parse import urljoin
 
+import redis
 import requests
 
+redis_conn = redis.StrictRedis(decode_responses=True)
+
 
 def get_ordered_content_set(org, fields):
     search_term = None
@@ -208,6 +211,11 @@
 
 
 def get_unique_page_seen_ids(org, msisdn):
+    key_name = f"page_seen_ids_{msisdn}"
+
+    if redis_conn.get(key_name):
+        return [int(id) for id in redis_conn.get(key_name).split(",")]
+
     params = {
         "data__user_addr": msisdn,
         "unique_pages": "true",
@@ -220,7 +228,13 @@
     response.raise_for_status()
 
     pages_seen = response.json()
-    return [p["page"] for p in pages_seen["results"]]
+    ids = [p["page"] for p in pages_seen["results"]]
+
+    value = ",".join([str(id) for id in ids])
+    redis_conn.set(key_name, value)
+    redis_conn.expire(key_name, time=5 * 60 * 60)
+
+    return ids
 
 
 def get_contentrepo_headers(org):
@@ -250,5 +264,6 @@
         if page["id"] not in pages_seen_ids:
             unseen_pages.append(page)
     contentset_data["pages"] = unseen_pages
+    contentset_data["pages_seen_ids"] = pages_seen_ids
 
     return contentset_data
diff --git a/setup.py b/setup.py
index 1c330aa..df20583 100644
--- a/setup.py
+++ b/setup.py
@@ -21,7 +21,7 @@
         "phonenumbers==8.10.23",
         "psycopg2-binary==2.8.6",
         "rapidpro-python==2.6.1",
-        "redis==4.4.4",
+        "redis==4.5.4",
         "whitenoise==4.1.4",
         "raven==6.10.0",
         "hashids==1.3.1",