Merge pull request #186 from praekeltfoundation/add-page-view-caching
Add page view caching
erikh360 authored May 13, 2024
2 parents fde5cbb + a927657 commit 3369c52
Showing 5 changed files with 59 additions and 2 deletions.
4 changes: 4 additions & 0 deletions .github/workflows/test.yaml
@@ -17,6 +17,10 @@ jobs:
          POSTGRES_PASSWORD: postgres
        ports:
          - 5432:5432
+      redis:
+        image: redis:6.0
+        ports:
+          - 6379:6379
    env:
      RP_SIDEKICK_DATABASE: postgres://postgres:postgres@localhost/rp_sidekick
    steps:
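
The cache code introduced below reaches this service through redis-py's defaults (localhost:6379, db 0), which is exactly what the port mapping above exposes. A quick connectivity check, as a sketch only (not part of this diff):

import redis

# StrictRedis() with no arguments targets localhost:6379, db 0, the same
# address the CI service above maps to the host.
conn = redis.StrictRedis(decode_responses=True)
conn.ping()  # raises redis.ConnectionError if the service isn't reachable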
2 changes: 2 additions & 0 deletions config/settings/base.py
@@ -223,3 +223,5 @@
    # something more human-readable.
    # release="myapp@1.0.0",
)
+
+REDIS_URL = os.environ.get("REDIS_URL", "redis://localhost:6379/0")
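
Note that this setting and the client added in rp_yal/utils.py are linked only by their shared localhost default; the diff never reads REDIS_URL when building the connection. If the client were wired to the setting instead, a sketch (an assumption, not what this commit does) could look like:

import redis
from django.conf import settings

# redis.from_url parses scheme, host, port and db out of the URL.
redis_conn = redis.from_url(settings.REDIS_URL, decode_responses=True)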
36 changes: 36 additions & 0 deletions rp_yal/tests/test_utils.py
@@ -5,6 +5,7 @@
from freezegun import freeze_time

from rp_yal import utils
+from rp_yal.utils import redis_conn
from sidekick.tests.utils import create_org

TEST_CONTENT_SETS = [
@@ -495,3 +496,38 @@ def test_get_first_matching_content_set_no_matches(self):
        contentset_id = utils.get_first_matching_content_set(TEST_CONTENT_SETS, fields)

        self.assertEqual(contentset_id, 3)
+
+
+class GetUniquePageSeenIds(TestCase):
+    def setUp(self):
+        self.org = create_org()
+
+    def tearDown(self):
+        redis_conn.delete("page_seen_ids_27831231234")
+
+    @responses.activate
+    def test_get_unique_page_seen_ids_cache(self):
+        msisdn = "27831231234"
+        responses.add(
+            method=responses.GET,
+            url="http://contentrepo/api/v2/custom/pageviews/",
+            json={
+                "results": [
+                    {"page": 164},
+                    {"page": 165},
+                    {"page": 166},
+                ],
+            },
+            status=200,
+            match=[
+                responses.matchers.query_param_matcher(
+                    {"data__user_addr": msisdn, "unique_pages": "true"}
+                )
+            ],
+        )
+
+        ids = utils.get_unique_page_seen_ids(self.org, msisdn)
+        self.assertEqual(ids, [164, 165, 166])
+        ids = utils.get_unique_page_seen_ids(self.org, msisdn)
+        self.assertEqual(ids, [164, 165, 166])
+        self.assertEqual(len(responses.calls), 1)
17 changes: 16 additions & 1 deletion rp_yal/utils.py
@@ -1,8 +1,11 @@
from datetime import datetime
from urllib.parse import urljoin

+import redis
import requests

+redis_conn = redis.StrictRedis(decode_responses=True)
+

def get_ordered_content_set(org, fields):
    search_term = None
@@ -208,6 +211,11 @@ def get_first_matching_content_set(contentsets, fields):


def get_unique_page_seen_ids(org, msisdn):
+    key_name = f"page_seen_ids_{msisdn}"
+
+    if redis_conn.get(key_name):
+        return [int(id) for id in redis_conn.get(key_name).split(",")]
+
    params = {
        "data__user_addr": msisdn,
        "unique_pages": "true",
@@ -220,7 +228,13 @@ def get_unique_page_seen_ids(org, msisdn):
    response.raise_for_status()

    pages_seen = response.json()
-    return [p["page"] for p in pages_seen["results"]]
+    ids = [p["page"] for p in pages_seen["results"]]
+
+    value = ",".join([str(id) for id in ids])
+    redis_conn.set(key_name, value)
+    redis_conn.expire(key_name, time=5 * 60 * 60)
+
+    return ids


def get_contentrepo_headers(org):
@@ -250,5 +264,6 @@ def get_contentset(org, contentset_id, msisdn):
        if page["id"] not in pages_seen_ids:
            unseen_pages.append(page)
    contentset_data["pages"] = unseen_pages
+    contentset_data["pages_seen_ids"] = pages_seen_ids

    return contentset_data
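
Taken together, get_unique_page_seen_ids now caches each user's seen-page IDs as a comma-separated string under page_seen_ids_<msisdn> for five hours. A self-contained sketch of that pattern (the cached_ids helper is hypothetical; the redis-py calls are the ones the diff uses):

import redis

redis_conn = redis.StrictRedis(decode_responses=True)

def cached_ids(key_name, fetch):
    # Cache hit: the stored value is a comma-separated string of IDs.
    cached = redis_conn.get(key_name)
    if cached:
        return [int(i) for i in cached.split(",")]
    # Cache miss: fetch fresh IDs, store them, and expire after five hours.
    ids = fetch()
    redis_conn.set(key_name, ",".join(str(i) for i in ids))
    redis_conn.expire(key_name, time=5 * 60 * 60)
    return ids

redis-py's set() also accepts an ex= argument that would store the value and the TTL in a single call; the diff keeps set and expire as two separate steps.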
2 changes: 1 addition & 1 deletion setup.py
@@ -21,7 +21,7 @@
        "phonenumbers==8.10.23",
        "psycopg2-binary==2.8.6",
        "rapidpro-python==2.6.1",
-        "redis==4.4.4",
+        "redis==4.5.4",
        "whitenoise==4.1.4",
        "raven==6.10.0",
        "hashids==1.3.1",
"hashids==1.3.1",
