Merge pull request #6568 from Philippos01/6186/Carousel/Trendings-Books-Carousel

Create a Trending Books Carousel in the Home page
jimchamp authored Jun 8, 2022
2 parents 31cc10c + b910b64 commit 585f821
Showing 4 changed files with 123 additions and 51 deletions.
44 changes: 37 additions & 7 deletions openlibrary/core/bookshelves.py
@@ -76,26 +76,55 @@ def total_unique_users(cls, since=None):
return results[0] if results else None

@classmethod
def most_logged_books(cls, shelf_id=None, limit=10, since=False):
def most_logged_books(cls, shelf_id=None, limit=10, since=False, page=1, fetch=False):
"""Returns a ranked list of work OLIDs (in the form of an integer --
i.e. OL123W would be 123) which have been most logged by
users. This query is limited to a specific shelf_id (e.g. 1
for "Want to Read").
"""
offset = (page - 1) * limit
oldb = db.get_db()
where = 'WHERE bookshelf_id' + ('=$shelf_id' if shelf_id else ' IS NOT NULL ')
if since:
where += ' AND created >= $since'
query = f'select work_id, count(*) as cnt from bookshelves_books {where}'
query += ' group by work_id order by cnt desc limit $limit'
query += ' group by work_id order by cnt desc limit $limit offset $offset'
logger.info("Query: %s", query)
logged_books = list(
oldb.query(
query, vars={'shelf_id': shelf_id, 'limit': limit, 'since': since}
query, vars={'shelf_id': shelf_id, 'limit': limit, 'offset': offset, 'since': since}
)
)
logger.info("Results: %s", logged_books)
return logged_books
return cls.fetch(logged_books) if fetch else logged_books

@classmethod
def fetch(cls, readinglog_items):
"""Given a list of readinglog_items, such as those returned by
Bookshelves.most_logged_books, fetch the corresponding Open Library
book records from solr with availability
"""
from openlibrary.plugins.worksearch.code import get_solr_works
from openlibrary.core.lending import get_availabilities

# This gives us a dict of all the works representing
# the logged_books, keyed by work_id
work_index = get_solr_works(
f"/works/OL{i['work_id']}W"
for i in readinglog_items
)

# Loop over each work in the index and inject its availability
availability_index = get_availabilities(work_index.values())
for work_key in availability_index:
work_index[work_key]['availability'] = availability_index[work_key]

# Return items from the work_index in the order
# they are represented by the trending logged books
for i, item in enumerate(readinglog_items):
key = f"/works/OL{item['work_id']}W"
if key in work_index:
readinglog_items[i]['work'] = work_index[key]
return readinglog_items

@classmethod
def count_total_books_logged_by_user(cls, username, bookshelf_ids=None):
@@ -185,7 +214,7 @@ def get_users_logged_books(


@classmethod
def get_recently_logged_books(cls, bookshelf_id=None, limit=50, page=1):
def get_recently_logged_books(cls, bookshelf_id=None, limit=50, page=1, fetch=False):
oldb = db.get_db()
page = int(page) if page else 1
data = {
@@ -198,7 +227,8 @@ def get_recently_logged_books(cls, bookshelf_id=None, limit=50, page=1):
f"SELECT * from bookshelves_books {where} "
"ORDER BY created DESC LIMIT $limit OFFSET $offset"
)
return list(oldb.query(query, vars=data))
logged_books = list(oldb.query(query, vars=data))
return cls.fetch(logged_books) if fetch else logged_books


@classmethod
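The new `page` and `fetch` parameters can be exercised roughly as below. This is an illustrative sketch, not code from the PR: the shelf id (1 for "Want to Read", per the docstring) and the exact shape of the returned rows and solr documents are assumptions.

```python
from openlibrary.core.bookshelves import Bookshelves

# Hypothetical: page 2 of the most-logged "Want to Read" works, 10 per page.
rows = Bookshelves.most_logged_books(shelf_id=1, limit=10, page=2)
# Each row is assumed to look like {'work_id': 123, 'cnt': 42},
# where 123 corresponds to the work OLID OL123W.

# With fetch=True, Bookshelves.fetch enriches each row with a 'work' key
# holding the solr document, plus an injected 'availability' dict.
enriched = Bookshelves.most_logged_books(shelf_id=1, limit=10, page=2, fetch=True)
for row in enriched:
    work = row.get('work')  # may be absent if solr has no record for the work
    if work:
        print(work.get('title'), work.get('availability', {}).get('status'))
```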
28 changes: 26 additions & 2 deletions openlibrary/plugins/openlibrary/api.py
@@ -8,7 +8,7 @@
import re
import json
from collections import defaultdict

from openlibrary.views.loanstats import get_trending_books
from infogami import config
from infogami.utils import delegate
from infogami.utils.view import render_template # noqa: F401 used for its side effects
@@ -60,14 +60,38 @@ def get_book_availability(self, id_type, ids):
else []
)

class trending_books_api(delegate.page):
path = "/trending(/?.*)"
#path = "/trending/(now|daily|weekly|monthly|yearly|forever)"
encoding = "json"

def GET(self, period="/daily"):
from openlibrary.views.loanstats import SINCE_DAYS
period = period[1:] # remove slash
i = web.input(page=1, limit=100)
works = get_trending_books(
since_days=SINCE_DAYS[period],
limit=int(i.limit),
page=int(i.page),
books_only=True
)
result = {
'query': f"/trending/{period}",
'works': [dict(work) for work in works],
}
return delegate.RawText(
json.dumps(result),
content_type="application/json"
)

class browse(delegate.page):
path = "/browse"
encoding = "json"

def GET(self):
i = web.input(
q='', page=1, limit=100, subject='', work_id='', _type='', sorts=''
q='', page=1, limit=100, subject='',
work_id='', _type='', sorts=''
)
sorts = i.sorts.split(',')
page = int(i.page)
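The new endpoint can be queried like any other Open Library JSON API. A rough sketch follows; the host, the use of `requests`, and the fields available on each work are assumptions — only the `query`/`works` envelope comes from the handler above.

```python
import requests  # assumed HTTP client; any client works

BASE = "http://localhost:8080"  # assumption: a local dev instance

# Defaults in the handler are page=1, limit=100.
resp = requests.get(f"{BASE}/trending/weekly.json", params={"page": 2, "limit": 18})
resp.raise_for_status()
data = resp.json()

print(data["query"])              # "/trending/weekly"
for work in data["works"]:
    # Field names come from the solr documents and are assumptions here.
    print(work.get("key"), work.get("title"))
```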
2 changes: 2 additions & 0 deletions openlibrary/templates/home/index.html
@@ -20,6 +20,8 @@
$:render_template("home/welcome", test=test)

$if not test:
$:render_template("books/custom_carousel", books=get_trending_books(books_only=True), title=_('Trending Books'), url="/trending/daily", test=test, load_more={"url": "/trending/daily.json", "mode": "page", "limit": 18})

$:macros.QueryCarousel(query="ddc:8* first_publish_year:[* TO 1950] publish_year:[2000 TO *] -public_scan_b:false", title=_('Classic Books'), key="public_domain", url="/read", sort='random.hourly')

$if monthly_reads and not test:
100 changes: 58 additions & 42 deletions openlibrary/views/loanstats.py
@@ -3,7 +3,8 @@
import web
from infogami.utils import delegate
from ..core.lending import get_availabilities
from ..plugins.worksearch.code import get_solr_works

from infogami.utils.view import public

from ..utils import dateutil
from .. import app
@@ -12,11 +13,20 @@
from ..core.bookshelves import Bookshelves
from ..core.ratings import Ratings
from ..plugins.admin.code import get_counts

from ..plugins.worksearch.code import get_solr_works

LENDING_TYPES = '(libraries|regions|countries|collections|subjects|format)'


SINCE_DAYS = {
'now': 0,
'daily': 1,
'weekly': 7,
'monthly': 30,
'yearly': 365,
'forever': None,
}

def reading_log_summary():
# enable to work w/ cached
if 'env' not in web.ctx:
@@ -32,27 +42,40 @@ def reading_log_summary():
reading_log_summary, 'stats.readling_log_summary', timeout=dateutil.HOUR_SECS
)

def cached_get_most_logged_books(shelf_id=None, since_days=1, limit=20):
return cache.memcache_memoize(
get_most_logged_books, 'stats.trending', timeout=dateutil.HOUR_SECS
)(shelf_id=shelf_id, since_days=since_days, limit=limit)

def get_most_logged_books(shelf_id=None, since_days=1, limit=20):
"""
shelf_id: Bookshelves.PRESET_BOOKSHELVES['Want to Read'|'Already Read'|'Currently Reading']
since: DATE_ONE_YEAR_AGO, DATE_ONE_MONTH_AGO, DATE_ONE_WEEK_AGO, DATE_ONE_DAY_AGO
"""
# enable to work w/ cached
if 'env' not in web.ctx:
delegate.fakeload()
@public
def get_trending_books(since_days=1, limit=18, page=1, books_only=False):
logged_books = (
Bookshelves.fetch(get_activity_stream(limit=limit, page=page)) # i.e. "now"
if since_days == 0 else
Bookshelves.most_logged_books(
since=dateutil.date_n_days_ago(since_days),
limit=limit,
page=page,
fetch=True)
)
return (
[book['work'] for book in logged_books if book.get('work')]
if books_only else logged_books
)

# Return as dict to enable cache serialization
return [dict(book) for book in
Bookshelves.most_logged_books(
shelf_id=shelf_id,
since=dateutil.date_n_days_ago(since_days),
limit=limit)]

def cached_get_most_logged_books(shelf_id=None, since_days=1, limit=20, page=1):
def get_cachable_trending_books(shelf_id=None, since_days=1, limit=20, page=1):
# enable to work w/ cached
if 'env' not in web.ctx:
delegate.fakeload()
# Return as dict to enable cache serialization
return [dict(book) for book in
Bookshelves.most_logged_books(
shelf_id=shelf_id,
since=dateutil.date_n_days_ago(since_days),
limit=limit,
page=page
)]
return cache.memcache_memoize(
get_cachable_trending_books, 'stats.trending',
timeout=dateutil.HOUR_SECS
)(shelf_id=shelf_id, since_days=since_days, limit=limit, page=page)

def reading_log_leaderboard(limit=None):
# enable to work w/ cached
@@ -108,11 +131,11 @@ class lending_stats(app.view):
def GET(self, key, value):
raise web.seeother("/")

def get_activity_stream(limit=None):
def get_activity_stream(limit=None, page=1):
# enable to work w/ cached
if 'env' not in web.ctx:
delegate.fakeload()
return Bookshelves.get_recently_logged_books(limit=limit)
return Bookshelves.get_recently_logged_books(limit=limit, page=page)

def get_cached_activity_stream(limit):
return cache.memcache_memoize(
@@ -127,29 +150,22 @@ class activity_stream(app.view):
def GET(self, page=''):
if not page:
raise web.seeother("/trending/now")
page = page[1:]
page = page[1:] # remove slash
limit = 20
if page == "now":
logged_books = get_activity_stream(limit=limit)
logged_books = Bookshelves.fetch(get_activity_stream(limit=limit))
else:
shelf_id = None # optional; get from web.input()?
logged_books = cached_get_most_logged_books(since_days={
'daily': 1,
'weekly': 7,
'monthly': 30,
'yearly': 365,
'forever': None,
}[page], limit=limit)

work_index = get_solr_works(f"/works/OL{book['work_id']}W" for book in logged_books)
availability_index = get_availabilities(work_index.values())
for work_key in availability_index:
work_index[work_key]['availability'] = availability_index[work_key]
for i, logged_book in enumerate(logged_books):
key = f"/works/OL{logged_book['work_id']}W"
if key in work_index:
logged_books[i]['work'] = work_index[key]
return app.render_template("trending", logged_books=logged_books, mode=page)
logged_books = Bookshelves.fetch(
cached_get_most_logged_books(
since_days=SINCE_DAYS[page],
limit=limit)
)
return app.render_template(
"trending",
logged_books=logged_books,
mode=page
)


class readinglog_stats(app.view):
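Taken together, the home-page template ends up calling something equivalent to the sketch below. It is illustrative only: invoking `get_trending_books` outside a normal request context and the presence of a `title` field on each work are assumptions.

```python
from openlibrary.views.loanstats import get_trending_books, SINCE_DAYS

# since_days=0 ("now") follows the recent-activity path via get_activity_stream;
# any other period goes through Bookshelves.most_logged_books with fetch=True.
works = get_trending_books(
    since_days=SINCE_DAYS['monthly'],  # 30
    limit=18,
    page=1,
    books_only=True,  # return only the solr work dicts, as the carousel needs
)
for work in works:
    print(work.get('title'))
```

Because `get_trending_books` is decorated with `@public`, templates can call it directly, which is how `home/index.html` passes `books=get_trending_books(books_only=True)` to the carousel above.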
