Skip to content

Commit

Permalink
Merge pull request #1486 from half-duplex/url-rules
Browse files Browse the repository at this point in the history
Replace url_callbacks with @url decorators
  • Loading branch information
dgw authored May 23, 2019
2 parents 1220067 + 7e935df commit a14c145
Show file tree
Hide file tree
Showing 3 changed files with 18 additions and 50 deletions.
17 changes: 4 additions & 13 deletions sopel/modules/instagram.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,6 @@
from __future__ import unicode_literals, absolute_import, print_function, division

from datetime import datetime
import re

from requests import get

Expand All @@ -21,24 +20,16 @@
from json import loads


# Legacy pattern (matches the whole message line) — still referenced by the
# setup()/shutdown() url-callback registration below; presumably superseded
# by INSTAGRAM_REGEX for the @module.url decorator — verify before removing.
instagram_regex = r'.*(https?:\/\/(?:www\.){0,1}instagram\.com\/([a-zA-Z0-9_\.]{,30}\/)?p\/[a-zA-Z0-9_-]+)\s?.*'
instagram_pattern = re.compile(instagram_regex)
# Pattern for the @module.url decorator: group(1) is the Instagram post URL,
# group(2) an optional "<username>/" path segment before "p/".
INSTAGRAM_REGEX = r'(https?:\/\/(?:www\.){0,1}instagram\.com\/([a-zA-Z0-9_\.]{,30}\/)?p\/[a-zA-Z0-9_-]+)'


def setup(bot):
    """Register the Instagram URL callback with the bot."""
    bot.register_url_callback(instagram_pattern, instaparse)


def shutdown(bot):
    """Unregister the URL callback installed by setup()."""
    bot.unregister_url_callback(instagram_pattern)

# TODO: Parse Instagram profile page


@module.url(INSTAGRAM_REGEX)
def instaparse(bot, trigger, match):
    """Fetch a linked Instagram post's embedded data and say a summary.

    :param bot: the Sopel bot instance
    :param trigger: the IRC line that contained the link
    :param match: regex match for INSTAGRAM_REGEX; group(1) is the post URL
    """
    # Fetch the JSON embedded in the post page, then format and announce it.
    # (Removed: a leftover duplicate fetch via trigger.group(1) whose result
    # was discarded, and a stale @module.rule decorator from before @url.)
    post_json = get_insta_json(match.group(1))
    bot.say(parse_insta_json(post_json))


Expand Down
34 changes: 9 additions & 25 deletions sopel/modules/reddit.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,7 @@
import praw

from sopel.formatting import bold, color, colors
from sopel.module import commands, rule, example, require_chanmsg, NOLIMIT, OP
from sopel.module import commands, example, require_chanmsg, url, NOLIMIT, OP
from sopel.tools import time
from sopel.web import USER_AGENT

Expand All @@ -36,30 +36,15 @@
# Raw URL patterns; %s is the reddit domain pattern defined above.
# group(1) of post_url/short_post_url is the submission id.
post_url = r'%s/r/.*?/comments/([\w-]+)' % domain
short_post_url = r'https?://redd.it/([\w-]+)'
# user_url: group(1) is the optional "ser" of "user", group(2) the username.
user_url = r'%s/u(ser)?/([\w-]+)' % domain
# Precompiled forms used when registering URL callbacks in setup().
post_regex = re.compile(post_url)
short_post_regex = re.compile(short_post_url)
user_regex = re.compile(user_url)
# Subreddits treated as spoiler-prone — presumably their post titles are
# withheld/masked when linked; verify against the post-info handler.
spoiler_subs = [
    'stevenuniverse',
    'onepunchman',
]


def setup(bot):
    """Register URL callbacks for reddit post, short-post, and user links."""
    callbacks = (
        (post_regex, rpost_info),
        (short_post_regex, rpost_info),
        (user_regex, redditor_info),
    )
    # Same registrations, same order, as the original explicit calls.
    for pattern, handler in callbacks:
        bot.register_url_callback(pattern, handler)


def shutdown(bot):
    """Remove the URL callbacks installed by setup()."""
    for pattern in (post_regex, short_post_regex, user_regex):
        bot.unregister_url_callback(pattern)


@rule('.*%s.*' % post_url)
@rule('.*%s.*' % short_post_url)
def rpost_info(bot, trigger, match=None):
@url(post_url)
@url(short_post_url)
def rpost_info(bot, trigger, match):
match = match or trigger
try:
r = praw.Reddit(
Expand Down Expand Up @@ -142,9 +127,8 @@ def redditor_info(bot, trigger, match=None):
if commanded:
bot.say('No such Redditor.')
return NOLIMIT
else:
return
# Fail silently if it wasn't an explicit command.
return

message = '[REDDITOR] ' + u.name
now = dt.datetime.utcnow()
Expand All @@ -156,7 +140,7 @@ def redditor_info(bot, trigger, match=None):
year_div_by_4 = now.year % 4 == 0
is_leap = year_div_by_400 or ((not year_div_by_100) and year_div_by_4)
if (not is_leap) and ((cakeday_start.month, cakeday_start.day) == (2, 29)):
# If cake day is 2/29 and it's not a leap year, cake day is 3/1.
# Cake day begins at exact account creation time.
is_cakeday = cakeday_start + day <= now <= cakeday_start + (2 * day)
else:
Expand All @@ -177,9 +161,9 @@ def redditor_info(bot, trigger, match=None):


# If you change the groups here, you'll have to change some things above.
@url(user_url)
def auto_redditor_info(bot, trigger, match):
    """Handle a linked redditor profile URL by delegating to redditor_info().

    :param bot: the Sopel bot instance
    :param trigger: the IRC line that contained the link
    :param match: regex match for user_url; group(2) is the username
    """
    # Removed: a leftover superseded @rule definition of the same name that
    # dropped the match argument — it conflicted with this @url handler.
    redditor_info(bot, trigger, match)


@require_chanmsg('.setsfw is only permitted in channels')
Expand Down
17 changes: 5 additions & 12 deletions sopel/modules/wikipedia.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,7 @@
from requests import get

from sopel.config.types import StaticSection, ValidatedAttribute
from sopel.module import NOLIMIT, commands, example, rule
from sopel.module import NOLIMIT, commands, example, url

if sys.version_info.major < 3:
from urllib import quote as _quote
Expand All @@ -30,7 +30,7 @@ def unquote(s):


# Matches the wikitext marker on redirect pages: "REDIRECT <target>".
REDIRECT = re.compile(r'^REDIRECT (.*)')
# Pattern for the @url decorator: group(1) is the wiki host
# (e.g. "en.wikipedia.org"), group(2) the article title; "File:" pages
# are excluded via the negative lookahead.
# (Removed a dead store: WIKIPEDIA_REGEX was first bound to an old compiled
# pattern and then immediately rebound to this raw string.)
WIKIPEDIA_REGEX = r'/([a-z]+\.wikipedia\.org)/wiki/((?!File\:)[^ ]+)'


class WikipediaSection(StaticSection):
Expand All @@ -42,11 +42,6 @@ class WikipediaSection(StaticSection):

def setup(bot):
    """Define the [wikipedia] config section and register the URL callback."""
    bot.config.define_section('wikipedia', WikipediaSection)
    # NOTE(review): WIKIPEDIA_REGEX is a raw pattern string at this point —
    # confirm register_url_callback accepts uncompiled patterns.
    bot.register_url_callback(WIKIPEDIA_REGEX, mw_info)


def shutdown(bot):
    """Unregister the URL callback installed by setup()."""
    bot.unregister_url_callback(WIKIPEDIA_REGEX)


def configure(config):
Expand Down Expand Up @@ -77,8 +72,7 @@ def mw_search(server, query, num):
if 'query' in query:
query = query['query']['search']
return [r['title'] for r in query]
else:
return None
return None


def say_snippet(bot, trigger, server, query, show_url=True):
Expand Down Expand Up @@ -115,10 +109,9 @@ def mw_snippet(server, query):
return snippet['extract']


@url(WIKIPEDIA_REGEX)
def mw_info(bot, trigger, match):
    """Retrieves and outputs a snippet from the linked page.

    :param bot: the Sopel bot instance
    :param trigger: the IRC line that contained the link
    :param match: regex match for WIKIPEDIA_REGEX; group(1) is the wiki
        host, group(2) the URL-encoded article title
    """
    # Removed: a leftover superseded @rule decorator and old bodyless
    # signature (found_match fallback) from before the @url conversion.
    say_snippet(bot, trigger, match.group(1), unquote(match.group(2)), show_url=False)


Expand Down

0 comments on commit a14c145

Please sign in to comment.