Merge branch 'develop'
woxcab committed Apr 10, 2021
2 parents 8dbcd35 + 08949f5 commit 6b87047
Showing 12 changed files with 78 additions and 21 deletions.
9 changes: 7 additions & 2 deletions .travis.yml
@@ -1,13 +1,18 @@
 sudo: false
+dist: bionic
 language: python
 python:
   - "2.7"
   - "3.5"
   - "3.6"
   - "3.7"
   - "3.8"
-  - "3.9-dev"
+  - "3.9"
+  - "3.10-dev"
+jobs:
+  allow_failures:
+    - python: "3.10-dev"
 install:
-  - pip install -U tox-travis codecov
+  - pip install -U pip tox-travis codecov
 script: COVERAGE_FILE=.coverage tox
 after_success: codecov
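
Two behavioral notes on this config change: the new jobs.allow_failures entry makes the 3.10-dev job advisory, so its failures do not fail the overall build, and pip itself is now upgraded before the test tooling is installed.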
2 changes: 1 addition & 1 deletion README.rst
@@ -7,7 +7,7 @@ scrapy_rss
     :alt: PyPI Version
 
 .. image:: https://img.shields.io/travis/woxcab/scrapy_rss/master.svg
-    :target: http://travis-ci.org/woxcab/scrapy_rss
+    :target: http://travis-ci.com/woxcab/scrapy_rss
     :alt: Build Status
 
 .. image:: https://img.shields.io/badge/wheel-yes-brightgreen.svg
1 change: 0 additions & 1 deletion pytest.ini
@@ -2,7 +2,6 @@
 filterwarnings =
     ignore::DeprecationWarning:nose.*
     ignore::PendingDeprecationWarning:nose.*
-    ignore::DeprecationWarning:frozendict.*
    ignore::DeprecationWarning:scrapy.*
 
 addopts = --maxfail=10 --cov=scrapy_rss --cov-report=term
3 changes: 2 additions & 1 deletion requirements-27.txt
@@ -1,3 +1,4 @@
-python-dateutil
 scrapy<2.0
+packaging
+python-dateutil
 six
3 changes: 2 additions & 1 deletion requirements-36.txt
@@ -1,3 +1,4 @@
-python-dateutil
 scrapy>=1.3.1
+packaging
+python-dateutil
 six
3 changes: 2 additions & 1 deletion requirements.txt
@@ -1,3 +1,4 @@
-python-dateutil
 scrapy>=1.1
+packaging
+python-dateutil
 six
2 changes: 1 addition & 1 deletion scrapy_rss/VERSION
@@ -1 +1 @@
-0.2.1
+0.2.2
4 changes: 2 additions & 2 deletions scrapy_rss/exporters.py
@@ -1,9 +1,9 @@
 # -*- coding: utf-8 -*-
 
-from distutils.version import LooseVersion
 from itertools import chain
 from collections import Counter
 
+from packaging import version
 from datetime import datetime
 from dateutil.tz import tzlocal
 import six
@@ -94,7 +94,7 @@ def __init__(self, file, channel_title, channel_link, channel_description,
             else:
                 self._namespaces[ns_prefix] = ns_uri
 
-    if LooseVersion(scrapy.__version__) < LooseVersion('1.4.0'):  # pragma: no cover
+    if version.parse(scrapy.__version__) < version.parse('1.4.0'):  # pragma: no cover
         def _export_xml_field(self, name, serialized_value, depth):
             return super(RssItemExporter, self)._export_xml_field(name, serialized_value)
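
Why this change is more than cosmetic: distutils (home of LooseVersion) is deprecated by PEP 632, and LooseVersion orders pre-release tags incorrectly (it sorts '1.4.0' before '1.4.0rc1'). A minimal sketch of the PEP 440 ordering that packaging.version provides, assuming only that the packaging dependency added above is installed (the variable name needs_old_signature is illustrative):

    from packaging import version

    # Pre-releases sort before their final release under PEP 440:
    assert version.parse('1.4.0rc1') < version.parse('1.4.0')
    # Numeric components compare as numbers, not strings:
    assert version.parse('1.10.0') > version.parse('1.4.0')

    # The exporter applies the same comparison to decide whether the
    # running scrapy still uses the pre-1.4 _export_xml_field signature:
    import scrapy
    needs_old_signature = version.parse(scrapy.__version__) < version.parse('1.4.0')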
1 change: 0 additions & 1 deletion tests/requirements.txt
@@ -1,3 +1,2 @@
 xmlunittest
 parameterized
-frozendict
7 changes: 3 additions & 4 deletions tests/test_exporter.py
@@ -6,7 +6,6 @@
 from parameterized import parameterized
 
 import six
-from frozendict import frozendict
 from lxml import etree
 import scrapy
 from scrapy import signals
@@ -24,7 +23,7 @@
 from scrapy_rss.exporters import RssItemExporter
 
 import unittest
-from tests.utils import RssTestCase
+from tests.utils import RssTestCase, FrozenDict
 
 
 if six.PY2:
@@ -34,7 +33,7 @@
 
 
 class CrawlerContext(object):
-    default_settings = frozendict({'ITEM_PIPELINES':
+    default_settings = FrozenDict({'ITEM_PIPELINES':
                                        {'scrapy_rss.pipelines.RssExportPipeline': 900,},
                                    'LOG_LEVEL': 'WARNING',
                                    'EXTENSIONS': {
@@ -96,7 +95,7 @@ def __init__(self, file, channel_title, channel_link, channel_description,
                                        docs=docs, ttl=ttl, *args, **kwargs)
 
 
-default_feed_settings = frozendict({'feed_file': os.path.join(os.path.dirname(__file__), 'tmp', 'feed.rss'),
+default_feed_settings = FrozenDict({'feed_file': os.path.join(os.path.dirname(__file__), 'tmp', 'feed.rss'),
                                     'feed_title': 'Title',
                                     'feed_link': 'http://example.com/feed',
                                     'feed_description': 'Description'})
54 changes: 50 additions & 4 deletions tests/utils.py
@@ -13,13 +13,17 @@
 
 from twisted.python.failure import Failure
 from scrapy.pipelines import ItemPipelineManager
-from frozendict import frozendict
 from lxml import etree
 from xmlunittest import XmlTestCase
 
 from scrapy_rss.meta import ItemElement, MultipleElements
 from scrapy_rss.items import RssItem
 
+try:
+    from collections.abc import Mapping
+except ImportError:
+    from collections import Mapping
+
 try:
     from unittest.util import _common_shorten_repr, _shorten
 except ImportError:
@@ -70,6 +74,48 @@ def get_dict_attr(obj, attr):
     raise AttributeError
 
 
+iteritems = getattr(dict, 'iteritems', dict.items)  # py2-3 compatibility
+
+
+class FrozenDict(Mapping):
+    """
+    A simple immutable wrapper around dictionaries.
+    It can be used as a drop-in replacement for dictionaries where immutability is desired.
+    """
+
+    dict_cls = dict
+
+    def __init__(self, *args, **kwargs):
+        self._dict = self.dict_cls(*args, **kwargs)
+        self._hash = None
+
+    def __getitem__(self, key):
+        return self._dict[key]
+
+    def __contains__(self, key):
+        return key in self._dict
+
+    def copy(self, **add_or_replace):
+        return self.__class__(self, **add_or_replace)
+
+    def __iter__(self):
+        return iter(self._dict)
+
+    def __len__(self):
+        return len(self._dict)
+
+    def __repr__(self):
+        return '<%s %r>' % (self.__class__.__name__, self._dict)
+
+    def __hash__(self):
+        if self._hash is None:
+            h = 0
+            for key, value in iteritems(self._dict):
+                h ^= hash((key, value))
+            self._hash = h
+        return self._hash
+
+
 class RaisedItemPipelineManager(ItemPipelineManager):
     def process_item(self, item, spider):
         d = super(RaisedItemPipelineManager, self).process_item(item, spider)
@@ -89,9 +135,9 @@ class UnorderedXmlTestCase(XmlTestCase):
     @classmethod
     def _xml_to_tuple(cls, element):
         return (element.tag,
-                frozendict(element.nsmap),
-                frozendict(element.attrib),
-                frozendict(Counter(t for t in element.itertext() if t.strip())),
+                FrozenDict(element.nsmap),
+                FrozenDict(element.attrib),
+                FrozenDict(Counter(t for t in element.itertext() if t.strip())),
                 frozenset(cls._xml_to_tuple(child) for child in element.getchildren()))
 
     @staticmethod
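
With frozendict dropped from tests/requirements.txt, the test suite now carries this small Mapping-based replacement. A short sketch of how the new class behaves, using only the methods defined above (the example values are illustrative):

    from tests.utils import FrozenDict

    settings = FrozenDict({'LOG_LEVEL': 'WARNING'})
    assert settings['LOG_LEVEL'] == 'WARNING'   # reads work like a plain dict

    # Mapping supplies no __setitem__, so mutation raises TypeError:
    # settings['LOG_LEVEL'] = 'DEBUG'

    # copy() merges keyword overrides into a new instance:
    debug = settings.copy(LOG_LEVEL='DEBUG')
    assert debug['LOG_LEVEL'] == 'DEBUG' and settings['LOG_LEVEL'] == 'WARNING'

    # Equal contents hash equally (XOR of per-pair hashes), so instances
    # with hashable values can serve as dict keys or set members:
    assert hash(settings) == hash(FrozenDict(LOG_LEVEL='WARNING'))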
10 changes: 8 additions & 2 deletions tox.ini
@@ -4,8 +4,9 @@ envlist =
     py34-scrapy{110,113,120,122,130,133,140,150,152,160,170,174}-pytest-pytestcov
     py{27,35}-scrapy{110,113,120,122,130}-pytest-pytestcov
     py27-scrapy{131,133,140,150,152,160,170,174,180}-pytest-pytestcov
-    py{35,36,37,38,39}-scrapy{131,133,140,150,152,160,170,174,180,201,210,220,221}-pytest-pytestcov
-
+    py{35,36,37,38,39}-scrapy{131,133,140,150,152,160,170,174,180,201,210,220,221,230}-pytest-pytestcov
+    py310-scrapy{171,174,180,201,210,220,221,230}-pytest-pytestcov
+    py{36,37,38,39,310}-scrapy{240,241,250}-pytest-pytestcov
 
 [testenv]
 deps =
@@ -23,12 +24,17 @@ deps =
     scrapy152: scrapy==1.5.2
     scrapy160: scrapy==1.6.0
     scrapy170: scrapy==1.7.0
+    scrapy171: scrapy==1.7.1
     scrapy174: scrapy==1.7.4
     scrapy180: scrapy==1.8.0
     scrapy201: scrapy==2.0.1
     scrapy210: scrapy==2.1.0
     scrapy220: scrapy==2.2.0
     scrapy221: scrapy==2.2.1
+    scrapy230: scrapy==2.3.0
+    scrapy240: scrapy==2.4.0
+    scrapy241: scrapy==2.4.1
+    scrapy250: scrapy==2.5.0
     -rtests/requirements.txt
    pytest33: pytest<3.3.0
    pytestcov26: pytest-cov<=2.6.0
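
A single cell of the expanded matrix can be run locally with tox's -e flag, e.g. tox -e py39-scrapy250-pytest-pytestcov (assuming tox is installed and a matching interpreter is on PATH); on Travis, tox-travis selects the environments that match each job's Python version.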
