From 422bf6745658f2f3b86f85f5477e30e92d89486a Mon Sep 17 00:00:00 2001 From: Wu Haotian Date: Sat, 22 Feb 2020 23:24:46 +0800 Subject: [PATCH 1/3] refactor: drop support for Python 2, drop requirement for six package --- .circleci/config.yml | 36 -- .travis.yml | 6 - requirements.txt | 3 +- setup.py | 3 +- tests/fake_sae.py | 606 --------------------------------- tests/messages/test_entries.py | 9 +- tests/test_client.py | 137 +++++--- tests/test_session.py | 35 -- tests/test_utils.py | 15 +- tox-win.ini | 2 +- tox.ini | 2 +- werobot/client.py | 2 +- werobot/crypto/__init__.py | 1 - werobot/crypto/exceptions.py | 1 - werobot/exceptions.py | 1 - werobot/logger.py | 27 +- werobot/messages/events.py | 4 +- werobot/messages/messages.py | 4 +- werobot/parser.py | 1 - werobot/robot.py | 4 +- werobot/utils.py | 21 +- 21 files changed, 133 insertions(+), 787 deletions(-) delete mode 100644 tests/fake_sae.py diff --git a/.circleci/config.yml b/.circleci/config.yml index a5fe778b..87d0019b 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -1,22 +1,5 @@ version: 2 jobs: - python2.7: - docker: - - image: python:2 - working_directory: ~/werobot - steps: - - checkout - - run: cat requirements.txt dev-requirements.txt tox-requirements.txt > dependency.txt - - restore_cache: - key: v1-python2-{{ checksum "dependency.txt" }} - - run: - command: | - cat dev-requirements.txt | grep tox== | xargs pip install codecov - tox -l | grep py2 | tr '\n' ',' | xargs tox -e && codecov - - save_cache: - key: v1-python2-{{ checksum "dependency.txt" }} - paths: - - .tox python3.5: docker: - image: python:3.5 @@ -85,23 +68,6 @@ jobs: key: v1-python3.8-{{ checksum "dependency.txt" }} paths: - .tox - pypy: - docker: - - image: pypy:2 - working_directory: ~/werobot - steps: - - checkout - - run: cat requirements.txt dev-requirements.txt tox-requirements.txt > dependency.txt - - restore_cache: - key: v1-pypy-{{ checksum "dependency.txt" }} - - run: - command: | - cat dev-requirements.txt | grep tox== | xargs pip install codecov - tox -l | grep pypy | tr '\n' ',' | xargs tox -e && codecov - - save_cache: - key: v1-pypy-{{ checksum "dependency.txt" }} - paths: - - .tox docs: docker: - image: python:3.7 @@ -133,11 +99,9 @@ workflows: version: 2 test: jobs: - - python2.7 - python3.5 - python3.6 - python3.7 - python3.8 - - pypy - docs - lint diff --git a/.travis.yml b/.travis.yml index f022fbc3..b865cdcf 100644 --- a/.travis.yml +++ b/.travis.yml @@ -9,12 +9,6 @@ matrix: - env: - PYTHON_MAJOR="py36|py37" - PYTHON_VERSION="3.6.5 3.7.0" - - env: - - PYTHON_MAJOR=py2 - - PYTHON_VERSION="2.7.14" - - env: - - PYTHON_MAJOR=pypy - - PYTHON_VERSION="pypy2.7-7.2.0" cache: directories: - $HOME/.pyenv diff --git a/requirements.txt b/requirements.txt index f9ee0374..76fe8a89 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,4 +1,3 @@ bottle requests -six -xmltodict \ No newline at end of file +xmltodict diff --git a/setup.py b/setup.py index bc2ea736..9082f81d 100755 --- a/setup.py +++ b/setup.py @@ -50,13 +50,12 @@ def run_tests(self): 'Operating System :: POSIX', 'Operating System :: POSIX :: Linux', 'Programming Language :: Python', - 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: 3.5', 'Programming Language :: Python :: 3.6', 'Programming Language :: Python :: 3.7', + 'Programming Language :: Python :: 3.8', 'Programming Language :: Python :: Implementation :: CPython', - 'Programming Language :: Python :: Implementation :: PyPy', 'Intended 
Audience :: Developers', 'Topic :: Software Development :: Libraries', 'Topic :: Software Development :: Libraries :: Python Modules', diff --git a/tests/fake_sae.py b/tests/fake_sae.py deleted file mode 100644 index db43809d..00000000 --- a/tests/fake_sae.py +++ /dev/null @@ -1,606 +0,0 @@ -#!/usr/bin/env python -""" -Fake client for sae kvdb service. - -This should give you a feel for how this module operates:: - - import kvdb - kv = kvdb.KVClient() - - kv.set("some_key", "Some value") - value = kv.get("some_key") - - kv.set("another_key", 3) - kv.delete("another_key") -""" - -import sys -import os -import six -import time -import re -import pickle - -SERVER_MAX_KEY_LENGTH = 250 -# Storing values larger than 1MB requires recompiling memcached. If you do, -# this value can be changed by doing "memcache.SERVER_MAX_VALUE_LENGTH = N" -# after importing this module. -SERVER_MAX_VALUE_LENGTH = 1024 * 1024 - - -class _Error(Exception): - pass - - -class _ConnectionDeadError(Exception): - pass - - -class _CacheEntry(object): - def __init__(self, value, flags, expiration): - self.value = value - self.flags = flags - self.created_time = time.time() - self.will_expire = expiration != 0 - self.locked = False - self._set_expiration(expiration) - - def _set_expiration(self, expiration): - if expiration > (86400 * 30): - self.expiration = expiration - else: - self.expiration = self.created_time + expiration - - def is_expired(self): - return self.will_expire and time.time() > self.expiration - - -class local(object): - pass - - -_DEAD_RETRY = 30 # number of seconds before retrying a dead server. -_SOCKET_TIMEOUT = 3 # number of seconds before sockets timeout. - -_cache = {} - - -class Client(local): - """ - Object representing a pool of memcache servers. - - See L{memcache} for an overview. - - In all cases where a key is used, the key can be either: - 1. A simple hashable type (string, integer, etc.). - 2. A tuple of C{(hashvalue, key)}. This is useful if you want to avoid - making this module calculate a hash value. You may prefer, for - example, to keep all of a given user's objects on the same memcache - server, so you could use the user's unique id as the hash value. - - @group Setup: __init__, set_servers, forget_dead_hosts, disconnect_all, debuglog - @group Insertion: set, add, replace, set_multi - @group Retrieval: get, get_multi - @group Integers: incr, decr - @group Removal: delete, delete_multi - @sort: __init__, set_servers, forget_dead_hosts, disconnect_all, debuglog,\ - set, set_multi, add, replace, get, get_multi, incr, decr, delete, delete_multi - """ - _FLAG_PICKLE = 1 << 0 - _FLAG_INTEGER = 1 << 1 - _FLAG_LONG = 1 << 2 - _FLAG_COMPRESSED = 1 << 3 - - _SERVER_RETRIES = 10 # how many times to try finding a free server. - - # exceptions for Client - class MemcachedKeyError(Exception): - pass - - class MemcachedKeyLengthError(MemcachedKeyError): - pass - - class MemcachedKeyCharacterError(MemcachedKeyError): - pass - - class MemcachedKeyNoneError(MemcachedKeyError): - pass - - class MemcachedKeyTypeError(MemcachedKeyError): - pass - - class MemcachedStringEncodingError(Exception): - pass - - def __init__( - self, - servers=[], - debug=0, - pickleProtocol=0, - pickler=pickle.Pickler, - unpickler=pickle.Unpickler, - pload=None, - pid=None, - server_max_key_length=SERVER_MAX_KEY_LENGTH, - server_max_value_length=SERVER_MAX_VALUE_LENGTH, - dead_retry=_DEAD_RETRY, - socket_timeout=_SOCKET_TIMEOUT, - cache_cas=False - ): - """ - Create a new Client object with the given list of servers. 
- - @param servers: C{servers} is passed to L{set_servers}. - @param debug: whether to display error messages when a server can't be - contacted. - @param pickleProtocol: number to mandate protocol used by (c)Pickle. - @param pickler: optional override of default Pickler to allow subclassing. - @param unpickler: optional override of default Unpickler to allow subclassing. - @param pload: optional persistent_load function to call on pickle loading. - Useful for cPickle since subclassing isn't allowed. - @param pid: optional persistent_id function to call on pickle storing. - Useful for cPickle since subclassing isn't allowed. - @param dead_retry: number of seconds before retrying a blacklisted - server. Default to 30 s. - @param socket_timeout: timeout in seconds for all calls to a server. Defaults - to 3 seconds. - @param cache_cas: (default False) If true, cas operations will be - cached. WARNING: This cache is not expired internally, if you have - a long-running process you will need to expire it manually via - "client.reset_cas(), or the cache can grow unlimited. - @param server_max_key_length: (default SERVER_MAX_KEY_LENGTH) - Data that is larger than this will not be sent to the server. - @param server_max_value_length: (default SERVER_MAX_VALUE_LENGTH) - Data that is larger than this will not be sent to the server. - """ - local.__init__(self) - self.debug = debug - self.cache_cas = cache_cas - self.reset_cas() - - # Allow users to modify pickling/unpickling behavior - self.server_max_key_length = server_max_key_length - self.server_max_value_length = server_max_value_length - - _cache = {} - - self.reset_stats() - - def reset_stats(self): - self._get_hits = 0 - self._get_misses = 0 - self._cmd_set = 0 - self._cmd_get = 0 - - def reset_cas(self): - """ - Reset the cas cache. This is only used if the Client() object - was created with "cache_cas=True". If used, this cache does not - expire internally, so it can grow unbounded if you do not clear it - yourself. - """ - self.cas_ids = {} - - def set_servers(self, servers): - """ - Set the pool of servers used by this client. - - @param servers: an array of servers. - Servers can be passed in two forms: - 1. Strings of the form C{"host:port"}, which implies a default weight of 1. - 2. Tuples of the form C{("host:port", weight)}, where C{weight} is - an integer weight value. - """ - pass - - def get_info(self, stat_args=None): - '''Get statistics from each of the servers. - - @param stat_args: Additional arguments to pass to the memcache - "stats" command. - - @return: A list of tuples ( server_identifier, stats_dictionary ). - The dictionary contains a number of name/value pairs specifying - the name of the status field and the string value associated with - it. The values are not converted from strings. - ''' - - info = { - 'outbytes': 41, - 'total_size': 22, - 'inbytes': 62, - 'set_count': 16, - 'delete_count': 0, - 'total_count': 4, - 'get_count': 11 - } - - return info - - def debuglog(self, str): - if self.debug: - sys.stderr.write("MemCached: %s\n" % str) - - def forget_dead_hosts(self): - """ - Reset every host in the pool to an "alive" state. - """ - pass - - def disconnect_all(self): - pass - - def delete(self, key): - '''Deletes a key from the memcache. - - @return: Nonzero on success. - ''' - if key not in _cache: - return False - del _cache[key] - return True - - def add(self, key, val, time=0, min_compress_len=0): - ''' - Add new key with value. - - Like L{set}, but only stores in memcache if the key doesn't already exist. 
- - @return: Nonzero on success. - @rtype: int - ''' - return self._set("add", key, val, time, min_compress_len) - - def replace(self, key, val, time=0, min_compress_len=0): - '''Replace existing key with value. - - Like L{set}, but only stores in memcache if the key already exists. - The opposite of L{add}. - - @return: Nonzero on success. - @rtype: int - ''' - return self._set("replace", key, val, time, min_compress_len) - - def set(self, key, val, time=0, min_compress_len=0): - '''Unconditionally sets a key to a given value in the memcache. - - The C{key} can optionally be an tuple, with the first element - being the server hash value and the second being the key. - If you want to avoid making this module calculate a hash value. - You may prefer, for example, to keep all of a given user's objects - on the same memcache server, so you could use the user's unique - id as the hash value. - - @return: Nonzero on success. - @rtype: int - @param time: Tells memcached the time which this value should expire, either - as a delta number of seconds, or an absolute unix time-since-the-epoch - value. See the memcached protocol docs section "Storage Commands" - for more info on . We default to 0 == cache forever. - @param min_compress_len: The threshold length to kick in auto-compression - of the value using the zlib.compress() routine. If the value being cached is - a string, then the length of the string is measured, else if the value is an - object, then the length of the pickle result is measured. If the resulting - attempt at compression yeilds a larger string than the input, then it is - discarded. For backwards compatability, this parameter defaults to 0, - indicating don't ever try to compress. - ''' - return self._set("set", key, val, time, min_compress_len) - - def _set(self, cmd, key, val, time, min_compress_len=0): - self.check_key(key) - - self._cmd_set += 1 - - key_exists = key in _cache - - if ( - (cmd == 'add' and key_exists) - or (cmd == 'replace' and not key_exists) - or (cmd == 'prepend' and not key_exists) - or (cmd == 'append' and not key_exists) - ): - return False - - if cmd == 'prepend': - new_val = val + _cache[key].value - elif cmd == 'append': - new_val = _cache[key].value + val - else: - new_val = val - - _cache[key] = _CacheEntry(new_val, 0, time) - return True - - def _get(self, cmd, key): - self.check_key(key) - - self._cmd_get += 1 - - if key in _cache: - entry = _cache[key] - if not entry.is_expired(): - self._get_hits += 1 - return entry.value - self._get_misses += 1 - return None - - def get(self, key): - '''Retrieves a key from the memcache. - - @return: The value or None. - ''' - return self._get('get', key) - - def get_multi(self, keys, key_prefix=''): - ''' - Retrieves multiple keys from the memcache doing just one query. - - >>> success = mc.set("foo", "bar") - >>> success = mc.set("baz", 42) - >>> mc.get_multi(["foo", "baz", "foobar"]) == {"foo": "bar", "baz": 42} - 1 - - get_mult [ and L{set_multi} ] can take str()-ables like ints / longs as keys too. Such as your db pri key fields. - They're rotored through str() before being passed off to memcache, with or without the use of a key_prefix. - In this mode, the key_prefix could be a table name, and the key itself a db primary key number. - - This method is recommended over regular L{get} as it lowers the number of - total packets flying around your network, reducing total latency, since - your app doesn't have to wait for each round-trip of L{get} before sending - the next one. - - See also L{set_multi}. 
- - @param keys: An array of keys. - @param key_prefix: A string to prefix each key when we communicate with memcache. - Facilitates pseudo-namespaces within memcache. Returned dictionary keys will not have this prefix. - @return: A dictionary of key/value pairs that were available. If key_prefix was provided, the keys in the retured dictionary will not have it present. - - ''' - retval = {} - for e in keys: - _key = key_prefix + str(e) - val = self._get('get', _key) - if val is not None: - retval[e] = val - return retval - - def get_by_prefix( - self, prefix, limit=None, max_count=None, marker=None, start_key=None - ): - ''' - >>> success = mc.set('k1', 1) - >>> success = mc.set('k2', 2) - >>> success = mc.set('xyz', 'xxxxxxx') - >>> mc.get_by_prefix('k') == [('k2', 2), ('k1', 1)] - 1 - - ''' - start_key = marker or start_key - max_count = limit or max_count or 100 - - ignore = False - if start_key is not None: - ignore = True - - for k, e in _cache.items(): - if ignore: - if k == start_key: - ignore = False - continue - - if e.is_expired(): - continue - - if max_count <= 0: break - - if str(k).startswith(prefix): - max_count -= 1 - yield k, e.value - - def getkeys_by_prefix( - self, prefix, limit=None, max_count=None, marker=None, start_key=None - ): - max_count = limit or max_count - marker = marker or start_key - kv = self.get_by_prefix(prefix, max_count, marker=marker) - return [e[0] for e in kv] - - def check_key(self, key, key_extra_len=0): - """Checks sanity of key. Fails if: - Key length is > SERVER_MAX_KEY_LENGTH (Raises MemcachedKeyLength). - Contains control characters (Raises MemcachedKeyCharacterError). - Is not a string (Raises MemcachedStringEncodingError) - Is an unicode string (Raises MemcachedStringEncodingError) - Is not a string (Raises MemcachedKeyError) - Is None (Raises MemcachedKeyError) - """ - if isinstance(key, tuple): key = key[1] - if not key: - raise Client.MemcachedKeyNoneError("Key is None") - if six.PY2: - if isinstance(key, six.text_type): - raise Client.MemcachedStringEncodingError( - "Keys must be str()'s, not unicode. Convert your unicode " - "strings using mystring.encode(charset)!" - ) - if not isinstance(key, str): - raise Client.MemcachedKeyTypeError("Key must be str()'s") - - if isinstance(key, str): - if self.server_max_key_length != 0 and \ - len(key) + key_extra_len > self.server_max_key_length: - raise Client.MemcachedKeyLengthError( - "Key length is > %s" % self.server_max_key_length - ) - for char in key: - if ord(char) < 33 or ord(char) == 127: - raise Client.MemcachedKeyCharacterError( - "Control characters not allowed" - ) - - -KVClient = Client - - -def _doctest(): - import doctest - import werobot.tests.fake_sae as kvdb - servers = ["127.0.0.1:11211"] - mc = Client(servers, debug=1) - globs = {"mc": mc} - return doctest.testmod(kvdb, globs=globs) - - -if __name__ == "__main__": - failures = 0 - print("Testing docstrings...") - _doctest() - print("Running tests:") - print() - serverList = [["127.0.0.1:11211"]] - if '--do-unix' in sys.argv: - serverList.append([os.path.join(os.getcwd(), 'memcached.socket')]) - - for servers in serverList: - mc = KVClient(servers, debug=1) - - def to_s(val): - if not isinstance(val, str): - return "%s (%s)" % (val, type(val)) - return "%s" % val - - def test_setget(key, val): - global failures - print("Testing set/get {'%s': %s} ..." 
% (to_s(key), to_s(val))) - mc.set(key, val) - newval = mc.get(key) - if newval == val: - print("OK") - return 1 - else: - print("FAIL") - failures = failures + 1 - return 0 - - class FooStruct(object): - def __init__(self): - self.bar = "baz" - - def __str__(self): - return "A FooStruct" - - def __eq__(self, other): - if isinstance(other, FooStruct): - return self.bar == other.bar - return 0 - - test_setget("a_string", "some random string") - test_setget("an_integer", 42) - if test_setget("long", int(1 << 30)): - print("Testing delete ...") - if mc.delete("long"): - print("OK") - else: - print("FAIL") - failures = failures + 1 - print("Checking results of delete ...") - if mc.get("long") == None: - print("OK") - else: - print("FAIL") - failures = failures + 1 - print("Testing get_multi ...") - print(mc.get_multi(["a_string", "an_integer"])) - - # removed from the protocol - # if test_setget("timed_delete", 'foo'): - # print "Testing timed delete ...", - # if mc.delete("timed_delete", 1): - # print "OK" - # else: - # print "FAIL"; failures = failures + 1 - # print "Checking results of timed delete ..." - # if mc.get("timed_delete") == None: - # print "OK" - # else: - # print "FAIL"; failures = failures + 1 - - print("Testing get(unknown value) ...") - print(to_s(mc.get("unknown_value"))) - - f = FooStruct() - test_setget("foostruct", f) - - # print "Testing incr ...", - # x = mc.incr("an_integer", 1) - # if x == 43: - # print "OK" - # else: - # print "FAIL"; failures = failures + 1 - - # print "Testing decr ...", - # x = mc.decr("an_integer", 1) - # if x == 42: - # print "OK" - # else: - # print "FAIL"; failures = failures + 1 - sys.stdout.flush() - - # sanity tests - print("Testing sending spaces...") - sys.stdout.flush() - try: - x = mc.set("this has spaces", 1) - except Client.MemcachedKeyCharacterError as msg: - print("OK") - else: - print("FAIL") - failures = failures + 1 - - print("Testing sending control characters...") - try: - x = mc.set("this\x10has\x11control characters\x02", 1) - except Client.MemcachedKeyCharacterError as msg: - print("OK") - else: - print("FAIL") - failures = failures + 1 - - print("Testing using insanely long key...") - try: - x = mc.set('a' * SERVER_MAX_KEY_LENGTH, 1) - except Client.MemcachedKeyLengthError as msg: - print("FAIL") - failures = failures + 1 - else: - print("OK") - try: - x = mc.set('a' * SERVER_MAX_KEY_LENGTH + 'a', 1) - except Client.MemcachedKeyLengthError as msg: - print("OK") - -db_file = os.environ.get('sae.kvdb.file') -if db_file: - import pickle - - def _save_cache(): - # XXX: reloader should not do this - if not os.environ.get('sae.run_main'): return - try: - pickle.dump(_cache, open(db_file, 'wb')) - except Exception as e: - print("save kvdb to '%s' failed: %s" % (db_file, str(e))) - - def _restore_cache(): - try: - _cache.update(pickle.load(open(db_file, 'rb'))) - except Exception as e: - print("load kvdb from '%s' failed: %s" % (db_file, str(e))) - - import atexit - - atexit.register(_save_cache) - _restore_cache() diff --git a/tests/messages/test_entries.py b/tests/messages/test_entries.py index 309085d9..3dd23ec6 100644 --- a/tests/messages/test_entries.py +++ b/tests/messages/test_entries.py @@ -1,6 +1,5 @@ # -*- coding: utf-8 -*- -import six from werobot.messages.entries import get_value, StringEntry, FloatEntry, IntEntry from werobot.utils import to_text @@ -58,13 +57,13 @@ def test_float_entry(): def test_string_entry(): - assert isinstance(t.test_string, six.text_type) + assert isinstance(t.test_string, str) assert 
t.test_string == "hello" - assert isinstance(t.test_int_to_string, six.text_type) + assert isinstance(t.test_int_to_string, str) assert t.test_int_to_string == "123" - assert isinstance(t.test_float_to_string, six.text_type) + assert isinstance(t.test_float_to_string, str) assert t.test_float_to_string == "1e-05" - assert isinstance(t.test_chinese, six.text_type) + assert isinstance(t.test_chinese, str) assert t.test_chinese == to_text("喵") assert t.test_string_none is None diff --git a/tests/test_client.py b/tests/test_client.py index 7dd3c1a3..9c9b96cc 100644 --- a/tests/test_client.py +++ b/tests/test_client.py @@ -5,19 +5,14 @@ import pytest import requests import multipart -from six.moves import urllib -from six import BytesIO +import urllib.parse +from io import BytesIO from werobot import WeRoBot from werobot.config import Config from werobot.client import Client, check_error, ClientException from werobot.utils import cached_property -try: - import urllib.parse as urlparse -except ImportError: - import urlparse - BASE_DIR = os.path.dirname(os.path.abspath(__file__)) GOD_PIC = os.path.join(os.path.dirname(__file__), '照桥心美.png') TOKEN_URL = "https://api.weixin.qq.com/cgi-bin/token" @@ -105,7 +100,9 @@ def test_client_request(self): DATA_EXISTS_URL = "http://data-exists.werobot.com/" def empty_params_callback(request): - params = urlparse.parse_qs(urlparse.urlparse(request.url).query) + params = urllib.parse.parse_qs( + urllib.parse.urlparse(request.url).query + ) assert params["access_token"][0] == self.client.token return 200, JSON_HEADER, json.dumps({"test": "test"}) @@ -469,7 +466,9 @@ class TestClientUserInfo(BaseTestClass): @staticmethod def single_user_callback(request): - params = urlparse.parse_qs(urlparse.urlparse(request.url).query) + params = urllib.parse.parse_qs( + urllib.parse.urlparse(request.url).query + ) assert "access_token" in params.keys() assert "openid" in params.keys() assert "lang" in params.keys() @@ -512,7 +511,9 @@ class TestClientGetFollowersClass(BaseTestClass): @staticmethod def get_followers_callback(request): - params = urlparse.parse_qs(urlparse.urlparse(request.url).query) + params = urllib.parse.parse_qs( + urllib.parse.urlparse(request.url).query + ) assert "access_token" in params.keys() assert "next_openid" in params.keys() return 200, JSON_HEADER, json.dumps({"errcode": 0, "errmsg": "ok"}) @@ -673,13 +674,17 @@ class TestClientResourceClass(BaseTestClass): @staticmethod def upload_callback(request): - params = urlparse.parse_qs(urlparse.urlparse(request.url).query) + params = urllib.parse.parse_qs( + urllib.parse.urlparse(request.url).query + ) assert "type" in params.keys() return 200, JSON_HEADER, json.dumps({"errcode": 0, "errmsg": "ok"}) @staticmethod def download_callback(request): - params = urlparse.parse_qs(urlparse.urlparse(request.url).query) + params = urllib.parse.parse_qs( + urllib.parse.urlparse(request.url).query + ) assert "media_id" in params.keys() return 200, JSON_HEADER, json.dumps({"errcode": 0, "errmsg": "ok"}) @@ -699,20 +704,26 @@ def add_news_callback(request): @staticmethod def upload_picture_callback(request): - params = urlparse.parse_qs(urlparse.urlparse(request.url).query) + params = urllib.parse.parse_qs( + urllib.parse.urlparse(request.url).query + ) assert "access_token" in params.keys() return 200, JSON_HEADER, json.dumps({"errcode": 0, "errmsg": "ok"}) @staticmethod def upload_p_media_callback(request): - params = urlparse.parse_qs(urlparse.urlparse(request.url).query) + params = urllib.parse.parse_qs( + 
urllib.parse.urlparse(request.url).query + ) assert "access_token" in params.keys() assert "type" in params.keys() return 200, JSON_HEADER, json.dumps({"errcode": 0, "errmsg": "ok"}) @staticmethod def download_p_media_callback(request): - params = urlparse.parse_qs(urlparse.urlparse(request.url).query) + params = urllib.parse.parse_qs( + urllib.parse.urlparse(request.url).query + ) assert "access_token" in params.keys() body = json.loads(request.body.decode("utf-8")) assert "media_id" in body.keys() @@ -834,7 +845,9 @@ class TestUploadVideoClass(BaseTestClass): @staticmethod def upload_video_callback(request): - params = urlparse.parse_qs(urlparse.urlparse(request.url).query) + params = urllib.parse.parse_qs( + urllib.parse.urlparse(request.url).query + ) assert "type" in params.keys() assert params["type"][0] == "video" return 200, JSON_HEADER, json.dumps({"errcode": 0, "errmsg": "ok"}) @@ -859,7 +872,9 @@ class TestMediaClass(BaseTestClass): @staticmethod def get_media_callback(request): - params = urlparse.parse_qs(urlparse.urlparse(request.url).query) + params = urllib.parse.parse_qs( + urllib.parse.urlparse(request.url).query + ) assert "access_token" in params.keys() return 200, JSON_HEADER, json.dumps({"errcode": 0, "errmsg": "ok"}) @@ -897,7 +912,9 @@ class TestGetIpListClass(BaseTestClass): @staticmethod def get_ip_list_callback(request): - params = urlparse.parse_qs(urlparse.urlparse(request.url).query) + params = urllib.parse.parse_qs( + urllib.parse.urlparse(request.url).query + ) assert "access_token" in params.keys() return 200, JSON_HEADER, json.dumps({"errcode": 0, "errmsg": "ok"}) @@ -929,13 +946,17 @@ def add_update_delete_callback(request): @staticmethod def upload_callback(request): - params = urlparse.parse_qs(urlparse.urlparse(request.url).query) + params = urllib.parse.parse_qs( + urllib.parse.urlparse(request.url).query + ) assert "access_token" in params.keys() return 200, JSON_HEADER, json.dumps({"errcode": 0, "errmsg": "ok"}) @staticmethod def get_callback(request): - params = urlparse.parse_qs(urlparse.urlparse(request.url).query) + params = urllib.parse.parse_qs( + urllib.parse.urlparse(request.url).query + ) assert "access_token" in params.keys() return 200, JSON_HEADER, json.dumps({"errcode": 0, "errmsg": "ok"}) @@ -1008,13 +1029,17 @@ class TestQrcodeClass(BaseTestClass): @staticmethod def create_callback(request): - params = urlparse.parse_qs(urlparse.urlparse(request.url).query) + params = urllib.parse.parse_qs( + urllib.parse.urlparse(request.url).query + ) assert "access_token" in params.keys() return 200, JSON_HEADER, json.dumps({"errcode": 0, "errmsg": "ok"}) @staticmethod def show_callback(request): - params = urlparse.parse_qs(urlparse.urlparse(request.url).query) + params = urllib.parse.parse_qs( + urllib.parse.urlparse(request.url).query + ) assert "ticket" in params.keys() return 200, JSON_HEADER, json.dumps({"errcode": 0, "errmsg": "ok"}) @@ -1537,7 +1562,9 @@ class TestClientTagManageClass(BaseTestClass): delete_tag_id = 100 def create_tag_callback(self, request): - params = urlparse.parse_qs(urlparse.urlparse(request.url).query) + params = urllib.parse.parse_qs( + urllib.parse.urlparse(request.url).query + ) assert "access_token" in params.keys() body = json.loads(request.body.decode("utf-8")) assert body == {"tag": {"name": self.create_tag_name}} @@ -1549,7 +1576,9 @@ def create_tag_callback(self, request): ) def update_tag_callback(self, request): - params = urlparse.parse_qs(urlparse.urlparse(request.url).query) + params = 
urllib.parse.parse_qs( + urllib.parse.urlparse(request.url).query + ) assert "access_token" in params.keys() body = json.loads(request.body.decode("utf-8")) assert body == { @@ -1561,19 +1590,25 @@ def update_tag_callback(self, request): return 200, JSON_HEADER, json.dumps({'errcode': 0, 'errmsg': 'ok'}) def get_tags_callback(self, request): - params = urlparse.parse_qs(urlparse.urlparse(request.url).query) + params = urllib.parse.parse_qs( + urllib.parse.urlparse(request.url).query + ) assert "access_token" in params.keys() return 200, JSON_HEADER, json.dumps(self.get_tags_response) def get_users_by_tag_callback(self, request): - params = urlparse.parse_qs(urlparse.urlparse(request.url).query) + params = urllib.parse.parse_qs( + urllib.parse.urlparse(request.url).query + ) assert "access_token" in params.keys() body = json.loads(request.body.decode("utf-8")) assert body == {"tagid": self.get_users_by_tag_id, "next_openid": ""} return 200, JSON_HEADER, json.dumps(self.get_users_by_tag_response) def delete_tag_callback(self, request): - params = urlparse.parse_qs(urlparse.urlparse(request.url).query) + params = urllib.parse.parse_qs( + urllib.parse.urlparse(request.url).query + ) assert "access_token" in params.keys() body = json.loads(request.body.decode("utf-8")) assert body == { @@ -1656,21 +1691,27 @@ class TestClientMembersTagClass(BaseTestClass): get_tags_by_user_response = {'tagid_list': [tag_id]} def tag_user_callback(self, request): - params = urlparse.parse_qs(urlparse.urlparse(request.url).query) + params = urllib.parse.parse_qs( + urllib.parse.urlparse(request.url).query + ) assert "access_token" in params.keys() body = json.loads(request.body.decode("utf-8")) assert body == {"openid_list": self.users_list, "tagid": self.tag_id} return 200, JSON_HEADER, json.dumps({'errcode': 0, 'errmsg': 'ok'}) def untag_user_callback(self, request): - params = urlparse.parse_qs(urlparse.urlparse(request.url).query) + params = urllib.parse.parse_qs( + urllib.parse.urlparse(request.url).query + ) assert "access_token" in params.keys() body = json.loads(request.body.decode("utf-8")) assert body == {"openid_list": self.users_list, "tagid": self.tag_id} return 200, JSON_HEADER, json.dumps({'errcode': 0, 'errmsg': 'ok'}) def get_tags_by_user_callback(self, request): - params = urlparse.parse_qs(urlparse.urlparse(request.url).query) + params = urllib.parse.parse_qs( + urllib.parse.urlparse(request.url).query + ) assert "access_token" in params.keys() body = json.loads(request.body.decode("utf-8")) assert body == { @@ -1745,14 +1786,18 @@ class TestClientMass(BaseTestClass): ] def up_news_callback(self, request): - params = urlparse.parse_qs(urlparse.urlparse(request.url).query) + params = urllib.parse.parse_qs( + urllib.parse.urlparse(request.url).query + ) assert "access_token" in params.keys() body = json.loads(request.body.decode("utf-8")) assert body == {"articles": self.articles} return 200, JSON_HEADER, json.dumps({'errcode': 0, 'errmsg': 'ok'}) def send_all_openid_callback(self, request): - params = urlparse.parse_qs(urlparse.urlparse(request.url).query) + params = urllib.parse.parse_qs( + urllib.parse.urlparse(request.url).query + ) assert "access_token" in params.keys() body = json.loads(request.body.decode("utf-8")) assert 'touser' in body @@ -1773,7 +1818,9 @@ def send_all_openid_callback(self, request): return 200, JSON_HEADER, json.dumps({'errcode': 0, 'errmsg': 'ok'}) def send_all_tagid_callback(self, request): - params = urlparse.parse_qs(urlparse.urlparse(request.url).query) + params 
= urllib.parse.parse_qs( + urllib.parse.urlparse(request.url).query + ) assert "access_token" in params.keys() body = json.loads(request.body.decode("utf-8")) assert 'filter' in body @@ -1791,14 +1838,18 @@ def send_all_tagid_callback(self, request): return 200, JSON_HEADER, json.dumps({'errcode': 0, 'errmsg': 'ok'}) def delete_callback(self, request): - params = urlparse.parse_qs(urlparse.urlparse(request.url).query) + params = urllib.parse.parse_qs( + urllib.parse.urlparse(request.url).query + ) assert "access_token" in params.keys() body = json.loads(request.body.decode("utf-8")) assert "msg_id" in body return 200, JSON_HEADER, json.dumps({'errcode': 0, 'errmsg': 'ok'}) def preview_openid_callback(self, request): - params = urlparse.parse_qs(urlparse.urlparse(request.url).query) + params = urllib.parse.parse_qs( + urllib.parse.urlparse(request.url).query + ) assert "access_token" in params.keys() body = json.loads(request.body.decode("utf-8")) assert "touser" in body @@ -1815,7 +1866,9 @@ def preview_openid_callback(self, request): return 200, JSON_HEADER, json.dumps({'errcode': 0, 'errmsg': 'ok'}) def preview_wxname_callback(self, request): - params = urlparse.parse_qs(urlparse.urlparse(request.url).query) + params = urllib.parse.parse_qs( + urllib.parse.urlparse(request.url).query + ) assert "access_token" in params.keys() body = json.loads(request.body.decode("utf-8")) assert "towxname" in body @@ -1833,7 +1886,9 @@ def preview_wxname_callback(self, request): return 200, JSON_HEADER, json.dumps({'errcode': 0, 'errmsg': 'ok'}) def get_status_callback(self, request): - params = urlparse.parse_qs(urlparse.urlparse(request.url).query) + params = urllib.parse.parse_qs( + urllib.parse.urlparse(request.url).query + ) assert "access_token" in params.keys() body = json.loads(request.body.decode("utf-8")) assert "msg_id" in body @@ -1845,12 +1900,16 @@ def get_status_callback(self, request): ) def get_news_speed_callback(self, request): - params = urlparse.parse_qs(urlparse.urlparse(request.url).query) + params = urllib.parse.parse_qs( + urllib.parse.urlparse(request.url).query + ) assert "access_token" in params.keys() return 200, JSON_HEADER, json.dumps({"speed": 3, "realspeed": 15}) def set_news_speed_callback(self, request): - params = urlparse.parse_qs(urlparse.urlparse(request.url).query) + params = urllib.parse.parse_qs( + urllib.parse.urlparse(request.url).query + ) assert "access_token" in params.keys() body = json.loads(request.body.decode("utf-8")) assert "speed" in body diff --git a/tests/test_session.py b/tests/test_session.py index b3746d13..29dcc840 100644 --- a/tests/test_session.py +++ b/tests/test_session.py @@ -4,7 +4,6 @@ import mongomock import mockredis import pytest -import six import sqlite3 import werobot @@ -161,37 +160,3 @@ def test_storage(storage): assert storage["榴莲"] == u"榴莲" del storage["榴莲"] assert storage["榴莲"] == {} - - -def test_saeskvtorage(): - """ - Run this test with PY2 only. 
- """ - if not six.PY2: - return - - class FakeSaeKVDBStorage(saekvstorage.SaeKVDBStorage): - def __init__(self, prefix='ws_'): - try: - saekvstorage.SaeKVDBStorage.__init__(self, prefix) - except RuntimeError: - import os - import sys - sys.path.append(os.path.dirname(__file__)) - import fake_sae as kvdb - self.kv = kvdb.KVClient() - self.prefix = prefix - - storage = FakeSaeKVDBStorage() - - assert storage.get("喵") == {} - storage.set("喵", "喵喵") - assert storage.get("喵").decode('utf-8') == u"喵喵" - storage.delete("喵") - assert storage.get("喵") == {} - - assert storage["榴莲"] == {} - storage["榴莲"] = "榴莲" - assert storage["榴莲"].decode('utf-8') == u"榴莲" - del storage["榴莲"] - assert storage["榴莲"] == {} diff --git a/tests/test_utils.py b/tests/test_utils.py index 47d11242..e8361f40 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -1,6 +1,5 @@ # -*- coding: utf-8 -*- -import six import re from werobot.utils import generate_token, check_token, to_text, to_binary @@ -14,21 +13,19 @@ def test_token_generator(): def test_to_text(): - assert to_text(6) == six.text_type(6) + assert to_text(6) == str(6) assert to_text(b"aa") == "aa" assert to_text("cc") == "cc" - if six.PY2: - assert to_text(u"喵") == u"喵" - assert to_text("喵") == u"喵" + assert to_text(u"喵") == u"喵" + assert to_text("喵") == u"喵" def test_to_binary(): - assert to_binary(6) == six.binary_type(6) + assert to_binary(6) == bytes(6) assert to_binary(b"aa") == b"aa" assert to_binary("cc") == b"cc" - if six.PY2: - assert to_binary(u"喵") == "喵" - assert to_binary("喵") == "喵" + assert to_binary(u"喵") == b"\xe5\x96\xb5" + assert to_binary("喵") == b"\xe5\x96\xb5" def test_pay_sign_dict(): diff --git a/tox-win.ini b/tox-win.ini index 76788d62..fd01504f 100644 --- a/tox-win.ini +++ b/tox-win.ini @@ -1,5 +1,5 @@ [tox] -envlist = {py27,py35,py36}--{dj111}, {py35,py36,py37,py38}--{dj20} +envlist = {py35,py36}--{dj111}, {py35,py36,py37,py38}--{dj20} [testenv] commands = coverage run --source werobot -m py.test diff --git a/tox.ini b/tox.ini index cab824bc..7baa68ad 100644 --- a/tox.ini +++ b/tox.ini @@ -1,5 +1,5 @@ [tox] -envlist = {py27,py35,py36,pypy}--{dj111}, {py35,py36,py37,py38}--{dj20}, docs +envlist = {py35,py36,pypy}--{dj111}, {py35,py36,py37,py38}--{dj20}, docs [testenv] commands = coverage run --source werobot -m py.test diff --git a/werobot/client.py b/werobot/client.py index 321ae6da..b115c8e0 100644 --- a/werobot/client.py +++ b/werobot/client.py @@ -2,7 +2,7 @@ import time import requests -from six.moves import urllib +import urllib.parse from requests.compat import json as _json from werobot.utils import to_text diff --git a/werobot/crypto/__init__.py b/werobot/crypto/__init__.py index 5874c739..6c82e883 100644 --- a/werobot/crypto/__init__.py +++ b/werobot/crypto/__init__.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- -from __future__ import absolute_import, unicode_literals import base64 import socket diff --git a/werobot/crypto/exceptions.py b/werobot/crypto/exceptions.py index c20f8e40..649132f3 100644 --- a/werobot/crypto/exceptions.py +++ b/werobot/crypto/exceptions.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- -from __future__ import absolute_import, unicode_literals class UnvalidEncodingAESKey(Exception): diff --git a/werobot/exceptions.py b/werobot/exceptions.py index 979cbe3b..ae353f4d 100644 --- a/werobot/exceptions.py +++ b/werobot/exceptions.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- -from __future__ import absolute_import, unicode_literals class ConfigError(Exception): diff --git a/werobot/logger.py b/werobot/logger.py 
index 64b817ed..dfb7f383 100644 --- a/werobot/logger.py +++ b/werobot/logger.py @@ -1,6 +1,5 @@ # -*- coding:utf-8 -*- -import six import sys import time import logging @@ -45,25 +44,16 @@ def __init__(self, color, *args, **kwargs): self._color = color if color: fg_color = ( - curses.tigetstr("setaf") or curses.tigetstr("setf") or "" + curses.tigetstr("setaf") or curses.tigetstr("setf") or b"" ) - if (3, 0) < sys.version_info < (3, 2, 3): - fg_color = six.text_type(fg_color, "ascii") self._colors = { - logging.DEBUG: six.text_type( - curses.tparm(fg_color, 4), "ascii" - ), # Blue - logging.INFO: six.text_type( - curses.tparm(fg_color, 2), "ascii" - ), # Green - logging.WARNING: six.text_type( - curses.tparm(fg_color, 3), "ascii" - ), # Yellow - logging.ERROR: six.text_type( - curses.tparm(fg_color, 1), "ascii" - ), # Red + logging.DEBUG: str(curses.tparm(fg_color, 4), "ascii"), # Blue + logging.INFO: str(curses.tparm(fg_color, 2), "ascii"), # Green + logging.WARNING: str(curses.tparm(fg_color, 3), + "ascii"), # Yellow + logging.ERROR: str(curses.tparm(fg_color, 1), "ascii"), # Red } - self._normal = six.text_type(curses.tigetstr("sgr0"), "ascii") + self._normal = str(curses.tigetstr("sgr0"), "ascii") def format(self, record): try: @@ -73,8 +63,7 @@ def format(self, record): record.asctime = time.strftime( "%y%m%d %H:%M:%S", self.converter(record.created) ) - prefix = '[%(levelname)1.1s %(asctime)s %(module)s:%(lineno)d]' % \ - record.__dict__ + prefix = '[%(levelname)1.1s %(asctime)s %(module)s:%(lineno)d]' % record.__dict__ if self._color: prefix = ( self._colors.get(record.levelno, self._normal) + prefix + diff --git a/werobot/messages/events.py b/werobot/messages/events.py index 41b5f6b1..f762fa9a 100644 --- a/werobot/messages/events.py +++ b/werobot/messages/events.py @@ -1,6 +1,5 @@ # -*- coding: utf-8 -*- -import six from werobot.messages.entries import StringEntry, IntEntry, FloatEntry from werobot.messages.base import WeRoBotMetaClass @@ -9,8 +8,7 @@ class EventMetaClass(WeRoBotMetaClass): pass -@six.add_metaclass(EventMetaClass) -class WeChatEvent(object): +class WeChatEvent(object, metaclass=EventMetaClass): target = StringEntry('ToUserName') source = StringEntry('FromUserName') time = IntEntry('CreateTime') diff --git a/werobot/messages/messages.py b/werobot/messages/messages.py index 6237adfc..a6ebcde1 100644 --- a/werobot/messages/messages.py +++ b/werobot/messages/messages.py @@ -1,6 +1,5 @@ # -*- coding: utf-8 -*- -import six from werobot.messages.entries import StringEntry, IntEntry, FloatEntry from werobot.messages.base import WeRoBotMetaClass @@ -9,8 +8,7 @@ class MessageMetaClass(WeRoBotMetaClass): pass -@six.add_metaclass(MessageMetaClass) -class WeChatMessage(object): +class WeChatMessage(object, metaclass=MessageMetaClass): message_id = IntEntry('MsgId', 0) target = StringEntry('ToUserName') source = StringEntry('FromUserName') diff --git a/werobot/parser.py b/werobot/parser.py index 6dda15e1..23720f90 100644 --- a/werobot/parser.py +++ b/werobot/parser.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- -from __future__ import absolute_import, unicode_literals import xmltodict from werobot.messages.messages import MessageMetaClass, UnknownMessage diff --git a/werobot/robot.py b/werobot/robot.py index 23315fd2..d1c89280 100644 --- a/werobot/robot.py +++ b/werobot/robot.py @@ -1,7 +1,5 @@ # -*- coding: utf-8 -*- -from __future__ import absolute_import, unicode_literals -import six import warnings from werobot.config import Config, ConfigAttribute @@ -530,7 +528,7 @@ def 
add_filter(self, func, rules): self.add_filter(func, [x]) else: target_content = rules[0] - if isinstance(target_content, six.string_types): + if isinstance(target_content, str): target_content = to_text(target_content) def _check_content(message): diff --git a/werobot/utils.py b/werobot/utils.py index 8a8c544c..58d815a7 100644 --- a/werobot/utils.py +++ b/werobot/utils.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- -from __future__ import absolute_import, unicode_literals import io import json @@ -11,14 +10,12 @@ from functools import wraps from hashlib import sha1 -import six - try: from secrets import choice except ImportError: from random import choice -string_types = (six.string_types, six.text_type, six.binary_type) +string_types = (str, bytes) re_type = type(re.compile("regex_test")) @@ -54,22 +51,24 @@ def wrapped_func(self, *args, **kwargs): def to_text(value, encoding="utf-8"): - if isinstance(value, six.text_type): + if isinstance(value, str): return value - if isinstance(value, six.binary_type): + if isinstance(value, bytes): return value.decode(encoding) - return six.text_type(value) + return str(value) def to_binary(value, encoding="utf-8"): - if isinstance(value, six.binary_type): + if isinstance(value, bytes): return value - if isinstance(value, six.text_type): + if isinstance(value, str): return value.encode(encoding) - return six.binary_type(value) + return bytes(value) def is_string(value): + """Check if value's type is `str` or `bytes` + """ return isinstance(value, string_types) @@ -81,8 +80,6 @@ def byte2int(s, index=0): :return: ASCII int value """ - if six.PY2: - return ord(s[index]) return s[index] From 336979002a16795accebe56c12cca255177f651f Mon Sep 17 00:00:00 2001 From: Wu Haotian Date: Sun, 23 Feb 2020 18:39:30 +0800 Subject: [PATCH 2/3] ci: remove tox-win.ini --- appveyor.yml | 2 +- tox-win.ini | 10 ---------- tox.ini | 2 +- 3 files changed, 2 insertions(+), 12 deletions(-) delete mode 100644 tox-win.ini diff --git a/appveyor.yml b/appveyor.yml index 215d43dc..1c47cfd0 100644 --- a/appveyor.yml +++ b/appveyor.yml @@ -9,7 +9,7 @@ install: - C:\Python35\python -m pip install --force-reinstall tox - set PYTHONIOENCODING=utf-8 test_script: - - C:\Python35\python -m tox -c tox-win.ini + - C:\Python35\python -m tox -c tox.ini on_success: - set PATH=C:\\Python35;C:\\Python35\\Scripts;%PATH% - pip install codecov coverage diff --git a/tox-win.ini b/tox-win.ini deleted file mode 100644 index fd01504f..00000000 --- a/tox-win.ini +++ /dev/null @@ -1,10 +0,0 @@ -[tox] -envlist = {py35,py36}--{dj111}, {py35,py36,py37,py38}--{dj20} - -[testenv] -commands = coverage run --source werobot -m py.test -passenv = DATABASE_MYSQL_USERNAME DATABASE_MYSQL_PASSWORD -deps = - dj111: Django < 1.12 - dj20: Django < 2.1 - -rtox-requirements.txt diff --git a/tox.ini b/tox.ini index 7baa68ad..bb539f12 100644 --- a/tox.ini +++ b/tox.ini @@ -1,5 +1,5 @@ [tox] -envlist = {py35,py36,pypy}--{dj111}, {py35,py36,py37,py38}--{dj20}, docs +envlist = {py35,py36}--{dj111}, {py35,py36,py37,py38}--{dj20}, docs [testenv] commands = coverage run --source werobot -m py.test From fcd29d6ef003bda4d31c9e60fc96ecf063713922 Mon Sep 17 00:00:00 2001 From: helloqiu Date: Thu, 27 Feb 2020 21:52:24 +0000 Subject: [PATCH 3/3] docs: add new changelog and update description of deploying on SAE --- docs/changelog.rst | 5 +++++ docs/deploy.rst | 3 ++- 2 files changed, 7 insertions(+), 1 deletion(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index 3c6d7e36..268c34d6 100644 --- a/docs/changelog.rst +++ 
b/docs/changelog.rst @@ -1,6 +1,11 @@ Changelog ============= +Version 1.11.0 +---------------- ++ 停止了对 Python2 与 PyPy 的支持。 ( `#539 `_ ) ++ 停止了对 SAE 相关代码的测试。 ( `#539 `_ ) + Version 1.10.1 ---------------- + 修复 :ref:`群发接口` 的 docstring 样式。 diff --git a/docs/deploy.rst b/docs/deploy.rst index fded3590..5d94362f 100644 --- a/docs/deploy.rst +++ b/docs/deploy.rst @@ -130,8 +130,9 @@ server 支持以下几种: 在SAE上部署 ----------------- +.. attention:: 从 :ref:`Version 1.11.0` 开始,WeRoBot 停止测试 SAE 相关部分的代码。 - 新浪云上的 Python 应用的入口为 index.wsgi:application ,也就是 index.wsgi 这个文件中名为 application 的 callable object。 +新浪云上的 Python 应用的入口为 index.wsgi:application ,也就是 index.wsgi 这个文件中名为 application 的 callable object。 所以,假设你在 `robot.py` 中使用了 WeRoBot ::
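    # A minimal index.wsgi sketch, not part of this patch: it assumes the
    # WeRoBot instance defined in robot.py is named `robot`, and that the
    # SAE runtime helper sae.create_wsgi_app is available.
    import sae
    from robot import robot

    # robot.wsgi is the WSGI callable WeRoBot exposes for external servers.
    application = sae.create_wsgi_app(robot.wsgi)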