From 2dc3cf9b018254aa54e802f2be73b47f4aeba555 Mon Sep 17 00:00:00 2001 From: Paul Kendall Date: Thu, 12 May 2022 19:19:08 +1200 Subject: [PATCH] Copy streamexpect to codebase to fix dependency upgrade issue --- python/ETXinitPassthrough.py | 17 +- python/external/six.py | 998 ++++++++++++++++++++++++++++++++ python/external/streamexpect.py | 823 ++++++++++++++++++++++++++ 3 files changed, 1822 insertions(+), 16 deletions(-) create mode 100644 python/external/six.py create mode 100644 python/external/streamexpect.py diff --git a/python/ETXinitPassthrough.py b/python/ETXinitPassthrough.py index b40c5e4..7ad42b0 100644 --- a/python/ETXinitPassthrough.py +++ b/python/ETXinitPassthrough.py @@ -1,27 +1,12 @@ import serial, time, sys -import subprocess import argparse import serials_find -import bootloader +import external.streamexpect as streamexpect def dbg_print(line=''): sys.stdout.write(line + '\n') sys.stdout.flush() -try: - import streamexpect -except ImportError: - sys.stdout.write("Installing pexpect") - subprocess.check_call([sys.executable, "-m", "pip", "install", "streamexpect"]) - try: - import streamexpect - except ImportError: - env.Execute("$PYTHONEXE -m pip install streamexpect") - try: - import streamexpect - except ImportError: - streamexpect = None - def etx_passthrough_init(port, requestedBaudrate): sys.stdout.flush() dbg_print("======== PASSTHROUGH INIT ========") diff --git a/python/external/six.py b/python/external/six.py new file mode 100644 index 0000000..d4fe984 --- /dev/null +++ b/python/external/six.py @@ -0,0 +1,998 @@ +# Copyright (c) 2010-2020 Benjamin Peterson +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the 
Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in all +# copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. + +"""Utilities for writing code that runs on Python 2 and 3""" + +from __future__ import absolute_import + +import functools +import itertools +import operator +import sys +import types + +__author__ = "Benjamin Peterson " +__version__ = "1.16.0" + + +# Useful for very coarse version differentiation. +PY2 = sys.version_info[0] == 2 +PY3 = sys.version_info[0] == 3 +PY34 = sys.version_info[0:2] >= (3, 4) + +if PY3: + string_types = str, + integer_types = int, + class_types = type, + text_type = str + binary_type = bytes + + MAXSIZE = sys.maxsize +else: + string_types = basestring, + integer_types = (int, long) + class_types = (type, types.ClassType) + text_type = unicode + binary_type = str + + if sys.platform.startswith("java"): + # Jython always uses 32 bits. + MAXSIZE = int((1 << 31) - 1) + else: + # It's possible to have sizeof(long) != sizeof(Py_ssize_t). 
+ class X(object): + + def __len__(self): + return 1 << 31 + try: + len(X()) + except OverflowError: + # 32-bit + MAXSIZE = int((1 << 31) - 1) + else: + # 64-bit + MAXSIZE = int((1 << 63) - 1) + del X + +if PY34: + from importlib.util import spec_from_loader +else: + spec_from_loader = None + + +def _add_doc(func, doc): + """Add documentation to a function.""" + func.__doc__ = doc + + +def _import_module(name): + """Import module, returning the module after the last dot.""" + __import__(name) + return sys.modules[name] + + +class _LazyDescr(object): + + def __init__(self, name): + self.name = name + + def __get__(self, obj, tp): + result = self._resolve() + setattr(obj, self.name, result) # Invokes __set__. + try: + # This is a bit ugly, but it avoids running this again by + # removing this descriptor. + delattr(obj.__class__, self.name) + except AttributeError: + pass + return result + + +class MovedModule(_LazyDescr): + + def __init__(self, name, old, new=None): + super(MovedModule, self).__init__(name) + if PY3: + if new is None: + new = name + self.mod = new + else: + self.mod = old + + def _resolve(self): + return _import_module(self.mod) + + def __getattr__(self, attr): + _module = self._resolve() + value = getattr(_module, attr) + setattr(self, attr, value) + return value + + +class _LazyModule(types.ModuleType): + + def __init__(self, name): + super(_LazyModule, self).__init__(name) + self.__doc__ = self.__class__.__doc__ + + def __dir__(self): + attrs = ["__doc__", "__name__"] + attrs += [attr.name for attr in self._moved_attributes] + return attrs + + # Subclasses should override this + _moved_attributes = [] + + +class MovedAttribute(_LazyDescr): + + def __init__(self, name, old_mod, new_mod, old_attr=None, new_attr=None): + super(MovedAttribute, self).__init__(name) + if PY3: + if new_mod is None: + new_mod = name + self.mod = new_mod + if new_attr is None: + if old_attr is None: + new_attr = name + else: + new_attr = old_attr + self.attr = new_attr + 
else: + self.mod = old_mod + if old_attr is None: + old_attr = name + self.attr = old_attr + + def _resolve(self): + module = _import_module(self.mod) + return getattr(module, self.attr) + + +class _SixMetaPathImporter(object): + + """ + A meta path importer to import six.moves and its submodules. + + This class implements a PEP302 finder and loader. It should be compatible + with Python 2.5 and all existing versions of Python3 + """ + + def __init__(self, six_module_name): + self.name = six_module_name + self.known_modules = {} + + def _add_module(self, mod, *fullnames): + for fullname in fullnames: + self.known_modules[self.name + "." + fullname] = mod + + def _get_module(self, fullname): + return self.known_modules[self.name + "." + fullname] + + def find_module(self, fullname, path=None): + if fullname in self.known_modules: + return self + return None + + def find_spec(self, fullname, path, target=None): + if fullname in self.known_modules: + return spec_from_loader(fullname, self) + return None + + def __get_module(self, fullname): + try: + return self.known_modules[fullname] + except KeyError: + raise ImportError("This loader does not know module " + fullname) + + def load_module(self, fullname): + try: + # in case of a reload + return sys.modules[fullname] + except KeyError: + pass + mod = self.__get_module(fullname) + if isinstance(mod, MovedModule): + mod = mod._resolve() + else: + mod.__loader__ = self + sys.modules[fullname] = mod + return mod + + def is_package(self, fullname): + """ + Return true, if the named module is a package. 
+ + We need this method to get correct spec objects with + Python 3.4 (see PEP451) + """ + return hasattr(self.__get_module(fullname), "__path__") + + def get_code(self, fullname): + """Return None + + Required, if is_package is implemented""" + self.__get_module(fullname) # eventually raises ImportError + return None + get_source = get_code # same as get_code + + def create_module(self, spec): + return self.load_module(spec.name) + + def exec_module(self, module): + pass + +_importer = _SixMetaPathImporter(__name__) + + +class _MovedItems(_LazyModule): + + """Lazy loading of moved objects""" + __path__ = [] # mark as package + + +_moved_attributes = [ + MovedAttribute("cStringIO", "cStringIO", "io", "StringIO"), + MovedAttribute("filter", "itertools", "builtins", "ifilter", "filter"), + MovedAttribute("filterfalse", "itertools", "itertools", "ifilterfalse", "filterfalse"), + MovedAttribute("input", "__builtin__", "builtins", "raw_input", "input"), + MovedAttribute("intern", "__builtin__", "sys"), + MovedAttribute("map", "itertools", "builtins", "imap", "map"), + MovedAttribute("getcwd", "os", "os", "getcwdu", "getcwd"), + MovedAttribute("getcwdb", "os", "os", "getcwd", "getcwdb"), + MovedAttribute("getoutput", "commands", "subprocess"), + MovedAttribute("range", "__builtin__", "builtins", "xrange", "range"), + MovedAttribute("reload_module", "__builtin__", "importlib" if PY34 else "imp", "reload"), + MovedAttribute("reduce", "__builtin__", "functools"), + MovedAttribute("shlex_quote", "pipes", "shlex", "quote"), + MovedAttribute("StringIO", "StringIO", "io"), + MovedAttribute("UserDict", "UserDict", "collections", "IterableUserDict", "UserDict"), + MovedAttribute("UserList", "UserList", "collections"), + MovedAttribute("UserString", "UserString", "collections"), + MovedAttribute("xrange", "__builtin__", "builtins", "xrange", "range"), + MovedAttribute("zip", "itertools", "builtins", "izip", "zip"), + MovedAttribute("zip_longest", "itertools", "itertools", 
"izip_longest", "zip_longest"), + MovedModule("builtins", "__builtin__"), + MovedModule("configparser", "ConfigParser"), + MovedModule("collections_abc", "collections", "collections.abc" if sys.version_info >= (3, 3) else "collections"), + MovedModule("copyreg", "copy_reg"), + MovedModule("dbm_gnu", "gdbm", "dbm.gnu"), + MovedModule("dbm_ndbm", "dbm", "dbm.ndbm"), + MovedModule("_dummy_thread", "dummy_thread", "_dummy_thread" if sys.version_info < (3, 9) else "_thread"), + MovedModule("http_cookiejar", "cookielib", "http.cookiejar"), + MovedModule("http_cookies", "Cookie", "http.cookies"), + MovedModule("html_entities", "htmlentitydefs", "html.entities"), + MovedModule("html_parser", "HTMLParser", "html.parser"), + MovedModule("http_client", "httplib", "http.client"), + MovedModule("email_mime_base", "email.MIMEBase", "email.mime.base"), + MovedModule("email_mime_image", "email.MIMEImage", "email.mime.image"), + MovedModule("email_mime_multipart", "email.MIMEMultipart", "email.mime.multipart"), + MovedModule("email_mime_nonmultipart", "email.MIMENonMultipart", "email.mime.nonmultipart"), + MovedModule("email_mime_text", "email.MIMEText", "email.mime.text"), + MovedModule("BaseHTTPServer", "BaseHTTPServer", "http.server"), + MovedModule("CGIHTTPServer", "CGIHTTPServer", "http.server"), + MovedModule("SimpleHTTPServer", "SimpleHTTPServer", "http.server"), + MovedModule("cPickle", "cPickle", "pickle"), + MovedModule("queue", "Queue"), + MovedModule("reprlib", "repr"), + MovedModule("socketserver", "SocketServer"), + MovedModule("_thread", "thread", "_thread"), + MovedModule("tkinter", "Tkinter"), + MovedModule("tkinter_dialog", "Dialog", "tkinter.dialog"), + MovedModule("tkinter_filedialog", "FileDialog", "tkinter.filedialog"), + MovedModule("tkinter_scrolledtext", "ScrolledText", "tkinter.scrolledtext"), + MovedModule("tkinter_simpledialog", "SimpleDialog", "tkinter.simpledialog"), + MovedModule("tkinter_tix", "Tix", "tkinter.tix"), + MovedModule("tkinter_ttk", 
"ttk", "tkinter.ttk"), + MovedModule("tkinter_constants", "Tkconstants", "tkinter.constants"), + MovedModule("tkinter_dnd", "Tkdnd", "tkinter.dnd"), + MovedModule("tkinter_colorchooser", "tkColorChooser", + "tkinter.colorchooser"), + MovedModule("tkinter_commondialog", "tkCommonDialog", + "tkinter.commondialog"), + MovedModule("tkinter_tkfiledialog", "tkFileDialog", "tkinter.filedialog"), + MovedModule("tkinter_font", "tkFont", "tkinter.font"), + MovedModule("tkinter_messagebox", "tkMessageBox", "tkinter.messagebox"), + MovedModule("tkinter_tksimpledialog", "tkSimpleDialog", + "tkinter.simpledialog"), + MovedModule("urllib_parse", __name__ + ".moves.urllib_parse", "urllib.parse"), + MovedModule("urllib_error", __name__ + ".moves.urllib_error", "urllib.error"), + MovedModule("urllib", __name__ + ".moves.urllib", __name__ + ".moves.urllib"), + MovedModule("urllib_robotparser", "robotparser", "urllib.robotparser"), + MovedModule("xmlrpc_client", "xmlrpclib", "xmlrpc.client"), + MovedModule("xmlrpc_server", "SimpleXMLRPCServer", "xmlrpc.server"), +] +# Add windows specific modules. +if sys.platform == "win32": + _moved_attributes += [ + MovedModule("winreg", "_winreg"), + ] + +for attr in _moved_attributes: + setattr(_MovedItems, attr.name, attr) + if isinstance(attr, MovedModule): + _importer._add_module(attr, "moves." 
+ attr.name) +del attr + +_MovedItems._moved_attributes = _moved_attributes + +moves = _MovedItems(__name__ + ".moves") +_importer._add_module(moves, "moves") + + +class Module_six_moves_urllib_parse(_LazyModule): + + """Lazy loading of moved objects in six.moves.urllib_parse""" + + +_urllib_parse_moved_attributes = [ + MovedAttribute("ParseResult", "urlparse", "urllib.parse"), + MovedAttribute("SplitResult", "urlparse", "urllib.parse"), + MovedAttribute("parse_qs", "urlparse", "urllib.parse"), + MovedAttribute("parse_qsl", "urlparse", "urllib.parse"), + MovedAttribute("urldefrag", "urlparse", "urllib.parse"), + MovedAttribute("urljoin", "urlparse", "urllib.parse"), + MovedAttribute("urlparse", "urlparse", "urllib.parse"), + MovedAttribute("urlsplit", "urlparse", "urllib.parse"), + MovedAttribute("urlunparse", "urlparse", "urllib.parse"), + MovedAttribute("urlunsplit", "urlparse", "urllib.parse"), + MovedAttribute("quote", "urllib", "urllib.parse"), + MovedAttribute("quote_plus", "urllib", "urllib.parse"), + MovedAttribute("unquote", "urllib", "urllib.parse"), + MovedAttribute("unquote_plus", "urllib", "urllib.parse"), + MovedAttribute("unquote_to_bytes", "urllib", "urllib.parse", "unquote", "unquote_to_bytes"), + MovedAttribute("urlencode", "urllib", "urllib.parse"), + MovedAttribute("splitquery", "urllib", "urllib.parse"), + MovedAttribute("splittag", "urllib", "urllib.parse"), + MovedAttribute("splituser", "urllib", "urllib.parse"), + MovedAttribute("splitvalue", "urllib", "urllib.parse"), + MovedAttribute("uses_fragment", "urlparse", "urllib.parse"), + MovedAttribute("uses_netloc", "urlparse", "urllib.parse"), + MovedAttribute("uses_params", "urlparse", "urllib.parse"), + MovedAttribute("uses_query", "urlparse", "urllib.parse"), + MovedAttribute("uses_relative", "urlparse", "urllib.parse"), +] +for attr in _urllib_parse_moved_attributes: + setattr(Module_six_moves_urllib_parse, attr.name, attr) +del attr + +Module_six_moves_urllib_parse._moved_attributes = 
_urllib_parse_moved_attributes + +_importer._add_module(Module_six_moves_urllib_parse(__name__ + ".moves.urllib_parse"), + "moves.urllib_parse", "moves.urllib.parse") + + +class Module_six_moves_urllib_error(_LazyModule): + + """Lazy loading of moved objects in six.moves.urllib_error""" + + +_urllib_error_moved_attributes = [ + MovedAttribute("URLError", "urllib2", "urllib.error"), + MovedAttribute("HTTPError", "urllib2", "urllib.error"), + MovedAttribute("ContentTooShortError", "urllib", "urllib.error"), +] +for attr in _urllib_error_moved_attributes: + setattr(Module_six_moves_urllib_error, attr.name, attr) +del attr + +Module_six_moves_urllib_error._moved_attributes = _urllib_error_moved_attributes + +_importer._add_module(Module_six_moves_urllib_error(__name__ + ".moves.urllib.error"), + "moves.urllib_error", "moves.urllib.error") + + +class Module_six_moves_urllib_request(_LazyModule): + + """Lazy loading of moved objects in six.moves.urllib_request""" + + +_urllib_request_moved_attributes = [ + MovedAttribute("urlopen", "urllib2", "urllib.request"), + MovedAttribute("install_opener", "urllib2", "urllib.request"), + MovedAttribute("build_opener", "urllib2", "urllib.request"), + MovedAttribute("pathname2url", "urllib", "urllib.request"), + MovedAttribute("url2pathname", "urllib", "urllib.request"), + MovedAttribute("getproxies", "urllib", "urllib.request"), + MovedAttribute("Request", "urllib2", "urllib.request"), + MovedAttribute("OpenerDirector", "urllib2", "urllib.request"), + MovedAttribute("HTTPDefaultErrorHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPRedirectHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPCookieProcessor", "urllib2", "urllib.request"), + MovedAttribute("ProxyHandler", "urllib2", "urllib.request"), + MovedAttribute("BaseHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPPasswordMgr", "urllib2", "urllib.request"), + MovedAttribute("HTTPPasswordMgrWithDefaultRealm", "urllib2", "urllib.request"), + 
MovedAttribute("AbstractBasicAuthHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPBasicAuthHandler", "urllib2", "urllib.request"), + MovedAttribute("ProxyBasicAuthHandler", "urllib2", "urllib.request"), + MovedAttribute("AbstractDigestAuthHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPDigestAuthHandler", "urllib2", "urllib.request"), + MovedAttribute("ProxyDigestAuthHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPSHandler", "urllib2", "urllib.request"), + MovedAttribute("FileHandler", "urllib2", "urllib.request"), + MovedAttribute("FTPHandler", "urllib2", "urllib.request"), + MovedAttribute("CacheFTPHandler", "urllib2", "urllib.request"), + MovedAttribute("UnknownHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPErrorProcessor", "urllib2", "urllib.request"), + MovedAttribute("urlretrieve", "urllib", "urllib.request"), + MovedAttribute("urlcleanup", "urllib", "urllib.request"), + MovedAttribute("URLopener", "urllib", "urllib.request"), + MovedAttribute("FancyURLopener", "urllib", "urllib.request"), + MovedAttribute("proxy_bypass", "urllib", "urllib.request"), + MovedAttribute("parse_http_list", "urllib2", "urllib.request"), + MovedAttribute("parse_keqv_list", "urllib2", "urllib.request"), +] +for attr in _urllib_request_moved_attributes: + setattr(Module_six_moves_urllib_request, attr.name, attr) +del attr + +Module_six_moves_urllib_request._moved_attributes = _urllib_request_moved_attributes + +_importer._add_module(Module_six_moves_urllib_request(__name__ + ".moves.urllib.request"), + "moves.urllib_request", "moves.urllib.request") + + +class Module_six_moves_urllib_response(_LazyModule): + + """Lazy loading of moved objects in six.moves.urllib_response""" + + +_urllib_response_moved_attributes = [ + MovedAttribute("addbase", "urllib", "urllib.response"), + MovedAttribute("addclosehook", "urllib", "urllib.response"), + MovedAttribute("addinfo", 
"urllib", "urllib.response"), + MovedAttribute("addinfourl", "urllib", "urllib.response"), +] +for attr in _urllib_response_moved_attributes: + setattr(Module_six_moves_urllib_response, attr.name, attr) +del attr + +Module_six_moves_urllib_response._moved_attributes = _urllib_response_moved_attributes + +_importer._add_module(Module_six_moves_urllib_response(__name__ + ".moves.urllib.response"), + "moves.urllib_response", "moves.urllib.response") + + +class Module_six_moves_urllib_robotparser(_LazyModule): + + """Lazy loading of moved objects in six.moves.urllib_robotparser""" + + +_urllib_robotparser_moved_attributes = [ + MovedAttribute("RobotFileParser", "robotparser", "urllib.robotparser"), +] +for attr in _urllib_robotparser_moved_attributes: + setattr(Module_six_moves_urllib_robotparser, attr.name, attr) +del attr + +Module_six_moves_urllib_robotparser._moved_attributes = _urllib_robotparser_moved_attributes + +_importer._add_module(Module_six_moves_urllib_robotparser(__name__ + ".moves.urllib.robotparser"), + "moves.urllib_robotparser", "moves.urllib.robotparser") + + +class Module_six_moves_urllib(types.ModuleType): + + """Create a six.moves.urllib namespace that resembles the Python 3 namespace""" + __path__ = [] # mark as package + parse = _importer._get_module("moves.urllib_parse") + error = _importer._get_module("moves.urllib_error") + request = _importer._get_module("moves.urllib_request") + response = _importer._get_module("moves.urllib_response") + robotparser = _importer._get_module("moves.urllib_robotparser") + + def __dir__(self): + return ['parse', 'error', 'request', 'response', 'robotparser'] + +_importer._add_module(Module_six_moves_urllib(__name__ + ".moves.urllib"), + "moves.urllib") + + +def add_move(move): + """Add an item to six.moves.""" + setattr(_MovedItems, move.name, move) + + +def remove_move(name): + """Remove item from six.moves.""" + try: + delattr(_MovedItems, name) + except AttributeError: + try: + del moves.__dict__[name] + 
except KeyError: + raise AttributeError("no such move, %r" % (name,)) + + +if PY3: + _meth_func = "__func__" + _meth_self = "__self__" + + _func_closure = "__closure__" + _func_code = "__code__" + _func_defaults = "__defaults__" + _func_globals = "__globals__" +else: + _meth_func = "im_func" + _meth_self = "im_self" + + _func_closure = "func_closure" + _func_code = "func_code" + _func_defaults = "func_defaults" + _func_globals = "func_globals" + + +try: + advance_iterator = next +except NameError: + def advance_iterator(it): + return it.next() +next = advance_iterator + + +try: + callable = callable +except NameError: + def callable(obj): + return any("__call__" in klass.__dict__ for klass in type(obj).__mro__) + + +if PY3: + def get_unbound_function(unbound): + return unbound + + create_bound_method = types.MethodType + + def create_unbound_method(func, cls): + return func + + Iterator = object +else: + def get_unbound_function(unbound): + return unbound.im_func + + def create_bound_method(func, obj): + return types.MethodType(func, obj, obj.__class__) + + def create_unbound_method(func, cls): + return types.MethodType(func, None, cls) + + class Iterator(object): + + def next(self): + return type(self).__next__(self) + + callable = callable +_add_doc(get_unbound_function, + """Get the function out of a possibly unbound function""") + + +get_method_function = operator.attrgetter(_meth_func) +get_method_self = operator.attrgetter(_meth_self) +get_function_closure = operator.attrgetter(_func_closure) +get_function_code = operator.attrgetter(_func_code) +get_function_defaults = operator.attrgetter(_func_defaults) +get_function_globals = operator.attrgetter(_func_globals) + + +if PY3: + def iterkeys(d, **kw): + return iter(d.keys(**kw)) + + def itervalues(d, **kw): + return iter(d.values(**kw)) + + def iteritems(d, **kw): + return iter(d.items(**kw)) + + def iterlists(d, **kw): + return iter(d.lists(**kw)) + + viewkeys = operator.methodcaller("keys") + + viewvalues = 
operator.methodcaller("values") + + viewitems = operator.methodcaller("items") +else: + def iterkeys(d, **kw): + return d.iterkeys(**kw) + + def itervalues(d, **kw): + return d.itervalues(**kw) + + def iteritems(d, **kw): + return d.iteritems(**kw) + + def iterlists(d, **kw): + return d.iterlists(**kw) + + viewkeys = operator.methodcaller("viewkeys") + + viewvalues = operator.methodcaller("viewvalues") + + viewitems = operator.methodcaller("viewitems") + +_add_doc(iterkeys, "Return an iterator over the keys of a dictionary.") +_add_doc(itervalues, "Return an iterator over the values of a dictionary.") +_add_doc(iteritems, + "Return an iterator over the (key, value) pairs of a dictionary.") +_add_doc(iterlists, + "Return an iterator over the (key, [values]) pairs of a dictionary.") + + +if PY3: + def b(s): + return s.encode("latin-1") + + def u(s): + return s + unichr = chr + import struct + int2byte = struct.Struct(">B").pack + del struct + byte2int = operator.itemgetter(0) + indexbytes = operator.getitem + iterbytes = iter + import io + StringIO = io.StringIO + BytesIO = io.BytesIO + del io + _assertCountEqual = "assertCountEqual" + if sys.version_info[1] <= 1: + _assertRaisesRegex = "assertRaisesRegexp" + _assertRegex = "assertRegexpMatches" + _assertNotRegex = "assertNotRegexpMatches" + else: + _assertRaisesRegex = "assertRaisesRegex" + _assertRegex = "assertRegex" + _assertNotRegex = "assertNotRegex" +else: + def b(s): + return s + # Workaround for standalone backslash + + def u(s): + return unicode(s.replace(r'\\', r'\\\\'), "unicode_escape") + unichr = unichr + int2byte = chr + + def byte2int(bs): + return ord(bs[0]) + + def indexbytes(buf, i): + return ord(buf[i]) + iterbytes = functools.partial(itertools.imap, ord) + import StringIO + StringIO = BytesIO = StringIO.StringIO + _assertCountEqual = "assertItemsEqual" + _assertRaisesRegex = "assertRaisesRegexp" + _assertRegex = "assertRegexpMatches" + _assertNotRegex = "assertNotRegexpMatches" +_add_doc(b, 
"""Byte literal""") +_add_doc(u, """Text literal""") + + +def assertCountEqual(self, *args, **kwargs): + return getattr(self, _assertCountEqual)(*args, **kwargs) + + +def assertRaisesRegex(self, *args, **kwargs): + return getattr(self, _assertRaisesRegex)(*args, **kwargs) + + +def assertRegex(self, *args, **kwargs): + return getattr(self, _assertRegex)(*args, **kwargs) + + +def assertNotRegex(self, *args, **kwargs): + return getattr(self, _assertNotRegex)(*args, **kwargs) + + +if PY3: + exec_ = getattr(moves.builtins, "exec") + + def reraise(tp, value, tb=None): + try: + if value is None: + value = tp() + if value.__traceback__ is not tb: + raise value.with_traceback(tb) + raise value + finally: + value = None + tb = None + +else: + def exec_(_code_, _globs_=None, _locs_=None): + """Execute code in a namespace.""" + if _globs_ is None: + frame = sys._getframe(1) + _globs_ = frame.f_globals + if _locs_ is None: + _locs_ = frame.f_locals + del frame + elif _locs_ is None: + _locs_ = _globs_ + exec("""exec _code_ in _globs_, _locs_""") + + exec_("""def reraise(tp, value, tb=None): + try: + raise tp, value, tb + finally: + tb = None +""") + + +if sys.version_info[:2] > (3,): + exec_("""def raise_from(value, from_value): + try: + raise value from from_value + finally: + value = None +""") +else: + def raise_from(value, from_value): + raise value + + +print_ = getattr(moves.builtins, "print", None) +if print_ is None: + def print_(*args, **kwargs): + """The new-style print function for Python 2.4 and 2.5.""" + fp = kwargs.pop("file", sys.stdout) + if fp is None: + return + + def write(data): + if not isinstance(data, basestring): + data = str(data) + # If the file has an encoding, encode unicode with it. 
+ if (isinstance(fp, file) and + isinstance(data, unicode) and + fp.encoding is not None): + errors = getattr(fp, "errors", None) + if errors is None: + errors = "strict" + data = data.encode(fp.encoding, errors) + fp.write(data) + want_unicode = False + sep = kwargs.pop("sep", None) + if sep is not None: + if isinstance(sep, unicode): + want_unicode = True + elif not isinstance(sep, str): + raise TypeError("sep must be None or a string") + end = kwargs.pop("end", None) + if end is not None: + if isinstance(end, unicode): + want_unicode = True + elif not isinstance(end, str): + raise TypeError("end must be None or a string") + if kwargs: + raise TypeError("invalid keyword arguments to print()") + if not want_unicode: + for arg in args: + if isinstance(arg, unicode): + want_unicode = True + break + if want_unicode: + newline = unicode("\n") + space = unicode(" ") + else: + newline = "\n" + space = " " + if sep is None: + sep = space + if end is None: + end = newline + for i, arg in enumerate(args): + if i: + write(sep) + write(arg) + write(end) +if sys.version_info[:2] < (3, 3): + _print = print_ + + def print_(*args, **kwargs): + fp = kwargs.get("file", sys.stdout) + flush = kwargs.pop("flush", False) + _print(*args, **kwargs) + if flush and fp is not None: + fp.flush() + +_add_doc(reraise, """Reraise an exception.""") + +if sys.version_info[0:2] < (3, 4): + # This does exactly the same what the :func:`py3:functools.update_wrapper` + # function does on Python versions after 3.2. It sets the ``__wrapped__`` + # attribute on ``wrapper`` object and it doesn't raise an error if any of + # the attributes mentioned in ``assigned`` and ``updated`` are missing on + # ``wrapped`` object. 
+ def _update_wrapper(wrapper, wrapped, + assigned=functools.WRAPPER_ASSIGNMENTS, + updated=functools.WRAPPER_UPDATES): + for attr in assigned: + try: + value = getattr(wrapped, attr) + except AttributeError: + continue + else: + setattr(wrapper, attr, value) + for attr in updated: + getattr(wrapper, attr).update(getattr(wrapped, attr, {})) + wrapper.__wrapped__ = wrapped + return wrapper + _update_wrapper.__doc__ = functools.update_wrapper.__doc__ + + def wraps(wrapped, assigned=functools.WRAPPER_ASSIGNMENTS, + updated=functools.WRAPPER_UPDATES): + return functools.partial(_update_wrapper, wrapped=wrapped, + assigned=assigned, updated=updated) + wraps.__doc__ = functools.wraps.__doc__ + +else: + wraps = functools.wraps + + +def with_metaclass(meta, *bases): + """Create a base class with a metaclass.""" + # This requires a bit of explanation: the basic idea is to make a dummy + # metaclass for one level of class instantiation that replaces itself with + # the actual metaclass. + class metaclass(type): + + def __new__(cls, name, this_bases, d): + if sys.version_info[:2] >= (3, 7): + # This version introduced PEP 560 that requires a bit + # of extra care (we mimic what is done by __build_class__). 
+ resolved_bases = types.resolve_bases(bases) + if resolved_bases is not bases: + d['__orig_bases__'] = bases + else: + resolved_bases = bases + return meta(name, resolved_bases, d) + + @classmethod + def __prepare__(cls, name, this_bases): + return meta.__prepare__(name, bases) + return type.__new__(metaclass, 'temporary_class', (), {}) + + +def add_metaclass(metaclass): + """Class decorator for creating a class with a metaclass.""" + def wrapper(cls): + orig_vars = cls.__dict__.copy() + slots = orig_vars.get('__slots__') + if slots is not None: + if isinstance(slots, str): + slots = [slots] + for slots_var in slots: + orig_vars.pop(slots_var) + orig_vars.pop('__dict__', None) + orig_vars.pop('__weakref__', None) + if hasattr(cls, '__qualname__'): + orig_vars['__qualname__'] = cls.__qualname__ + return metaclass(cls.__name__, cls.__bases__, orig_vars) + return wrapper + + +def ensure_binary(s, encoding='utf-8', errors='strict'): + """Coerce **s** to six.binary_type. + + For Python 2: + - `unicode` -> encoded to `str` + - `str` -> `str` + + For Python 3: + - `str` -> encoded to `bytes` + - `bytes` -> `bytes` + """ + if isinstance(s, binary_type): + return s + if isinstance(s, text_type): + return s.encode(encoding, errors) + raise TypeError("not expecting type '%s'" % type(s)) + + +def ensure_str(s, encoding='utf-8', errors='strict'): + """Coerce *s* to `str`. + + For Python 2: + - `unicode` -> encoded to `str` + - `str` -> `str` + + For Python 3: + - `str` -> `str` + - `bytes` -> decoded to `str` + """ + # Optimization: Fast return for the common case. + if type(s) is str: + return s + if PY2 and isinstance(s, text_type): + return s.encode(encoding, errors) + elif PY3 and isinstance(s, binary_type): + return s.decode(encoding, errors) + elif not isinstance(s, (text_type, binary_type)): + raise TypeError("not expecting type '%s'" % type(s)) + return s + + +def ensure_text(s, encoding='utf-8', errors='strict'): + """Coerce *s* to six.text_type. 
+ + For Python 2: + - `unicode` -> `unicode` + - `str` -> `unicode` + + For Python 3: + - `str` -> `str` + - `bytes` -> decoded to `str` + """ + if isinstance(s, binary_type): + return s.decode(encoding, errors) + elif isinstance(s, text_type): + return s + else: + raise TypeError("not expecting type '%s'" % type(s)) + + +def python_2_unicode_compatible(klass): + """ + A class decorator that defines __unicode__ and __str__ methods under Python 2. + Under Python 3 it does nothing. + + To support Python 2 and 3 with a single code base, define a __str__ method + returning text and apply this decorator to the class. + """ + if PY2: + if '__str__' not in klass.__dict__: + raise ValueError("@python_2_unicode_compatible cannot be applied " + "to %s because it doesn't define __str__()." % + klass.__name__) + klass.__unicode__ = klass.__str__ + klass.__str__ = lambda self: self.__unicode__().encode('utf-8') + return klass + + +# Complete the moves implementation. +# This code is at the end of this module to speed up module loading. +# Turn this module into a package. +__path__ = [] # required for PEP 302 and PEP 451 +__package__ = __name__ # see PEP 366 @ReservedAssignment +if globals().get("__spec__") is not None: + __spec__.submodule_search_locations = [] # PEP 451 @UndefinedVariable +# Remove other six meta path importers, since they cause problems. This can +# happen if six is removed from sys.modules and then reloaded. (Setuptools does +# this for some reason.) +if sys.meta_path: + for i, importer in enumerate(sys.meta_path): + # Here's some real nastiness: Another "instance" of the six module might + # be floating around. Therefore, we can't use isinstance() to check for + # the six meta path importer, since the other six instance will have + # inserted an importer with different class. 
+ if (type(importer).__name__ == "_SixMetaPathImporter" and + importer.name == __name__): + del sys.meta_path[i] + break + del i, importer +# Finally, add the importer to the meta path import hook. +sys.meta_path.append(_importer) diff --git a/python/external/streamexpect.py b/python/external/streamexpect.py new file mode 100644 index 0000000..75ed797 --- /dev/null +++ b/python/external/streamexpect.py @@ -0,0 +1,823 @@ +# -*- coding: utf-8 -*- +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. +# +# Copyright (c) 2015 Digi International Inc. All Rights Reserved. + +# Downloaded from https://github.com/digidotcom/python-streamexpect + +import collections +import re +from . import six +import socket +import sys +import time +import unicodedata + + +__version__ = '0.2.1' + + +class SequenceMatch(object): + """Information about a match that has a concept of ordering.""" + + def __init__(self, searcher, match, start, end): + """ + :param Searcher searcher: The :class:`Searcher` that found the match + :param match: Portion of sequence that triggered the match + :param int start: Index of start of match + :param int end: Index of item directly after match + """ + self.searcher = searcher + self.match = match + self.start = int(start) + self.end = int(end) + + def __repr__(self): + return '{}({!r}, match={!r}, start={}, end={})'.format( + self.__class__.__name__, self.searcher, self.match, self.start, + self.end) + + +class RegexMatch(SequenceMatch): + """Information about a match from a regex.""" + + def __init__(self, searcher, match, start, end, groups): + """ + :param Searcher searcher: The :class:`Searcher` that found the match + :param match: Portion of sequence that triggered the match + :param int start: Index of start of match + :param int end: Index of item directly after match + :param tuple groups: Contains the 
matched subgroups if the regex
+            contained groups, otherwise ``None``
+        """
+        super(RegexMatch, self).__init__(searcher, match, start, end)
+        self.groups = groups
+
+    def __repr__(self):
+        # Closing parenthesis belongs at the very end of the repr, after
+        # ``groups`` (upstream text had it misplaced after ``start``).
+        return '{}({!r}, match={!r}, start={}, end={}, groups={!r})'.format(
+            self.__class__.__name__, self.searcher, self.match, self.start,
+            self.end, self.groups)
+
+
+class ExpectTimeout(Exception):
+    """Exception raised when *expect* call exceeds a timeout."""
+
+
+class Searcher(object):
+    """Base class for searching buffers.
+
+    Implements the base class for *Searcher* types, which are used by the
+    library to determine whether or not a particular buffer contains a *match*.
+    The type of the match is determined by the *Searcher* implementation: it
+    may be bytes, text, or something else entirely.
+
+    To conform to the *Searcher* interface, a class must implement one method
+    *search* and one read-only property *match_type*. The buffer passed to
+    the *search* method must match the type returned by the *match_type*
+    property, and *search* must raise a `TypeError` if it does not. The
+    member function :func:`_check_type` exists to provide this functionality
+    for subclass implementations.
+    """
+
+    def __repr__(self):
+        return '{}()'.format(self.__class__.__name__)
+
+    def search(self, buf):
+        """Search the provided buffer for a *match*.
+
+        Search the provided buffer for a *match*. What exactly a *match* means
+        is defined by the *Searcher* implementation. If the *match* is found,
+        returns a `SequenceMatch` object, otherwise returns ``None``.
+
+        :param buf: Buffer to search for a match.
+        """
+        raise NotImplementedError('search function must be provided')
+
+    @property
+    def match_type(self):
+        """Read-only property that returns type matched by this *Searcher*"""
+        raise NotImplementedError('match_type must be provided')
+
+    def _check_type(self, value):
+        """Checks that *value* matches the type of this *Searcher*.
+ + Checks that *value* matches the type of this *Searcher*, returning the + value if it does and raising a `TypeError` if it does not. + + :return: *value* if type of *value* matches type of this *Searcher*. + :raises TypeError: if type of *value* does not match the type of this + *Searcher* + """ + if not isinstance(value, self.match_type): + raise TypeError('Type ' + str(type(value)) + ' does not match ' + 'expected type ' + str(self.match_type)) + else: + return value + + +class BytesSearcher(Searcher): + """Binary/ASCII searcher. + + A binary/ASCII searcher. Matches when the pattern passed to the + constructor is found in the input buffer. + + Note that this class only operates on binary types. That means that in + Python 3, it will fail on strings, as strings are Unicode by default. In + Python 2 this class will fail on the Unicode type, as strings are ASCII by + default. + """ + + def __init__(self, b): + """ + :param b: Bytes to search for. Must be a binary type (i.e. bytes) + """ + self._bytes = self._check_type(b) + + def __repr__(self): + return '{}({!r})'.format(self.__class__.__name__, self._bytes) + + @property + def match_type(self): + return six.binary_type + + def search(self, buf): + """Search the provided buffer for matching bytes. + + Search the provided buffer for matching bytes. If the *match* is found, + returns a :class:`SequenceMatch` object, otherwise returns ``None``. + + :param buf: Buffer to search for a match. + :return: :class:`SequenceMatch` if matched, None if no match was found. + """ + idx = self._check_type(buf).find(self._bytes) + if idx < 0: + return None + else: + start = idx + end = idx + len(self._bytes) + return SequenceMatch(self, buf[start:end], start, end) + + +class TextSearcher(Searcher): + """Plain text searcher. + + A plain-text searcher. Matches when the text passed to the constructor is + found in the input buffer. + + Note that this class operates only on text types (i.e. 
Unicode) and raises + a TypeError if used with binary data. Use the :class:`BytesSearcher` type + to search binary or ASCII text. + + To make sure that modified (accented, grave, etc.) characters are matched + accurately, the input text is converted to the Unicode canonical composed + form before being used to match. + """ + + FORM = 'NFKC' + + def __init__(self, text): + """ + :param text: Text to search for. Must be a text type (i.e. Unicode) + """ + super(TextSearcher, self).__init__() + self._check_type(text) + self._text = unicodedata.normalize(self.FORM, text) + + def __repr__(self): + return '{}({!r})'.format(self.__class__.__name__, self._text) + + @property + def match_type(self): + return six.text_type + + def search(self, buf): + """Search the provided buffer for matching text. + + Search the provided buffer for matching text. If the *match* is found, + returns a :class:`SequenceMatch` object, otherwise returns ``None``. + + :param buf: Buffer to search for a match. + :return: :class:`SequenceMatch` if matched, None if no match was found. + """ + self._check_type(buf) + normalized = unicodedata.normalize(self.FORM, buf) + idx = normalized.find(self._text) + if idx < 0: + return None + start = idx + end = idx + len(self._text) + return SequenceMatch(self, normalized[start:end], start, end) + + +class RegexSearcher(Searcher): + """Regular expression searcher. + + Searches for a match in the stream that matches the provided regular + expression. + + This class follows the Python 3 model for dealing with binary versus text + patterns, raising a `TypeError` if mixed binary/text is used. This means + that a *RegexSearcher* that is instantiated with binary data will raise a + `TypeError` if used on text, and a *RegexSearcher* instantiated with text + will raise a `TypeError` on binary data. 
+    """
+
+    def __init__(self, pattern, regex_options=0):
+        """
+        :param pattern: The regex to search for, as a single compiled regex
+            or a string that will be processed as a regex.
+        :param regex_options: Options passed to the regex engine.
+        """
+        super(RegexSearcher, self).__init__()
+        self._regex = re.compile(pattern, regex_options)
+
+    def __repr__(self):
+        return '{}(re.compile({!r}))'.format(self.__class__.__name__,
+                                             self._regex.pattern)
+
+    @property
+    def match_type(self):
+        return type(self._regex.pattern)
+
+    def search(self, buf):
+        """Search the provided buffer for a match to the object's regex.
+
+        Search the provided buffer for a match to the object's regex. If the
+        *match* is found, returns a :class:`RegexMatch` object, otherwise
+        returns ``None``.
+
+        :param buf: Buffer to search for a match.
+        :return: :class:`RegexMatch` if matched, None if no match was found.
+        """
+        match = self._regex.search(self._check_type(buf))
+        if match is not None:
+            start = match.start()
+            end = match.end()
+            return RegexMatch(self, buf[start:end], start, end, match.groups())
+
+
+def _flatten(n):
+    """Recursively flatten a mixed sequence of sub-sequences and items"""
+    # ``collections.Sequence`` was removed in Python 3.10 (the ABCs moved to
+    # ``collections.abc`` in 3.3); import from the right place while keeping
+    # the Python 2 fallback this vendored copy otherwise supports via six.
+    try:
+        from collections.abc import Sequence
+    except ImportError:  # Python 2
+        from collections import Sequence
+    if isinstance(n, Sequence):
+        for x in n:
+            for y in _flatten(x):
+                yield y
+    else:
+        yield n
+
+
+class SearcherCollection(Searcher, list):
+    """Collect multiple `Searcher` objects into one.
+
+    Collect multiple `Searcher` instances into a single `Searcher` instance.
+    This is different than simply looping over a list of searchers, as this
+    class will always find the earliest match from any of its sub-searchers
+    (i.e. the match with the smallest index).
+
+    Note that this class requires that all of its sub-searchers have the same
+    *match_type*.
+    """
+
+    def __init__(self, *searchers):
+        """
+        :param searchers: One or more :class:`Searcher` implementations.
+ """ + super(SearcherCollection, self).__init__() + self.extend(_flatten(searchers)) + if not self: + raise ValueError(self.__class__.__name__ + ' requires at least ' + 'one sub-searcher to be specified') + + # Check that all searchers are valid + for searcher in self: + try: + getattr(searcher, 'search') + except AttributeError: + raise TypeError('missing required attribute "search"') + try: + getattr(searcher, 'match_type') + except AttributeError: + raise TypeError('missing required attribute "match_type"') + + # Check that all searchers are the same match type + match_type = self[0].match_type + if not all(map(lambda x: x.match_type == match_type, self)): + raise ValueError(self.__class__.__name__ + ' requires that all ' + 'sub-searchers implement the same match_type') + self._match_type = match_type + + def __repr__(self): + return '{}({!r})'.format(self.__class__.__name__, list(self)) + + @property + def match_type(self): + return self._match_type + + def search(self, buf): + """Search the provided buffer for a match to any sub-searchers. + + Search the provided buffer for a match to any of this collection's + sub-searchers. If a single matching sub-searcher is found, returns that + sub-searcher's *match* object. If multiple matches are found, the match + with the smallest index is returned. If no matches are found, returns + ``None``. + + :param buf: Buffer to search for a match. + :return: :class:`RegexMatch` if matched, None if no match was found. + """ + self._check_type(buf) + best_match = None + best_index = sys.maxsize + for searcher in self: + match = searcher.search(buf) + if match and match.start < best_index: + best_match = match + best_index = match.start + return best_match + + +class StreamAdapter(object): + """Adapter to match varying stream objects to a single interface. + + Despite the existence of the Python stream interface and file-like objects, + there are actually a number of subtly different implementations of streams + within Python. 
In addition, there are stream-like constructs like sockets + that use a different interface entirely (*send*/*recv* versus + *read*/*write*). + + This class provides a base adapter that can be used to convert anything + even remotely stream-like into a form that can consistently be used by + implementations of `Expecter`. The key method is :func:`poll`, which must + *always* provide a blocking interface to the underlying stream, and must + *also* provide a reliable timeout mechanism. The exact method to achieve + these two goals is implementation dependent, and a particular + implementation may be used to meet the need at hand. + + This class also automatically delegates any non-existent attributes to the + underlying stream object. This allows the adapter to be used identically to + the stream. + """ + def __init__(self, stream): + """:param stream: Stream object to wrap over.""" + self.stream = stream + + def __getattr__(self, attr): + return getattr(self.stream, attr) + + def __repr__(self): + return '{}({!r})'.format(self.__class__.__name__, self.stream) + + def poll(self, timeout): + """Unified blocking read access to the underlying stream. + + All subclasses of :class:`StreamAdapter` must implement this method. + Once called, the method must either: + + - Return new read data whenever it becomes available, or + - Raise an `ExpectTimeout` exception if timeout is exceeded. + + The amount of data to return from each call is implementation + dependent, but it is important that either all data is returned from + the function, or that the data be somehow returned to the stream. In + other words, any data not returned must still be available the next + time the `poll` method is called. + + Note that there is no "wait forever" functionality: either some new + data must be returned or an exception must occur in a finite amount of + time. 
It is also important that, if there is a timeout, the method + raise the exception as soon after the timeout occurred as is reasonably + possible. + """ + raise NotImplementedError(self.__class__.__name__ + + '.poll must be implemented') + + +class PollingStreamAdapterMixin(object): + """Add *poll_period* and *max_read* properties to a `StreamAdapter`""" + + @property + def poll_period(self): + return self._poll_period + + @poll_period.setter + def poll_period(self, value): + value = float(value) + if value <= 0: + raise ValueError('poll_period must be greater than 0') + self._poll_period = value + + @property + def max_read(self): + return self._max_read + + @max_read.setter + def max_read(self, value): + value = int(value) + if value < 0: + raise ValueError('max_read must be greater than or equal to 0') + self._max_read = value + + +class PollingStreamAdapter(StreamAdapter, PollingStreamAdapterMixin): + """A :class:`StreamAdapter` that polls a non-blocking stream. + + Polls a non-blocking stream of data until new data is available or a + timeout is exceeded. It is *VERY IMPORTANT* that the underlying stream be + non-blocking. + """ + + def __init__(self, stream, poll_period=0.1, max_read=1024): + """ + :param stream: Stream to poll for data. + :param float poll_period: Time (in seconds) between polls of the + stream. + :param int max_read: The maximum number of bytes/characters to read + from the stream at one time. + """ + super(PollingStreamAdapter, self).__init__(stream) + self.poll_period = poll_period + self.max_read = max_read + + def poll(self, timeout): + """ + :param float timeout: Timeout in seconds. 
+ """ + timeout = float(timeout) + end_time = time.time() + timeout + while True: + # Keep reading until data is received or timeout + incoming = self.stream.read(self._max_read) + if incoming: + return incoming + if (end_time - time.time()) < 0: + raise ExpectTimeout() + time.sleep(self._poll_period) + + +class PollingSocketStreamAdapter(StreamAdapter, PollingStreamAdapterMixin): + """A :class:`StreamAdapter` that polls a non-blocking socket. + + Polls a non-blocking socket for data until new data is available or a + timeout is exceeded. + """ + + def __init__(self, sock, poll_period=0.1, max_read=1024): + """ + :param sock: Socket to poll for data. + :param float poll_period: Time (in seconds) between poll of the socket. + :param int max_read: The maximum number of bytes/characters to read + from the socket at one time. + """ + super(PollingSocketStreamAdapter, self).__init__(sock) + self.poll_period = poll_period + self.max_read = max_read + + def poll(self, timeout): + """ + :param float timeout: Timeout in seconds. A timeout that is less than + the poll_period will still cause a single read that may take up to + poll_period seconds. + """ + now = time.time() + end_time = now + float(timeout) + prev_timeout = self.stream.gettimeout() + self.stream.settimeout(self._poll_period) + incoming = None + try: + while (end_time - now) >= 0: + try: + incoming = self.stream.recv(self._max_read) + except socket.timeout: + pass + if incoming: + return incoming + now = time.time() + raise ExpectTimeout() + finally: + self.stream.settimeout(prev_timeout) + + +class ExpectBytesMixin(object): + + def expect_bytes(self, b, timeout=3): + """Wait for a match to the bytes in *b* to appear on the stream. + + Waits for input matching the bytes *b* for up to *timeout* seconds. + If a match is found, a :class:`SequenceMatch` result is returned. If + no match is found within *timeout* seconds, raise an + :class:`ExpectTimeout` exception. + + :param b: The byte pattern to search for. 
+ :param float timeout: Timeout in seconds. + :return: :class:`SequenceMatch` if matched, None if no match was found. + """ + return self.expect(BytesSearcher(b), timeout) + + +class ExpectTextMixin(object): + + def expect_text(self, text, timeout=3): + """Wait for a match to the text in *text* to appear on the stream. + + Waits for input matching the text *text* for up to *timeout* + seconds. If a match is found, a :class:`SequenceMatch` result is + returned. If no match is found within *timeout* seconds, raise an + :class:`ExpectTimeout` exception. + + :param text: The plain-text pattern to search for. + :param float timeout: Timeout in seconds. + :return: :class:`SequenceMatch` if matched, None if no match was found. + """ + return self.expect(TextSearcher(text), timeout) + + +class ExpectRegexMixin(object): + + def expect_regex(self, pattern, timeout=3, regex_options=0): + """Wait for a match to the regex in *pattern* to appear on the stream. + + Waits for input matching the regex *pattern* for up to *timeout* + seconds. If a match is found, a :class:`RegexMatch` result is returned. + If no match is found within *timeout* seconds, raise an + :class:`ExpectTimeout` exception. + + :param pattern: The pattern to search for, as a single compiled regex + or a string that will be processed as a regex. + :param float timeout: Timeout in seconds. + :param regex_options: Options passed to the regex engine. + :return: :class:`RegexMatch` if matched, None if no match was found. + """ + return self.expect(RegexSearcher(pattern, regex_options), timeout) + + +class Expecter(object): + """Base class for consuming input and waiting for a pattern to appear. + + Implements the base class for *Expecter* types, which wrap over a + :class:`StreamAdapter` type and provide methods for applying a + :class:`Searcher` to the received data. Any attributes not part of this + class are delegated to the underlying :class:`StreamAdapter` type. 
+ """ + + def __init__(self, stream_adapter, input_callback, window, close_adapter): + """ + :param StreamAdapter stream_adapter: The :class:`StreamAdapter` object + to receive data from. + :param function input_callback: Callback function with one parameter + that is called each time new data is read from the + *stream_adapter*. + :param int window: Number of historical objects (bytes, characters, + etc.) to buffer. + :param bool close_adapter: If ``True``, and the Expecter is used as a + context manager, closes the adapter at the end of the context + manager. + """ + self.stream_adapter = stream_adapter + if not input_callback: + self.input_callback = lambda _: None + else: + self.input_callback = input_callback + self.window = window + self.close_adapter = close_adapter + + # Delegate undefined methods to underlying stream + def __getattr__(self, attr): + return getattr(self._stream_adapter, attr) + + def __enter__(self): + return self + + def __exit__(self, type_, value, traceback): + if self.close_adapter: + self._stream_adapter.close() + return False + + @property + def stream_adapter(self): + return self._stream_adapter + + @stream_adapter.setter + def stream_adapter(self, value): + try: + getattr(value, 'poll') + except AttributeError: + raise TypeError('stream_adapter must define "poll" method') + self._stream_adapter = value + + @property + def window(self): + return self._window + + @window.setter + def window(self, value): + value = int(value) + if value < 1: + raise ValueError('window must be at least 1') + self._window = value + + def expect(self, searcher, timeout): + """Apply *searcher* to underlying :class:`StreamAdapter` + + :param Searcher searcher: :class:`Searcher` to apply to underlying + stream. + :param float timeout: Timeout in seconds. 
+ """ + raise NotImplementedError('Expecter must implement "expect"') + + +class BytesExpecter(Expecter, ExpectBytesMixin, ExpectRegexMixin): + """:class:`Expecter` interface for searching a byte-oriented stream.""" + + def __init__(self, stream_adapter, input_callback=None, window=1024, + close_adapter=True): + """ + :param StreamAdapter stream_adapter: The :class:`StreamAdapter` object + to receive data from. + :param function input_callback: Callback function with one parameter + that is called each time new data is read from the + *stream_adapter*. + :param int window: Number of historical bytes to buffer. + """ + super(BytesExpecter, self).__init__(stream_adapter, input_callback, + window, close_adapter) + self._history = six.binary_type() + self._start = 0 + + def expect(self, searcher, timeout=3): + """Wait for input matching *searcher* + + Waits for input matching *searcher* for up to *timeout* seconds. If + a match is found, the match result is returned (the specific type of + returned result depends on the :class:`Searcher` type). If no match is + found within *timeout* seconds, raise an :class:`ExpectTimeout` + exception. + + :param Searcher searcher: :class:`Searcher` to apply to underlying + stream. + :param float timeout: Timeout in seconds. 
+ """ + timeout = float(timeout) + end = time.time() + timeout + match = searcher.search(self._history[self._start:]) + while not match: + # poll() will raise ExpectTimeout if time is exceeded + incoming = self._stream_adapter.poll(end - time.time()) + self.input_callback(incoming) + self._history += incoming + match = searcher.search(self._history[self._start:]) + trimlength = len(self._history) - self._window + if trimlength > 0: + self._start -= trimlength + self._history = self._history[trimlength:] + + self._start += match.end + if (self._start < 0): + self._start = 0 + + return match + + +class TextExpecter(Expecter, ExpectTextMixin, ExpectRegexMixin): + """:class:`Expecter` interface for searching a text-oriented stream.""" + + def __init__(self, stream_adapter, input_callback=None, window=1024, + close_adapter=True): + """ + :param StreamAdapter stream_adapter: The :class:`StreamAdapter` object + to receive data from. + :param function input_callback: Callback function with one parameter + that is called each time new data is read from the + *stream_adapter*. + :param int window: Number of historical characters to buffer. + """ + super(TextExpecter, self).__init__(stream_adapter, input_callback, + window, close_adapter) + self._history = six.text_type() + self._start = 0 + + def expect(self, searcher, timeout=3): + """Wait for input matching *searcher*. + + Waits for input matching *searcher* for up to *timeout* seconds. If + a match is found, the match result is returned (the specific type of + returned result depends on the :class:`Searcher` type). If no match is + found within *timeout* seconds, raise an :class:`ExpectTimeout` + exception. + + :param Searcher searcher: :class:`Searcher` to apply to underlying + stream. + :param float timeout: Timeout in seconds. 
+ """ + timeout = float(timeout) + end = time.time() + timeout + match = searcher.search(self._history[self._start:]) + while not match: + # poll() will raise ExpectTimeout if time is exceeded + incoming = self._stream_adapter.poll(end - time.time()) + self.input_callback(incoming) + self._history += incoming + match = searcher.search(self._history[self._start:]) + trimlength = len(self._history) - self._window + if trimlength > 0: + self._start -= trimlength + self._history = self._history[trimlength:] + + self._start += match.end + if (self._start < 0): + self._start = 0 + + return match + + +def _echo_text(value): + sys.stdout.write(value) + + +def _echo_bytes(value): + sys.stdout.write(value.decode('ascii', errors='backslashreplace')) + + +def wrap(stream, unicode=False, window=1024, echo=False, close_stream=True): + """Wrap a stream to implement expect functionality. + + This function provides a convenient way to wrap any Python stream (a + file-like object) or socket with an appropriate :class:`Expecter` class for + the stream type. The returned object adds an :func:`Expect.expect` method + to the stream, while passing normal stream functions like *read*/*recv* + and *write*/*send* through to the underlying stream. + + Here's an example of opening and wrapping a pair of network sockets:: + + import socket + import streamexpect + + source, drain = socket.socketpair() + expecter = streamexpect.wrap(drain) + source.sendall(b'this is a test') + match = expecter.expect_bytes(b'test', timeout=5) + + assert match is not None + + :param stream: The stream/socket to wrap. + :param bool unicode: If ``True``, the wrapper will be configured for + Unicode matching, otherwise matching will be done on binary. + :param int window: Historical characters to buffer. + :param bool echo: If ``True``, echoes received characters to stdout. + :param bool close_stream: If ``True``, and the wrapper is used as a context + manager, closes the stream at the end of the context manager. 
+ """ + if hasattr(stream, 'read'): + proxy = PollingStreamAdapter(stream) + elif hasattr(stream, 'recv'): + proxy = PollingSocketStreamAdapter(stream) + else: + raise TypeError('stream must have either read or recv method') + + if echo and unicode: + callback = _echo_text + elif echo and not unicode: + callback = _echo_bytes + else: + callback = None + + if unicode: + expecter = TextExpecter(proxy, input_callback=callback, window=window, + close_adapter=close_stream) + else: + expecter = BytesExpecter(proxy, input_callback=callback, window=window, + close_adapter=close_stream) + + return expecter + + +__all__ = [ + # Functions + 'wrap', + + # Expecter types + 'Expecter', + 'BytesExpecter', + 'TextExpecter', + + # Searcher types + 'Searcher', + 'BytesSearcher', + 'TextSearcher', + 'RegexSearcher', + 'SearcherCollection', + + # Match types + 'SequenceMatch', + 'RegexMatch', + + # StreamAdapter types + 'StreamAdapter', + 'PollingStreamAdapter', + 'PollingSocketStreamAdapter', + 'PollingStreamAdapterMixin', + + # Exceptions + 'ExpectTimeout', +]