diff --git a/.gitattributes b/.gitattributes new file mode 100644 index 0000000..848ccf3 --- /dev/null +++ b/.gitattributes @@ -0,0 +1,4 @@ +config/hilbert_cli_config.py ident +tools/hilbert.py ident +tools/hilbert-station ident + diff --git a/.gitignore b/.gitignore index 5750fca..b21007f 100644 --- a/.gitignore +++ b/.gitignore @@ -2,3 +2,10 @@ STATIONS*/ templates/compose bashttpd.log lastapp.cfg +*~ +*.pyc +__pycache__ +.idea +.tox/ +.cache/ +MANIFEST diff --git a/.travis.yml b/.travis.yml new file mode 100644 index 0000000..318dbe0 --- /dev/null +++ b/.travis.yml @@ -0,0 +1,27 @@ +language: python +sudo: false + +cache: + directories: + - $HOME/.cache/pip + +matrix: + include: + - python: 2.7 + env: TOXENV=py27 + - python: 3.4 + env: TOXENV=py34 + +install: + - ./.travis/install.sh + +script: + - source ~/.venv/bin/activate + - tox + +notifications: + email: + recipients: + - malex984+travis.cli@gmail.com + on_success: never + on_failure: always diff --git a/.travis/install.sh b/.travis/install.sh new file mode 100755 index 0000000..bfe0181 --- /dev/null +++ b/.travis/install.sh @@ -0,0 +1 @@ +pip install --upgrade tox diff --git a/Doxyfile b/Doxyfile new file mode 100644 index 0000000..c382bee --- /dev/null +++ b/Doxyfile @@ -0,0 +1,240 @@ +USE_MDFILE_AS_MAINPAGE = +DOXYFILE_ENCODING = UTF-8 +PROJECT_NAME = Hilbert +PROJECT_NUMBER = 0.0.1 +PROJECT_BRIEF = "Hilbert CLI (server part)" +PROJECT_LOGO = +OUTPUT_DIRECTORY = docs/doxy +CREATE_SUBDIRS = NO +OUTPUT_LANGUAGE = English +BRIEF_MEMBER_DESC = YES +REPEAT_BRIEF = YES +ABBREVIATE_BRIEF = +ALWAYS_DETAILED_SEC = NO +INLINE_INHERITED_MEMB = NO +FULL_PATH_NAMES = YES +STRIP_FROM_PATH = +STRIP_FROM_INC_PATH = +SHORT_NAMES = NO +JAVADOC_AUTOBRIEF = YES +QT_AUTOBRIEF = NO +MULTILINE_CPP_IS_BRIEF = YES +INHERIT_DOCS = YES +SEPARATE_MEMBER_PAGES = NO +TAB_SIZE = 4 +ALIASES = +TCL_SUBST = +OPTIMIZE_OUTPUT_FOR_C = NO +OPTIMIZE_OUTPUT_JAVA = YES +OPTIMIZE_FOR_FORTRAN = NO +OPTIMIZE_OUTPUT_VHDL = NO +EXTENSION_MAPPING = 
+MARKDOWN_SUPPORT = YES +BUILTIN_STL_SUPPORT = NO +CPP_CLI_SUPPORT = NO +SIP_SUPPORT = NO +IDL_PROPERTY_SUPPORT = YES +DISTRIBUTE_GROUP_DOC = NO +SUBGROUPING = YES +INLINE_GROUPED_CLASSES = NO +INLINE_SIMPLE_STRUCTS = NO +TYPEDEF_HIDES_STRUCT = NO +SYMBOL_CACHE_SIZE = 0 +LOOKUP_CACHE_SIZE = 0 +EXTRACT_ALL = YES +EXTRACT_PRIVATE = YES +EXTRACT_PACKAGE = NO +EXTRACT_STATIC = YES +EXTRACT_LOCAL_CLASSES = NO +EXTRACT_LOCAL_METHODS = NO +EXTRACT_ANON_NSPACES = NO +HIDE_UNDOC_MEMBERS = NO +HIDE_UNDOC_CLASSES = NO +HIDE_FRIEND_COMPOUNDS = NO +HIDE_IN_BODY_DOCS = NO +INTERNAL_DOCS = NO +CASE_SENSE_NAMES = NO +HIDE_SCOPE_NAMES = NO +SHOW_INCLUDE_FILES = YES +FORCE_LOCAL_INCLUDES = NO +INLINE_INFO = YES +SORT_MEMBER_DOCS = YES +SORT_BRIEF_DOCS = NO +SORT_MEMBERS_CTORS_1ST = NO +SORT_GROUP_NAMES = NO +SORT_BY_SCOPE_NAME = NO +STRICT_PROTO_MATCHING = NO +GENERATE_TODOLIST = YES +GENERATE_TESTLIST = YES +GENERATE_BUGLIST = YES +GENERATE_DEPRECATEDLIST= YES +ENABLED_SECTIONS = +MAX_INITIALIZER_LINES = 30 +SHOW_USED_FILES = YES +SHOW_FILES = YES +SHOW_NAMESPACES = YES +FILE_VERSION_FILTER = +LAYOUT_FILE = +CITE_BIB_FILES = +QUIET = NO +WARNINGS = YES +WARN_IF_UNDOCUMENTED = YES +WARN_IF_DOC_ERROR = YES +WARN_NO_PARAMDOC = NO +WARN_FORMAT = "$file:$line: $text" +WARN_LOGFILE = +INPUT = config tools +INPUT_ENCODING = UTF-8 +FILE_PATTERNS = "*.py" "*.md" +RECURSIVE = YES +EXCLUDE = +EXCLUDE_SYMLINKS = NO +EXCLUDE_PATTERNS = +EXCLUDE_SYMBOLS = +EXAMPLE_PATH = scripts +EXAMPLE_PATTERNS = +EXAMPLE_RECURSIVE = NO +IMAGE_PATH = +INPUT_FILTER = doxypy +FILTER_PATTERNS = +FILTER_SOURCE_FILES = YES +FILTER_SOURCE_PATTERNS = +SOURCE_BROWSER = NO +INLINE_SOURCES = NO +STRIP_CODE_COMMENTS = YES +REFERENCED_BY_RELATION = NO +REFERENCES_RELATION = NO +REFERENCES_LINK_SOURCE = YES +USE_HTAGS = NO +VERBATIM_HEADERS = YES +ALPHABETICAL_INDEX = YES +COLS_IN_ALPHA_INDEX = 5 +IGNORE_PREFIX = +GENERATE_HTML = YES +HTML_OUTPUT = html +HTML_FILE_EXTENSION = .html +HTML_HEADER = +HTML_FOOTER = 
+HTML_STYLESHEET = +HTML_EXTRA_FILES = +HTML_COLORSTYLE_HUE = 220 +HTML_COLORSTYLE_SAT = 100 +HTML_COLORSTYLE_GAMMA = 80 +HTML_TIMESTAMP = YES +HTML_DYNAMIC_SECTIONS = NO +HTML_INDEX_NUM_ENTRIES = 100 +GENERATE_DOCSET = NO +DOCSET_FEEDNAME = "Doxygen generated docs" +DOCSET_BUNDLE_ID = org.doxygen.Project +DOCSET_PUBLISHER_ID = org.doxygen.Publisher +DOCSET_PUBLISHER_NAME = Publisher +GENERATE_HTMLHELP = NO +CHM_FILE = +HHC_LOCATION = +GENERATE_CHI = NO +CHM_INDEX_ENCODING = +BINARY_TOC = NO +TOC_EXPAND = NO +GENERATE_QHP = NO +QCH_FILE = +QHP_NAMESPACE = org.doxygen.Project +QHP_VIRTUAL_FOLDER = doc +QHP_CUST_FILTER_NAME = +QHP_CUST_FILTER_ATTRS = +QHP_SECT_FILTER_ATTRS = +QHG_LOCATION = +GENERATE_ECLIPSEHELP = NO +ECLIPSE_DOC_ID = org.doxygen.Project +DISABLE_INDEX = NO +GENERATE_TREEVIEW = YES +ENUM_VALUES_PER_LINE = 4 +TREEVIEW_WIDTH = 250 +EXT_LINKS_IN_WINDOW = NO +FORMULA_FONTSIZE = 10 +FORMULA_TRANSPARENT = YES +USE_MATHJAX = NO +MATHJAX_RELPATH = http://cdn.mathjax.org/mathjax/latest +MATHJAX_EXTENSIONS = +SEARCHENGINE = YES +SERVER_BASED_SEARCH = NO +GENERATE_LATEX = NO +LATEX_OUTPUT = latex +LATEX_CMD_NAME = latex +MAKEINDEX_CMD_NAME = makeindex +COMPACT_LATEX = NO +PAPER_TYPE = a4 +EXTRA_PACKAGES = +LATEX_HEADER = +LATEX_FOOTER = +PDF_HYPERLINKS = YES +USE_PDFLATEX = YES +LATEX_BATCHMODE = NO +LATEX_HIDE_INDICES = NO +LATEX_SOURCE_CODE = NO +LATEX_BIB_STYLE = plain +GENERATE_RTF = NO +RTF_OUTPUT = rtf +COMPACT_RTF = NO +RTF_HYPERLINKS = NO +RTF_STYLESHEET_FILE = +RTF_EXTENSIONS_FILE = +GENERATE_MAN = NO +MAN_OUTPUT = man +MAN_EXTENSION = .3 +MAN_LINKS = NO +GENERATE_XML = NO +XML_OUTPUT = xml +XML_SCHEMA = +XML_DTD = +XML_PROGRAMLISTING = YES +GENERATE_AUTOGEN_DEF = NO +GENERATE_PERLMOD = NO +PERLMOD_LATEX = NO +PERLMOD_PRETTY = YES +PERLMOD_MAKEVAR_PREFIX = +ENABLE_PREPROCESSING = YES +MACRO_EXPANSION = NO +EXPAND_ONLY_PREDEF = NO +SEARCH_INCLUDES = YES +INCLUDE_PATH = +INCLUDE_FILE_PATTERNS = +PREDEFINED = +EXPAND_AS_DEFINED = +SKIP_FUNCTION_MACROS = 
YES +TAGFILES = +GENERATE_TAGFILE = +ALLEXTERNALS = NO +EXTERNAL_GROUPS = YES +PERL_PATH = /usr/bin/perl +CLASS_DIAGRAMS = YES +MSCGEN_PATH = +HIDE_UNDOC_RELATIONS = NO +HAVE_DOT = YES +DOT_NUM_THREADS = 0 +DOT_FONTNAME = Helvetica +DOT_FONTSIZE = 10 +DOT_FONTPATH = +CLASS_GRAPH = YES +COLLABORATION_GRAPH = YES +GROUP_GRAPHS = YES +UML_LOOK = NO +UML_LIMIT_NUM_FIELDS = 10 +TEMPLATE_RELATIONS = NO +INCLUDE_GRAPH = YES +INCLUDED_BY_GRAPH = YES +CALL_GRAPH = YES +CALLER_GRAPH = YES +GRAPHICAL_HIERARCHY = YES +DIRECTORY_GRAPH = YES +DOT_IMAGE_FORMAT = png +INTERACTIVE_SVG = NO +DOT_PATH = dot +DOTFILE_DIRS = +MSCFILE_DIRS = +DOT_GRAPH_MAX_NODES = 50 +MAX_DOT_GRAPH_DEPTH = 0 +DOT_TRANSPARENT = NO +DOT_MULTI_TARGETS = NO +GENERATE_LEGEND = YES +DOT_CLEANUP = YES + diff --git a/Makefile b/Makefile new file mode 100644 index 0000000..c2de52d --- /dev/null +++ b/Makefile @@ -0,0 +1,37 @@ +## ---------------------------------------------- [ Makefile for Python Checks ] + +# Simple Makefile used during development to check compliance with +# pep8 and to generate documentation + +SRC=config/*.py tools/*.py +NAME=hilbert + +.PHONY: usage pep8 apidocs clean pylint install build all_docs + +usage: # Print Targets + @grep '^[^#[:space:]].*:' Makefile + +check: # Run the tests + /bin/bash -c 'py.test -v -l --tb=auto --full-trace --color=auto tests/test_*.py' + /bin/bash -c 'py.test-3 -v -l --tb=auto --full-trace --color=auto tests/test_*.py' + +tox: tox.ini setup.py # Run clean testing via tox + tox + +pep8: ${SRC} # Check for PEP8 compliance + pep8 --first --show-source --show-pep8 --statistics --max-line-length=100 --format=pylint ${SRC} > docs/pep8.report.txt 2>&1 || echo $? + +pylint: # Analyse Source + pylint --rcfile=rcfile.pylint -f html --comment=y --files-output=y ${SRC} 2>&1 || echo $? # --full-documentation + mv pylint_*.html docs/ + +all_docs: apidocs epydoc pep8 pylint + +apidocs: ${SRC} # Build API Documentation with doxygen + doxygen Doxyfile 2>&1 || echo $? 
+ +epydoc: ${SRC} # Build API Documentation with epydoc + epydoc --html -o docs/epydoc --inheritance=listed --show-imports --graph=all ${SRC} 2>&1 || echo $? + +clean: # Clean Project + rm -rf *~ docs/doxy docs/epydoc docs/pylint_*.html docs/pep8.report.txt diff --git a/config/__init__.py b/config/__init__.py new file mode 100644 index 0000000..6ef28b2 --- /dev/null +++ b/config/__init__.py @@ -0,0 +1,6 @@ +from __future__ import absolute_import +from __future__ import unicode_literals + +# __version__ = '0.2.2-dev' # TODO: add git commit id? + +# from hilbert_cli_config import * diff --git a/config/helpers.py b/config/helpers.py new file mode 100644 index 0000000..45a3eb4 --- /dev/null +++ b/config/helpers.py @@ -0,0 +1,24 @@ +# -*- coding: utf-8 -*- +# encoding: utf-8 +# coding: utf-8 + +from __future__ import absolute_import, print_function, unicode_literals + +# from .hilbert_cli_config import * # load_yaml # Hilbert # VerboseRoundTripLoader, + +############################################################### +# import pickle +# import cPickle as pickle + +def pickle_dump(fn, d): + import dill as pickle # NOTE: 3-clause BSD + with open(fn, 'wb') as p: + # NOTE: Pickle the 'data' dictionary using the highest protocol available? + # pickle.HIGHEST_PROTOCOL = 4 added in Python 3.4 + pickle.dump(d, p, 2) # 2nd PROTOCOL was introduced in Python 2.3. 
+ +def pickle_load(fn): + import dill as pickle # NOTE: 3-clause BSD + with open(fn, 'rb') as p: + d = pickle.load(p) + return d diff --git a/config/hilbert_cli_config.py b/config/hilbert_cli_config.py new file mode 100644 index 0000000..472f540 --- /dev/null +++ b/config/hilbert_cli_config.py @@ -0,0 +1,2548 @@ +# -*- coding: utf-8 -*- +# encoding: utf-8 +# coding: utf-8 + +from __future__ import absolute_import, print_function, unicode_literals + +############################################################### +import ruamel.yaml as yaml +from ruamel.yaml.reader import Reader +from ruamel.yaml.scanner import RoundTripScanner # Scanner +from ruamel.yaml.parser import RoundTripParser # Parser, +from ruamel.yaml.composer import Composer +from ruamel.yaml.constructor import RoundTripConstructor # Constructor, SafeConstructor, +from ruamel.yaml.resolver import VersionedResolver # Resolver, +# from ruamel.yaml.nodes import MappingNode +from ruamel.yaml.compat import PY2, PY3, text_type, string_types, ordereddict + +import semantic_version # supports partial versions + +import logging +import collections +import sys +import os +import re, tokenize +import tempfile +import subprocess, shlex +# import paramiko + + +import pprint as PP +from abc import * + +############################################################### +# logging.basicConfig(format='%(levelname)s [%(filename)s:%(lineno)d]: %(message)s', level=logging.DEBUG) +log = logging.getLogger(__name__) +# log.setLevel(logging.DEBUG) + +_pp = PP.PrettyPrinter(indent=4) + +############################################################### +# NOTE: Global variables +PEDANTIC = False # NOTE: to treat invalid values/keys as errors? 
+INPUT_DIRNAME = './' # NOTE: base location for external resources + + +############################################################### +if PY3 and (sys.version_info[1] >= 4): + class AbstractValidator(ABC): + """AbstractValidator is the root Base class for any concrete implementation of entities + appearing in the general configuration file""" + + @abstractmethod + def validate(self, d): + pass +elif PY2 or PY3: + class AbstractValidator: + """AbstractValidator is the root Base class for any concrete implementation of entities + appearing in the general configuration file""" + __metaclass__ = ABCMeta + + @abstractmethod + def validate(self, d): + pass +# elif PY3: +# class AbstractValidator(metaclass=ABCMeta): +# """AbstractValidator is the root Base class for any concrete implementation of entities +# appearing in the general configuration file""" +# @abstractmethod +# def validate(self, d): +# pass +else: + raise NotImplementedError("Unsupported Python version: '{}'".format(sys.version_info)) + +############################################################### +if PY3: + from urllib.parse import urlparse + from urllib.request import urlopen +elif PY2: + from urlparse import urlparse + from urllib2 import urlopen + +############################################################### +if PY3: + def is_valid_id(k): + return k.isidentifier() +elif PY2: + def is_valid_id(k): + return re.match(tokenize.Name + '$', k) + + +############################################################### +def pprint(cfg): + global _pp + _pp.pprint(cfg) + + +############################################################### +# timeout=None, +def _execute(_cmd, shell=False, stdout=None, stderr=None): # True??? Try several times? Overall timeout? 
+ global PEDANTIC + __cmd = ' '.join(_cmd) + # stdout = tmp, stderr = open("/dev/null", 'w') + # stdout=open("/dev/null", 'w'), stderr=open("/dev/null", 'w')) + log.debug("Executing shell command: '{}'...".format(__cmd)) + + retcode = None + try: + # with subprocess.Popen(_cmd, shell=shell, stdout=stdout, stderr=stderr) as p: + # timeout=timeout, + retcode = subprocess.call(_cmd, shell=shell, stdout=stdout, stderr=stderr) + except: + log.exception("Could not execute '{0}'! Exception: {1}".format(__cmd, sys.exc_info())) + raise + + assert retcode is not None + log.debug("Exit code: '{}'".format(retcode)) + + if retcode: + if not PEDANTIC: # Bad error code [{0}] while + log.warning("Error exit code {0}, while executing '{1}'!".format(retcode, __cmd)) + else: # Pedantic mode? + log.error("Error exit code {0}, while executing '{1}'!".format(retcode, __cmd)) + raise Exception("Error exit code {0}, while executing '{1}'".format(retcode, __cmd)) + else: + log.debug("Successful command '{}' execution!".format(__cmd)) + + return retcode + + +############################################################### +def _get_line_col(lc): + + if isinstance(lc, (list, tuple)): + l = lc[0] + c = lc[1] + else: + try: + l = lc.line + except: + log.exception("Cannot get line out of '{}': Missing .line attribute!".format(lc)) + raise + + try: + c = lc.col + except: + try: + c = lc.column + except: + log.exception("Cannot get column out of '{}': Missing .col/.column attributes!".format(lc)) + raise + + return l, c + + +############################################################### +class ConfigurationError(Exception): + def __init__(self, msg): + self._msg = msg + + +############################################################### +_up_arrow = '↑' + + +def _key_error(key, value, lc, error_message, e='K'): + (line, col) = _get_line_col(lc) + + if key is None: + key = '*' + + print('{}[line: {}, column: {}]: {}'.format(e, line + 1, col + 1, error_message.format(key))) + print('{}{}: 
{}'.format(' ' * col, key, value)) # NOTE: !? + # ! TODO: try to get access to original ruamel.yaml buffered lines...? + print('{}{}'.format(' ' * col, _up_arrow)) + print('---') + + +def _key_note(key, lc, key_message, e='K'): + (line, col) = _get_line_col(lc) + + if key is None: + key = '?' + + print('{}[line: {}, column: {}]: {}'.format(e, line + 1, col + 1, key_message.format(key))) + print('---') + + +def _value_error(key, value, lc, error, e='E'): + (line, col) = _get_line_col(lc) + + if key is None: + key = '?' + + val_col = col + len(key) + 2 + print('{}[line: {}, column: {}]: {}'.format(e, line + 1, val_col + 1, error.format(key))) + print('{}{}: {}'.format(' ' * col, key, value)) # NOTE: !? + # ! TODO: try to get access to original ruamel.yaml buffered lines...? + print('{}{}'.format(' ' * val_col, _up_arrow)) + print('---') + +# Unused??? +# def value_warning(key, value, lc, error): +# _value_error(key, value, lc, error, e='W') + + +############################################################### +class VerboseRoundTripConstructor(RoundTripConstructor): + def construct_mapping(self, node, maptyp, deep=False): + + m = RoundTripConstructor.construct_mapping(self, node, maptyp, deep=deep) # the actual construction! + + # additionally go through all nodes in the mapping to detect overwrites: + + starts = {} # already processed keys + locations and values + + for key_node, value_node in node.value: + # keys can be list -> deep + key = self.construct_object(key_node, deep=True) + + # lists are not hashable, but tuples are + if not isinstance(key, collections.Hashable): + if isinstance(key, list): + key = tuple(key) + + value = self.construct_object(value_node, deep=deep) + # TODO: check the lines above in the original Constructor.construct_mapping code for any changes/updates + + if key in starts: # Duplication detection + old = starts[key] + + print("WARNING: Key re-definition within some mapping: ") # mapping details? 
+ _key_error(key, old[1], old[0], "Previous Value: ") + _key_error(key, value, key_node.start_mark, "New Value: ") + print('===') + + starts[key] = (key_node.start_mark, value) # in order to find all such problems! + + return m + + +############################################################### +class VerboseRoundTripLoader(Reader, RoundTripScanner, RoundTripParser, Composer, + VerboseRoundTripConstructor, VersionedResolver): + def __init__(self, stream, version=None, preserve_quotes=None): + Reader.__init__(self, stream) + RoundTripScanner.__init__(self) + RoundTripParser.__init__(self) + Composer.__init__(self) + VerboseRoundTripConstructor.__init__(self, preserve_quotes=preserve_quotes) + VersionedResolver.__init__(self, version) + + +############################################################### +class BaseValidator(AbstractValidator): + """Abstract Base Class for the Config entities""" + + __version = [None] # NOTE: shared version among all Validator Classes! + _parent = None # NOTE: (for later) parent Validator + _data = None # NOTE: result of valid validation + _default_input_data = None # NOTE: Default input to the parser instead of None + + def __init__(self, *args, **kwargs): + parent = kwargs.pop('parent', None) + parsed_result_is_data = kwargs.pop('parsed_result_is_data', False) + + # TODO: FIXME: assure *args, **kwargs are empty! 
+ super(BaseValidator, self).__init__() + + assert self._parent is None + self._parent = parent + + # parsed_result_is_data default: + # - False => parsed result is self + # - True => get_data() + self._parsed_result_is_data = parsed_result_is_data + + self.__API_VERSION_ID = "$Id$" + + def get_parent(self, cls=None): + if cls is None: + return self._parent + + if self._parent is None: + return None + + _p = self._parent + while isinstance(_p, BaseValidator): + if isinstance(_p, cls): + break + _t = _p._parent + if _t is None: + break + _p = _t + + assert _p is not None + if isinstance(_p, cls): + return _p + + log.error("Sorry: could not find parent of specified class ({0})!" + "Found top is of type: {1}".format(cls, type(_p))) + return None + + + def get_api_version(self): + return self.__API_VERSION_ID + + def set_data(self, d): + # assert self._data is None + # assert d is not None + self._data = d + + def get_data(self): + _d = self._data +# assert _d is not None + return _d + + @classmethod + def set_version(cls, v): + """To be set once only for any Validator class!""" + assert len(cls.__version) == 1 +# assert cls.__version[0] is None # NOTE: bad for testing! + cls.__version[0] = v + + @classmethod + def get_version(cls, default=None): + assert len(cls.__version) == 1 + + if cls.__version[0] is not None: + return cls.__version[0] + + return default + + @abstractmethod + def validate(self, d): # abstract... + pass + + @classmethod + def parse(cls, d, *args, **kwargs): + """ + return parsed value, throw exception if input is invalid! + + :param d: + :param parent: + :return: + """ + self = cls(*args, **kwargs) + + log.debug("{1}::parse( input type: {0} )".format(type(d), type(self))) + + if self.validate(d): # NOTE: validate should not **explicitly** throw exceptions!!! + if self._parsed_result_is_data: + return self.get_data() + + return self + + # NOTE: .parse should throw exceptions in case of invalid input data! 
+ raise ConfigurationError(u"{}: {}".format("ERROR:", "Invalid data: '{0}' in {1}!" .format(d, type(self)))) + + def __repr__(self): + """Print using pretty formatter""" + + d = self.get_data() # vars(self) # ??? + return PP.pformat(d, indent=4, width=100) + +# def __str__(self): +# """Convert to string""" +# +# d = self.get_data() # vars(self) # ??? +# return str(d) + + def __eq__(self, other): + assert isinstance(self, BaseValidator) + + if not isinstance(other, BaseValidator): + return self.data_dump() == other + + assert isinstance(other, BaseValidator) + +# assert self.get_api_version() == other.get_api_version() + return self.get_data() == other.get_data() + + def __ne__(self, other): + return not (self == other) # More general than self.value != other.value + + def data_dump(self): + _d = self.get_data() + + if _d is None: + return _d + + assert not isinstance(_d, (tuple, set)) # TODO: is this true in general?!? + + if isinstance(_d, dict): + _dd = {} + for k in _d: + v = _d[k] + if isinstance(v, BaseValidator): + v = v.data_dump() + _dd[k] = v + return _dd + + if isinstance(_d, list): + _dd = [] + for idx, i in enumerate(_d): + v = i + if isinstance(v, BaseValidator): + v = v.data_dump() + _dd.insert(idx, v) + return _dd + +# if isinstance(_d, string_types): + return _d + + def query(self, what): + """ + Generic query for data subset about this object + + A/B/C/(all|keys|data)? + + Get object under A/B/C and return + * it (if 'all') - default! + * its keys (if 'keys') + * its data dump (if 'data') + """ + + # NOTE: no data dumping here! Result may be a validator! 
+ + log.debug("Querying '%s'", what) + + if (what is None) or (what == ''): + what = 'all' + + if what == 'all': + return self + + if what == 'data': + return self.data_dump() + + _d = self.get_data() + + if what == 'keys': + assert isinstance(_d, dict) + return [k for k in _d.keys()] + + s = StringValidator.parse(what, parent=self) + + if s in _d: + return _d[s] + + sep = "/" + ss = s.split(sep) # NOTE: encode using pathes! + + h = ss[0] # top header + t = sep.join(ss[1:]) # tail + + if h in _d: + d = _d[ss[0]] + if isinstance(d, BaseValidator): + return d.query(t) # TODO: FIXME: avoid recursion... + + log.warning("Could not query an object. Ignoring the tail: %s", t) + return d + + raise ConfigurationError(u"{}: {}".format("ERROR:", + "Sorry cannot show '{0}' of {1}!".format(what, type(self)))) + + +############################################################### +class BaseRecordValidator(BaseValidator): + """Aggregation of data as a record with some fixed data members""" + + # TODO: turn _default_type into a class-member (by moving it here)...? + + def __init__(self, *args, **kwargs): + super(BaseRecordValidator, self).__init__(*args, **kwargs) + + self._default_type = None # "default_base" + self._types = {} + self._create_optional = False # Instantiate missing optional keys + self._type = None + + def detect_type(self, d): + """determine the type of variadic data for the format version""" + + assert not (self._default_type is None) + assert len(self._types) > 0 + assert isinstance(d, dict) + + return self._default_type + + def detect_extra_rule(self, key, value): + """Handling for extra un-recorded keys in the mapping""" + return None + + def validate(self, d): + if d is None: + d = self._default_input_data + + # ! TODO: assert that d is a mapping with lc! 
+ + self._type = self.detect_type(d) + + assert self._type is not None + assert self._type in self._types + + _rule = self._types[self._type] + + _ret = True + + _d = {} + _lc = d.lc # starting location of the mapping...? + + for k in _rule.keys(): + r = _rule[k] + if r[0] and (k not in d): + _key_note(k, _lc, "ERROR: Missing mandatory key `{}` (type: '%s')" % (self._type)) # Raise Exception? + _ret = False + # NOTE: the following will add all the missing default values + elif self._create_optional and (not r[0]): # Optional Values should have some default values! + # TODO: FIXME: catch exception in the following: + _k = None + _v = None + try: + _k = StringValidator.parse(k, parent=self) + except ConfigurationError as err: + _key_note(k, _lc, "Error: invalid _optional_ key field '{}' (type: '%s')" % self._type) + pprint(err) + _ret = False + + try: + _v = (r[1]).parse(None, parent=self) # Default Value! + except ConfigurationError as err: + _key_note(k, _lc, "Error: invalid default value (for optional key: '{}') (type: '%s')" % self._type) + pprint(err) + _ret = False + + if _ret: + assert _k is not None + _d[_k] = _v + + (s, c) = _get_line_col(_lc) + for offset, k in enumerate(d): + v = d.get(k) + k = text_type(k) + l = s + offset # ?? 
+ lc = (l, c) + + _k = None + _v = None + + if k in _rule: + try: + _k = StringValidator.parse(k, parent=self) + except ConfigurationError as err: + _key_error(k, v, lc, "Error: invalid key field '{}' (type: '%s')" % self._type) + pprint(err) + _ret = False + + try: + _v = (_rule[k][1]).parse(v, parent=self) + except ConfigurationError as err: + _value_error(k, v, lc, "Error: invalid field value (key: '{}') (type: '%s')" % self._type) + pprint(err) + _ret = False + + else: + _extra_rule = self.detect_extra_rule(k, v) # (KeyValidator, ValueValidator) + + if _extra_rule is None: + _key_error(k, v, lc, "WARNING: Unhandled extra Key: '{}' (type: '%s')" % self._type) + _ret = False + else: + try: + _k = (_extra_rule[0]).parse(k, parent=self) + except ConfigurationError as err: + _key_error(k, v, lc, "Error: invalid key '{}' (type: '%s')" % self._type) + pprint(err) + _ret = False + + try: + _v = (_extra_rule[1]).parse(v, parent=self) + except ConfigurationError as err: + # TODO: FIXME: wrong col (it was for key - not value)! + _value_error(k, v, lc, "Error: invalid field value '{}' value (type: '%s')" % self._type) + pprint(err) + _ret = False + + if _ret: + assert _k is not None + _d[_k] = _v + + if _ret: + self.set_data(_d) + + return _ret + + +############################################################### +class ScalarValidator(BaseValidator): + """Single scalar value out of YAML scalars: strings, numbert etc.""" + + def __init__(self, *args, **kwargs): + kwargs['parsed_result_is_data'] = kwargs.pop('parsed_result_is_data', True) + super(ScalarValidator, self).__init__(*args, **kwargs) + + def validate(self, d): + """check that data is a scalar: not a sequence or mapping or set""" + + if d is None: + d = self._default_input_data # ! + + if d is not None: + if isinstance(d, (list, dict, tuple, set)): # ! Check if data is not a container? + log.error("value: '{}' is not a scalar value!!" . 
format(d)) + return False + + if isinstance(d, string_types): + d = text_type(d) + + # NOTE: None is also a scalar value...! + self.set_data(d) + return True + +############################################################### +class StringValidator(ScalarValidator): + """YAML String""" + def __init__(self, *args, **kwargs): + super(StringValidator, self).__init__(*args, **kwargs) + + self._default_input_data = '' + + def validate(self, d): + """check whether data is a valid string. Note: should not care about format version""" + + if d is None: + d = self._default_input_data + + assert d is not None + + s = ScalarValidator.parse(d, parent=self) + + if not isinstance(s, string_types): + log.error("value: '{}' is not a string!!" . format(d)) + return False + + self.set_data(text_type(d)) + return True + + +############################################################### +class SemanticVersionValidator(BaseValidator): + def __init__(self, *args, **kwargs): + partial = kwargs.pop('partial', True) +# kwargs['parsed_result_is_data'] = kwargs.pop('parsed_result_is_data', False) + super(SemanticVersionValidator, self).__init__(*args, **kwargs) + +# self._parsed_result_is_data = False + self._partial = partial + self._default_input_data = '0.0.0' + + def validate(self, d): + """check the string data to be a valid semantic version""" + + if d is None: + d = self._default_input_data + + log.debug("{1}::validate( input type: {0} )".format(type(d), type(self))) +# log.debug("SemanticVersionValidator::validate( input data: {0} )".format(str(d))) + + try: + _t = StringValidator.parse(d, parent=self, parsed_result_is_data=True) + except: + log.warning("Input is not a string: {}".format(d)) + try: + _t = StringValidator.parse(str(d), parent=self, parsed_result_is_data=True) + except: + log.error("Input cannot be converted into a version string: {}".format(d)) + return False + + + # self.get_version(None) # ??? 
+ _v = None + try: + _v = semantic_version.Version(_t, partial=self._partial) + except: + log.exception("Wrong version data: '{0}' (see: '{1}')" . format(d, sys.exc_info())) + return False + + self.set_data(_v) + return True + + def data_dump(self): + return str(self.get_data()) + + +############################################################### +class BaseUIString(StringValidator): + """String visible to users => non empty!""" + + def __init__(self, *args, **kwargs): + super(BaseUIString, self).__init__(*args, **kwargs) + + self._default_input_data = None + + def validate(self, d): + """check whether data is a valid (non-empty) string""" + + if d is None: + d = self._default_input_data + + if not super(BaseUIString, self).validate(d): + self.set_data(None) + return False + + if bool(self.get_data()): # NOTE: Displayed string should not be empty! + return True + + self.set_data(None) + return False + + +############################################################### +class BaseEnum(BaseValidator): # TODO: Generalize to not only strings...? + """Enumeration/collection of several fixed strings""" + + def __init__(self, *args, **kwargs): + kwargs['parsed_result_is_data'] = kwargs.pop('parsed_result_is_data', True) + super(BaseEnum, self).__init__(*args, **kwargs) + + self._enum_list = [] # NOTE: will depend on the version... + + def validate(self, d): + """check whether data is in the list of fixed strings (see ._enum_list)""" + + if d is None: + d = self._default_input_data + + t = StringValidator.parse(d, parent=self, parsed_result_is_data=True) + + if not (t in self._enum_list): # check withing a list of possible string values + log.error("string value: '{}' is not among known enum items!!".format(d)) + return False + + self.set_data(t) + return True + + +############################################################### +class ServiceType(BaseEnum): # Q: Is 'Service::type' mandatory? 
default: 'compose' + def __init__(self, *args, **kwargs): + super(ServiceType, self).__init__(*args, **kwargs) + + compose = text_type('compose') + self._enum_list = [compose] # NOTE: 'docker' and others may be possible later on + self._default_input_data = compose + + +############################################################### +class StationOMDTag(BaseEnum): # Q: Is 'Station::omd_tag' mandatory? default: 'standalone' + def __init__(self, *args, **kwargs): + super(StationOMDTag, self).__init__(*args, **kwargs) + + _v = text_type('standalone') + self._enum_list = [text_type('agent'), text_type('windows'), _v] # NOTE: possible values of omd_tag + self._default_input_data = _v + + +############################################################### +class StationPowerOnMethodType(BaseEnum): # Enum: [WOL], AMTvPRO, DockerMachine + + def __init__(self, *args, **kwargs): + super(StationPowerOnMethodType, self).__init__(*args, **kwargs) + + wol = text_type('WOL') + # NOTE: the list of possible values of PowerOnMethod::type (will depend on format version) + self._enum_list = [wol, text_type('DockerMachine')] # NOTE: 'AMTvPRO' and others may be possible later on + self._default_input_data = wol + + +############################################################### +class URI(BaseValidator): + """Location of external file, either URL or local absolute or local relative to the input config file""" + + def __init__(self, *args, **kwargs): + kwargs['parsed_result_is_data'] = kwargs.pop('parsed_result_is_data', True) + super(URI, self).__init__(*args, **kwargs) + + self._type = None + + def validate(self, d): + """check whether data is a valid URI""" + global PEDANTIC + + if d is None: + d = self._default_input_data + + v = StringValidator.parse(d, parent=self, parsed_result_is_data=True) + + _ret = True + + # TODO: @classmethod def check_uri(v) + if urlparse(v).scheme != '': + self._type = text_type('url') + try: + urlopen(v).close() + except: + log.warning("URL: '{}' is not 
accessible!".format(v)) + _ret = not PEDANTIC + + # TODO: FIXME: base location should be the input file's dirname??? +# elif not os.path.isabs(v): +# v = os.path.abspath(os.path.join(os.path.dirname(sys.argv[0]), v)) + + elif os.path.isfile(v): # Check whether file exists + self._type = text_type('file') + + elif os.path.isdir(v): # Check whether directory exists + self._type = text_type('dir') + + if not _ret: + log.warning("missing/unsupported resource location: {}".format(v)) + _ret = (not PEDANTIC) + + if _ret: + self.set_data(v) + + return _ret + + +############################################################### +class BaseID(BaseValidator): + def __init__(self, *args, **kwargs): + kwargs['parsed_result_is_data'] = kwargs.pop('parsed_result_is_data', True) + super(BaseID, self).__init__(*args, **kwargs) + + def validate(self, d): + """check whether data is a valid ID string""" + + if d is None: + d = self._default_input_data + + v = StringValidator.parse(d, parent=self, parsed_result_is_data=True) + + if not is_valid_id(v): + log.error("not a valid variable identifier! Input: '{}'" . format(d)) + return False + + self.set_data(v) + return True + +############################################################### +class ClientVariable(BaseID): # + def __init__(self, *args, **kwargs): + super(ClientVariable, self).__init__(*args, **kwargs) + + def validate(self, d): + """check whether data is a valid ID string""" + + if d is None: + d = self._default_input_data + + v = BaseID.parse(d, parent=self) # .get_data() + + _ret = True + + if not (v == v.lower() or v == v.upper()): # ! Variables are all lower or upper case! + log.error("a variable must be either in lower or upper case! Input: '{}'" . format(d)) + _ret = False + + # NOTE: starting with hilbert_ or HILBERT_ with letters, digits and '_'?? + if not re.match('^hilbert(_[a-z0-9]+)+$', v.lower()): + log.error("variable must start with HILBERT/hilbert and contain words separated by underscores!" 
+ " Input: '{}" .format(d)) + _ret = False + + if _ret: + self.set_data(v) + + return _ret + + +############################################################### +class ServiceID(BaseID): + def __init__(self, *args, **kwargs): + super(ServiceID, self).__init__(*args, **kwargs) + + +############################################################### +class ApplicationID(BaseID): + def __init__(self, *args, **kwargs): + super(ApplicationID, self).__init__(*args, **kwargs) + + +############################################################### +class GroupID(BaseID): + def __init__(self, *args, **kwargs): + super(GroupID, self).__init__(*args, **kwargs) + + +############################################################### +class StationID(BaseID): + def __init__(self, *args, **kwargs): + super(StationID, self).__init__(*args, **kwargs) + + +############################################################### +class ProfileID(BaseID): + def __init__(self, *args, **kwargs): + super(ProfileID, self).__init__(*args, **kwargs) + + +############################################################### +class PresetID(BaseID): + def __init__(self, *args, **kwargs): + super(PresetID, self).__init__(*args, **kwargs) + + +############################################################### +class AutoDetectionScript(StringValidator): + def __init__(self, *args, **kwargs): + super(AutoDetectionScript, self).__init__(*args, **kwargs) + self._default_input_data = '' + + + def check_script(self, script): + global PEDANTIC + + assert script is not None + + _ret = True + log.debug('Checking auto-detection script: {}'.format(script)) + + # NOTE: trying to check the BASH script: shellcheck & bash -n 'string': + fd, path = tempfile.mkstemp() + try: + with os.fdopen(fd, 'w') as tmp: + tmp.write(script) + + _cmd = ["bash", "-n", path] + try: + # NOTE: Check for valid bash script + retcode = _execute(_cmd) + except: + log.exception("Error while running '{}' to check auto-detection script!".format(' '.join(_cmd))) + 
return False # if PEDANTIC: # TODO: add a special switch? + + if retcode != 0: + log.error("Error while running '{0}' to check auto-detection script: {1}!".format(' '.join(_cmd), retcode)) + return False + + # NOTE: additionall tool: shellcheck (haskell!) + # FIXME: what if this tool is missing!? TODO: Check for it once! + _cmd = ["shellcheck", "-s", "bash", path] + try: + # NOTE: Check for valid bash script + retcode = _execute(_cmd) + except: + log.exception("Error while running '{}' to check auto-detection script!".format(' '.join(_cmd))) + return False + + if retcode != 0: + log.error("Error while running '{0}' to check auto-detection script: {1}!".format(' '.join(_cmd), retcode)) + return False + + finally: + os.remove(path) + + return True + + def validate(self, d): + """check whether data is a valid script""" + + global PEDANTIC + + if d is None: + d = self._default_input_data + + if (d is None) or (d == '') or (d == text_type('')): + self.set_data(text_type('')) + return True + + script = '' + try: + script = StringValidator.parse(d, parent=self, parsed_result_is_data=True) + + if not bool(script): # NOTE: empty script is also fine! + self.set_data(script) + return True + except: + log.exception("Wrong input to AutoDetectionScript::validate: {}". format(d)) + return False + + + if not self.check_script(script): + if PEDANTIC: + log.error("Bad script: {0}".format(script)) + return False + else: + log.warning("Bad script: {0}".format(script)) + + self.set_data(script) + return True + + +############################################################### +class DockerComposeServiceName(StringValidator): # TODO: any special checks here? 
+ + def __init__(self, *args, **kwargs): + super(DockerComposeServiceName, self).__init__(*args, **kwargs) + + def validate(self, d): + """check whether data is a valid service name in file due to DockerComposeRef""" + if d is None: + d = self._default_input_data + + try: + n = StringValidator.parse(d, parent=self, parsed_result_is_data=True) + self.set_data(n) + return True + except: + log.error("Wrong input to DockerComposeServiceName::validate: '{}'". format(d)) + return False + +############################################################### +class DockerComposeRef(URI): + + def __init__(self, *args, **kwargs): + super(DockerComposeRef, self).__init__(*args, **kwargs) + + self._default_input_data = text_type('docker-compose.yml') + + def validate(self, d): + """check whether data is a valid docker-compose file name""" + if d is None: + d = self._default_input_data + + # TODO: call docker-compose on the referenced file! Currently in DockerService!? + + return super(DockerComposeRef, self).validate(d) + + +############################################################### +class Icon(URI): + def __init__(self, *args, **kwargs): + super(Icon, self).__init__(*args, **kwargs) + + def validate(self, d): + """check whether data is a valid icon file name""" + + if d is None: + d = self._default_input_data + + # TODO: FIXME: check the file contents (or extention) + return super(Icon, self).validate(d) + + +############################################################### +class HostAddress(StringValidator): + """SSH alias""" + + def __init__(self, *args, **kwargs): + kwargs['parsed_result_is_data'] = kwargs.pop('parsed_result_is_data', False) + super(HostAddress, self).__init__(*args, **kwargs) + + def validate(self, d): + """check whether data is a valid ssh alias?""" + global PEDANTIC + + if d is None: + d = self._default_input_data + + _h = StringValidator.parse(d, parent=self, parsed_result_is_data=True) + + if _h.startswith("'") and _h.endswith("'"): + _h = _h[1:-1] + 
if _h.startswith('"') and _h.endswith('"'): + _h = _h[1:-1] + + if PEDANTIC: + if not self.check_ssh_alias(_h, shell=False, timeout=2): + return False + + self.set_data(_h) + return True + + def get_ip_address(self): + pass + + def get_address(self): + return self.get_data() + + def recheck(self): + return self.check_ssh_alias(self.get_address()) + + # TODO: check/use SSH/SCP calls! + def scp(self, source, target, **kwargs): + global PEDANTIC + + assert self.recheck() + _h = self.get_address() # 'jabberwocky' # + + _cmd = shlex.split("scp -q -F {3} {0} {1}:{2}".format(source, _h, target, os.path.join(os.environ['HOME'], ".ssh", "config"))) + __cmd = ' '.join(_cmd) + + # client = paramiko.SSHClient() + # client.load_system_host_keys() + try: + retcode = _execute(_cmd, **kwargs) + except: + log.exception("Could not execute '{0}'! Exception: {1}".format(__cmd, sys.exc_info())) + if not PEDANTIC: + return + raise + + assert retcode is not None + if not retcode: + log.debug("Command ({}) execution success!".format(__cmd)) + return + else: + log.error("Could not run scp command: '{0}'! Return code: {1}".format(__cmd, retcode)) + if PEDANTIC: + raise Exception("Could not run scp command: '{0}'! Exception: {1}".format(__cmd, sys.exc_info())) + + def ssh(self, cmd, **kwargs): + global PEDANTIC + + assert self.recheck() + _h = self.get_address() # 'jabberwocky' + + # TODO: maybe respect SSH Settings for the parent station!? + + _cmd = shlex.split("ssh -q -F {2} {0} {1}".format(_h, ' '.join(cmd), os.path.join(os.environ['HOME'], ".ssh", "config"))) + __cmd = ' '.join(_cmd) + + # client = paramiko.SSHClient() + # client.load_system_host_keys() + try: + retcode = _execute(_cmd, **kwargs) + except: + log.exception("Could not execute '{0}'! Exception: {1}".format(__cmd, sys.exc_info())) + raise + + assert retcode is not None + if not retcode: + log.debug("Command ({}) execution success!".format(__cmd)) + return + else: + log.error("Could not run remote ssh command: '{0}'! 
Return code: {1}".format(__cmd, retcode)) +# if PEDANTIC: + raise Exception("Could not run remote ssh command: '{0}'! Exception: {1}".format(__cmd, sys.exc_info())) + + + @classmethod + def check_ssh_alias(cls, _h, **kwargs): + """Check for ssh alias""" + global PEDANTIC + timeout = kwargs.pop('timeout', 2) + + log.debug("Checking ssh alias: '{0}'...".format(text_type(_h))) + try: +# client = paramiko.SSHClient() +# client.load_system_host_keys() + + _cmd = ["ssh", "-q", "-F", os.path.join(os.environ['HOME'], ".ssh", "config"), "-o", "ConnectTimeout={}".format(timeout), _h, "exit 0"] + retcode = _execute(_cmd, **kwargs) # , stdout=open("/dev/null", 'w'), stderr=open("/dev/null", 'w') + + if retcode: + log.warning("Non-functional ssh alias: '{0}' => exit code: {1}!".format(text_type(_h), retcode)) + else: + log.debug("Ssh alias '{0}' is functional!" . format(text_type(_h))) + + return retcode == 0 + except: + log.exception("Non-functional ssh alias: '{0}'. Moreover: Unexpected error: {1}" . format(text_type(_h), sys.exc_info())) + if PEDANTIC: + raise + + return False + + +############################################################### +class HostMACAddress(StringValidator): + """MAC Address of the station""" + + def __init__(self, *args, **kwargs): + super(HostMACAddress, self).__init__(*args, **kwargs) + + def validate(self, d): + """check whether data is a valid ssh alias?""" + + if d is None: + d = self._default_input_data + + v = StringValidator.parse(d, parent=self) + + if not re.match("[0-9a-f]{2}([-:])[0-9a-f]{2}(\\1[0-9a-f]{2}){4}$", v.lower()): + _value_error(None, d, d.lc, "ERROR: Wrong MAC Address: [{}]") + return False + + # TODO: verify the existence of that MAC address? 
+ self.set_data(v) + return True + +############################################################### +class BoolValidator(ScalarValidator): + def __init__(self, *args, **kwargs): + super(BoolValidator, self).__init__(*args, **kwargs) + + def validate(self, d): + """check whether data is a valid string""" + + if d is None: + d = self._default_input_data + + if not isinstance(d, bool): + log.error("not a boolean value: '{}'" . format(d)) + return False + + self.set_data(d) + return True + + +class StationVisibility(BoolValidator): ## "hidden": True / [False] + def __init__(self, *args, **kwargs): + super(StationVisibility, self).__init__(*args, **kwargs) + self._default_input_data = False + + +class AutoTurnon(BoolValidator): # Bool, False + def __init__(self, *args, **kwargs): + super(AutoTurnon, self).__init__(*args, **kwargs) + self._default_input_data = False # no powering on by default!? + + +############################################################### +class VariadicRecordWrapper(BaseValidator): + """VariadicRecordWrapper record. Type is determined by the given 'type' field.""" + + def __init__(self, *args, **kwargs): + kwargs['parsed_result_is_data'] = kwargs.pop('parsed_result_is_data', True) + super(VariadicRecordWrapper, self).__init__(*args, **kwargs) + + self._type_tag = text_type('type') + self._type_cls = None + + self._default_type = None + self._types = {} + + # TODO: make sure to use its .parse instead of .validate due to substitution in Wrapper objects! + def validate(self, d): + """determine the type of variadic data for the format version""" + + if d is None: + d = self._default_input_data + + _ret = True + assert isinstance(d, dict) + + assert self._default_type is not None + assert self._default_type in self._types + + _rule = self._types[self._default_type] # Version dependent! 
+ assert _rule is not None + + assert self._type_cls is not None + + if self._type_tag not in d: + _lc = d.lc # start of the current mapping + _key_error(self._type_tag, d, _lc, "ERROR: Missing mandatory key `{}`") + return False + + t = None + try: + t = self._type_cls.parse(d[self._type_tag], parent=self.get_parent(), parsed_result_is_data=True) + except: + log.exception("Wrong type data: {}".format(d[self._type_tag])) + return False + + if t not in _rule: + _lc = d.lc # start of the current mapping + _key_error(self._type_tag, t, _lc, "ERROR: unsupported/wrong variadic type: '{}'") + return False + + tt = _rule[t](parent=self.get_parent()) + + if not tt.validate(d): + _lc = d.lc.key(self._type_tag) + _value_error('*', d, _lc, "ERROR: invalid mapping value '{}' for type '%s'" % t) + return False + + self.set_data(tt) # TODO: completely replace this with tt??? access Methods? + return True + + +############################################################### +class StationPowerOnMethodWrapper(VariadicRecordWrapper): + """StationPowerOnMethod :: Wrapper""" + + def __init__(self, *args, **kwargs): + super(StationPowerOnMethodWrapper, self).__init__(*args, **kwargs) + + T = StationPowerOnMethodType + + self._type_cls = T + _wol_dm = {T.parse("WOL", parsed_result_is_data=True): WOL, T.parse("DockerMachine", parsed_result_is_data=True): DockerMachine} + + self._default_type = "default_poweron_wrapper" + self._types[self._default_type] = _wol_dm + + +############################################################### +class ServiceWrapper(VariadicRecordWrapper): + """Service :: Wrapper""" + + def __init__(self, *args, **kwargs): + super(ServiceWrapper, self).__init__(*args, **kwargs) + + T = ServiceType + self._type_cls = T + _dc = {T.parse("compose", parsed_result_is_data=True): DockerComposeService} + + self._default_type = "default_docker_compose_service_wrapper" + self._types[self._default_type] = _dc + + +############################################################### 
class ApplicationWrapper(ServiceWrapper):
    """Application :: Wrapper"""

    def __init__(self, *args, **kwargs):
        super(ApplicationWrapper, self).__init__(*args, **kwargs)

        t = ServiceType  # NOTE: same for docker-compose Services and Applications!
        self._type_cls = t
        _dc = {t.parse("compose", parsed_result_is_data=True): DockerComposeApplication}

        self._default_type = "default_docker_compose_application_wrapper"
        self._types[self._default_type] = _dc


###############################################################
class DockerMachine(BaseRecordValidator):
    """DockerMachine :: StationPowerOnMethod"""

    # _DM = 'docker-machine'
    _HILBERT_STATION = '~/bin/hilbert-station'  # TODO: look at station for this..?

    def __init__(self, *args, **kwargs):
        super(DockerMachine, self).__init__(*args, **kwargs)

        self._type_tag = text_type('type')
        self._vm_host_address_tag = text_type('vm_host_address')
        self._vm_name_tag = text_type('vm_name')

        self._default_type = 'DockerMachine'

        DM_rule = {
            self._type_tag: (True, StationPowerOnMethodType),  # Mandatory!
            text_type('auto_turnon'): (False, AutoTurnon),
            self._vm_name_tag: (True, StringValidator),
            self._vm_host_address_tag: (True, HostAddress)
        }

        self._types = {self._default_type: DM_rule}  # ! NOTE: AMT - maybe later...

    def get_vm_name(self):
        """Return the (unquoted) virtual machine name -- mandatory field."""
        _d = self.get_data()
        assert _d is not None
        _a = _d.get(self._vm_name_tag, None)
        if (_a is None) or (not bool(_a)):
            log.error('Missing vm_name!')
            raise Exception('Missing vm_name!')

        # strip a single level of surrounding quotes, if any
        if _a.startswith("'") and _a.endswith("'"):
            _a = _a[1:-1]
        if _a.startswith('"') and _a.endswith('"'):
            _a = _a[1:-1]

        return _a

    def get_vm_host_address(self):
        """Return the HostAddress of the VM host -- mandatory field."""
        _d = self.get_data()
        assert _d is not None
        _a = _d.get(self._vm_host_address_tag, None)
        if (_a is None) or (not bool(_a)):
            log.error('Missing vm_host_address!')
            raise Exception('Missing vm_host_address!')

        return _a

    def start(self):  # , action, action_args):
        """Power on the virtual station: run `hilbert-station dm_start <vm>` on the VM host via ssh."""
        global PEDANTIC

        _a = self.get_vm_host_address()
        assert _a is not None
        assert isinstance(_a, HostAddress)

        _n = self.get_vm_name()

        # DISPLAY =:0
        _cmd = [self._HILBERT_STATION, 'dm_start', _n]  # self._DM
        try:
            _ret = _a.ssh(_cmd, shell=True)
        except Exception:  # was a bare `except:`
            s = "Could not power-on virtual station {0} (at {1})".format(_n, _a)
            if not PEDANTIC:
                log.warning(s)
                return False
            else:
                log.exception(s)
                raise

        return _ret

        # process call: ssh to vm_host + docker-machine start vm_id
#        raise NotImplementedError("Running 'docker-machine start' action is not supported yet... Sorry!")


class WOL(BaseRecordValidator):
    """WOL :: StationPowerOnMethod"""

    _WOL = 'wakeonlan'

    def __init__(self, *args, **kwargs):
        super(WOL, self).__init__(*args, **kwargs)

        self._type_tag = text_type('type')
        self._default_type = 'WOL'
        self._MAC_tag = text_type('mac')

        WOL_rule = {
            self._type_tag: (True, StationPowerOnMethodType),  # Mandatory!
            text_type('auto_turnon'): (False, AutoTurnon),
            self._MAC_tag: (True, HostMACAddress)
        }

        self._types = {self._default_type: WOL_rule}

    def get_MAC(self):
        """Return the validated MAC address -- mandatory field."""
        _d = self.get_data()
        assert _d is not None
        _MAC = _d.get(self._MAC_tag, None)
        if (_MAC is None) or (_MAC == ''):
            # CONSISTENCY: raise explicitly like get_vm_name()/get_vm_host_address()
            # instead of relying on `assert` (stripped under `python -O`).
            log.error('Missing mac!')
            raise Exception('Missing mac!')
        return _MAC

    def start(self):  # , action, action_args):
        """Wake this station up via `wakeonlan` (bound to the station's address when known)."""
        global PEDANTIC

        _address = None
        _parent = self.get_parent(cls=Station)

        if _parent is not None:
            if isinstance(_parent, Station):
                _address = _parent.get_address()
                assert _address is not None
                assert isinstance(_address, HostAddress)
                _address = _address.get_address()

        _MAC = self.get_MAC()

        if (_address is None) or (_address == ''):
            log.warning("Sorry: could not get station's address for this WOL MethodObject!")
            _cmd = [self._WOL, _MAC]
            if PEDANTIC:
                raise Exception("Sorry: could not get station's address for this WOL MethodObject!")
        else:
            _cmd = [self._WOL, "-i", _address, _MAC]  # IP?

        __cmd = ' '.join(_cmd)
        try:
            retcode = _execute(_cmd, shell=False)
            assert retcode is not None
            if not retcode:
                log.debug("Command ({}) execution success!".format(__cmd))
                return
            else:
                log.error("Could not poweron via '{0}'! Return code: {1}".format(__cmd, retcode))
                if PEDANTIC:
                    raise Exception("Could not execute '{0}'! Exception: {1}".format(__cmd, sys.exc_info()))
        except Exception:  # was a bare `except:`; re-raised below iff PEDANTIC
            log.exception("Could not execute '{0}'! Exception: {1}".format(__cmd, sys.exc_info()))
            if not PEDANTIC:
                return
            raise


###############################################################
class DockerComposeService(BaseRecordValidator):
    """DockerCompose :: Service data type"""

    _DC = "docker-compose"

    def __init__(self, *args, **kwargs):
        super(DockerComposeService, self).__init__(*args, **kwargs)

        self._type_tag = text_type('type')
        self._hook_tag = text_type('auto_detections')
        self._name_tag = text_type('ref')
        self._file_tag = text_type('file')

        _compose_rule = {
            self._type_tag: (True, ServiceType),  # Mandatory
            self._hook_tag: (False, AutoDetectionScript),
            self._name_tag: (True, DockerComposeServiceName),
            self._file_tag: (False, DockerComposeRef)
        }

        self._default_type = "default_dc_service"
        self._types = {self._default_type: _compose_rule}

        self._create_optional = True

    def check_service(self, _f, _n):
        """Verify that the docker-compose file `_f` defines a service named `_n`."""
        global PEDANTIC

        # FIXME: check disabled -- running `docker-compose config` takes too
        # long (at HITS); the code below is kept for when it is re-enabled.
        return True

        # TODO: Check the corresponding file for such a service -> Service in DockerService!
        fd, path = tempfile.mkstemp()
        try:
            with os.fdopen(fd, 'w') as tmp:
                _cmd = [self._DC, "-f", _f, "config"]  # TODO: use '--services'?
                try:
                    retcode = _execute(_cmd, shell=False, stdout=tmp, stderr=open("/dev/null", 'w'))
                except Exception:  # was a bare `except:`
                    log.exception("Exception while running '{}'!".format(' '.join(_cmd)))
                    return False
#                    _ret = not PEDANTIC  # TODO: add a special switch?

                if retcode:
                    return False

            with open(path, 'r') as tmp:
                dc = load_yaml(tmp)  # NOTE: loading external Docker-Compose's YML file using our validating loader!

                ss = None
                for k in dc['services']:
                    if k == _n:
                        ss = dc['services'][k]
                        break

                if ss is None:
                    log.error("missing service/application '{}' in file '{}' !".format(_n, _f))
                    return False
        finally:
            os.remove(path)

        return True

    def validate(self, d):
        """Validate the record, then (best-effort) check the referenced compose file/service."""
        global PEDANTIC

        if d is None:
            d = self._default_input_data

        if not BaseRecordValidator.validate(self, d):  # TODO: use .parse?
            assert self.get_data() is None
            return False

        _d = self.get_data()

        # TODO: remove Validators (BaseString) from strings used as dict keys!
        _f = _d[self._file_tag]
        while isinstance(_f, BaseValidator):
            _f = _f.get_data()

        _n = _d[self._name_tag]
        while isinstance(_n, BaseValidator):
            _n = _n.get_data()

        if not os.path.exists(_f):  # TODO: FIXME: use URI::check() instead??
            if PEDANTIC:
                log.error("Missing file with docker-compose configuration: '%s'", _f)
                return False
            log.warning("Missing file with docker-compose configuration: '{0}'. Cannot check the service reference id: '{1}'" .format(_f, _n))
            return True

        if not self.check_service(_f, _n):
            if PEDANTIC:
                log.error("Bad service {0} in {1}".format(_n, _f))
                return False
            else:
                log.warning("Bad service {0} in {1}".format(_n, _f))

        return True  # _ret


###############################################################
class DockerComposeApplication(DockerComposeService):
    """DockerCompose :: Application"""

    def __init__(self, *args, **kwargs):
        super(DockerComposeApplication, self).__init__(*args, **kwargs)

        _compose_rule = (self._types[self._default_type]).copy()

        _compose_rule.update({
            text_type('name'): (True, BaseUIString),  # NOTE: name for UI!
            text_type('description'): (True, BaseUIString),
            text_type('icon'): (False, Icon),
            text_type('compatibleStations'): (True, Group)
        })

        self._types[self._default_type] = _compose_rule

        # TODO: FIXME: add application to compatibleStations!
+ +############################################################### +class Profile(BaseRecordValidator): + """Profile""" + + def __init__(self, *args, **kwargs): + super(Profile, self).__init__(*args, **kwargs) + + self._default_type = "default_profile" + + default_rule = { + text_type('name'): (True, BaseUIString), + text_type('description'): (True, BaseUIString), + text_type('icon'): (False, Icon), + text_type('services'): (True, ServiceList), + text_type('supported_types'): (False, ServiceTypeList) + } + + self._types = {self._default_type: default_rule} + + self._station_list = None # TODO: FIXME! + + +############################################################### +class StationSSHOptions(BaseRecordValidator): # optional: "Station::ssh_options" # record: user, port, key, key_ref + """StationSSHOptions""" + + def __init__(self, *args, **kwargs): + super(StationSSHOptions, self).__init__(*args, **kwargs) + + self._default_type = "default_station_ssh_options" + + default_rule = { + text_type('user'): (False, StringValidator), + text_type('key'): (False, StringValidator), + text_type('port'): (False, StringValidator), + # TODO: BaseInt?? 
http://stackoverflow.com/questions/4187185/how-can-i-check-if-my-python-object-is-a-number + text_type('key_ref'): (False, URI), + } + + self._types = {self._default_type: default_rule} + + def validate(self, d): + """check whether data is a valid ssh connection options""" + + if d is None: + d = self._default_input_data + + _ret = super(StationSSHOptions, self).validate(d) + + # TODO: Check for ssh connection: Use some Python SSH Wrapper (2/3) + return _ret + +############################################################### +class StationType(BaseEnum): + """Type of station defines the set of required data fields!""" + + def __init__(self, *args, **kwargs): + super(StationType, self).__init__(*args, **kwargs) + + # NOTE: the list of possible values of Station::type (will depend on format version) + self._default_input_data = text_type('hidden') # NOTE: nothing is required. For extension only! + self._enum_list = [self._default_input_data, + text_type('standalone'), # No remote control via SSH & Hilbert client... + text_type('server'), # Linux with Hilbert client part installed but no remote control! + text_type('standard') # Linux with Hilbert client part installed! + ] # ,text_type('special') + + +############################################################### +class Station(BaseRecordValidator): # Wrapper? + """Station""" + + _extends_tag = text_type('extends') + _client_settings_tag = text_type('client_settings') + _type_tag = text_type('type') + + _HILBERT_STATION = '~/bin/hilbert-station' + + def __init__(self, *args, **kwargs): + super(Station, self).__init__(*args, **kwargs) + + self._poweron_tag = text_type('poweron_settings') + self._ssh_options_tag = text_type('ssh_options') + self._address_tag = text_type('address') + self._ishidden_tag = text_type('hidden') # TODO: deprecate with "fake" station type? 
+ self._profile_tag = text_type('profile') + + self._default_type = "default_station" + default_rule = { + Station._extends_tag: (False, StationID), + text_type('name'): (True, BaseUIString), + text_type('description'): (True, BaseUIString), + text_type('icon'): (False, Icon), + self._profile_tag: (True, ProfileID), + self._address_tag: (True, HostAddress), + self._poweron_tag: (False, StationPowerOnMethodWrapper), # !! variadic, PowerOnType... + self._ssh_options_tag: (False, StationSSHOptions), # !!! record: user, port, key, key_ref + text_type('omd_tag'): (True, StationOMDTag), # ! like ServiceType: e.g. agent. Q: Is this mandatory? + self._ishidden_tag: (False, StationVisibility), # Q: Is this mandatory? + Station._client_settings_tag: (False, StationClientSettings), # IDMap : (BaseID, BaseString) + Station._type_tag: (False, StationType) + } # + + self._types = {self._default_type: default_rule} + + self._compatible_applications = None # TODO: FIXME! + + self._HILBERT_STATION = '~/bin/hilbert-station' + + def is_hidden(self): + _d = self.get_data() + assert _d is not None + + _h = _d.get(self._ishidden_tag, None) + + if _h is None: + _h = StationVisibility.parse(None, parent=self, parsed_result_is_data=True) + + return _h + + def get_profile(self): + _d = self.get_data() + assert _d is not None + _profile_id = _d.get(self._profile_tag, None) + + assert _profile_id is not None + assert _profile_id != '' + + _parent = self.get_parent(cls=Hilbert) + assert isinstance(_parent, Hilbert) + + _profile = _parent.query('Profiles/{}/all' . format(_profile_id)) + assert _profile is not None + assert isinstance(_profile, Profile) + return _profile + + def get_address(self): # TODO: IP? 
+ _d = self.get_data() + assert _d is not None + _a = _d.get(self._address_tag, None) + if (_a is None) or (not bool(_a)): + log.error('Missing station address!') + raise Exception('Missing station address!') + + assert isinstance(_a, HostAddress) + + log.debug('HostAddress: {}'.format(_a)) + return _a + + def shutdown(self): + global PEDANTIC + + _a = self.get_address() + + assert _a is not None + assert isinstance(_a, HostAddress) + + try: + _ret = _a.ssh([self._HILBERT_STATION, "stop"], shell=False) + except: + s = "Could not stop Hilbert on the station {}".format(_a) + if not PEDANTIC: + log.warning(s) + return False + else: + log.exception(s) + raise + + if not _ret: + return _ret + + try: + _ret = _a.ssh([self._HILBERT_STATION, "shutdown"], shell=False) + except: + s = "Could not schedule a shutdown on the station {}".format(_a) + if not PEDANTIC: + log.warning(s) + return False + else: + log.exception(s) + raise + + return _ret + + + def deploy(self): + global PEDANTIC + + + # TODO: get_client_settings() + _d = self.get_data() + _settings = _d.get(self._client_settings_tag, None) + + if _settings is None: + if not PEDANTIC: + log.warning('Missing client settings for this station. Nothing to deploy!') + else: + log.error('Missing client settings for this station. Nothing to deploy!') + raise Exception('Missing client settings for this station. Nothing to deploy!') + + if isinstance(_settings, BaseValidator): + _settings = _settings.get_data() + + default_app_id = _settings.get('hilbert_station_default_application', None) + # TODO: check default_app_id! + # TODO: all compatible applications!? + + _profile = self.get_profile() + if isinstance(_profile, BaseValidator): + _profile = _profile.get_data() + + # All supported applications??!? 
+ _serviceIDs = _profile.get(text_type('services'), []) # TODO: profile.get_services() + assert _serviceIDs is not None + assert isinstance(_serviceIDs, ServiceList) # list of ServiceID + + _serviceIDs = _serviceIDs.get_data() # Note: IDs from config file - NOT Service::ref! + assert isinstance(_serviceIDs, list) # list of strings (with ServiceIDs)? + + _a = self.get_address() + + assert _a is not None + assert isinstance(_a, HostAddress) + + fd, path = tempfile.mkstemp() + try: + with os.fdopen(fd, 'w') as tmp: + # TODO: FIXME: list references into docker-compose.yml??? + # TODO: use bash array to serialize all Services/Applications! + # NOTE: Only handles (IDs) are to be used below: + # NOTE: ATM only compose && Application/ServiceIDs == refs to the same docker-compose.yml! + # TODO: NOTE: may differ depending on Station::type! + tmp.write("hilbert_station_profile_services=\"{}\"\n".format(' '.join(_serviceIDs))) + + for k in _settings: + tmp.write("{0}=\"{1}\"\n".format(k, str(_settings.get(k, '')))) + + tmp.write("background_services=\"${hilbert_station_profile_services}\"\n") + + tmp.write("default_app=\"${hilbert_station_default_application}\"\n") # ID! + + # TODO: collect all compatible applications! + tmp.write("possible_apps=\"${default_app}\"\n") + + # TODO: add also all further necessary resources (docker-compose.yml etc) and tar.gz it for deployment?! 
+ # _cmd = ["scp", path, "{0}:/tmp/{1}".format(_a, os.path.basename(path))] # self._HILBERT_STATION, 'deploy' + + + try: + _a.scp(path, "/tmp/{}".format(os.path.basename(path)), shell=False) + except: + s = "Could not deploy new local settings to {}".format(_a) + if not PEDANTIC: + log.warning(s) + return False + else: + log.exception(s) + raise + finally: + log.debug("Temporary Station Configuration File: {}".format(path)) +# s = '' +# with os.open(path, 'r') as tmp: +# s += tmp.readline() + '\n' +# log.debug("New Station Configuration: {}".format(s)) +# os.remove(path) + + _cmd = [self._HILBERT_STATION, "init", os.path.join("/tmp", os.path.basename(path))] + try: + _a.ssh(_cmd, shell=False) + except: + s = "Could not initialize the station using the new configuration file with {}".format(' '.join(_cmd)) + if not PEDANTIC: + log.warning(s) + return False + else: + log.exception(s) + raise + + # ### see existing deploy.sh!? + # TODO: what about other external resources? docker-compose*.yml etc...? + # TODO: restart hilbert-station? 
+ +# raise NotImplementedError("Cannot deploy local configuration to this station!") + +# def start_service(self, action_args): +# raise NotImplementedError("Cannot start a service/application on this station!") + +# def finish_service(self, action_args): +# raise NotImplementedError("Cannot finish a service/application on this station!") + + def app_change(self, app_id): + global PEDANTIC + + _a = self.get_address() + + assert _a is not None + assert isinstance(_a, HostAddress) + + try: + _ret = _a.ssh([self._HILBERT_STATION, "app_change", app_id], shell=False) + except: + s = "Could not change top application on the station '{0}' to '{1}'".format(_a, app_id) + if not PEDANTIC: + log.warning(s) + return False + else: + log.exception(s) + raise + + return _ret + +# raise NotImplementedError("Cannot switch to a different application on this station!") + + def poweron(self): + _d = self.get_data() + assert _d is not None + + poweron = _d.get(self._poweron_tag, None) + if poweron is None: + log.error("Missing/wrong Power-On Method configuration for this station!") + raise Exception("Missing/wrong Power-On Method configuration for this station!") + + poweron.start() # , action_args???? + + def run_action(self, action, action_args): + """ + Run the given action on/with this station + + :param action_args: arguments to the action + :param action: + start (poweron) + stop (shutdown) + cfg_deploy + app_change +# start [] +# finish [] + + :return: nothing. + """ + + if action not in ['start', 'stop', 'cfg_deploy', 'app_change']: + raise Exception("Running action '{0}({1})' is not supported!" . format(action, action_args)) + + # Run 'ssh address hilbert-station action action_args'?! 
+ if action == 'start': + self.poweron() # action_args + elif action == 'cfg_deploy': + self.deploy() # action_args + elif action == 'stop': + self.shutdown() # action_args + elif action == 'app_change': + self.app_change(action_args) # ApplicationID + +# elif action == 'start': +# self.start_service(action_args) +# elif action == 'finish': +# self.finish_service(action_args) + + + def get_base(self): + _d = self.get_data() + assert _d is not None + _b = _d.get(self._extends_tag, None) # StationID (validated...) + +# if _b is not None: +# if isinstance(_b, BaseValidator): +# _b = _b.get_data() + + return _b + + def extend(delta, base): # delta == self! + assert delta.get_base() is not None + assert base.get_base() is None + + # NOTE: at early stage there may be no parent data... + if delta.get_parent() is not None: + if delta.get_parent().get_data() is not None: + assert delta.get_base() in delta.get_parent().get_data() + assert delta.get_parent().get_data().get(delta.get_base(), None) == base + + _d = delta.get_data() + _b = base.get_data() + + assert delta._extends_tag in _d + assert delta._extends_tag not in _b + + del _d[delta._extends_tag] + assert delta.get_base() is None + + # NOTE: Extend/merge the client settings: + k = delta._client_settings_tag + + bb = _b.get(k, None) + if bb is not None: + dd = _d.get(k, None) + if dd is None: + dd = StationClientSettings.parse(None, parent=delta) + + assert isinstance(dd, StationClientSettings) + assert isinstance(bb, StationClientSettings) + dd.extend(bb) + + _d[k] = dd + + # NOTE: the following is an application of delta to base data + for k in _b: # NOTE: take from base only the missing parts + assert k != delta._extends_tag + + if k == delta._client_settings_tag: + continue + + v = _d.get(k, None) + + if v is None: # key from base is missing or None in delte? + _d[k] = _b[k] # TODO: is copy() required for complicated structures? 
+ + +############################################################### +class BaseIDMap(BaseValidator): + """Mapping: SomeTypeID -> AnyType""" + + def __init__(self, *args, **kwargs): + super(BaseIDMap, self).__init__(*args, **kwargs) + + self._default_type = None + self._types = {} # type -> (TypeID, Type) + + self._default_input_data = {} + + def detect_type(self, d): + """determine the type of variadic data for the format version""" + + assert not (self._default_type is None) + assert len(self._types) > 0 + + return self._default_type + + def validate(self, d): + if d is None: + d = self._default_input_data + + assert isinstance(d, dict) + + self._type = self.detect_type(d) + + assert self._type is not None + assert self._type in self._types + + (_id_rule, _rule) = self._types[self._type] + + try: + _lc = d.lc # starting position? + except: + log.warning("Input data bears no ruamel.yaml line/column data!") + _lc = (0, 0) + + (s, c) = _get_line_col(_lc) + + _d = {} + + _ret = True + for offset, k in enumerate(d): + v = d[k] # TODO: d[offset]??? + l = s + offset + _lc = (l, c) + + _id = None + _vv = None + + try: + _id = _id_rule.parse(k, parent=self) + except ConfigurationError as err: + _key_error(k, v, _lc, "Invalid ID: '{}' (type: '%s')" % (self._type)) # Raise Exception? + pprint(err) + _ret = False + + try: + _vv = _rule.parse(v, parent=self) + except ConfigurationError as err: + _value_error(k, v, _lc, "invalid Value (for ID: '{}') (type: '%s')" % (self._type)) # Raise Exception? + pprint(err) + _ret = False + + if _ret: + assert _id is not None + assert _vv is not None + # _id = _id.get_data() # ?? 
+ _d[_id] = _vv # .get_data() + + if _ret: + self.set_data(_d) + + return _ret + + +############################################################### +class GlobalServices(BaseIDMap): + def __init__(self, *args, **kwargs): + super(GlobalServices, self).__init__(*args, **kwargs) + + self._default_type = "default_global_services" + self._types = {self._default_type: (ServiceID, ServiceWrapper)} + + # def validate(self, data): + # _ret = BaseID.validate(self, data) + # ### TODO: Any post processing? + # return _ret + + +############################################################### +# "client_settings": (False, StationClientSettings) # IDMap : (BaseID, BaseString) +class StationClientSettings(BaseIDMap): + def __init__(self, *args, **kwargs): + super(StationClientSettings, self).__init__(*args, **kwargs) + + self._default_type = "default_station_client_settings" + self._types = { + self._default_type: (ClientVariable, ScalarValidator) + } # ! TODO: only strings for now! More scalar types?! BaseScalar? + + # TODO: FIXME: check for default hilbert applicationId! 
+ + def extend(delta, base): + assert isinstance(base, StationClientSettings) + + _b = base.get_data() + if _b is not None: + assert isinstance(_b, dict) + _b = _b.copy() + + # NOTE: merge and override settings from the base using the current delta: + _d = delta.get_data() + if _d is not None: + _b.update(_d) + + delta.set_data(_b) + + +############################################################### +class GlobalApplications(BaseIDMap): + def __init__(self, *args, **kwargs): + super(GlobalApplications, self).__init__(*args, **kwargs) + + self._default_type = "default_global_applications" + self._types = {self._default_type: (ApplicationID, ApplicationWrapper)} + +############################################################### +class GlobalProfiles(BaseIDMap): + def __init__(self, *args, **kwargs): + super(GlobalProfiles, self).__init__(*args, **kwargs) + + self._default_type = "default_global_profiles" + self._types = {self._default_type: (ProfileID, Profile)} + + # def validate(self, data): + # _ret = BaseID.validate(self, data) + # ### TODO: Any post processing? + # return _ret + + +############################################################### +class GlobalStations(BaseIDMap): + """Global mapping of station IDs to Station's""" + + def __init__(self, *args, **kwargs): + super(GlobalStations, self).__init__(*args, **kwargs) + + self._default_type = "default_global_stations" + self._types = {self._default_type: (StationID, Station)} # NOTE: {StationID -> Station} + + def validate(self, d): + """Extension mechanism on top of the usual ID Mapping parsing""" + + if d is None: + d = self._default_input_data + + if not BaseIDMap.validate(self, d): + return False + + sts = self.get_data() # NOTE: may be handy for postprocessing! + + _ret = True + + _processed = {} + _todo = {} + + for k in sts: + v = sts[k] # TODO: popitem as below? + _b = v.get_base() + + if _b is None: # TODO: FIXME: add to Station API! 
+ _processed[k] = v + else: + assert _b in sts # NOTE: any station extends some _known_ station! + _todo[k] = v + + _chg = True + while bool(_todo) and _chg: + _chg = False + _rest = {} + while bool(_todo): + k, v = _todo.popitem() + + _b = v.get_base() + assert k != _b # no infinite self-recursive extensions! + + # print(_b, ' :base: ', type(_b)) + assert _b in _processed + + if _b in _processed: + v.extend(_processed[_b]) + _processed[k] = v + assert v.get_base() is None + _chg = True + else: + _rest[k] = v + + _todo = _rest + + if bool(_todo): + log.error('Cyclic dependencies between stations: {}' .format(_todo)) + _ret = False + +# if _ret: +# self.set_data(_processed) + + # TODO: FIXME: check for required fields after extension only!!! + + return _ret + + +############################################################### +class BaseList(BaseValidator): + """List of entities of the same type""" + + def __init__(self, *args, **kwargs): + super(BaseList, self).__init__(*args, **kwargs) + + self._default_type = None + self._types = {} + + def validate(self, d): + if d is None: + d = self._default_input_data + + assert self._default_type is not None + assert len(self._types) > 0 + + _lc = d.lc + + # NOTE: determine the class of items based on the version and sample data + self._type = self._types[self._default_type] + assert self._type is not None + + if (not isinstance(d, (list, dict, tuple, set))) and isinstance(d, string_types): + try: + _d = [self._type.parse(StringValidator.parse(d, parent=self))] + self.get_data(_d) + return True + except: + pass # Not a single string entry... + + # list!? + _ret = True + + _d = [] + for idx, i in enumerate(d): # What about a string? + _v = None + try: + _v = self._type.parse(i, parent=self) + _d.insert(idx, _v) # append? 
+ except ConfigurationError as err: + _value_error("[%d]" % idx, d, _lc, "Wrong item in the given sequence!") + pprint(err) + _ret = False + + if _ret: + self.set_data(_d) + + return _ret + + +############################################################### +class GroupIDList(BaseList): + """List of GroupIDs or a single GroupID!""" + + def __init__(self, *args, **kwargs): + super(GroupIDList, self).__init__(*args, **kwargs) + + self._default_type = "default_GroupID_list" + self._types = {self._default_type: GroupID} + + +############################################################### +class ServiceList(BaseList): + """List of ServiceIDs or a single ServiceID!""" + + def __init__(self, *args, **kwargs): + super(ServiceList, self).__init__(*args, **kwargs) + + self._default_type = "default_ServiceID_list" + self._types = {self._default_type: ServiceID} + + +############################################################### +class ServiceTypeList(BaseList): + """List of ServiceType's or a single ServiceType!""" + + def __init__(self, *args, **kwargs): + super(ServiceTypeList, self).__init__(*args, **kwargs) + + self._default_type = "default_ServiceType_list" + self._types = {self._default_type: ServiceType} + + +############################################################### +class Group(BaseRecordValidator): # ? TODO: GroupSet & its .parent? + """Group""" + + def __init__(self, *args, **kwargs): + super(Group, self).__init__(*args, **kwargs) + + self._default_type = "default_group" + + self._include_tag = text_type('include') + self._exclude_tag = text_type('exclude') + self._intersectWith_tag = text_type('intersectWith') + + self._station_list = None # TODO: FIXME! 
+ + default_rule = { + self._include_tag: (False, GroupIDList), + self._exclude_tag: (False, GroupIDList), + self._intersectWith_tag: (False, GroupIDList) + } + # text_type('name'): (False, BaseUIString), + # text_type('description'): (False, BaseUIString), + # text_type('icon'): (False, Icon) + + self._types = {self._default_type: default_rule} + + def detect_extra_rule(self, key, value): # Any extra unlisted keys in the mapping? + + if value is None: # Set item! + return GroupID, ScalarValidator + + return None + + def validate(self, d): + if d is None: + d = self._default_input_data + + _ret = BaseRecordValidator.validate(self, d) + + # TODO: FIXME: Add extra keys into include! + + return _ret + + + +############################################################### +class GlobalGroups(BaseIDMap): + def __init__(self, *args, **kwargs): + super(GlobalGroups, self).__init__(*args, **kwargs) + + self._default_type = "default_global_groups" + self._types = {self._default_type: (GroupID, Group)} + + +############################################################### +class Preset(BaseRecordValidator): + """Preset""" + + def __init__(self, *args, **kwargs): + super(Preset, self).__init__(*args, **kwargs) + + self._default_type = "default_preset" + # self.__tag = "Version" + default_rule = { + # self.__tag: (True , ??), # Mandatory + # self.__tag: (False, ??), # Optional + } + self._types = {self._default_type: default_rule} + raise NotImplementedError("Presets are not supported yet!") + + +############################################################### +class GlobalPresets(BaseIDMap): # Dummy for now! 
+ + def __init__(self, *args, **kwargs): + super(GlobalPresets, self).__init__(*args, **kwargs) + + self._default_type = "default_global_presets" + self._types = {self._default_type: (PresetID, Preset)} + + def validate(self, d): + if d is None: + d = self._default_input_data + + log.warning("Presets are not supported yet!") + # raise NotImplementedError("Presets are not supported yet!") + return True + +############################################################### +class Hilbert(BaseRecordValidator): + """General Hilbert Configuration format""" + + def __init__(self, *args, **kwargs): + kwargs['parsed_result_is_data'] = kwargs.pop('parsed_result_is_data', False) + + super(Hilbert, self).__init__(*args, **kwargs) # This is the Main Root of all Validators! + + self._default_type = "default_global" + + self._version_tag = text_type('Version') + self._applications_tag = text_type('Applications') + self._services_tag = text_type('Services') + self._profiles_tag = text_type('Profiles') + self._stations_tag = text_type('Stations') + self._groups_tag = text_type('Groups') + + ### explicit (optional) Type? + default_rule = { + self._version_tag: (True, SemanticVersionValidator), # Mandatory, specifies supported Types of Config's Entity + self._services_tag: (True, GlobalServices), + self._applications_tag: (True, GlobalApplications), + self._profiles_tag: (True, GlobalProfiles), + self._stations_tag: (True, GlobalStations), + self._groups_tag: (False, GlobalGroups), # Optional + text_type('Presets'): (False, GlobalPresets), # Optional. May be removed! default? 
+ } + + self._types = {self._default_type: default_rule} + + self._default_input_data = None + + @classmethod + def parse(cls, d, *args, **kwargs): + self = cls(*args, **kwargs) + + if self._version_tag not in d: + _key_note(self._version_tag, d.lc, "ERROR: Missing mandatory '{}' key field!") + raise ConfigurationError(u"{}: {}".format("ERROR:", "Missing version tag '{0}' in the input: '{1}'!".format(self._version_tag, d))) + + try: + _v = SemanticVersionValidator.parse(d[self._version_tag], parent=self, partial=True) + except: + _value_error(self._version_tag, d, d.lc, "Wrong value of global '{}' specification!") + raise + + self.set_version(_v) # NOTE: globally available now! + + if self.validate(d): # NOTE: validate should not **explicitly** throw exceptions!!! + if self._parsed_result_is_data: + return self.get_data() + + return self + + # NOTE: .parse should! + raise ConfigurationError(u"{}: {}".format("ERROR:", "Invalid data: '{}'!".format(d))) + + def validate(self, d): + global PEDANTIC + if d is None: + d = self._default_input_data + + assert isinstance(d, dict) + + _ret = BaseRecordValidator.validate(self, d) + + for offset, k in enumerate(d): + if k == self._version_tag: + if offset != 0: + if not PEDANTIC: + log.warning("'{}' specified correctly but not ahead of everything else (offset: {})!" + .format(self._version_tag, offset)) + else: + log.error("'{}' specified correctly but not ahead of everything else (offset: {})!" + .format(self._version_tag, offset)) + _ret = False + break + + if not _ret: + log.error("Wrong Hilbert configuration!") + return _ret + + _d = self.get_data() + # NOTE: check uniqueness of keys among (Services/Applications): + + # TODO: add get_service(s) and get_application(s)? 
+ _services = self.query("{0}/{1}".format(self._services_tag, 'data')) + _applications = self.query("{0}/{1}".format(self._applications_tag, 'data')) # _d.get() + + assert _services is not None + assert _applications is not None + + if (len(_services) > 0) and (len(_applications) > 0): + for p, k in enumerate(_services): + if k in _applications: + log.error("'{}' is both a ServiceID and an ApplicationID:".format(k)) + + __services = d.get(self._services_tag) # Rely on the above and use get_data? + __applications = d.get(self._applications_tag) + + _key_error(k, __services[k], __services.lc.key(k), "Service key: {}") + _key_error(k, __applications[k], __applications.lc.key(k), "Application key: {}") + + _ret = False + + # ! TODO: check Uniqueness of keys among (Profiles/Stations/Groups) !!!! + # ! TODO: check for GroupID <-> StationID <-!=-> ProfileID + + return _ret + +############################################################### +def load_yaml(f, Loader=VerboseRoundTripLoader, version=(1, 2), preserve_quotes=True): + try: + return yaml.load(f, Loader=Loader, version=version, preserve_quotes=preserve_quotes) + except (IOError, yaml.YAMLError) as e: + error_name = getattr(e, '__module__', '') + '.' 
+ e.__class__.__name__ + raise ConfigurationError(u"{}: {}".format(error_name, e)) + + +def load_yaml_file(filename): + with open(filename, 'r') as fh: + return load_yaml(fh) + + +############################################################### +def parse_hilbert(d, parent=None): + assert d is not None + return Hilbert.parse(d, parent=parent, parsed_result_is_data=False) + + +############################################################### +def yaml_dump(d, stream=None): + return yaml.round_trip_dump(d, stream=stream) diff --git a/config/subcmdparser.py b/config/subcmdparser.py new file mode 100644 index 0000000..29a3aca --- /dev/null +++ b/config/subcmdparser.py @@ -0,0 +1,315 @@ +# import arghandler # NOQA + +import argparse # NOQA +import inspect # NOQA +import logging # NOQA +from operator import attrgetter + +log = logging.getLogger(__name__) # + +################################# +# decorator +################################# +registered_subcommands = {} +registered_subcommands_help = {} + +def subcmd(arg=None, **kwargs): + """ + This decorator is used to register functions as subcommands with instances + of SubCommandHandler. 
+ """ + if inspect.isfunction(arg): + return subcmd_fxn(arg, arg.__name__, kwargs) + else: + def inner_subcmd(fxn): + return subcmd_fxn(fxn, arg, kwargs) + + return inner_subcmd + + +def subcmd_fxn(cmd_fxn, name, kwargs): + global registered_subcommands, registered_subcommands_help + + # get the name of the command + if name is None: + name = cmd_fxn.__name__ + + registered_subcommands[name] = cmd_fxn + registered_subcommands_help[name] = kwargs.pop('help', '') + + return cmd_fxn + +######################### +class SortingHelpFormatter(argparse.RawTextHelpFormatter): + def __init__(self, *args, **kwargs): + kwargs['indent_increment'] = 1 + kwargs['max_help_position'] = 17 + super(SortingHelpFormatter, self).__init__(*args, **kwargs) + +######################### +class SubCommandHandler(argparse.ArgumentParser): + """Modified ArgumentHandler from https://github.com/druths/arghandler""" + + def __init__(self, *args, **kwargs): + """ + All constructor arguments are the same as found in `argparse.ArgumentParser`. + + kwargs + ------ + * `use_subcommand_help [=False]`: when printing out the help message, use a shortened + version of the help message that simply shows the sub-commands supported and + their description. + + * `enable_autocompletion [=False]`: make it so that the command line + supports autocompletion + + """ + + ### extract any special keywords here + self._use_subcommand_help = kwargs.pop('use_subcommand_help', False) + self._enable_autocompletion = kwargs.pop('enable_autocompletion', False) + + + self._ignore_remainder = False + self._use_subcommands = True + self._subcommand_lookup = dict() + self._subcommand_help = dict() + + self._has_parsed = False + + # setup the class + if self._use_subcommand_help: + kwargs['formatter_class']=SortingHelpFormatter + + super(SubCommandHandler, self).__init__(*args, **kwargs) + + def add_argument(self, *args, **kwargs): + """ + This has the same functionality as `argparse.ArgumentParser.add_argument`. 
+ """ + # just watch for the REMAINDER nargs to see if subcommands are relevant + + assert not(self._ignore_remainder and 'nargs' in kwargs and kwargs['nargs'] == argparse.REMAINDER) + # self._use_subcommands = False + + return super(SubCommandHandler, self).add_argument(*args, **kwargs) + + def set_subcommands(self, subcommand_lookup): + """ + Provide a set of subcommands that this instance of ArgumentHandler should + support. This is an alternative to using the decorator `@subcmd`. Note that + the total set of subcommands supported will be those specified in this method + combined with those identified by the decorator. + """ + if type(subcommand_lookup) is not dict: + raise TypeError('subcommands must be specified as a dict') + + # sanity check the subcommands + self._subcommand_lookup = {} + self._subcommand_help = {} + for cn, cf in subcommand_lookup.items(): + if type(cn) is not str: + raise TypeError('subcommand keys must be strings. Found %s' % str(cn)) + if type(cf) == tuple: + if not callable(cf[0]): + raise TypeError('subcommand with name %s must be callable' % cn) + else: + self._subcommand_lookup[cn] = cf[0] + self._subcommand_help[cn] = cf[1] + elif not callable(cf): + raise TypeError('subcommand with name %s must be callable' % cn) + else: + self._subcommand_lookup[cn] = cf + self._subcommand_help[cn] = '' + + return + + def parse_args(self, argv=None): + """ + Works the same as `argparse.ArgumentParser.parse_args`. 
+ """ + group = self.add_mutually_exclusive_group() + group.add_argument("-v", "--verbose", action=CountedVerboseAction, help='increase verbosity') + group.add_argument("-q", "--quiet", action=CountedQuietAction, help='decrease verbosity') + + # add_argument, set_logging_level, set_subcommands, + # handler.set_logging_argument('-l', '--log_level', default_level=logging.INFO) + + self.add_argument('-H', '--helpall', action=HelpAllAction, + nargs=0, default=argparse.SUPPRESS, required=False, type=None, metavar=None, + help="show detailed help and exit") + + + global registered_subcommands, registered_subcommands_help + + if self._has_parsed: + raise Exception('ArgumentHandler.parse_args can only be called once') + + # collect subcommands into _subcommand_lookup + for cn, cf in registered_subcommands.items(): + self._subcommand_lookup[cn] = cf + self._subcommand_help[cn] = registered_subcommands_help[cn] + + assert len(self._subcommand_lookup) > 0 +# self._use_subcommands = False + + # add in subcommands if appropriate + assert self._use_subcommands +# if not self._use_subcommands: +# pass +# else: + max_cmd_length = max([len(x) for x in self._subcommand_lookup.keys()]) + subcommands_help_text = 'the subcommand to run' + if self._use_subcommand_help: + subcommands_help_text = ':\n' + for command in sorted(self._subcommand_lookup.keys()): # Sorted... 
+ subcommands_help_text += command.ljust(max_cmd_length + 2) + subcommands_help_text += self._subcommand_help[command] + subcommands_help_text += '\n' + + self.add_argument('cmd', choices=self._subcommand_lookup.keys(), help=subcommands_help_text, + metavar='subcommand') + + cargs_help_msg = 'arguments for the subcommand' if not self._use_subcommand_help else argparse.SUPPRESS + self.add_argument('cargs', nargs=argparse.REMAINDER, help=cargs_help_msg) + + # handle autocompletion if requested + if self._enable_autocompletion: + import argcomplete + argcomplete.autocomplete(self) + + # parse arguments + args = super(SubCommandHandler, self).parse_args(argv) + + self._has_parse = True + + +# cargs_help_msg = 'arguments for the subcommand' if not self._use_subcommand_help else argparse.SUPPRESS +# self.add_argument('cargs', nargs=argparse.REMAINDER, help=cargs_help_msg) + + return args + + def run(self, argv=None, context_fxn=None): + """ + This method triggers a three step process: + + 1) Parse the arguments in `argv`. If not specified, `sys.argv` is + used. + + 2) Configure the logging level. This only happens if the + `set_logging_argument` was called. + + 3) Run the appropriate subcommand. This only happens if subcommands + are available and enabled. Prior to the subcommand being run, + the `context_fxn` is called. This function accepts one argument - + the namespace returned by a call to `parse_args`. + + The parsed arguments are all returned. 
+ """ + # get the arguments + args = self.parse_args(argv) + + # # handle the logging argument + # if self._logging_argument: + # level = eval('args.%s' % self._logging_argument) + # + # # convert the level + # level = eval('logging.%s' % level) + # + # # call the logging config fxn + # self._logging_config_fxn(level, args) + +# self.logging_handler(args) +# pedantic_handler(self, vars(args)) + + # generate the context + context = args + if context_fxn: + context = context_fxn(args) + + # create the sub command argument parser + scmd_parser = argparse.ArgumentParser(prog='%s %s' % (self.prog, args.cmd), add_help=True) + + # handle the subcommands + self._subcommand_lookup[args.cmd](scmd_parser, context, args.cargs) + + return args # run() + +# logging.CRITICAL = 50 +# logging.ERROR = 40 +# logging.WARNING = 30 +# logging.INFO = 20 +# logging.DEBUG = 10 +# logging.NOTSET = 0 +class CountedVerboseAction(argparse._CountAction): + def __init__(self, *args, **kwargs): + super(CountedVerboseAction, self).__init__(*args, **kwargs) + + def __call__(self, parser, namespace, values, option_string=None): + new_count = argparse._ensure_value(namespace, self.dest, 0) + 1 + setattr(namespace, self.dest, new_count) + + _log = logging.getLogger() # root logger! + level = max(logging.DEBUG, _log.level - logging.DEBUG) + + if _log.level != level: + log.debug("Changing logging level: %s -> %s", + logging.getLevelName(_log.level), + logging.getLevelName(level)) + _log.setLevel(level) + log.debug("New logging level: %s", logging.getLevelName(_log.level)) + + +class CountedQuietAction(argparse._CountAction): + def __init__(self, *args, **kwargs): + super(CountedQuietAction, self).__init__(*args, **kwargs) + + def __call__(self, parser, namespace, values, option_string=None): + new_count = argparse._ensure_value(namespace, self.dest, 0) + 1 + setattr(namespace, self.dest, new_count) + + _log = logging.getLogger() # root logger! 
+ level = min(logging.CRITICAL, _log.level + logging.DEBUG) + + if _log.level != level: + log.debug("Changing logging level: %s -> %s", + logging.getLevelName(_log.level), + logging.getLevelName(level)) + _log.setLevel(level) + log.debug("New logging level: %s", logging.getLevelName(_log.level)) + +class MyHelpAction(argparse.Action): ### _HelpAction?? + def __init__(self, + option_strings, + dest=argparse.SUPPRESS, + default=argparse.SUPPRESS, + help=None): + super(MyHelpAction, self).__init__( + option_strings=option_strings, + dest=dest, + default=default, + nargs=0, + help=help) + + def __call__(self, parser, namespace, values, option_string=None): + parser.print_help() + raise BaseException("Help was printed!") + + +class HelpAllAction(argparse.Action): ### _HelpAction?? + def __init__(self, option_strings, *args, **kwargs): + super(HelpAllAction, self).__init__(option_strings=option_strings, *args, **kwargs) + + def __call__(self, parser, args, values, option_string=None): + for cn in sorted(parser._subcommand_lookup.keys()): + # create the sub command argument parser + scmd_parser = argparse.ArgumentParser(prog='%s %s' % (parser.prog, cn), add_help=False) + scmd_parser.add_argument('-h', '--help', action=MyHelpAction, help="show %(prog)s's help message") + try: + print('\n') + a = parser._subcommand_lookup[cn](scmd_parser, args, ['--help']) + except: + pass + + parser.exit(0) + setattr(args, self.dest, values) diff --git a/docs/.gitignore b/docs/.gitignore new file mode 100644 index 0000000..9536646 --- /dev/null +++ b/docs/.gitignore @@ -0,0 +1,4 @@ +doxy/ +epydoc/ +pep8.report.txt +pylint_*.html diff --git a/docs/ConfigurationDD.png b/docs/ConfigurationDD.png index f03b0b7..c5624c6 100644 Binary files a/docs/ConfigurationDD.png and b/docs/ConfigurationDD.png differ diff --git a/docs/ConfigurationDD.xml b/docs/ConfigurationDD.xml index d63a7a9..f674907 100644 --- a/docs/ConfigurationDD.xml +++ b/docs/ConfigurationDD.xml @@ -1 +1 @@ 
-7V1pk6M2Gv41XbW7VXYhcX+Me46kamYzNZ2tbD51YZDd7GDwAp6ezq+PZCQMQuKwgbY7cqY6RhaX9LyH3kt3+v3ux8fU2z99TgIU3UEt+HGnv7uD0HUd/Jc0vBQNpgaKhm0aBkVTpeEh/BPRRo22HsIAZbWOeZJEebivN/pJHCM/r7V5aZo817ttkqh+1723RY2GB9+Lmq2/h0H+VLQ60Dq1/4zC7RO7M7Dc4pe153/bpskhpve7g/rm+Cl+3nnsWvRFsycvSJ4rTfr7O/0+TZK8+Lb7cY8iMrRs2IrzPkh+LZ87RXHe5wRo2MUp373ogNgzH58sf2GjgQI8OPQwSfOnZJvEXvT+1Lo6vjEi1wT4yE92oY+/a/j7U76LaHP5rqQdxcFPZKLw4TpK/G9F04cwimiH/6E8f6HI8A55gptO9/6UJHt21TxNvqH7JErS49PqmnZ/r2nlL2z+SN8Nvnyl58Yk/xV3pneyydGPMP8veYqlSY/+KB86T18qP5HDP+i1myNPJyNLDqnPcGXR4c69dItYP4dSCxnnyql0xj6iZIfwjXAHSl4LbWkDl14pRZGXh9/rwPUo/rflueXlviQhfsBTl2SzyfCT1EDC+tD76TpFK6NmR6/fpnhFelbLhQzXrF+I0Tu7UDEsjQthpHgvlW570iETvBS9j2XVH9gCWvsLmpf1N62O/rC1P/5SvCE7qkz6qelIuRKyh85sVNxGA7dP4f2oGJo3SMVQd5dW5WMbNUwaprXUqx/7PBqHWuttdEdfOi23GYkDGNp1USi+g5Kz88lZyCTWTVFoQzzq1hI6pg2M4u+5UteuX9awrElozrT/XlIXMhJWND0LTWvuLdI0dJcVEoZuHZKOIfr1Winc/rvp1fZ8erWicDzchoDCTXcYhWtLw7aM2Qjc0uq0B3TzPCK2jDqYXTgNDXM0w1Ql2WPxND+0PzA7+utmW//LpbSAgq0Ig2u1xl+2+RGcRcOBNTyg9HuIUUnb8S0OfF/cVjmfYwnZc7iLvBjVKXyTxDljFIRe/KcwCj55L8mBQD3LPf8bO1o9JWn4J+7vlewBT39OyRBaLdR6OvOBXJHeK0UZPvcLoz9QNn3yspw9TxJF3j4L18cnJF12GHJhvEryPNnRTuzVPnC3L4ycPK+rMTV26qcwRncS9lSsGdsYxneU5uhHH0YAOOCyJeHzyaJbrhOeqtZc1ihiFBVctsNOtwTAW9EeP90RKuwJw1/eESLY76PQx8wsiclxX2DSW6RSqOKxzDlBVJuTODnCuIo22uRF4TbGhxHakCuQecEPGP1Em3Mig1bZ3vPDePvp2OedcWr5SsebND0/hTl6wO3kmZ5Tj5yY4OttoiN2nsIgQPERW7mXe+uSgCirw+9hrvA/PKL3RPSY+L3u8TE4HeN/pHua3ycxfj8vPIIKYew/oywXwg32hhvDl9UPXtAaAV2mDF35yx4NxNdv5JRzWZ3Cz1j4MeGc+NGa+MGDcIQQ0ScfA5Qjn3CbrEATfms86sexUQiYCAFstTEHAoBjSjhIVnCFR6z45niYi/m3vB0Z03id7SucIZSylgbzobzmgV20RVpJeFCoeNDkCASaMScE3TN084oipPTzt6Sf6/ac+jlwgQB8hP/F3q5UhzChxyWKqnLwhLJaF8WZenKmE+1fpX4NXBFrIugIUOan4b7KfhRIrgckcyrRgFnoREp06GOAHMFwSEOlNk8767Mqzq7Ik7+SqcgF5bOzrf8fSGTfKkj8bIHHAqWxFy1ItOPC04BpIAQWyFk7i8D04CJY63BhO2sQbFz8s2adLsCrO1UG4ye7PdaP8HRhDaNYwsk4kbhZrvDL1bKPWLHYD7FMKXKYjBzm1eJF68gutNxjiCbZ7RjYKdReWXevaeucIt/U3b3Up68DnHEUdsi7ZwUauyuAmdOir9ObfUV+7sVbPDqVEK7azSALIe+4GY9pLzpy2RytyIBlDWgP8yYZErUQgwWdxL1ibn2Zm9k
bg/JVQB8UnMPZJHK+WCDW9Hw13VNNt0Cfn2q67fkiNOUxFHWDDBFJLA6EIuTYVgkEqUSKBKG3S0jiSSVWpIjYrsZvnB05Qj4fPpS/tESOIBCY6BgrwiJCysDxP8q3lsqjWkSIKOSLaTKdASHnRn8wAeTUJVBpFBg7W4GLgNANi8PthVFMcjOraDUgt+HfRyGerAtM+K1mEKXTdet0IyhxBqfEGUZTiStNsTWzqzWG2dUR2Ey6Vs8FYPDEr/FbPmbFovYxQBvvEOWPntAhcPGC9syAC6UK9FQFxrD/CnE6yqIWiNIiz4IpkbFHPtQGTm2dJJECz3TgEeiR04EHSlxLF4AHc7uIKBUtEGJaiELQJAgS2JgnQxA0BCob8yz8/Mun1fuvvz3+SwXmzDb5IovqdLPPFK3K7C+XSzW/E86v3lMJHmV+RYaGzljNXIW8XLrEAn2WWJOGvEAuRRI2bdplDlQVdobmjoE7UTbTwJD0XK2IJuJaJVu4aEUkQs8oTAs0hRJTScbzfLvOAq2htzACBy02GwcYaG0FluP383xjaCHi4zmu94d4vAdbB4bTgVLQpiQF0fpuKlKwZI6hfZqc/IDdGPpSdFe89BoAJFreTcdL9cZUjuxZYt9P/hZZdm671+nkU2pxPFFM1TOUR/I6Wb6D1pvylxavU+AhZ+O3wqFWToAJtKpviXUc2bU01COk83n2ZhVend0XALT2B1y5AK7/xQ4nRxYl4QUBVvEz5Tyfh6cJrRZTMTUgSklkNqt98ozSJK7nFPURkeS8X+PPCLOKQKla14Eqka1kMlTpLZbQLHt6TPaVJMVuQN3Zq0OGUnxbMkz4f9/Qy2OKNne20uSvBF7mnKtaTeKqUVlAV4oOFj8wj0iTGcxUFtA1gwRqMy7mLCgXUCoJaMZJ1+dUdoGMMyS74DH3tmqJM5cJfEZlFALJwnY8A3hgL3RiBV9Dz12YtuVYdgDAxlv3M4DTaRtq/JYIsCIoSYmvuTHtzKgBl6u5Bifzj9G+g5fsRZBwf5N2d+GQoeHGCplTIVMHM2rf0Gwu/VUYzLTzq8+oODPLzpzpNW0JKbICrKK0mtd1cPTJC2rCQVRS+a7hAQHTeECaPghQr4vMspzLYjD1X88sfQ7MturLAFjL0y3w32n2Iii3iyjziNorovJptoP7Uwf1WO4cVkx6yr1EOshxhEy4MUn23nm/6pUJV5LspTuOMPKo1z6Hk9DqpfjWO6p283gd2p9lLEmroWsX9rddOf00397l3obVVx7Kq3RSuRoA2zJszXQcyLZ7KGWztSx+0E3oGkwT6+BV55C7rcj9Ugl9KbmzPe8q1G7NJJj5mvqg50ZAF0vFgUJuYHdKL6Pl1Ir8Rj0DvlQM99RpstvII1EmlJfMWWL8jl8+iSsYjpFKC1na+AXh3CoEcboklP5geo0C4prM7Kcc31cJjlkLG5qS7Gfl975qjMxZBlEkfJTbe/45n7fWnyzzgdaMz+RMIQozop4nm4peInFErmtgYW5PgplFgPwkLRIhC3xhPTCNiOLXcI9WNk9RasysiBTEh06HSChSgrvU3n8XC+RbWYfdWqWiVjtH/8UVn9ZgWm4DWI4AV23l4enNbqXcJBDt09IJ73eJ/w2lnz2MwPh6UW7cOMpHqsdl8dZwgRGhtBf0raram30KSk0UEv377lHVuTy/zqXWGwaM3zTVOOGsj7O+k1mO8Kw/JVn+qBK15p19syfNjzP7Itv1NB4ftantCQ0XbWq70JaWwapClWmblIwvDcXgN2CtX6DHHriOoS31yqd+QddZWpUP2+R3qH/U5oztQIP1C421Qa7OCeQOV5OjXdb/rW/ACwRxfD0S9fgt5mSpoI096K5O0b2p5Vz11GkVXZGeK6w7q42g6EJTGLpf7pY6KBFZbZg6jdo0RklYIYRGUZtsKFGaj/ul5oc0ZlZJVcx14nkXOMOmm3frnFCP33/9dLXySBlehPIICFLJStlTRdUoddClRaR
2nq+W3WfxEWuwq0JkdBHN+Dh8xGjM5U0VfQL1JfyURZ96bjVCH0HKC+rLcDqH5y3Dp42gtgyeFRmtS7ZG/4ErvJEXeJYoXKRLQn7AA7oncuZaxeRNLdum88JZXDywJVi3vXUvnC1aPHZuIPL5t+9fvv56tfhWaqDYLCHYDwcYU6mBtswTU1SpuidvdyxUdfzmZdlzkgZKJ+ytE5aUe5FOKJr+UXRCR1RGhDN89kmRP4Y6XbAxV6ik7JnG0ZGkLORjXTQBF4IDxWz/La1bg6gEwXhrPCfb47gs/GL4yIo13a7/gR9HK7gVJJGJ9Lv2z2aAHivW3UcWjp6A1nTm3b6rst86SJRJCnuugihYtaWmMTZ5ru9xgHcR8vmZJof53hmWurtk6ZMkz5KtyMu8T33pVjIwuZzz8TIsgetMhnBlCuhBAmaTBEpR/MqmAF4Q6LQogZQ4uP5GR7EAPr2T63+xMQBqcmMALYg1jThpaEWt8qVfboqKgikfWi44+TXDDmv6UX9iZFbbi8JjtCVm3vpsEskCZku8CzCNpVP9uV8CdfMujZRn2EsiDQ544eTraweMQLbZ0SBzy3/Whzg/PMR44XmtFpebWutMZ1HkA7kspn92umGcTmK+FZPiadj6bcrdYmbc73/ZedsBASntyVepIo/XJQ9D58mjaQoQAfYNGdyhAafSxUaoDXfz1Wl6KWYlMmqKmXsdG+TwxWPMgeVghvY3HJuD9IUqDhvcQSrOR4zw29FubrZ6zJwVY8zeJWPGMPRCUxZ11L9kzBGEqmDMBOVaS55wnRVjDCl2Yj86BEie538QGn0I51k806cswmajoGnXacFbpsA1IbjmrDgDDdmWceiHGFzikvsKabeItDnr1kBDWjWN7MmQ4RXS7yF5CoW3N4u3WWvmQOEGC537S79kOdqdr2UpW80sthqzh63mjQdHQuEmacqRNlE6edl4nvP6lE5uA5f6pWZwmJmatgSuZtgW/Vs331hQX0Ld0VyT/q0/Q+/4Do5ATD5QZCSHmcHtn2x1WJN0zpo0tL/ZUSCd9+lz/S+3Vsld+kSiCHWgV3TpF880cSTZWRR+K/wr8tYoWpUzyAl5qcO/oTL04mjgomicMgBAt3U4Gz+DvP25Z0ja4Gggl7vPa3MCS7RO71ePTOmyk+uyDcVVQHxSEmpIKYEuK0qyfku6rCUyDkwkQd6AnOiKRea4d7dyy9iLQBZcjS5rAGPpGpWPVWe5trvUzMrnTF0Wuq13IaHMlY87iaKrO29a0cWHaUI0zlP31Ns/fU4CRHr8BQ== \ No newline at end of file 
+7V1rb6M6Gv41lXZXSgQGDHycdm5HandG01mdPZ8qAk7CDglZINPp+fXHBpuAMcQkQJMeZ0ZVYszNft7769c3xt3m16fE260f4gBFN0ALft0Y728AsEyI/5KGl6LBYA2rJAyKJv3Q8Bj+iWijRlv3YYDSWscsjqMs3NUb/Xi7RX5Wa/OSJH6ud1vGUf2uO2+FGg2Pvhc1W38Pg2xdtDoAHto/o3C1ZnfWoVscWXj+j1US77f0fjfAWOaf4vDGY9eiL5quvSB+rjQZH26MuySOs+Lb5tcdisjQsmErzvvYcrR87gRtM5kTgGkXp/z0oj1iz5w/WfbCRgMFeHDozzjJ1vEq3nrRh0Prbf7GiFxTx7/8eBP6+LuGv6+zTUSby3cl7WgbvCMThX8uotj/UTR9DKOIdvgfyrIXigxvn8W46XDv+zjesatmSfwD3cVRnORPa2ja3Z2mlUfY/JG+S3z5Ss+lRf4Vd6Z3ssmvX2H2X/IUc4v++qN86Cx5qRwiP/+g126OPJ2MNN4nPsMVpMOdeckKsX6OUzSSca6cSmfsE4o3CN8Id6DkNdPmtu7SKyUo8rLwZx24HsX/qjy3vNzXOMQPeOgSL5cpfpIaSFgfej/DoGil1Gw5Rv02xSvSszouZLpW/UKM3tmFimFpXAgjxXupdNuRDqngpeh9IKw/MNS17he0zutvwSP9QWd//KV4Q/arMumHppxyW8geOJNRcRcNXD+Fy1ExsK6QioHhzmHlY5s1TJoWnBvVj30ajQOt8zaGY8ydjtsMxAFM7bIoFN9Bydnp5CxgEuuqKLQhHg04B45l62bx91Spa9cva0I4Cs1Z9t9L6gJGwoqmJ6Fpzb1GmgbuvELCwK1D0jFFRy+Vwu2/m15tT6dXKwrHw20KKNxy+1G4NjdtaE5G4FCr055uWKcRMTTrYHbBODTM3QaY3TQGrfP661Z3f9uwuvqfL6UFFAwjDK7bBf6yynJwFg171vCIkp8hRiVtx7fY831xW+V8jiWkz+Em8raoTuHLeJsxRkHoxV+HUXDvvcR7AvU08/wf7NftOk7CP3F/r2QPePozSoYAdlDr4cxHckV6rwSl+NyvjP70suneSzP2PHEUebs0XORPSLpsMOTC7W2cZfGGdmKv9pG7feHk5HldjamxU+/DLbppYU+FzdjFMH6iJEO/ZBiBzgGRCa/ng0e3tBPWVW8uoxARo6jgsht2BhQA75b2eHdDJJckDH97Lw9EesmkFZp47DJO8NTmYBvnsK2iizZ5Ubja4p8RWpIrkHkIfS96R5szInNu053nh9vVfd7nvXlo+UbHlzQ9r8MMPeJ28kzPiUdOjPH1llGOlXUYBGibYynzMm9REgxlbfg9rFv8H4/gHRE1Fn6vO/xbP/zG/0n3JLuLt/j9vDAHEcJYf0ZpJoQXkIYXY4RQDk4ADoAmqw1N2csO9cTTd3LKqaxN4Wco/FhgSvxoTfzgQcghRPTHpwBlyM9C/LoFmvBb41HPx0YhYCQEMOtiCgTojtXCQdKCKzxhRTfDw1zMP/Q2ZEy3i3RX4QxhK2tpMB/Kax7ZRTukVQsPChUPGh2BumZOCUH3BF383W4X4TkirEnp429JHzdMWX0cDqGPQ1GiQz99vAJFpZOPxc8aeOJQVzCRi9DKdVdvwdTW25QqNh7sbTn1Vd3qAJZaF4UOSWl3kCcXqXPrrkjcEXQEKPWTcFcVaQoklwOSSdVy5uUVGWahjwGSg2GfhMoUG3fWp1WFXVE6yG2b3VWQPjsb/n9P0kNvg9hPZ3gwULL1ohlJmZ15mm6ZCOkz5CycWWB5YBYsDDCznYUeLF18WIOHC/AqSpXD+PFmhzUdPF9YbS38Am2sSNzcbkW2K1ifsLa666NaKXoYjx4MSfV8EHoQOSeOoeUOQzROrydKQ6H2ygZhzQTkrMOmQeglPn0d3REjq68VCPgYv93EmSuAmdNhBNKbfUN+5m1XeHQqeYC1mwFNl7oZj2kvyrlshm7
JgKUNaPcLSZrtIh/jBSmRfxKLs6SR2G4qymDhFP7WIu21BC3r2r6a7bFmW2D6jTXb9nS5vu3ZOHVXH5FLLKOIIiRvq6QUVXKOgtDbxGQJUyXrqMj9r2YCnZyDRD4fP5ZHOnKQkB5YKM86YrlF5RKEP8q3bhVKtdwiUfIgU2eOphadmkfEpJBTF0Ola2DodS9cLg1bdjdYPly7A19kErRHh+6iEE/WGcGhTmeIUuyOK3YDaHImp8mZAoe+YY/l0Nedds9JlxVdYAbP/QK/6FNaGLdPAVp6+yh78oTRprMN23rkwFIW7vAKQW8DV6D+CdE6iH2rC6JPJ4OVCNucIXVBVFvEcaQsiFEhJNApx4MQEASbzocQ5nwR0TE6gESGTuFoTBwJIg6j4QiYAiWOuR8+/3Z/++Hb96d/qSSwySZfFHgYb/aZ6lWZ/fl8ruZ3xPkVONLHm1+R6+FoXnCm0qvONbp0GaNr1PQqwC2/BU1Xd7m+rgo7U3OHwJ1opVy/9CoKQxX/GyEx1JBGU4d1JELPIExLbwolGdW2X0DcdWZoAbyZGThotlw6uokWMICOLxcQx9BCJPSTm/99AuG9nQX96UApaGOSgsjKG4sUoEB+M+V8l8SH8OBxGH0tuit/04XASGTkjcdRjcZUDhxxYt8PcZi29d/d0ahDrKkjIEUxVV8DP1A0CvoOWizLIx3RqMBDztLvhEOtYAUTa9WYE+s4cMipb6TI4Cs5WFV4He0+0/XO/jpXkILrf3YgyhGkUBRr2YIAK/qpLI/8HKfZO3qKYpGvziKFrpCxeKQuWlNbytr4GSXxtr4oTkbokvO+bB8Q5jyB0t8uA1UiB8xoqDI63Ktpun6Kd5VVttLGwOPj5y87Pg1X8anXQpQ1pXWsCQI/apXRBaODZSZMI8XaHG9qldElgwRoE5qDELTLJLXIaMJJNybUb4Henmc+nFc1sGcGca0ugOfOLBs60A50fekt5LyqdOb6elRbuFmZ8qLY2eThgil1bL1N5sWb4CnzVj2V6y8P7797K6VYXwCMnAkV69IuFIlFP89Q7m3/F4nN8lA6XkaH3r/G1RoXqN9I4XMsfBr6hKo9gB0Oqmbdtw4QFWwur/umcDRJmZICWu04MqYMBllNl5TK+RqXT8AJFX3mcZxydVnXeqy2StaiVWWvG8eTWRbXhIOoNv1NI9CnjxPoa4ba9HqBebbSnyHRrh89cQ8J3eoqY6/rcH64Bf47zqYu5b47kqWl+aXmvfsPHLVk8nzMTZmOkOMAC0GHJNk758Ot1ELQkmTP3bqJkUd9EwkwCq2ei2+275osXvv2N4+Ucuc3Uerd33bb6af59i73Nmxzh768yiBbAOi6DU1bsxwHsH1zylGC8+KAYQHXZBr9EV51CrnbitzPldDnkjvbPLRC7XAiwcxvTqJL7qjWn2vUd1g7sscK4FJ5+nan9DLYknJRcFMytVEtWBh7lfgq8kgyFeUl09WGZaRztDQsv4HoaWsX2kpjy69dOCTbKi/y0Cuu5MH0CjVgRdgp0332O/KqKHgi3jrqQ47ClLDXeCmFq1O2bFAx3DHBNWkJ2ba6UbRmf5kM1gyFimDWEkdd1LDCorYEMrMA+XFSLA4sUgSwuEgiIh8a0V3RZjWK200AyEnL1QKRqDzGxP5dqNHXoq1dWzmfTmtIXgXjc/wt6DaA5QhwZXSoYPRm11KYURdtk3MU3u9j/wdKHjyMwO3loty8cpQPVLSK357QEJgaumh1flf9UWn2Kai+UAj0n5unPJtXFYM8qRikJg2DchdLuVkfJkW3zb7Es76O0+ypumrpaCkxtVzpYlBkSfKOYVAk8pSN419WexEf0HDWXsQzbQ5Nsx4snbHatecGfvlgaP0CElsXO6Y2Nyqf+gVdZw4rH7Y3c99ojM1vw6rJBVx672vM1z494thuKAI9+7/2vsa6IKvnuPRgDeXmem0LFhtb/V2cQntVZlv11HEV2in1WWAJ12GXuYm9Vsu
qbWnH0WqGKIw6nlZjgxbdON+VNtsnW+Z7zBd3qHkfb94Fnu3x5h2eEvf9/cv9xYoj5V8pxBGfLyDY3qV0g1RRNUhNcFH5pJybbDy/T02Qh3d3ys4eizPB/gFdgbdGhKFhOJPZmMurKp1EN+qYonSS5EYe9BFauUvd7qZzeJrdPW6CJuSiFgwrrTYj37+nyTiwxSjcAvkYO/yIB3RHJNelCt6rsgPHC99BLt0QCgzBtx6+s0Xm6FGH+sP3n1+/fblYfCvFUqhYCs0VkVJgDaFY2m0hnH2KEqIVkZJhZEOO/JuXps9xEiidUFonLCn3MnVCR1RwgPOkyizmzlOkztj2KlRS9kRv60BSlk+HtzSBeQt6iln5XaM7s68ESXwLPCerfFxmfjF8xAZOVot/aLktTLgVIEUX6Xftn83EPlb4WkYWDr6+pRm9u/7YpJwdJFqoBiStIApWba5pjE2eGmzsEU4E/PIvXvJKL+Ay3DlbnUWWcTGLvFxWZszdygIvbknrcAu4dNcZDeHKFSBBAlaTBEpR/MquAF4QANvuJg6uv+l2+wL41WNc/7OdAUBrdwbQOmDjiJOGVtQpX+QKeam0l/Kh2wUnbzNssKYfyRMj89qelQ+jzbFcsyeTSFC3OhJcdMucO9XDcuszj2eGjJXhwnsTXzkDBbCNg3q5W/6z2G+z/eMWG56X6nG5KltnPI8in7kFmf5ZtXU0kcntHCXma3EpHoZNbsvrDjfjbvfbxlv1SHHpXrSVKPJ4XfIwDZ48mq4AEWDfkMMdmGAsXWyA0lNXX/xCSjErkVFTzNzL2GaGr2hjHas2walSffub1BkwmIrDBreXivMJI/x6tJurLU4xZUEKS7oixRCOXmC15THJV6TIQajqUYxQDbLkCRdZkKIsd9XEztaP9gFqrw+wFzp9COeZPdOnLBJxo6Dp1+nAW6rANSK4pixIAcy2jdfQLzG4xDsNKKRdI9KmrDQBzNaiTGQrihRbSL+H5CkU3t4s3kTbAY4HONFuAMfrM72kGdqcrmUpX80kvhpLwlfzxpMjgXCjMBVIG2n9eNl4WvD6sH7c1icMmFmaNtddzbQh/Vt330BgzIHhaK5F/9afQTq/gyMQi08UGShgZnK7EMMj3iSD8yb17W/BfjF9rv/53qr2kD6RKEId6BVD+sUzjZxJdhKFXwv/irwFim7LGeSEfGvAv6EySHE0/axsnDIBwLANMBk/A7z/WTIlrXc2kMvd57U5ARTZ6XKFzJQue9m6bENKCXRZ0bLtt6TLQpFzYCQJ8gbkxJm5yCU3uSpd1tTNuWtWPrDOcm13rlmVz4m6LHA770JSmSsfdxRF13DetKKLfyYx0TgP3RNvt36IA0R6/AU= \ No newline at end of file diff --git a/docs/GeneralDD.png b/docs/GeneralDD.png index 44ad1c7..7e5275a 100644 Binary files a/docs/GeneralDD.png and b/docs/GeneralDD.png differ diff --git a/docs/GeneralDD.xml b/docs/GeneralDD.xml index 0a78764..a531992 100644 --- a/docs/GeneralDD.xml +++ b/docs/GeneralDD.xml @@ -1 +1 @@ 
-7V1bc6M4Fv41rp15sAsB4vKY6/RsJdvZyUz1ziO2scOObbyYdJL59SuBJKMbFyMcJ213VdoGIUA61+8cHY2cq/XrL1m0fbpP5/FqZFvz15FzPbJt2woD9B8+8lYeAY5NjiyzZE6O7Q88Jn/H5KBFjj4n83jHNczTdJUnW/7gLN1s4lnOHYuyLH3hmy3SFX/XbbSMpQOPs2glH/2WzPMnchR44f7ElzhZPpFbB7ZXnphGs7+WWfq8IffbpJu4PLOOaDfkHXdP0Tx9qdzPuRk5V1ma5uW39etVvMLjSkeMXpe/0cccOZdP+XqFfgD0tTh9q7kYtLkYvVcWb/Lq7bT90Tn+Hq2eSY+PcfY9zka2t0JdXE7xt2XO+q7ceveSrFcRGhnn8uUpyePHbTTDp14QOfGPVQxljO9poV9syPCp5Sra7egJ0uFdgjstTs/SdTIjpxfJanWVrtKsuL1z6+N/6PgqmsarSzZltEkxafRsms3jTDhDb3db6ZeckseRjDwamTx+FacbsVCcruM8e0NNyNkxAJDMNeGfMaSj/bKnRuiRRk8VQnQc0jAiHLBk3e8nE30h86meWzt0pCmL54hByM80y5/SZbqJVjf7o5WpEoe/Mp/xa5L/p/L9T9xkAvGvDXpKdgr/2J9j817MdR5l+QVm8sqE4GN4NkibeDOnLWaYTPCj4IOkSXkLKncwJfw3zvM38jt6zlN0aP+Wd2m6JVft8iz9K67Qkn9z4d1Y7AyVFkCgOvKci3STqy5WUA2d8V36nM3IJNiBT4RhlC1j0jKE5TE8Q7XklcWrKE++8yJORSfk0oc0QU/DyNJ2eKK0Hch3UT4puaoqOho6ArbQUfl2UkdoRqO3SrMtbrCreWBbfGCr/rmE9oQL9kxTPsCehdjYtuQq+8xVH4er7A/OVd5pMhVwufb9uSoI35GrrDNXdeIq23OOxFaWQKbAP012cEwrGdnQPyuZU2UH/1jcYEzJ+KI0P00t4wZGuYrOk+DrJnhyL9Dxr/fokBetsfO6me7wf8QXxqf5Ey3c4wrTRatkuUHfV/ECN92uomQznuKn0JNde08T8KNGZFHVyQShysm09ATY1sf0XWlIpXFAsmOLvyKBtEVMiF+05HbKqgE9wLAYV8mRbP7aj4w0ErZiIOixnuw5tn1uHgRVlS4WuziXBrernSTTsDTgGAbZdqEshsBFU9qHVTuuzFyjjGrJA63CNZg+74VreKoxKNlxy42E979nDMNdorfNx4QHMaMXbGiXYt9aI1mXkBPe9hX9tfZ/aQ97Ji/vs9tGm8oxSWYwmXIfbaJlvI4LQrmOdk/TNMrmtBv0qkJPklBhDbfisSeplfrtqy9Yvlr1xcelTMIng7o3bnomgQYFiZcimlusCnNgUVgALdBB/KOKDgrw37V7A26xbbLMonkSt1Xrew5qliIUt/OPSt3HFKl7XjpVmerygmYImapUYjKfV7jXus2QHRnj8IDWMtDwdvXYt3iK+vrjV/Tnpy+/39+hS/75+LO2g2b7YoYmFpsrxMJYFuaaiTkX7DJ7AiyI+gHQD0LXlw0OphE4mvAm7AovcOuooTW3+LDz3OFIAZ46NOj/SufxhxhxYCk07NHG2G81xl+S1RS93/jq7tcxjSFZPz28ISdu8xGGGMJJZdwQfb/jiHvaEZ/SoXvKc6zvo21SGdupdmANwwcEMiAAAkMMKqcYgHDa8IFVfNrCBzWEx8EEVKVwqJk/dIjHsKth1xGhwtDcR83Hs3LYsFWZLac/2ZaLzU4b3dQSvv8sm5zzJMPZAGigXuPZmbh7EPdV8TkQG2OcUR9fb80SUMUSfkuWoPagNXEsD/SzLQ2ziawdOU2Yo2dMN1gVTpER8rPKnzKkA/tDR0gJSkoPqJSeCeyIhbzrPB2ebzlWa8FG3Rinyh51saFmaaMG2X0oe7GLYBbPZmrFdHtbsN8mvcMMSPm2wqT6ZBUldu2rQjlBRw5EDBhCh6OaMezHj6RnKKL
FQOiiLbztuUJHNN+rAd6WvU+ofE+9/OgKkEOHx9FsK6h9IBhYvdo7blj/wiKU3LG9APD3Ruxp+L4iIb7mT4VjQVC2nRa6v1gidphMJm3E61FweZYgcwxgPmyBExtDkdg0nSqINDwwr9BlHA58nc7+Kuj2t3iZ7OhktiNLPKzJLFpdkMPrZD4v1Myw9OrLyYrDkavK4R2MXGHnkfls5OpQ3Vo34M12ToVmRSKdpnmernkzLd3GG865CVSGDzZ7LMmxauOvXF0V1wnsU+e76T2+Go6qGlUO6GVU9c4I4C0Ux4UTWP0IeLmpuL4naLaw3gwJapv3thLYHCiQinnyXYlViCFB4mDpw1/0QOHMq0EJdqy4qQ6qWK2S7U5gHirais6xxxjLnsKeLbQQVss0+3p/XocCSK6oPrHeiDayBSvekdWR0jMVjPRDtJGjjMH1iLHm2NPcB5jLcweT4F2K5Cy64CHOdsiWKGPLj3ma4eUwWAmJAanzoU97qFYOvV/KgO4ppSf6XCkL3SStJMi94iOL/gXE/0Y14I8ipUchfrUZDjZsuTCJavI+0hVYYZ10LWfnYoZ8+931ZTnNI9sd+Zdfvt7fjHzU/Hay2z21mtNmKBVJzc3SjP/kUI+JjqpiuRfwgcJxCPuPquPpI+GYbJR8hk+Md4UtjocZABU7/bpB9kwyfy50zhW6Ag1bnP1jR5ROvKvMRHmnAyZDa+tj8l8sbDVGOvemHsQMo8gBur29Dgtm6sch9JIQCnMrJwUBX8ExJrxjJBhMeGs9Y2AnHOdic80D+XYNLemcR60fWy/L65xMBXEuFvMqcQp+Yhswn8bYqn4nkQG10TPX5dEFBuGaj6ZJfqBrC3A3FJijLcLvCg6oYwnrZQ05ui4QHtitd3RFT9d0BrtjqWAbwT0QpP6MUd1esFOvrnKohXawK5ZYTawfwhGL72PPnHwHlq2I9a9RF8SWy1KtIjmFFICz+KsRf80i7jCJxmzFDuFJ37OHkmhDyw8XDiw/PBVSNgwnnfnlQHNBL4KGMCRo7O4j2RG2wCYACOq/9UI4GE780AEWep3A9mHIJzY4lj8hZwLPtoZZewos3rMAXoONIeCiXdu/uw3jySVnzjLox5ZBitTAU5dB4hJa/zOJINBRBHVs//4iSIVAnkXQDyyC/A8IpwDgTkIPQuAjR8PxBckB7GBi6UXHR5ZPHS2ej2YgKeoUDAfyDAHkvPQBcnBYnBehWbxL/iZL07FkIOSCWsPLEbweKcIJXeVRKePQZ9QtyKZeN8/LG6JtaiCLAAJbkCL9hAgrn8PzKn+9kUQyQJmhmvhIlzwIM9u+XiQXUD20vGOHypJ8OFdXZ1LUOwL2dV0sW8Ivjk9lWJeqsC8NLem1DBTA7rFnKfIRA1Xsya8hmfbBpxapxGf09VTNqL7GEqWrqrEEKMLKW0v1NUOsiUWXKzCBREXSMYwll7djAl7iYmuJ2DBegI2qQ905u84mcxw42d8C+IJENmUuCQYNkQLaR/Z6NQdeQ3sg4ltc+/7mElThSR8kWe9E07fO6VMqIahPnwpbKmQT6VOOp4oBn/Xxj6yPgaLcwInr43HoToiKLHSyz+MoNnQmoAJfHLgUdBw6k4oyFoEEO/AmYfVzFH3cU2M2tqeLe7TtLbuufW+NDBTrMbjFcP/+Df35Em3mK6xCBdF14PJM5oe2dqxEK8mTZbilcqpMpMCCFjEwY+vd9vPx4y54YyLyFJYS96ZVRj/HIdYWdRoMEqvbdXA+H7FS/7qh4tbFdrtC5kkJth1cX+soFOscU7q2QazMESybrB+XYNla8XP5fsP1ymt4kU+eVKRQMFFqeBly5+XCYhV/l6uW39geBE32bK/mpmv3++etLAzzQhXD6ccQw6zL/1wMEZr1BhnodeaHU9IN1Go5s0JNc2iaFTos36ma8euiijy+MIlqVldXL5kXBYfGhXm700QaBK7sk3mRlXbrIYkXtcXadX5H5y0LxtYkZAt0+5r
UQqWkMWUms0a1YkXve1WW7O3/oakciU4PIIlT5tfLAiMpC5+mupGsH1uqDgcqd/861laVEE7s6odfTMUWM3Ut6xjAiVf50FrNlFIt9emG0MNBketz3aJTr1vUXfABIQ03kKs+0qqfnNwzUgNCTsYbqAbEBNd/0BoiqqTPqnEi1Y04eE8A83UjhGjsoakBjBrEDCAZ/AOOP5AerMGRP/5y8V5ZxucECcnL/NQJEgo7pjlBwnfo6o4TSFD0wcSvJExAIXfwIycogqaEBTHlsGv7T55wQUlFWze/9x4Xxmvtqyy1ZpmsYHeZ2amTf4wdi7tS/hgKpDD2COSkzWDyqcWguaI/9bRYblRuNnSqq3e05jvbwsYEfGTRhV5sJux+yoHlqAlFQvgOjGBJbYDH1nOsly1HkRJUgEmgRq0Aa5QtxoiupUhqvWs0M0c8n1btPomdb3x93r0p9KJ0CxUdrZJNPKbuUuFtTGy5m2832PdAlvzLrrhL3b6MDfBHnQ/aUJ8/w6zCoxpaoumyLExceez5sms5VL1+YLk1YkF0kt7bsGjFo1QKczwamLYa2kPYUBpg0z4rJ0Ad4UNOPsRZgh4e0/mpb3YmAI06t9YcvH4wWTkD5SHIpkUQiJEqIHC/IZduHITCnXwCz5ur8q/fFLMOu1osROxKg5A/pC9x9nWjNoHaVbh9T+tXR9AylTLZ0t8stibMjnnjTe6elEvzRSk1wca9tw4gKcqbA1gwN49f0dnH522cbdLvdUF7zuowc3MkgxcL3S172lTreD2NMz1238qaonGj5/XqYpanVaov9rp7SHcJWUnfpXKDyDnFSs7L5qD3UBaaLYpEui9MNRYEZQvNM2GhuS1i4I0o0ftnVvWw/m7RRzIHNHsoNttW82j3xIaK35OxlSGgxKDZCrzjW5j2AWVmupSPQTRMwzrVEA+EirDO75fXfYI5fdSuZo+1LmJnXin8gWZcV/iDMaQBrWuFgn7suXCTJTY5ym7N5jXRm5wcpFQn7Q6BlFrJBRU2BKhl0npf5MAR0QC3ZiMkHdRMVQRTWODAKNNYLJI19oJ2JdoPIih5bd9nQbAZs5w0hM1km9Ct2VVxNeZ61Qa9un8stVBhg7dKllV1c/3faLNM8QK7JH9rm6ryUUpc1dWs2tvEBmxg2xWjV6riVY4KpvRMGDmWDFL/sV2l0XxUqcLSAevlTNy6kWLE2j5xzJ9YlY8d8HmJivXpgEbvjW/NTlGiyrBdbLeTUZn8FRORffqDplh2OtygUfy9Mmg3r2gENkXW3G9xqTx3fYdOv2NV99EL6kcPHHP4LFmL/xJv4ozmHC6S5XPGUt7/vLi/+/njDCVd13IkSrQbNOXBhUCLBTT0O1Cl6EmWVxYvYjS0My7hUwWt1mrWQRP2GJahMPX7JvPNk2idbuac20KamIl6XEK/ddSjcyybMgK3oIwKOh7JML7YuDW5h/pItPk0Vz2fANvb84a3/46YX5XKWrOmS7t466jQiG5NVwmrdqykW7/QS2dYvu8yr9AWjFfXGcKxsYHesenuv3YVlJ8N9VUGgUXUt9pS66sLEKGWahtLozJzun2uj+VYgksdGqJpZa9G6dmhOY7vLI/VCLSlkMXZ7m0zQ+dw0Up8k9vfH/Av/P3x6uGk1xnUIZQnnKyBozM6Pm2RrEEuNpCaZ/vKsExbY2avdhwIeJP/sJ0gOideSPnBjt+UUSxis92vsJuylsUU/AOucAjiob2isLdqruBTTgbKggSODJVUhN1AAuvx8Uu/VF1ONBhXyFV2rjMdmqWj2iTx4UheqxTMYvVqv+HFResV0Xtx4YYC2/bdOIZ2zdJAGeeJVYdbB3E8odjG2PUEe6FxybW5vTTH0BcFRNggUuQroOGtWmoiT8ZXcB/BYuLETq0A+vb1rpNZ1G6N+ZBlgewTr/1jNn2V+u510nfIFQ9KOckKHnTYzdflHRbPjJCEdshnPIuSBXKn6W0PTprtIhgPMUJsOfSwLw8
jYOcDQeajjoD5GELRRKTiuoqRqypjGsHIqSrrbC29N8RRg/getp+eCXZX1hijLQdPc3cCkYHFMqetbR5XTGVgyfqmM+ahaF05jtmFnYyfTmgjueb4URYXwXkGiZd/0ZHn7RwXmPloW8upFTZjYpm9mGQygoYDKCxWN5TyI2ykMAguHshxnZPXakAsbjB2rGOqtfCs1kYdsrLtQOXPB8azsnXaRlwiwFIuuisusSuWMWRacQFLAhBds5qLsYwBUO2w7IVu6vCsuXjhc9Kay+Y6NZDhjn5mKaaEffMs2j7dp3O8QuTm/w== \ No newline at end of file +7R1Zc+I4+tdQO/0AZVk+H3NOz1Z6OzuZqd55NGDAM4BZ43SSedjfvpJtCUmWfMoE0tBVaZBk2Za++9II3mxef06C3epLPA/XI9OYv47g7cg0TcP30H+45S1vAdB08pZlEs2LtkPDU/R3WDQaRetzNA/33MA0jtdptOMbZ/F2G85Sri1IkviFH7aI1/xdd8EyLDU8zYJ1ufVbNE9XRStw/EPH5zBaropbe+T9psHsr2USP2+L+23jbZj3bAIyTfGO+1Uwj1+Y+8G7EbxJ4jjNv21eb8I1XleyYuS69I085gher9LNGv0A6GvWfa+4GDS5GL1XEm5T9nbK+cgefw/Wz2RG0wk2OzTZdrrfZXMaDZqewuR7mKDWNbrx9RR/W6b0iZgH3r9Em3WA1hNev6yiNHzaBTPc9YKAkH+ZbANC/KQG+kUXGnct18F+TzqKCR8iPGnWPYs30azoXkTr9U28jpPs9vA++6D2dTAN19d0o8mQbKtJb5zMw0ToIbe7Z+YtusqrX+wXWpk0fBWBBCFeGG/CNHlDQ4reMQB2ASEF1o1tskcvBxi2nWLQigFfaBWNQYE3Szr9AQTQlwIK5BBh+rC0ZeEcoVXxM07SVbyMt8H67tDKbJW4/Mx+hq9R+h/m+x94yMTGv7boKWkX/nHoo/ue7XUaJOkVJg3MhuA2vBvFmHA7JyNmGEzwo+DGYkh+C0KtXPTrzzBN34rfwXMao6bDWz7E8a64ap8m8V8hA0vu3ZVzZ9AeQmOAAHXFcy7ibSq7WAI1ZMf38XMyKzbB9NyChAbJMixG+nbehneoEryScB2k0XeeMMrgpLj0MY7Q01CwNCEPlCa0+SnyJy2uYglOzUTAFCbK3640EdrR4I0ZtsMD9hUPbAoPXNxH+VzC+AILDkiTP8ABhejaNsQq84JV54NV5pljlXOaSAUsThrpj1We/45YZVywqhVWmQ48EloZApgC9zTRAepmMmX14MJkThUd3GNhgzYm44rU/DS5jOVpxSqyT4yGjHXdCG/uFWr/+uVWoQvjblFvrlWPGaQL1tFyi76vwwUeulsH0XY8xU+hBrvmmibgV62gRaySCXyZkqlBx3St0pKW1gHRjh3+igjSDiEhftEc2wmqeqSBWnAsKUbS/Wu+MqWVMCULQdp6oufYdLl9EFhVvFjsw7S0uG3lpDIMlxYcm0F2bSCL2u2CKZnDqFxXKq4RRDXKCy2za1B+3suu4cjWIEfHHbcSzn+fsfHuGr1tOi5wECN6hoZmTvaNDaJ1UdHh7F7RX+Pwl8xwQPL8PvtdsGXaSjSD0pQvwTZYhpswA5TbYL+axkEyJ9OgVxVmKhEVOnAntq1Ko+Rvz75g/mrsi49zmoQ7vao3rnsmAQYFihcjmFusM3FgkUkADayD+AdrHRTMf7fWHbjHsskyCeZR2JStHzConooQu517VOg+Jkk94NKp0lSLJzRD0FQpEyvjOYO9xn2C5MgQOxWUkoECt9m2b+EUzfX7L+jPT59/+/KALvnn0yflBPXyxQxtLBZXCgljmYlrOvZckMvMCTBsNA+wXc+33LLAQTkCBxPOhF7heFYVNDTGFtduvXfYU4C3Di36v+J5eBYrDgwJhz3aGruN1vhztJ6i9xvfPPwyJj4k46fHN6TEbc9hiW17wqwbg
u93XHFHueJTsnSrNMX8PthFzNpOlQur2XxQmAwKAwK1GDBd1IBw2uYDI/s0NR9UAB5nJiAshbOauUO7eDSrGmYVEEoEzYOvfTzLlw1Llcly+pNpWFjsNNFNDeH7p7LIOY8SHEOAFuo1nF2Auwdw32SfjrYxihnV/vXGKGHLUMJtiBJEHjQm0HBAP9lSM5qUuSPHCVP0jPEWs8IpEkI+yfSpbjywHBBh4H/Vxszm7FG03YGyBAJk/FCHWYl6w6uUIB6lOSxsgGHtcIrFnCq3UT0hktvfXbu8nQtvFs5mcp51f59h5jZ+wLhJUJrBX3Uci9Ss7cq8PF5L5ES46duQg5qx3Q9Vi5ntEjAKUzS1fDuWMBEJIKuxfJcVU1v6nmrS0tZ2bnvCkxpe5QOJ46HlV7+AaDVuOV6w5fc2zhNPPYPxX9NVpkMUBrW90kp/tUTgPZlMmlDSo5jgaSzMMWzwfgOTsDaDEd2mU7UXDW+Dl/AmzuR7G8/+yuD213AZ7clmNgNLvKzRLFhfFc2baD7P2Maw8OqW4xKHA1eZbjsYuNqtV+ajgSskvLJqwevlFgZmRSCdxmkab3ixK96FW06P8WSCDBZjjJIO1UQ1ubnJrhPQp0pNUyt3FRjFCkkQ9BKSejv/eYkDWvbEZj+CaVyXC98ROJtfLYb4lcN7Swl0DyRGiXn0XWqWEL1/hS6l9nSRhkxvl9sfaFt2U5VVYr2OdnsBeQhpyybHymEoUeTuOZuUTCdvGFFfrbqrFP6S1qmOodfCjUxBKodldiTVNAWhuws3glJ3Ww93aoo1x4MvOe/rDIIPMaKz6ILHMNkjWSJ3Iz+lcYLzZRplc1yaPkpTJR16v+gA1VOWnujsoxM4M1M7SssTckJ+Bapv43+jtnaczkI/Ze1sNpKMzpJowT50Fhh+FZ3N9+lqhrT8/e11vuH/Q3NO9vtVox2tt5kimrld6tGeoCDlQIl3FlSpDf04ltrljSFHimW4Y7zPJHG8tADIkOmXLZJmovlzxnFu0BVo2cLkH/uC5YR7ZifyO3XYDKWkjy2ei4Upt3jOnaljOyNpsM/9/a3vONpkEaPB3sqjf3TsbQPleHBn1wk7tOhe82Z5swKWVKqjUoutpuRVKqYEOBeLeS1wcpZ44jtjlUyK8m3cZJbF2xaoAVe/26ykBVomHx9q2gJyNLXXW46Ijd4gaq4FhAe2qtVcU7TOaw5Vh4bMaCMoBwLVn1GoOxB2otMxTQ24g8nIYRVOfdseUUc+1suL78AwJU79DZqikOSSWMlITsHXfyF/FeSvisRVkcY+5I/Kji0cka5jDkXthqYtlj0wbXFkNrRhsOyCSx1FCTV50iBkECfeWcsYpoAmAAiiQeNsONufuD4EBnodz3Rtnw9hgIY7KXo8xzSGSUAFBm/dB06N/CFYTNuOf3f5xpFVq7nQoB+IBknCAc+OBol5tO5HIkGgJQlqOf79SZDMInkhQT8OCXI/gqkFAGviO7YNXKRoQFegHMD0JoaadJwzfWop8ZybgCQpVjCcAWgII89LHyMPdpjzJDQJ99HfRX46pgwFuKDR9vXIvh1JXA1t6VFO49Bn1NAHd2AhvTPqjYlnA1OgIv2IiDyOH/LXawkxAwQZ2JBIkvcg7GzzopGcq7VrjccW5SX5kBpVsUmR7wh2sdssdwm/OO5KMC+VsSMF2Ki5jC0YwseOIYlU9CR+KehqyEiEtrS66HmEyZxo4MSPGrhAiUXnwAUyQ11BBjKuV+CCJOKQC/f+96/oz+dgO19jUBX2q2MCQvslEsQXkkDMhTEPFNBNDW9Hieg+7MePG9KNZP7Sir9bskxvWKXwcxxgbZBZqBFYrbaL8/GAlXjiaspHXO12aySt50Jj52IRR4FYeEzqKq12NRjA0s36cQGWZkNdatFqLr5ZgYt8EIDEN0BJqeZEm9YJMWJJWosr/Vo7HnigZnyv4boL0bqXusyacQFbmWz8rzdCDJN59rEQwudG98cHuj0Xf
Dgh3kCklgsqVAy3daNCixBVVozfZCVR8YVRUJE/xF4yz1Lqx5l4u1dY9ASs7ONBSHK5tYsDobLyqErvaO0tGBsTHxIxta9ILVjXxgSZ9ArVkqyV9y6TpM6urSBGnZVGtP8jUVMChddQu9YIQQOt8QdK+i8z1Yb8BtrS8y+OdViTbU9M9sNHEtNI3rbVizx74jAfUq2QlhqSd9dEDXTxctuXdP5TT+fv4K4RYlC8cjEk6Evonq9Gj+YJdGVPtJ7kSC4fUim0yAIdWEGmlEfZuRiu/jzKqvCpDkAghGdDIo2y7A8MZDSFTYyml0Sikm52IlF/HYGT5dtUJeSD+4yGfJtG5Rik7t4BjmE/sbtNbJ/Fh92RirxMcF8Rcud4OAawa/SxWRVCCKE9OdwCuEIAka7oPtGBXaPaOr2GA6dmPBDTMdqNJ+mbas28cryG2GYtNb8u9O986R9xl5wz/Rv71qQgSRkNdHnnn2nDCWCimzvWhB37cMIQPzHO2PScic9+jkL/Pj6FMiUU6qMkuPeKfb7Q5Q9NlyX2pA502YUk5+QE5FAXTFyGTtuCiHjOcigALSXLtuM/OJ0noKKs29/7+A3ttf5lJrR6mizB/zL2E5fNMQ5Tbgv5Y9vg0XrsFCYbpeDkQlB5RX/oaZAElZ+DdKo5RUq7Kj1dR4cz0CDpZ3QnzH7MgYrGQukSfgItnsEmbuTGe6ymLUehEoSAjURvUyUBq6Ut2oCuIUlqfKA1FUccl1QZP4lDeVx1tpIut1JuuJdMtI624ZgYtDNtY2KWp/l2h3UPJMm/7LO7VB0ZWeOXqvIS1JwnkGBU4d1NSqBpk6wm5kM7JAeNMf4Pdb4AMKwKsiAqSe8tWDTCUUKFORz1dEsNzWML7NIC69ZZOQIKhU/R+RgmEXp4DOenfg6b4AFWqbX64h46gxUcKKq0LFp4Qgb92AEC9mtS6caeUK537BZxE/pOJVCf11llu1osRNuVInThMX4Jk69buQjUrCbve0q/KoAuQymlLToy6qkc88aL3D0hl2T/EGiya8/+6gBSBDcHkGDunr6i3qfnXZhs4+9VIZic1KHn5ogGLxaqW/aUqTbhZhom6uiKRtIUCeh53qyvZmnMQn121t5jvI+K/P429SREzMny36/rQxiHktBMkSSSc2zYIB27LKE5OiQ0q0F0Rq2V6P3j5HtIf0UxCUlkfDmFvl62mgf7FV0q/kzIRoKA1ChNHX/HlzDNDsVv2hS1QTBM3Dqsi8e2JW6d365v+zhz+rBdxZlwbcjOnClHgnZcVY6EIqQGrmv4An/s6S+mYepQOq3eKHVyk5MzKVVRuy4mpUZ0QWYbAkQyaVwgzYOiNcCqOLhJZWomLIIyLNDRyzQWS3eNHa9ZUflOAFWu1PBRLNgUWU7ahE1pmzCt3hoHFeI6K4PefHnKuVAmgzdKfZJNc/tnsF3GuFxClL41DSY+l8JbVZW0DjKxBhnYtETvlaykFpSZKR0dQo5RNlL/vlvHwXzE1K5qYevlRNyqlaLA2jyY250YzMf0+IQRSbUhQLz32o+GJ1YiZtmudrvJKD+yKCxI9ukvmqSIyHCLRuzvzKLdvaIV2GZl034Nc+a577t06jO22q+eV7164JjLZ5S5+M/hNkzISVmLaPmc0ATGP66+PHw6n6UkWcpHgkSzhlN2Lk+apUOT70AWoleSvJJwEaKlnXEpOTLTaiVnHTRgj9oyJKJ+32C+eRRs4u2cU1uKIXq8Hte229jr0dqXTRCBKw9ACB1vydBeOqYxuPtqT7T+MFc1ngDTOeCGc/iOkF8WylqRoa9MxT+qaUSVoZ+bVVvW961O21cJlu+btO+bgvBqwSEUGxOoFZv2+mtbQvnRrL5SJ7Bo9WVHKnV1wUSohNralDgqTrdICYGGoFL7mmBaOqtWeIYkxvGd6bHcAm1IaHGyf9vOUB8u9Ytvcv/bI/6Fvz/dPJ50nkGVhfKEgzWwd0aFpw2CNYqLNYTmma7ULdNUm
DmwHWgDXuTvdj5F68CLUnwwdOsiioUTdTpcYdZFLYsh+B2ugIXFQ3lFJm9VXMGHnAwUBQlg2VTCELuBCNbT0+d+obocadDOkFl0rhId6qmjXCRx7VE5V8mbhfJ6DMOTiw7n2Vi+gLZ9j7MhU9MwUIp5YoX2xk4cRyidNrYcQV6orYWj74TPsV0iW34NSSlfYWs+QKbC86S9tM4RJCaO7FQSoG9fH1qJRc2K/wxZ5NE88UqOesNXie5eRX2HzHiQ0klaiapFzgOEvMeSYFxfMmmbPh/zLNIWm+smokbnsNk2pLGLGGKWnQ+Hcn+C9Xwgo/mopcl8bFuikEjylFgruazSuRYrOWFmreWl9zZyVNh8u53zpwPhpTVjycjBA92hJyKwWLa+sdRjicEMNFxfd8y8LVYFhVBvaic9O/WEDrir9yAlYeaep0bx/C9qed7Nce2/czvyTs6yKRKX0cvUd+Qd0jSALaSr68pbNXnGPESEvumVXTsnz9YAtQlQvCaKVB1fM3XwNf/C10YtArNNT6bSe9oDs1XsRswSoFEX7TmXOBUNGtLNuYBRUq01sy6KMhrsat0CGNrxwwvr4onPSbMunnNpCHJHP5MYQ8JheBLsVl/iOU4Sufs/ \ No newline at end of file diff --git a/rcfile.pylint b/rcfile.pylint new file mode 100644 index 0000000..f5c623d --- /dev/null +++ b/rcfile.pylint @@ -0,0 +1,345 @@ +[MASTER] + +# Specify a configuration file. +#rcfile= + +# Python code to execute, usually for sys.path manipulation such as +# pygtk.require(). +#init-hook= + +# Profiled execution. +profile=no + +# Add files or directories to the blacklist. They should be base names, not +# paths. +ignore=.git + +# Pickle collected data for later comparisons. +persistent=yes + +# List of plugins (as comma separated values of python modules names) to load, +# usually to register additional checkers. +load-plugins= + +# DEPRECATED +include-ids=no + +# DEPRECATED +symbols=no + +# Allow loading of arbitrary C extensions. Extensions are imported into the +# active Python interpreter and may run arbitrary code. +unsafe-load-any-extension=no + +# A comma-separated list of package or module names from where C extensions may +# be loaded. Extensions are loading into the active Python interpreter and may +# run arbitrary code +extension-pkg-whitelist= + + +[MESSAGES CONTROL] + +# Enable the message, report, category or checker with the given id(s). You can +# either give multiple identifier separated by comma (,) or put this option +# multiple time. See also the "--disable" option for examples. 
+#enable= + +# Disable the message, report, category or checker with the given id(s). You +# can either give multiple identifiers separated by comma (,) or put this +# option multiple times (only on the command line, not in the configuration +# file where it should appear only once).You can also use "--disable=all" to +# disable everything first and then reenable specific checks. For example, if +# you want to run only the similarities checker, you can use "--disable=all +# --enable=similarities". If you want to run only the classes checker, but have +# no Warning level messages displayed, use"--disable=all --enable=classes +# --disable=W" +#disable= + + +[REPORTS] + +# Set the output format. Available formats are text, parseable, colorized, msvs +# (visual studio) and html. You can also give a reporter class, eg +# mypackage.mymodule.MyReporterClass. +output-format=html + +# Put messages in a separate file for each module / package specified on the +# command line instead of printing them on stdout. Reports (if any) will be +# written in a file name "pylint_global.[txt|html]". +files-output=yes + +# Tells whether to display a full report or only the messages +reports=yes + +# Python expression which should return a note less than 10 (10 is the highest +# note). You have access to the variables errors warning, statement which +# respectively contain the number of errors / warnings messages and the total +# number of statements analyzed. This is used by the global evaluation report +# (RP0004). +evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10) + +# Add a comment according to your evaluation note. This is used by the global +# evaluation report (RP0004). +comment=yes + +# Template used to display messages. This is a python new-style format string +# used to format the message information. See doc for all details +#msg-template= + + +[TYPECHECK] + +# Tells whether missing members accessed in mixin class should be ignored. 
A +# mixin class is detected if its name ends with "mixin" (case insensitive). +ignore-mixin-members=yes + +# List of module names for which member attributes should not be checked +# (useful for modules/projects where namespaces are manipulated during runtime +# and thus existing member attributes cannot be deduced by static analysis +ignored-modules= + +# List of classes names for which member attributes should not be checked +# (useful for classes with attributes dynamically set). +#ignored-classes=SQLObject + +# When zope mode is activated, add a predefined set of Zope acquired attributes +# to generated-members. +zope=no + +# List of members which are set dynamically and missed by pylint inference +# system, and so shouldn't trigger E0201 when accessed. Python regular +# expressions are accepted. +generated-members=REQUEST,acl_users,aq_parent + + +[SIMILARITIES] + +# Minimum lines number of a similarity. +min-similarity-lines=4 + +# Ignore comments when computing similarities. +ignore-comments=yes + +# Ignore docstrings when computing similarities. +ignore-docstrings=yes + +# Ignore imports when computing similarities. +ignore-imports=no + + +[MISCELLANEOUS] + +# List of note tags to take in consideration, separated by a comma. +notes=FIXME,XXX,TODO + + +[VARIABLES] + +# Tells whether we should check for unused import in __init__ files. +init-import=no + +# A regular expression matching the name of dummy variables (i.e. expectedly +# not used). +dummy-variables-rgx=_$|dummy + +# List of additional names supposed to be defined in builtins. Remember that +# you should avoid to define new builtins when possible. +additional-builtins= + + +[FORMAT] + +# Maximum number of characters on a single line. +max-line-length=80 + +# Regexp for a line that is allowed to be longer than the limit. +ignore-long-lines=^\s*(# )??$ + +# Allow the body of an if to be on the same line as the test if there is no +# else. 
+single-line-if-stmt=no + +# List of optional constructs for which whitespace checking is disabled +no-space-check=trailing-comma,dict-separator + +# Maximum number of lines in a module +max-module-lines=1000 + +# String used as indentation unit. This is usually " " (4 spaces) or "\t" (1 +# tab). +indent-string=' ' + +# Number of spaces of indent required inside a hanging or continued line. +indent-after-paren=4 + + +[LOGGING] + +# Logging modules to check that the string format arguments are in logging +# function parameter format +logging-modules=logging + + +[BASIC] + +# Required attributes for module, separated by a comma +required-attributes= + +# List of builtins function names that should not be used, separated by a comma +bad-functions=map,filter,apply,input,file + +# Good variable names which should always be accepted, separated by a comma +good-names=i,j,k,ex,Run,_ + +# Bad variable names which should always be refused, separated by a comma +bad-names=foo,bar,baz,toto,tutu,tata + +# Colon-delimited sets of names that determine each other's naming style when +# the name regexes allow several styles. 
+name-group= + +# Include a hint for the correct naming format with invalid-name +include-naming-hint=no + +# Regular expression matching correct function names +function-rgx=[a-z_][a-z0-9_]{2,30}$ + +# Naming hint for function names +function-name-hint=[a-z_][a-z0-9_]{2,30}$ + +# Regular expression matching correct variable names +variable-rgx=[a-z_][a-z0-9_]{2,30}$ + +# Naming hint for variable names +variable-name-hint=[a-z_][a-z0-9_]{2,30}$ + +# Regular expression matching correct constant names +const-rgx=(([A-Z_][A-Z0-9_]*)|(__.*__))$ + +# Naming hint for constant names +const-name-hint=(([A-Z_][A-Z0-9_]*)|(__.*__))$ + +# Regular expression matching correct attribute names +attr-rgx=[a-z_][a-z0-9_]{2,30}$ + +# Naming hint for attribute names +attr-name-hint=[a-z_][a-z0-9_]{2,30}$ + +# Regular expression matching correct argument names +argument-rgx=[a-z_][a-z0-9_]{2,30}$ + +# Naming hint for argument names +argument-name-hint=[a-z_][a-z0-9_]{2,30}$ + +# Regular expression matching correct class attribute names +class-attribute-rgx=([A-Za-z_][A-Za-z0-9_]{2,30}|(__.*__))$ + +# Naming hint for class attribute names +class-attribute-name-hint=([A-Za-z_][A-Za-z0-9_]{2,30}|(__.*__))$ + +# Regular expression matching correct inline iteration names +inlinevar-rgx=[A-Za-z_][A-Za-z0-9_]*$ + +# Naming hint for inline iteration names +inlinevar-name-hint=[A-Za-z_][A-Za-z0-9_]*$ + +# Regular expression matching correct class names +class-rgx=[A-Z_][a-zA-Z0-9]+$ + +# Naming hint for class names +class-name-hint=[A-Z_][a-zA-Z0-9]+$ + +# Regular expression matching correct module names +module-rgx=(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+))$ + +# Naming hint for module names +module-name-hint=(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+))$ + +# Regular expression matching correct method names +method-rgx=[a-z_][a-z0-9_]{2,30}$ + +# Naming hint for method names +method-name-hint=[a-z_][a-z0-9_]{2,30}$ + +# Regular expression which should only match function or class names that do +# 
not require a docstring. +no-docstring-rgx=__.*__ + +# Minimum line length for functions/classes that require docstrings, shorter +# ones are exempt. +docstring-min-length=-1 + + +[DESIGN] + +# Maximum number of arguments for function / method +max-args=5 + +# Argument names that match this expression will be ignored. Default to name +# with leading underscore +ignored-argument-names=_.* + +# Maximum number of locals for function / method body +max-locals=15 + +# Maximum number of return / yield for function / method body +max-returns=6 + +# Maximum number of branch for function / method body +max-branches=12 + +# Maximum number of statements in function / method body +max-statements=50 + +# Maximum number of parents for a class (see R0901). +max-parents=7 + +# Maximum number of attributes for a class (see R0902). +max-attributes=7 + +# Minimum number of public methods for a class (see R0903). +min-public-methods=2 + +# Maximum number of public methods for a class (see R0904). +max-public-methods=20 + + +[IMPORTS] + +# Deprecated modules which should not be used, separated by a comma +deprecated-modules=regsub,TERMIOS,Bastion,rexec + +# Create a graph of every (i.e. internal and external) dependencies in the +# given file (report RP0402 must not be disabled) +import-graph= + +# Create a graph of external dependencies in the given file (report RP0402 must +# not be disabled) +ext-import-graph= + +# Create a graph of internal dependencies in the given file (report RP0402 must +# not be disabled) +int-import-graph= + + +[CLASSES] + +# List of interface methods to ignore, separated by a comma. This is used for +# instance to not check methods defines in Zope's Interface base class. 
+ignore-iface-methods=isImplementedBy,deferred,extends,names,namesAndDescriptions,queryDescriptionFor,getBases,getDescriptionFor,getDoc,getName,getTaggedValue,getTaggedValueTags,isEqualOrExtendedBy,setTaggedValue,isImplementedByInstancesOf,adaptWith,is_implemented_by + +# List of method names used to declare (i.e. assign) instance attributes. +defining-attr-methods=__init__,__new__,setUp + +# List of valid names for the first argument in a class method. +valid-classmethod-first-arg=cls + +# List of valid names for the first argument in a metaclass class method. +valid-metaclass-classmethod-first-arg=mcs + + +[EXCEPTIONS] + +# Exceptions that will emit a warning when being caught. Defaults to +# "Exception" +overgeneral-exceptions=Exception diff --git a/requirements.txt b/requirements.txt new file mode 100644 index 0000000..910a8b0 --- /dev/null +++ b/requirements.txt @@ -0,0 +1,18 @@ +dill >= 0.2.5 +semantic_version >= 2.6.0 +argparse >= 1.4.0 +argcomplete >= 1.6.0 +ruamel.yaml >= 0.12.15 +# logging; python_version < 2.8 + +#pprint +##tempfile # ? 
+#urllib +#urllib2 +#urlparse + +#unittest +#unittest2; python_version < 2.7 +#mock ; python_version < 2.7 +#tox +#pytest diff --git a/setup.py b/setup.py new file mode 100644 index 0000000..7a7e809 --- /dev/null +++ b/setup.py @@ -0,0 +1,30 @@ +from distutils.core import setup + +setup(name='Hilbert', + version='0.2.2', + description='Hilbert tool (server part)', + url='https://github.com/hilbert/hilbert-cli', + author='Oleksandr Motsak', + author_email='http://goo.gl/mcpzY', + packages=['config'], + package_dir={'config': 'config'}, # package_data={'...': ['data/*.dat']}, + # py_modules=['tools/hilbert.py'], + scripts=['tools/hilbert', 'tools/hilbert.py'], + license='', + classifiers=[''], + platforms=[''], # data_files=[('config/templates', ['docker-compose.yml'])], + install_requires=[ + 'dill>=0.2.5', + 'semantic_version>=2.6.0', + 'argparse>=1.4.0', + 'argcomplete>=1.6.0', + 'ruamel.yaml>=0.12.15', + ], + extras_require={ + ':python_version == "2.7"': [ + 'logging', # TODO: check whether this is really required!?! + ], + }, + ) # TODO: add testing!? +# glob.glob(os.path.join('mydir', 'subdir', '*.html')) +# os.listdir(os.path.join('mydir', 'subdir')) diff --git a/station/compose.cfg b/station/compose.cfg new file mode 100644 index 0000000..705b3c3 --- /dev/null +++ b/station/compose.cfg @@ -0,0 +1,83 @@ +### HW / SW settings on the station host system: + +# pulse audio +export PULSE_SOCKET=${PULSE_SOCKET:-/run/user/${UID}/pulse/native} +export PULSE_COOKIE=${PULSE_COOKIE:-${HOME}/.config/pulse/cookie} + +# X11 + +if [ -z "${DISPLAY}" ]; then + if [ -r "/tmp/x.id" ]; then + N=$(cat /tmp/x.id | grep 'DISPLAY_NUM:' | tail -n 1 | sed s@DISPLAY_NUM:@@g) + + if [ -f "/tmp/.X${N}-lock" ]; then + export DISPLAY="unix:${N}" + # TODO: make sure we can use it! + fi + fi +fi + +N="0" +# loop through display number 0 ... 
100 +# until free display number is found +while [ -z "${DISPLAY}" ] && [ "${N}" -le 100 ] +do +# for ((;;N++)); do + if [ -f "/tmp/.X${N}-lock" ]; then + # TODO: make sure we can use it! + export DISPLAY="unix:${N}" + break; + fi; + N=$((N+1)) +# done +done + +export XAUTH=${XAUTH:-/tmp/.docker.xauth} + +if [ -n "${DISPLAY}" ]; then +# : echo "DISPLAY: '${DISPLAY}', XAUTHORITY: '${XAUTHORITY}' -> '${XAUTH}'" +# [ ! -f "${XAUTH}" ] && touch "${XAUTH}" + (xauth nlist "${DISPLAY}" | sed -e 's/^..../ffff/' | sort | uniq | xauth -f "${XAUTH}" nmerge - ) 1> /dev/null 2>&1 +# [ ! -s "${XAUTH}" ] && echo "WARNING: something is wrong with '${XAUTH}': `ls -al ${XAUTH}`" + (xhost +) 1> /dev/null 2>&1 +fi + +#unset XAUTHORITY + + +export HB_PORT="${HB_PORT:-8888}" +export HB_HOST="${HB_HOST:-127.0.0.1}" +## station_id?? + +HB_DEFAULT_URL="http://${HB_HOST}:${HB_PORT}" +export HB_URL=${HB_URL:-$HB_DEFAULT_URL} + +export HB_INIT_TIMEOUT=3 + +### For now... +export station_public_ip="" +export management_server='' +export management_server_ip="" + +#### ip ro ### | awk '/^default/{print $3}' + + +export qrs_screenshot_message="Bild gespeichert.\n\nImage saved." 
+export qr_uploadlocs="/tmp/" + +# QR device ID (according to xinput): + +export QR_DEVICE_ID="keyboard:AT Translated Set 2 keyboard" +## export QR_DEVICE_ID="keyboard:Mitsumi Electric Apple Extended USB Keyboard" +# export QR_DEVICE_ID="13" + +## inherited settings from prototype main interactive script: +export CUPS_SERVER="" +export MOUSE_CURSOR="on" +export CUSTOMIZATION="alsa nv vb" +export ALSA_CARD="1" +export LANGUAGE="en" +## export MENU_TRY="gui" +export VNC_PASSWD="" + +export WEBGL_APPS="${WEBGL_APPS:-http://supernova.mfo.de:7070}" diff --git a/station/default.sh b/station/default.sh index c333827..8976dac 100755 --- a/station/default.sh +++ b/station/default.sh @@ -1,8 +1,5 @@ #!/bin/sh -set -v -set -x - SELFDIR=`dirname "$0"` SELFDIR=`cd "$SELFDIR" && pwd` cd "${SELFDIR}/" @@ -18,19 +15,31 @@ if [ -r "./station.cfg" ]; then . "./station.cfg" fi -if [ -r "./startup.cfg" ]; then - . "./startup.cfg" -fi +#if [ -r "./startup.cfg" ]; then +# . "./startup.cfg" +#fi station_default_app="${station_default_app:-$default_app}" +## TODO: FIXME: check stopped containers! if [ -r "/tmp/lastapp.cfg" ]; then . "/tmp/lastapp.cfg" else export current_app="${station_default_app}" fi +#if hash ethtool 2>/dev/null; then +# ## TODO: FIXME: 'NET_IF'??? +# for i in `LANG=C netstat -rn | awk '/^0.0.0.0/ {thif=substr($0,74); print thif;} /^default.*UG/ {thif=substr($0,65); print thif;}'`; +# do +## echo "DEBUG: trying to enable WOL for interface: [$i]..." +## sudo -n -P ethtool -s "$i" wol g +# echo "DEBUG: checking WOL setting for interface: [$i]: " +# sudo -n -P ethtool "$i" | grep Wake-on +# done +#fi + if [ -r "./docker.cfg" ]; then . 
"./docker.cfg" diff --git a/station/docker-compose.yml b/station/docker-compose.yml new file mode 100644 index 0000000..058c9bd --- /dev/null +++ b/station/docker-compose.yml @@ -0,0 +1,610 @@ +version: '2' +services: + base: + image: malex984/dockapp:base + volumes: + - /tmp:/tmp:rw + - ${PWD}:/DOCKAPP + - /etc/localtime:/etc/localtime:ro + labels: + - "is_top_app=0" + - "description=Base for dockapp services" + working_dir: /DOCKAPP + privileged: false + network_mode: "host" + environment: + - CFG_DIR=/DOCKAPP + - CUPS_SERVER + - ALSA_CARD + - LANGUAGE + - MOUSE_CURSOR + - CUSTOMIZATION + entrypoint: + - /bin/sh + + ddd: + extends: + service: base + image: ${DOCKER_COMPOSE_IMAGE} + labels: + - "is_top_app=0" + - "description=Docker CLI + Compose" + volumes: + - ${NO_PROXY}:${NO_PROXY} + environment: + - NO_PROXY + entrypoint: + - /bin/sh + + admin: + extends: + service: ddd + volumes: + - /dev:/dev:rw + - /run/udev:/run/udev + - /sys/fs/cgroup:/sys/fs/cgroup:ro + - /run/systemd:/run/systemd + - /var/run/dbus/system_bus_socket:/var/run/dbus/system_bus_socket + labels: + - "is_top_app=0" + cap_add: + - SYS_ADMIN + - NET_ADMIN + - ALL + + consul_base: + extends: + service: admin + image: progrium/consul + ports: + - "8400:8400" + - "8500:8500" + - "8600:53/udp" + labels: + - "is_top_app=0" + stdin_open: false + tty: false + restart: "on-failure:5" + entrypoint: + - /bin/start + + consul_agent: + extends: + service: consul_base + command: + - "-advertise" + - "{station_public_ip}" + - "-join" + - "${management_server_ip}" + + consul: + extends: + service: consul_base + volumes: + - "${PWD}/KV:/data" + command: + - "-server" + - "-bootstrap" + - "-advertise" + - "{station_public_ip}" + - "-ui-dir" + - "/ui" + + registrator: + extends: + service: admin + image: gliderlabs/registrator:latest + entrypoint: + - "/bin/registrator" + command: + - "consul://${management_server_ip}:8500" + + x11: + extends: + service: admin + image: malex984/dockapp:dummy + privileged: true + 
stdin_open: false + tty: false + restart: "on-failure:5" + entrypoint: + - /sbin/my_init + - --skip-runit + - --skip-startup-files + - -- + command: + - startXephyr.sh + + xephyr: + extends: + service: x11 + environment: + - XCMD=Xephyr + + xvfb: + extends: + service: x11 + environment: + - XCMD=Xvfb + command: + - startXephyr.sh + - "-screen" + - "0" + - "1024x768x16" + + omd_agent: + extends: + service: ddd + image: imaginary.mfo.de:5000/malex984/omd_agent + environment: + - HB_PORT + - HB_HOST + - HB_URL + ports: + - "6556" + - "${HB_PORT}" + stdin_open: false + tty: false + restart: "on-failure:5" + labels: + - "is_top_app=0" + entrypoint: + - /sbin/my_init + - --skip-runit + - --skip-startup-files + - -- + command: + - omd_agent_entrypoint.sh + + hb_test: + extends: + service: base + privileged: false + image: malex984/dockapp:omd_agent + environment: + - HB_PORT + - HB_HOST + - HB_URL + - HB_INIT_TIMEOUT + - APP_ID=hb_test + stdin_open: false + tty: false + restart: "on-failure:5" + labels: + - "is_top_app=1" + - "description=HB test in python" + entrypoint: + - /sbin/my_init + - --skip-runit + - --skip-startup-files + - -- + command: + - python2.7 + - /usr/local/bin/heartbeat2.py + + + hb_test_a: + extends: + service: base + image: imaginary.mfo.de:5000/malex984/appa + environment: + - APP_ID=hb_test_a + - HB_PORT + - HB_HOST + - HB_URL + - HB_INIT_TIMEOUT + stdin_open: false + tty: false + restart: "on-failure:5" + labels: + - "description=HB test in bash: A" + - "is_top_app=1" + entrypoint: + - /sbin/my_init + - --skip-runit + - --skip-startup-files + - -- + - /usr/local/bin/A.sh + command: + - AHB_HelloA + + hb_test_b: + extends: + service: hb_test_a + environment: + - APP_ID=hb_test_b + labels: + - "description=HB test in bash: B" + command: + - BHB_HelloB + + hb_test_c: + extends: + service: hb_test_a + environment: + - APP_ID=hb_test_c + labels: + - "description=HB test in bash: C" + command: + - CHB_HelloC + + busybox: + extends: + service: admin + 
labels: + - "is_top_app=0" + image: busybox + entrypoint: + - /bin/sh + + ptmx: + extends: + service: busybox + stdin_open: false + tty: false + restart: "on-failure:5" + command: + - "./ptmx.sh" + + reboot: + extends: + service: busybox + stdin_open: false + tty: false + command: + - reboot + + omd_anew: + extends: + service: base + image: malex984/dockapp:omd + ports: + - "80" + - "514" + - "5667" + - "5000" + labels: + - "is_top_app=0" + stdin_open: false + tty: false + restart: "on-failure:5" + volumes: + - "omd_data:/OMD" + entrypoint: + - /sbin/my_init + - --skip-runit + - --skip-startup-files + - -- + command: + - omd_entrypoint.sh + + omd_persistent: + extends: + service: omd_anew + volumes: + - "omd_data:/omd/sites" + + pa: + extends: + service: admin + labels: + - "is_top_app=0" + volumes: + - ${PULSE_SOCKET}:/run/pulse/native + - ${PULSE_COOKIE}:/run/pulse/cookie + environment: + - PULSE_SERVER=/run/pulse/native + - PULSE_COOKIE=/run/pulse/cookie + + gui: + extends: + service: pa + labels: + - "is_top_app=0" + environment: + - QT_X11_NO_MITSHM=1 + - XLIB_SKIP_ARGB_VISUALS=1 + - XAUTHORITY="${XAUTH}" + - DISPLAY + stdin_open: false + tty: false + entrypoint: + - /sbin/my_init + - --skip-runit + - --skip-startup-files + - -- + + qrhandler: + extends: + service: gui + labels: + - "is_top_app=0" + image: malex984/dockapp:qrhandler + restart: "on-failure:5" + volumes: + - supernova:/supernova + environment: + - QR_DEVICE_ID + - qrs_screenshot_message + - qr_uploadlocs + command: + - qrhandler.sh + + x11vnc: + extends: + service: gui + labels: + - "is_top_app=0" + image: malex984/dockapp:x11vnc + restart: "on-failure:5" + ports: + - "127.0.0.1:5900-5910:5900-5910" + devices: + - "/dev/shm:/dev/shm" + privileged: true + environment: + - VNC_PASSWD + ipc: host + command: + - x11vnc.sh + cap_add: + - SYS_ADMIN + - NET_ADMIN + - ALL + + + xeyes: + extends: + service: gui + image: malex984/dockapp:xeyes + restart: "on-failure:5" + labels: + - "is_top_app=1" + 
command: + - xeyes + + kiosk: + extends: + service: gui + image: malex984/dockapp:kiosk + restart: "on-failure:5" + labels: + - "is_top_app=1" + - "description=Kiosk Web Browser GUI app." + environment: + - HB_PORT + - HB_HOST + - HB_URL + - HB_INIT_TIMEOUT=9 + privileged: false + devices: + - "/dev/nvidia0:/dev/nvidia0" + - "/dev/nvidiactl:/dev/nvidiactl" + - "/dev/nvidia-modeset:/dev/nvidia-modeset" + - "/dev/nvram:/dev/nvram" + entrypoint: + - /sbin/my_init + - --skip-runit + - --skip-startup-files + - -- + - hb_wrapper.sh + - launch.sh + - browser.sh + - -l + command: + - "${WEBGL_APPS}/" + + IB_kiosk: + extends: + service: kiosk + labels: + - "description=Image Blend under Kiosk" + environment: + - APP_ID=light_pollution + command: + - "${WEBGL_APPS}/WebGL_ImageBlend_New/fade_08_new_05b_hb.html?HB_APP_ID=light_pollution&HB_URL=${HB_URL}" + + + HZ_kiosk: + extends: + service: kiosk + labels: + - "description=Habitable Zone under Kiosk" + environment: + - APP_ID=habitable_zones + command: + - "${WEBGL_APPS}/WebGL_Habitable_New_HB/gravity_habitable_new_05_hb2.html?HB_APP_ID=habitable_zones&HB_URL=${HB_URL}" + + chrome: + extends: + service: gui + image: malex984/dockapp:chrome + labels: + - "is_top_app=1" + - "description=Google Chrome Web Browser GUI app." 
+ restart: "on-failure:5" + environment: + - HB_PORT + - HB_HOST + - HB_URL + - HB_INIT_TIMEOUT=9 + entrypoint: + - /sbin/my_init + - --skip-runit + - --skip-startup-files + - -- + - hb_wrapper.sh + - launch.sh + - browser.sh + command: + - "${WEBGL_APPS}/" + + IB_chrome: + extends: + service: chrome + labels: + - "description=Image Blend under Chrome" + environment: + - APP_ID=IB_chrome + command: + - "${WEBGL_APPS}/WebGL_ImageBlend_New/fade_08_new_05b_hb.html?HB_URL=${HB_URL}&HB_APP_ID=IB_chrome" + + HZ_chrome: + extends: + service: chrome + labels: + - "description=Habitable Zone under Chrome" + environment: + - APP_ID=HZ_chrome + command: + - "${WEBGL_APPS}/WebGL_Habitable_New_HB/gravity_habitable_new_05_hb2.html?HB_URL=${HB_URL}&HB_APP_ID=HZ_chrome" + + opera: + extends: + service: chrome + labels: + - "description=Opera Web Browser GUI app." + environment: + - GOOGLE_CHROME=opera + + chromium: + extends: + service: chrome + labels: + - "description=Chromium Web Browser GUI app." + environment: + - GOOGLE_CHROME=chromium-browser + + kivy: + extends: + service: gui + image: malex984/dockapp:kivy + labels: + - "is_top_app=1" + restart: "on-failure:5" + entrypoint: + - /sbin/my_init + - --skip-runit + - --skip-startup-files + - -- + - launch.sh + command: + - /usr/local/src/Deflectouch/run.sh + + main: + extends: + service: gui + image: malex984/dockapp:main + labels: + - "is_top_app=1" + restart: "on-failure:5" + environment: + - MENU_TRY="gui" + entrypoint: + - /sbin/my_init + - --skip-runit + - --skip-startup-files + - -- + - /usr/local/bin/main.sh + + demo: + extends: + service: gui + image: malex984/dockapp:demo + labels: + - "is_top_app=1" + - "description=Choose demo app" + restart: "on-failure:5" + environment: + - MENU_TRY="gui" + entrypoint: + - /sbin/my_init + - --skip-runit + - --skip-startup-files + - -- + - /usr/local/bin/demo.sh + + x11vnc: + extends: + service: gui + image: malex984/dockapp:x11vnc + labels: + - "is_top_app=0" + restart: 
"on-failure:5" + entrypoint: + - /sbin/my_init + - --skip-runit + - --skip-startup-files + - -- + - /usr/local/bin/x11vnc.sh + + + registry: + image: registry:2 + labels: + - "is_top_app=0" + - "description=Docker Private Registry" + ports: + - "8055:5000" + restart: "on-failure:5" + volumes: + - "${PWD}/REG/DATA:/var/lib/registry" + + registry_tls: + extends: + service: registry + volumes: + - "${PWD}/REG/CERT:/certs" + labels: + - "description=Docker Private Registry (with TLS)" + environment: + - "REGISTRY_HTTP_TLS_CERTIFICATE=/certs/domain.crt" + - "REGISTRY_HTTP_TLS_KEY=/certs/domain.key" + + + dockapp: + extends: + service: gui + volumes: + - .:/DOCKAPP + working_dir: /DOCKAPP/ + labels: + - "is_top_app=1" + + register: + extends: + service: dockapp + image: imaginary.mfo.de:5000/malex984/alpine + devices: + - "/dev/bus/usb:/dev/bus/usb:rwm" + - "/dev/nvidia0:/dev/nvidia0" + - "/dev/nvidiactl:/dev/nvidiactl" + - "/dev/dri:/dev/dri" + - "/dev/snd:/dev/snd" + - "/dev/shm:/dev/shm" + - "/dev/input:/dev/input" + network_mode: "host" + ipc: host + pid: "host" + + x11_xclock: + extends: + service: admin + image: alpine:3.4 + stdin_open: false + tty: false + restart: "on-failure:5" + labels: + - "is_top_app=1" + entrypoint: + - /bin/sh + - -c + command: + - 'apk update && apk add xorg-server xf86-video-vesa xf86-input-evdev xf86-input-mouse xf86-input-keyboard udev && echo "exec xclock">>~/.xinitrc && apk add xclock --update-cache --repository "http://dl-cdn.alpinelinux.org/alpine/edge/testing" && startx' + +volumes: + supernova: + driver: local + omd_data: + driver: local + diff --git a/station/docker.cfg b/station/docker.cfg new file mode 100644 index 0000000..0a36af5 --- /dev/null +++ b/station/docker.cfg @@ -0,0 +1,16 @@ +export NO_PROXY=${NO_PROXY:-/var/run/docker.sock} + +#if [ -S "NO_PROXY" ]; then +# export DOCKER_HOST=${DOCKER_HOST:-unix://$NO_PROXY} +#else +# export DOCKER_HOST=${DOCKER_HOST:-$NO_PROXY} +#fi + +#export 
DOCKER_PLUGINS=${DOCKER_PLUGINS:-/run/docker/plugins/} +export DOCKER_COMPOSE_IMAGE=${DOCKER_COMPOSE_IMAGE:-malex984/dockapp:ddd} + +## export SWARM_DISCOVERY_HOST={SWARM_DISCOVERY_HOST:-dilbert} +## export SWARM_CLUSTER_TOKEN={SWARM_CLUSTER_TOKEN:-??????????????????} + +export COMPOSE_PROJECT_NAME=dockapp +export COMPOSE_FILE=${COMPOSE_FILE:-docker-compose.yml} diff --git a/station/finishall.sh b/station/finishall.sh index bd2de5a..202afa5 100755 --- a/station/finishall.sh +++ b/station/finishall.sh @@ -1,8 +1,5 @@ #!/bin/sh -set -v -set -x - SELFDIR=`dirname "$0"` SELFDIR=`cd "$SELFDIR" && pwd` cd "${SELFDIR}/" @@ -14,9 +11,9 @@ if [ -r "./station.cfg" ]; then . "./station.cfg" fi -if [ -r "./startup.cfg" ]; then - . "./startup.cfg" -fi +#if [ -r "./startup.cfg" ]; then +# . "./startup.cfg" +#fi #if [ -r "./docker.cfg" ]; then # . "./docker.cfg" diff --git a/station/generate_ogl.sh b/station/generate_ogl.sh new file mode 100755 index 0000000..2829ce3 --- /dev/null +++ b/station/generate_ogl.sh @@ -0,0 +1,76 @@ +#!/bin/bash + +#SELFDIR=`dirname "$0"` +#SELFDIR=`cd "$SELFDIR" && pwd` +#cd "$SELFDIR" + +IMAGE_VERSION="${IMAGE_VERSION:-latest}" + +I="dummy" + +# The following is an adaptation to the new naming schema: hilbert/$APP:$VERSION +U=hilbert +IMG="$U/${I}:${IMAGE_VERSION}" # IMG="$APP" #IMG="$U/$I:$APP" + +ID=$(docker images | awk '{ print "[" $1 ":" $2 "]" }' | sort | uniq | grep "\[${IMG}\]") + +if [ -z "$ID" ]; then + echo "ERROR: no such image '${IMG}'" +# exit 2 + U=malex984 + IMG="$U/dockapp:${I}" # IMG="$APP" #IMG="$U/$I:$APP" + ID=$(docker images | awk '{ print "[" $1 ":" $2 "]" }' | sort | uniq | grep "\[${IMG}\]") + + if [ -z "$ID" ]; then + echo "ERROR: no such image '${IMG}'" + exit 2 + fi + +fi + +shift + +G="$1" +shift + +#D=$1 +D=dummyx11 +C="c_$D" + + +## pre-cleanup +docker rm -vf $C 1>&2 || true +docker rmi -f --no-prune=false $D 1>&2 || true + + +R="-it -a stdin -a stdout -a stderr --label is_top_app=0 --ipc=host --net=host --pid=host -v 
/etc/localtime:/etc/localtime:ro -v /tmp/:/tmp/:rw" +O="--skip-startup-files --no-kill-all-on-exit --quiet --skip-runit" + +## Create $C conainer out of $IMG and run customization script in it: +docker run $R --name $C $IMG $O -- bash -c 'customize.sh' 1>&2 +# --privileged --ipc=host --net=host --pid=host -v /dev/:/dev/:rw + + +# docker start $C && sleep 1 +# docker commit --change "VOLUME /usr/" --change "VOLUME /etc/" --change "VOLUME /opt/" $C $D +# docker diff $C > ${mkfile_dir}/${APP}_${D}.diff + + +## Output the effects of customization procedure: +docker diff $C + +## Select necessary added/changed customizing files: +A=`docker diff $C |grep -E '^A /(usr|etc|sbin|var/lib|root|home)/' |grep -vE ' (/usr/src|/usr/lib/python[23]|/usr/share/doc|/usr/share/man|/etc/container_environment)' |sed 's@^[CA] @@g'|xargs` + +docker commit $C $D + +## pre-cleaup: +docker rm -vf $C 1>&2 +rm -Rf $G 1>&2 || true + +## generate target archive $G: +# TODO: --recursion ? ADDEDFILES=/bin/true /lib/x86_64-linux-gnu/libc.so.6 /lib64/ld-linux-x86-64.so.2 /usr/lib/x86_64-linux-gnu/ ? +docker run $R --rm $D $O -- bash -c "tar czvf $G --hard-dereference --dereference $A && chmod a+rw $G" + +## post-cleanup: +docker rmi -f --no-prune=false $D 1>&2 || true diff --git a/station/get-compose.sh b/station/get-compose.sh new file mode 100755 index 0000000..c634f18 --- /dev/null +++ b/station/get-compose.sh @@ -0,0 +1,93 @@ +#! /bin/bash + +#if hash docker-composei 2>/dev/null; then +# echo "DEBUG: using $(which docker-compose), $(docker-compose --version)" +# ln -s `which docker-compose` "${PWD}/compose" +#else + #! Get Docker compose + DOCKER_COMPOSE_VERSION=1.9.0 # NOTE: update to newer compose version if necessary! + DOCKER_COMPOSE_BASE_URL="https://github.com/docker/compose/releases/download/${DOCKER_COMPOSE_VERSION}" + + DOCKER_COMPOSE_BIN_URL="${DOCKER_COMPOSE_BASE_URL}/docker-compose-$(uname -s)-$(uname -m)" + DOCKER_COMPOSE_SH_URL="${DOCKER_COMPOSE_BASE_URL}/run.sh" + + + if [ ! 
-x ./docker-compose ]; then + if hash curl 2>/dev/null; then + curl -L "${DOCKER_COMPOSE_BIN_URL}" > ./docker-compose && chmod +x ./docker-compose + elif hash wget 2>/dev/null; then + wget -q -O - "${DOCKER_COMPOSE_BIN_URL}" > ./docker-compose && chmod +x ./docker-compose + fi + fi + + if [ -x ./docker-compose ]; then + if [ -x ./docker-compose ]; then + ln -s "${PWD}/docker-compose" "${PWD}/compose" + fi + + else + if [ ! -x ./docker-compose.sh ]; then + if hash curl 2>/dev/null; then + curl -L "${DOCKER_COMPOSE_SH_URL}" > ./docker-compose.sh && chmod +x ./docker-compose.sh + elif hash wget 2>/dev/null; then + wget -q -O - "${DOCKER_COMPOSE_SH_URL}" > ./docker-compose.sh && chmod +x ./docker-compose.sh + fi + fi + + if [ -x ./docker-compose.sh ]; then + ln -s "${PWD}/docker-compose.sh" "${PWD}/compose" + fi + fi +#fi + +if [ ! -x ./compose ]; then + if hash docker-composei 2>/dev/null; then + echo "DEBUG: using $(which docker-compose), $(docker-compose --version)" + ln -s `which docker-compose` "${PWD}/compose" + fi +fi + +if [ ! -x ./compose ]; then + echo "Warning: could not get 'docker-compose' from 'https://github.com/docker/compose/releases'! + Please download it as '$PWD/docker-compose' and make it executable!" +else + echo "DEBUG: using `readlink -f ${PWD}/compose`, `./compose --version`" +fi + + + +################################################################################### +### docker login -u malex984 -p ... ... imaginary.mfo.de:5000 + +D="${HOME}/.docker/" +F="${D}/config.json" +mkdir -p "${D}" +HILBERT_SERVER_DOCKER_REPOSITORY="${HILBERT_SERVER_DOCKER_REPOSITORY:-imaginary.mfo.de:5000}" +HILBERT_SERVER_DOCKER_REPOSITORY_AUTH="${HILBERT_SERVER_DOCKER_REPOSITORY_AUTH:-bWFsZXg5ODQ6MzJxMzJx}" +### TODO: FIXME: update wrt server repository! Later on! 
+cat > "${F}~" < ./local-persist-linux-amd64 +#chmod +x ./local-persist-linux-amd64 +#sudo ./local-persist-linux-amd64 1>./local-persist-linux-amd64.log 2>&1 & + +#docker volume create -d local-persist -o mountpoint="$CFG_DIR/KV" --name=KV +#docker volume create -d local-persist -o mountpoint="$CFG_DIR/OMD" --name=OMD +#docker volume create -d local-persist -o mountpoint="$CFG_DIR/CFG" --name=CFG diff --git a/station/luncher.sh b/station/luncher.sh index 9201a92..63ade57 100755 --- a/station/luncher.sh +++ b/station/luncher.sh @@ -1,8 +1,5 @@ #!/bin/sh -set -v -set -x - # # Run docker-compose within 'ddd' # @@ -36,9 +33,9 @@ if [ -r "./station.cfg" ]; then . "./station.cfg" fi -if [ -r "./startup.cfg" ]; then - . "./startup.cfg" -fi +#if [ -r "./startup.cfg" ]; then +# . "./startup.cfg" +#fi if [ -r "/tmp/lastapp.cfg" ]; then . "/tmp/lastapp.cfg" diff --git a/station/prepare.sh b/station/prepare.sh index d1e09fa..7447d49 100755 --- a/station/prepare.sh +++ b/station/prepare.sh @@ -1,45 +1,13 @@ -#!/bin/sh - -set -v -set -x +#! /bin/bash ### Preparation for actual docker-framework SELFDIR=`dirname "$0"` SELFDIR=`cd "$SELFDIR" && pwd` - -## unset DISPLAY - -### set -e cd "${SELFDIR}/" #### TODO: needs some safety check to avoid multiple runs... -## install Docker Volume 'local-persist' plugin following https://github.com/CWSpear/local-persist -##curl -fsSL https://raw.githubusercontent.com/CWSpear/local-persist/master/scripts/install.sh | sudo bash - -### TODO: update to newer compose version if necessary!... -DOCKER_COMPOSE_LINUX64_URL="https://github.com/docker/compose/releases/download/1.8.0/docker-compose-Linux-x86_64" - -if [ ! -f ./compose ]; -then - - if hash curl 2>/dev/null; then - curl -L "${DOCKER_COMPOSE_LINUX64_URL}" > ./compose && chmod +x ./compose - elif hash wget 2>/dev/null; then - wget -q -O - "${DOCKER_COMPOSE_LINUX64_URL}" > ./compose && chmod +x ./compose - fi - -fi - -if [ ! 
-f ./compose ]; -then - echo "Warning: could not get docker-compose via '${DOCKER_COMPOSE_LINUX64_URL}'! - Please download it as '$PWD/compose' and make it executable!" -fi - -chmod a+x ./compose - #! Finish our services (possible left-overs due to some crash) #### ./finishall.sh # NOTE: no clean-up for left-overs for now @@ -49,33 +17,21 @@ chmod a+x ./compose #! All images?? # docker images -q -a | xargs --no-run-if-empty docker rmi -## cd ./tmp/ -### TODO: add the plugin for global installation? -#curl -fsSL https://github.com/CWSpear/local-persist/releases/download/v1.1.0/local-persist-linux-amd64 > ./local-persist-linux-amd64 -#chmod +x ./local-persist-linux-amd64 -#sudo ./local-persist-linux-amd64 1>./local-persist-linux-amd64.log 2>&1 & - ### add me to all the necessary groups etc ... #if [ ! -L "$CFG_DIR/CFG" ]; then # ln -sf "$CFG_DIR" "$CFG_DIR/CFG" #fi -#docker volume create -d local-persist -o mountpoint="$CFG_DIR/KV" --name=KV -#docker volume create -d local-persist -o mountpoint="$CFG_DIR/OMD" --name=OMD -#docker volume create -d local-persist -o mountpoint="$CFG_DIR/CFG" --name=CFG - - #! TODO: FIXME: SUDO! ### ./ptmx.sh >/dev/null 2>&1 & if [ -f ./OGL.tgz ]; then cp -fp ./OGL.tgz /tmp/ || sudo -n -P cp -fp ./OGL.tgz /tmp/ +else + echo "WARNING: Missing 'OGL.tgz'! Please regenerate!" fi -if hash ethtool 2>/dev/null; then - sudo ethtool -s "${NET_IF}" wol g -fi # if [ -e "/tmp/lastapp.cfg" ]; then # @@ -87,31 +43,13 @@ fi # # fi -### docker login -u malex984 -p ... ... 
imaginary.mfo.de:5000 - -D="${HOME}/.docker/" -F="${D}/config.json" - -mkdir -p "${D}" -cat > "${F}~" <= (2, 7): +# import unittest # NOQA +#else: +# import unittest2 as unittest # NOQA +# +#try: +# from unittest import mock +#except ImportError: +# import mock # NOQA diff --git a/tests/data/Hilbert.yml b/tests/data/Hilbert.yml new file mode 100644 index 0000000..1e113d7 --- /dev/null +++ b/tests/data/Hilbert.yml @@ -0,0 +1,97 @@ +Version: 0.6.0 # 0.MAJOR.MINOR, later: MAJOR.MINOR + +Services: # file: docker-compose.yml, + omd_agent: { type: compose, ref: omd_agent, auto_detections: 'export HB_URL=${HILBERT_HEARTBEAT_URL}' } + omd: { type: compose, ref: omd_anew, auto_detections: 'export OMD=${HILBERT_OMD_PATH}' } + # ptmx, registry, mng qr_handler + +Profiles: + standalone: + services: [] + description: Generic Networking, without SSH & Docker & OMD agent. + name: standalone + supported_types: [] + + server: + services: [ ptmx, omd_agent, omd, registry, mng ] + description: Only for Server Station + name: server + supported_types: [ compose ] + + simple: + services: [ ptmx, omd_agent ] + description: Any Linux station without QR Scanner + name: simple + supported_types: [ compose ] + + qr: + services: [ ptmx, omd_agent, qr_handler ] + description: Linux stations with QR Scanner + name: Tracking + supported_types: [ compose ] + +Stations: + station_defaults: # fake station: only used to share default settings + name: hidden default station + description: Not a real station - Just hidden default settings + profile: standalone + omd_tag: standalone + address: Problematic.SSH.Alias + hidden: true # hide on Dashboard + client_settings: + hilbert_autostart: true # Station starts Hilbert upon booting + hilbert_autostart_delay: 20 # … with this delay in [sec] + HILBERT_PREFERRED_LANGUAGE: de + HILBERT_HEARTBEAT_URL: http://127.0.0.1:8888 + HILBERT_CUPS_SERVER: printer1.public.supernova:631 + HILBERT_HIDE_MOUSE_CURSOR: 1 + HILBERT_ALSA_CARD: 1 + HILBERT_CUSTOMIZATIONS: 
nv,alsa + + testhost1: + profile: standalone + omd_tag: standalone + hidden: false + description: Some STANDALONE Station + name: Test 1 + address: test1.host.dns.name + poweron_settings: { type: WOL, mac: '11:22:33:44:55:66', auto_turnon: true } + + supernova: + extends: station_defaults + hidden: true # + description: 'Server Station: Supernova' + name: Supernova Server + address: supernova.mfo.de + profile: server + omd_tag: agent + client_settings: + hilbert_autostart_delay: 0 # no delay before starting Hilbert here + HILBERT_SERVER_CONFIG_PATH: '${HOME}/.config/hilbert-server/' # where to keep sync'ed content + HILBERT_OMD_PATH: '${HOME}/.config/hilbert-omd/' # persistent storage for OMD + HILBERT_REGISTRY_DATA_PATH: '${HOME}/.config/hilbert-registry-data/' # persistent storage for docker registry + + vb_hb_test_a: + extends: station_defaults + hidden: false + address: 192.168.99.109 # No static DNS host name for VMs + description: 'Testing Virtual Station: A' + name: 'Virtual Station: Test A' + omd_tag: agent + profile: simple + poweron_settings: { type: DockerMachine, auto_turnon: true, vm_host_address: supernova.mfo.de, vm_name: vb-hb-test-a } + client_settings: + hilbert_autostart_delay: 10 # 10 sec. 
delay before starting here Hilbert-CLI-Station + hilbert_station_default_application: hb_test_a + +Groups: + mygroup: { simple, exclude: [qr] } + +Applications: + hb_test: + type: compose + ref: hb_test # file: docker-compose.yml # default - may be omitted + auto_detections: 'export HB_URL=${HILBERT_HEARTBEAT_URL}' + name: HB-Test + description: Random HB testing + compatibleStations: { mygroup } diff --git a/tests/data/Hilbert.yml.data.pickle b/tests/data/Hilbert.yml.data.pickle new file mode 100644 index 0000000..114f28b Binary files /dev/null and b/tests/data/Hilbert.yml.data.pickle differ diff --git a/tests/data/Hilbert.yml.pickle b/tests/data/Hilbert.yml.pickle new file mode 100644 index 0000000..a1f4e6b Binary files /dev/null and b/tests/data/Hilbert.yml.pickle differ diff --git a/tests/data/dc.yml b/tests/data/dc.yml new file mode 100644 index 0000000..33bfe94 --- /dev/null +++ b/tests/data/dc.yml @@ -0,0 +1,7 @@ +version: '2' +services: + omd: + image: malex984/dockapp:base + environment: + - HV + diff --git a/tests/data/docker-compose.yml b/tests/data/docker-compose.yml new file mode 100644 index 0000000..51cf55e --- /dev/null +++ b/tests/data/docker-compose.yml @@ -0,0 +1,596 @@ +version: '2' +services: + base: + image: malex984/dockapp:base + volumes: + - /tmp:/tmp:rw + - ${PWD}:/DOCKAPP + - /etc/localtime:/etc/localtime:ro + labels: + - "is_top_app=0" + - "description=Base for dockapp services" + working_dir: /DOCKAPP + privileged: false + network_mode: "host" + environment: + - CFG_DIR=/DOCKAPP + - CUPS_SERVER + - ALSA_CARD + - LANGUAGE + - MOUSE_CURSOR + - CUSTOMIZATION + entrypoint: + - /bin/sh + + ddd: + extends: + service: base + image: ${DOCKER_COMPOSE_IMAGE} + labels: + - "is_top_app=0" + - "description=Docker CLI + Compose" + volumes: + - ${NO_PROXY}:${NO_PROXY} + environment: + - NO_PROXY + entrypoint: + - /bin/sh + + admin: + extends: + service: ddd + volumes: + - /dev:/dev:rw + - /run/udev:/run/udev + - /sys/fs/cgroup:/sys/fs/cgroup:ro + - 
/run/systemd:/run/systemd + - /var/run/dbus/system_bus_socket:/var/run/dbus/system_bus_socket + labels: + - "is_top_app=0" + cap_add: + - SYS_ADMIN + - NET_ADMIN + - ALL + + consul_base: + extends: + service: admin + image: progrium/consul + ports: + - "8400:8400" + - "8500:8500" + - "8600:53/udp" + labels: + - "is_top_app=0" + stdin_open: false + tty: false + restart: "on-failure:5" + entrypoint: + - /bin/start + + consul_agent: + extends: + service: consul_base + command: + - "-advertise" + - "{station_public_ip}" + - "-join" + - "${management_server_ip}" + + consul: + extends: + service: consul_base + volumes: + - "${PWD}/KV:/data" + command: + - "-server" + - "-bootstrap" + - "-advertise" + - "{station_public_ip}" + - "-ui-dir" + - "/ui" + + registrator: + extends: + service: admin + image: gliderlabs/registrator:latest + entrypoint: + - "/bin/registrator" + command: + - "consul://${management_server_ip}:8500" + + x11: + extends: + service: admin + image: malex984/dockapp:dummy + privileged: true + stdin_open: false + tty: false + restart: "on-failure:5" + entrypoint: + - /sbin/my_init + - --skip-runit + - --skip-startup-files + - -- + command: + - startXephyr.sh + + xephyr: + extends: + service: x11 + environment: + - XCMD=Xephyr + + xvfb: + extends: + service: x11 + environment: + - XCMD=Xvfb + command: + - startXephyr.sh + - "-screen" + - "0" + - "1024x768x16" + + omd_agent: + extends: + service: ddd + image: imaginary.mfo.de:5000/malex984/omd_agent + environment: + - HB_PORT + - HB_HOST + - HB_URL + ports: + - "6556" + - "${HB_PORT}" + stdin_open: false + tty: false + restart: "on-failure:5" + labels: + - "is_top_app=0" + entrypoint: + - /sbin/my_init + - --skip-runit + - --skip-startup-files + - -- + command: + - omd_agent_entrypoint.sh + + hb_test: + extends: + service: base + privileged: false + image: malex984/dockapp:omd_agent + environment: + - HB_PORT + - HB_HOST + - HB_URL + - HB_INIT_TIMEOUT + - APP_ID=hb_test + stdin_open: false + tty: false + 
restart: "on-failure:5" + labels: + - "is_top_app=1" + - "description=HB test in python" + entrypoint: + - /sbin/my_init + - --skip-runit + - --skip-startup-files + - -- + command: + - python2.7 + - /usr/local/bin/heartbeat2.py + + + hb_test_a: + extends: + service: base + image: imaginary.mfo.de:5000/malex984/appa + environment: + - APP_ID=hb_test_a + - HB_PORT + - HB_HOST + - HB_URL + - HB_INIT_TIMEOUT + stdin_open: false + tty: false + restart: "on-failure:5" + labels: + - "description=HB test in bash: A" + - "is_top_app=1" + entrypoint: + - /sbin/my_init + - --skip-runit + - --skip-startup-files + - -- + - /usr/local/bin/A.sh + command: + - AHB_HelloA + + hb_test_b: + extends: + service: hb_test_a + environment: + - APP_ID=hb_test_b + labels: + - "description=HB test in bash: B" + command: + - BHB_HelloB + + hb_test_c: + extends: + service: hb_test_a + environment: + - APP_ID=hb_test_c + labels: + - "description=HB test in bash: C" + command: + - CHB_HelloC + + busybox: + extends: + service: admin + labels: + - "is_top_app=0" + image: busybox + entrypoint: + - /bin/sh + + ptmx: + extends: + service: busybox + stdin_open: false + tty: false + restart: "on-failure:5" + command: + - "./ptmx.sh" + + reboot: + extends: + service: busybox + stdin_open: false + tty: false + command: + - reboot + + omd_anew: + extends: + service: base + image: malex984/dockapp:omd + ports: + - "80" + - "514" + - "5667" + - "5000" + labels: + - "is_top_app=0" + stdin_open: false + tty: false + restart: "on-failure:5" + volumes: + - "omd_data:/OMD" + entrypoint: + - /sbin/my_init + - --skip-runit + - --skip-startup-files + - -- + command: + - omd_entrypoint.sh + + omd_persistent: + extends: + service: omd_anew + volumes: + - "omd_data:/omd/sites" + + pa: + extends: + service: admin + labels: + - "is_top_app=0" + volumes: + - ${PULSE_SOCKET}:/run/pulse/native + - ${PULSE_COOKIE}:/run/pulse/cookie + environment: + - PULSE_SERVER=/run/pulse/native + - PULSE_COOKIE=/run/pulse/cookie + + gui: 
+ extends: + service: pa + labels: + - "is_top_app=0" + environment: + - QT_X11_NO_MITSHM=1 + - XLIB_SKIP_ARGB_VISUALS=1 + - XAUTHORITY="${XAUTH}" + - DISPLAY + stdin_open: false + tty: false + entrypoint: + - /sbin/my_init + - --skip-runit + - --skip-startup-files + - -- + + qrhandler: + extends: + service: gui + labels: + - "is_top_app=0" + image: malex984/dockapp:qrhandler + restart: "on-failure:5" + volumes: + - supernova:/supernova + environment: + - QR_DEVICE_ID + - qrs_screenshot_message + - qr_uploadlocs + command: + - qrhandler.sh + + x11vnc: + extends: + service: gui + labels: + - "is_top_app=0" + image: malex984/dockapp:x11vnc + restart: "on-failure:5" + ports: + - "127.0.0.1:5900-5910:5900-5910" + environment: + - VNC_PASSWD + command: + - x11vnc.sh + + + xeyes: + extends: + service: gui + image: malex984/dockapp:xeyes + restart: "on-failure:5" + labels: + - "is_top_app=1" + command: + - xeyes + + kiosk: + extends: + service: gui + image: malex984/dockapp:kiosk + restart: "on-failure:5" + labels: + - "is_top_app=1" + - "description=Kiosk Web Browser GUI app." 
+ environment: + - HB_PORT + - HB_HOST + - HB_URL + - HB_INIT_TIMEOUT=9 + entrypoint: + - /sbin/my_init + - --skip-runit + - --skip-startup-files + - -- + - hb_wrapper.sh + - launch.sh + - browser.sh + - -l + command: + - "${WEBGL_APPS}/" + + IB_kiosk: + extends: + service: kiosk + labels: + - "description=Image Blend under Kiosk" + environment: + - APP_ID=light_pollution + command: + - "${WEBGL_APPS}/WebGL_ImageBlend_New/fade_08_new_05b_hb.html?HB_APP_ID=light_pollution&HB_URL=${HB_URL}" + + + HZ_kiosk: + extends: + service: kiosk + labels: + - "description=Habitable Zone under Kiosk" + environment: + - APP_ID=habitable_zones + command: + - "${WEBGL_APPS}/WebGL_Habitable_New_HB/gravity_habitable_new_05_hb2.html?HB_APP_ID=habitable_zones&HB_URL=${HB_URL}" + + chrome: + extends: + service: gui + image: malex984/dockapp:chrome + labels: + - "is_top_app=1" + - "description=Google Chrome Web Browser GUI app." + restart: "on-failure:5" + environment: + - HB_PORT + - HB_HOST + - HB_URL + - HB_INIT_TIMEOUT=9 + entrypoint: + - /sbin/my_init + - --skip-runit + - --skip-startup-files + - -- + - hb_wrapper.sh + - launch.sh + - browser.sh + command: + - "${WEBGL_APPS}/" + + IB_chrome: + extends: + service: chrome + labels: + - "description=Image Blend under Chrome" + environment: + - APP_ID=IB_chrome + command: + - "${WEBGL_APPS}/WebGL_ImageBlend_New/fade_08_new_05b_hb.html?HB_URL=${HB_URL}&HB_APP_ID=IB_chrome" + + HZ_chrome: + extends: + service: chrome + labels: + - "description=Habitable Zone under Chrome" + environment: + - APP_ID=HZ_chrome + command: + - "${WEBGL_APPS}/WebGL_Habitable_New_HB/gravity_habitable_new_05_hb2.html?HB_URL=${HB_URL}&HB_APP_ID=HZ_chrome" + + opera: + extends: + service: chrome + labels: + - "description=Opera Web Browser GUI app." + environment: + - GOOGLE_CHROME=opera + + chromium: + extends: + service: chrome + labels: + - "description=Chromium Web Browser GUI app." 
+ environment: + - GOOGLE_CHROME=chromium-browser + + kivy: + extends: + service: gui + image: malex984/dockapp:kivy + labels: + - "is_top_app=1" + restart: "on-failure:5" + entrypoint: + - /sbin/my_init + - --skip-runit + - --skip-startup-files + - -- + - launch.sh + command: + - /usr/local/src/Deflectouch/run.sh + + main: + extends: + service: gui + image: malex984/dockapp:main + labels: + - "is_top_app=1" + restart: "on-failure:5" + environment: + - MENU_TRY="gui" + entrypoint: + - /sbin/my_init + - --skip-runit + - --skip-startup-files + - -- + - /usr/local/bin/main.sh + + demo: + extends: + service: gui + image: malex984/dockapp:demo + labels: + - "is_top_app=1" + - "description=Choose demo app" + restart: "on-failure:5" + environment: + - MENU_TRY="gui" + entrypoint: + - /sbin/my_init + - --skip-runit + - --skip-startup-files + - -- + - /usr/local/bin/demo.sh + + x11vnc: + extends: + service: gui + image: malex984/dockapp:x11vnc + labels: + - "is_top_app=0" + restart: "on-failure:5" + entrypoint: + - /sbin/my_init + - --skip-runit + - --skip-startup-files + - -- + - /usr/local/bin/x11vnc.sh + + + registry: + image: registry:2 + labels: + - "is_top_app=0" + - "description=Docker Private Registry" + ports: + - "8055:5000" + restart: "on-failure:5" + volumes: + - "${PWD}/REG/DATA:/var/lib/registry" + + registry_tls: + extends: + service: registry + volumes: + - "${PWD}/REG/CERT:/certs" + labels: + - "description=Docker Private Registry (with TLS)" + environment: + - "REGISTRY_HTTP_TLS_CERTIFICATE=/certs/domain.crt" + - "REGISTRY_HTTP_TLS_KEY=/certs/domain.key" + + + dockapp: + extends: + service: gui + volumes: + - .:/DOCKAPP + working_dir: /DOCKAPP/ + labels: + - "is_top_app=1" + + register: + extends: + service: dockapp + image: imaginary.mfo.de:5000/malex984/alpine + devices: + - "/dev/bus/usb:/dev/bus/usb:rwm" + - "/dev/nvidia0:/dev/nvidia0" + - "/dev/nvidiactl:/dev/nvidiactl" + - "/dev/dri:/dev/dri" + - "/dev/snd:/dev/snd" + - "/dev/shm:/dev/shm" + - 
"/dev/input:/dev/input" + network_mode: "host" + ipc: host + pid: "host" + + x11_xclock: + extends: + service: admin + image: alpine:3.4 + stdin_open: false + tty: false + restart: "on-failure:5" + labels: + - "is_top_app=1" + entrypoint: + - /bin/sh + - -c + command: + - 'apk update && apk add xorg-server xf86-video-vesa xf86-input-evdev xf86-input-mouse xf86-input-keyboard udev && echo "exec xclock">>~/.xinitrc && apk add xclock --update-cache --repository "http://dl-cdn.alpinelinux.org/alpine/edge/testing" && startx' + +volumes: + supernova: + driver: local + omd_data: + driver: local + diff --git a/tests/data/miniHilbert.yml b/tests/data/miniHilbert.yml new file mode 100644 index 0000000..58218ac --- /dev/null +++ b/tests/data/miniHilbert.yml @@ -0,0 +1,6 @@ +Version: 0.1 +Services: +Profiles: +Stations: +Groups: +Applications: diff --git a/tests/data/miniHilbert.yml.data.pickle b/tests/data/miniHilbert.yml.data.pickle new file mode 100644 index 0000000..6d3b5d7 Binary files /dev/null and b/tests/data/miniHilbert.yml.data.pickle differ diff --git a/tests/data/miniHilbert.yml.pickle b/tests/data/miniHilbert.yml.pickle new file mode 100644 index 0000000..69327c1 Binary files /dev/null and b/tests/data/miniHilbert.yml.pickle differ diff --git a/tests/data/singleHostHilbert.yml b/tests/data/singleHostHilbert.yml new file mode 100644 index 0000000..b5ce121 --- /dev/null +++ b/tests/data/singleHostHilbert.yml @@ -0,0 +1,104 @@ +Version: 0.6.0 # 0.MAJOR.MINOR, later: MAJOR.MINOR + +Services: # file: docker-compose.yml, + omd_agent: { type: compose, file: docker-compose.yml, ref: omd_agent, auto_detections: 'export HB_URL=${HILBERT_HEARTBEAT_URL}' } + omd_anew: { type: compose, file: docker-compose.yml, ref: omd_anew, auto_detections: '' } + omd_persistent: { type: compose, file: docker-compose.yml, ref: omd_persistent, auto_detections: 'export OMD=${HILBERT_OMD_PATH}' } + ptmx: { type: compose, file: docker-compose.yml, ref: ptmx, auto_detections: '' } + registry: { 
type: compose, file: docker-compose.yml, ref: registry, auto_detections: '' } + mng: { type: compose, file: docker-compose.yml, ref: mng, auto_detections: '' } + qrhandler: { type: compose, file: docker-compose.yml, ref: qrhandler, auto_detections: '' } + +Applications: + hb_test: + type: compose + ref: hb_test + file: docker-compose.yml + auto_detections: 'export HB_URL=${HILBERT_HEARTBEAT_URL}' + name: HB-Test + description: Random HB testing + compatibleStations: { server, simple } + + kiosk: + type: compose + ref: kiosk + file: docker-compose.yml + auto_detections: 'export HB_URL=${HILBERT_HEARTBEAT_URL}' + name: Kiosk with HB + description: Kiosk Web Browser GUI app + compatibleStations: { server, simple } + +Profiles: + standalone: # TODO: FIXME: this should not be required! + services: [] + description: Generic Networking, without SSH & Docker & OMD agent. + name: standalone + supported_types: [] + + server: + services: [ ptmx, omd_agent, omd, registry ] # , mng + description: Only for Server System + name: server + supported_types: [ compose ] + + simple: + services: [ ptmx, omd_agent ] + description: Any Linux station without QR Scanner + name: simple + supported_types: [ compose ] + + std: + services: [ ptmx, omd_agent, qrhandler ] + description: Linux station with QR Scanner and Display + name: standard + supported_types: [ compose ] + + +Stations: + station_defaults: # fake station: only used to share default settings + type: hidden + name: hidden default station + description: Not a real station - Just hidden default settings + profile: standalone # TODO: FIXME: this should not be required! + omd_tag: standalone + address: Problematic.SSH.Alias # TODO: NOTE: should not be mandatory! 
+ hidden: true # hide on Dashboard # to be removed due to station.type + client_settings: + hilbert_autostart: true # Station starts Hilbert upon booting + hilbert_autostart_delay: 20 # … with this delay in [sec] + HILBERT_PREFERRED_LANGUAGE: de + HILBERT_HEARTBEAT_URL: http://127.0.0.1:8888 + HILBERT_CUPS_SERVER: printer1.public.supernova:631 + HILBERT_HIDE_MOUSE_CURSOR: 1 + HILBERT_ALSA_CARD: 1 + HILBERT_CUSTOMIZATIONS: nv,alsa + + server: + type: server + extends: station_defaults + hidden: false # to be removed due to station.type + description: 'Server Station: Supernova' + name: Supernova Server + address: localhost + profile: server + omd_tag: agent + client_settings: + hilbert_autostart_delay: 0 # no delay before starting Hilbert here + HILBERT_SERVER_CONFIG_PATH: '${HOME}/.config/hilbert-server/' # where to keep sync'ed content + HILBERT_OMD_PATH: '${HOME}/.config/hilbert-omd/' # persistent storage for OMD + HILBERT_REGISTRY_DATA_PATH: '${HOME}/.config/hilbert-registry-data/' # persistent storage for docker registry + + station: + type: standard + extends: station_defaults + hidden: false # to be removed due to station.type + address: localhost + description: 'Testing Virtual Station: A' + name: 'Virtual Station: Test A' + omd_tag: agent + profile: simple + poweron_settings: { type: WOL, mac: '3c:18:a0:04:e6:c2', auto_turnon: true } + client_settings: + hilbert_autostart_delay: 10 # 10 sec. 
delay before starting here Hilbert-CLI-Station + hilbert_station_default_application: hb_test_a + diff --git a/tests/data/singleHostHilbert.yml.data.pickle b/tests/data/singleHostHilbert.yml.data.pickle new file mode 100644 index 0000000..2b63e8d Binary files /dev/null and b/tests/data/singleHostHilbert.yml.data.pickle differ diff --git a/tests/data/singleHostHilbert.yml.pickle b/tests/data/singleHostHilbert.yml.pickle new file mode 100644 index 0000000..946198d Binary files /dev/null and b/tests/data/singleHostHilbert.yml.pickle differ diff --git a/tests/test_loadwarnings.py b/tests/test_loadwarnings.py new file mode 100644 index 0000000..0d1c374 --- /dev/null +++ b/tests/test_loadwarnings.py @@ -0,0 +1,70 @@ +# -*- coding: utf-8 -*- +# encoding: utf-8 +# coding: utf-8 + +from __future__ import absolute_import, print_function, unicode_literals # NOQA + +import sys +from os import path + +DIR=path.dirname( path.dirname( path.abspath(__file__) ) ) +sys.path.append(DIR) +sys.path.append(path.join(DIR, 'config')) + +from helpers import * +from hilbert_cli_config import * +from subcmdparser import * + + +import pytest # NOQA + +def load(s): + return load_yaml(s) + +class TestLoad: + def test_1(self, capsys): + out, err = capsys.readouterr() + load('{a, b, a}') + out, err = capsys.readouterr() + +# with capsys.disabled(): + assert err == '' + assert out == """\ +WARNING: Key re-definition within some mapping: +K[line: 1, column: 2]: Previous Value: + a: None + ↑ +--- +K[line: 1, column: 8]: New Value: + a: None + ↑ +--- +=== +""" + + + + def test_2(self, capsys): + out, err = capsys.readouterr() + load("""{ ? a, ? b, ? 
a }""") + out, err = capsys.readouterr() + + assert err == '' + assert out == """\ +WARNING: Key re-definition within some mapping: +K[line: 1, column: 5]: Previous Value: + a: None + ↑ +--- +K[line: 1, column: 15]: New Value: + a: None + ↑ +--- +=== +""" + + + + + + diff --git a/tests/test_validate.py b/tests/test_validate.py new file mode 100644 index 0000000..15eb7dc --- /dev/null +++ b/tests/test_validate.py @@ -0,0 +1,84 @@ +# -*- coding: utf-8 -*- +# encoding: utf-8 +# coding: utf-8 + +from __future__ import absolute_import, print_function, unicode_literals # NOQA + +import sys +from os import path +DIR=path.dirname( path.dirname( path.abspath(__file__) ) ) +sys.path.append(DIR) +sys.path.append(path.join(DIR, 'config')) + +from helpers import * +from hilbert_cli_config import * +from subcmdparser import * + +#from config.hilbert_cli_config import * +#from config.helpers import * +#from config.subcmdparser import * + + +# sys.path.append( DIR ) + +#from hilbert_cli_config import * +#from helpers import * + +import pytest # NOQA +import os # NOQA + + +FIXTURE_DIR = os.path.abspath(os.path.join( + os.path.dirname(os.path.realpath(__file__)), + 'data', + )) + +def hilbert_validation(input_yaml_file, data_file): + global INPUT_DIRNAME + + input_file = os.path.join(FIXTURE_DIR, input_yaml_file) + data_file = os.path.join(FIXTURE_DIR, data_file) + + assert os.path.exists(input_file) + assert os.path.exists(data_file) + + # Load Hilbert configuration in YAML format + yml = load_yaml_file(input_file) + assert yml is not None + + # Load previously verified data + d = pickle_load(data_file) + assert d is not None + assert isinstance(d, dict) + + cwd = os.getcwd() + try: + INPUT_DIRNAME = FIXTURE_DIR + os.chdir(INPUT_DIRNAME) + cfg = parse_hilbert(yml) + finally: + os.chdir(cwd) + + assert cfg is not None + assert isinstance(cfg, Hilbert) + + data = cfg.data_dump() + assert isinstance(data, dict) + + # print(cfg) + # print(yaml_dump(data)) + # print(yaml_dump(d)) + + # 
NOTE: Main check: + assert data == d # Compare dictionaries with simple data! + + +class TestValidate: + def test_minimal_sample(self, capsys): + hilbert_validation('miniHilbert.yml', 'miniHilbert.yml.data.pickle') + + def test_sample(self, capsys): + hilbert_validation('Hilbert.yml', 'Hilbert.yml.data.pickle') + + def test_single(self, capsys): + hilbert_validation('singleHostHilbert.yml', 'singleHostHilbert.yml.data.pickle') diff --git a/tests/test_version.py b/tests/test_version.py new file mode 100644 index 0000000..29d930c --- /dev/null +++ b/tests/test_version.py @@ -0,0 +1,72 @@ +# -*- coding: utf-8 -*- +# encoding: utf-8 +# coding: utf-8 + +from __future__ import absolute_import, print_function, unicode_literals # NOQA + +import sys +from os import path + +DIR=path.dirname( path.dirname( path.abspath(__file__) ) ) +sys.path.append(DIR) +sys.path.append(path.join(DIR, 'config')) + +# from helpers import * +from hilbert_cli_config import SemanticVersionValidator, load_yaml +# from subcmdparser import * + +import pytest # NOQA + +# NOTE: supports partial versions! +import semantic_version # NOQA + + +# TODO: FIXME: set globally format version for SemanticVersionValidator! 
+ +def _helper(s, partial=False): + v = None + try: + v = semantic_version.Version(s, partial=partial) + except: + pass + + assert v is not None + assert isinstance(v, semantic_version.Version) + + yaml_data = load_yaml("'{}'".format(s)) + assert yaml_data is not None + + # NOTE: Validator parsing + validator = SemanticVersionValidator.parse(yaml_data, parent=None, partial=partial, parsed_result_is_data=False) + + assert validator is not None + assert isinstance(validator, SemanticVersionValidator) + validator_data = validator.get_data() + assert validator_data is not None + assert isinstance(validator_data, semantic_version.Version) + + return v == validator_data + + +class TestVersions: + def test_good(self): + v = '0.0.1' + assert _helper(v) + + v = '0.1.2' + assert _helper(v) + + v = '1.2.3' + assert _helper(v) + + def test_good_partial(self): + v = '0.0' + assert _helper(v, partial=True) + + v = '0.1' + assert _helper(v, partial=True) + + v = '1.2' + assert _helper(v, partial=True) + +# TODO: add failure tests: e.g. wrong version + with exceptions... \ No newline at end of file diff --git a/tools/hilbert b/tools/hilbert new file mode 120000 index 0000000..8506212 --- /dev/null +++ b/tools/hilbert @@ -0,0 +1 @@ +hilbert.py \ No newline at end of file diff --git a/tools/hilbert-station b/tools/hilbert-station new file mode 100755 index 0000000..edd6f6c --- /dev/null +++ b/tools/hilbert-station @@ -0,0 +1,441 @@ +#! /usr/bin/env bash + +# TODO: add some more description here? +# NOTE: Custom argument/option handling only using `getopts`! + +# NOTE: Exit codes may be as follows (see status.sh, others will be updated): +# - 0: success (no error detected). There maybe warnings +# - 1: detected error which is not our fault (e.g. network / HW etc): user can try again +# - 2: error due to wrong usage of scripts (bad arguments) / in config files or some assumption was violated. +# NOTE: in PEDANTIC mode some non-critical issues may be treated this way. 
+# - any other value: something unexpected has happened! + + +TOOL=$(basename "$0") # script_name=`basename "$0"` # script_name="${script_name%.*}" +LOGLEVEL=0 + +# NOTE: locking following: http://stackoverflow.com/a/1985512 by Przemyslaw Pawelczyk +# NOTE: uses flock from util-linux[-ng] + +# NOTE: /var/lock/ == /run/lock/ => will be removed in case of a crash! No stale lockfile is possible! +if [[ -w "/var/lock/lockdev" ]]; then + LOCKFILE="/var/lock/lockdev/${TOOL}" +elif [[ -w "/var/lock/" ]]; then + LOCKFILE="/var/lock/${TOOL}" +else + LOCKFILE="/tmp/_var_lock_${TOOL}" +fi +LOCKFD=99 + +# PRIVATE +_lock() { flock -$1 $LOCKFD; } +_no_more_locking() { _lock u; _lock xn && rm -f $LOCKFILE; } +_prepare_locking() { eval "exec $LOCKFD>\"$LOCKFILE\""; trap _no_more_locking EXIT; } + +# PUBLIC +exlock_now() { _lock xn; } # obtain an exclusive lock immediately or fail +exlock() { _lock x; } # obtain an exclusive lock. wait +shlock() { _lock s; } # obtain a shared lock +unlock() { _lock u; } # drop a lock + +# TODO(?): evaluate other ways according to http://stackoverflow.com/questions/1715137/the-best-way-to-ensure-only-1-copy-of-bash-script-is-running + +exec 3>&1 4>&2 +trap 'exec 2>&4 1>&3 ' 0 1 2 3 13 15 RETURN +exec 2>&1 +# 1>log.out + +start_locking () { + DEBUG "Starting exclusive (locked) usage of ${TOOL}..." + + # ON START: + _prepare_locking + # Simplest example is avoiding running multiple instances of script: `exlock_now || exit 1` + # Remember! Lock file is removed when one of the scripts exits and it is + # the only script holding the lock or lock is not acquired at all. + + if exlock_now + then + DEBUG "Obtained exclusive lock of [$(ls -la ${LOCKFILE})]" + else + DEBUG "Lockfile: $(ls -la ${LOCKFILE})" + ERROR "Another user is already running ${TOOL}: please try again later!" 
+ exit 2 + fi + +} + +function INFO() { + echo "INFO [${TOOL}:${FUNCNAME[1]}] $*" +} +function WARNING() { + echo "WARNING [${TOOL}:${FUNCNAME[1]}] $*" +} +function DEBUG() { + echo "DEBUG [${TOOL}:${FUNCNAME[1]}] $*" +} +function ERROR(){ + echo "ERROR [${TOOL}:${FUNCNAME[1]}] $*" +} + +# TODO: read HILBERT_CONFIG_DIR from `~/.hilbert-station` ? +export HILBERT_CONFIG_DIR="${HILBERT_CONFIG_DIR:-${HOME}/.config/${TOOL}}" +export HILBERT_CONFIG_FILE="${HILBERT_CONFIG_FILE:-${HILBERT_CONFIG_DIR}/station.cfg}" + +CLI_VERSION_ID="\$Id$" + +usage () { + cat << EOF +usage: ${TOOL} [-h] [-p] [-V] [-v | -q] subcommand + +Hilbert - client part for Linux systems + +positional arguments: + subcommand: + init [] init station based on given or installed configuration + list_applications list of supported applications + app_change change the currently running top application to specified + start start Hilbert on the system + stop stop Hilbert on the system + shutdown shut down the system + +optional arguments: + -h show this help message and exit + -V show version info and exit + -v increase verbosity + -q decrease verbosity +EOF +} + +# -p turn on pedantic mode +# dm_start Start a VM using docker-machine -> separate script!!! + + +version () { + cat << EOF +This tool: [$0] +Version: [${CLI_VERSION_ID}] +Lockfile: [$(ls -l ${LOCKFILE})] + +Workdir: [${PWD}] +Config Dir: [${HILBERT_CONFIG_DIR}] +Config file: [$(readlink -f ${HILBERT_CONFIG_FILE})] +All Configs: [$(sh -c "cd ${HILBERT_CONFIG_DIR}/ && ls *.cfg | xargs")] + +Host: [$(hostname)] +System: [$(uname -a)] +EOF + + DEBUG "${HILBERT_CONFIG_FILE}: +[" + cat "${HILBERT_CONFIG_FILE}" # TODO: indent the file contents for pretty-printing? + DEBUG " +]" +} +DEBUG "Input args: ($@)" + + +while getopts ":hqvpV" opt; do + case ${opt} in + h ) + usage + exit 0 + ;; + V ) + version + exit 0 + ;; + p ) + DEBUG "Turned-on the pedantic mode!(?)" + set -e # TODO: test this!! 
+ ;; + v ) + DEBUG "Turning on tracing of bash command execution + verbosity..." + set -v + set -x + ;; + q ) + DEBUG "Turning off tracing of bash command execution + verbosity..." + set +v + set +x + ;; + \? ) + ERROR "Invalid Option: -$OPTARG" + exit 1 + ;; + esac +done + + + +if [[ ! -d "${HILBERT_CONFIG_DIR}" ]]; then + WARNING "Configuration directory '${HILBERT_CONFIG_DIR}' is missing!" + mkdir -p "${HILBERT_CONFIG_DIR}" +fi + +if [[ ! -r "${HILBERT_CONFIG_DIR}" ]]; then + WARNING "Configuration directory '${HILBERT_CONFIG_DIR}' is unreadable!" + chmod u+rwx "${HILBERT_CONFIG_DIR}" +fi + +if [[ ! -d "${HILBERT_CONFIG_DIR}" ]]; then + ERROR "Configuration directory '${HILBERT_CONFIG_DIR}' is missing!" + exit 1 +fi + +if [[ ! -r "${HILBERT_CONFIG_DIR}" ]]; then + ERROR "Configuration directory '${HILBERT_CONFIG_DIR}' is unreadable!" + exit 1 +fi + +DEBUG "Configuration directory '${HILBERT_CONFIG_DIR}' exists and is readable..." + +if [[ ! -r "${HILBERT_CONFIG_FILE}" ]]; then + WARNING "Station Configuration file '${HILBERT_CONFIG_FILE}' is unreadable!" +# exit 1 +else + DEBUG "Station Configuration file '${HILBERT_CONFIG_FILE}' exists and is readable..." +fi + + + +cmd_install_station_config() { + subcommand="prepare(install_station_config)" + arg=$1; shift + + if [[ -z "${arg}" ]]; then + ERROR "Wrong argument '${arg}' to '${subcommand}'!" + usage + exit 1 + fi + + if [[ ! -r "${arg}" ]]; then + ERROR "New configuration file '${arg}' is not readable!" + usage + exit 1 + fi + + # TODO: what about further resources? Maybe a .tar.gz and unpack it into ${new_cfg_dir}? + + # NOTE: New configuration directory + # TODO: make sure there is no such dir yet! + new_cfg_dir=$(mktemp -d --tmpdir=${HILBERT_CONFIG_DIR} "`basename ${arg}`.XXXXXXXXXX") + DEBUG "new_cfg_dir: ${new_cfg_dir}" + + mkdir -p "${new_cfg_dir}" + + # NOTE: take over the deployed station configuration file! 
+ DEBUG "Moving ${arg} => ${new_cfg_dir}/station.cfg" + mv "${arg}" "${new_cfg_dir}/station.cfg" + + ALL_CONFIG_DIR="${HILBERT_CONFIG_DIR}/configs" + + # What about the rest of resources? + if [[ -L "${ALL_CONFIG_DIR}" ]]; then + DEBUG "${ALL_CONFIG_DIR} -> $(readlink -f ${ALL_CONFIG_DIR})" + + # TODO: remove older config? + unlink "${ALL_CONFIG_DIR}" + fi + + # NOTE: atomic configuration update! + # TODO: FIXME: full path in symbolic link => may interfere when volume-mounted... + # use basename ${new_cfg_dir}??? instead? + ln -sf "${new_cfg_dir}" "${ALL_CONFIG_DIR}" + + if [[ ! -L "${ALL_CONFIG_DIR}" ]]; then + WARNING "${ALL_CONFIG_DIR} is not a link!" + fi +} + +cmd_init () { + subcommand="init" + arg=$1; shift + + if [[ -n "${arg}" ]]; then + cmd_install_station_config "${arg}" + fi + + if [[ ! -x "${HILBERT_CONFIG_DIR}/prepare.sh" ]]; then + ERROR "'${HILBERT_CONFIG_DIR}/prepare.sh' is not executable!" + exit 1 + else + exec "${HILBERT_CONFIG_DIR}/prepare.sh" + fi +} + +cmd_app_change () { + subcommand="app_change" + arg=$1; shift + + if [[ -z ${arg} ]]; then + ERROR "Wrong argument '${arg}' to '${subcommand}'!" + usage + exit 1 + fi + + # TODO: read configuration. Check whether arg is a valid application + + if [[ ! -x "${HILBERT_CONFIG_DIR}/topswitch.sh" ]]; then + ERROR "'${HILBERT_CONFIG_DIR}/topswitch.sh' is not executable!" + exit 1 + else + exec "${HILBERT_CONFIG_DIR}/topswitch.sh" "${arg}" + fi +} + +cmd_default () { + if [[ ! -x "${HILBERT_CONFIG_DIR}/default.sh" ]]; then + ERROR "'${HILBERT_CONFIG_DIR}/default.sh' is not executable!" + exit 1 + else + exec "${HILBERT_CONFIG_DIR}/default.sh" + fi +} + +cmd_list_applications () { + subcommand="start" + + # TODO: read config and handle the following: variables! + source "${HILBERT_CONFIG_FILE}" # only: key="...value..."! 
+ + for app in ${possible_apps}; do + echo " - ${app}" + done + + DEBUG "Default application: ${default_app}" +} + +cmd_start () { + subcommand="start" + + # TODO: read config and handle the following: variables! + source "${HILBERT_CONFIG_FILE}" # only: key="...value..."! + + +# if [[ "${hilbert_autostart}" != "True" ]]; then +# DEBUG "hilbert_autostart: ${hilbert_autostart} (True/False)" +# DEBUG "hilbert_autostart_delay: ${hilbert_autostart_delay} (positive int)" +# +# INFO "Auto-starting is not enabled in ${HILBERT_CONFIG_FILE}" +# DEBUG "Will NOT start Hilbert!.." +# exit 0 +# fi + +# DEBUG "Sleeping for [${hilbert_autostart_delay}] sec (due to 'hilbert_autostart_delay' from '${HILBERT_CONFIG_FILE}')..." +# sleep "${hilbert_autostart_delay}" + + cmd_default +} + +cmd_finishall () { + subcommand="stop(finishall)" + + if [[ ! -x "${HILBERT_CONFIG_DIR}/finishall.sh" ]]; then + WARNING "'${HILBERT_CONFIG_DIR}/finishall.sh' is not executable!" + else + "${HILBERT_CONFIG_DIR}/finishall.sh" + fi +} + +cmd_stop () { + subcommand="stop" + # TODO: NOTE: any arguments? + + cmd_finishall +} + +cmd_shutdown () { + subcommand="shutdown" + arg=$@ + + DEBUG "Shutting this system down... Arguments: [${arg}]" # -h now? + shutdown ${arg} || sudo -n -P shutdown ${arg} +} + +shift $((OPTIND -1)) +subcommand=$1; shift + +DEBUG "Subcommand to handle: '$subcommand'" + +case "$subcommand" in + list_applications) + start_locking + cmd_list_applications "$@" # TODO: arguments? + exit 0 + ;; + + init) # |prepare + start_locking + cmd_init "$@" + exit 0 + ;; + app_change) # app_switch| + start_locking + cmd_app_change "$@" + exit 0 + ;; + stop) + start_locking + cmd_stop "$@" # TODO: arguments? + exit 0 + ;; + shutdown) + start_locking + cmd_shutdown "$@" # TODO: arguments? + ;; + start) + start_locking + cmd_start "$@" # TODO: arguments? + ;; + + dm_start) + ERROR "Please use a separate script for starting a VM via docker-machine!" 
+ exit 2 + ;; + + cmd_*) # hidden subcommand-s + start_locking # NOTE: Just to be sure!.. + ${subcommand} "$@" + + DEBUG "Script successfully handled hidden subcommand: [${subcommand}]!" + exit 0 + ;; +esac + +if [[ -z "${subcommand}" ]]; then + usage + DEBUG "Script successfully finished without handling any subcommands..." + exit 0 +else + ERROR "Invalid sub-command: '$subcommand'" + exit 1 +fi + + + +exit 255 + +############### Separate script for the Server system... + +cmd_dm_start () { + subcommand="dm_start" + arg=$1; shift + + if [[ -z ${arg} ]]; then + ERROR "Wrong argument '${arg}' to '${subcommand}'!" + usage + exit 1 + fi + + DM="${DM:-docker-machine}" # TODO: check if ${DM} exists and is an executable! + DEBUG "executing: [${DM} start \"${arg}\"]" + exec ${DM} start "${arg}" +} + +# dm_start) +# # User should be able to start multiple VMs in parallel... +# cmd_dm_start "$@" +# ;; + + diff --git a/tools/hilbert.py b/tools/hilbert.py new file mode 100755 index 0000000..fc63b1a --- /dev/null +++ b/tools/hilbert.py @@ -0,0 +1,674 @@ +#! 
/usr/bin/env python + +# -*- coding: utf-8 -*- +# encoding: utf-8 +# coding: utf-8 + +# PYTHON_ARGCOMPLETE_OK # NOQA + +from __future__ import absolute_import, print_function, unicode_literals +import sys +from os import path +import argparse # NOQA +import logging + +DIR=path.dirname(path.dirname(path.abspath(__file__))) + +sys.path.append(DIR) +sys.path.append(path.join(DIR, 'config')) + +from helpers import * +from hilbert_cli_config import * +from subcmdparser import * +#from config.hilbert_cli_config import * +#from config.helpers import * +#from config.subcmdparser import * + +# datefmt='%Y.%m.%d %I:%M:%S %p' +logging.basicConfig(format='%(levelname)s [%(filename)s:%(lineno)d]: %(message)s') +# %(name)s Name of the logger (logging channel) +# %(levelno)s Numeric logging level for the message (DEBUG, INFO, WARNING, ERROR, CRITICAL) +# %(levelname)s Text logging level for the message ("DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL") +# %(pathname)s Full pathname of the source file where the logging call was issued (if available) +# %(filename)s Filename portion of pathname +# %(module)s Module (name portion of filename) +# %(lineno)d Source line number where the logging call was issued (if available) +# %(funcName)s Function name +# %(created)f Time when the LogRecord was created (time.time() return value) +# %(asctime)s Textual time when the LogRecord was created +# %(msecs)d Millisecond portion of the creation time +# %(relativeCreated)d Time in milliseconds when the LogRecord was created, +# relative to the time the logging module was loaded +# (typically at application startup time) +# %(thread)d Thread ID (if available) +# %(threadName)s Thread name (if available) +# %(process)d Process ID (if available) +# %(message)s The result of record.getMessage(), computed just as the record is emitted + +log = logging.getLogger(__name__) # + +__CLI_VERSION_ID = "$Id$" + + +#import traceback +#def main_exception_handler(type, value, tb): +# log.exception("Uncaught 
exception! Type: {0}, Value: {1}, TB: {2}".format(type, value, traceback.format_tb(tb))) +##sys.excepthook = main_exception_handler # Install exception handler + +@subcmd('cfg_verify', help='verify the correctness of Hilbert Configuration .YAML file') +def cmd_verify(parser, context, args): + log.debug("Running '{}'".format('cfg_verify')) + + # TODO: optional due to default value? + parser.add_argument('configfile', help="input .YAML file, default: 'Hilbert.yml'", nargs='?') + + args = parser.parse_args(args) + cfg = input_handler(parser, vars(context), args) + + assert cfg is not None + + log.debug("Done") + return args + + +def input_handler(parser, ctx, args): + global INPUT_DIRNAME + + fn = None + df = None + + args = vars(args) + + if 'configfile' in args: + fn = args['configfile'] + log.debug("Input YAML file specified: {}".format(fn)) + + if 'configdump' in args: + df = args['configdump'] + log.debug("Input dump file specified: {}".format(df)) + + if (fn is None) and (df is None): + fn = 'Hilbert.yml' + log.warning("Missing input file specification: using default '{}'!".format(fn)) + + if (fn is not None) and (df is not None): + log.error("Input file specification clashes with the input dump specification: specify a single input source!") + exit(1) + + if fn is not None: + if not URI(None).validate(fn): + log.error("Wrong file specification: '{}'".format(fn)) + exit(1) + log.info("Input file: '{}'".format(fn)) + + if df is not None: + if not URI(None).validate(fn): + log.error("Wrong dump file specification: '{}'".format(df)) + exit(1) + log.info("Input dump file: '{}'".format(df)) + + if fn is not None: + INPUT_DIRNAME = os.path.abspath(os.path.dirname(fn)) + else: + assert df is not None + INPUT_DIRNAME = os.path.abspath(os.path.dirname(df)) + + cfg = None + + if fn is not None: + log.info("Loading '{}'...".format(fn)) + try: + yml = load_yaml_file(fn) + log.info("Input file is a correct YAML!") + + log.info("Data Validation/Parsing: ") + + 
os.chdir(INPUT_DIRNAME) # NOTE: references relative to input file's location! + cfg = parse_hilbert(yml) # NOTE: checks that this is a proper dictionary... + except: + log.exception("ERROR: wrong input file: '{}'!".format(fn)) + exit(1) + else: + log.info("Loading dump '{}'...".format(df)) + try: + cfg = pickle_load(df) + log.info("Input dump file is valid!") + except: + log.exception("Wrong input dump file: '{}'!".format(df)) + exit(1) + + if cfg is None: + log.error("Could not get the configuration!") + exit(1) + + assert isinstance(cfg, Hilbert) + log.info("Configuration is OK!") + + return cfg + + +############################################################################### +def output_handler(parser, ctx, args): + global PEDANTIC + args = vars(args) + + od = None + if 'outputdump' in args: + od = args['outputdump'] + log.debug("Specified output dump file: {}".format(od)) + assert od is not None + + f = URI(None) + if f.validate(od): + if os.path.exists(f.get_data()): # TODO: testme! + if not PEDANTIC: + log.warning("Output dump file: '{}' already exists! Will be overwritten!".format(od)) + else: + log.warning("Output dump file: '{}' already exists! Cannot overwrite it in PEDANTIC mode!".format(od)) + od = None + else: # TODO: testme! + log.error("Wrong output file: '{}'".format(od)) + od = None + return od + + +def cmd_list(parser, context, args, obj): + cfg = input_handler(parser, vars(context), args) + return cfg.query(obj) + + +@subcmd('cfg_query', help='query some part of configuration. possibly dump it to a file') +def cmd_query(parser, context, args): + log.debug("Running '{}'" . 
format('cfg_query')) + + parser.add_argument('-o', '--object', required=False, default='all', + help="specify the object in the config (default: 'all')") + parser.add_argument('-od', '--outputdump', default=argparse.SUPPRESS, + help="specify output dump file") + + group = parser.add_mutually_exclusive_group(required=False) + + group.add_argument('--configfile', required=False, + help="specify input .YAML file (default: 'Hilbert.yml')") # NOTE: default! + group.add_argument('--configdump', required=False, + help="specify input dump file") + + args = parser.parse_args(args) + + _args = vars(args) + + obj = 'all' + if 'object' in _args: + obj = _args['object'] + + try: + log.info("Querring object: '{}'... " . format(obj)) + obj = cmd_list(parser, context, args, obj) + except: + log.exception("Sorry cannot query '{}' yet!" . format(obj)) + exit(1) + + assert obj is not None + + if isinstance(obj, BaseValidator): + print(yaml_dump(obj.data_dump())) + else: + print(yaml_dump(obj)) + + od = output_handler(parser, vars(context), args) + + if od is not None: + log.info("Writing the configuration into '{}'...".format(od)) + pickle_dump(od, obj) + + log.debug("Done") + return args + + +@subcmd('list_applications', help='list application IDs') +def cmd_list_applications(parser, context, args): + log.debug("Running '{}'" . 
format('list_applications')) + + group = parser.add_mutually_exclusive_group() + + group.add_argument('--configfile', required=False, + help="specify input .YAML file (default: 'Hilbert.yml')") + group.add_argument('--configdump', required=False, + help="specify input dump file") + + args = parser.parse_args(args) + + log.debug("Listing all Application ID...") + + + obj = None + try: + obj = cmd_list(parser, context, args, 'Applications/keys') + except: + log.exception("Sorry could not get the list of '{}' from the input file!".format('applications')) + exit(1) + + assert obj is not None + + if isinstance(obj, BaseValidator): + print(yaml_dump(obj.data_dump())) + else: + print(yaml_dump(obj)) + + log.debug("Done") + return args + + + +@subcmd('list_stations', help='list station IDs') +def cmd_list_stations(parser, context, args): + log.debug("Running '{}'" . format('list_stations')) + + group = parser.add_mutually_exclusive_group() + + group.add_argument('--configfile', required=False, + help="specify input .YAML file (default: 'Hilbert.yml')") + group.add_argument('--configdump', required=False, + help="specify input dump file") + + args = parser.parse_args(args) + + log.debug("Listing all Station ID...") + + obj = None + try: + obj = cmd_list(parser, context, args, 'Stations/keys') + except: + log.exception("Sorry could not get the list of '{}' from the input file!".format('stations')) + exit(1) + + assert obj is not None + + if isinstance(obj, BaseValidator): + print(yaml_dump(obj.data_dump())) + else: + print(yaml_dump(obj)) + + log.debug("Done") + return args + + + +@subcmd('list_profiles', help='list profile IDs') +def cmd_list_profiles(parser, context, args): + log.debug("Running '{}'" . 
format('list_profiles')) + + group = parser.add_mutually_exclusive_group() + + group.add_argument('--configfile', required=False, + help="specify input .YAML file (default: 'Hilbert.yml')") + group.add_argument('--configdump', required=False, + help="specify input dump file") + + args = parser.parse_args(args) + + log.debug("Listing all Profile ID...") + + obj = None + try: + obj = cmd_list(parser, context, args, 'Profiles/keys') + except: + log.exception("Sorry could not get the list of '{}' from the input file!".format('profiles')) + exit(1) + + assert obj is not None + + if isinstance(obj, BaseValidator): + print(yaml_dump(obj.data_dump())) + else: + print(yaml_dump(obj)) + + log.debug("Done") + return args + + +@subcmd('list_groups', help='list (named) group IDs') +def cmd_list_groups(parser, context, args): + log.debug("Running '{}'" . format('list_groups')) + + group = parser.add_mutually_exclusive_group() + + group.add_argument('--configfile', required=False, + help="specify input .YAML file (default: 'Hilbert.yml')") + group.add_argument('--configdump', required=False, + help="specify input dump file") + + args = parser.parse_args(args) + + log.debug("Listing all Group ID...") + + obj = None + try: + obj = cmd_list(parser, context, args, 'Groups/keys') + except: + log.exception("Sorry could not get the list of '{}' from the input file!".format('groups')) + exit(1) + + assert obj is not None + + if isinstance(obj, BaseValidator): + print(yaml_dump(obj.data_dump())) + else: + print(yaml_dump(obj)) + + log.debug("Done") + return args + + +@subcmd('list_services', help='list service IDs') +def cmd_list_services(parser, context, args): + log.debug("Running '{}'" . 
format('list_services')) + + group = parser.add_mutually_exclusive_group() + + group.add_argument('--configfile', required=False, + help="specify input .YAML file (default: 'Hilbert.yml')") + group.add_argument('--configdump', required=False, + help="specify input dump file") + + args = parser.parse_args(args) + + log.debug("Listing all Service ID...") + + obj = None + try: + obj = cmd_list(parser, context, args, 'Services/keys') + except: + log.exception("Sorry could not get the list of '{}' from the input file!".format('services')) + exit(1) + + assert obj is not None + + if isinstance(obj, BaseValidator): + print(yaml_dump(obj.data_dump())) + else: + print(yaml_dump(obj)) + + log.debug("Done") + return args + +# NOTE: just a helper ATM +def cmd_action(parser, context, args, Action=None, appIdRequired=False): + args = parser.parse_args(args) + _args = vars(args) + + action = Action + if action is None: + assert 'Action' in _args + action = _args['Action'] + + log.debug("Action: '%s'", action) + + assert action is not None + assert action != '' + + + # stationId, + assert 'StationID' in _args + stationId = _args['StationID'] + log.debug("Input StationID: '%s'", stationId) + stationId = StationID.parse(stationId, parent=None, parsed_result_is_data=True) + log.debug("Checked StationID: '%s'", stationId) + + action_args = None + + if appIdRequired: + applicationID = _args.get('ApplicationID', None) + assert applicationID is not None + log.debug("Input ApplicationID: '%s'", applicationID) + applicationID = ApplicationID.parse(applicationID, parent=None, parsed_result_is_data=True) + log.debug("Checked ApplicationID: '%s'", applicationID) + action_args = applicationID + + elif 'action_args' in _args: + action_args = _args.get('action_args', None) + + + + stations = None + log.debug("Validating given StationID: '%s'...", stationId) + try: + log.debug("Querying all stations in the Configuration...") + stations = cmd_list(parser, context, args, 'Stations/all') + except: + 
log.exception("Sorry could not get the list of '{}' from the input file!".format('stations')) + exit(1) + + assert stations is not None + + if stationId not in stations.get_data(): + log.error("Invalid StationID (%s)!", stationId) + exit(1) + + station = stations.get_data()[stationId] + assert station is not None + + log.debug("StationID is valid according to the Configuration!") + log.debug("Running action: '{0} {1}' on station '{2}'" .format(action, str(action_args), stationId)) + try: + station.run_action(action, action_args) # NOTE: temporary API for now + except: + log.exception("Could not run '{0} {1}' on station '{2}'" . format(action, str(action_args), stationId)) + exit(1) + return args + +@subcmd('start', help='poweron a station') +def cmd_start(parser, context, args): + action = 'start' + log.debug("Running 'cmd_{}'" . format(action)) + + group = parser.add_mutually_exclusive_group() + + group.add_argument('--configfile', required=False, + help="specify input .YAML file (default: 'Hilbert.yml')") + group.add_argument('--configdump', required=False, + help="specify input dump file") + + parser.add_argument('StationID', help="station to power-on via network") +# parser.add_argument('action_args', nargs='?', help="optional arguments for poweron", metavar='args') + + cmd_action(parser, context, args, Action=action, appIdRequired=False) + + log.debug("Done") + return args + + +@subcmd('stop', help='shutdown a station') +def cmd_stop(parser, context, args): + action = 'stop' + log.debug("Running 'cmd_{}'" . 
format(action)) + + group = parser.add_mutually_exclusive_group() + + group.add_argument('--configfile', required=False, + help="specify input .YAML file (default: 'Hilbert.yml')") + group.add_argument('--configdump', required=False, + help="specify input dump file") + + parser.add_argument('StationID', help="specify the station") +# parser.add_argument('action_args', nargs='?', help="optional arguments for shutdown", metavar='args') + + cmd_action(parser, context, args, Action=action, appIdRequired=False) + + log.debug("Done") + return args + + +@subcmd('cfg_deploy', help="deploy station's local configuration to corresponding host") +def cmd_cfg_deploy(parser, context, args): + action = 'cfg_deploy' + log.debug("Running 'cmd_{}'" . format(action)) + + group = parser.add_mutually_exclusive_group() + + group.add_argument('--configfile', required=False, + help="specify input .YAML file (default: 'Hilbert.yml')") + group.add_argument('--configdump', required=False, + help="specify input dump file") + + parser.add_argument('StationID', help="specify the station") +# parser.add_argument('action_args', nargs='?', help="optional arguments for deploy", metavar='args') + + cmd_action(parser, context, args, Action=action, appIdRequired=False) + + log.debug("Done") + return args + + +# @subcmd('app_start', help='start an application on a station') +def cmd_app_start(parser, context, args): + action = 'app_start' + log.debug("Running 'cmd_{}'" . 
format(action)) + + group = parser.add_mutually_exclusive_group() + + group.add_argument('--configfile', required=False, + help="specify input .YAML file (default: 'Hilbert.yml')") + group.add_argument('--configdump', required=False, + help="specify input dump file") + + parser.add_argument('StationID', help="specify the station") + parser.add_argument('ApplicationID', help="specify the application to start") +# parser.add_argument('action_args', nargs='?', help="optional argument for start: ApplicationID/ServiceID ", metavar='id') + + cmd_action(parser, context, args, Action=action, appIdRequired=True) + + log.debug("Done") + return args + +# @subcmd('app_stop', help='stop the current application on a station') +def cmd_app_stop(parser, context, args): + action = 'app_stop' + log.debug("Running 'cmd_{}'" . format(action)) + + group = parser.add_mutually_exclusive_group() + + group.add_argument('--configfile', required=False, + help="specify input .YAML file (default: 'Hilbert.yml')") + group.add_argument('--configdump', required=False, + help="specify input dump file") + + parser.add_argument('StationID', help="specify the station") +# parser.add_argument('ApplicationID', help="specify the application to stop") +# parser.add_argument('action_args', nargs='?', +# help="optional argument for finish: ApplicationID/ServiceID ", metavar='id') + + cmd_action(parser, context, args, Action=action, appIdRequired=True) + + log.debug("Done") + return args + + +@subcmd('app_change', help="change station's top application") +def cmd_app_change(parser, context, args): + action = 'app_change' + log.debug("Running 'cmd_{}'" . 
format(action)) + + group = parser.add_mutually_exclusive_group() + + group.add_argument('--configfile', required=False, + help="specify input .YAML file (default: 'Hilbert.yml')") + group.add_argument('--configdump', required=False, + help="specify input dump file") + + parser.add_argument('StationID', help="specify the station") + parser.add_argument('ApplicationID', help="new top Application") +# parser.add_argument('other_args', nargs='?', help="optional arguments for 'app_change'", metavar='args') + + cmd_action(parser, context, args, Action=action, appIdRequired=True) + + log.debug("Done") + return args + + +# @subcmd('run_action', help='run specified action on given station with given arguments...') +def cmd_run_action(parser, context, args): + log.debug("Running 'cmd_{}'" . format('run_action')) + + group = parser.add_mutually_exclusive_group() + group.add_argument('--configfile', required=False, + help="specify input .YAML file (default: 'Hilbert.yml')") + group.add_argument('--configdump', required=False, + help="specify input dump file") + + parser.add_argument('Action', help="specify the action") + parser.add_argument('StationID', help="specify the station") + parser.add_argument('action_args', nargs='?', help="optional arguments for the action", metavar='args') + + cmd_action(parser, context, args, Action=None, appIdRequired=False) + + log.debug("Done") + return args + +class PedanticModeAction(argparse.Action): + def __init__(self, option_strings, *args, **kwargs): + super(PedanticModeAction, self).__init__(option_strings=option_strings, *args, **kwargs) + + def __call__(self, parser, args, values, option_string=None): + global PEDANTIC + PEDANTIC = True + if PEDANTIC: + log.debug("PEDANTIC mode is ON!") +# setattr(args, self.dest, values) + +def _version(): + import platform + import dill + import ruamel.yaml as yaml + import semantic_version + + log.debug("Running '--{}'".format('version')) + + log.debug("Python (platform) version: 
{}".format(platform.python_version())) + log.debug("ruamel.yaml version: {}".format(yaml.__version__)) + log.debug("dill version: {}".format(dill.__version__)) + log.debug("logging version: {}".format(logging.__version__)) + log.debug("semantic_version version: {}".format(semantic_version.__version__)) + + print("Hilbert Configuration API: {}".format(Hilbert(None).get_api_version())) + print("Logging Level: {}".format(logging.getLevelName(logging.getLogger().level))) + + log.debug("Done") + + +class ListVersionsAction(argparse.Action): + def __init__(self, option_strings, *args, **kwargs): + super(ListVersionsAction, self).__init__(option_strings=option_strings, *args, **kwargs) + + def __call__(self, parser, args, values, option_string=None): + _version() + parser.exit(status=0) +# setattr(args, self.dest, values) + + +def main(): + handler = SubCommandHandler(use_subcommand_help=True, enable_autocompletion=True, + prog='hilbert', + description="Hilbert - server tool: loads configuration and does something using it") + + handler.add_argument('-p', '--pedantic', action=PedanticModeAction, + nargs=0, default=argparse.SUPPRESS, required=False, type=None, metavar=None, + help="turn on pedantic mode") + + handler.add_argument('-V', '--version', action=ListVersionsAction, + nargs=0, default=argparse.SUPPRESS, required=False, type=None, metavar=None, + help="show %(prog)s's version and exit") + + _argv = sys.argv[1:] + + # NOTE: show help by if not arguments given + if len(_argv) == 0: + log.debug("No command arguments given => Showing usage help!") + _argv = ['-h'] +# handler.print_help() + + args = handler.run(_argv) + handler.exit(status=0) + +if __name__ == "__main__": + main() diff --git a/tox.ini b/tox.ini new file mode 100644 index 0000000..6329e28 --- /dev/null +++ b/tox.ini @@ -0,0 +1,20 @@ +[tox] +# envlist = pep8,py35,py27,py34,py33,py26,pypy,jython +#envlist = py35,py27,py34,py33,py26,pypy,jython +envlist = py34,py27 + +[testenv] +commands = + /bin/bash -c 
'py.test -v -l --tb=auto --full-trace --color=auto tests/test_*.py' +deps = + dill>=0.2.5 + semantic_version>=2.6.0 + argparse>=1.4.0 + argcomplete>=1.6.0 + ruamel.yaml>=0.12.15 + py27: logging + pytest + +[pytest] +norecursedirs = tests/data +