From 3173ffe82b971e348a79642747cb0cfc69480a9e Mon Sep 17 00:00:00 2001 From: noah <234082230@qq.com> Date: Mon, 29 Aug 2016 10:54:35 +0800 Subject: [PATCH 01/25] [ADD] connector_dns [FIX] format [IMP] format [IMP] unit test [IMP] unit test --- connector_dns/README.rst | 82 +++++++++++ connector_dns/__init__.py | 10 ++ connector_dns/__openerp__.py | 18 +++ connector_dns/backend.py | 6 + connector_dns/connector.py | 55 ++++++++ connector_dns/dns_menu.xml | 26 ++++ connector_dns/dns_view.xml | 162 ++++++++++++++++++++++ connector_dns/models/__init__.py | 4 + connector_dns/models/dns.py | 107 ++++++++++++++ connector_dns/tests/__init__.py | 6 + connector_dns/tests/common.py | 64 +++++++++ connector_dns/tests/test_backend.py | 66 +++++++++ connector_dns/tests/test_binder.py | 57 ++++++++ connector_dns/unit/__init__.py | 6 + connector_dns/unit/backend_adapter.py | 97 +++++++++++++ connector_dns/unit/binder.py | 87 ++++++++++++ connector_dns/unit/export_synchronizer.py | 57 ++++++++ 17 files changed, 910 insertions(+) create mode 100755 connector_dns/README.rst create mode 100755 connector_dns/__init__.py create mode 100755 connector_dns/__openerp__.py create mode 100755 connector_dns/backend.py create mode 100755 connector_dns/connector.py create mode 100755 connector_dns/dns_menu.xml create mode 100755 connector_dns/dns_view.xml create mode 100644 connector_dns/models/__init__.py create mode 100755 connector_dns/models/dns.py create mode 100644 connector_dns/tests/__init__.py create mode 100644 connector_dns/tests/common.py create mode 100644 connector_dns/tests/test_backend.py create mode 100644 connector_dns/tests/test_binder.py create mode 100755 connector_dns/unit/__init__.py create mode 100755 connector_dns/unit/backend_adapter.py create mode 100755 connector_dns/unit/binder.py create mode 100755 connector_dns/unit/export_synchronizer.py diff --git a/connector_dns/README.rst b/connector_dns/README.rst new file mode 100755 index 0000000..db1e7b1 --- /dev/null +++ b/connector_dns/README.rst @@ -0,0 +1,82 @@ +.. image:: https://img.shields.io/badge/licence-AGPL--3-blue.svg + :target: http://www.gnu.org/licenses/agpl-3.0-standalone.html + :alt: License: AGPL-3 + +============= +Connector DNS +============= + +This module aims to allows to manage your DNS domain through Odoo. + +Installation +============ + +To install this module, you need to: + + * have basic modules installed (connector) + +Configuration +============= + +To configure this module, you need to: + +#. Go to ... + +.. figure:: path/to/local/image.png + :alt: alternative description + :width: 600 px + +Usage +===== + +To use this module, you need to: + +#. Go to ... + +.. image:: https://odoo-community.org/website/image/ir.attachment/5784_f2813bd/datas + :alt: Try me on Runbot + :target: https://runbot.odoo-community.org/runbot/{repo_id}/{branch} + +Known issues / Roadmap +====================== + +* TBD + +Bug Tracker +=========== + +Bugs are tracked on `GitHub Issues +`_. In case of trouble, please +check there if your issue has already been reported. If you spotted it first, +help us smashing it by providing a detailed and welcomed feedback. + +Credits +======= + +Images +------ + +* Odoo Community Association: `Icon `_. + +Contributors +------------ + +* Eric Caudal +* Noah Wang +* Liu Lixia +* Augustin Cisterne-Kaas + +Maintainer +---------- + +.. image:: https://odoo-community.org/logo.png + :alt: Odoo Community Association + :target: https://odoo-community.org + +This module is maintained by the OCA. 
+ +OCA, or the Odoo Community Association, is a nonprofit organization whose +mission is to support the collaborative development of Odoo features and +promote its widespread use. + +To contribute to this module, please visit https://odoo-community.org. diff --git a/connector_dns/__init__.py b/connector_dns/__init__.py new file mode 100755 index 0000000..09f5c98 --- /dev/null +++ b/connector_dns/__init__.py @@ -0,0 +1,10 @@ +# -*- coding: utf-8 -*- +# Copyright 2015 Elico Corp +# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl). + +from . import backend +from . import connector +from .models import dns +from . import tests +from . import unit + diff --git a/connector_dns/__openerp__.py b/connector_dns/__openerp__.py new file mode 100755 index 0000000..3167a34 --- /dev/null +++ b/connector_dns/__openerp__.py @@ -0,0 +1,18 @@ +# -*- coding: utf-8 -*- +# Copyright 2015 Elico Corp +# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl). +{ + 'name': 'DNS connector', + 'version': '8.0.1.0.0', + 'category': 'Connector', + 'depends': ['connector'], + 'author': 'Elico Corp,Odoo Community Association (OCA)', + 'license': 'AGPL-3', + 'website': 'https://www.elico-corp.com', + 'data': [ + 'dns_view.xml', + 'dns_menu.xml' + ], + 'installable': True, + 'application': False +} diff --git a/connector_dns/backend.py b/connector_dns/backend.py new file mode 100755 index 0000000..76c3868 --- /dev/null +++ b/connector_dns/backend.py @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# Copyright 2015 Elico Corp +# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl). +import openerp.addons.connector.backend as backend + +dns = backend.Backend('dns') diff --git a/connector_dns/connector.py b/connector_dns/connector.py new file mode 100755 index 0000000..fb1c97c --- /dev/null +++ b/connector_dns/connector.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2015 Elico Corp +# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl). +from openerp import models, fields +from openerp.addons.connector.connector import (Environment) +from openerp.addons.connector.checkpoint import checkpoint + + +def get_environment(session, model_name, backend_id): + """ Create an environment to work with. """ + backend_record = session.browse('dns.backend', backend_id) + env = Environment(backend_record, session, model_name) + return env + + +class DNSBinding(models.AbstractModel): + """ Abstract Model for the Bindigs. + All the models used as bindings between dnspod and OpenERP + (``dnspod.res.partner``, ``dnspod.product.product``, ...) should + ``_inherit`` it. + """ + _name = 'dns.binding' + _inherit = 'external.binding' + _description = 'dns Binding (abstract)' + + dns_backend_id = fields.Many2one( + comodel_name='dns.backend', + String='DNS Backend', + store=True, + ondelete='restrict' + ) + # fields.char because 0 is a valid dnspod ID + dns_id = fields.Char('ID on other software') + # state of the record synchronization with dnspod + state = fields.Selection( + [('draft', 'Draft'), ('done', 'Done'), + ('exception', 'Exception')], 'State', + default="draft", + help='Done when succeed otherwise Exception') + + +def add_checkpoint(session, model_name, record_id, backend_id): + """ Add a row in the model ``connector.checkpoint`` for a record, + meaning it has to be reviewed by a user. 
+ :param session: current session + :type session: :class:`openerp.addons.connector.session.ConnectorSession` + :param model_name: name of the model of the record to be reviewed + :type model_name: str + :param record_id: ID of the record to be reviewed + :type record_id: int + :param backend_id: ID of the dnspod Backend + :type backend_id: int + """ + return checkpoint.add_checkpoint(session, model_name, record_id, + 'dns.backend', backend_id) diff --git a/connector_dns/dns_menu.xml b/connector_dns/dns_menu.xml new file mode 100755 index 0000000..1325c07 --- /dev/null +++ b/connector_dns/dns_menu.xml @@ -0,0 +1,26 @@ + + + + + + + + + + + + diff --git a/connector_dns/dns_view.xml b/connector_dns/dns_view.xml new file mode 100755 index 0000000..6bf4500 --- /dev/null +++ b/connector_dns/dns_view.xml @@ -0,0 +1,162 @@ + + + + + + DNS Backend + dns.backend + ir.actions.act_window + form + tree,form + + + + DNS Backend form + dns.backend + + + + + + + + + DNS Backend form + dns.backend + +
+        <!-- Remaining view markup lost in extraction: the rest of the
+             dns.backend form view, plus the window actions, tree views and
+             form views for dns.domain ("DNS Domain Action", "DNS domain tree",
+             "DNS domain form") and dns.record ("DNS Record Action",
+             "DNS Record tree", "DNS Record form"). -->
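This first commit only ships the abstract framework: the `dns` backend declared in backend.py, the `dns.binding` abstract model in connector.py, and the `DNSAdapter`/`DNSModelBinder` units added further down in the patch. As a rough, hypothetical sketch of how a provider-specific module could plug into it, such a module would register its own adapter on that backend. The `ExampleDNSAdapter` class, the `domain.create` action name and the `_call` body below are illustrative assumptions, not part of this patch:

    # Hypothetical provider adapter (illustration only, not part of this patch).
    from openerp.addons.connector_dns.backend import dns
    from openerp.addons.connector_dns.unit.backend_adapter import DNSAdapter


    @dns
    class ExampleDNSAdapter(DNSAdapter):
        """Adapter a concrete provider module might register for dns.domain."""
        _model_name = 'dns.domain'

        def _call(self, action, arguments):
            # A real module would perform the provider API request here, using
            # self.DNS.login and self.DNS.password from the backend record.
            raise NotImplementedError('replace with the provider API call')

        def create(self, data):
            # Return the external identifier so the binder can store it in dns_id.
            return self._call('domain.create', [data])

Registering the class with the `@dns` decorator mirrors what unit/binder.py does for `DNSModelBinder`, so the connector can look the adapter up from the backend record.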
diff --git a/connector_dns/models/__init__.py b/connector_dns/models/__init__.py new file mode 100644 index 0000000..82e3357 --- /dev/null +++ b/connector_dns/models/__init__.py @@ -0,0 +1,4 @@ +# -*- coding: utf-8 -*- +# Copyright 2015 Elico Corp +# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl). +from . import dns diff --git a/connector_dns/models/dns.py b/connector_dns/models/dns.py new file mode 100755 index 0000000..366bb0d --- /dev/null +++ b/connector_dns/models/dns.py @@ -0,0 +1,107 @@ +# -*- coding: utf-8 -*- +# Copyright 2015 Elico Corp +# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl). +from openerp import models, fields, api + + +class DNSBackend(models.Model): + _name = 'dns.backend' + _inherit = 'connector.backend' + _backend_type = 'dns' + + def _select_version(self): + return [] + + login = fields.Char( + string='Login', + help="Provider's login.", + required=True + ) + password = fields.Char( + string='Password', + help="Provider's password.", + required=True + ) + state = fields.Selection( + [('draft', 'Draft'), ('done', 'Done'), + ('exception', 'Exception')], + string='State', + default="draft", + help='"Confirmed" when the domain has been succesfully created.' + ) + version = fields.Selection( + selection='_select_version', + string='Service Provider', + help='DNS service provider', + required=True + ) + + @api.multi + def name_get(self): + res = [] + for backend in self: + res.append((backend.id, '%s (%s)' % (backend.name, backend.login))) + return res + + +class DNSDomain(models.Model): + _name = 'dns.domain' + _inherit = 'dns.binding' + + name = fields.Char( + string='Name', + required=True, + help='Domain name without "www",such as"dnspod.cn"' + ) + record_ids = fields.One2many( + comodel_name='dns.record', + inverse_name='domain_id', + string='Subdomains' + ) + + +class DNSRecord(models.Model): + _name = 'dns.record' + _inherit = 'dns.binding' + + def _line_select_version(self): + return [] + + def _type_select_version(self): + return [] + + name = fields.Char( + string='Sub domain', + help="host record,such as 'www'", + required=True) + domain_id = fields.Many2one( + comodel_name='dns.domain', + string="Domain", + domain="[('state','=','done')]", + ondelete='cascade', + help="Domain which has already confirmed" + ) + type = fields.Selection( + selection='_type_select_version', + string='Record Type' + ) + line = fields.Selection( + selection='_line_select_version', + string='Record Line' + ) + value = fields.Text( + string='Value', + help="such as IP:200.200.200.200", + required=True + ) + mx_priority = fields.Integer( + string='MX priority', + help="scope:1-20", + default=1 + ) + ttl = fields.Integer( + string='TTL', + default=600, + help="scope:1-604800", + required=True + ) diff --git a/connector_dns/tests/__init__.py b/connector_dns/tests/__init__.py new file mode 100644 index 0000000..f78fd74 --- /dev/null +++ b/connector_dns/tests/__init__.py @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# Copyright 2015 Elico Corp +# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl). +from . import common +from . import test_backend +from . 
import test_binder diff --git a/connector_dns/tests/common.py b/connector_dns/tests/common.py new file mode 100644 index 0000000..30034dc --- /dev/null +++ b/connector_dns/tests/common.py @@ -0,0 +1,64 @@ +# -*- coding: utf-8 -*- +# +# +# Authors: Guewen Baconnier +# Copyright 2015 Camptocamp SA +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as +# published by the Free Software Foundation, either version 3 of the +# License, or (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Affero General Public License for more details. +# +# You should have received a copy of the GNU Affero General Public License +# along with this program. If not, see . +# +# + +import importlib +from contextlib import contextmanager + +import mock + + +@contextmanager +def mock_job_delay_to_direct(job_path): + """ Replace the .delay() of a job by a direct call + + job_path is the python path as string, such as:: + + 'openerp.addons.magentoerpconnect.stock_picking.export_picking_done' + + This is a context manager, all the calls made to the job function in + job_path inside the context manager will be executed synchronously. + + .. note:: It uses :meth:`mock.patch` so it has the same pitfall + regarding the python path. If the mock seems to have no + effect, read `Where to patch + `_ + in the mock documentation. + + """ + job_module, job_name = job_path.rsplit('.', 1) + module = importlib.import_module(job_module) + job_func = getattr(module, job_name, None) + assert job_func, "The function %s must exist in %s" % (job_name, + job_module) + + def clean_args_for_func(*args, **kwargs): + # remove the special args reserved to '.delay()' + kwargs.pop('priority', None) + kwargs.pop('eta', None) + kwargs.pop('model_name', None) + kwargs.pop('max_retries', None) + kwargs.pop('description', None) + job_func(*args, **kwargs) + + with mock.patch(job_path) as patched_job: + # call the function directly instead of '.delay()' + patched_job.delay.side_effect = clean_args_for_func + yield patched_job diff --git a/connector_dns/tests/test_backend.py b/connector_dns/tests/test_backend.py new file mode 100644 index 0000000..4cfd53d --- /dev/null +++ b/connector_dns/tests/test_backend.py @@ -0,0 +1,66 @@ +# -*- coding: utf-8 -*- +# Copyright 2015 Elico Corp +# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl). 
+import openerp.tests.common as common +from openerp.addons.connector.backend import Backend +from openerp.addons.connector.session import ConnectorSession +from openerp.addons.connector_dns.unit.binder import DNSModelBinder + + +class TestDNSBackend(common.TransactionCase): + """ + Test DNS Backend + """ + + def setUp(self): + super(TestDNSBackend, self).setUp() + self.service = "dns" + + def test_new_backend(self): + """ Create a backend""" + backend = Backend(self.service) + self.assertEqual(backend.service, self.service) + + def test_parent(self): + """ Bind the backend to a parent backend""" + backend = Backend(self.service) + child_backend = Backend(parent=backend) + self.assertEqual(child_backend.service, backend.service) + + def test_no_service(self): + """ Should raise an error because no service or parent is defined""" + with self.assertRaises(ValueError): + Backend() + + +class test_backend_register(common.TransactionCase): + """ Test registration of classes on the Backend""" + + def setUp(self): + super(test_backend_register, self).setUp() + self.service = 'dns' + self.parent = Backend(self.service) + self.backend = Backend(parent=self.parent) + self.session = ConnectorSession(self.cr, self.uid) + + def test_register_class(self): + class BenderBinder(DNSModelBinder): + _model_name = 'res.users' + + self.backend.register_class(BenderBinder) + ref = self.backend.get_class(DNSModelBinder, + self.session, + 'res.users') + self.assertEqual(ref, BenderBinder) + + def test_register_class_parent(self): + """ It should get the parent's class when no class is defined""" + + @self.parent + class FryBinder(DNSModelBinder): + _model_name = 'res.users' + + ref = self.backend.get_class(DNSModelBinder, + self.session, + 'res.users') + self.assertEqual(ref, FryBinder) diff --git a/connector_dns/tests/test_binder.py b/connector_dns/tests/test_binder.py new file mode 100644 index 0000000..ee39b87 --- /dev/null +++ b/connector_dns/tests/test_binder.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- + +import mock +import openerp +from openerp.addons.connector.backend import Backend +from openerp.addons.connector_dns.unit.binder import DNSModelBinder +from openerp.addons.connector.connector import ConnectorEnvironment +from openerp.addons.connector.session import ConnectorSession +from openerp.tests.common import TransactionCase + + +@openerp.tests.common.at_install(False) +@openerp.tests.common.post_install(True) +class TestDNSModelBinder(TransactionCase): + """ Test the DNS Model binder implementation""" + def setUp(self): + super(TestDNSModelBinder, self).setUp() + + class TestDNSBinder(DNSModelBinder): + """ + we use already existing fields for the binding + """ + _model_name = 'dns.binding' + _external_field = 'ref' + _sync_date_field = 'date' + _backend_field = 'color' + _openerp_field = 'id' + + self.session = ConnectorSession(self.cr, self.uid) + self.backend = Backend('dummy', version='1.0') + backend_record = mock.Mock() + backend_record.id = 1 + backend_record.get_backend.return_value = self.backend + self.connector_env = ConnectorEnvironment( + backend_record, self.session, 'dns.binding') + self.test_dns_binder = TestDNSBinder(self.connector_env) + + def test_binder(self): + """ Small scenario with the default binder """ + dns_model = mock.Mock() + dns_model.id = 0 + dns_model.dns_id = 0 + # bind the main partner to external id = 0 + self.test_dns_binder.bind(0, dns_model.id) + # find the openerp partner bound to external partner 0 + self.test_dns_binder.to_openerp = mock.Mock() + 
self.test_dns_binder.to_openerp.return_value.id = 0 + openerp_id = self.test_dns_binder.to_openerp(0) + self.assertEqual(openerp_id.id, dns_model.id) + openerp_id = self.test_dns_binder.to_openerp(0, unwrap=True) + self.assertEqual(openerp_id.id, dns_model.id) + self.test_dns_binder.to_backend = mock.Mock() + self.test_dns_binder.to_backend.return_value = '0' + external_id = self.test_dns_binder.to_backend(dns_model.id) + self.assertEqual(external_id, '0') + external_id = self.test_dns_binder.to_backend(dns_model.id, wrap=True) + self.assertEqual(external_id, '0') diff --git a/connector_dns/unit/__init__.py b/connector_dns/unit/__init__.py new file mode 100755 index 0000000..2b0da66 --- /dev/null +++ b/connector_dns/unit/__init__.py @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# Copyright 2015 Elico Corp +# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl). +from . import backend_adapter +from . import binder +from . import export_synchronizer diff --git a/connector_dns/unit/backend_adapter.py b/connector_dns/unit/backend_adapter.py new file mode 100755 index 0000000..eb39281 --- /dev/null +++ b/connector_dns/unit/backend_adapter.py @@ -0,0 +1,97 @@ +# -*- coding: utf-8 -*- +# Copyright 2015 Elico Corp +# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl). +import logging +from openerp.addons.connector.unit.backend_adapter import CRUDAdapter + +_logger = logging.getLogger(__name__) + + +recorder = {} + + +def call_to_key(method, arguments): + """ Used to 'freeze' the method and arguments of a call to DNS + so they can be hashable; they will be stored in a dict. + + Used in both the recorder and the tests. + """ + def freeze(arg): + if isinstance(arg, dict): + items = dict((key, freeze(value)) for key, value + in arg.iteritems()) + return frozenset(items.iteritems()) + elif isinstance(arg, list): + return tuple([freeze(item) for item in arg]) + else: + return arg + + new_args = [] + for arg in arguments: + new_args.append(freeze(arg)) + return (method, tuple(new_args)) + + +def record(method, arguments, result): + """ Utility function which can be used to record test data + during synchronisations. Call it from DNSAdapter._call + + Then ``output_recorder`` can be used to write the data recorded + to a file. + """ + recorder[call_to_key(method, arguments)] = result + + +def output_recorder(filename): + import pprint + with open(filename, 'w') as f: + pprint.pprint(recorder, f) + _logger.debug('Recorder written to file %s', filename) + + +class DNSLocation(object): + + def __init__(self, login, password): + self.login = login + self.password = password + + +class DNSAdapter(CRUDAdapter): + """ External Records Adapter for DNS """ + + def __init__(self, environment): + """ + :param environment: current environment (backend, session, ...) 
+ :type environment: :py:class:`connector.connector.Environment` + """ + super(DNSAdapter, self).__init__(environment) + self.DNS = DNSLocation( + self.backend_record.login, self.backend_record.password) + + def search(self, filters=None): + """ Search records according to some criterias + and returns a list of ids """ + raise NotImplementedError + + def read(self, id, attributes=None): + """ Returns the information of a record """ + raise NotImplementedError + + def search_read(self, filters=None): + """ Search records according to some criterias + and returns their information""" + raise NotImplementedError + + def create(self, data): + raise NotImplementedError + + def write(self, data): + """ Update records on the external system """ + raise NotImplementedError + + def delete(self, data): + """ Delete a record on the external system """ + raise NotImplementedError + + def _call(self, action, arguments): + raise NotImplementedError diff --git a/connector_dns/unit/binder.py b/connector_dns/unit/binder.py new file mode 100755 index 0000000..6526ab1 --- /dev/null +++ b/connector_dns/unit/binder.py @@ -0,0 +1,87 @@ +# -*- coding: utf-8 -*- +# Copyright 2015 Elico Corp +# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl). +from datetime import datetime +from openerp.tools import DEFAULT_SERVER_DATETIME_FORMAT +from openerp.addons.connector.connector import Binder +from ..backend import dns + + +@dns +class DNSModelBinder(Binder): + """ + Bindings are done directly on the binding model. + + Binding models are models called ``dns.{normal_model}``, + like ``dns.record`` or ``dns.domain``. + They are ``_inherits`` of the normal models and contains + the DNS ID, the ID of the DNS Backend and the additional + fields belonging to the DNS instance. + """ + _model_name = [ + 'dns.record', + 'dns.domain' + ] + _external_field = 'dns_id' + _backend_field = 'dns_backend_id' + _openerp_field = 'openerp_id' + _sync_date_field = 'sync_date' + + def to_openerp(self, external_id, unwrap=False): + """ Give the OpenERP ID for an external ID + + :param external_id: external ID for which we want the OpenERP ID + :param unwrap: if True, returns the openerp_id of the dns_xx record, + else return the id (binding id) of that record + :return: a record ID, depending on the value of unwrap, + or None if the external_id is not mapped + :rtype: int + """ + binding_ids = self.session.search( + self.model._name, + [(self._external_field, '=', str(external_id)), + (self._backend_field, '=', self.backend_record.id)]) + if not binding_ids: + return None + assert len(binding_ids) == 1, "Several records found: %s" % binding_ids + binding_id = binding_ids[0] + if unwrap: + model_id = self.session.read( + self.model._name, binding_id, [self._openerp_field] + ) + assert model_id + return model_id[self._openerp_field][0] + else: + return binding_id + + def to_backend(self, binding_id): + """ Give the external ID for an OpenERP ID + + :param binding_id: OpenERP ID for which we want the external id + :return: backend identifier of the record + """ + dns_record = self.session.read( + self.model._name, binding_id, [self._external_field] + ) + assert dns_record + return dns_record[self._external_field] + + def bind(self, external_id, binding_id): + """ Create the link between an external ID and an OpenERP ID and + update the last synchronization date. 
+ + :param external_id: External ID to bind + :param binding_id: OpenERP ID to bind + :type binding_id: int + """ + # avoid to trigger the export when we modify the `dns_id` + model = self.model.with_context(connector_no_export=True) + binding = model.browse(binding_id) + now_fmt = datetime.now().strftime(DEFAULT_SERVER_DATETIME_FORMAT) + if external_id: + state = 'done' + else: + state = 'exception' + binding.write({'dns_id': str(external_id), + 'state': state, + 'sync_date': now_fmt}) diff --git a/connector_dns/unit/export_synchronizer.py b/connector_dns/unit/export_synchronizer.py new file mode 100755 index 0000000..4a525bd --- /dev/null +++ b/connector_dns/unit/export_synchronizer.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2015 Elico Corp +# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl). +import logging +from openerp.addons.connector.unit.synchronizer import ExportSynchronizer + + +_logger = logging.getLogger(__name__) + + +""" + +Exporters for DNS. + +In addition to its export job, an exporter has to: + +* check in DNS if the record has been updated more recently than the + last sync date and if yes, delay an import +* call the ``bind`` method of the binder to update the last sync date + +""" + + +class DNSBaseExporter(ExportSynchronizer): + + """ Base exporter for DNS """ + + def __init__(self, environment): + """ + :param environment: current environment (backend, session, ...) + :type environment: :py:class:`connector.connector.Environment` + """ + super(DNSBaseExporter, self).__init__(environment) + self.binding_id = None + self.external_id = None + + def _get_odoo_data(self): + """ Return the raw OpenERP data for ``self.binding_id`` """ + return self.session.browse(self.model._name, self.binding_id) + + def run(self, binding_id, *args, **kwargs): + """ Run the synchronization + + :param binding_id: identifier of the binding record to export + """ + self.binding_id = binding_id + self.binding_record = self._get_openerp_data() + + self.external_id = self.binder.to_backend(self.binding_id) + result = self._run(*args, **kwargs) + + self.binder.bind(self.external_id, self.binding_id) + return result + + def _run(self): + """ Flow of the synchronization, implemented in inherited classes""" + raise NotImplementedError From 3dd02dcb430a902405cf3e93a4f3fdf9150a0946 Mon Sep 17 00:00:00 2001 From: Dave Lasley Date: Tue, 13 Sep 2016 21:27:23 -0700 Subject: [PATCH 02/25] [IMP] connector_dns: Code and test improvements * Separate models by file * Add `import_synchronizer` * Add `export_synchronizer` * Add `delete_synchronizer` * Add `mapper` * Add `fail_date` to abstract bind, remove state * Add `dns.record.type` model and default data * Add external date field handling to binder * Rename `dns.domain` to `dns.zone` * Remove `line` field from `dns.record` * Change inheritance structure, add a `{model_name}.bind` framework * Add security * Code cleanup * Add test coverage * Don't cover test assets * Change bind field to `odoo_id` to ease future migration * Improve ReadMe --- connector_dns/README.rst | 57 +- connector_dns/__init__.py | 3 +- connector_dns/__openerp__.py | 16 +- connector_dns/connector.py | 48 +- connector_dns/consumer.py | 34 ++ connector_dns/data/dns_record_type.xml | 141 +++++ connector_dns/dns_view.xml | 162 ------ connector_dns/models/__init__.py | 6 +- connector_dns/models/dns.py | 107 ---- connector_dns/models/dns_backend.py | 181 +++++++ connector_dns/models/dns_record.py | 83 +++ connector_dns/models/dns_record_type.py | 55 ++ 
connector_dns/models/dns_zone.py | 40 ++ connector_dns/security/dns.xml | 20 + connector_dns/security/ir.model.access.csv | 7 + connector_dns/tests/__init__.py | 28 +- connector_dns/tests/common.py | 185 +++++-- connector_dns/tests/models/__init__.py | 7 + .../tests/models/test_dns_backend.py | 182 +++++++ connector_dns/tests/models/test_dns_record.py | 25 + .../tests/models/test_dns_record_type.py | 19 + connector_dns/tests/test_backend.py | 66 --- connector_dns/tests/test_backend_adapter.py | 65 +++ connector_dns/tests/test_base_exporter.py | 287 ++++++++++ connector_dns/tests/test_batch_importer.py | 79 +++ connector_dns/tests/test_binder.py | 123 +++-- connector_dns/tests/test_connector.py | 98 ++++ connector_dns/tests/test_consumer.py | 57 ++ .../tests/test_delayed_batch_importer.py | 46 ++ .../tests/test_direct_batch_importer.py | 44 ++ connector_dns/tests/test_dns_deleter.py | 47 ++ connector_dns/tests/test_dns_exporter.py | 466 ++++++++++++++++ connector_dns/tests/test_dns_import_mapper.py | 25 + connector_dns/tests/test_dns_importer.py | 500 ++++++++++++++++++ connector_dns/unit/__init__.py | 4 + connector_dns/unit/backend_adapter.py | 14 +- connector_dns/unit/binder.py | 104 ++-- connector_dns/unit/delete_synchronizer.py | 27 + connector_dns/unit/export_synchronizer.py | 357 ++++++++++++- connector_dns/unit/import_synchronizer.py | 303 +++++++++++ connector_dns/unit/mapper.py | 15 + connector_dns/views/dns_backend.xml | 64 +++ connector_dns/{ => views}/dns_menu.xml | 12 +- connector_dns/views/dns_record.xml | 52 ++ connector_dns/views/dns_zone.xml | 57 ++ 45 files changed, 3745 insertions(+), 573 deletions(-) create mode 100644 connector_dns/consumer.py create mode 100644 connector_dns/data/dns_record_type.xml delete mode 100755 connector_dns/dns_view.xml delete mode 100755 connector_dns/models/dns.py create mode 100755 connector_dns/models/dns_backend.py create mode 100755 connector_dns/models/dns_record.py create mode 100755 connector_dns/models/dns_record_type.py create mode 100755 connector_dns/models/dns_zone.py create mode 100755 connector_dns/security/dns.xml create mode 100644 connector_dns/security/ir.model.access.csv create mode 100644 connector_dns/tests/models/__init__.py create mode 100644 connector_dns/tests/models/test_dns_backend.py create mode 100644 connector_dns/tests/models/test_dns_record.py create mode 100644 connector_dns/tests/models/test_dns_record_type.py delete mode 100644 connector_dns/tests/test_backend.py create mode 100644 connector_dns/tests/test_backend_adapter.py create mode 100644 connector_dns/tests/test_base_exporter.py create mode 100644 connector_dns/tests/test_batch_importer.py create mode 100644 connector_dns/tests/test_connector.py create mode 100644 connector_dns/tests/test_consumer.py create mode 100644 connector_dns/tests/test_delayed_batch_importer.py create mode 100644 connector_dns/tests/test_direct_batch_importer.py create mode 100644 connector_dns/tests/test_dns_deleter.py create mode 100644 connector_dns/tests/test_dns_exporter.py create mode 100644 connector_dns/tests/test_dns_import_mapper.py create mode 100644 connector_dns/tests/test_dns_importer.py create mode 100644 connector_dns/unit/delete_synchronizer.py mode change 100755 => 100644 connector_dns/unit/export_synchronizer.py create mode 100644 connector_dns/unit/import_synchronizer.py create mode 100644 connector_dns/unit/mapper.py create mode 100755 connector_dns/views/dns_backend.xml rename connector_dns/{ => views}/dns_menu.xml (64%) create mode 100755 
connector_dns/views/dns_record.xml create mode 100755 connector_dns/views/dns_zone.xml diff --git a/connector_dns/README.rst b/connector_dns/README.rst index db1e7b1..5ed81dc 100755 --- a/connector_dns/README.rst +++ b/connector_dns/README.rst @@ -6,41 +6,68 @@ Connector DNS ============= -This module aims to allows to manage your DNS domain through Odoo. +This module aims to create a framework for DNS management through the +odoo-connector in order to be able to manage your DNS records in Odoo and be +able to connect to external service providers via API and the odoo-connector +job-queue. + +This module only introduces the main data model and can be used as is to +manually store DNS records. It provides the objects or basic mapping to +create API connection but does not provide any connector per se. + +Additional modules for specific connectors need to be added to manage the +service provider connection. + +With specific DNS provider module, the DNS connector supports: + +* Import the domains and records from your DNS provider into Odoo +* DNS domain creation / deletion (TBD) / update and synchronization to your + DNS provider +* Records creation / deletion (TBD) / update and synchronization to your + DNS provider Installation ============ -To install this module, you need to: - - * have basic modules installed (connector) +To install this module, you need to install the odoo-connector module. Configuration ============= To configure this module, you need to: -#. Go to ... - -.. figure:: path/to/local/image.png - :alt: alternative description - :width: 600 px +#. Install a specific module such as connector_dns_dnspod +#. Create and set up the authentication for the DNS service provider in + Connectors/DNS/backends Usage ===== To use this module, you need to: -#. Go to ... +#. Create your domains, select the DNS provider and confirm them in + Connectors/DNS/Domains +#. Once the domains are created, you can create the records accordingly + in Connectors/DNS/records +#. Every time you create, delete or update a new record, a job will be + created in Connectors/Queue/Jobs +#. if a job fails, you can check the error and retry the job if necessary. + +You might want to check the official documentation of the +`Odoo Connector `_ to build your own +DNS provider connector. .. image:: https://odoo-community.org/website/image/ir.attachment/5784_f2813bd/datas :alt: Try me on Runbot - :target: https://runbot.odoo-community.org/runbot/{repo_id}/{branch} + :target: https://runbot.odoo-community.org/runbot/224/8.0 Known issues / Roadmap ====================== -* TBD +* Add validations for record types: ``SPF``, ``NAPTR`` +* Add a delete synchronizer +* Add tests for each of the ``dns.record.type`` validation regexes +* Add missing tests for ``export_synchronizer`` & ``import_synchronizer`` Bug Tracker =========== @@ -53,14 +80,10 @@ help us smashing it by providing a detailed and welcomed feedback. Credits ======= -Images ------- - -* Odoo Community Association: `Icon `_. - Contributors ------------ +* Dave Lasley * Eric Caudal * Noah Wang * Liu Lixia diff --git a/connector_dns/__init__.py b/connector_dns/__init__.py index 09f5c98..286f612 100755 --- a/connector_dns/__init__.py +++ b/connector_dns/__init__.py @@ -4,7 +4,6 @@ from . import backend from . import connector -from .models import dns -from . import tests +from . import models from . 
import unit diff --git a/connector_dns/__openerp__.py b/connector_dns/__openerp__.py index 3167a34..4dd2ce2 100755 --- a/connector_dns/__openerp__.py +++ b/connector_dns/__openerp__.py @@ -1,18 +1,26 @@ # -*- coding: utf-8 -*- # Copyright 2015 Elico Corp +# Copyright 2016 LasLabs Inc. # License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl). + { 'name': 'DNS connector', 'version': '8.0.1.0.0', 'category': 'Connector', 'depends': ['connector'], - 'author': 'Elico Corp,Odoo Community Association (OCA)', + 'author': 'Elico Corp, ' + 'LasLabs, ' + 'Odoo Community Association (OCA)', 'license': 'AGPL-3', 'website': 'https://www.elico-corp.com', 'data': [ - 'dns_view.xml', - 'dns_menu.xml' + 'views/dns_backend.xml', + 'views/dns_record.xml', + 'views/dns_zone.xml', + 'views/dns_menu.xml', + 'data/dns_record_type.xml', + 'security/dns.xml', + 'security/ir.model.access.csv', ], 'installable': True, - 'application': False } diff --git a/connector_dns/connector.py b/connector_dns/connector.py index fb1c97c..1aec13f 100755 --- a/connector_dns/connector.py +++ b/connector_dns/connector.py @@ -1,42 +1,56 @@ # -*- coding: utf-8 -*- # Copyright 2015 Elico Corp +# Copyright 2016 LasLabs Inc. # License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl). -from openerp import models, fields -from openerp.addons.connector.connector import (Environment) + +from openerp import models, fields, api +from openerp.addons.connector.connector import Environment from openerp.addons.connector.checkpoint import checkpoint def get_environment(session, model_name, backend_id): """ Create an environment to work with. """ - backend_record = session.browse('dns.backend', backend_id) + backend_record = session.env['dns.backend'].browse(backend_id) env = Environment(backend_record, session, model_name) return env class DNSBinding(models.AbstractModel): """ Abstract Model for the Bindigs. - All the models used as bindings between dnspod and OpenERP - (``dnspod.res.partner``, ``dnspod.product.product``, ...) should - ``_inherit`` it. + All the models used as bindings between External System and Odoo + (``aws.dns.record``, ``aws.dns.zone``, ...) should ``_inherit`` it. 
""" _name = 'dns.binding' _inherit = 'external.binding' - _description = 'dns Binding (abstract)' + _description = 'DNS Binding (abstract)' dns_backend_id = fields.Many2one( comodel_name='dns.backend', - String='DNS Backend', + string='DNS Backend', store=True, - ondelete='restrict' + required=True, + ondelete='restrict', + default=lambda s: s._default_dns_backend_id() + ) + dns_id_external = fields.Char( + string='External ID', + help='ID of the record in external system.', ) - # fields.char because 0 is a valid dnspod ID - dns_id = fields.Char('ID on other software') - # state of the record synchronization with dnspod - state = fields.Selection( - [('draft', 'Draft'), ('done', 'Done'), - ('exception', 'Exception')], 'State', - default="draft", - help='Done when succeed otherwise Exception') + fail_date = fields.Datetime() + + _sql_constraints = [ + ('backend_uniq', 'unique(dns_backend_id, dns_id_external)', + 'A binding already exists with the same DNS External ID.'), + ] + + @api.model + def _default_dns_backend_id(self): + return self.env['dns.backend'].search([ + ('is_default', '=', True), + ('active', '=', True), + ], + limit=1, + ) def add_checkpoint(session, model_name, record_id, backend_id): diff --git a/connector_dns/consumer.py b/connector_dns/consumer.py new file mode 100644 index 0000000..6059683 --- /dev/null +++ b/connector_dns/consumer.py @@ -0,0 +1,34 @@ +# -*- coding: utf-8 -*- +# Copyright 2016 LasLabs Inc. +# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html). + +from .unit.export_synchronizer import export_record + + +import logging +_logger = logging.getLogger(__name__) + + +def delay_export(session, model_name, record_id, vals): + """ Delay a job which export a binding record. + (A binding record being a ``dns.record.bind``, + ``dns.zone.bind``, ...) + """ + if session.context.get('connector_no_export'): + return + fields = vals.keys() + export_record.delay(session, model_name, record_id, fields=fields) + + +def delay_export_all_bindings(session, model_name, record_id, vals): + """ Delay a job which export all the bindings of a record. + In this case, it is called on records of normal models and will delay + the export for all the bindings. + """ + if session.context.get('connector_no_export'): + return + record = session.env[model_name].browse(record_id) + fields = vals.keys() + for binding in record.dns_bind_ids: + export_record.delay(session, binding._model._name, binding.id, + fields=fields) diff --git a/connector_dns/data/dns_record_type.xml b/connector_dns/data/dns_record_type.xml new file mode 100644 index 0000000..a309297 --- /dev/null +++ b/connector_dns/data/dns_record_type.xml @@ -0,0 +1,141 @@ + + + + + + IPv4 Address + A + IPv4 address. Enter multiple addresses + on separate lines. + Example: + 192.0.2.235 + 198.51.100.234 + + ^((?:(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\n?)+$ + + + + IPv6 Address + AAAA + IPv6 address. Enter multiple addresses + on separate lines. + Example: + 2001:0db8:85a3:0:0:8a2e:0370:7334 + fe80:0:0:0:202:b3ff:fe1e:8329 + + ^((?:[A-F0-9]{1,4}:){7}[A-F0-9]{1,4}\n?)+$ + + + + Canonical Name + CNAME + The domain name that you want to + resolve to instead of the value in the + Name field. + Example: + www.example.com + + ^([a-z0-9]+(-[a-z0-9]+)*\.)+[a-z]{2,}$ + + + + Mail Exchange + MX + A priority and a domain name that + specifies a mail server. Enter multiple + values on separate lines. 
+ Format: + [priority] [mail server host name] + Example: + 10 mailserver.example.com. + 20 mailserver2.example.com. + + ^([12]?\d ([a-z0-9]+(-[a-z0-9]+)*\.)+[a-z]{2,}\n)+$ + + + + Text + TXT + A text record. + Enter multiple values on separate lines. + Enclose text in quotation marks. + Example: + "Sample Text Entries" + "Enclose entries in quotation marks" + + ^(".*"\n)+$ + + + + Pointer + PTR + The domain name that you want to return. + Example: + www.example.com + + ^([a-z0-9]+(-[a-z0-9]+)*\.)+[a-z]{2,}$ + + + + Service Locator + SRV + A SRV record. For information about SRV + record format, refer to the applicable + documentation. Enter multiple values + on separate lines. + Format: + [priority] [weight] [port] [server host name] + Example: + 1 10 5269 xmpp-server.example.com. + 2 12 5060 sip-server.example.com. + + ^(\d+ \d+ \d+ ([a-z0-9]+(-[a-z0-9]+)*\.)+[a-z]{2,}\n)+$ + + + + Sender Policy Framework + SPF + An SPF record. For information about SPF + record format, refer to the applicable + documentation. Enter multiple values + on separate lines. Enclose values in + quotation marks. + Example: + "v=spf1 ip4:192.168.0.1/16-all" + + .* + + + + Name Authority Pointer + NAPTR + An NAPTR record. For information about NAPTR + record format, refer to the applicable + documentation. Enter multiple values + on separate lines. + Format: + [order] [preference] [flags] [services] [regexp] [replacement] + Example: + 100 100 "U" "" "!^.*$!sip:info@bar.example.com!" . + 10 100 "S" "SIP+D2U" "" foo.example.com. + + .* + + + + Name Server + A + The domain name of a name server. + Enter multiple name servers on + separate lines. + Example: + ns1.amazon.com + ns2.amazon.org + ns3.amazon.net + ns4.amazon.co.uk + + ^(([a-z0-9]+(-[a-z0-9]+)*\.)+[a-z]{2,}\n)+$ + + + + diff --git a/connector_dns/dns_view.xml b/connector_dns/dns_view.xml deleted file mode 100755 index 6bf4500..0000000 --- a/connector_dns/dns_view.xml +++ /dev/null @@ -1,162 +0,0 @@ - - - - - - DNS Backend - dns.backend - ir.actions.act_window - form - tree,form - - - - DNS Backend form - dns.backend - - - - - - - - - DNS Backend form - dns.backend - -
-        <!-- Removed view markup lost in extraction: the dns.backend form view
-             and the window actions, tree and form views for dns.domain and
-             dns.record, deleted together with this file. -->
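The `validate_regex` values shipped in data/dns_record_type.xml above are enforced by the `_check_value` constraint that models/dns_record.py adds later in this commit, which runs `re.search` with the MULTILINE and IGNORECASE flags. Here is a simplified, standalone sketch of that check using the IPv4 (A) regex from the data file; the `is_valid` helper is an assumed name for illustration only:

    # Simplified mirror of DNSRecord._check_value (helper name is assumed).
    import re

    A_RECORD_REGEX = (
        r'^((?:(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}'
        r'(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\n?)+$'
    )


    def is_valid(value, regex=A_RECORD_REGEX):
        """Return True when the record value satisfies the type's regex."""
        return bool(re.search(regex, value, flags=re.MULTILINE | re.IGNORECASE))


    assert is_valid('192.0.2.235\n198.51.100.234')  # two A records, one per line
    assert not is_valid('999.0.2.235')  # out-of-range octet: rejected

In the model itself a failed match raises a ValidationError naming the record type, and the constraint first unescapes doubled backslashes stored in the XML data before searching.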
diff --git a/connector_dns/models/__init__.py b/connector_dns/models/__init__.py index 82e3357..270926c 100644 --- a/connector_dns/models/__init__.py +++ b/connector_dns/models/__init__.py @@ -1,4 +1,8 @@ # -*- coding: utf-8 -*- # Copyright 2015 Elico Corp # License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl). -from . import dns + +from . import dns_backend +from . import dns_zone +from . import dns_record +from . import dns_record_type diff --git a/connector_dns/models/dns.py b/connector_dns/models/dns.py deleted file mode 100755 index 366bb0d..0000000 --- a/connector_dns/models/dns.py +++ /dev/null @@ -1,107 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2015 Elico Corp -# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl). -from openerp import models, fields, api - - -class DNSBackend(models.Model): - _name = 'dns.backend' - _inherit = 'connector.backend' - _backend_type = 'dns' - - def _select_version(self): - return [] - - login = fields.Char( - string='Login', - help="Provider's login.", - required=True - ) - password = fields.Char( - string='Password', - help="Provider's password.", - required=True - ) - state = fields.Selection( - [('draft', 'Draft'), ('done', 'Done'), - ('exception', 'Exception')], - string='State', - default="draft", - help='"Confirmed" when the domain has been succesfully created.' - ) - version = fields.Selection( - selection='_select_version', - string='Service Provider', - help='DNS service provider', - required=True - ) - - @api.multi - def name_get(self): - res = [] - for backend in self: - res.append((backend.id, '%s (%s)' % (backend.name, backend.login))) - return res - - -class DNSDomain(models.Model): - _name = 'dns.domain' - _inherit = 'dns.binding' - - name = fields.Char( - string='Name', - required=True, - help='Domain name without "www",such as"dnspod.cn"' - ) - record_ids = fields.One2many( - comodel_name='dns.record', - inverse_name='domain_id', - string='Subdomains' - ) - - -class DNSRecord(models.Model): - _name = 'dns.record' - _inherit = 'dns.binding' - - def _line_select_version(self): - return [] - - def _type_select_version(self): - return [] - - name = fields.Char( - string='Sub domain', - help="host record,such as 'www'", - required=True) - domain_id = fields.Many2one( - comodel_name='dns.domain', - string="Domain", - domain="[('state','=','done')]", - ondelete='cascade', - help="Domain which has already confirmed" - ) - type = fields.Selection( - selection='_type_select_version', - string='Record Type' - ) - line = fields.Selection( - selection='_line_select_version', - string='Record Line' - ) - value = fields.Text( - string='Value', - help="such as IP:200.200.200.200", - required=True - ) - mx_priority = fields.Integer( - string='MX priority', - help="scope:1-20", - default=1 - ) - ttl = fields.Integer( - string='TTL', - default=600, - help="scope:1-604800", - required=True - ) diff --git a/connector_dns/models/dns_backend.py b/connector_dns/models/dns_backend.py new file mode 100755 index 0000000..641c8dc --- /dev/null +++ b/connector_dns/models/dns_backend.py @@ -0,0 +1,181 @@ +# -*- coding: utf-8 -*- +# Copyright 2015 Elico Corp +# Copyright 2016 LasLabs Inc. +# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl). 
+ +from datetime import datetime + +from openerp import models, fields, api, _ +from openerp.exceptions import ValidationError + +from openerp.addons.connector.session import ConnectorSession + +from ..unit.import_synchronizer import (import_batch, + import_record, + ) + + +class DNSBackend(models.Model): + _name = 'dns.backend' + _inherit = 'connector.backend' + _backend_type = 'dns' + + login = fields.Char( + string='Login', + help="Provider's login.", + required=True + ) + password = fields.Char( + string='Password', + help="Provider's password.", + required=True + ) + uri = fields.Char( + help='URI to Provider endpoint.', + ) + version = fields.Selection( + selection='_select_version', + string='Service Provider', + help='DNS service provider', + required=True + ) + company_id = fields.Many2one( + string='Company', + comodel_name='res.company', + default=lambda s: s.env.user.company_id, + ) + is_default = fields.Boolean( + default=True, + help='Check this if this is the default connector for the company.' + ' All newly created records for this company will be synced to the' + ' default system. Only records that originated from non-default' + ' systems will be synced with them.', + ) + active = fields.Boolean( + default=True, + ) + import_zones_from_date = fields.Datetime() + import_records_from_date = fields.Datetime() + + @api.model + def _select_version(self): + """ It returns the available DNS backend versions """ + return [('none', 'None')] + + @api.multi + @api.constrains('is_default', 'company_id') + def _check_default_for_company(self): + """ It raises ``ValidationError`` when multiple defaults selected """ + for rec_id in self: + domain = [ + ('company_id', '=', rec_id.company_id.id), + ('is_default', '=', True), + ] + if len(self.search(domain)) > 1: + raise ValidationError(_( + 'This company already has a default CarePoint connector.', + )) + + @api.multi + def name_get(self): + res = [] + for backend in self: + res.append((backend.id, '%s (%s)' % (backend.name, backend.login))) + return res + + @api.multi + def check_dns_structure(self): + """ It provides a central method used in every data import + + It should support non-singleton Recordsets. + """ + return True + + @api.multi + def _import_all(self, model): + """ It runs delayed import for found external records for model + + Args: + model (str): Binding model to perform import for: ``aws.dns.zone`` + """ + session = self._get_session() + self.check_dns_structure() + for backend in self: + import_batch.delay(session, model, backend.id) + + @api.multi + def _import_from_date(self, model, from_date_field, chg_date_field=None): + """ It imports updated external records and sets last sync time + + Args: + model (str): Binding model to perform import for: ``aws.dns.zone`` + from_date_field (str): Name of field on backend containing time + of last sync for type. Will update to import start time after + completed. + chg_date_field (str): Name of field on external record containing + last update time. 
``None`` or ``False`` to use + ``binder._external_date_field`` + """ + session = self._get_session() + if not chg_date_field: + binder = session.binder_for(model) + chg_date_field = binder._external_date_field + import_start_time = datetime.now() + self.check_dns_structure() + for backend in self: + filters = {chg_date_field: {'<=': import_start_time}} + from_date = getattr(backend, from_date_field) + if from_date: + filters[chg_date_field]['>='] = fields.Datetime.from_string( + from_date + ) + import_batch.delay(session, model, backend.id, filters=filters) + self.write({ + from_date_field: fields.Datetime.to_string(import_start_time), + }) + + @api.model + def resync_all(self, binding_model, backend_ids=None): + """ It re-imports all bound records with their external systems. + + This method is particularly useful if the external system does not + have a webhook to notify Odoo of updated records. + + Args: + binding_model (str): Name of binding model to sync + backend_ids (list): List of ids for Backend records that should + be used as search filter. ``None`` or ``False`` for all. + """ + session = self._get_session() + domain = [] + if backend_ids: + if isinstance(backend_ids, models.BaseModel): + backend_ids = backend_ids.ids + domain.append( + ('dns_backend_id', 'in', backend_ids), + ) + for record_id in self.env[binding_model].search(domain): + for binding_id in record_id.dns_bind_ids: + import_record.delay(session, + binding_model, + binding_id.backend_id.id, + binding_id.dns_id_external, + force=True, + ) + + @api.multi + def import_dns_zones(self): + self._import_from_date('dns.zone.bind', + 'import_zones_from_date') + + @api.multi + def import_dns_records(self): + self._import_from_date('dns.record.bind', + 'import_records_from_date') + + @api.model + def _get_session(self): + """ It returns a ConnectorSession for the environment """ + return ConnectorSession( + self.env.cr, self.env.uid, context=self.env.context + ) diff --git a/connector_dns/models/dns_record.py b/connector_dns/models/dns_record.py new file mode 100755 index 0000000..bf8a12e --- /dev/null +++ b/connector_dns/models/dns_record.py @@ -0,0 +1,83 @@ +# -*- coding: utf-8 -*- +# Copyright 2015 Elico Corp +# Copyright 2016 LasLabs Inc. +# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl). + +import re + +from openerp import models, fields, api, _ +from openerp.exceptions import ValidationError + + +class DNSRecordBind(models.Model): + _name = 'dns.record.bind' + _description = 'DNS Record Binding' + _inherit = 'dns.binding' + _inherits = {'dns.record': 'odoo_id'} + + odoo_id = fields.Many2one( + comodel_name='dns.record', + string='DNS Record', + required=True, + ondelete='cascade', + ) + + +class DNSRecord(models.Model): + _name = 'dns.record' + _description = 'DNS Record' + + name = fields.Char( + string='Sub domain', + help='Host record, such as "www".', + required=True, + ) + zone_id = fields.Many2one( + string="Zone", + comodel_name='dns.zone', + ondelete='cascade', + help="Hosted zone that this record is applied to.", + ) + type_id = fields.Many2one( + string='Record Type', + comodel_name='dns.record.type', + required=True, + ) + type_help = fields.Text( + string='Record Help', + related='type_id.help', + ) + value = fields.Text( + string='Value', + help="Enter multiple values on separate lines. Enclose text in " + "quotation marks.", + required=True, + ) + ttl = fields.Integer( + string='TTL', + default=600, + help="Time to Live, in seconds. 
Scope: 1-604800", + required=True, + ) + dns_bind_ids = fields.One2many( + string='External Bindings', + comodel_name='dns.record.bind', + inverse_name='odoo_id', + ) + + @api.multi + @api.constrains('type_id', 'value') + def _check_value(self): + """ It should raise ValidationError on invalid values """ + for rec_id in self: + if not rec_id.type_id.validate_regex: + continue + if not re.search( + rec_id.type_id.validate_regex.replace('\\\\', '\\'), + rec_id.value, + flags=re.MULTILINE | re.IGNORECASE, + ): + raise ValidationError( + _('"%s" does not match validation rule for a "%s" record') + % (rec_id.value, rec_id.type_id.display_name) + ) diff --git a/connector_dns/models/dns_record_type.py b/connector_dns/models/dns_record_type.py new file mode 100755 index 0000000..ce03993 --- /dev/null +++ b/connector_dns/models/dns_record_type.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2016 LasLabs Inc. +# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl). + +from openerp import models, fields, api + + +class DNSRecordTypeBind(models.Model): + _name = 'dns.record.type.bind' + _description = 'DNS Record Type Binding' + _inherit = 'dns.binding' + _inherits = {'dns.record.type': 'odoo_id'} + + odoo_id = fields.Many2one( + comodel_name='dns.record.type', + string='DNS Record', + required=True, + ondelete='cascade', + ) + + +class DNSRecordType(models.Model): + _name = 'dns.record.type' + _description = 'DNS Record Type' + + name = fields.Char( + required=True, + help='Name of DNS record type, such a "A" or "CNAME".', + ) + code = fields.Char( + required=True, + ) + help = fields.Text( + help="Text that will be displayed to user as a formatting guide " + "for this record type.", + ) + validate_regex = fields.Char( + help='This is a regex that is used for validation of the record ' + 'value. Leave blank for no validation.', + ) + supported_backend_ids = fields.Many2many( + string='Supported Backends', + comodel_name='dns.backend', + ) + dns_bind_ids = fields.One2many( + string='External Bindings', + comodel_name='dns.record.type.bind', + inverse_name='odoo_id', + ) + + @api.multi + def name_get(self): + return [ + (r.id, '%s - %s' % (r.code, r.name)) for r in self + ] diff --git a/connector_dns/models/dns_zone.py b/connector_dns/models/dns_zone.py new file mode 100755 index 0000000..86c1d99 --- /dev/null +++ b/connector_dns/models/dns_zone.py @@ -0,0 +1,40 @@ +# -*- coding: utf-8 -*- +# Copyright 2015 Elico Corp +# Copyright 2016 LasLabs Inc. +# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl). 
+ +from openerp import models, fields + + +class DNSZoneBind(models.Model): + _name = 'dns.zone.bind' + _description = 'DNS Zone Binding' + _inherit = 'dns.binding' + _inherits = {'dns.zone': 'odoo_id'} + + odoo_id = fields.Many2one( + comodel_name='dns.zone', + string='DNS Zone', + required=True, + ondelete='cascade', + ) + + +class DNSZone(models.Model): + _name = 'dns.zone' + + name = fields.Char( + string='Name', + required=True, + help='Hosted zone name, such as "amazon.com".', + ) + record_ids = fields.One2many( + string='DNS Records', + comodel_name='dns.record', + inverse_name='zone_id', + ) + dns_bind_ids = fields.One2many( + string='External Bindings', + comodel_name='dns.zone.bind', + inverse_name='odoo_id', + ) diff --git a/connector_dns/security/dns.xml b/connector_dns/security/dns.xml new file mode 100755 index 0000000..e696e42 --- /dev/null +++ b/connector_dns/security/dns.xml @@ -0,0 +1,20 @@ + + + + + + DNS User + + + + + + DNS Manager + + + + + + diff --git a/connector_dns/security/ir.model.access.csv b/connector_dns/security/ir.model.access.csv new file mode 100644 index 0000000..4ec8f14 --- /dev/null +++ b/connector_dns/security/ir.model.access.csv @@ -0,0 +1,7 @@ +id,name,model_id:id,group_id:id,perm_read,perm_write,perm_create,perm_unlink +access_dns_zone_user,access_dns_zone_user,connector_dns.model_dns_zone,connector_dns.group_dns_user,1,0,0,0 +access_dns_zone_manager,access_dns_zone_manager,connector_dns.model_dns_zone,connector_dns.group_dns_manager,1,1,1,1 +access_dns_record_user,access_dns_record_user,connector_dns.model_dns_record,connector_dns.group_dns_user,1,0,0,0 +access_dns_record_manager,access_dns_record_manager,connector_dns.model_dns_record,connector_dns.group_dns_manager,1,1,1,1 +access_dns_record_type_user,access_dns_record_type_user,connector_dns.model_dns_record_type,connector_dns.group_dns_user,1,0,0,0 +access_dns_record_type_manager,access_dns_record_type_manager,connector_dns.model_dns_record_type,connector_dns.group_dns_manager,1,1,1,1 diff --git a/connector_dns/tests/__init__.py b/connector_dns/tests/__init__.py index f78fd74..10f7023 100644 --- a/connector_dns/tests/__init__.py +++ b/connector_dns/tests/__init__.py @@ -1,6 +1,32 @@ # -*- coding: utf-8 -*- # Copyright 2015 Elico Corp +# Copyright 2016 LasLabs Inc. # License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl). + +# Common from . import common -from . import test_backend + +# Unit from . import test_binder +from . import test_connector +from . import test_consumer +from . import test_backend_adapter + +# Mapper +from . import test_dns_import_mapper + +# Importer +from . import test_batch_importer +from . import test_delayed_batch_importer +from . import test_direct_batch_importer +from . import test_dns_importer + +# Exporter +from . import test_base_exporter +from . import test_dns_exporter + +# Deleter +from . import test_dns_deleter + +# Models +from .models import * diff --git a/connector_dns/tests/common.py b/connector_dns/tests/common.py index 30034dc..cbcd146 100644 --- a/connector_dns/tests/common.py +++ b/connector_dns/tests/common.py @@ -1,64 +1,149 @@ # -*- coding: utf-8 -*- -# -# -# Authors: Guewen Baconnier -# Copyright 2015 Camptocamp SA -# -# This program is free software: you can redistribute it and/or modify -# it under the terms of the GNU Affero General Public License as -# published by the Free Software Foundation, either version 3 of the -# License, or (at your option) any later version. 
-# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU Affero General Public License for more details. -# -# You should have received a copy of the GNU Affero General Public License -# along with this program. If not, see . -# -# +# Copyright 2015 Camptocamp SA +# Copyright 2016 LasLabs Inc. +# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html). + +""" +Helpers usable in the tests +""" import importlib +import mock + from contextlib import contextmanager -import mock +import openerp.tests.common as common + +import openerp.addons.connector.backend as backend +from openerp.addons.connector.session import ConnectorSession + +from ..backend import dns +from ..unit.binder import DNSModelBinder + +backend_adapter = 'openerp.addons.connector_dns.unit.backend_adapter' + + +class EndTestException(Exception): + """ It is used to break code execution for logic isolation """ + + +class DNSHelper(object): + """ Emulate a ConnectorEnvironment """ + + def __init__(self, env, model_name, backend): + self.cr = env.cr + self.model = env[model_name] + self.backend = backend.get_backend() + self.backend_record = backend + self.session = ConnectorSession( + env.cr, + env.uid, + env.context, + ) + self.connector_unit = {} + + def get_connector_unit(self, unit_class): + try: + return self.connector_unit[unit_class] + except KeyError: + self.connector_unit[unit_class] = mock.MagicMock() + return self.connector_unit[unit_class] + + +class SetUpDNSBase(common.TransactionCase): + """ Base class - Test the imports from a DNS Mock. """ + + def setUp(self): + super(SetUpDNSBase, self).setUp() + self.backend_model = self.env['dns.backend'] + self.dns_id = 123456789 + self.session = ConnectorSession( + self.env.cr, self.env.uid, context=self.env.context, + ) + self.test_backend = backend.Backend( + parent=dns, + version='none', + ) + self.EndTestException = EndTestException + self.backend = self.backend_model.create({ + 'name': 'Test DNS', + 'version': 'none', + 'uri': 'URI', + 'login': 'username', + 'password': 'passwd', + 'is_default': True, + }) + + def get_dns_helper(self, model_name): + """ It returns a simulated ConnectorEnvironment for model_name + + Args: + model_name (str): Name of model to simulate environment for + + Returns: + Simulated ``ConnectorEnvironment`` for testing + """ + return DNSHelper( + self.env, model_name, self.backend + ) + def get_mock_binder(self): + """ It returns a mock specced as a DNSModelBinder """ + binder = mock.MagicMock(spec=DNSModelBinder) + binder._external_field = DNSModelBinder._external_field + binder._backend_field = DNSModelBinder._backend_field + binder._openerp_field = DNSModelBinder._openerp_field + binder._sync_date_field = DNSModelBinder._sync_date_field + binder._fail_date_field = DNSModelBinder._fail_date_field + binder._external_date_field = DNSModelBinder._external_date_field + return binder -@contextmanager -def mock_job_delay_to_direct(job_path): - """ Replace the .delay() of a job by a direct call + @contextmanager + def mock_adapter(self, unit, binder_for=False): + """ It returns a mocked backend_adapter on unit for testing - job_path is the python path as string, such as:: + Args: + unit (connector.ConnectorUnit): to mock adapter on + binder_for (bool): Also mock ``binder_for`` method on unit - 'openerp.addons.magentoerpconnect.stock_picking.export_picking_done' + Yields: + mock.Mock() + """ + 
with mock.patch.object(unit, '_backend_adapter') as API: + if binder_for: + with mock.patch.object(unit, 'binder_for') as bind: + bind.return_value = self.get_mock_binder() + yield API + else: + yield API - This is a context manager, all the calls made to the job function in - job_path inside the context manager will be executed synchronously. + @contextmanager + def mock_job_delay_to_direct(self, job_path): + """ Replace the ``.delay()`` of a job with a direct call - .. note:: It uses :meth:`mock.patch` so it has the same pitfall - regarding the python path. If the mock seems to have no - effect, read `Where to patch - `_ - in the mock documentation. + Args: + job_path (str): The python path of the job, such as + ``openerp.addons.dns.models.dns_record.export_record`` - """ - job_module, job_name = job_path.rsplit('.', 1) - module = importlib.import_module(job_module) - job_func = getattr(module, job_name, None) - assert job_func, "The function %s must exist in %s" % (job_name, - job_module) + Yields: + Patched job + """ + job_module, job_name = job_path.rsplit('.', 1) + module = importlib.import_module(job_module) + job_func = getattr(module, job_name, None) + assert job_func, "The function %s must exist in %s" % (job_name, + job_module) - def clean_args_for_func(*args, **kwargs): - # remove the special args reserved to '.delay()' - kwargs.pop('priority', None) - kwargs.pop('eta', None) - kwargs.pop('model_name', None) - kwargs.pop('max_retries', None) - kwargs.pop('description', None) - job_func(*args, **kwargs) + def clean_args_for_func(*args, **kwargs): + # remove the special args reserved to .delay() + kwargs.pop('priority', None) + kwargs.pop('eta', None) + kwargs.pop('model_name', None) + kwargs.pop('max_retries', None) + kwargs.pop('description', None) + job_func(*args, **kwargs) - with mock.patch(job_path) as patched_job: - # call the function directly instead of '.delay()' - patched_job.delay.side_effect = clean_args_for_func - yield patched_job + with mock.patch(job_path) as patched_job: + # call the direct export instead of 'delay()' + patched_job.delay.side_effect = clean_args_for_func + yield patched_job diff --git a/connector_dns/tests/models/__init__.py b/connector_dns/tests/models/__init__.py new file mode 100644 index 0000000..fec7241 --- /dev/null +++ b/connector_dns/tests/models/__init__.py @@ -0,0 +1,7 @@ +# -*- coding: utf-8 -*- +# Copyright 2016 LasLabs Inc. +# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html). + +from . import test_dns_backend +from . import test_dns_record +from . import test_dns_record_type diff --git a/connector_dns/tests/models/test_dns_backend.py b/connector_dns/tests/models/test_dns_backend.py new file mode 100644 index 0000000..f18d48b --- /dev/null +++ b/connector_dns/tests/models/test_dns_backend.py @@ -0,0 +1,182 @@ +# -*- coding: utf-8 -*- +# Copyright 2016 LasLabs Inc. +# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html). 
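# Illustrative sketch (not part of this patch): the date-window filter that
# dns.backend._import_from_date() is expected to hand to import_batch.delay(),
# as implied by the tests that follow. The field name 'chg' and the example
# date are taken from those tests; the real implementation lives in
# models/dns_backend.py, which is not part of this hunk.
from datetime import datetime

from openerp import fields

from_date = fields.Datetime.from_string('2016-06-07 00:00:00')
filters = {
    'chg': {
        '>=': from_date,
        '<=': datetime.now(),
    },
}
# import_batch.delay(session, 'dns.zone.bind', backend_id, filters=filters)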
+ +import mock +from datetime import timedelta, datetime + +from ..common import SetUpDNSBase + +from openerp import fields, models +from openerp.exceptions import ValidationError + + +model = 'openerp.addons.connector_dns.models.dns_backend' + + +class TestDNSBackend(SetUpDNSBase): + + def setUp(self): + super(TestDNSBackend, self).setUp() + self.Model = self.env['dns.backend'] + + def test_check_default_for_company(self): + """ It should not allow two defaults for the same company """ + with self.assertRaises(ValidationError): + self.backend.copy() + + def test_select_version(self): + """ It should return proper versions """ + self.assertIsInstance( + self.Model._select_version(), + list, + ) + + @mock.patch('%s.ConnectorSession' % model) + def test_import_all_gets_session(self, session): + """ It should get session for import """ + session.side_effect = self.EndTestException + with self.assertRaises(self.EndTestException): + self.backend._import_all(None) + + @mock.patch('%s.ConnectorSession' % model) + def test_import_all_checks_stucture(self, session): + """ It should check internal structure on all backends """ + with mock.patch.object(self.backend, 'check_dns_structure') as chk: + chk.side_effect = self.EndTestException + with self.assertRaises(self.EndTestException): + self.backend._import_all('model') + + @mock.patch('%s.import_batch' % model) + @mock.patch('%s.ConnectorSession' % model) + def test_import_all_calls_import(self, session, batch): + """ It should call delayed batch import for model """ + expect = 'model' + self.backend._import_all(expect) + batch.delay.assert_called_once_with( + session(), expect, self.backend.id, + ) + + @mock.patch('%s.ConnectorSession' % model) + def test_import_from_date_checks_stucture(self, session): + """ It should check internal structure on all backends """ + with mock.patch.object(self.backend, 'check_dns_structure') as chk: + chk.side_effect = self.EndTestException + with self.assertRaises(self.EndTestException): + self.backend._import_from_date(None, None, None) + + @mock.patch('%s.datetime' % model) + @mock.patch('%s.import_batch' % model) + @mock.patch('%s.ConnectorSession' % model) + def test_import_from_date_calls_import(self, session, batch, dt_mk): + """ It should call delayed batch import for model """ + expect = 'model', 'import_zones_from_date', 'chg' + dt_mk.now.return_value = datetime.now() + expect_date = dt_mk.now() - timedelta(days=5) + self.backend.import_zones_from_date = expect_date + expect_date = self.backend.import_zones_from_date + self.backend._import_from_date(*expect) + batch.delay.assert_called_once_with( + session(), expect[0], self.backend.id, + filters={ + expect[2]: { + '>=': fields.Datetime.from_string( + expect_date, + ), + '<=': dt_mk.now(), + }, + } + ) + + @mock.patch('%s.datetime' % model) + @mock.patch('%s.import_batch' % model) + @mock.patch('%s.ConnectorSession' % model) + def test_import_from_date_writes_new_date(self, session, batch, dt_mk): + """ It should call delayed batch import for model """ + dt_mk.now.return_value = datetime.now() + expect_date = dt_mk.now() - timedelta(days=5) + self.backend.import_zones_from_date = expect_date + self.backend._import_from_date( + 'model', 'import_zones_from_date', 'chg' + ) + expect = dt_mk.now() + self.assertEqual( + fields.Datetime.to_string(expect), + self.backend.import_zones_from_date, + ) + + def test_import_dns_zones(self): + """ It should import proper model on date field """ + with mock.patch.object(self.backend, '_import_from_date') as mk: + 
self.backend.import_dns_zones() + mk.assert_called_once_with( + 'dns.zone.bind', + 'import_zones_from_date', + ) + + def test_import_dns_records(self): + """ It should import proper model on date field """ + with mock.patch.object(self.backend, '_import_from_date') as mk: + self.backend.import_dns_records() + mk.assert_called_once_with( + 'dns.record.bind', + 'import_records_from_date', + ) + + def test_name_get(self): + """ It should conjoin name and login """ + self.assertEqual( + '%s (%s)' % (self.backend.name, self.backend.login), + self.backend.display_name, + ) + + def test_resync_all_get_session(self): + """ It should obtain current session """ + with mock.patch.object(self.backend, '_get_session') as get: + get.side_effect = self.EndTestException + with self.assertRaises(self.EndTestException): + self.backend.resync_all(None, None) + + def test_resync_all_search(self): + """ It should search for domain on binding model """ + expect = [1, 2] + with mock.patch.object(self.backend, '_get_session'): + with mock.patch.object(self.backend, 'env') as env: + search = env[self.Model._name].search + search.side_effect = self.EndTestException + with self.assertRaises(self.EndTestException): + self.backend.resync_all(self.Model._name, expect) + search.assert_called_once_with( + ('dns_backend_id', 'in', expect), + ) + + def test_resync_all_search_recordset(self): + """ It should support recordset inputs for convenience """ + expect = mock.MagicMock(spec=models.BaseModel) + with mock.patch.object(self.backend, '_get_session'): + with mock.patch.object(self.backend, 'env') as env: + search = env[self.Model._name].search + search.side_effect = self.EndTestException + with self.assertRaises(self.EndTestException): + self.backend.resync_all(self.Model._name, expect) + search.assert_called_once_with( + ('dns_backend_id', 'in', expect.ids), + ) + + @mock.patch('%s.import_record' % model) + def test_resync_all_import_record(self, import_record): + """ It should call delayed import for all bindings """ + record, binding = mock.MagicMock(), mock.MagicMock() + record.dns_bind_ids = [binding] + with mock.patch.object(self.backend, '_get_session') as get: + with mock.patch.object(self.backend, 'env') as env: + search = env[self.Model._name].search + search.return_value = [record] + self.backend.resync_all(self.Model._name) + import_record.delay.assert_called_once_with( + get(), + self.Model._name, + binding.backend_id.id, + binding.dns_id_external, + force=True, + ) diff --git a/connector_dns/tests/models/test_dns_record.py b/connector_dns/tests/models/test_dns_record.py new file mode 100644 index 0000000..d0269db --- /dev/null +++ b/connector_dns/tests/models/test_dns_record.py @@ -0,0 +1,25 @@ +# -*- coding: utf-8 -*- +# Copyright 2016 LasLabs Inc. +# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html). 
+ +from openerp.exceptions import ValidationError +from openerp.tests.common import TransactionCase + + +class TestDNSRecord(TransactionCase): + + def new_record(self): + self.type = self.env.ref('connector_dns.type_a') + self.zone = self.env['dns.zone'].create({'name': 'Zone'}) + return self.env['dns.record'].create({ + 'name': 'Test', + 'zone_id': self.zone.id, + 'type_id': self.type.id, + 'value': '192.168.1.1', + }) + + def test_invalid_value(self): + """ It should raise ValidationError on invalid value """ + record = self.new_record() + with self.assertRaises(ValidationError): + record.write({'value': 'Not an IP'}) diff --git a/connector_dns/tests/models/test_dns_record_type.py b/connector_dns/tests/models/test_dns_record_type.py new file mode 100644 index 0000000..cf362d2 --- /dev/null +++ b/connector_dns/tests/models/test_dns_record_type.py @@ -0,0 +1,19 @@ +# -*- coding: utf-8 -*- +# Copyright 2016 LasLabs Inc. +# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html). + +from openerp.tests.common import TransactionCase + + +class TestDNSRecordType(TransactionCase): + + def setUp(self): + super(TestDNSRecordType, self).setUp() + self.record = self.env.ref('connector_dns.type_a') + + def test_name_get(self): + """ It should conjoin code and name """ + self.assertEqual( + '%s - %s' % (self.record.code, self.record.name), + self.record.display_name, + ) diff --git a/connector_dns/tests/test_backend.py b/connector_dns/tests/test_backend.py deleted file mode 100644 index 4cfd53d..0000000 --- a/connector_dns/tests/test_backend.py +++ /dev/null @@ -1,66 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2015 Elico Corp -# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl). -import openerp.tests.common as common -from openerp.addons.connector.backend import Backend -from openerp.addons.connector.session import ConnectorSession -from openerp.addons.connector_dns.unit.binder import DNSModelBinder - - -class TestDNSBackend(common.TransactionCase): - """ - Test DNS Backend - """ - - def setUp(self): - super(TestDNSBackend, self).setUp() - self.service = "dns" - - def test_new_backend(self): - """ Create a backend""" - backend = Backend(self.service) - self.assertEqual(backend.service, self.service) - - def test_parent(self): - """ Bind the backend to a parent backend""" - backend = Backend(self.service) - child_backend = Backend(parent=backend) - self.assertEqual(child_backend.service, backend.service) - - def test_no_service(self): - """ Should raise an error because no service or parent is defined""" - with self.assertRaises(ValueError): - Backend() - - -class test_backend_register(common.TransactionCase): - """ Test registration of classes on the Backend""" - - def setUp(self): - super(test_backend_register, self).setUp() - self.service = 'dns' - self.parent = Backend(self.service) - self.backend = Backend(parent=self.parent) - self.session = ConnectorSession(self.cr, self.uid) - - def test_register_class(self): - class BenderBinder(DNSModelBinder): - _model_name = 'res.users' - - self.backend.register_class(BenderBinder) - ref = self.backend.get_class(DNSModelBinder, - self.session, - 'res.users') - self.assertEqual(ref, BenderBinder) - - def test_register_class_parent(self): - """ It should get the parent's class when no class is defined""" - - @self.parent - class FryBinder(DNSModelBinder): - _model_name = 'res.users' - - ref = self.backend.get_class(DNSModelBinder, - self.session, - 'res.users') - self.assertEqual(ref, FryBinder) diff --git 
a/connector_dns/tests/test_backend_adapter.py b/connector_dns/tests/test_backend_adapter.py
new file mode 100644
index 0000000..cefa482
--- /dev/null
+++ b/connector_dns/tests/test_backend_adapter.py
@@ -0,0 +1,65 @@
+# -*- coding: utf-8 -*-
+# Copyright 2016 LasLabs Inc.
+# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
+
+import mock
+import inspect
+
+from openerp.addons.connector_dns.unit import backend_adapter
+
+from .common import SetUpDNSBase
+
+
+_file = 'openerp.addons.connector_dns.unit.backend_adapter'
+
+
+class TestBackendAdapter(SetUpDNSBase):
+
+    CRUD_METHODS = [
+        'search',
+        'read',
+        'search_read',
+        'create',
+        'write',
+        'delete',
+        '_call',
+    ]
+
+    def setUp(self):
+        super(TestBackendAdapter, self).setUp()
+        backend_adapter.dnss = {}
+        self.model = self.env['dns.zone.bind']
+        self.Unit = backend_adapter.DNSAdapter
+
+    def _new_unit(self):
+        return self.Unit(
+            self.get_dns_helper(self.model._name),
+        )
+
+    @mock.patch('%s.DNSLocation' % _file)
+    def test_init_creates_location(self, location):
+        """ It should create a new ``DNSLocation`` on init """
+        unit = self._new_unit()
+        location.assert_called_once_with(
+            unit.backend_record.uri,
+            unit.backend_record.login,
+            unit.backend_record.password,
+        )
+
+    def test_init_sets_location(self):
+        """ It should set unit.DNS to the new DNSLocation """
+        unit = self._new_unit()
+        self.assertIsInstance(
+            unit.DNS,
+            backend_adapter.DNSLocation,
+        )
+
+    def test_not_implemented(self):
+        """ It should define CRUD methods and raise NotImplementedError """
+        unit = self._new_unit()
+        for method in self.CRUD_METHODS:
+            method = getattr(unit, method)
+            arg_spec = inspect.getargspec(method)
+            args = arg_spec.args[1:]
+            with self.assertRaises(NotImplementedError):
+                method(*args)
diff --git a/connector_dns/tests/test_base_exporter.py b/connector_dns/tests/test_base_exporter.py
new file mode 100644
index 0000000..d6007a3
--- /dev/null
+++ b/connector_dns/tests/test_base_exporter.py
@@ -0,0 +1,287 @@
+# -*- coding: utf-8 -*-
+# Copyright 2016 LasLabs Inc.
+# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
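# Illustrative sketch (not part of this patch): a backend-specific adapter
# would subclass DNSAdapter and fill in the CRUD methods exercised above.
# The REST endpoint layout, the read() signature, and the use of `requests`
# are assumptions made only for this example.
import requests

from openerp.addons.connector_dns.unit.backend_adapter import DNSAdapter


class ExampleZoneAdapter(DNSAdapter):
    """ Hypothetical adapter for a REST-style DNS provider. """
    _model_name = 'dns.zone.bind'

    def read(self, dns_id, attributes=None):
        # The backend record carries uri/login/password, as configured in
        # SetUpDNSBase.setUp() above.
        response = requests.get(
            '%s/zones/%s' % (self.backend_record.uri, dns_id),
            auth=(self.backend_record.login, self.backend_record.password),
        )
        response.raise_for_status()
        return response.json()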
+ +import mock +from contextlib import contextmanager + +from openerp import fields + +from openerp.addons.connector.exception import IDMissingInBackend + +from openerp.addons.connector_dns.unit import export_synchronizer + +from .common import SetUpDNSBase + + +model = 'openerp.addons.connector_dns.unit.export_synchronizer' + + +class TestBaseExporter(SetUpDNSBase): + + def setUp(self): + super(TestBaseExporter, self).setUp() + self.model = 'dns.zone.bind' + self.binding_id = 1234 + self.Exporter = export_synchronizer.DNSBaseExporter + + def _new_exporter(self, dns_id=None, binding_record=None, + binding_id=None, + ): + exporter = self.Exporter(self.get_dns_helper( + self.model + )) + exporter.dns_id = dns_id + exporter.binding_record = binding_record + exporter.binding_id = binding_id + self.exporter = exporter + return exporter + + def _new_record(self, sync_date=False): + rec = self.env[self.model].create({ + 'name': 'Test', + 'sync_date': sync_date, + 'dns_id_external': self.dns_id, + }) + self.binding_id = rec.id + return rec + + @contextmanager + def _mock_should_import(self, exporter): + with mock.patch.object(exporter, '_should_import') as mk: + with mock.patch.object(exporter, 'binder_for') as bind: + bind.return_value = self.get_mock_binder() + yield mk + + def test_exporter_init_binding_id(self): + """ It should init binding_id as None """ + exporter = self._new_exporter() + self.assertEqual(None, exporter.binding_id) + + def test_exporter_init_dns_id(self): + """ It should init dns_id as None """ + exporter = self._new_exporter() + self.assertEqual(None, exporter.dns_id) + + def test_delay_import_assets_dns_id(self): + """ It should not allow a false dns_id """ + exporter = self._new_exporter() + with self.assertRaises(AssertionError): + exporter._delay_import() + + @mock.patch('%s.import_record' % model) + def test_delay_import_delays_import(self, mk): + """ It should call delayed import w/ proper args """ + exporter = self._new_exporter(self.dns_id) + exporter._delay_import() + mk.delay.assert_called_once_with( + exporter.session, + exporter.model._name, + exporter.backend_record.id, + exporter.dns_id, + force=True, + ) + + def test_should_import_asserts_binding(self): + """ It should throw AssertionError on no binding_record """ + exporter = self._new_exporter() + with self.assertRaises(AssertionError): + exporter._should_import() + + def test_should_import_false_dns_id(self): + """ It should return False when no dns_id """ + exporter = self._new_exporter( + binding_record=self._new_record() + ) + res = exporter._should_import() + self.assertFalse(res) + + def test_should_import_no_previous_sync(self): + """ It should return True when there is not a previous sync """ + exporter = self._new_exporter( + dns_id=self.dns_id, + binding_record=self._new_record(), + ) + with self.mock_adapter(exporter, True): + res = exporter._should_import() + self.assertTrue(res) + + def test_should_import_no_updated_at(self): + """ It should return False when no updated_at col """ + exporter = self._new_exporter( + dns_id=self.dns_id, + binding_record=self._new_record('2016-06-12 00:00:00'), + ) + with self.mock_adapter(exporter, True) as adapter: + adapter.read.return_value = { + 'updated_at': False + } + res = exporter._should_import() + self.assertFalse(res) + + def test_should_import_not_changed(self): + """ It should return False if the record is not changed """ + expect = '2016-06-12 00:00:00' + exporter = self._new_exporter( + dns_id=self.dns_id, + 
binding_record=self._new_record(expect), + ) + with self.mock_adapter(exporter, True) as adapter: + adapter.read.return_value = { + 'updated_at': fields.Datetime.from_string( + expect + ) + } + res = exporter._should_import() + self.assertFalse(res) + + def test_get_odoo_data_browse(self): + """ It should browse model for binding """ + exporter = self._new_exporter( + dns_id=self.dns_id, + binding_record=self._new_record('2016-06-12 00:00:00'), + binding_id=self.binding_id, + ) + with mock.patch.object(exporter.connector_env, 'model') as mk: + exporter._get_odoo_data() + mk.browse.assert_called_once_with(self.binding_id) + + def test_get_odoo_data_return(self): + """ It should return browse record """ + exporter = self._new_exporter( + dns_id=self.dns_id, + binding_record=self._new_record('2016-06-12 00:00:00'), + binding_id=self.binding_id, + ) + with mock.patch.object(exporter.connector_env, 'model') as mk: + res = exporter._get_odoo_data() + self.assertEqual(mk.browse(), res) + + def test_run_sets_binding_id(self): + """ It should set binding_id on instance """ + exporter = self._new_exporter( + dns_id=self.dns_id, + binding_record=self._new_record('2016-06-12 00:00:00'), + ) + with mock.patch.object(exporter, '_get_odoo_data') as mk: + mk.side_effect = self.EndTestException + with self.assertRaises(self.EndTestException): + exporter.run(self.binding_id) + self.assertEqual( + self.binding_id, exporter.binding_id, + ) + + def test_run_should_import(self): + """ It should see if the record needs to be imported """ + exporter = self._new_exporter( + dns_id=self.dns_id, + binding_record=self._new_record('2016-06-12 00:00:00'), + ) + with self._mock_should_import(exporter) as mk: + mk.side_effect = self.EndTestException + with self.assertRaises(self.EndTestException): + exporter.run(self.binding_id) + + def test_run_should_import_missing_dns_id(self): + """ It should set dns_id to None if missing """ + exporter = self._new_exporter( + dns_id=self.dns_id, + binding_record=self._new_record('2016-06-12 00:00:00'), + ) + with self._mock_should_import(exporter) as mk: + with mock.patch.object(exporter, '_run') as run: + mk.side_effect = IDMissingInBackend + run.side_effect = self.EndTestException + with self.assertRaises(self.EndTestException): + exporter.run(self.binding_id) + self.assertEqual( + None, exporter.dns_id, + ) + + def test_run_should_import_true(self): + """ It should call delay import if should_import """ + exporter = self._new_exporter( + dns_id=self.dns_id, + binding_record=self._new_record('2016-06-12 00:00:00'), + ) + with self._mock_should_import(exporter) as should: + should.return_value = True + with mock.patch.object(exporter, '_delay_import') as mk: + mk.side_effect = self.EndTestException + with self.assertRaises(self.EndTestException): + exporter.run(self.binding_id) + + def test_run_calls_private_run(self): + """ It should call private run interface with args """ + exporter = self._new_exporter( + dns_id=self.dns_id, + binding_record=self._new_record('2016-06-12 00:00:00'), + ) + expect_list = [1, 2, 3] + expect_dict = {'1': 'test', '2': 'derp'} + with self._mock_should_import(exporter) as mk: + mk.return_value = False + with mock.patch.object(exporter, '_run') as mk: + mk.side_effect = self.EndTestException + with self.assertRaises(self.EndTestException): + exporter.run(self.binding_id, *expect_list, **expect_dict) + mk.assert_called_once_with(*expect_list, **expect_dict) + + def test_run_calls_bind(self): + """ It should call bind with proper args """ + exporter = 
self._new_exporter( + dns_id=self.dns_id, + binding_record=self._new_record('2016-06-12 00:00:00'), + ) + with self._mock_should_import(exporter) as mk: + mk.return_value = False + with mock.patch.object(exporter, '_run'): + with mock.patch.object(exporter, '_binder') as binder: + binder.bind.side_effect = self.EndTestException + with self.assertRaises(self.EndTestException): + exporter.run(self.binding_id) + binder.bind.assert_called_once_with( + u'%s' % self.dns_id, self.binding_id, + ) + + def test_run_commits_session(self): + """ It should commit session for export isolation """ + exporter = self._new_exporter( + dns_id=self.dns_id, + binding_record=self._new_record('2016-06-12 00:00:00'), + ) + with self._mock_should_import(exporter) as mk: + mk.return_value = False + with mock.patch.object(exporter, '_run'): + with mock.patch.object(exporter.binder, 'bind'): + with mock.patch.object(exporter, 'session') as session: + session.commit.side_effect = self.EndTestException + with self.assertRaises(self.EndTestException): + exporter.run(self.binding_id) + + def test_run_calls_after_export(self): + """ It should call _after_export when done """ + exporter = self._new_exporter( + dns_id=self.dns_id, + binding_record=self._new_record('2016-06-12 00:00:00'), + ) + with self._mock_should_import(exporter) as mk: + mk.return_value = False + with mock.patch.object(exporter, '_run'): + with mock.patch.object(exporter.binder, 'bind'): + with mock.patch.object(exporter, '_after_export') as mk: + mk.side_effect = self.EndTestException + with self.assertRaises(self.EndTestException): + exporter.run(self.binding_id) + + def test__run_exception(self): + """ Private run should not be implemented at this level """ + exporter = self._new_exporter() + with self.assertRaises(NotImplementedError): + exporter._run() + + def test_after_export(self): + """ It should return None """ + exporter = self._new_exporter() + res = exporter._after_export() + self.assertEqual(None, res) diff --git a/connector_dns/tests/test_batch_importer.py b/connector_dns/tests/test_batch_importer.py new file mode 100644 index 0000000..84fe894 --- /dev/null +++ b/connector_dns/tests/test_batch_importer.py @@ -0,0 +1,79 @@ +# -*- coding: utf-8 -*- +# Copyright 2016 LasLabs Inc. +# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html). 
+ +import mock + +from openerp.addons.connector_dns.unit import import_synchronizer + +from .common import SetUpDNSBase + +model = 'openerp.addons.connector_dns.unit.import_synchronizer' + + +class EndTestException(Exception): + pass + + +class TestBatchImporter(SetUpDNSBase): + + def setUp(self): + super(TestBatchImporter, self).setUp() + self.Importer = import_synchronizer.BatchImporter + self.model = 'dns.zone.bind' + self.mock_env = self.get_dns_helper( + self.model + ) + + def _new_importer(self, dns_id=None, dns_record=None): + importer = self.Importer(self.mock_env) + if dns_id is not None: + importer.dns_id = dns_id + if dns_record is not None: + importer.dns_record = dns_record + return importer + + def test_run_search_no_filter(self): + """ It should create a blank dict on no filter """ + expect = {} + importer = self._new_importer() + with self.mock_adapter(importer): + importer.backend_adapter.search.side_effect = EndTestException + with self.assertRaises(EndTestException): + importer.run() + importer.backend_adapter.search.assert_called_once_with( + **expect + ) + + def test_run_search(self): + """ It should search backend adapter w/ filters """ + expect = {'expect': 1234, 'test': 45456} + importer = self._new_importer() + with self.mock_adapter(importer): + with mock.patch.object(importer, '_import_record'): + importer.backend_adapter.search.side_effect = EndTestException + with self.assertRaises(EndTestException): + importer.run(expect) + importer.backend_adapter.search.assert_called_once_with( + **expect + ) + + def test_run_import(self): + """ It should import record """ + expect = ['expect'] + importer = self._new_importer() + with self.mock_adapter(importer): + with mock.patch.object(importer, '_import_record'): + importer.backend_adapter.search.return_value = expect + importer._import_record.side_effect = EndTestException + with self.assertRaises(EndTestException): + importer.run() + importer._import_record.assert_called_once_with( + expect[0] + ) + + def test_import_record(self): + """ It should raise NotImplemented on base class """ + importer = self._new_importer() + with self.assertRaises(NotImplementedError): + importer._import_record(True) diff --git a/connector_dns/tests/test_binder.py b/connector_dns/tests/test_binder.py index ee39b87..71e964e 100644 --- a/connector_dns/tests/test_binder.py +++ b/connector_dns/tests/test_binder.py @@ -1,57 +1,74 @@ # -*- coding: utf-8 -*- +# Copyright 2016 LasLabs Inc. +# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html). 
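# Illustrative sketch (not part of this patch): the BatchImporter.run()
# behaviour implied by the tests above -- search the backend adapter with the
# given filters and hand every result to _import_record(), which subclasses
# implement. This is a reconstruction for readability, not the module's code.
class BatchImporterSketch(object):

    def __init__(self, backend_adapter):
        self.backend_adapter = backend_adapter

    def run(self, filters=None):
        # A missing filter becomes an empty kwargs dict
        # (see test_run_search_no_filter above).
        if filters is None:
            filters = {}
        for record_id in self.backend_adapter.search(**filters):
            self._import_record(record_id)

    def _import_record(self, record_id):
        # DirectBatchImporter imports immediately, while DelayedBatchImporter
        # queues import_record.delay() instead; see test_direct_batch_importer.py
        # and test_delayed_batch_importer.py later in this patch.
        raise NotImplementedError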
import mock -import openerp -from openerp.addons.connector.backend import Backend -from openerp.addons.connector_dns.unit.binder import DNSModelBinder -from openerp.addons.connector.connector import ConnectorEnvironment -from openerp.addons.connector.session import ConnectorSession -from openerp.tests.common import TransactionCase - - -@openerp.tests.common.at_install(False) -@openerp.tests.common.post_install(True) -class TestDNSModelBinder(TransactionCase): - """ Test the DNS Model binder implementation""" +from openerp.models import BaseModel + +from openerp.addons.connector_dns.unit import binder + +from .common import SetUpDNSBase + + +_file = 'openerp.addons.connector_dns.unit.binder' + + +class TestBinder(SetUpDNSBase): + def setUp(self): - super(TestDNSModelBinder, self).setUp() - - class TestDNSBinder(DNSModelBinder): - """ - we use already existing fields for the binding - """ - _model_name = 'dns.binding' - _external_field = 'ref' - _sync_date_field = 'date' - _backend_field = 'color' - _openerp_field = 'id' - - self.session = ConnectorSession(self.cr, self.uid) - self.backend = Backend('dummy', version='1.0') - backend_record = mock.Mock() - backend_record.id = 1 - backend_record.get_backend.return_value = self.backend - self.connector_env = ConnectorEnvironment( - backend_record, self.session, 'dns.binding') - self.test_dns_binder = TestDNSBinder(self.connector_env) - - def test_binder(self): - """ Small scenario with the default binder """ - dns_model = mock.Mock() - dns_model.id = 0 - dns_model.dns_id = 0 - # bind the main partner to external id = 0 - self.test_dns_binder.bind(0, dns_model.id) - # find the openerp partner bound to external partner 0 - self.test_dns_binder.to_openerp = mock.Mock() - self.test_dns_binder.to_openerp.return_value.id = 0 - openerp_id = self.test_dns_binder.to_openerp(0) - self.assertEqual(openerp_id.id, dns_model.id) - openerp_id = self.test_dns_binder.to_openerp(0, unwrap=True) - self.assertEqual(openerp_id.id, dns_model.id) - self.test_dns_binder.to_backend = mock.Mock() - self.test_dns_binder.to_backend.return_value = '0' - external_id = self.test_dns_binder.to_backend(dns_model.id) - self.assertEqual(external_id, '0') - external_id = self.test_dns_binder.to_backend(dns_model.id, wrap=True) - self.assertEqual(external_id, '0') + super(TestBinder, self).setUp() + self.model = 'dns.zone.bind' + self.dns_id = 1234567 + self.Binder = binder.DNSModelBinder + + def _new_binder(self): + return self.Binder(self.get_dns_helper( + self.model + )) + + def test_bind_super(self): + """ It should call super w/ proper args """ + expect = mock.MagicMock(), mock.MagicMock() + with mock.patch.object(binder.Binder, 'bind') as mk: + _binder = self._new_binder() + _binder.bind(*expect) + mk.assert_called_once_with(*expect) + + def test_bind_fail_write_no_export(self): + """ It should set no export context on failure write """ + expect = mock.MagicMock(), mock.MagicMock(spec=BaseModel) + with mock.patch.object(binder.Binder, 'bind') as mk: + _binder = self._new_binder() + mk.side_effect = AssertionError + _binder.bind(*expect) + expect[1].with_context.assert_called_once_with( + connector_no_export=True, + ) + + @mock.patch('%s.fields' % _file) + def test_bind_handles_assertion_fail(self, fields): + """ It should write fail time to bind record """ + expect = mock.MagicMock(), mock.MagicMock(spec=BaseModel) + with mock.patch.object(binder.Binder, 'bind') as mk: + _binder = self._new_binder() + mk.side_effect = AssertionError + _binder.bind(*expect) + 
expect[1].with_context().write.assert_called_once_with({ + _binder._fail_date_field: fields.Datetime.now(), + }) + + def test_bind_fail_write_int(self): + """ It should browse on model if not instance of BaseModel """ + expect = mock.MagicMock(), mock.MagicMock() + with mock.patch.object(binder.Binder, 'bind') as mk: + _binder = self._new_binder() + mk.side_effect = AssertionError + with mock.patch.object(_binder.connector_env, 'model') as model: + _binder.bind(*expect) + model.browse.assert_called_once_with(expect[1]) + + def test_external_date_method(self): + """ It should return input arg """ + expect = mock.MagicMock() + res = self._new_binder()._external_date_method(expect) + self.assertEqual(expect, res) diff --git a/connector_dns/tests/test_connector.py b/connector_dns/tests/test_connector.py new file mode 100644 index 0000000..69c9cc8 --- /dev/null +++ b/connector_dns/tests/test_connector.py @@ -0,0 +1,98 @@ +# -*- coding: utf-8 -*- +# Copyright 2016 LasLabs Inc. +# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html). + +import mock + +from openerp.addons.connector_dns import connector + +from .common import SetUpDNSBase + + +mk_file = 'openerp.addons.connector_dns.connector' + + +class EndTestException(Exception): + pass + + +class TestConnector(SetUpDNSBase): + + def setUp(self): + super(TestConnector, self).setUp() + self.model = 'dns.zone.bind' + self.binding_id = self._new_record() + self.session = mock.MagicMock() + + def _new_record(self): + return self.env[self.model].create({ + 'name': 'Test DNS', + 'dns_id_external': self.dns_id, + }) + + def test_default_dns_backend_id(self): + self.assertEqual(self.backend, self.binding_id.dns_backend_id) + + def test_get_environment_gets_backend_record(self): + """ It should browse for backend_record for id """ + mk = self.session.env['dns.backend'].browse + mk.side_effect = EndTestException + with self.assertRaises(EndTestException): + connector.get_environment( + self.session, self.model, self.binding_id.dns_backend_id.id, + ) + mk.assert_called_once_with(self.binding_id.dns_backend_id.id) + + def test_get_environment_creates_environment(self): + """ It should create environment for binding """ + with mock.patch('%s.Environment' % mk_file) as env: + env.side_effect = EndTestException + with self.assertRaises(EndTestException): + connector.get_environment( + self.session, + self.model, + self.binding_id.dns_backend_id.id, + ) + env.assert_called_once_with( + self.session.env['dns.backend'].browse(), + self.session, + self.model, + ) + + def test_get_environment_return(self): + """ It should return new environment """ + with mock.patch('%s.Environment' % mk_file) as env: + res = connector.get_environment( + self.session, self.model, self.binding_id.dns_backend_id.id, + ) + self.assertEqual(env(), res) + + def test_add_checkpoint_call(self): + """ It should call add_checkpoint w/ proper args """ + with mock.patch('%s.checkpoint' % mk_file) as mk: + mk.add_checkpoint.side_effect = EndTestException + with self.assertRaises(EndTestException): + connector.add_checkpoint( + self.session, + self.model, + self.binding_id.id, + self.binding_id.dns_backend_id.id, + ) + mk.add_checkpoint.assert_called_once_with( + self.session, + self.model, + self.binding_id.id, + 'dns.backend', + self.binding_id.dns_backend_id.id, + ) + + def test_add_checkpoint_return(self): + """ It should return new checkpoint """ + with mock.patch('%s.checkpoint' % mk_file) as mk: + res = connector.add_checkpoint( + self.session, + self.model, + 
self.binding_id.id, + self.binding_id.dns_backend_id.id, + ) + self.assertEqual(mk.add_checkpoint(), res) diff --git a/connector_dns/tests/test_consumer.py b/connector_dns/tests/test_consumer.py new file mode 100644 index 0000000..e41f0be --- /dev/null +++ b/connector_dns/tests/test_consumer.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2016 LasLabs Inc. +# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html). + +import mock + +from openerp.addons.connector_dns import consumer + +from .common import SetUpDNSBase + + +mk_file = 'openerp.addons.connector_dns.consumer' + + +class TestConsumer(SetUpDNSBase): + + def setUp(self): + super(TestConsumer, self).setUp() + self.model = 'dns.zone.bind' + self.binding_id = self._new_record() + + def _new_record(self, bind=True): + return self.env[self.model].create({ + 'name': 'Test DNS', + 'dns_id_external': self.dns_id if bind else None, + }) + + def test_delay_export_context_no_export(self): + """ It should not export if context prohibits """ + self.session = mock.MagicMock() + self.session.context = {'connector_no_export': True} + res = consumer.delay_export(self.session, 0, 0, 0) + self.assertEqual(None, res) + + def test_delay_export(self): + """ It should call export_record.delay w/ proper args """ + fields = {'test': 123, 'test2': 456} + expect = [self.session, self.model, self.binding_id] + with mock.patch('%s.export_record' % mk_file) as mk: + consumer.delay_export(*expect, vals=fields) + mk.delay.assert_called_once_with(*expect, fields=fields.keys()) + + def test_delay_export_all_bindings_context_no_export(self): + """ It should not export if context prohibits """ + self.session = mock.MagicMock() + self.session.context = {'connector_no_export': True} + res = consumer.delay_export_all_bindings(self.session, 0, 0, 0) + self.assertEqual(None, res) + + def test_delay_export_all_bindings(self): + """ It should call export_record.delay w/ proper args """ + fields = {'test': 123, 'test2': 456} + send = [self.session, 'dns.zone', self.binding_id.odoo_id.id] + expect = [self.session, self.model, self.binding_id.id] + with mock.patch('%s.export_record' % mk_file) as mk: + consumer.delay_export_all_bindings(*send, vals=fields) + mk.delay.assert_called_once_with(*expect, fields=fields.keys()) diff --git a/connector_dns/tests/test_delayed_batch_importer.py b/connector_dns/tests/test_delayed_batch_importer.py new file mode 100644 index 0000000..09763fb --- /dev/null +++ b/connector_dns/tests/test_delayed_batch_importer.py @@ -0,0 +1,46 @@ +# -*- coding: utf-8 -*- +# Copyright 2016 LasLabs Inc. +# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html). 
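# Illustrative sketch (not part of this patch): how a consumer such as
# delay_export_all_bindings() is conventionally wired to record events in the
# OCA connector framework. The decorator usage, the model list, and the
# import path of export_record are assumptions; the connector_no_export guard
# and the per-binding delayed export are taken from the tests above.
from openerp.addons.connector.event import on_record_write

# Hypothetical import path -- the tests only show that consumer.py exposes a
# name called export_record.
from openerp.addons.connector_dns.unit.export_synchronizer import export_record


@on_record_write(model_names=['dns.zone'])
def delay_export_all_bindings_sketch(session, model_name, record_id, vals):
    if session.context.get('connector_no_export'):
        return
    record = session.env[model_name].browse(record_id)
    for binding in record.dns_bind_ids:
        export_record.delay(session, binding._name, binding.id,
                            fields=vals.keys())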
+ +import mock + +from openerp.addons.connector_dns.unit import import_synchronizer + +from .common import SetUpDNSBase + +model = 'openerp.addons.connector_dns.unit.import_synchronizer' + + +class TestDelayedBatchImporter(SetUpDNSBase): + + def setUp(self): + super(TestDelayedBatchImporter, self).setUp() + self.Importer = import_synchronizer.DelayedBatchImporter + self.model = 'dns.zone.bind' + self.mock_env = self.get_dns_helper( + self.model + ) + + def _new_importer(self, dns_id=None, dns_record=None): + importer = self.Importer(self.mock_env) + if dns_id is not None: + importer.dns_id = dns_id + if dns_record is not None: + importer.dns_record = dns_record + return importer + + def test_import_record(self): + """ It should call import_record w/ proper args """ + importer = self._new_importer() + expect = 'expect' + kwargs = {'test1': 1234, 'test2': 5678} + with mock.patch('%s.import_record' % model) as mk: + with mock.patch('%s.int_or_str' % model) as int_or_str: + importer._import_record(expect, **kwargs) + mk.delay.assert_called_once_with( + importer.session, + importer.model._name, + importer.backend_record.id, + int_or_str(expect), + **kwargs + ) diff --git a/connector_dns/tests/test_direct_batch_importer.py b/connector_dns/tests/test_direct_batch_importer.py new file mode 100644 index 0000000..7f1ddee --- /dev/null +++ b/connector_dns/tests/test_direct_batch_importer.py @@ -0,0 +1,44 @@ +# -*- coding: utf-8 -*- +# Copyright 2016 LasLabs Inc. +# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html). + +import mock + +from openerp.addons.connector_dns.unit import import_synchronizer + +from .common import SetUpDNSBase + +model = 'openerp.addons.connector_dns.unit.import_synchronizer' + + +class TestDirectBatchImporter(SetUpDNSBase): + + def setUp(self): + super(TestDirectBatchImporter, self).setUp() + self.Importer = import_synchronizer.DirectBatchImporter + self.model = 'dns.zone.bind' + self.mock_env = self.get_dns_helper( + self.model + ) + + def _new_importer(self, dns_id=None, dns_record=None): + importer = self.Importer(self.mock_env) + if dns_id is not None: + importer.dns_id = dns_id + if dns_record is not None: + importer.dns_record = dns_record + return importer + + def test_import_record(self): + """ It should call import_record w/ proper args """ + importer = self._new_importer() + expect = 'expect' + with mock.patch('%s.import_record' % model) as mk: + with mock.patch('%s.int_or_str' % model) as int_or_str: + importer._import_record(expect) + mk.assert_called_once_with( + importer.session, + importer.model._name, + importer.backend_record.id, + int_or_str(expect) + ) diff --git a/connector_dns/tests/test_dns_deleter.py b/connector_dns/tests/test_dns_deleter.py new file mode 100644 index 0000000..c548e6f --- /dev/null +++ b/connector_dns/tests/test_dns_deleter.py @@ -0,0 +1,47 @@ +# -*- coding: utf-8 -*- +# Copyright 2016 LasLabs Inc. +# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html). 
+ +from openerp.addons.connector_dns.unit import delete_synchronizer + +from .common import SetUpDNSBase + + +model = 'openerp.addons.connector_dns.unit.delete_synchronizer' + + +class EndTestException(Exception): + pass + + +class TestDNSDeleter(SetUpDNSBase): + + def setUp(self): + super(TestDNSDeleter, self).setUp() + self.model = 'dns.zone.bind' + self.dns_id = 'dns_id' + self.binding_id = 1234 + self.Exporter = delete_synchronizer.DNSDeleter + + def _new_exporter(self, dns_id=None, binding_record=None, + binding_id=None, + ): + exporter = self.Exporter(self.get_dns_helper( + self.model + )) + exporter.dns_id = dns_id + exporter.binding_record = binding_record + exporter.binding_id = binding_id + return exporter + + def _new_record(self, sync_date=False): + return self.env[self.model].create({ + 'name': 'Test', + 'sync_date': sync_date, + 'warehouse_id': self.env.ref('stock.warehouse0').id, + }) + + def test_run_not_implemented(self): + """ It should raise NotImplementedError """ + with self.assertRaises(NotImplementedError): + self._new_exporter().run(True) diff --git a/connector_dns/tests/test_dns_exporter.py b/connector_dns/tests/test_dns_exporter.py new file mode 100644 index 0000000..950c1f0 --- /dev/null +++ b/connector_dns/tests/test_dns_exporter.py @@ -0,0 +1,466 @@ +# -*- coding: utf-8 -*- +# Copyright 2016 LasLabs Inc. +# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html). + +import mock +import psycopg2 +from contextlib import contextmanager + +from openerp import _ + +from openerp.addons.connector_dns.unit import export_synchronizer + +from .common import SetUpDNSBase + +model = 'openerp.addons.connector_dns.unit.export_synchronizer' + + +@contextmanager +def mock_retryable_job_error(): + with mock.patch('%s.RetryableJobError' % model) as mk: + yield mk + + +class UniqueViolationException(psycopg2.IntegrityError): + def __init__(self, pgcode=psycopg2.errorcodes.UNIQUE_VIOLATION): + self.pgcode = pgcode + + +class TestDNSExporter(SetUpDNSBase): + + def setUp(self): + super(TestDNSExporter, self).setUp() + self.model = 'dns.zone.bind' + self.binding_id = 1234 + self.Exporter = export_synchronizer.DNSExporter + + @contextmanager + def mock_base_exporter(self, obj, patches=None, add=True): + """ Inject mock as only parent to DNSExporter + Normal method of injection would not work due to super raising + ``TypeError: must be type, not MagicMock`` + """ + _patches = [ + 'binder_for', + 'unit_for', + 'session', + '_mapper', + ] + if patches: + if add: + patches = _patches + patches + else: + patches = _patches + patches = {p: mock.DEFAULT for p in patches} + with mock.patch.multiple(obj, **patches) as mk: + if mk.get('binder_for'): + mk['binder_for'] = self.get_mock_binder() + yield mk + + def _new_exporter(self, dns_id=None, binding_record=None, + binding_id=None, + ): + self.mock_env = self.get_dns_helper( + self.model + ) + exporter = self.Exporter(self.mock_env) + exporter.dns_id = dns_id + exporter.binding_record = binding_record + exporter.binding_id = binding_id + return exporter + + def _new_record(self): + return self.env[self.model].create({ + 'name': 'Test', + }) + + def test_lock_sql(self): + """ It should attempt proper SQL execution """ + exporter = self._new_exporter(binding_id=self.binding_id) + with self.mock_base_exporter(exporter): + exporter.session.cr.execute.side_effect = self.EndTestException + with self.assertRaises(self.EndTestException): + exporter._lock() + exporter.session.cr.execute.assert_called_once_with( + "SELECT id FROM %s WHERE ID = 
%%s FOR UPDATE NOWAIT" % ( + self.model.replace('.', '_'), + ), + (self.binding_id, ), + log_exceptions=False, + ) + + # def test_lock_retryable(self): + # """ It should attempt proper SQL execution """ + # exporter = self._new_exporter() + # with self.mock_base_exporter(exporter): + # with mock_retryable_job_error() as err: + # exporter.session.cr.execute.side_effect = \ + # psycopg2.OperationalError + # with self.assertRaises(err): + # exporter._lock() + + def test_has_to_skip(self): + """ It should return False """ + exporter = self._new_exporter() + with self.mock_base_exporter(exporter): + res = exporter._has_to_skip() + self.assertFalse(res) + + def test_export_dependency_no_relation(self): + """ It should return None when no relation """ + exporter = self._new_exporter() + with self.mock_base_exporter(exporter): + res = exporter._export_dependency(None, None) + self.assertEqual(None, res) + + def test_export_dependency_gets_binder(self): + """ It should get binder for model """ + expect = self._new_record() + exporter = self._new_exporter() + with self.mock_base_exporter(exporter): + exporter.binder_for.side_effect = self.EndTestException + with self.assertRaises(self.EndTestException): + exporter._export_dependency(expect, self.model) + exporter.binder_for.assert_called_once_with(self.model) + + def test_export_dependency_wrap_search(self): + """ It should perform query for binding record when wrapped """ + rec_id = self._new_record() + expect = rec_id.odoo_id + exporter = self._new_exporter() + with self.mock_base_exporter(exporter): + with mock.patch.object(exporter.session, 'env'): + search = exporter.env[self.model].search + search.side_effect = self.EndTestException + with self.assertRaises(self.EndTestException): + exporter._export_dependency(expect, self.model) + search.assert_called_once_with([ + (exporter.binder_for()._openerp_field, '=', expect.id), + (exporter.binder_for()._backend_field, + '=', exporter.backend_record.id), + ]) + + def test_export_dependency_wrap_multiple_results(self): + """ It should assert max of one binding result """ + expect = self._new_record().odoo_id + exporter = self._new_exporter() + with self.mock_base_exporter(exporter): + with mock.patch.object(exporter.session, 'env'): + search = exporter.env[self.model].search + search.return_value = [1, 2] + with self.assertRaises(AssertionError): + exporter._export_dependency(expect, self.model) + + def test_export_dependency_unwrapped(self): + """ It should call to_backend with proper args from unwrapped """ + expect = self._new_record() + exporter = self._new_exporter() + with self.mock_base_exporter(exporter): + to_backend = exporter.binder_for().to_backend + to_backend.side_effect = self.EndTestException + with self.assertRaises(self.EndTestException): + exporter._export_dependency(expect, self.model) + to_backend.assert_called_once_with( + expect, + wrap=False + ) + + def test_export_dependency_run_no_force(self): + """ It should not trigger export when not forced and existing """ + expect = self._new_record() + exporter = self._new_exporter() + with self.mock_base_exporter(exporter): + to_backend = exporter.binder_for().to_backend + to_backend.return_value = True + exporter._export_dependency(expect, self.model, force=False) + exporter.unit_for().run.assert_not_called() + + def test_export_dependency_run_force(self): + """ It should trigger export when forced and existing """ + expect = self._new_record() + exporter = self._new_exporter() + with self.mock_base_exporter(exporter): + to_backend = 
exporter.binder_for().to_backend + to_backend.return_value = True + exporter._export_dependency(expect, self.model, force=True) + exporter.unit_for().run.assert_called_once_with(expect.id) + + def test_export_dependency_run_no_exist(self): + """ It should trigger export when not forced and not existing """ + expect = self._new_record() + exporter = self._new_exporter() + with self.mock_base_exporter(exporter): + to_backend = exporter.binder_for().to_backend + to_backend.return_value = False + exporter._export_dependency(expect, self.model, force=False) + exporter.unit_for().run.assert_called_once_with(expect.id) + + def test_export_dependencies(self): + """ It should return None """ + res = self._new_exporter()._export_dependencies() + self.assertEqual(None, res) + + def test_map_data_call(self): + """ It should get map record for binding record """ + exporter = self._new_exporter() + with self.mock_base_exporter(exporter): + exporter._map_data() + exporter.mapper.map_record.assert_called_once_with( + exporter.binding_record + ) + + def test_map_data_return(self): + """ It should return map record for binding record """ + exporter = self._new_exporter() + with self.mock_base_exporter(exporter): + res = exporter._map_data() + self.assertEqual(exporter.mapper.map_record(), res) + + def test_validate_create_data(self): + """ It should return None """ + res = self._new_exporter()._validate_create_data(True) + self.assertEqual(None, res) + + def test_validate_update_data(self): + """ It should return None """ + res = self._new_exporter()._validate_update_data(True) + self.assertEqual(None, res) + + def test_create_data_call(self): + """ It should inject proper vals into map record """ + map_record = mock.MagicMock() + expect = {'test': 123, 'test2': 456} + fields = expect.keys() + self._new_exporter(self.dns_id)._create_data( + map_record, fields, **expect + ) + map_record.values.assert_called_once_with( + for_create=True, fields=fields, **expect + ) + + def test_create_data_return(self): + """ It should inject proper vals into map record """ + map_record = mock.MagicMock() + res = self._new_exporter(self.dns_id)._create_data(map_record) + self.assertEqual(map_record.values(), res) + + def test_create_validates_data(self): + """ It should validate data """ + expect = 'expect' + exporter = self._new_exporter() + with self.mock_base_exporter(exporter, ['_validate_create_data']): + exporter._validate_create_data.side_effect = self.EndTestException + with self.assertRaises(self.EndTestException): + exporter._create(expect) + exporter._validate_create_data.assert_called_once_with(expect) + + def test_create_does_create(self): + """ It should create remote record w/ data """ + expect = 'expect' + exporter = self._new_exporter() + with self.mock_adapter(exporter) as mk: + mk.create.side_effect = self.EndTestException + with self.assertRaises(self.EndTestException): + exporter._create(expect) + mk.create.assert_called_once_with(expect) + + def test_create_returns_binding(self): + """ It should return new binding """ + exporter = self._new_exporter() + with self.mock_adapter(exporter) as mk: + res = exporter._create(None) + self.assertEqual( + mk.create(), res + ) + + def test_update_data_call(self): + """ It should inject proper vals into map record """ + map_record = mock.MagicMock() + expect = {'test': 123, 'test2': 456} + fields = expect.keys() + self._new_exporter(self.dns_id)._update_data( + map_record, fields, **expect + ) + map_record.values.assert_called_once_with(fields=fields, **expect) + + 
def test_update_data_return(self): + """ It should inject proper vals into map record """ + map_record = mock.MagicMock() + res = self._new_exporter(self.dns_id)._update_data(map_record) + self.assertEqual(map_record.values(), res) + + def test_update_does_write(self): + """ It should update binding w/ data """ + expect = 'expect' + mk = mock.MagicMock() + exporter = self._new_exporter(dns_id=self.dns_id) + with self.mock_adapter(exporter) as mk: + mk.write.side_effect = self.EndTestException + with self.assertRaises(self.EndTestException): + exporter._update(expect) + mk.write.assert_called_once_with(self.dns_id, expect) + + def test_run_assert_binding_id(self): + """ It should assert existing binding_id """ + exporter = self._new_exporter(binding_record=True) + with self.assertRaises(AssertionError): + exporter._run() + + def test_run_assert_binding_id(self): + """ It should assert existing binding_record """ + exporter = self._new_exporter(binding_id=True) + with self.assertRaises(AssertionError): + exporter._run() + + def test_run_has_to_skip(self): + """ It should return None if _has_to_skip """ + exporter = self._new_exporter(binding_id=True, binding_record=True) + with self.mock_base_exporter(exporter, ['_has_to_skip']): + exporter._has_to_skip.return_value = True + res = exporter._run() + self.assertEqual(None, res) + + def test_run_export_dependencies(self): + """ It should first export dependencies """ + exporter = self._new_exporter(binding_id=True, binding_record=True) + with self.mock_base_exporter(exporter, ['_export_dependencies', + '_has_to_skip', + ]): + exporter._has_to_skip.return_value = False + exporter._export_dependencies.side_effect = self.EndTestException + with self.assertRaises(self.EndTestException): + exporter._run() + + def test_run_lock(self): + """ It should call lock """ + exporter = self._new_exporter(binding_id=True, binding_record=True) + with self.mock_base_exporter(exporter, ['_has_to_skip', + '_lock', + ]): + exporter._has_to_skip.return_value = False + exporter._lock.side_effect = self.EndTestException + with self.assertRaises(self.EndTestException): + exporter._run() + + def test_run_map_data(self): + """ It should get map_data """ + exporter = self._new_exporter(binding_id=True, binding_record=True) + with self.mock_base_exporter(exporter, ['_export_dependencies', + '_has_to_skip', + '_lock', + '_map_data', + ]): + exporter._has_to_skip.return_value = False + exporter._map_data.side_effect = self.EndTestException + with self.assertRaises(self.EndTestException): + exporter._run() + + def test_run_map_data(self): + """ It should get map_data """ + exporter = self._new_exporter(binding_id=True, binding_record=True) + with self.mock_base_exporter(exporter, ['_has_to_skip', + '_lock', + '_map_data', + ]): + exporter._has_to_skip.return_value = False + exporter._map_data.side_effect = self.EndTestException + with self.assertRaises(self.EndTestException): + exporter._run() + + def test_run_update_data(self): + """ It should identify data to update on pre-existing binds """ + exporter = self._new_exporter(self.dns_id, True, True) + expect = ['test1', 'test2'] + with self.mock_base_exporter(exporter, ['_has_to_skip', + '_lock', + '_map_data', + '_update_data', + ]): + exporter._has_to_skip.return_value = False + exporter._update_data.side_effect = self.EndTestException + with self.assertRaises(self.EndTestException): + exporter._run(expect) + exporter._update_data.assert_called_once_with( + exporter._map_data(), fields=expect + ) + + def 
test_run_update_no_record(self): + """ It should identify data to update on pre-existing binds """ + exporter = self._new_exporter(self.dns_id, True, True) + with self.mock_base_exporter(exporter, ['_has_to_skip', + '_lock', + '_map_data', + '_update_data', + ]): + exporter._has_to_skip.return_value = False + exporter._update_data.return_value = False + res = exporter._run() + self.assertEqual( + _('Nothing to export.'), res, + ) + + def test_run_update_no_record(self): + """ It should identify data to update on pre-existing binds """ + exporter = self._new_exporter(self.dns_id, True, True) + with self.mock_base_exporter(exporter, ['_has_to_skip', + '_lock', + '_map_data', + '_update_data', + '_update', + ]): + exporter._has_to_skip.return_value = False + exporter._update.side_effect = self.EndTestException + with self.assertRaises(self.EndTestException): + exporter._run() + exporter._update.assert_called_once_with( + exporter._update_data() + ) + + def test_run_create_data(self): + """ It should identify data to create on pre-existing binds """ + exporter = self._new_exporter(False, True, True) + expect = ['test1', 'test2'] + with self.mock_base_exporter(exporter, ['_has_to_skip', + '_lock', + '_map_data', + '_create_data', + ]): + exporter._has_to_skip.return_value = False + exporter._create_data.side_effect = self.EndTestException + with self.assertRaises(self.EndTestException): + exporter._run(expect) + exporter._create_data.assert_called_once_with( + exporter._map_data(), fields=None + ) + + def test_run_create_no_record(self): + """ It should identify data to create on pre-existing binds """ + exporter = self._new_exporter(False, True, True) + with self.mock_base_exporter(exporter, ['_has_to_skip', + '_lock', + '_map_data', + '_create_data', + ]): + exporter._has_to_skip.return_value = False + exporter._create_data.return_value = False + res = exporter._run() + self.assertEqual( + _('Nothing to export.'), res, + ) + + def test_run_create_no_record(self): + """ It should identify data to create on pre-existing binds """ + exporter = self._new_exporter(False, True, True) + with self.mock_base_exporter(exporter, ['_has_to_skip', + '_lock', + '_map_data', + '_create_data', + '_create', + ]): + exporter._has_to_skip.return_value = False + exporter._create.side_effect = self.EndTestException + with self.assertRaises(self.EndTestException): + exporter._run() + exporter._create.assert_called_once_with( + exporter._create_data() + ) diff --git a/connector_dns/tests/test_dns_import_mapper.py b/connector_dns/tests/test_dns_import_mapper.py new file mode 100644 index 0000000..fe6c831 --- /dev/null +++ b/connector_dns/tests/test_dns_import_mapper.py @@ -0,0 +1,25 @@ +# -*- coding: utf-8 -*- +# Copyright 2016 LasLabs Inc. +# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html). 
+ +from openerp.addons.connector_dns.unit import mapper + +from .common import SetUpDNSBase + + +class TestDNSImporterMapper(SetUpDNSBase): + + def setUp(self): + super(TestDNSImporterMapper, self).setUp() + self.Importer = mapper.DNSImportMapper + self.model = 'dns.zone.bind' + self.mock_env = self.get_dns_helper( + self.model + ) + self.importer = self.Importer(self.mock_env) + + def test_dns_backend_id(self): + """ It should map backend_id correctly """ + res = self.importer.dns_backend_id(True) + expect = {'dns_backend_id': self.importer.backend_record.id} + self.assertDictEqual(expect, res) diff --git a/connector_dns/tests/test_dns_importer.py b/connector_dns/tests/test_dns_importer.py new file mode 100644 index 0000000..c0e6b9c --- /dev/null +++ b/connector_dns/tests/test_dns_importer.py @@ -0,0 +1,500 @@ +# -*- coding: utf-8 -*- +# Copyright 2016 LasLabs Inc. +# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html). + +import mock +from contextlib import contextmanager + +from openerp import fields, _ + +from openerp.addons.connector_dns.unit import import_synchronizer + +from .common import SetUpDNSBase + +model = 'openerp.addons.connector_dns.unit.import_synchronizer' + + +class TestDNSImporter(SetUpDNSBase): + + def setUp(self): + super(TestDNSImporter, self).setUp() + self.model = 'dns.zone.bind' + self.dns_id = 'dns_id' + self.dns_record = { + 'updated_at': fields.Datetime.from_string('2016-05-10 00:00:00'), + } + self.binding_id = 1234 + self.Importer = import_synchronizer.DNSImporter + self.mock_env = self.get_dns_helper( + self.model + ) + + @contextmanager + def mock_base_importer(self, obj, patches=None, add=True): + """ Inject mock as only parent to DNSExporter + Normal method of injection would not work due to super raising + ``TypeError: must be type, not MagicMock`` + """ + _patches = [ + 'binder_for', + 'unit_for', + '_validate_data', + 'advisory_lock_or_retry', + '_must_skip', + '_before_import', + '_get_dns_data', + ] + if patches: + if add: + patches = _patches + patches + else: + patches = _patches + patches = {p: mock.DEFAULT for p in patches} + with mock.patch.multiple(obj, **patches) as mk: + if mk.get('binder_for'): + mk['binder_for'].return_value = self.get_mock_binder() + yield mk + + def _new_record(self, sync_date='2016-01-01 00:00:00'): + return self.env[self.model].create({ + 'name': 'Test', + 'sync_date': sync_date, + }) + + def _new_importer(self, dns_id=None, dns_record=None): + importer = self.Importer(self.mock_env) + if dns_id is not None: + importer.dns_id = dns_id + if dns_record is not None: + importer.dns_record = dns_record + return importer + + def test_int_or_str_int(self): + """ It should return an int when parseable as such """ + expect = 12345 + res = import_synchronizer.int_or_str(str(expect)) + self.assertEqual(expect, res) + + def test_int_or_str_str(self): + """ It should return a string when not parseable as int """ + expect = mock + res = import_synchronizer.int_or_str(expect) + self.assertEqual(str(expect), res) + + def test_init_calls_sets_dns_id(self): + """ It should blank dns_id on init """ + res = self._new_importer() + self.assertEqual(None, res.dns_id) + + def test_init_calls_sets_dns_record(self): + """ It should blank dns_record on init """ + res = self._new_importer() + self.assertEqual(None, res.dns_record) + + def test_get_dns_data_read(self): + """ It should call read on adapter for dns id """ + importer = self._new_importer(self.dns_id) + with self.mock_adapter(importer) as mk: + importer._get_dns_data() + 
mk.read.assert_called_once_with(self.dns_id) + + def test_get_dns_data_return(self): + """ It should return result of adapter read op """ + importer = self._new_importer(self.dns_id) + with self.mock_adapter(importer) as mk: + res = importer._get_dns_data() + self.assertEqual(mk.read(), res) + + def test_is_current_assert_record(self): + """ It should assert that a dns_record is set """ + with self.assertRaises(AssertionError): + self._new_importer()._is_current(None) + + def test_is_current_no_updated_at(self): + """ It should return None when no updated_at present on record """ + importer = self._new_importer(dns_record={'updated_at': False}) + with self.mock_base_importer(importer): + res = importer._is_current(True) + self.assertEqual(None, res) + + def test_is_current_no_binding(self): + """ It should return None when no binding was provided """ + res = self._new_importer(dns_record=self.dns_record) + res = res._is_current(False) + self.assertEqual(None, res) + + def test_is_current_no_sync_date(self): + """ It should return None when no sync_date in binding """ + rec_id = self._new_record(None) + importer = self._new_importer(dns_record=self.dns_record) + with self.mock_base_importer(importer): + res = importer._is_current(rec_id) + self.assertEqual(None, res) + + def test_is_current_should_not_sync(self): + """ It should return False when DNS is newer than Binding """ + rec_id = self._new_record() + importer = self._new_importer(dns_record=self.dns_record) + with self.mock_base_importer(importer): + res = importer._is_current(rec_id) + self.assertFalse(res) + + def test_import_dependency_no_dns_id(self): + """ It should return None when no dns_id supplied """ + res = self._new_importer()._import_dependency(False, True) + self.assertEqual(None, res) + + def test_import_dependency_gets_binder(self): + """ It should get binder for binding_model """ + importer = self._new_importer() + with self.mock_base_importer(importer): + importer.binder_for.side_effect = self.EndTestException + with self.assertRaises(self.EndTestException): + importer._import_dependency(True, self.model) + importer.binder_for.assert_called_once_with( + self.model, + ) + + def test_import_dependency_always(self): + """ It should always proceed to import if always is True """ + importer = self._new_importer() + with self.mock_base_importer(importer): + importer.unit_for.side_effect = self.EndTestException + with self.assertRaises(self.EndTestException): + importer._import_dependency( + True, self.model, always=True + ) + importer.binder_for.to_openerp.assert_not_called() + + def test_import_dependency_no_odoo_binder(self): + """ It should attempt to get odoo for binder if not always """ + importer = self._new_importer() + with self.mock_base_importer(importer): + importer._import_dependency( + self.dns_id, self.model, + ) + importer.binder_for().to_openerp.assert_called_once_with( + self.dns_id + ) + + def test_import_dependency_gets_unit_default(self): + """ It should get proper importer unit w/ default Importer """ + importer = self._new_importer() + with self.mock_base_importer(importer): + importer.unit_for.side_effect = self.EndTestException + with self.assertRaises(self.EndTestException): + importer._import_dependency( + True, self.model, always=True + ) + importer.unit_for.assert_called_once_with( + self.Importer, model=self.model, + ) + + def test_import_dependency_gets_unit_defined(self): + """ It should get proper importer unit w/ defined Importer """ + expect = mock.MagicMock() + importer = 
self._new_importer() + with self.mock_base_importer(importer): + importer.unit_for.side_effect = self.EndTestException + with self.assertRaises(self.EndTestException): + importer._import_dependency( + True, self.model, importer_class=expect, always=True + ) + importer.unit_for.assert_called_once_with( + expect, model=self.model, + ) + + def test_import_dependency_runs_import(self): + """ It should run importer w/ proper args """ + importer = self._new_importer() + with self.mock_base_importer(importer): + importer._import_dependency( + self.dns_id, self.model, always=True + ) + importer.unit_for().run.assert_called_once_with( + self.dns_id + ) + + def test_import_dependencies_none(self): + """ It should return None on base class """ + res = self._new_importer()._import_dependencies() + self.assertEqual(None, res) + + def test_map_data_call(self): + """ It should get map record w/ proper args """ + importer = self._new_importer(dns_record=self.dns_record) + with self.mock_base_importer(importer, ['_mapper']): + importer._map_data() + importer.mapper.map_record.assert_called_once_with( + self.dns_record + ) + + def test_map_data_return(self): + """ It should return data mapper """ + importer = self._new_importer(dns_record=self.dns_record) + with self.mock_base_importer(importer, ['_mapper']): + res = importer._map_data() + self.assertEqual(importer.mapper.map_record(), res) + + def test_validate_data_none(self): + """ It should return None on base class """ + res = self._new_importer()._validate_data(True) + self.assertEqual(None, res) + + def test_must_skip_none(self): + """ It should return None on base class """ + res = self._new_importer()._must_skip() + self.assertEqual(None, res) + + def test_get_binding_call(self): + """ It should get binding w/ proper args """ + importer = self._new_importer(self.dns_id) + with self.mock_base_importer(importer): + importer._get_binding() + importer.binder.to_openerp.assert_called_once_with( + self.dns_id, unwrap=False, browse=True, + ) + + def test_get_binding_return(self): + """ It should return resulting binding """ + importer = self._new_importer(self.dns_id) + with self.mock_base_importer(importer): + res = importer._get_binding() + self.assertEqual(importer.binder.to_openerp(), res) + + def test_create_data_call(self): + """ It should inject proper vals into map record """ + map_record = mock.MagicMock() + expect = {'test': 123, 'test2': 456} + self._new_importer(self.dns_id)._create_data( + map_record, **expect + ) + map_record.values.assert_called_once_with( + for_create=True, **expect + ) + + def test_create_data_return(self): + """ It should inject proper vals into map record """ + map_record = mock.MagicMock() + res = self._new_importer(self.dns_id)._create_data(map_record) + self.assertEqual(map_record.values(), res) + + def test_create_validates_data(self): + """ It should validate data """ + expect = 'expect' + importer = self._new_importer() + with self.mock_base_importer(importer): + importer._validate_data.side_effect = self.EndTestException + with self.assertRaises(self.EndTestException): + importer._create(expect) + importer._validate_data.assert_called_once_with(expect) + + def test_create_gets_model_with_context(self): + """ It should get model with context to avoid infinite loop """ + importer = self._new_importer() + with self.mock_base_importer(importer, ['connector_env']): + importer.connector_env.model.with_context.side_effect = \ + self.EndTestException + with self.assertRaises(self.EndTestException): + 
importer._create(None) + importer.connector_env.model.with_context.assert_called_once_with( + connector_no_export=True, + ) + + def test_create_does_create(self): + """ It should create binding w/ data """ + expect = 'expect' + importer = self._new_importer() + with self.mock_base_importer(importer, ['connector_env']): + mk = importer.connector_env.model.with_context + mk().create.side_effect = self.EndTestException + with self.assertRaises(self.EndTestException): + importer._create(expect) + mk().create.assert_called_once_with(expect) + + def test_create_returns_binding(self): + """ It should return new binding """ + importer = self._new_importer() + with self.mock_base_importer(importer, ['connector_env']): + res = importer._create(None) + self.assertEqual( + importer.connector_env.model.with_context().create(), res + ) + + def test_update_data_call(self): + """ It should inject proper vals into map record """ + map_record = mock.MagicMock() + expect = {'test': 123, 'test2': 456} + self._new_importer(self.dns_id)._update_data( + map_record, **expect + ) + map_record.values.assert_called_once_with(**expect) + + def test_update_data_return(self): + """ It should inject proper vals into map record """ + map_record = mock.MagicMock() + res = self._new_importer(self.dns_id)._update_data(map_record) + self.assertEqual(map_record.values(), res) + + def test_update_gets_binding_with_context(self): + """ It should get model with context to avoid infinite loop """ + expect = 'expect' + mk = mock.MagicMock() + importer = self._new_importer() + with self.mock_base_importer(importer): + mk.with_context.side_effect = self.EndTestException + with self.assertRaises(self.EndTestException): + importer._update(mk, expect) + mk.with_context.assert_called_once_with( + connector_no_export=True, + ) + + def test_update_does_write(self): + """ It should update binding w/ data """ + expect = 'expect' + mk = mock.MagicMock() + importer = self._new_importer() + with self.mock_base_importer(importer): + mk.with_context().write.side_effect = self.EndTestException + with self.assertRaises(self.EndTestException): + importer._update(mk, expect) + mk.with_context().write.assert_called_once_with(expect) + + def test_after_import_none(self): + """ It should return None on base class """ + res = self._new_importer()._after_import(None) + self.assertEqual(None, res) + + def test_run_sets_dns_id(self): + """ It should set dns_id on importer """ + importer = self._new_importer() + with self.mock_base_importer(importer): + importer._get_dns_data.side_effect = self.EndTestException + with self.assertRaises(self.EndTestException): + importer.run(self.dns_id) + self.assertEqual(self.dns_id, importer.dns_id) + + def test_run_returns_skip_if_skip(self): + """ It should return skip if skip """ + expect = 'expect' + importer = self._new_importer() + with self.mock_base_importer(importer): + importer._must_skip.return_value = expect + res = importer.run(self.dns_id) + self.assertEqual(expect, res) + + def test_run_gets_binding(self): + """ It should get binding """ + importer = self._new_importer() + with self.mock_base_importer(importer, ['_get_binding']): + importer._must_skip.return_value = False + importer._get_binding.side_effect = self.EndTestException + with self.assertRaises(self.EndTestException): + importer.run(self.dns_id) + + def test_run_does_force(self): + """ It should not see if binding is current if forced """ + importer = self._new_importer() + with self.mock_base_importer( + importer, ['_before_import', 
'_is_current'] + ): + importer._must_skip.return_value = False + importer._before_import.side_effect = self.EndTestException + with self.assertRaises(self.EndTestException): + importer.run(self.dns_id, True) + importer._is_current.assert_not_called() + + def test_run_no_force(self): + """ It should return translated up to date if current """ + importer = self._new_importer() + with self.mock_base_importer(importer, ['_is_current']): + importer._must_skip.return_value = False + importer._is_current.return_value = True + res = importer.run(self.dns_id) + self.assertEqual( + _('Already Up To Date.'), res, + ) + + def test_run_import_depends(self): + """ It should import dependencies first """ + importer = self._new_importer() + with self.mock_base_importer(importer, ['_import_dependencies']): + importer._must_skip.return_value = False + importer._import_dependencies.side_effect = self.EndTestException + with self.assertRaises(self.EndTestException): + importer.run(self.dns_id, True) + + def test_run_gets_map_record(self): + """ It should get the map record """ + importer = self._new_importer() + with self.mock_base_importer(importer, ['_map_data']): + importer._map_data.side_effect = self.EndTestException + importer._must_skip.return_value = False + with self.assertRaises(self.EndTestException): + importer.run(self.dns_id, True) + + def test_run_update_data(self): + """ It should call update_data w/ map_record if existing """ + importer = self._new_importer() + with self.mock_base_importer(importer, ['_map_data', + '_get_binding', + '_update_data', + ]): + importer._get_binding.return_value = True + importer._update_data.side_effect = self.EndTestException + importer._must_skip.return_value = False + with self.assertRaises(self.EndTestException): + importer.run(self.dns_id, True) + importer._update_data.assert_called_once_with( + importer._map_data(), + ) + + def test_run_update(self): + """ It should call update w/ binding and record """ + importer = self._new_importer() + with self.mock_base_importer(importer, ['_map_data', + '_get_binding', + '_update_data', + '_update', + ]): + importer._get_binding.return_value = True + importer._update.side_effect = self.EndTestException + importer._must_skip.return_value = False + with self.assertRaises(self.EndTestException): + importer.run(self.dns_id, True) + importer._update.assert_called_once_with( + importer._get_binding(), importer._update_data(), + ) + + def test_run_create_data(self): + """ It should call update_data w/ map_record if not existing """ + importer = self._new_importer() + with self.mock_base_importer(importer, ['_map_data', + '_get_binding', + '_create_data', + ]): + importer._get_binding.return_value = False + importer._create_data.side_effect = self.EndTestException + importer._must_skip.return_value = False + with self.assertRaises(self.EndTestException): + importer.run(self.dns_id, True) + importer._create_data.assert_called_once_with( + importer._map_data(), + ) + + def test_run_create(self): + """ It should call create w/ record """ + importer = self._new_importer() + with self.mock_base_importer(importer, ['_map_data', + '_get_binding', + '_create_data', + '_create', + ]): + importer._get_binding.return_value = False + importer._create.side_effect = self.EndTestException + importer._must_skip.return_value = False + with self.assertRaises(self.EndTestException): + importer.run(self.dns_id, True) + importer._create.assert_called_once_with( + importer._create_data(), + ) diff --git a/connector_dns/unit/__init__.py 
b/connector_dns/unit/__init__.py index 2b0da66..08d3652 100755 --- a/connector_dns/unit/__init__.py +++ b/connector_dns/unit/__init__.py @@ -1,6 +1,10 @@ # -*- coding: utf-8 -*- # Copyright 2015 Elico Corp # License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl). + from . import backend_adapter from . import binder from . import export_synchronizer +from . import import_synchronizer +from . import delete_synchronizer +from . import mapper diff --git a/connector_dns/unit/backend_adapter.py b/connector_dns/unit/backend_adapter.py index eb39281..f857238 100755 --- a/connector_dns/unit/backend_adapter.py +++ b/connector_dns/unit/backend_adapter.py @@ -10,7 +10,7 @@ recorder = {} -def call_to_key(method, arguments): +def call_to_key(method, arguments): # pragma: no cover """ Used to 'freeze' the method and arguments of a call to DNS so they can be hashable; they will be stored in a dict. @@ -32,7 +32,7 @@ def freeze(arg): return (method, tuple(new_args)) -def record(method, arguments, result): +def record(method, arguments, result): # pragma: no cover """ Utility function which can be used to record test data during synchronisations. Call it from DNSAdapter._call @@ -42,7 +42,7 @@ def record(method, arguments, result): recorder[call_to_key(method, arguments)] = result -def output_recorder(filename): +def output_recorder(filename): # pragma: no cover import pprint with open(filename, 'w') as f: pprint.pprint(recorder, f) @@ -51,7 +51,8 @@ def output_recorder(filename): class DNSLocation(object): - def __init__(self, login, password): + def __init__(self, uri, login, password): + self.uri = uri self.login = login self.password = password @@ -66,7 +67,10 @@ def __init__(self, environment): """ super(DNSAdapter, self).__init__(environment) self.DNS = DNSLocation( - self.backend_record.login, self.backend_record.password) + self.backend_record.uri, + self.backend_record.login, + self.backend_record.password, + ) def search(self, filters=None): """ Search records according to some criterias diff --git a/connector_dns/unit/binder.py b/connector_dns/unit/binder.py index 6526ab1..419b5f9 100755 --- a/connector_dns/unit/binder.py +++ b/connector_dns/unit/binder.py @@ -1,87 +1,67 @@ # -*- coding: utf-8 -*- # Copyright 2015 Elico Corp +# Copyright 2016 LasLabs Inc. # License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl). -from datetime import datetime -from openerp.tools import DEFAULT_SERVER_DATETIME_FORMAT + +from openerp import fields, models from openerp.addons.connector.connector import Binder from ..backend import dns @dns class DNSModelBinder(Binder): - """ - Bindings are done directly on the binding model. + """ Bindings are done directly on the binding model. - Binding models are models called ``dns.{normal_model}``, - like ``dns.record`` or ``dns.domain``. + Binding models are models called ``{normal_model}.bind``, + like ``dns.record.bind`` or ``dns.zone.bind``. They are ``_inherits`` of the normal models and contains the DNS ID, the ID of the DNS Backend and the additional fields belonging to the DNS instance. 
""" + _model_name = [ - 'dns.record', - 'dns.domain' + 'dns.record.bind', + 'dns.zone.bind', ] - _external_field = 'dns_id' + + # Name of Odoo field containing external record ID + _external_field = 'dns_id_external' + # Name of Odoo field containing backend record relation _backend_field = 'dns_backend_id' - _openerp_field = 'openerp_id' + # Name of Odoo field on binding record, relating to regular record + _openerp_field = 'odoo_id' + # Name of Odoo field containing last successful sync date _sync_date_field = 'sync_date' + # Name of Odoo field containing last failed sync date + _fail_date_field = 'fail_date' + # Name of field on external system indicating last change time + _external_date_field = 'updated_at' - def to_openerp(self, external_id, unwrap=False): - """ Give the OpenERP ID for an external ID - - :param external_id: external ID for which we want the OpenERP ID - :param unwrap: if True, returns the openerp_id of the dns_xx record, - else return the id (binding id) of that record - :return: a record ID, depending on the value of unwrap, - or None if the external_id is not mapped - :rtype: int + def bind(self, external_id, binding_id): + """ Create the link between an External ID and an Odoo ID + :param external_id: external id to bind + :param binding_id: Odoo ID to bind + :type binding_id: int """ - binding_ids = self.session.search( - self.model._name, - [(self._external_field, '=', str(external_id)), - (self._backend_field, '=', self.backend_record.id)]) - if not binding_ids: - return None - assert len(binding_ids) == 1, "Several records found: %s" % binding_ids - binding_id = binding_ids[0] - if unwrap: - model_id = self.session.read( - self.model._name, binding_id, [self._openerp_field] - ) - assert model_id - return model_id[self._openerp_field][0] - else: - return binding_id + try: + super(DNSModelBinder, self).bind(external_id, binding_id) + except AssertionError: + if not isinstance(binding_id, models.BaseModel): + binding_id = self.model.browse(binding_id) + binding_id.with_context(connector_no_export=True).write({ + self._fail_date_field: fields.Datetime.now(), + }) - def to_backend(self, binding_id): - """ Give the external ID for an OpenERP ID + def _external_date_method(self, field_value): + """ It executes w/ _external_date_field to create a Datetime obj - :param binding_id: OpenERP ID for which we want the external id - :return: backend identifier of the record - """ - dns_record = self.session.read( - self.model._name, binding_id, [self._external_field] - ) - assert dns_record - return dns_record[self._external_field] + Default implementation assumes it is already a Datetime obj. - def bind(self, external_id, binding_id): - """ Create the link between an external ID and an OpenERP ID and - update the last synchronization date. 
+ Args: + field_value (mixed): Value of _external_date_field from + external system - :param external_id: External ID to bind - :param binding_id: OpenERP ID to bind - :type binding_id: int + Return: + datetime.datetime() """ - # avoid to trigger the export when we modify the `dns_id` - model = self.model.with_context(connector_no_export=True) - binding = model.browse(binding_id) - now_fmt = datetime.now().strftime(DEFAULT_SERVER_DATETIME_FORMAT) - if external_id: - state = 'done' - else: - state = 'exception' - binding.write({'dns_id': str(external_id), - 'state': state, - 'sync_date': now_fmt}) + return field_value diff --git a/connector_dns/unit/delete_synchronizer.py b/connector_dns/unit/delete_synchronizer.py new file mode 100644 index 0000000..b6f7d9e --- /dev/null +++ b/connector_dns/unit/delete_synchronizer.py @@ -0,0 +1,27 @@ +# -*- coding: utf-8 -*- +# Copyright 2016 LasLabs Inc. +# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html). + + +# from openerp.addons.connector.queue.job import job +from openerp.addons.connector.unit.synchronizer import Deleter +# from ..connector import get_environment + + +class DNSDeleter(Deleter): + """ Base deleter for DNS """ + + def run(self, dns_id): + """ + Run the synchronization, delete the record on DNS + :param dns_id: identifier of the record to delete + """ + raise NotImplementedError('Cannot delete records from DNS.') + + +# @job(default_channel='root.dns') +# def export_delete_record(session, model_name, backend_id, dns_id): +# """ Delete a record on DNS """ +# env = get_environment(session, model_name, backend_id) +# deleter = env.get_connector_unit(DNSDeleter) +# return deleter.run(dns_id) diff --git a/connector_dns/unit/export_synchronizer.py b/connector_dns/unit/export_synchronizer.py old mode 100755 new mode 100644 index 4a525bd..3cf48c3 --- a/connector_dns/unit/export_synchronizer.py +++ b/connector_dns/unit/export_synchronizer.py @@ -1,57 +1,374 @@ # -*- coding: utf-8 -*- -# Copyright 2015 Elico Corp -# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl). +# Copyright 2013 Camptocamp SA +# Copyright 2015 LasLabs Inc. +# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html). + import logging -from openerp.addons.connector.unit.synchronizer import ExportSynchronizer +from contextlib import contextmanager -_logger = logging.getLogger(__name__) +import psycopg2 +from openerp.tools.translate import _ -""" +from openerp.addons.connector.queue.job import job, related_action +from openerp.addons.connector.related_action import unwrap_binding +from openerp.addons.connector.unit.synchronizer import Exporter +from openerp.addons.connector.exception import (IDMissingInBackend, + RetryableJobError, + ) + +from .import_synchronizer import import_record +from ..connector import get_environment + + +_logger = logging.getLogger(__name__) -Exporters for DNS. + +""" Exporters for DNS. In addition to its export job, an exporter has to: * check in DNS if the record has been updated more recently than the last sync date and if yes, delay an import * call the ``bind`` method of the binder to update the last sync date - """ -class DNSBaseExporter(ExportSynchronizer): - +class DNSBaseExporter(Exporter): """ Base exporter for DNS """ - def __init__(self, environment): + def __init__(self, connector_env): """ - :param environment: current environment (backend, session, ...) - :type environment: :py:class:`connector.connector.Environment` + :param connector_env: current environment (backend, session, ...) 
+ :type connector_env: :class:`connector.connector.ConnectorEnvironment` """ - super(DNSBaseExporter, self).__init__(environment) + super(DNSBaseExporter, self).__init__(connector_env) self.binding_id = None - self.external_id = None + self.dns_id = None + self.binding_record = None + + def _delay_import(self): + """ Schedule an import of the record. + Adapt in the sub-classes when the model is not imported + using ``import_record``. + """ + # force is True because the sync_date will be more recent + # so the import would be skipped + assert self.dns_id + import_record.delay(self.session, self.model._name, + self.backend_record.id, self.dns_id, + force=True) + + def _should_import(self): + """ Compare the update date in DNS with the last sync datew in Odoo. + If the former is more recent, schedule an import in order to not + overwrite changes on remote system. + + This base method only validates that the proper + + This should be overloaded in custom exporters to provide contextual + logic for the backend in question. + """ + assert self.binding_record + if not self.dns_id: + return False + binder = self.binder_for(self.model._name) + sync = getattr(self.binding_record, binder._sync_date_field, False) + if not sync: + return True def _get_odoo_data(self): - """ Return the raw OpenERP data for ``self.binding_id`` """ - return self.session.browse(self.model._name, self.binding_id) + """ Return the raw Odoo data for ``self.binding_id`` """ + return self.model.browse(self.binding_id) def run(self, binding_id, *args, **kwargs): """ Run the synchronization - :param binding_id: identifier of the binding record to export """ + self.binding_id = binding_id - self.binding_record = self._get_openerp_data() + self.binding_record = self._get_odoo_data() + + binder = self.binder_for(self.model._name) + self.dns_id = getattr( + self.binding_record, binder._external_field, False, + ) + + try: + should_import = self._should_import() + except IDMissingInBackend: + self.dns_id = None + should_import = False + + if should_import: + self._delay_import() - self.external_id = self.binder.to_backend(self.binding_id) result = self._run(*args, **kwargs) - self.binder.bind(self.external_id, self.binding_id) + self.binder.bind(self.dns_id, self.binding_id) + # Commit so we keep the external ID when there are several + # exports (due to dependencies) and one of them fails. + # The commit will also release the lock acquired on the binding + # record + self.session.commit() + + self._after_export() return result def _run(self): """ Flow of the synchronization, implemented in inherited classes""" raise NotImplementedError + + def _after_export(self): + """ Can do several actions after exporting a record on dns """ + return + + +class DNSExporter(DNSBaseExporter): + """ A common flow for the exports to DNS """ + + def _lock(self): + """ Lock the binding record. + Lock the binding record so we are sure that only one export + job is running for this record if concurrent jobs have to export the + same record. + When concurrent jobs try to export the same record, the first one + will lock and proceed, the others will fail to lock and will be + retried later. + This behavior works also when the export becomes multilevel + with :meth:`_export_dependencies`. Each level will set its own lock + on the binding record it has to export. 
+ """ + sql = ("SELECT id FROM %s WHERE ID = %%s FOR UPDATE NOWAIT" % + self.model._table) + try: + self.session.cr.execute(sql, (self.binding_id, ), + log_exceptions=False) + except psycopg2.OperationalError: + _logger.info('A concurrent job is already exporting the same ' + 'record (%s with id %s). Job delayed later.', + self.model._name, self.binding_id) + raise RetryableJobError( + 'A concurrent job is already exporting the same record ' + '(%s with id %s). The job will be retried later.' % + (self.model._name, self.binding_id)) + + def _has_to_skip(self): + """ Return True if the export can be skipped """ + return False + + @contextmanager + def _retry_unique_violation(self): + """ Context manager: catch Unique constraint error and retry the + job later. + When we execute several jobs workers concurrently, it happens + that 2 jobs are creating the same record at the same time (binding + record created by :meth:`_export_dependency`), resulting in: + IntegrityError: duplicate key value violates unique + constraint "dns_product_product_odoo_uniq" + DETAIL: Key (backend_id, odoo_id)=(1, 4851) already exists. + In that case, we'll retry the import just later. + .. warning:: The unique constraint must be created on the + binding record to prevent 2 bindings to be created + for the same DNS record. + """ + try: + yield + except psycopg2.IntegrityError as err: + if err.pgcode == psycopg2.errorcodes.UNIQUE_VIOLATION: + raise RetryableJobError( + 'A database error caused the failure of the job:\n' + '%s\n\n' + 'Likely due to 2 concurrent jobs wanting to create ' + 'the same record. The job will be retried later.' % err) + else: + raise + + def _export_dependency(self, relation, binding_model, exporter_class=None, + binding_field='dns_bind_ids', + binding_extra_vals=None, + force=False, + ): + """ Export a dependency. The exporter class is a subclass of + ``DNSExporter``. If a more precise class need to be defined, + it can be passed to the ``exporter_class`` keyword argument. + .. warning:: a commit is done at the end of the export of each + dependency. The reason for that is that we pushed a record + on the backend and we absolutely have to keep its ID. + So you *must* take care not to modify the Odoo + database during an export, excepted when writing + back the external ID or eventually to store + external data that we have to keep on this side. + You should call this method only at the beginning + of the exporter synchronization, + in :meth:`~._export_dependencies`. + :param relation: record to export if not already exported + :type relation: :py:class:`odoo.models.BaseModel` + :param binding_model: name of the binding model for the relation + :type binding_model: str | unicode + :param exporter_cls: :py:class:`odoo.addons.connector\ + .connector.ConnectorUnit` + class or parent class to use for the export. + By default: DNSExporter + :type exporter_cls: :py:class:`odoo.addons.connector\ + .connector.MetaConnectorUnit` + :param binding_field: name of the one2many field on a normal + record that points to the binding record + (default: dns_bind_ids). + It is used only when the relation is not + a binding but is a normal record. 
+ :type binding_field: str | unicode + :param binding_extra_vals: In case we want to create a new binding + pass extra values for this binding + :type binding_extra_vals: dict + :param force: Trigger export workflow even if record exists + :type force: bool + """ + if not relation: + return + if exporter_class is None: + exporter_class = DNSExporter + rel_binder = self.binder_for(binding_model) + # wrap is typically True if the relation is for instance a + # 'product.product' record but the binding model is + # 'dns.product.product' + wrap = relation._model._name != binding_model + + if wrap and hasattr(relation, binding_field): + domain = [ + (rel_binder._openerp_field, '=', relation.id), + (rel_binder._backend_field, '=', self.backend_record.id), + ] + binding = self.env[binding_model].search(domain) + if binding: + assert len(binding) == 1, ( + 'only 1 binding for a backend is ' + 'supported in _export_dependency') + # we are working with a unwrapped record (e.g. + # product.category) and the binding does not exist yet. + # Example: I created a product.product and its binding + # dns.product.product and we are exporting it, but we need to + # create the binding for the product.category on which it + # depends. + else: + bind_values = { + rel_binder._backend_field: self.backend_record.id, + rel_binder._openerp_field: relation.id, + } + if binding_extra_vals: + bind_values.update(binding_extra_vals) + # If 2 jobs create it at the same time, retry + # one later. A unique constraint (backend_id, + # odoo_id) should exist on the binding model + with self._retry_unique_violation(): + binding = (self.env[binding_model] + .with_context(connector_no_export=True) + .sudo() + .create(bind_values)) + # Eager commit to avoid having 2 jobs + # exporting at the same time. The constraint + # will pop if an other job already created + # the same binding. It will be caught and + # raise a RetryableJobError. + self.session.commit() + else: + # If dns_bind_ids does not exist we are typically in a + # "direct" binding (the binding record is the same record). + # If wrap is True, relation is already a binding record. 
+ binding = relation + + if force or not rel_binder.to_backend(binding, wrap=False): + exporter = self.unit_for(exporter_class, model=binding_model) + exporter.run(binding.id) + + def _export_dependencies(self): + """ Export the dependencies for the record""" + return + + def _map_data(self): + """ Returns an instance of + :py:class:`~odoo.addons.connector.unit.mapper.MapRecord` + """ + return self.mapper.map_record(self.binding_record) + + def _validate_create_data(self, data): + """ Check if the values to import are correct + Pro-actively check before the ``Model.create`` if some fields + are missing or invalid + Raise `InvalidDataError` + """ + return + + def _validate_update_data(self, data): + """ Check if the values to import are correct + Pro-actively check before the ``Model.update`` if some fields + are missing or invalid + Raise `InvalidDataError` + """ + return + + def _create_data(self, map_record, fields=None, **kwargs): + """ Get the data to pass to :py:meth:`_create` """ + return map_record.values(for_create=True, fields=fields, **kwargs) + + def _create(self, data): + """ Create the DNS record """ + # special check on data before export + self._validate_create_data(data) + return self.backend_adapter.create(data) + + def _update_data(self, map_record, fields=None, **kwargs): + """ Get the data to pass to :py:meth:`_update` """ + return map_record.values(fields=fields, **kwargs) + + def _update(self, data): + """ Update an DNS record """ + assert self.dns_id + # special check on data before export + self._validate_update_data(data) + self.backend_adapter.write(self.dns_id, data) + + def _run(self, fields=None): + """ Flow of the base synchronization """ + assert self.binding_id + assert self.binding_record + + if not self.dns_id: + fields = None # should be created with all the fields + + if self._has_to_skip(): + return + + # export the missing linked resources + self._export_dependencies() + + # prevent other jobs to export the same record + # will be released on commit (or rollback) + self._lock() + + map_record = self._map_data() + + if self.dns_id: + record = self._update_data(map_record, fields=fields) + if not record: + return _('Nothing to export.') + self._update(record) + else: + record = self._create_data(map_record, fields=fields) + if not record: + return _('Nothing to export.') + self.dns_id = self._create(record) + return _( + 'Record exported with ID %s on DNS.' + ) % self.dns_id + + +@job(default_channel='root.dns') +@related_action(action=unwrap_binding) +def export_record(session, model_name, binding_id, fields=None): + """ Export a record to DNS """ + record = session.env[model_name].browse(binding_id) + env = get_environment(session, model_name, record.backend_id.id) + exporter = env.get_connector_unit(DNSExporter) + return exporter.run(binding_id, fields=fields) diff --git a/connector_dns/unit/import_synchronizer.py b/connector_dns/unit/import_synchronizer.py new file mode 100644 index 0000000..e0b6f23 --- /dev/null +++ b/connector_dns/unit/import_synchronizer.py @@ -0,0 +1,303 @@ +# -*- coding: utf-8 -*- +# Copyright 2013 Camptocamp SA +# Copyright 2015 LasLabs Inc. +# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html). + + +""" Importers for DNS. + +An import can be skipped if the last sync date is more recent than +the last update in DNS. + +They should call the ``bind`` method if the binder even if the records +are already bound, to update the last sync date. 
+""" + + +import logging +from openerp import fields, _ +from openerp.addons.connector.queue.job import job +from openerp.addons.connector.connector import ConnectorUnit +from openerp.addons.connector.unit.synchronizer import Importer +from ..backend import dns +from ..connector import get_environment, add_checkpoint + + +_logger = logging.getLogger(__name__) + + +def int_or_str(val): + try: + return int(val) + except: + return str(val) + + +class DNSImporter(Importer): + """ Base importer for DNS """ + + def __init__(self, connector_env): + """ + :param connector_env: current environment (backend, session, ...) + :type connector_env: :class:`connector.connector.ConnectorEnvironment` + """ + super(DNSImporter, self).__init__(connector_env) + self.dns_id = None + self.dns_record = None + + def _get_dns_data(self): + """ Return the raw DNS data for ``self.dns_id`` """ + _logger.debug('Getting CP data for %s', self.dns_id) + return self.backend_adapter.read(self.dns_id) + + def _before_import(self): + """ Hook called before the import, when we have the DNS + data""" + + def _is_current(self, binding): + """ Return True if the import should be skipped because + it is already up to date in Odoo""" + assert self.dns_record + + if not binding: + return # it does not exist so it should not be skipped + + binder = self.binder_for(self.model._name) + + dns_date = getattr( + binding, binder._external_date_field, False, + ) + if not dns_date: + return # No external update date, always import + + sync_date = getattr( + binding, binder._sync_date_field, False, + ) + if not sync_date: + return # No internal update date, always import + + # Convert fields to Datetime objs for comparison + sync_date = fields.Datetime.from_string(sync_date) + dns_date = getattr(binding, binder._external_date_field)(dns_date) + + # if the last synchronization date is greater than the last + # update in dns, we skip the import. + # Important: at the beginning of the exporters flows, we have to + # check if the dns_date is more recent than the sync_date + # and if so, schedule a new import. If we don't do that, we'll + # miss changes done in DNS + return dns_date < sync_date + + def _import_dependency(self, dns_id, binding_model, + importer_class=None, always=False): + """ Import a dependency. + The importer class is a class or subclass of + :class:`DNSImporter`. A specific class can be defined. + :param dns_id: id of the related binding to import + :param binding_model: name of the binding model for the relation + :type binding_model: str | unicode + :param importer_cls: :class:`odoo.addons.connector.\ + connector.ConnectorUnit` + class or parent class to use for the export. + By default: DNSImporter + :type importer_cls: :class:`odoo.addons.connector.\ + connector.MetaConnectorUnit` + :param always: if True, the record is updated even if it already + exists, note that it is still skipped if it has + not been modified on DNS since the last + update. When False, it will import it only when + it does not yet exist. + :type always: boolean + """ + if not dns_id: + return + if importer_class is None: + importer_class = DNSImporter + binder = self.binder_for(binding_model) + if always or binder.to_openerp(dns_id) is None: + importer = self.unit_for(importer_class, model=binding_model) + importer.run(dns_id) + + def _import_dependencies(self): + """ Import the dependencies for the record + Import of dependencies can be done manually or by calling + :meth:`_import_dependency` for each dependency. 
+ """ + return + + def _map_data(self): + """ Returns an instance of + :py:class:`~odoo.addons.connector.unit.mapper.MapRecord` + """ + return self.mapper.map_record(self.dns_record) + + def _validate_data(self, data): + """ Check if the values to import are correct + Pro-actively check before the ``_create`` or + ``_update`` if some fields are missing or invalid. + Raise `InvalidDataError` + """ + return + + def _must_skip(self): + """ Hook called right after we read the data from the backend. + If the method returns a message giving a reason for the + skipping, the import will be interrupted and the message + recorded in the job (if the import is called directly by the + job, not by dependencies). + If it returns None, the import will continue normally. + :returns: None | str | unicode + """ + return + + def _get_binding(self): + return self.binder.to_openerp(self.dns_id, + unwrap=False, + browse=True, + ) + + def _create_data(self, map_record, **kwargs): + return map_record.values(for_create=True, **kwargs) + + def _create(self, data): + """ Create the Odoo record """ + # special check on data before import + self._validate_data(data) + model = self.model.with_context(connector_no_export=True) + binding = model.create(data) + return binding + + def _update_data(self, map_record, **kwargs): + return map_record.values(**kwargs) + + def _update(self, binding, data): + """ Update an Odoo record """ + # special check on data before import + self._validate_data(data) + binding.with_context(connector_no_export=True).write(data) + return + + def _after_import(self, binding): + """ Hook called at the end of the import """ + return + + def run(self, dns_id, force=False): + """ Run the synchronization + :param dns_id: identifier of the record on DNS + """ + self.dns_id = dns_id + self.dns_record = self._get_dns_data() + lock_name = 'import({}, {}, {}, {})'.format( + self.backend_record._name, + self.backend_record.id, + self.model._name, + dns_id, + ) + # Keep a lock on this import until the transaction is committed + self.advisory_lock_or_retry(lock_name) + + skip = self._must_skip() + if skip: + return skip + + binding = self._get_binding() + + if not force and self._is_current(binding): + return _('Already Up To Date.') + self._before_import() + + # import the missing linked resources + self._import_dependencies() + + map_record = self._map_data() + + if binding: + record = self._update_data(map_record) + self._update(binding, record) + else: + record = self._create_data(map_record) + binding = self._create(record) + + self.binder.bind(self.dns_id, binding) + + self._after_import(binding) + + +class BatchImporter(Importer): + """ The role of a BatchImporter is to search for a list of + items to import, then it can either import them directly or delay + the import of each item separately. + """ + + def run(self, filters=None): + """ Run the synchronization """ + if filters is None: + filters = {} + record_ids = self.backend_adapter.search(**filters) + for record_id in record_ids: + self._import_record(record_id) + + def _import_record(self, record_id): + """ Import a record directly or delay the import of the record. + Method to be implemented in sub-classes. + """ + raise NotImplementedError + + +class DirectBatchImporter(BatchImporter): + """ Import the records directly, without delaying the jobs. 
""" + _model_name = None + + def _import_record(self, record_id): + """ Import the record directly """ + import_record(self.session, + self.model._name, + self.backend_record.id, + int_or_str(record_id)) + + +class DelayedBatchImporter(BatchImporter): + """ Delay import of the records """ + _model_name = None + + def _import_record(self, record_id, **kwargs): + """ Delay the import of the records""" + import_record.delay(self.session, + self.model._name, + self.backend_record.id, + int_or_str(record_id), + **kwargs) + + +@dns +class AddCheckpoint(ConnectorUnit): + """ Add a connector.checkpoint on the underlying model + (not the dns.* but the _inherits'ed model) """ + + _model_name = ['dns.product.product', + 'dns.product.category', + ] + + def run(self, odoo_binding_id): + binding = self.model.browse(odoo_binding_id) + record = binding.odoo_id + add_checkpoint(self.session, + record._model._name, + record.id, + self.backend_record.id) + + +@job(default_channel='root.dns') +def import_batch(session, model_name, backend_id, filters=None): + """ Prepare a batch import of records from DNS """ + env = get_environment(session, model_name, backend_id) + importer = env.get_connector_unit(BatchImporter) + importer.run(filters=filters) + + +@job(default_channel='root.dns') +def import_record(session, model_name, backend_id, dns_id, force=False): + """ Import a record from DNS """ + env = get_environment(session, model_name, backend_id) + importer = env.get_connector_unit(DNSImporter) + importer.run(dns_id, force=force) diff --git a/connector_dns/unit/mapper.py b/connector_dns/unit/mapper.py new file mode 100644 index 0000000..532034f --- /dev/null +++ b/connector_dns/unit/mapper.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2016 LasLabs Inc. +# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html). + +from openerp.addons.connector.unit.mapper import (mapping, + ImportMapper, + ) + + +class DNSImportMapper(ImportMapper): + """ It provides a default mapper class to be used for all DNS mappers """ + + @mapping + def dns_backend_id(self, record): + return {'dns_backend_id': self.backend_record.id} diff --git a/connector_dns/views/dns_backend.xml b/connector_dns/views/dns_backend.xml new file mode 100755 index 0000000..c30a2de --- /dev/null +++ b/connector_dns/views/dns_backend.xml @@ -0,0 +1,64 @@ + + + + + + DNS Backend Action + dns.backend + ir.actions.act_window + form + tree,form + + + + DNS Backend Tree + dns.backend + + + + + + + + + DNS Backend Form + dns.backend + +
diff --git a/connector_dns/dns_menu.xml b/connector_dns/views/dns_menu.xml similarity index 64% rename from connector_dns/dns_menu.xml rename to connector_dns/views/dns_menu.xml index 1325c07..827533f 100755 --- a/connector_dns/dns_menu.xml +++ b/connector_dns/views/dns_menu.xml @@ -3,24 +3,24 @@ + parent="connector.menu_connector_root" /> + action="dns_backend_action" /> - + action="dns_zone_action" /> + action="dns_record_action" /> diff --git a/connector_dns/views/dns_record.xml b/connector_dns/views/dns_record.xml new file mode 100755 index 0000000..a5f50d8 --- /dev/null +++ b/connector_dns/views/dns_record.xml @@ -0,0 +1,52 @@ + + + + + + DNS Record Action + dns.record + form + tree,form + + + + DNS Record Tree + dns.record + + + + + + + + + + + + + DNS Record Form + dns.record + +
+ + + + + + diff --git a/connector_dns/views/dns_zone.xml b/connector_dns/views/dns_zone.xml new file mode 100755 index 0000000..294973d --- /dev/null +++ b/connector_dns/views/dns_zone.xml @@ -0,0 +1,57 @@ + + + + + + DNS Zone + dns.zone + form + tree,form + + + + DNS Zone Tree + dns.zone + + + + + + + + + DNS Zone Form + dns.zone + +
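The Python units introduced in this patch are deliberately generic base classes. A provider-specific module is expected to add a binding model per synchronised record type and to subclass and register the units against the ``dns`` backend. A minimal sketch of that wiring for the ``dns.zone.bind`` model used in the tests follows; the class names, the ``direct`` mapping and the extra binding fields are illustrative assumptions, not part of this patch:

    from openerp import fields, models

    from openerp.addons.connector_dns.backend import dns
    from openerp.addons.connector_dns.unit.backend_adapter import DNSAdapter
    from openerp.addons.connector_dns.unit.mapper import DNSImportMapper
    from openerp.addons.connector_dns.unit.import_synchronizer import (
        DelayedBatchImporter,
        DNSImporter,
    )
    from openerp.addons.connector_dns.unit.export_synchronizer import DNSExporter


    class DNSZoneBind(models.Model):
        """ Hypothetical binding model following the DNSModelBinder field names. """
        _name = 'dns.zone.bind'
        _inherits = {'dns.zone': 'odoo_id'}

        odoo_id = fields.Many2one(
            comodel_name='dns.zone',
            string='Zone',
            required=True,
            ondelete='cascade',
        )
        dns_backend_id = fields.Many2one(
            comodel_name='dns.backend',
            string='DNS Backend',
        )
        dns_id_external = fields.Char(string='ID on the DNS provider')
        sync_date = fields.Datetime(string='Last successful sync')
        fail_date = fields.Datetime(string='Last failed sync')


    @dns
    class ZoneAdapter(DNSAdapter):
        """ Hypothetical adapter for the provider's zone endpoint. """
        _model_name = 'dns.zone.bind'


    @dns
    class ZoneImportMapper(DNSImportMapper):
        _model_name = 'dns.zone.bind'
        # dns_backend_id is already mapped by DNSImportMapper
        direct = [('name', 'name')]


    @dns
    class ZoneImporter(DNSImporter):
        _model_name = 'dns.zone.bind'


    @dns
    class ZoneBatchImporter(DelayedBatchImporter):
        _model_name = 'dns.zone.bind'


    @dns
    class ZoneExporter(DNSExporter):
        _model_name = 'dns.zone.bind'

With units registered this way, the jobs defined in this patch delay as usual, for example ``import_record.delay(session, 'dns.zone.bind', backend.id, dns_id)`` to pull a single zone, or ``export_record.delay(session, 'dns.zone.bind', binding.id, fields=list(vals))`` after a local change.
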
From b89a2cb587f840ad7d821c70d5e318c81691e34e Mon Sep 17 00:00:00 2001
From: Dave Lasley
Date: Wed, 5 Oct 2016 15:13:44 +0200
Subject: [PATCH 03/25] [FIX] connector_dns: Build and functional errors

* Require zone in record
* Fix action names in backend, zone, record
* Remove states from backend and zone
---
 connector_dns/models/dns_record.py  | 1 +
 connector_dns/views/dns_backend.xml | 5 +----
 connector_dns/views/dns_record.xml  | 2 +-
 connector_dns/views/dns_zone.xml    | 3 +--
 4 files changed, 4 insertions(+), 7 deletions(-)

diff --git a/connector_dns/models/dns_record.py b/connector_dns/models/dns_record.py
index bf8a12e..6bf85ca 100755
--- a/connector_dns/models/dns_record.py
+++ b/connector_dns/models/dns_record.py
@@ -36,6 +36,7 @@ class DNSRecord(models.Model):
         string="Zone",
         comodel_name='dns.zone',
         ondelete='cascade',
+        required=True,
         help="Hosted zone that this record is applied to.",
     )
     type_id = fields.Many2one(
diff --git a/connector_dns/views/dns_backend.xml b/connector_dns/views/dns_backend.xml
index c30a2de..1eb81eb 100755
--- a/connector_dns/views/dns_backend.xml
+++ b/connector_dns/views/dns_backend.xml
@@ -3,9 +3,8 @@
-        DNS Backend Action
+        DNS Backends
         dns.backend
-        ir.actions.act_window
         form
         tree,form
@@ -29,12 +28,10 @@
diff --git a/connector_dns/views/dns_record.xml b/connector_dns/views/dns_record.xml
index a5f50d8..50714a5 100755
--- a/connector_dns/views/dns_record.xml
+++ b/connector_dns/views/dns_record.xml
@@ -3,7 +3,7 @@
-        DNS Record Action
+        DNS Records
         dns.record
         form
         tree,form
diff --git a/connector_dns/views/dns_zone.xml b/connector_dns/views/dns_zone.xml
index 294973d..3540b20 100755
--- a/connector_dns/views/dns_zone.xml
+++ b/connector_dns/views/dns_zone.xml
@@ -3,7 +3,7 @@
-        DNS Zone
+        DNS Zones
         dns.zone
         form
         tree,form
@@ -28,7 +28,6 @@
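The batch importers from the first patch are typically triggered from the backend record itself, for instance by a button on the ``dns.backend`` form. A minimal sketch of such a trigger; the method name and the synchronised model are assumptions, not part of these patches:

    from openerp import api, models
    from openerp.addons.connector.session import ConnectorSession

    from openerp.addons.connector_dns.unit.import_synchronizer import import_batch


    class DNSBackend(models.Model):
        _inherit = 'dns.backend'

        @api.multi
        def action_import_zones(self):
            """ Hypothetical button action: queue a zone import per backend. """
            session = ConnectorSession(
                self.env.cr, self.env.uid, context=self.env.context,
            )
            for backend in self:
                import_batch.delay(session, 'dns.zone.bind', backend.id)
            return True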