summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
-rwxr-xr-xcontrib/fixleadingzeros.py5
-rw-r--r--designate/api/v2/controllers/rest.py20
-rw-r--r--designate/backend/impl_pdns4.py4
-rw-r--r--designate/central/service.py223
-rw-r--r--designate/common/decorators/__init__.py0
-rw-r--r--designate/common/decorators/lock.py107
-rw-r--r--designate/common/decorators/notification.py90
-rw-r--r--designate/common/decorators/rpc.py49
-rw-r--r--designate/context.py16
-rw-r--r--designate/objects/adapters/base.py4
-rw-r--r--designate/objects/base.py32
-rw-r--r--designate/objects/record.py8
-rwxr-xr-xdesignate/objects/recordset.py6
-rw-r--r--designate/objects/rrdata_a.py5
-rw-r--r--designate/objects/rrdata_aaaa.py5
-rw-r--r--designate/objects/rrdata_caa.py7
-rw-r--r--designate/objects/rrdata_cert.py7
-rw-r--r--designate/objects/rrdata_cname.py5
-rw-r--r--designate/objects/rrdata_mx.py5
-rw-r--r--designate/objects/rrdata_naptr.py10
-rw-r--r--designate/objects/rrdata_ns.py5
-rw-r--r--designate/objects/rrdata_ptr.py5
-rw-r--r--designate/objects/rrdata_soa.py10
-rw-r--r--designate/objects/rrdata_spf.py5
-rw-r--r--designate/objects/rrdata_srv.py5
-rw-r--r--designate/objects/rrdata_sshfp.py5
-rw-r--r--designate/objects/rrdata_txt.py5
-rw-r--r--designate/rpc.py27
-rw-r--r--designate/schema/format.py9
-rw-r--r--designate/service.py2
-rw-r--r--designate/sqlalchemy/base.py5
-rw-r--r--designate/sqlalchemy/types.py1
-rw-r--r--designate/tests/test_central/test_decorator.py75
-rw-r--r--designate/tests/test_storage/test_sqlalchemy.py3
-rw-r--r--designate/tests/unit/objects/test_adapters.py99
-rw-r--r--designate/tests/unit/objects/test_base.py34
-rw-r--r--designate/tests/unit/objects/test_recordset.py12
-rw-r--r--designate/tests/unit/objects/test_rrdata_a.py13
-rw-r--r--designate/tests/unit/objects/test_rrdata_caa.py28
-rw-r--r--designate/tests/unit/objects/test_rrdata_cert.py12
-rw-r--r--designate/tests/unit/objects/test_rrdata_mx.py2
-rw-r--r--designate/tests/unit/objects/test_rrdata_naptr.py2
-rw-r--r--designate/tests/unit/objects/test_rrdata_sshfp.py2
-rw-r--r--designate/tests/unit/objects/test_rrdata_txt.py4
-rw-r--r--designate/tests/unit/test_central/test_basic.py25
-rw-r--r--designate/tests/unit/test_central/test_lock_decorator.py111
-rw-r--r--designate/worker/service.py2
-rw-r--r--doc/ext/support_matrix.py2
-rw-r--r--etc/designate/rootwrap.conf.sample6
-rw-r--r--releasenotes/notes/remove-netaddr-requirement-ab9b9c2d15aa8e1c.yaml5
-rw-r--r--releasenotes/source/locale/en_GB/LC_MESSAGES/releasenotes.po51
-rw-r--r--requirements.txt3
52 files changed, 704 insertions, 479 deletions
diff --git a/contrib/fixleadingzeros.py b/contrib/fixleadingzeros.py
index cdf49f1d..3659a29a 100755
--- a/contrib/fixleadingzeros.py
+++ b/contrib/fixleadingzeros.py
@@ -15,6 +15,7 @@
# License for the specific language governing permissions and limitations
# under the License.
import argparse
+import ipaddress
import logging
import sys
@@ -22,7 +23,6 @@ import dns.exception
from dns.ipv4 import inet_aton
from keystoneauth1.identity import generic
from keystoneauth1 import session as keystone_session
-import netaddr
from designateclient import shell
from designateclient.v2 import client
@@ -72,8 +72,9 @@ def fix_bad_recordsets(bad_recordsets):
for rs in bad_recordsets:
new_records = []
for ip in bad_recordsets[rs]['records']:
+ ip = '.'.join(f'{int(i)}' for i in ip.split('.'))
new_records.append(
- str(netaddr.IPAddress(ip, flags=netaddr.ZEROFILL).ipv4())
+ str(ipaddress.IPv4Address(ip))
)
bad_recordsets[rs]['records'] = new_records
return bad_recordsets
diff --git a/designate/api/v2/controllers/rest.py b/designate/api/v2/controllers/rest.py
index e6de3593..7f6c2c92 100644
--- a/designate/api/v2/controllers/rest.py
+++ b/designate/api/v2/controllers/rest.py
@@ -62,7 +62,7 @@ class RestController(pecan.rest.RestController):
else:
return criterion
- def _handle_post(self, method, remainder):
+ def _handle_post(self, method, remainder, request=None):
'''
Routes ``POST`` actions to the appropriate controller.
'''
@@ -75,7 +75,8 @@ class RestController(pecan.rest.RestController):
controller = getattr(self, remainder[0], None)
if controller and not inspect.ismethod(controller):
- return pecan.routing.lookup_controller(controller, remainder[1:])
+ return pecan.routing.lookup_controller(controller, remainder[1:],
+ request=request)
# finally, check for the regular post_one/post requests
controller = self._find_controller('post_one', 'post')
@@ -84,7 +85,7 @@ class RestController(pecan.rest.RestController):
pecan.abort(405)
- def _handle_patch(self, method, remainder):
+ def _handle_patch(self, method, remainder, request=None):
'''
Routes ``PATCH`` actions to the appropriate controller.
'''
@@ -97,7 +98,8 @@ class RestController(pecan.rest.RestController):
controller = getattr(self, remainder[0], None)
if controller and not inspect.ismethod(controller):
- return pecan.routing.lookup_controller(controller, remainder[1:])
+ return pecan.routing.lookup_controller(controller, remainder[1:],
+ request=request)
# finally, check for the regular patch_one/patch requests
controller = self._find_controller('patch_one', 'patch')
@@ -106,7 +108,7 @@ class RestController(pecan.rest.RestController):
pecan.abort(405)
- def _handle_put(self, method, remainder):
+ def _handle_put(self, method, remainder, request=None):
'''
Routes ``PUT`` actions to the appropriate controller.
'''
@@ -119,7 +121,8 @@ class RestController(pecan.rest.RestController):
controller = getattr(self, remainder[0], None)
if controller and not inspect.ismethod(controller):
- return pecan.routing.lookup_controller(controller, remainder[1:])
+ return pecan.routing.lookup_controller(controller, remainder[1:],
+ request=request)
# finally, check for the regular put_one/put requests
controller = self._find_controller('put_one', 'put')
@@ -128,7 +131,7 @@ class RestController(pecan.rest.RestController):
pecan.abort(405)
- def _handle_delete(self, method, remainder):
+ def _handle_delete(self, method, remainder, request=None):
'''
Routes ``DELETE`` actions to the appropriate controller.
'''
@@ -141,7 +144,8 @@ class RestController(pecan.rest.RestController):
controller = getattr(self, remainder[0], None)
if controller and not inspect.ismethod(controller):
- return pecan.routing.lookup_controller(controller, remainder[1:])
+ return pecan.routing.lookup_controller(controller, remainder[1:],
+ request=request)
# finally, check for the regular delete_one/delete requests
controller = self._find_controller('delete_one', 'delete')
diff --git a/designate/backend/impl_pdns4.py b/designate/backend/impl_pdns4.py
index a7e8cf6e..30172233 100644
--- a/designate/backend/impl_pdns4.py
+++ b/designate/backend/impl_pdns4.py
@@ -11,10 +11,10 @@
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
+import ipaddress
import os.path
import urllib
-import netaddr
from oslo_config import cfg
from oslo_log import log as logging
import requests
@@ -83,7 +83,7 @@ class PDNS4Backend(base.Backend):
masters = []
for master in self.masters:
host = master.host
- if netaddr.IPAddress(host).version == 6:
+ if ipaddress.ip_address(host).version == 6:
host = '[%s]' % host
masters.append('%s:%d' % (host, master.port))
diff --git a/designate/central/service.py b/designate/central/service.py
index 05173539..34e39338 100644
--- a/designate/central/service.py
+++ b/designate/central/service.py
@@ -14,16 +14,12 @@
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
-import collections
import copy
-import functools
-import itertools
import random
from random import SystemRandom
import re
import signal
import string
-import threading
import time
from dns import exception as dnsexception
@@ -33,16 +29,16 @@ from oslo_log import log as logging
import oslo_messaging as messaging
from designate.common import constants
-from designate import context as dcontext
+from designate.common.decorators import lock
+from designate.common.decorators import notification
+from designate.common.decorators import rpc
from designate import coordination
from designate import dnsutils
from designate import exceptions
from designate import network_api
-from designate import notifications
from designate import objects
from designate import policy
from designate import quota
-from designate import rpc
from designate import scheduler
from designate import service
from designate import storage
@@ -51,135 +47,7 @@ from designate.storage import transaction_shallow_copy
from designate import utils
from designate.worker import rpcapi as worker_rpcapi
-
LOG = logging.getLogger(__name__)
-ZONE_LOCKS = threading.local()
-NOTIFICATION_BUFFER = threading.local()
-
-
-def synchronized_zone(zone_arg=1, new_zone=False):
- """Ensures only a single operation is in progress for each zone
-
- A Decorator which ensures only a single operation can be happening
- on a single zone at once, within the current designate-central instance
- """
- def outer(f):
- @functools.wraps(f)
- def sync_wrapper(self, *args, **kwargs):
- if not hasattr(ZONE_LOCKS, 'held'):
- # Create the held set if necessary
- ZONE_LOCKS.held = set()
-
- zone_id = None
-
- if 'zone_id' in kwargs:
- zone_id = kwargs['zone_id']
- elif 'zone' in kwargs:
- zone_id = kwargs['zone'].id
- elif 'recordset' in kwargs:
- zone_id = kwargs['recordset'].zone_id
- elif 'record' in kwargs:
- zone_id = kwargs['record'].zone_id
-
- # The various objects won't always have an ID set, we should
- # attempt to locate an Object containing the ID.
- if zone_id is None:
- for arg in itertools.chain(kwargs.values(), args):
- if isinstance(arg, objects.Zone):
- zone_id = arg.id
- if zone_id:
- break
- elif (isinstance(arg, objects.RecordSet) or
- isinstance(arg, objects.Record) or
- isinstance(arg, objects.ZoneTransferRequest) or
- isinstance(arg, objects.ZoneTransferAccept)):
- zone_id = arg.zone_id
- if zone_id:
- break
-
- # If we still don't have an ID, find the Nth argument as
- # defined by the zone_arg decorator option.
- if not zone_id and len(args) > zone_arg:
- zone_id = args[zone_arg]
- if isinstance(zone_id, objects.Zone):
- # If the value is a Zone object, extract it's ID.
- zone_id = zone_id.id
-
- if new_zone and not zone_id:
- lock_name = 'create-new-zone'
- elif not new_zone and zone_id:
- lock_name = 'zone-%s' % zone_id
- else:
- raise Exception('Failed to determine zone id for '
- 'synchronized operation')
-
- if zone_id in ZONE_LOCKS.held:
- return f(self, *args, **kwargs)
-
- with self.coordination.get_lock(lock_name):
- try:
- ZONE_LOCKS.held.add(zone_id)
- return f(self, *args, **kwargs)
- finally:
- ZONE_LOCKS.held.remove(zone_id)
-
- sync_wrapper.__wrapped_function = f
- sync_wrapper.__wrapper_name = 'synchronized_zone'
- return sync_wrapper
-
- return outer
-
-
-def notification(notification_type):
- def outer(f):
- @functools.wraps(f)
- def notification_wrapper(self, *args, **kwargs):
- if not hasattr(NOTIFICATION_BUFFER, 'queue'):
- # Create the notifications queue if necessary
- NOTIFICATION_BUFFER.stack = 0
- NOTIFICATION_BUFFER.queue = collections.deque()
-
- NOTIFICATION_BUFFER.stack += 1
-
- try:
- # Find the context argument
- context = dcontext.DesignateContext.\
- get_context_from_function_and_args(f, args, kwargs)
-
- # Call the wrapped function
- result = f(self, *args, **kwargs)
-
- # Feed the args/result to a notification plugin
- # to determine what is emitted
- payloads = notifications.get_plugin().emit(
- notification_type, context, result, args, kwargs)
-
- # Enqueue the notification
- for payload in payloads:
- LOG.debug('Queueing notification for %(type)s ',
- {'type': notification_type})
- NOTIFICATION_BUFFER.queue.appendleft(
- (context, notification_type, payload,))
-
- return result
-
- finally:
- NOTIFICATION_BUFFER.stack -= 1
-
- if NOTIFICATION_BUFFER.stack == 0:
- LOG.debug('Emitting %(count)d notifications',
- {'count': len(NOTIFICATION_BUFFER.queue)})
- # Send the queued notifications, in order.
- for value in NOTIFICATION_BUFFER.queue:
- LOG.debug('Emitting %(type)s notification',
- {'type': value[1]})
- self.notifier.info(value[0], value[1], value[2])
-
- # Reset the queue
- NOTIFICATION_BUFFER.queue.clear()
-
- return notification_wrapper
- return outer
class Service(service.RPCService):
@@ -188,6 +56,9 @@ class Service(service.RPCService):
target = messaging.Target(version=RPC_API_VERSION)
def __init__(self):
+ self.zone_lock_local = lock.ZoneLockLocal()
+ self.notification_thread_local = notification.NotificationThreadLocal()
+
self._scheduler = None
self._storage = None
self._quota = None
@@ -196,11 +67,9 @@ class Service(service.RPCService):
self.service_name, cfg.CONF['service:central'].topic,
threads=cfg.CONF['service:central'].threads,
)
-
self.coordination = coordination.Coordination(
self.service_name, self.tg, grouping_enabled=False
)
-
self.network_api = network_api.get_network_api(cfg.CONF.network_api)
@property
@@ -713,7 +582,7 @@ class Service(service.RPCService):
# TLD Methods
@rpc.expected_exceptions()
- @notification('dns.tld.create')
+ @notification.notify_type('dns.tld.create')
@transaction
def create_tld(self, context, tld):
policy.check('create_tld', context)
@@ -738,7 +607,7 @@ class Service(service.RPCService):
return self.storage.get_tld(context, tld_id)
@rpc.expected_exceptions()
- @notification('dns.tld.update')
+ @notification.notify_type('dns.tld.update')
@transaction
def update_tld(self, context, tld):
target = {
@@ -751,7 +620,7 @@ class Service(service.RPCService):
return tld
@rpc.expected_exceptions()
- @notification('dns.tld.delete')
+ @notification.notify_type('dns.tld.delete')
@transaction
def delete_tld(self, context, tld_id):
policy.check('delete_tld', context, {'tld_id': tld_id})
@@ -762,7 +631,7 @@ class Service(service.RPCService):
# TSIG Key Methods
@rpc.expected_exceptions()
- @notification('dns.tsigkey.create')
+ @notification.notify_type('dns.tsigkey.create')
@transaction
def create_tsigkey(self, context, tsigkey):
policy.check('create_tsigkey', context)
@@ -788,7 +657,7 @@ class Service(service.RPCService):
return self.storage.get_tsigkey(context, tsigkey_id)
@rpc.expected_exceptions()
- @notification('dns.tsigkey.update')
+ @notification.notify_type('dns.tsigkey.update')
@transaction
def update_tsigkey(self, context, tsigkey):
target = {
@@ -803,7 +672,7 @@ class Service(service.RPCService):
return tsigkey
@rpc.expected_exceptions()
- @notification('dns.tsigkey.delete')
+ @notification.notify_type('dns.tsigkey.delete')
@transaction
def delete_tsigkey(self, context, tsigkey_id):
policy.check('delete_tsigkey', context, {'tsigkey_id': tsigkey_id})
@@ -862,9 +731,9 @@ class Service(service.RPCService):
return pool.ns_records
@rpc.expected_exceptions()
- @notification('dns.domain.create')
- @notification('dns.zone.create')
- @synchronized_zone(new_zone=True)
+ @notification.notify_type('dns.domain.create')
+ @notification.notify_type('dns.zone.create')
+ @lock.synchronized_zone(new_zone=True)
def create_zone(self, context, zone):
"""Create zone: perform checks and then call _create_zone()
"""
@@ -1060,9 +929,9 @@ class Service(service.RPCService):
sort_key, sort_dir)
@rpc.expected_exceptions()
- @notification('dns.domain.update')
- @notification('dns.zone.update')
- @synchronized_zone()
+ @notification.notify_type('dns.domain.update')
+ @notification.notify_type('dns.zone.update')
+ @lock.synchronized_zone()
def update_zone(self, context, zone, increment_serial=True):
"""Update zone. Perform checks and then call _update_zone()
@@ -1134,9 +1003,9 @@ class Service(service.RPCService):
return zone
@rpc.expected_exceptions()
- @notification('dns.domain.delete')
- @notification('dns.zone.delete')
- @synchronized_zone()
+ @notification.notify_type('dns.domain.delete')
+ @notification.notify_type('dns.zone.delete')
+ @lock.synchronized_zone()
def delete_zone(self, context, zone_id):
"""Delete or abandon a zone
On abandon, delete the zone from the DB immediately.
@@ -1294,8 +1163,8 @@ class Service(service.RPCService):
# RecordSet Methods
@rpc.expected_exceptions()
- @notification('dns.recordset.create')
- @synchronized_zone()
+ @notification.notify_type('dns.recordset.create')
+ @lock.synchronized_zone()
def create_recordset(self, context, zone_id, recordset,
increment_serial=True):
zone = self.storage.get_zone(context, zone_id)
@@ -1467,8 +1336,8 @@ class Service(service.RPCService):
recordsets=recordsets)
@rpc.expected_exceptions()
- @notification('dns.recordset.update')
- @synchronized_zone()
+ @notification.notify_type('dns.recordset.update')
+ @lock.synchronized_zone()
def update_recordset(self, context, recordset, increment_serial=True):
zone_id = recordset.obj_get_original_value('zone_id')
zone = self.storage.get_zone(context, zone_id)
@@ -1550,8 +1419,8 @@ class Service(service.RPCService):
return recordset, zone
@rpc.expected_exceptions()
- @notification('dns.recordset.delete')
- @synchronized_zone()
+ @notification.notify_type('dns.recordset.delete')
+ @lock.synchronized_zone()
def delete_recordset(self, context, zone_id, recordset_id,
increment_serial=True):
zone = self.storage.get_zone(context, zone_id)
@@ -2049,7 +1918,7 @@ class Service(service.RPCService):
# Blacklisted zones
@rpc.expected_exceptions()
- @notification('dns.blacklist.create')
+ @notification.notify_type('dns.blacklist.create')
@transaction
def create_blacklist(self, context, blacklist):
policy.check('create_blacklist', context)
@@ -2078,7 +1947,7 @@ class Service(service.RPCService):
return blacklists
@rpc.expected_exceptions()
- @notification('dns.blacklist.update')
+ @notification.notify_type('dns.blacklist.update')
@transaction
def update_blacklist(self, context, blacklist):
target = {
@@ -2091,7 +1960,7 @@ class Service(service.RPCService):
return blacklist
@rpc.expected_exceptions()
- @notification('dns.blacklist.delete')
+ @notification.notify_type('dns.blacklist.delete')
@transaction
def delete_blacklist(self, context, blacklist_id):
policy.check('delete_blacklist', context)
@@ -2102,7 +1971,7 @@ class Service(service.RPCService):
# Server Pools
@rpc.expected_exceptions()
- @notification('dns.pool.create')
+ @notification.notify_type('dns.pool.create')
@transaction
def create_pool(self, context, pool):
# Verify that there is a tenant_id
@@ -2141,7 +2010,7 @@ class Service(service.RPCService):
return self.storage.get_pool(context, pool_id)
@rpc.expected_exceptions()
- @notification('dns.pool.update')
+ @notification.notify_type('dns.pool.update')
@transaction
def update_pool(self, context, pool):
policy.check('update_pool', context)
@@ -2202,7 +2071,7 @@ class Service(service.RPCService):
return updated_pool
@rpc.expected_exceptions()
- @notification('dns.pool.delete')
+ @notification.notify_type('dns.pool.delete')
@transaction
def delete_pool(self, context, pool_id):
@@ -2225,10 +2094,10 @@ class Service(service.RPCService):
# Pool Manager Integration
@rpc.expected_exceptions()
- @notification('dns.domain.update')
- @notification('dns.zone.update')
+ @notification.notify_type('dns.domain.update')
+ @notification.notify_type('dns.zone.update')
@transaction
- @synchronized_zone()
+ @lock.synchronized_zone()
def update_status(self, context, zone_id, status, serial, action=None):
"""
:param context: Security context information.
@@ -2356,7 +2225,7 @@ class Service(service.RPCService):
return ''.join(sysrand.choice(chars) for _ in range(size))
@rpc.expected_exceptions()
- @notification('dns.zone_transfer_request.create')
+ @notification.notify_type('dns.zone_transfer_request.create')
@transaction
def create_zone_transfer_request(self, context, zone_transfer_request):
@@ -2427,7 +2296,7 @@ class Service(service.RPCService):
return requests
@rpc.expected_exceptions()
- @notification('dns.zone_transfer_request.update')
+ @notification.notify_type('dns.zone_transfer_request.update')
@transaction
def update_zone_transfer_request(self, context, zone_transfer_request):
@@ -2449,7 +2318,7 @@ class Service(service.RPCService):
return request
@rpc.expected_exceptions()
- @notification('dns.zone_transfer_request.delete')
+ @notification.notify_type('dns.zone_transfer_request.delete')
@transaction
def delete_zone_transfer_request(self, context, zone_transfer_request_id):
# Get zone transfer request
@@ -2469,7 +2338,7 @@ class Service(service.RPCService):
zone_transfer_request_id)
@rpc.expected_exceptions()
- @notification('dns.zone_transfer_accept.create')
+ @notification.notify_type('dns.zone_transfer_accept.create')
@transaction
def create_zone_transfer_accept(self, context, zone_transfer_accept):
elevated_context = context.elevated(all_tenants=True)
@@ -2571,7 +2440,7 @@ class Service(service.RPCService):
# Zone Import Methods
@rpc.expected_exceptions()
- @notification('dns.zone_import.create')
+ @notification.notify_type('dns.zone_import.create')
def create_zone_import(self, context, request_body):
if policy.enforce_new_defaults():
target = {constants.RBAC_PROJECT_ID: context.project_id}
@@ -2667,7 +2536,7 @@ class Service(service.RPCService):
self.update_zone_import(context, zone_import)
@rpc.expected_exceptions()
- @notification('dns.zone_import.update')
+ @notification.notify_type('dns.zone_import.update')
def update_zone_import(self, context, zone_import):
if policy.enforce_new_defaults():
target = {constants.RBAC_PROJECT_ID: zone_import.tenant_id}
@@ -2710,7 +2579,7 @@ class Service(service.RPCService):
return self.storage.get_zone_import(context, zone_import_id)
@rpc.expected_exceptions()
- @notification('dns.zone_import.delete')
+ @notification.notify_type('dns.zone_import.delete')
@transaction
def delete_zone_import(self, context, zone_import_id):
@@ -2733,7 +2602,7 @@ class Service(service.RPCService):
# Zone Export Methods
@rpc.expected_exceptions()
- @notification('dns.zone_export.create')
+ @notification.notify_type('dns.zone_export.create')
def create_zone_export(self, context, zone_id):
# Try getting the zone to ensure it exists
zone = self.storage.get_zone(context, zone_id)
@@ -2797,7 +2666,7 @@ class Service(service.RPCService):
return self.storage.get_zone_export(context, zone_export_id)
@rpc.expected_exceptions()
- @notification('dns.zone_export.update')
+ @notification.notify_type('dns.zone_export.update')
def update_zone_export(self, context, zone_export):
if policy.enforce_new_defaults():
@@ -2810,7 +2679,7 @@ class Service(service.RPCService):
return self.storage.update_zone_export(context, zone_export)
@rpc.expected_exceptions()
- @notification('dns.zone_export.delete')
+ @notification.notify_type('dns.zone_export.delete')
@transaction
def delete_zone_export(self, context, zone_export_id):
diff --git a/designate/common/decorators/__init__.py b/designate/common/decorators/__init__.py
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/designate/common/decorators/__init__.py
diff --git a/designate/common/decorators/lock.py b/designate/common/decorators/lock.py
new file mode 100644
index 00000000..f633fa4d
--- /dev/null
+++ b/designate/common/decorators/lock.py
@@ -0,0 +1,107 @@
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import functools
+import itertools
+import threading
+
+from oslo_log import log as logging
+
+from designate import objects
+
+LOG = logging.getLogger(__name__)
+
+
+class ZoneLockLocal(threading.local):
+ def __init__(self):
+ super(ZoneLockLocal, self).__init__()
+ self._held = set()
+
+ def hold(self, name):
+ self._held.add(name)
+
+ def release(self, name):
+ self._held.remove(name)
+
+ def has_lock(self, name):
+ return name in self._held
+
+
+def extract_zone_id(args, kwargs):
+ zone_id = None
+
+ if 'zone_id' in kwargs:
+ zone_id = kwargs['zone_id']
+ elif 'zone' in kwargs:
+ zone_id = kwargs['zone'].id
+ elif 'recordset' in kwargs:
+ zone_id = kwargs['recordset'].zone_id
+ elif 'record' in kwargs:
+ zone_id = kwargs['record'].zone_id
+
+ if not zone_id:
+ for arg in itertools.chain(args, kwargs.values()):
+ if not isinstance(arg, objects.DesignateObject):
+ continue
+ if isinstance(arg, objects.Zone):
+ zone_id = arg.id
+ if zone_id:
+ break
+ elif isinstance(arg, (objects.RecordSet,
+ objects.Record,
+ objects.ZoneTransferRequest,
+ objects.ZoneTransferAccept)):
+ zone_id = arg.zone_id
+ if zone_id:
+ break
+
+ if not zone_id and len(args) > 1:
+ arg = args[1]
+ if isinstance(arg, str):
+ zone_id = arg
+        elif isinstance(arg, objects.Zone):
+ zone_id = arg.id
+
+ return zone_id
+
+
+def synchronized_zone(new_zone=False):
+ """Ensures only a single operation is in progress for each zone
+
+ A Decorator which ensures only a single operation can be happening
+ on a single zone at once, within the current designate-central instance
+ """
+ def outer(f):
+ @functools.wraps(f)
+ def sync_wrapper(cls, *args, **kwargs):
+ if new_zone is True:
+ lock_name = 'create-new-zone'
+ else:
+ zone_id = extract_zone_id(args, kwargs)
+ if zone_id:
+ lock_name = 'zone-%s' % zone_id
+ else:
+ raise Exception('Failed to determine zone id for '
+ 'synchronized operation')
+
+ if cls.zone_lock_local.has_lock(lock_name):
+ return f(cls, *args, **kwargs)
+
+ with cls.coordination.get_lock(lock_name):
+ try:
+ cls.zone_lock_local.hold(lock_name)
+ return f(cls, *args, **kwargs)
+ finally:
+ cls.zone_lock_local.release(lock_name)
+
+ return sync_wrapper
+ return outer
diff --git a/designate/common/decorators/notification.py b/designate/common/decorators/notification.py
new file mode 100644
index 00000000..c43a92bd
--- /dev/null
+++ b/designate/common/decorators/notification.py
@@ -0,0 +1,90 @@
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import collections
+import functools
+import itertools
+import threading
+
+from oslo_log import log as logging
+
+from designate import context as designate_context
+from designate import notifications
+
+LOG = logging.getLogger(__name__)
+
+
+class NotificationThreadLocal(threading.local):
+ def __init__(self):
+ super(NotificationThreadLocal, self).__init__()
+ self.stack = 0
+ self.queue = collections.deque()
+
+ def reset_queue(self):
+ self.queue.clear()
+
+
+def notify_type(notification_type):
+ def outer(f):
+ @functools.wraps(f)
+ def notification_wrapper(cls, *args, **kwargs):
+ cls.notification_thread_local.stack += 1
+
+ context = None
+ for arg in itertools.chain(args, kwargs.values()):
+ if isinstance(arg, designate_context.DesignateContext):
+ context = arg
+ break
+
+ try:
+ result = f(cls, *args, **kwargs)
+
+ payloads = notifications.get_plugin().emit(
+ notification_type, context, result, args, kwargs
+ )
+ for payload in payloads:
+ LOG.debug(
+ 'Queueing notification for %(type)s',
+ {
+ 'type': notification_type
+ }
+ )
+ cls.notification_thread_local.queue.appendleft(
+ (context, notification_type, payload,)
+ )
+
+ return result
+
+ finally:
+ cls.notification_thread_local.stack -= 1
+
+ if cls.notification_thread_local.stack == 0:
+ LOG.debug(
+ 'Emitting %(count)d notifications',
+ {
+ 'count': len(cls.notification_thread_local.queue)
+ }
+ )
+
+ for message in cls.notification_thread_local.queue:
+ LOG.debug(
+ 'Emitting %(type)s notification',
+ {
+ 'type': message[1]
+ }
+ )
+ cls.notifier.info(message[0], message[1], message[2])
+
+ cls.notification_thread_local.reset_queue()
+
+ return notification_wrapper
+ return outer
diff --git a/designate/common/decorators/rpc.py b/designate/common/decorators/rpc.py
new file mode 100644
index 00000000..69cad608
--- /dev/null
+++ b/designate/common/decorators/rpc.py
@@ -0,0 +1,49 @@
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import functools
+import threading
+
+from oslo_messaging.rpc import dispatcher as rpc_dispatcher
+
+import designate.exceptions
+
+
+class ExceptionThreadLocal(threading.local):
+ def __init__(self):
+ super(ExceptionThreadLocal, self).__init__()
+ self.depth = 0
+
+ def reset_depth(self):
+ self.depth = 0
+
+
+def expected_exceptions():
+ def outer(f):
+ @functools.wraps(f)
+ def exception_wrapper(cls, *args, **kwargs):
+ cls.exception_thread_local.depth += 1
+
+ # We only want to wrap the first function wrapped.
+ if cls.exception_thread_local.depth > 1:
+ return f(cls, *args, **kwargs)
+
+ try:
+ return f(cls, *args, **kwargs)
+ except designate.exceptions.DesignateException as e:
+ if e.expected:
+ raise rpc_dispatcher.ExpectedException()
+ raise
+ finally:
+ cls.exception_thread_local.reset_depth()
+ return exception_wrapper
+ return outer
diff --git a/designate/context.py b/designate/context.py
index 01ea0ce8..5e033446 100644
--- a/designate/context.py
+++ b/designate/context.py
@@ -14,7 +14,6 @@
# License for the specific language governing permissions and limitations
# under the License.
import copy
-import itertools
from keystoneauth1.access import service_catalog as ksa_service_catalog
from keystoneauth1 import plugin
@@ -145,21 +144,6 @@ class DesignateContext(context.RequestContext):
return cls(None, **kwargs)
- @classmethod
- def get_context_from_function_and_args(cls, function, args, kwargs):
- """
- Find an arg of type DesignateContext and return it.
-
- This is useful in a couple of decorators where we don't
- know much about the function we're wrapping.
- """
-
- for arg in itertools.chain(kwargs.values(), args):
- if isinstance(arg, cls):
- return arg
-
- return None
-
@property
def all_tenants(self):
return self._all_tenants
diff --git a/designate/objects/adapters/base.py b/designate/objects/adapters/base.py
index 9ba825fb..b2f65c78 100644
--- a/designate/objects/adapters/base.py
+++ b/designate/objects/adapters/base.py
@@ -169,8 +169,6 @@ class DesignateAdapter(object, metaclass=DesignateObjectAdapterMetaclass):
'Creating %s object with values %r',
output_object.obj_name(), values
)
- LOG.debug(output_object)
-
try:
adapter = cls.get_object_adapter(output_object, obj_format)
if isinstance(output_object, objects.ListObjectMixin):
@@ -285,7 +283,7 @@ class DesignateAdapter(object, metaclass=DesignateObjectAdapterMetaclass):
if error_keys:
raise exceptions.InvalidObject(
- 'Provided object does not match schema. Keys {0} are not '
+ 'Provided object does not match schema. Keys {0} are not '
'valid for {1}'.format(
error_keys, cls.MODIFICATIONS['options']['resource_name']
)
diff --git a/designate/objects/base.py b/designate/objects/base.py
index 4a7c5927..876cd95e 100644
--- a/designate/objects/base.py
+++ b/designate/objects/base.py
@@ -12,6 +12,7 @@
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
+
from oslo_log import log as logging
from oslo_utils import excutils
from oslo_versionedobjects import base
@@ -19,11 +20,10 @@ from oslo_versionedobjects.base import VersionedObjectDictCompat as DictObjectMi
from oslo_versionedobjects import exception
from oslo_versionedobjects import fields as ovoo_fields
-
from designate import exceptions
-from designate.i18n import _
from designate.objects import fields
+
LOG = logging.getLogger(__name__)
@@ -54,16 +54,15 @@ class DesignateObject(base.VersionedObject):
self.FIELDS = self.fields
@classmethod
- def _make_obj_str(cls, keys):
- msg = "<%(name)s" % {'name': cls.obj_name()}
- for key in keys:
- msg += " {0}:'%({0})s'".format(key)
+ def _make_obj_str(cls, data):
+ msg = "<%s" % cls.obj_name()
+ for key in cls.STRING_KEYS:
+ msg += " %s:'%s'" % (key, data.get(key))
msg += ">"
return msg
- def __str__(self):
- return (self._make_obj_str(self.STRING_KEYS)
- % self)
+ def __repr__(self):
+ return self._make_obj_str(self.to_dict())
def save(self, context):
pass
@@ -153,9 +152,6 @@ class DesignateObject(base.VersionedObject):
def __ne__(self, other):
return not (self.__eq__(other))
- def __repr__(self):
- return "OVO Objects"
-
# TODO(daidv): all of bellow functions should
# be removed when we completed migration.
def nested_sort(self, key, value):
@@ -360,11 +356,13 @@ class ListObjectMixin(base.ObjectListBase):
return list_
- def __str__(self):
- return (_("<%(type)s count:'%(count)s' object:'%(list_type)s'>")
- % {'count': len(self),
- 'type': self.LIST_ITEM_TYPE.obj_name(),
- 'list_type': self.obj_name()})
+ def __repr__(self):
+ return ("<%(type)s count:'%(count)s' object:'%(list_type)s'>" %
+ {
+ 'type': self.LIST_ITEM_TYPE.obj_name(),
+ 'count': len(self),
+ 'list_type': self.obj_name()
+ })
def __iter__(self):
"""List iterator interface"""
diff --git a/designate/objects/record.py b/designate/objects/record.py
index 6c9698ff..9e727424 100644
--- a/designate/objects/record.py
+++ b/designate/objects/record.py
@@ -66,11 +66,11 @@ class Record(base.DesignateObject, base.PersistentObjectMixin,
'id', 'recordset_id', 'data'
]
- def __str__(self):
+ def __repr__(self):
record = self.to_dict()
- record['data'] = record['data'][:35]
- return (self._make_obj_str(self.STRING_KEYS)
- % record)
+ if 'data' in record:
+ record['data'] = record['data'][:35]
+ return self._make_obj_str(record)
@base.DesignateRegistry.register
diff --git a/designate/objects/recordset.py b/designate/objects/recordset.py
index e2f1ffbc..1f5d7feb 100755
--- a/designate/objects/recordset.py
+++ b/designate/objects/recordset.py
@@ -151,11 +151,11 @@ class RecordSet(base.DesignateObject, base.DictObjectMixin,
for record in old_records:
record_obj = record_cls()
try:
- record_obj._from_string(record.data)
- # The _from_string() method will throw a ValueError if there is not
+ record_obj.from_string(record.data)
+ # The from_string() method will throw a ValueError if there is not
# enough data blobs
except ValueError as e:
- # Something broke in the _from_string() method
+ # Something broke in the from_string() method
# Fake a correct looking ValidationError() object
e = ValidationError()
e.path = ['records', i]
diff --git a/designate/objects/rrdata_a.py b/designate/objects/rrdata_a.py
index bbe298e0..d34bd029 100644
--- a/designate/objects/rrdata_a.py
+++ b/designate/objects/rrdata_a.py
@@ -28,10 +28,7 @@ class A(Record):
'address': fields.IPV4AddressField()
}
- def _to_string(self):
- return self.address
-
- def _from_string(self, value):
+ def from_string(self, value):
self.address = value
# The record type is defined in the RFC. This will be used when the record
diff --git a/designate/objects/rrdata_aaaa.py b/designate/objects/rrdata_aaaa.py
index b2073907..f3127231 100644
--- a/designate/objects/rrdata_aaaa.py
+++ b/designate/objects/rrdata_aaaa.py
@@ -28,10 +28,7 @@ class AAAA(Record):
'address': fields.IPV6AddressField()
}
- def _to_string(self):
- return self.address
-
- def _from_string(self, value):
+ def from_string(self, value):
self.address = value
# The record type is defined in the RFC. This will be used when the record
diff --git a/designate/objects/rrdata_caa.py b/designate/objects/rrdata_caa.py
index 4b141b78..f5294b29 100644
--- a/designate/objects/rrdata_caa.py
+++ b/designate/objects/rrdata_caa.py
@@ -30,11 +30,8 @@ class CAA(Record):
'prpt': fields.CaaPropertyField()
}
- def _to_string(self):
- return ("%(flag)s %(prpt)s" % self)
-
- def _from_string(self, v):
- flags, prpt = v.split(' ', 1)
+ def from_string(self, value):
+ flags, prpt = value.split(' ', 1)
self.flags = int(flags)
self.prpt = prpt
diff --git a/designate/objects/rrdata_cert.py b/designate/objects/rrdata_cert.py
index d0b27ed9..e9f9f05c 100644
--- a/designate/objects/rrdata_cert.py
+++ b/designate/objects/rrdata_cert.py
@@ -92,11 +92,8 @@ class CERT(Record):
raise ValueError('Cert certificate is not valid.')
return certificate
- def _to_string(self):
- return '%(cert_type)s %(key_tag)s %(cert_algo)s %(certificate)s' % self
-
- def _from_string(self, v):
- cert_type, key_tag, cert_algo, certificate = v.split(' ', 3)
+ def from_string(self, value):
+ cert_type, key_tag, cert_algo, certificate = value.split(' ', 3)
self.cert_type = self.validate_cert_type(cert_type)
self.key_tag = int(key_tag)
diff --git a/designate/objects/rrdata_cname.py b/designate/objects/rrdata_cname.py
index b876585a..8123babc 100644
--- a/designate/objects/rrdata_cname.py
+++ b/designate/objects/rrdata_cname.py
@@ -28,10 +28,7 @@ class CNAME(Record):
'cname': fields.DomainField(maxLength=255)
}
- def _to_string(self):
- return self.cname
-
- def _from_string(self, value):
+ def from_string(self, value):
self.cname = value
# The record type is defined in the RFC. This will be used when the record
diff --git a/designate/objects/rrdata_mx.py b/designate/objects/rrdata_mx.py
index caf6dedb..7d924358 100644
--- a/designate/objects/rrdata_mx.py
+++ b/designate/objects/rrdata_mx.py
@@ -29,10 +29,7 @@ class MX(Record):
'exchange': fields.StringFields(maxLength=255),
}
- def _to_string(self):
- return '%(priority)s %(exchange)s' % self
-
- def _from_string(self, value):
+ def from_string(self, value):
priority, exchange = value.split(' ')
if repr(int(priority)) != priority:
diff --git a/designate/objects/rrdata_naptr.py b/designate/objects/rrdata_naptr.py
index cbc22231..ff6485e1 100644
--- a/designate/objects/rrdata_naptr.py
+++ b/designate/objects/rrdata_naptr.py
@@ -35,12 +35,10 @@ class NAPTR(Record):
'replacement': fields.DomainField(maxLength=255)
}
- def _to_string(self):
- return ("%(order)s %(preference)s %(flags)s %(service)s %(regexp)s "
- "%(replacement)s" % self)
-
- def _from_string(self, v):
- order, preference, flags, service, regexp, replacement = v.split(' ')
+ def from_string(self, value):
+ order, preference, flags, service, regexp, replacement = (
+ value.split(' ')
+ )
self.order = int(order)
self.preference = int(preference)
self.flags = flags
diff --git a/designate/objects/rrdata_ns.py b/designate/objects/rrdata_ns.py
index a411d218..d3cd2720 100644
--- a/designate/objects/rrdata_ns.py
+++ b/designate/objects/rrdata_ns.py
@@ -34,10 +34,7 @@ class NS(Record):
'name': fields.DomainField(),
}
- def _to_string(self):
- return self.nsdname
-
- def _from_string(self, value):
+ def from_string(self, value):
self.nsdname = value
# The record type is defined in the RFC. This will be used when the record
diff --git a/designate/objects/rrdata_ptr.py b/designate/objects/rrdata_ptr.py
index a3ae7e31..bb2564a4 100644
--- a/designate/objects/rrdata_ptr.py
+++ b/designate/objects/rrdata_ptr.py
@@ -28,10 +28,7 @@ class PTR(Record):
'ptrdname': fields.DomainField(maxLength=255)
}
- def _to_string(self):
- return self.ptrdname
-
- def _from_string(self, value):
+ def from_string(self, value):
self.ptrdname = value
# The record type is defined in the RFC. This will be used when the record
diff --git a/designate/objects/rrdata_soa.py b/designate/objects/rrdata_soa.py
index 72809b49..8bfe2c18 100644
--- a/designate/objects/rrdata_soa.py
+++ b/designate/objects/rrdata_soa.py
@@ -35,12 +35,10 @@ class SOA(Record):
'minimum': fields.IntegerFields(minimum=0, maximum=2147483647)
}
- def _to_string(self):
- return ("%(mname)s %(rname)s %(serial)s %(refresh)s %(retry)s "
- "%(expire)s %(minimum)s" % self)
-
- def _from_string(self, v):
- mname, rname, serial, refresh, retry, expire, minimum = v.split(' ')
+ def from_string(self, value):
+ mname, rname, serial, refresh, retry, expire, minimum = (
+ value.split(' ')
+ )
self.mname = mname
self.rname = rname
self.serial = int(serial)
diff --git a/designate/objects/rrdata_spf.py b/designate/objects/rrdata_spf.py
index f409991d..9755ef6a 100644
--- a/designate/objects/rrdata_spf.py
+++ b/designate/objects/rrdata_spf.py
@@ -28,10 +28,7 @@ class SPF(Record):
'txt_data': fields.StringFields()
}
- def _to_string(self):
- return self.txt_data
-
- def _from_string(self, value):
+ def from_string(self, value):
if not value.startswith('"') and not value.endswith('"'):
# value with spaces should be quoted as per RFC1035 5.1
for element in value:
diff --git a/designate/objects/rrdata_srv.py b/designate/objects/rrdata_srv.py
index 5a2d638b..4ecbcc64 100644
--- a/designate/objects/rrdata_srv.py
+++ b/designate/objects/rrdata_srv.py
@@ -37,10 +37,7 @@ class SRV(Record):
'name': fields.SRVField(maxLength=255, nullable=True)
}
- def _to_string(self):
- return "%(priority)s %(weight)s %(target)s %(port)s" % self
-
- def _from_string(self, value):
+ def from_string(self, value):
priority, weight, port, target = value.split(' ')
self.priority = int(priority)
self.weight = int(weight)
diff --git a/designate/objects/rrdata_sshfp.py b/designate/objects/rrdata_sshfp.py
index 2ab6bc69..2fe4078f 100644
--- a/designate/objects/rrdata_sshfp.py
+++ b/designate/objects/rrdata_sshfp.py
@@ -30,10 +30,7 @@ class SSHFP(Record):
'fingerprint': fields.Sshfp(nullable=True),
}
- def _to_string(self):
- return "%(algorithm)s %(fp_type)s %(fingerprint)s" % self
-
- def _from_string(self, value):
+ def from_string(self, value):
algorithm, fp_type, fingerprint = value.split(' ')
for value in {algorithm, fp_type}:
diff --git a/designate/objects/rrdata_txt.py b/designate/objects/rrdata_txt.py
index 654094ef..736ffc35 100644
--- a/designate/objects/rrdata_txt.py
+++ b/designate/objects/rrdata_txt.py
@@ -28,9 +28,6 @@ class TXT(Record):
'txt_data': fields.TxtField()
}
- def _to_string(self):
- return self.txt_data
-
@staticmethod
def _is_wrapped_in_double_quotes(value):
return value.startswith('"') and value.endswith('"')
@@ -71,7 +68,7 @@ class TXT(Record):
'Quotation marks should be escaped with backslash.'
)
- def _from_string(self, value):
+ def from_string(self, value):
if len(value) > 255:
# expecting record containing multiple strings as
# per rfc7208 3.3 and rfc1035 3.3.14
diff --git a/designate/rpc.py b/designate/rpc.py
index 51efeb71..48636ab9 100644
--- a/designate/rpc.py
+++ b/designate/rpc.py
@@ -11,8 +11,6 @@
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
-import functools
-import threading
from oslo_config import cfg
import oslo_messaging as messaging
@@ -40,7 +38,6 @@ __all__ = [
]
CONF = cfg.CONF
-EXPECTED_EXCEPTION = threading.local()
NOTIFICATION_TRANSPORT = None
NOTIFIER = None
TRANSPORT = None
@@ -237,27 +234,3 @@ def create_transport(url):
return messaging.get_rpc_transport(CONF,
url=url,
allowed_remote_exmods=exmods)
-
-
-def expected_exceptions():
- def outer(f):
- @functools.wraps(f)
- def exception_wrapper(self, *args, **kwargs):
- if not hasattr(EXPECTED_EXCEPTION, 'depth'):
- EXPECTED_EXCEPTION.depth = 0
- EXPECTED_EXCEPTION.depth += 1
-
- # We only want to wrap the first function wrapped.
- if EXPECTED_EXCEPTION.depth > 1:
- return f(self, *args, **kwargs)
-
- try:
- return f(self, *args, **kwargs)
- except designate.exceptions.DesignateException as e:
- if e.expected:
- raise rpc_dispatcher.ExpectedException()
- raise
- finally:
- EXPECTED_EXCEPTION.depth = 0
- return exception_wrapper
- return outer
diff --git a/designate/schema/format.py b/designate/schema/format.py
index bbed2017..72f968c4 100644
--- a/designate/schema/format.py
+++ b/designate/schema/format.py
@@ -13,10 +13,10 @@
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
+import ipaddress
import re
import jsonschema
-import netaddr
# NOTE(kiall): All of the below regular expressions are terminated with
@@ -59,9 +59,7 @@ def is_ipv4(instance):
return True
try:
- address = netaddr.IPAddress(instance, version=4)
- # netaddr happly accepts, and expands "127.0" into "127.0.0.0"
- if str(address) != instance:
+ if ipaddress.ip_address(instance).version != 4:
return False
except Exception:
return False
@@ -79,7 +77,8 @@ def is_ipv6(instance):
return True
try:
- netaddr.IPAddress(instance, version=6)
+ if ipaddress.ip_address(instance).version != 6:
+ return False
except Exception:
return False
diff --git a/designate/service.py b/designate/service.py
index ce84cbdb..0fec3e02 100644
--- a/designate/service.py
+++ b/designate/service.py
@@ -30,6 +30,7 @@ from oslo_service import sslutils
from oslo_service import wsgi
from oslo_utils import netutils
+from designate.common.decorators import rpc as rpc_decorator
from designate.common import profiler
import designate.conf
from designate.i18n import _
@@ -77,6 +78,7 @@ class RPCService(Service):
rpc_topic, self.name)
self.endpoints = [self]
+ self.exception_thread_local = rpc_decorator.ExceptionThreadLocal()
self.notifier = None
self.rpc_server = None
self.rpc_topic = rpc_topic
diff --git a/designate/sqlalchemy/base.py b/designate/sqlalchemy/base.py
index 905e4b40..d72f36ca 100644
--- a/designate/sqlalchemy/base.py
+++ b/designate/sqlalchemy/base.py
@@ -21,7 +21,7 @@ from oslo_db import exception as oslo_db_exception
from oslo_db.sqlalchemy import utils as oslodb_utils
from oslo_log import log as logging
from oslo_utils import timeutils
-from sqlalchemy import select, or_, between, func, distinct
+from sqlalchemy import select, or_, between, func, distinct, inspect
from designate import exceptions
from designate import objects
@@ -96,6 +96,9 @@ class SQLAlchemy(object, metaclass=abc.ABCMeta):
def rollback(self):
self.session.rollback()
+ def get_inspector(self):
+ return inspect(self.engine)
+
@staticmethod
def _apply_criterion(table, query, criterion):
if criterion is not None:
diff --git a/designate/sqlalchemy/types.py b/designate/sqlalchemy/types.py
index b576e8bf..6063454d 100644
--- a/designate/sqlalchemy/types.py
+++ b/designate/sqlalchemy/types.py
@@ -27,6 +27,7 @@ class UUID(TypeDecorator):
Copied verbatim from SQLAlchemy documentation.
"""
+ cache_ok = True
impl = CHAR
def load_dialect_impl(self, dialect):
diff --git a/designate/tests/test_central/test_decorator.py b/designate/tests/test_central/test_decorator.py
deleted file mode 100644
index 66472cca..00000000
--- a/designate/tests/test_central/test_decorator.py
+++ /dev/null
@@ -1,75 +0,0 @@
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-from unittest import mock
-
-from oslo_concurrency import lockutils
-from oslo_log import log as logging
-
-from designate.central import service
-from designate import exceptions
-from designate.objects import record
-from designate.objects import zone
-from designate.tests.test_central import CentralTestCase
-from designate import utils
-
-LOG = logging.getLogger(__name__)
-
-
-class FakeCoordination(object):
- def get_lock(self, name):
- return lockutils.lock(name)
-
-
-class CentralDecoratorTests(CentralTestCase):
- def test_synchronized_zone_exception_raised(self):
- @service.synchronized_zone()
- def mock_get_zone(cls, index, zone):
- self.assertEqual(service.ZONE_LOCKS.held, {zone.id})
- if index % 3 == 0:
- raise exceptions.ZoneNotFound()
-
- for index in range(9):
- try:
- mock_get_zone(mock.Mock(coordination=FakeCoordination()),
- index,
- zone.Zone(id=utils.generate_uuid()))
- except exceptions.ZoneNotFound:
- pass
-
- def test_synchronized_zone_recursive_decorator_call(self):
- @service.synchronized_zone()
- def mock_create_record(cls, context, record):
- self.assertEqual(service.ZONE_LOCKS.held, {record.zone_id})
- mock_get_zone(cls, context, zone.Zone(id=record.zone_id))
-
- @service.synchronized_zone()
- def mock_get_zone(cls, context, zone):
- self.assertEqual(service.ZONE_LOCKS.held, {zone.id})
-
- mock_create_record(mock.Mock(coordination=FakeCoordination()),
- self.get_context(),
- record=record.Record(zone_id=utils.generate_uuid()))
- mock_get_zone(mock.Mock(coordination=FakeCoordination()),
- self.get_context(),
- zone=zone.Zone(id=utils.generate_uuid()))
-
- def test_synchronized_zone_raises_exception_when_no_zone_provided(self):
- @service.synchronized_zone(new_zone=False)
- def mock_not_creating_new_zone(cls, context, record):
- pass
-
- self.assertRaisesRegex(
- Exception,
- 'Failed to determine zone id for '
- 'synchronized operation',
- mock_not_creating_new_zone, self.get_context(), None
- )
diff --git a/designate/tests/test_storage/test_sqlalchemy.py b/designate/tests/test_storage/test_sqlalchemy.py
index c70dec91..9a4bf321 100644
--- a/designate/tests/test_storage/test_sqlalchemy.py
+++ b/designate/tests/test_storage/test_sqlalchemy.py
@@ -53,7 +53,8 @@ class SqlalchemyStorageTest(StorageTestCase, TestCase):
u'zone_transfer_requests',
u'zones'
]
- self.assertEqual(table_names, self.storage.engine.table_names())
+ inspector = self.storage.get_inspector()
+ self.assertEqual(table_names, inspector.get_table_names())
def test_schema_table_indexes(self):
indexes_t = self.storage.engine.execute("SELECT * FROM sqlite_master WHERE type = 'index';") # noqa
diff --git a/designate/tests/unit/objects/test_adapters.py b/designate/tests/unit/objects/test_adapters.py
index 8a094864..e6e96747 100644
--- a/designate/tests/unit/objects/test_adapters.py
+++ b/designate/tests/unit/objects/test_adapters.py
@@ -20,68 +20,125 @@ from oslo_log import log as logging
from oslo_utils import timeutils
import oslotest.base
+from designate import exceptions
from designate import objects
from designate.objects import adapters
from designate.objects import base
+from designate.objects import fields
LOG = logging.getLogger(__name__)
-class DesignateTestAdapter(adapters.DesignateAdapter):
- ADAPTER_OBJECT = objects.DesignateObject
- ADAPTER_FORMAT = 'TEST_API'
-
- MODIFICATIONS = {
- 'fields': {},
- 'options': {}
+@base.DesignateRegistry.register
+class DesignateTestObject(base.DictObjectMixin, base.PersistentObjectMixin,
+ base.DesignateObject):
+ def __init__(self, *args, **kwargs):
+ super(DesignateTestObject, self).__init__(*args, **kwargs)
+
+ fields = {
+ 'name': fields.StringFields(maxLength=255),
+ 'description': fields.StringFields(nullable=True, maxLength=255)
}
+ STRING_KEYS = [
+ 'id', 'name'
+ ]
+
@base.DesignateRegistry.register
-class DesignateTestPersistentObject(objects.DesignateObject,
- objects.base.PersistentObjectMixin):
+class DesignateTestPersistentObject(objects.base.PersistentObjectMixin,
+ objects.DesignateObject):
pass
+class DesignateTestAdapter(adapters.DesignateAdapter):
+ ADAPTER_OBJECT = DesignateTestObject
+ ADAPTER_FORMAT = 'TEST_API'
+
+ MODIFICATIONS = {
+ 'fields': {
+ 'id': {},
+ 'name': {
+ 'read_only': False
+ },
+ 'description': {
+ 'read_only': False
+ },
+ 'created_at': {},
+ 'updated_at': {},
+ },
+ 'options': {
+ 'links': True,
+ 'resource_name': 'test_obj',
+ 'collection_name': 'test_obj',
+ }
+ }
+
+
class DesignateDateTimeAdaptor(adapters.DesignateAdapter):
ADAPTER_OBJECT = DesignateTestPersistentObject
ADAPTER_FORMAT = 'TEST_API'
MODIFICATIONS = {
'fields': {
- "id": {},
- "created_at": {},
- "updated_at": {},
+ 'id': {},
+ 'created_at': {},
+ 'updated_at': {},
},
'options': {}
}
class DesignateAdapterTest(oslotest.base.BaseTestCase):
+ def test_parse(self):
+ test_obj = adapters.DesignateAdapter.parse(
+ 'TEST_API', {'name': 'example.test.'}, DesignateTestObject()
+ )
+
+ self.assertIsInstance(test_obj, DesignateTestObject)
+ self.assertEqual('example.test.', test_obj.name)
+
+ def test_parse_schema_does_not_match(self):
+ self.assertRaisesRegex(
+ exceptions.InvalidObject,
+ 'Provided object does not match schema. '
+ 'Keys \\[\'address\'\\] are not valid for test_obj',
+ adapters.DesignateAdapter.parse,
+ 'TEST_API', {'address': '192.168.0.1'}, DesignateTestObject(),
+ )
+
def test_get_object_adapter(self):
adapter = adapters.DesignateAdapter.get_object_adapter(
- objects.DesignateObject(), 'TEST_API'
+ DesignateTestObject(), 'TEST_API'
)
+
self.assertIsInstance(adapter(), DesignateTestAdapter)
def test_object_render(self):
- test_obj = adapters.DesignateAdapter.render('TEST_API',
- objects.DesignateObject())
- self.assertEqual(dict(), test_obj)
+ test_obj = adapters.DesignateAdapter.render(
+ 'TEST_API', DesignateTestObject()
+ )
+
+ self.assertEqual(
+ sorted([
+ 'created_at', 'description', 'id', 'name', 'updated_at',
+ ]),
+ sorted(test_obj)
+ )
def test_datetime_format(self):
now = timeutils.utcnow()
test_obj = DesignateTestPersistentObject()
test_obj.created_at = now
-
test_dict = adapters.DesignateAdapter.render('TEST_API', test_obj)
- datetime.datetime.strptime(
- test_dict['created_at'], '%Y-%m-%dT%H:%M:%S.%f'
+ self.assertEqual(
+ datetime.datetime.strptime(
+ test_dict['created_at'], '%Y-%m-%dT%H:%M:%S.%f'
+ ),
+ test_obj.created_at
)
- self.assertEqual(now, test_obj.created_at)
-
class RecordSetAPIv2AdapterTest(oslotest.base.BaseTestCase):
def test_get_path(self):
diff --git a/designate/tests/unit/objects/test_base.py b/designate/tests/unit/objects/test_base.py
index b6d8079e..26b05094 100644
--- a/designate/tests/unit/objects/test_base.py
+++ b/designate/tests/unit/objects/test_base.py
@@ -39,6 +39,10 @@ class TestObject(objects.DesignateObject):
'nested_list': fields.ObjectFields('TestObjectList', nullable=True),
}
+ STRING_KEYS = [
+ 'id', 'name'
+ ]
+
@base.DesignateRegistry.register
class TestObjectDict(TestObject, objects.DictObjectMixin):
@@ -64,6 +68,36 @@ class TestValidatableObject(objects.DesignateObject):
class DesignateObjectTest(oslotest.base.BaseTestCase):
+ def test_obj_to_repr(self):
+ obj = TestObject.from_dict({
+ 'id': 1, 'name': 'example'
+ })
+ self.assertEqual(
+ "<TestObject id:'1' name:'example'>",
+ repr(obj)
+ )
+
+ def test_obj_to_str(self):
+ obj = TestObject.from_dict({
+ 'id': 1, 'name': 'example'
+ })
+ self.assertEqual(
+ "<TestObject id:'1' name:'example'>", str(obj)
+ )
+
+ def test_empty_obj_to_str(self):
+ self.assertEqual(
+ "<TestObject id:'None' name:'None'>", str(TestObject())
+ )
+
+ def test_record_to_str(self):
+ obj = objects.Record.from_dict({
+ 'id': 1, 'recordset_id': '2', 'data': 'example'
+ })
+ self.assertEqual(
+ "<Record id:'1' recordset_id:'2' data:'example'>", str(obj)
+ )
+
def test_obj_cls_from_name(self):
cls = objects.DesignateObject.obj_cls_from_name('TestObject')
self.assertEqual(TestObject, cls)
diff --git a/designate/tests/unit/objects/test_recordset.py b/designate/tests/unit/objects/test_recordset.py
index 634221bf..4786801d 100644
--- a/designate/tests/unit/objects/test_recordset.py
+++ b/designate/tests/unit/objects/test_recordset.py
@@ -28,6 +28,8 @@ LOG = logging.getLogger(__name__)
def create_test_recordset():
record_set = objects.RecordSet(
+ id='f6a2cbd6-7f9a-4e0c-a00d-98a02aa73fc8',
+ zone_id='74038683-cab1-4056-bdf8-b39bd155ff21',
name='www.example.org.',
type='A',
records=objects.RecordList(objects=[
@@ -39,12 +41,20 @@ def create_test_recordset():
class RecordSetTest(oslotest.base.BaseTestCase):
-
def test_init(self):
record_set = create_test_recordset()
self.assertEqual('www.example.org.', record_set.name)
self.assertEqual('A', record_set.type)
+ def test_to_repr(self):
+ record_set = create_test_recordset()
+ self.assertEqual(
+ "<RecordSet id:'f6a2cbd6-7f9a-4e0c-a00d-98a02aa73fc8' type:'A' "
+ "name:'www.example.org.' "
+ "zone_id:'74038683-cab1-4056-bdf8-b39bd155ff21'>",
+ repr(record_set)
+ )
+
def test_not_managed(self):
record_set = create_test_recordset()
self.assertFalse(record_set.managed)
diff --git a/designate/tests/unit/objects/test_rrdata_a.py b/designate/tests/unit/objects/test_rrdata_a.py
index 1a03ffba..b8407115 100644
--- a/designate/tests/unit/objects/test_rrdata_a.py
+++ b/designate/tests/unit/objects/test_rrdata_a.py
@@ -23,6 +23,19 @@ LOG = logging.getLogger(__name__)
class RRDataATest(oslotest.base.BaseTestCase):
+ def test_to_repr(self):
+ recordset = objects.RecordSet(
+ name='www.example.test.', type='A',
+ records=objects.RecordList(objects=[
+ objects.Record(data='192.168.0.1'),
+ ])
+ )
+ recordset.validate()
+ self.assertEqual(
+ "<Record id:'None' recordset_id:'None' data:'192.168.0.1'>",
+ repr(recordset.records[0])
+ )
+
def test_valid_a_record(self):
recordset = objects.RecordSet(
name='www.example.test.', type='A',
diff --git a/designate/tests/unit/objects/test_rrdata_caa.py b/designate/tests/unit/objects/test_rrdata_caa.py
index 9c93979d..24643b70 100644
--- a/designate/tests/unit/objects/test_rrdata_caa.py
+++ b/designate/tests/unit/objects/test_rrdata_caa.py
@@ -24,14 +24,14 @@ LOG = logging.getLogger(__name__)
class RRDataCAATest(oslotest.base.BaseTestCase):
def test_parse_caa_issue(self):
caa_record = objects.CAA()
- caa_record._from_string('0 issue ca.example.net')
+ caa_record.from_string('0 issue ca.example.net')
self.assertEqual(0, caa_record.flags)
self.assertEqual('issue ca.example.net', caa_record.prpt)
def test_parse_caa_issuewild(self):
caa_record = objects.CAA()
- caa_record._from_string('1 issuewild ca.example.net; policy=ev')
+ caa_record.from_string('1 issuewild ca.example.net; policy=ev')
self.assertEqual(1, caa_record.flags)
self.assertEqual('issuewild ca.example.net; policy=ev',
@@ -39,19 +39,19 @@ class RRDataCAATest(oslotest.base.BaseTestCase):
def test_parse_caa_iodef(self):
caa_record = objects.CAA()
- caa_record._from_string('0 iodef https://example.net/')
+ caa_record.from_string('0 iodef https://example.net/')
self.assertEqual(0, caa_record.flags)
self.assertEqual('iodef https://example.net/', caa_record.prpt)
caa_record = objects.CAA()
- caa_record._from_string('0 iodef mailto:security@example.net')
+ caa_record.from_string('0 iodef mailto:security@example.net')
self.assertEqual(0, caa_record.flags)
self.assertEqual('iodef mailto:security@example.net', caa_record.prpt)
caa_record = objects.CAA()
- caa_record._from_string('0 iodef mailto:security+caa@example.net')
+ caa_record.from_string('0 iodef mailto:security+caa@example.net')
self.assertEqual(0, caa_record.flags)
self.assertEqual('iodef mailto:security+caa@example.net',
@@ -62,7 +62,7 @@ class RRDataCAATest(oslotest.base.BaseTestCase):
self.assertRaisesRegex(
ValueError,
"Property tag 1 2 must be 'issue', 'issuewild' or 'iodef'",
- caa_record._from_string, '0 1 2'
+ caa_record.from_string, '0 1 2'
)
def test_parse_caa_issue_host_too_long(self):
@@ -72,7 +72,7 @@ class RRDataCAATest(oslotest.base.BaseTestCase):
ValueError,
'Host aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa'
'aaaaaaaaaa is too long',
- caa_record._from_string, '0 issue %s.net' % hostname
+ caa_record.from_string, '0 issue %s.net' % hostname
)
def test_parse_caa_issue_domain_not_valid(self):
@@ -80,7 +80,7 @@ class RRDataCAATest(oslotest.base.BaseTestCase):
self.assertRaisesRegex(
ValueError,
'Domain abc. is invalid',
- caa_record._from_string, '0 issue abc.'
+ caa_record.from_string, '0 issue abc.'
)
def test_parse_caa_issue_key_value_not_valid(self):
@@ -88,7 +88,7 @@ class RRDataCAATest(oslotest.base.BaseTestCase):
self.assertRaisesRegex(
ValueError,
'def is not a valid key-value pair',
- caa_record._from_string, '0 issue abc;def'
+ caa_record.from_string, '0 issue abc;def'
)
def test_parse_caa_iodef_mail_host_too_long(self):
@@ -98,7 +98,7 @@ class RRDataCAATest(oslotest.base.BaseTestCase):
ValueError,
'Host aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa'
'aaaaaaaaaa is too long',
- caa_record._from_string, '0 iodef mailto:me@%s.net' % hostname
+ caa_record.from_string, '0 iodef mailto:me@%s.net' % hostname
)
def test_parse_caa_iodef_mail_domain_not_valid(self):
@@ -106,7 +106,7 @@ class RRDataCAATest(oslotest.base.BaseTestCase):
self.assertRaisesRegex(
ValueError,
'Domain example.net. is invalid',
- caa_record._from_string, '0 iodef mailto:me@example.net.'
+ caa_record.from_string, '0 iodef mailto:me@example.net.'
)
def test_parse_caa_iodef_http_host_too_long(self):
@@ -116,7 +116,7 @@ class RRDataCAATest(oslotest.base.BaseTestCase):
ValueError,
'Host aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa'
'aaaaaaaaaa is too long',
- caa_record._from_string, '0 iodef https://%s.net/' % hostname
+ caa_record.from_string, '0 iodef https://%s.net/' % hostname
)
def test_parse_caa_iodef_http_domain_not_valid(self):
@@ -124,7 +124,7 @@ class RRDataCAATest(oslotest.base.BaseTestCase):
self.assertRaisesRegex(
ValueError,
'Domain example.net. is invalid',
- caa_record._from_string, '0 iodef https://example.net./'
+ caa_record.from_string, '0 iodef https://example.net./'
)
def test_parse_caa_iodef_not_valid_url(self):
@@ -132,5 +132,5 @@ class RRDataCAATest(oslotest.base.BaseTestCase):
self.assertRaisesRegex(
ValueError,
'https:// is not a valid URL',
- caa_record._from_string, '0 iodef https://'
+ caa_record.from_string, '0 iodef https://'
)
diff --git a/designate/tests/unit/objects/test_rrdata_cert.py b/designate/tests/unit/objects/test_rrdata_cert.py
index 030c5bd6..b6598679 100644
--- a/designate/tests/unit/objects/test_rrdata_cert.py
+++ b/designate/tests/unit/objects/test_rrdata_cert.py
@@ -25,7 +25,7 @@ LOG = logging.getLogger(__name__)
class RRDataCERTTest(oslotest.base.BaseTestCase):
def test_parse_cert(self):
cert_record = objects.CERT()
- cert_record._from_string(
+ cert_record.from_string(
'DPKIX 1 RSASHA256 KR1L0GbocaIOOim1+qdHtOSrDcOsGiI2NCcxuX2/Tqc='
)
@@ -42,7 +42,7 @@ class RRDataCERTTest(oslotest.base.BaseTestCase):
self.assertRaisesRegex(
ValueError,
'Cert type value should be between 0 and 65535',
- cert_record._from_string,
+ cert_record.from_string,
'99999 1 RSASHA256 KR1L0GbocaIOOim1+qdHtOSrDcOsGiI2NCcxuX2/Tqc='
)
@@ -51,7 +51,7 @@ class RRDataCERTTest(oslotest.base.BaseTestCase):
self.assertRaisesRegex(
ValueError,
'Cert type is not valid Mnemonic.',
- cert_record._from_string,
+ cert_record.from_string,
'FAKETYPE 1 RSASHA256 KR1L0GbocaIOOim1+qdHtOSrDcOsGiI2NCcxuX2/Tqc='
)
@@ -60,7 +60,7 @@ class RRDataCERTTest(oslotest.base.BaseTestCase):
self.assertRaisesRegex(
ValueError,
'Cert algorithm value should be between 0 and 255',
- cert_record._from_string,
+ cert_record.from_string,
'DPKIX 1 256 KR1L0GbocaIOOim1+qdHtOSrDcOsGiI2NCcxuX2/Tqc='
)
@@ -69,7 +69,7 @@ class RRDataCERTTest(oslotest.base.BaseTestCase):
self.assertRaisesRegex(
ValueError,
'Cert algorithm is not valid Mnemonic.',
- cert_record._from_string,
+ cert_record.from_string,
'DPKIX 1 FAKESHA256 KR1L0GbocaIOOim1+qdHtOSrDcOsGiI2NCcxuX2/Tqc='
)
@@ -78,6 +78,6 @@ class RRDataCERTTest(oslotest.base.BaseTestCase):
self.assertRaisesRegex(
ValueError,
'Cert certificate is not valid.',
- cert_record._from_string,
+ cert_record.from_string,
'DPKIX 1 RSASHA256 KR1L0GbocaIOOim1+qdHtOSrDcOsGiI2NCcxuX2/Tqc'
)
diff --git a/designate/tests/unit/objects/test_rrdata_mx.py b/designate/tests/unit/objects/test_rrdata_mx.py
index d5b70823..1344bf26 100644
--- a/designate/tests/unit/objects/test_rrdata_mx.py
+++ b/designate/tests/unit/objects/test_rrdata_mx.py
@@ -25,7 +25,7 @@ LOG = logging.getLogger(__name__)
class RRDataMXTest(oslotest.base.BaseTestCase):
def test_parse_mx(self):
mx_record = objects.MX()
- mx_record._from_string('0 mail.example.org.')
+ mx_record.from_string('0 mail.example.org.')
self.assertEqual(0, mx_record.priority)
self.assertEqual('mail.example.org.', mx_record.exchange)
diff --git a/designate/tests/unit/objects/test_rrdata_naptr.py b/designate/tests/unit/objects/test_rrdata_naptr.py
index 81223a20..4759ca3d 100644
--- a/designate/tests/unit/objects/test_rrdata_naptr.py
+++ b/designate/tests/unit/objects/test_rrdata_naptr.py
@@ -24,7 +24,7 @@ LOG = logging.getLogger(__name__)
class RRDataNAPTRTest(oslotest.base.BaseTestCase):
def test_parse_naptr(self):
naptr_record = objects.NAPTR()
- naptr_record._from_string(
+ naptr_record.from_string(
'0 0 S SIP+D2U !^.*$!sip:customer-service@example.com! _sip._udp.example.com.') # noqa
self.assertEqual(0, naptr_record.order)
diff --git a/designate/tests/unit/objects/test_rrdata_sshfp.py b/designate/tests/unit/objects/test_rrdata_sshfp.py
index eee2da6f..10a4fe0e 100644
--- a/designate/tests/unit/objects/test_rrdata_sshfp.py
+++ b/designate/tests/unit/objects/test_rrdata_sshfp.py
@@ -25,7 +25,7 @@ LOG = logging.getLogger(__name__)
class RRDataSSHTPTest(oslotest.base.BaseTestCase):
def test_parse_sshfp(self):
sshfp_record = objects.SSHFP()
- sshfp_record._from_string(
+ sshfp_record.from_string(
'0 0 72d30d211ce8c464de2811e534de23b9be9b4dc4')
self.assertEqual(0, sshfp_record.algorithm)
diff --git a/designate/tests/unit/objects/test_rrdata_txt.py b/designate/tests/unit/objects/test_rrdata_txt.py
index 55742664..76edfe6d 100644
--- a/designate/tests/unit/objects/test_rrdata_txt.py
+++ b/designate/tests/unit/objects/test_rrdata_txt.py
@@ -66,13 +66,13 @@ class RRDataTXTTest(oslotest.base.BaseTestCase):
ValueError,
"TXT record is missing a double quote either at beginning "
"or at end.",
- record._from_string,
+ record.from_string,
'"foo'
)
self.assertRaisesRegex(
ValueError,
"TXT record is missing a double quote either at beginning "
"or at end.",
- record._from_string,
+ record.from_string,
'foo"'
)
diff --git a/designate/tests/unit/test_central/test_basic.py b/designate/tests/unit/test_central/test_basic.py
index e4ec18ba..e9e7a6b9 100644
--- a/designate/tests/unit/test_central/test_basic.py
+++ b/designate/tests/unit/test_central/test_basic.py
@@ -392,7 +392,7 @@ class CentralServiceTestCase(CentralBasic):
def test_create_recordset_in_storage(self):
self.service._enforce_recordset_quota = mock.Mock()
- self.service._validate_recordset = mock.Mock()
+ self.service._validate_recordset = mock.Mock(spec=objects.RecordSet)
self.service.storage.create_recordset = mock.Mock(return_value='rs')
self.service._update_zone_in_storage = mock.Mock()
@@ -416,7 +416,7 @@ class CentralServiceTestCase(CentralBasic):
central_service.storage.create_recordset = mock.Mock(return_value='rs')
central_service._update_zone_in_storage = mock.Mock()
- recordset = mock.Mock()
+ recordset = mock.Mock(spec=objects.RecordSet)
recordset.obj_attr_is_set.return_value = True
recordset.records = [MockRecord()]
@@ -441,7 +441,7 @@ class CentralServiceTestCase(CentralBasic):
# NOTE(thirose): Since this is a race condition we assume that
# we will hit it if we try to do the operations in a loop 100 times.
for num in range(100):
- recordset = mock.Mock()
+ recordset = mock.Mock(spec=objects.RecordSet)
recordset.name = "b{}".format(num)
recordset.obj_attr_is_set.return_value = True
recordset.records = [MockRecord()]
@@ -1148,7 +1148,7 @@ class CentralZoneTestCase(CentralBasic):
def test_update_recordset_fail_on_changes(self):
self.service.storage.get_zone.return_value = RoObject()
- recordset = mock.Mock()
+ recordset = mock.Mock(spec=objects.RecordSet)
recordset.obj_get_original_value.return_value = '1'
recordset.obj_get_changes.return_value = ['tenant_id', 'foo']
@@ -1179,7 +1179,7 @@ class CentralZoneTestCase(CentralBasic):
self.service.storage.get_zone.return_value = RoObject(
action='DELETE',
)
- recordset = mock.Mock()
+ recordset = mock.Mock(spec=objects.RecordSet)
recordset.obj_get_changes.return_value = ['foo']
exc = self.assertRaises(rpc_dispatcher.ExpectedException,
@@ -1196,7 +1196,7 @@ class CentralZoneTestCase(CentralBasic):
tenant_id='2',
action='bogus',
)
- recordset = mock.Mock()
+ recordset = mock.Mock(spec=objects.RecordSet)
recordset.obj_get_changes.return_value = ['foo']
recordset.managed = True
self.context = mock.Mock()
@@ -1216,10 +1216,11 @@ class CentralZoneTestCase(CentralBasic):
tenant_id='2',
action='bogus',
)
- recordset = mock.Mock()
+ recordset = mock.Mock(spec=objects.RecordSet)
recordset.obj_get_changes.return_value = ['foo']
- recordset.obj_get_original_value.return_value =\
+ recordset.obj_get_original_value.return_value = (
'9c85d9b0-1e9d-4e99-aede-a06664f1af2e'
+ )
recordset.managed = False
self.service._update_recordset_in_storage = mock.Mock(
return_value=('x', 'y')
@@ -1239,7 +1240,7 @@ class CentralZoneTestCase(CentralBasic):
'recordset_id': '9c85d9b0-1e9d-4e99-aede-a06664f1af2e',
'project_id': '2'}, target)
- def test__update_recordset_in_storage(self):
+ def test_update_recordset_in_storage(self):
recordset = mock.Mock()
recordset.name = 'n'
recordset.type = 't'
@@ -1426,7 +1427,7 @@ class CentralZoneTestCase(CentralBasic):
self.assertTrue(
self.service._delete_recordset_in_storage.called)
- def test__delete_recordset_in_storage(self):
+ def test_delete_recordset_in_storage(self):
def mock_uds(c, zone, inc):
return zone
self.service._update_zone_in_storage = mock_uds
@@ -1730,7 +1731,7 @@ class CentralQuotaTest(unittest.TestCase):
service = Service()
service.storage.count_records.return_value = 10
- recordset = mock.Mock()
+ recordset = mock.Mock(spec=objects.RecordSet)
recordset.managed = False
recordset.records = ['1.1.1.%i' % (i + 1) for i in range(5)]
@@ -1801,7 +1802,7 @@ class CentralQuotaTest(unittest.TestCase):
1, 1,
]
- managed_recordset = mock.Mock()
+ managed_recordset = mock.Mock(spec=objects.RecordSet)
managed_recordset.managed = True
recordset_one_record = mock.Mock()
diff --git a/designate/tests/unit/test_central/test_lock_decorator.py b/designate/tests/unit/test_central/test_lock_decorator.py
new file mode 100644
index 00000000..c8d8058d
--- /dev/null
+++ b/designate/tests/unit/test_central/test_lock_decorator.py
@@ -0,0 +1,111 @@
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+from unittest import mock
+
+from oslo_concurrency import lockutils
+from oslo_log import log as logging
+import oslotest.base
+
+from designate.common.decorators import lock
+from designate import exceptions
+from designate.objects import record
+from designate.objects import zone
+from designate import utils
+
+LOG = logging.getLogger(__name__)
+
+
+class FakeCoordination:
+ def get_lock(self, name):
+ return lockutils.lock(name)
+
+
+class FakeService:
+ def __init__(self):
+ self.zone_lock_local = lock.ZoneLockLocal()
+ self.coordination = FakeCoordination()
+
+
+class CentralDecoratorTests(oslotest.base.BaseTestCase):
+ def setUp(self):
+ super().setUp()
+ self.context = mock.Mock()
+ self.service = FakeService()
+
+ def test_synchronized_zone_exception_raised(self):
+ @lock.synchronized_zone()
+ def mock_get_zone(cls, current_index, zone_obj):
+ self.assertEqual(
+ {'zone-%s' % zone_obj.id}, cls.zone_lock_local._held
+ )
+ if current_index % 3 == 0:
+ raise exceptions.ZoneNotFound()
+
+ for index in range(9):
+ try:
+ mock_get_zone(
+ self.service, index, zone.Zone(id=utils.generate_uuid())
+ )
+ except exceptions.ZoneNotFound:
+ pass
+
+ def test_synchronized_new_zone_with_recursion(self):
+ @lock.synchronized_zone(new_zone=True)
+ def mock_create_zone(cls, context):
+ self.assertEqual({'create-new-zone'}, cls.zone_lock_local._held)
+ mock_create_record(
+ cls, context, zone.Zone(id=utils.generate_uuid())
+ )
+
+ @lock.synchronized_zone()
+ def mock_create_record(cls, context, zone_obj):
+ self.assertIn('zone-%s' % zone_obj.id, cls.zone_lock_local._held)
+ self.assertIn('create-new-zone', cls.zone_lock_local._held)
+
+ mock_create_zone(
+ self.service, self.context
+ )
+
+ def test_synchronized_zone_recursive_decorator_call(self):
+ @lock.synchronized_zone()
+ def mock_create_record(cls, context, record_obj):
+ self.assertEqual(
+ {'zone-%s' % record_obj.zone_id}, cls.zone_lock_local._held
+ )
+ mock_get_zone(cls, context, zone.Zone(id=record_obj.zone_id))
+
+ @lock.synchronized_zone()
+ def mock_get_zone(cls, context, zone_obj):
+ self.assertEqual(
+ {'zone-%s' % zone_obj.id}, cls.zone_lock_local._held
+ )
+
+ mock_create_record(
+ self.service, self.context,
+ record_obj=record.Record(zone_id=utils.generate_uuid())
+ )
+ mock_get_zone(
+ self.service, self.context,
+ zone_obj=zone.Zone(id=utils.generate_uuid())
+ )
+
+ def test_synchronized_zone_raises_exception_when_no_zone_provided(self):
+ @lock.synchronized_zone(new_zone=False)
+ def mock_not_creating_new_zone(cls, context, record_obj):
+ pass
+
+ self.assertRaisesRegex(
+ Exception,
+ 'Failed to determine zone id for synchronized operation',
+ mock_not_creating_new_zone, self.service, mock.Mock(), None
+ )
diff --git a/designate/worker/service.py b/designate/worker/service.py
index b5d8a622..cf5a1a5d 100644
--- a/designate/worker/service.py
+++ b/designate/worker/service.py
@@ -21,9 +21,9 @@ import oslo_messaging as messaging
from designate import backend
from designate.central import rpcapi as central_api
+from designate.common.decorators import rpc
from designate.context import DesignateContext
from designate import exceptions
-from designate import rpc
from designate import service
from designate import storage
from designate.worker import processing
diff --git a/doc/ext/support_matrix.py b/doc/ext/support_matrix.py
index e0021b2a..0c84c24c 100644
--- a/doc/ext/support_matrix.py
+++ b/doc/ext/support_matrix.py
@@ -101,7 +101,7 @@ class SupportMatrixDirective(rst.Directive):
"support-matrix.ini")
rel_fpath, fpath = env.relfn2path(fname)
with open(fpath) as fp:
- cfg.readfp(fp)
+ cfg.read_file(fp)
# This ensures that the docs are rebuilt whenever the
# .ini file changes
diff --git a/etc/designate/rootwrap.conf.sample b/etc/designate/rootwrap.conf.sample
index 79bfb409..e5424354 100644
--- a/etc/designate/rootwrap.conf.sample
+++ b/etc/designate/rootwrap.conf.sample
@@ -25,3 +25,9 @@ syslog_log_facility=syslog
# INFO means log all usage
# ERROR means only log unsuccessful attempts
syslog_log_level=ERROR
+
+# Rootwrap daemon exits after this many seconds of inactivity
+daemon_timeout=600
+
+# Rootwrap daemon limits itself to that many file descriptors (Linux only)
+rlimit_nofile=1024
diff --git a/releasenotes/notes/remove-netaddr-requirement-ab9b9c2d15aa8e1c.yaml b/releasenotes/notes/remove-netaddr-requirement-ab9b9c2d15aa8e1c.yaml
new file mode 100644
index 00000000..fb810839
--- /dev/null
+++ b/releasenotes/notes/remove-netaddr-requirement-ab9b9c2d15aa8e1c.yaml
@@ -0,0 +1,5 @@
+---
+other:
+ - |
+ The netaddr python module has been removed as a Designate requirement. It
+ has been replaced with the python standard library 'ipaddress' module.
diff --git a/releasenotes/source/locale/en_GB/LC_MESSAGES/releasenotes.po b/releasenotes/source/locale/en_GB/LC_MESSAGES/releasenotes.po
index fc28c36c..06f76fa9 100644
--- a/releasenotes/source/locale/en_GB/LC_MESSAGES/releasenotes.po
+++ b/releasenotes/source/locale/en_GB/LC_MESSAGES/releasenotes.po
@@ -8,11 +8,11 @@ msgid ""
msgstr ""
"Project-Id-Version: Designate Release Notes\n"
"Report-Msgid-Bugs-To: \n"
-"POT-Creation-Date: 2022-05-27 18:46+0000\n"
+"POT-Creation-Date: 2022-06-25 06:02+0000\n"
"MIME-Version: 1.0\n"
"Content-Type: text/plain; charset=UTF-8\n"
"Content-Transfer-Encoding: 8bit\n"
-"PO-Revision-Date: 2022-05-30 09:36+0000\n"
+"PO-Revision-Date: 2022-07-19 09:55+0000\n"
"Last-Translator: Andi Chandler <andi@gowling.com>\n"
"Language-Team: English (United Kingdom)\n"
"Language: en_GB\n"
@@ -28,6 +28,9 @@ msgstr "1.0.2"
msgid "10.0.0"
msgstr "10.0.0"
+msgid "10.0.2-2"
+msgstr "10.0.2-2"
+
msgid "11.0.0"
msgstr "11.0.0"
@@ -37,23 +40,23 @@ msgstr "11.0.2"
msgid "12.0.0"
msgstr "12.0.0"
-msgid "12.0.1-8"
-msgstr "12.0.1-8"
+msgid "12.1.0"
+msgstr "12.1.0"
msgid "13.0.0"
msgstr "13.0.0"
-msgid "13.0.0-10"
-msgstr "13.0.0-10"
+msgid "13.0.1"
+msgstr "13.0.1"
msgid "14.0.0"
msgstr "14.0.0"
-msgid "14.0.0-34"
-msgstr "14.0.0-34"
+msgid "14.0.0-48"
+msgstr "14.0.0-48"
-msgid "14.0.0-6"
-msgstr "14.0.0-6"
+msgid "14.0.1"
+msgstr "14.0.1"
msgid "2.0.0"
msgstr "2.0.0"
@@ -194,9 +197,6 @@ msgstr ""
"use for AFXR requests. This key name is the name used to create the key in "
"powerdns, not the Designate UUID based ID for the key."
-msgid "Adds support for keystone default roles and scoped tokens."
-msgstr "Adds support for Keystone default roles and scoped tokens."
-
msgid ""
"All designate services will now report to designate-central when they are "
"running. This is implmented as a heartbeat reporting system. There is also a "
@@ -468,6 +468,9 @@ msgstr ""
"Fixes bug where requests to powerDNS fail if the DNS is configured for TLS "
"traffic."
+msgid "Fixes support for keystone default roles and scoped tokens."
+msgstr "Fixes support for Keystone default roles and scoped tokens."
+
msgid "Horizon Plugin moved out of tree"
msgstr "Horizon Plugin moved out of tree"
@@ -682,6 +685,15 @@ msgstr ""
"notifications now happen using the worker service."
msgid ""
+"Removed the ``monascastatsd`` based metrics solution as all calls using it "
+"has been changed or removed and designate is no longer tracking any metrics "
+"using the metrics endpoint."
+msgstr ""
+"Removed the ``monascastatsd`` based metrics solution as all calls using it "
+"has been changed or removed and Designate is no longer tracking any metrics "
+"using the metrics endpoint."
+
+msgid ""
"Removed the following unused central rpc calls. This should not impact "
"normal installations, but if these are used in any custom written backends "
"or plugins that you are using, you will need to update your code before "
@@ -782,6 +794,19 @@ msgstr ""
"of the PowerDNS 4 driver and is now being removed."
msgid ""
+"The ``SECONDARY zone`` RPC calls were moved from the ``mdns`` service to "
+"``worker`` service. When upgrading multi-controller deployments we recommend "
+"that you restart the ``central`` and ``worker`` services first to move the "
+"``SECONDARY zone`` calls to the ``worker``, and once both services has been "
+"upgraded go ahead and restart the ``mdns`` service."
+msgstr ""
+"The ``SECONDARY zone`` RPC calls were moved from the ``mdns`` service to "
+"``worker`` service. When upgrading multi-controller deployments we recommend "
+"that you restart the ``central`` and ``worker`` services first to move the "
+"``SECONDARY zone`` calls to the ``worker``, and once both services have been "
+"upgraded go ahead and restart the ``mdns`` service."
+
+msgid ""
"The ``[service:api] max_header_line`` parameter has been deprecated. This "
"parameter has hd no effect since the Train release. Use the ``[DEFAULT] "
"max_header_line`` parameter of the ``oslo.service`` library instead."
diff --git a/requirements.txt b/requirements.txt
index 6fa0ddf9..a2785914 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -12,14 +12,13 @@ Jinja2>=2.10 # BSD License (3 clause)
jsonschema>=3.2.0 # MIT
keystoneauth1>=3.4.0 # Apache-2.0
keystonemiddleware>=4.17.0 # Apache-2.0
-netaddr>=0.7.18 # BSD
oslo.config>=6.8.0 # Apache-2.0
oslo.concurrency>=4.2.0 # Apache-2.0
oslo.messaging>=12.4.0 # Apache-2.0
oslo.middleware>=3.31.0 # Apache-2.0
oslo.log>=4.3.0 # Apache-2.0
oslo.reports>=1.18.0 # Apache-2.0
-oslo.rootwrap>=5.8.0 # Apache-2.0
+oslo.rootwrap>=5.15.0 # Apache-2.0
oslo.serialization>=2.25.0 # Apache-2.0
oslo.service>=1.31.0 # Apache-2.0
oslo.upgradecheck>=1.3.0